diff --git a/.azure-pipelines/jobs/test.yml b/.azure-pipelines/jobs/test.yml index 150937ef4f..4c8fc42035 100644 --- a/.azure-pipelines/jobs/test.yml +++ b/.azure-pipelines/jobs/test.yml @@ -15,7 +15,7 @@ jobs: ${{ if eq(parameters.vmImage, 'vs2017-win2016') }}: # TODO remove once vs2017-win2016 has Python 3.7 Python37: - python.version: '>= 3.7.0-b2' + python.version: '>= 3.7.2' python.architecture: x64 ${{ if ne(parameters.vmImage, 'vs2017-win2016' )}}: Python37: diff --git a/Pipfile b/Pipfile index a8ddd5e6e7..8de9428f38 100644 --- a/Pipfile +++ b/Pipfile @@ -5,7 +5,7 @@ sphinx = "<=1.5.5" twine = "*" sphinx-click = "*" click = "*" -pytest-pypi = {path = "./tests/pytest-pypi", editable = true} +pytest_pypi = {path = "./tests/pytest-pypi", editable = true} stdeb = {version="*", markers="sys_platform == 'linux'"} black = {version="*", markers="python_version >= '3.6'"} pytz = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 1bdc1c2154..5938db73a4 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "d7119fe8fa7be8224ff46509352efbd76dd17accf6a57580dbaf5762e613468b" + "sha256": "0cdfbd95f33a9edb69dede4bc868f24f7b770ba6b06ce73f6de5f175544e996d" }, "pipfile-spec": 6, "requires": {}, @@ -52,10 +52,10 @@ }, "attrs": { "hashes": [ - "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", - "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb" + "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", + "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" ], - "version": "==18.2.0" + "version": "==19.1.0" }, "babel": { "hashes": [ @@ -119,6 +119,13 @@ "index": "pypi", "version": "==0.0.1" }, + "cached-property": { + "hashes": [ + "sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f", + "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504" + ], + "version": "==1.5.1" + }, "cerberus": { "hashes": [ "sha256:f5c2e048fb15ecb3c088d192164316093fcfa602a74b3386eefb2983aa7e800a" @@ -165,9 +172,9 @@ }, "cursor": { "hashes": [ - "sha256:7e728934f555a84a1c8b0850b66efcb580d092acc927b7d15dd43eb27dd4c4c5" + "sha256:33f279a17789c04efd27a92501a0dad62bb011f8a4cdff93867c798d26508940" ], - "version": "==1.3.1" + "version": "==1.3.4" }, "distlib": { "hashes": [ @@ -323,36 +330,36 @@ }, "markupsafe": { "hashes": [ - "sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", - "sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", - "sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", - "sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", - "sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", - "sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", - "sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", - "sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", - "sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", - "sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", - "sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", - "sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", - "sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", - "sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", - "sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", - 
"sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", - "sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", - "sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", - "sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", - "sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", - "sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", - "sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", - "sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", - "sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", - "sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", - "sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", - "sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", - "sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1" - ], - "version": "==1.1.0" + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" + ], + "version": "==1.1.1" }, "mccabe": { "hashes": [ @@ -374,9 +381,15 @@ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" ], - "markers": "python_version <= '2.7'", "version": "==5.0.0" }, + "orderedmultidict": { + "hashes": [ + 
"sha256:24e3b730cf84e4a6a68be5cc760864905cf66abc89851e724bd5b4e849eaa96b", + "sha256:b89895ba6438038d0bdf88020ceff876cf3eae0d5c66a69b526fab31125db2c5" + ], + "version": "==1.0" + }, "packaging": { "hashes": [ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", @@ -415,10 +428,10 @@ }, "pbr": { "hashes": [ - "sha256:a7953f66e1f82e4b061f43096a4bcc058f7d3d41de9b94ac871770e8bdd831a2", - "sha256:d717573351cfe09f49df61906cd272abaa759b3e91744396b804965ff7bff38b" + "sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843", + "sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824" ], - "version": "==5.1.2" + "version": "==5.1.3" }, "pep517": { "hashes": [ @@ -458,17 +471,17 @@ }, "pluggy": { "hashes": [ - "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", - "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" + "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", + "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" ], - "version": "==0.8.1" + "version": "==0.9.0" }, "py": { "hashes": [ - "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", - "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" + "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", + "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" ], - "version": "==1.7.0" + "version": "==1.8.0" }, "pycodestyle": { "hashes": [ @@ -479,10 +492,10 @@ }, "pyflakes": { "hashes": [ - "sha256:5e8c00e30c464c99e0b501dc160b13a14af7f27d4dffb529c556e30a159e231d", - "sha256:f277f9ca3e55de669fba45b7393a1449009cff5a37d1af10ebb76c52765269cd" + "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", + "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" ], - "version": "==2.1.0" + "version": "==2.1.1" }, "pygments": { "hashes": [ @@ -568,10 +581,10 @@ }, "requirementslib": { "hashes": [ - "sha256:c26feee79853dedddab550cf79fb2fa83b4bc1a16eab58f2c870e8314caa6cc5", - "sha256:d302b780afbd1d60f49d368b535929d8ff4b6d972797f3777c9560d48abdded7" + "sha256:04c19bfe6f2c4dbfe3cc5115744c2079b0f3ce52c61c9d130a1ccf2b9896f812", + "sha256:fdfc75d0ce418e80fa3b573ff078a5732ec1f11415afc2d4280809a1b5b2575d" ], - "version": "==1.4.0" + "version": "==1.4.2" }, "resolvelib": { "hashes": [ @@ -710,10 +723,10 @@ }, "virtualenv": { "hashes": [ - "sha256:8b9abfc51c38b70f61634bf265e5beacf6fae11fc25d355d1871f49b8e45f0db", - "sha256:cceab52aa7d4df1e1871a70236eb2b89fcfe29b6b43510d9738689787c513261" + "sha256:6aebaf4dd2568a0094225ebbca987859e369e3e5c22dc7d52e5406d504890417", + "sha256:984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39" ], - "version": "==16.4.0" + "version": "==16.4.3" }, "virtualenv-clone": { "hashes": [ @@ -728,10 +741,10 @@ "spinner" ], "hashes": [ - "sha256:510408ec63a4b423967fd630bf0885c8d6a1d5d126f8bb1be6aba86a0da5e815", - "sha256:fc5cca7a14e92feaa6f85dd91da74d834904280a96a21190aecb4cd1d1048e0e" + "sha256:1a3d16d541de7ff098037260506a9efc5f6967176137988bd2cbfdd13b240ba0", + "sha256:68896b279f64ff078e06ffd41f77181ef7cdedbeaa5f453cae3cfdd97d41dbcf" ], - "version": "==0.3.0" + "version": "==0.3.1" }, "webencodings": { "hashes": [ @@ -749,10 +762,10 @@ }, "wheel": { "hashes": [ - "sha256:12363e6df5678ecf9daf8429f06f97e7106e701405898f24318ce7f0b79c611a", - "sha256:b79ffea026bc0dbd940868347ae9eee36789b6496b6623bd2dec7c7c540a8f99" + 
"sha256:66a8fd76f28977bb664b098372daef2b27f60dc4d1688cfab7b37a09448f0e9d", + "sha256:8eb4a788b3aec8abf5ff68d4165441bc57420c9f64ca5f471f58c3969fe08668" ], - "version": "==0.33.0" + "version": "==0.33.1" }, "yaspin": { "hashes": [ diff --git a/peeps/PEEP-005.md b/peeps/PEEP-005.md new file mode 100644 index 0000000000..2cc0279f91 --- /dev/null +++ b/peeps/PEEP-005.md @@ -0,0 +1,65 @@ +# PEEP-005: Do Not Remove Entries from the Lockfile When Using `--keep-outdated` + +**PROPOSED** + +This PEEP describes a change that would retain entries in the Lockfile even if they were not returned during resolution when the user passes the `--keep-outdated` flag. + +☤ + +The `--keep-outdated` flag is currently provided by Pipenv for the purpose of holding back outdated dependencies (i.e. dependencies that are not newly introduced). This proposal attempts to identify the reasoning behind the flag and identifies a need for a project-wide scoping. Finally, this proposal outlines the expected behavior of `--keep-outdated` under the specified circumstances, as well as the required changes to achieve full implementation. + +## Retaining Outdated Dependencies + +The purpose of retaining outdated dependencies is to allow the user to introduce a new package to their environment with a minimal impact on their existing environment. In an effort to achieve this, `keep_outdated` was proposed as both a flag and a Pipfile setting [in this issue](https://github.com/pypa/pipenv/issues/1255#issuecomment-354585775), originally described as follows: + +> pipenv lock --keep-outdated to request a minimal update that only adjusts the lock file to account for Pipfile changes (additions, removals, and changes to version constraints)... and pipenv install --keep-outdated needed to request only the minimal changes required to satisfy the installation request + +However, the current implementation always fully re-locks, rather than only locking the new dependencies. As a result, dependencies in the `Pipfile.lock` with markers for a python version different from that of the running interpreter will be removed, even if they have nothing to do with the current changeset. For instance, say you have the following dependency in your `Pipfile.lock`: + +```json +{ + "default": { + "backports.weakref": { + "hashes": [...], + "version": "==1.5", + "markers": "python_version<='3.4'" + } + } +} +``` + +If this lockfile were to be re-generated with Python 3, even with `--keep-outdated`, this entry would be removed. This makes it very difficult to maintain lockfiles which are compatible across major python versions, yet all that would be required to correct this would be a tweak to the implementation of `keep-outdated`. I believe this was the goal to begin with, but I feel this behavior should be documented and clarified before moving forward. + +## Desired Behavior + +1. The only changes that should occur in `Pipfile.lock` when `--keep-outdated` is passed should be changes resulting from new packages added or pin changes in the project `Pipfile`; +2. Existing packages in the project `Pipfile.lock` should remain in place, even if they are not returned during resolution; +3. New dependencies should be written to the lockfile; +4. Conflicts should be resolved as outlined below. + +## Conflict Resolution + +If a conflict should occur due to the presence in the `Pipfile.lock` of a dependency of a new package, the following steps should be undertaken before alerting the user: + +1. 
+## Conflict Resolution
+
+If a conflict occurs because `Pipfile.lock` already contains a dependency of a newly added package, the following steps should be undertaken before alerting the user:
+
+1. Determine whether the previously locked version of the dependency meets the constraints required by the new package; if so, pin that version;
+2. If the previously locked version is not present in the `Pipfile` and is not a dependency of any other package (i.e. it does not appear in `pipenv graph`), update the lockfile with the new version;
+3. If a new or existing dependency conflicts with existing entries in the lockfile, perform an intermediate resolution step by checking:
+    a. If the new dependency can be satisfied by existing installs;
+    b. Whether conflicts can be upgraded without affecting locked dependencies;
+    c. If locked dependencies must be upgraded, whether those dependencies ultimately have any dependencies in the `Pipfile`;
+    d. If a traversal up the graph lands in the `Pipfile`, create _abstract dependencies_ from the `Pipfile` entries and determine whether they will still be satisfied by the new version;
+    e. If a new pin is required, ensure that any subdependencies of the newly pinned dependencies are also re-pinned (simply prefer the updated lockfile over the cached version);
+
+4. If none of the above resolves the conflict, raise an exception alerting the user that they must either perform a full lock or manually pin a version.
+
+## Necessary Changes
+
+In order to make these changes, we will need to modify the dependency resolution process. Overall, locking will require the following implementation changes:
+
+1. The ability to restore any entries that would otherwise be removed when the `--keep-outdated` flag is passed. The process already provides a caching mechanism, so we simply need to restore missing cache keys;
+2. Conflict resolution steps:
+    a. Checking an abstract dependency/candidate against a lockfile entry;
+    b. A requirements mapping for each dependency in the environment, to determine whether a lockfile entry is a descendant of any other entry;
+
+
+Author: Dan Ryan
diff --git a/pipenv/core.py b/pipenv/core.py
index e3c61618e5..3326cf0dbf 100644
--- a/pipenv/core.py
+++ b/pipenv/core.py
@@ -744,8 +744,6 @@ def batch_install(deps_list, procs, failed_deps_queue,
             extra_indexes=extra_indexes,
             use_pep517=not retry,
         )
-        if dep.is_vcs or dep.editable:
-            c.block()
         if procs.qsize() < nprocs:
             c.dep = dep
             procs.put(c)
@@ -881,7 +879,7 @@ def do_create_virtualenv(python=None, site_packages=False, pypi_mirror=None):
     )

     # Default to using sys.executable, if Python wasn't provided.
-    if python is None:
+    if not python:
         python = sys.executable
     click.echo(
         u"{0} {1} {3} {2}".format(
@@ -1048,6 +1046,7 @@ def do_lock(
             err=True,
         )

+    # Mutates the lockfile
     venv_resolve_deps(
         packages,
         which=which,
@@ -1058,7 +1057,8 @@ def do_lock(
         allow_global=system,
         pypi_mirror=pypi_mirror,
         pipfile=packages,
-        lockfile=lockfile
+        lockfile=lockfile,
+        keep_outdated=keep_outdated
     )

     # Support for --keep-outdated…
@@ -1075,6 +1075,12 @@ def do_lock(
                     lockfile[section_name][canonical_name] = cached_lockfile[
                         section_name
                     ][canonical_name].copy()
+        for key in ["default", "develop"]:
+            packages = set(cached_lockfile[key].keys())
+            new_lockfile = set(lockfile[key].keys())
+            missing = packages - new_lockfile
+            for missing_pkg in missing:
+                lockfile[key][missing_pkg] = cached_lockfile[key][missing_pkg].copy()

     # Overwrite any develop packages with default packages.
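+    # (The loop above restores cached entries that the resolver did not return,
+    # per PEEP-005; overwrite_dev below then keeps the default and develop
+    # sections consistent for any package that appears in both.)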
lockfile["develop"].update(overwrite_dev(lockfile.get("default", {}), lockfile["develop"])) if write: @@ -1456,7 +1462,7 @@ def pip_install( if "--hash" not in f.read(): ignore_hashes = True else: - ignore_hashes = True if not requirement.hashes else False + ignore_hashes = True if not requirement.hashes else ignore_hashes install_reqs = requirement.as_line(as_list=True, include_hashes=not ignore_hashes) if not requirement.markers: install_reqs = [escape_cmd(r) for r in install_reqs] @@ -2320,8 +2326,9 @@ def do_shell(three=None, python=False, fancy=False, shell_args=None, pypi_mirror project.project_directory, shell_args, ) - # Only set PIPENV_ACTIVE after finishing reading virtualenv_location + # Set an environment variable, so we know we're in the environment. + # Only set PIPENV_ACTIVE after finishing reading virtualenv_location # otherwise its value will be changed os.environ["PIPENV_ACTIVE"] = vistir.misc.fs_str("1") @@ -2553,11 +2560,6 @@ def do_check( # Run the PEP 508 checker in the virtualenv. cmd = _cmd + [vistir.compat.Path(pep508checker_path).as_posix()] c = run_command(cmd) - if is_verbose(): - click.echo("{0}{1}".format( - "Running command: ", - crayons.white("$ {0}".format(decode_for_output(" ".join(cmd))), bold=True) - )) if c.return_code is not None: try: results = simplejson.loads(c.out.strip()) diff --git a/pipenv/environment.py b/pipenv/environment.py index 602859e201..7ada63993b 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -389,33 +389,84 @@ def get_outdated_packages(self, pre=False): if pkg.latest_version._version > pkg.parsed_version._version ] - def get_package_requirements(self): + @classmethod + def _get_requirements_for_package(cls, node, key_tree, parent=None, chain=None): + if chain is None: + chain = [node.project_name] + + d = node.as_dict() + if parent: + d['required_version'] = node.version_spec if node.version_spec else 'Any' + else: + d['required_version'] = d['installed_version'] + + get_children = lambda n: key_tree.get(n.key, []) + + d['dependencies'] = [ + cls._get_requirements_for_package(c, key_tree, parent=node, + chain=chain+[c.project_name]) + for c in get_children(node) + if c.project_name not in chain + ] + + return d + + def get_package_requirements(self, pkg=None): from .vendor.pipdeptree import flatten, sorted_tree, build_dist_index, construct_tree - dist_index = build_dist_index(self.get_installed_packages()) + packages = self.get_installed_packages() + if pkg: + packages = [p for p in packages if p.key == pkg] + dist_index = build_dist_index(packages) tree = sorted_tree(construct_tree(dist_index)) branch_keys = set(r.key for r in flatten(tree.values())) - nodes = [p for p in tree.keys() if p.key not in branch_keys] + if pkg is not None: + nodes = [p for p in tree.keys() if p.key == pkg] + else: + nodes = [p for p in tree.keys() if p.key not in branch_keys] key_tree = dict((k.key, v) for k, v in tree.items()) - get_children = lambda n: key_tree.get(n.key, []) - - def aux(node, parent=None, chain=None): - if chain is None: - chain = [node.project_name] - d = node.as_dict() - if parent: - d['required_version'] = node.version_spec if node.version_spec else 'Any' - else: - d['required_version'] = d['installed_version'] - - d['dependencies'] = [ - aux(c, parent=node, chain=chain+[c.project_name]) - for c in get_children(node) - if c.project_name not in chain - ] + return [self._get_requirements_for_package(p, key_tree) for p in nodes] - return d - return [aux(p) for p in nodes] + @classmethod + def reverse_dependency(cls, 
node): + new_node = { + "package_name": node["package_name"], + "installed_version": node["installed_version"], + "required_version": node["required_version"] + } + for dependency in node.get("dependencies", []): + for dep in cls.reverse_dependency(dependency): + new_dep = dep.copy() + new_dep["parent"] = (node["package_name"], node["installed_version"]) + yield new_dep + yield new_node + + def reverse_dependencies(self): + from vistir.misc import unnest + rdeps = {} + for req in self.get_package_requirements(): + for d in self.reverse_dependency(req): + parents = None + name = d["package_name"] + pkg = { + name: { + "installed": d["installed_version"], + "required": d["required_version"] + } + } + parents = set(d.get("parent", [])) + pkg[name]["parents"] = parents + if rdeps.get(name): + if not (rdeps[name].get("required") or rdeps[name].get("installed")): + rdeps[name].update(pkg[name]) + rdeps[name]["parents"] = rdeps[name].get("parents", set()) | parents + else: + rdeps[name] = pkg[name] + for k in list(rdeps.keys()): + entry = rdeps[k] + if entry.get("parents"): + rdeps[k]["parents"] = set([p for p in unnest(entry["parents"])]) + return rdeps def get_working_set(self): """Retrieve the working set of installed packages for the environment. diff --git a/pipenv/exceptions.py b/pipenv/exceptions.py index 9007e73e7d..000c57153d 100644 --- a/pipenv/exceptions.py +++ b/pipenv/exceptions.py @@ -282,7 +282,7 @@ def __init__(self, message=None, **kwargs): class UninstallError(PipenvException): def __init__(self, package, command, return_values, return_code, **kwargs): extra = [crayons.blue("Attempted to run command: {0}".format( - crayons.yellow("$ {0}".format(command), bold=True) + crayons.yellow("$ {0!r}".format(command), bold=True) )),] extra.extend([crayons.blue(line.strip()) for line in return_values.splitlines()]) if isinstance(package, (tuple, list, set)): @@ -323,6 +323,15 @@ def __init__(self, path, **kwargs): PipenvException.__init__(self, message=decode_for_output(message)) +class DependencyConflict(PipenvException): + def __init__(self, message): + extra = [decode_for_output("{0} {1}".format( + crayons.red("ERROR:", bold=True), + crayons.white("A dependency conflict was detected and could not be resolved.", bold=True), + )),] + super(DependencyConflict, self).__init__(decode_for_output(message), extra=extra) + + class ResolutionFailure(PipenvException): def __init__(self, message, no_version_found=False): extra = ( @@ -354,3 +363,42 @@ def __init__(self, message, no_version_found=False): ) ) super(ResolutionFailure, self).__init__(decode_for_output(message), extra=extra) + + +class RequirementError(PipenvException): + + def __init__(self, req=None): + from .utils import VCS_LIST + keys = ("name", "path",) + VCS_LIST + ("line", "uri", "url", "relpath") + if req is not None: + possible_display_values = [getattr(req, value, None) for value in keys] + req_value = next(iter( + val for val in possible_display_values if val is not None + ), None) + if not req_value: + getstate_fn = getattr(req, "__getstate__", None) + slots = getattr(req, "__slots__", None) + keys_fn = getattr(req, "keys", None) + if getstate_fn: + req_value = getstate_fn() + elif slots: + slot_vals = [ + (k, getattr(req, k, None)) for k in slots + if getattr(req, k, None) + ] + req_value = "\n".join([ + " {0}: {1}".format(k, v) for k, v in slot_vals + ]) + elif keys_fn: + values = [(k, req.get(k)) for k in keys_fn() if req.get(k)] + req_value = "\n".join([ + " {0}: {1}".format(k, v) for k, v in values + ]) + else: + 
req_value = getattr(req.line_instance, "line", None) + message = "{0} {1}".format( + crayons.normal(decode_for_output("Failed creating requirement instance")), + crayons.white(decode_for_output("{0!r}".format(req_value))) + ) + extra = [crayons.normal(decode_for_output(str(req)))] + super(RequirementError, self).__init__(message, extra=extra) diff --git a/pipenv/resolver.py b/pipenv/resolver.py index c7314b22ad..640317066e 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -58,6 +58,7 @@ def get_parser(): parser.add_argument("--pre", action="store_true", default=False) parser.add_argument("--clear", action="store_true", default=False) parser.add_argument("--verbose", "-v", action="count", default=False) + parser.add_argument("--dev", action="store_true", default=False) parser.add_argument("--debug", action="store_true", default=False) parser.add_argument("--system", action="store_true", default=False) parser.add_argument("--parse-only", action="store_true", default=False) @@ -88,6 +89,459 @@ def handle_parsed_args(parsed): return parsed +class Entry(object): + """A resolved entry from a resolver run""" + + def __init__(self, name, entry_dict, project, resolver, reverse_deps=None, dev=False): + super(Entry, self).__init__() + self.name = name + if isinstance(entry_dict, dict): + self.entry_dict = self.clean_initial_dict(entry_dict) + else: + self.entry_dict = entry_dict + self.project = project + section = "develop" if dev else "default" + pipfile_section = "dev-packages" if dev else "packages" + self.dev = dev + self.pipfile = project.parsed_pipfile.get(pipfile_section, {}) + self.lockfile = project.lockfile_content.get(section, {}) + self.pipfile_dict = self.pipfile.get(self.pipfile_name, {}) + self.lockfile_dict = self.lockfile.get(name, entry_dict) + self.resolver = resolver + self.reverse_deps = reverse_deps + self._entry = None + self._lockfile_entry = None + self._pipfile_entry = None + self._parent_deps = [] + self._flattened_parents = [] + self._requires = None + self._deptree = None + self._parents_in_pipfile = [] + + @staticmethod + def make_requirement(name=None, entry=None, from_ireq=False): + from pipenv.vendor.requirementslib.models.requirements import Requirement + if from_ireq: + return Requirement.from_ireq(entry) + return Requirement.from_pipfile(name, entry) + + @classmethod + def clean_initial_dict(cls, entry_dict): + if not entry_dict.get("version", "").startswith("=="): + entry_dict["version"] = cls.clean_specifier(entry_dict.get("version", "")) + if "name" in entry_dict: + del entry_dict["name"] + return entry_dict + + def get_cleaned_dict(self): + if self.is_updated: + self.validate_constraints() + self.ensure_least_updates_possible() + if self.entry.extras != self.lockfile_entry.extras: + self._entry.req.extras.extend(self.lockfile_entry.req.extras) + self.entry_dict["extras"] = self.entry.extras + entry_hashes = set(self.entry.hashes) + locked_hashes = set(self.lockfile_entry.hashes) + if entry_hashes != locked_hashes and not self.is_updated: + self.entry_dict["hashes"] = list(entry_hashes | locked_hashes) + self.entry_dict["name"] = self.name + self.entry_dict["version"] = self.strip_version(self.entry_dict["version"]) + return self.entry_dict + + @property + def lockfile_entry(self): + if self._lockfile_entry is None: + self._lockfile_entry = self.make_requirement(self.name, self.lockfile_dict) + return self._lockfile_entry + + @property + def pipfile_entry(self): + if self._pipfile_entry is None: + self._pipfile_entry = 
self.make_requirement(self.pipfile_name, self.pipfile_dict) + return self._pipfile_entry + + @property + def entry(self): + if self._entry is None: + self._entry = self.make_requirement(self.name, self.entry_dict) + return self._entry + + @property + def normalized_name(self): + return self.entry.normalized_name + + @property + def pipfile_name(self): + return self.project.get_package_name_in_pipfile(self.name, dev=self.dev) + + @property + def is_in_pipfile(self): + return True if self.pipfile_name else False + + @property + def pipfile_packages(self): + return self.project.pipfile_package_names["dev" if self.dev else "default"] + + def create_parent(self, name, specifier="*"): + parent = self.create(name, specifier, self.project, self.resolver, + self.reverse_deps, self.dev) + parent._deptree = self.deptree + return parent + + @property + def deptree(self): + if not self._deptree: + self._deptree = self.project.environment.get_package_requirements() + return self._deptree + + @classmethod + def create(cls, name, entry_dict, project, resolver, reverse_deps=None, dev=False): + return cls(name, entry_dict, project, resolver, reverse_deps, dev) + + @staticmethod + def clean_specifier(specifier): + from pipenv.vendor.packaging.specifiers import Specifier + if not any(specifier.startswith(k) for k in Specifier._operators.keys()): + if specifier.strip().lower() in ["any", "*"]: + return "*" + specifier = "=={0}".format(specifier) + elif specifier.startswith("==") and specifier.count("=") > 2: + specifier = "=={0}".format(specifier.lstrip("=")) + return specifier + + @staticmethod + def strip_version(specifier): + from pipenv.vendor.packaging.specifiers import Specifier + op = next(iter( + k for k in Specifier._operators.keys() if specifier.startswith(k) + ), None) + if op: + specifier = specifier[len(op):] + while op: + op = next(iter( + k for k in Specifier._operators.keys() if specifier.startswith(k) + ), None) + if op: + specifier = specifier[len(op):] + return specifier + + @property + def parent_deps(self): + if not self._parent_deps: + self._parent_deps = self.get_parent_deps(unnest=False) + return self._parent_deps + + @property + def flattened_parents(self): + if not self._flattened_parents: + self._flattened_parents = self.get_parent_deps(unnest=True) + return self._flattened_parents + + @property + def parents_in_pipfile(self): + if not self._parents_in_pipfile: + self._parents_in_pipfile = [ + p for p in self.flattened_parents + if p.normalized_name in self.pipfile_packages + ] + return self._parents_in_pipfile + + @property + def is_updated(self): + return self.entry.specifiers != self.lockfile_entry.specifiers + + @property + def requirements(self): + if not self._requires: + self._requires = next(iter( + self.project.environment.get_package_requirements(self.name) + ), None) + return self._requires + + @property + def updated_version(self): + version = self.entry.specifiers + return self.strip_version(version) + + @property + def updated_specifier(self): + return self.entry.specifiers + + @property + def original_specifier(self): + # type: () -> str + return self.lockfile_entry.specifiers + + @property + def original_version(self): + if self.original_specifier: + return self.strip_version(self.original_specifier) + return None + + def validate_specifiers(self): + if self.is_in_pipfile: + return self.pipfile_entry.requirement.specifier.contains(self.updated_version) + return True + + def get_dependency(self, name): + return next(iter( + dep for dep in 
self.requirements.get("dependencies", [])
+            if dep.get("package_name", "") == name
+        ), {})
+
+    def get_parent_deps(self, unnest=False):
+        from pipenv.vendor.packaging.specifiers import Specifier
+        parents = []
+        for spec in self.reverse_deps.get(self.normalized_name, {}).get("parents", set()):
+            spec_index = next(iter(c for c in Specifier._operators if c in spec), None)
+            name = spec
+            parent = None
+            if spec_index is not None:
+                specifier = self.clean_specifier(spec[spec_index:])
+                name = spec[:spec_index]
+                parent = self.create_parent(name, specifier)
+            else:
+                name = spec
+                parent = self.create_parent(name)
+            if parent is not None:
+                parents.append(parent)
+            if not unnest or parent.pipfile_name is not None:
+                continue
+            if self.reverse_deps.get(parent.normalized_name, {}).get("parents", set()):
+                parents.extend(parent.flattened_parents)
+        return parents
+
+    def ensure_least_updates_possible(self):
+        """
+        Mutate the current entry to ensure that we are making the smallest amount of
+        changes possible to the existing lockfile -- this will keep the old locked
+        versions of packages if they satisfy new constraints.
+
+        :return: None
+        """
+        constraints = self.get_constraints()
+        can_use_original = True
+        can_use_updated = True
+        satisfied_by_versions = set()
+        for constraint in constraints:
+            if not constraint.specifier.contains(self.original_version):
+                can_use_original = False
+            if not constraint.specifier.contains(self.updated_version):
+                can_use_updated = False
+            satisfied_by_value = getattr(constraint, "satisfied_by", None)
+            if satisfied_by_value:
+                satisfied_by = "{0}".format(
+                    self.clean_specifier(str(satisfied_by_value.version))
+                )
+                satisfied_by_versions.add(satisfied_by)
+        if can_use_original:
+            self.entry_dict = self.lockfile_dict.copy()
+        elif can_use_updated:
+            if len(satisfied_by_versions) == 1:
+                self.entry_dict["version"] = next(iter(
+                    sat_by for sat_by in satisfied_by_versions if sat_by
+                ), None)
+                hashes = None
+                if self.lockfile_entry.specifiers == satisfied_by:
+                    ireq = self.lockfile_entry.as_ireq()
+                    if not self.lockfile_entry.hashes and self.resolver._should_include_hash(ireq):
+                        hashes = self.resolver.get_hash(ireq)
+                    else:
+                        hashes = self.lockfile_entry.hashes
+                else:
+                    if self.resolver._should_include_hash(constraint):
+                        hashes = self.resolver.get_hash(constraint)
+                if hashes:
+                    self.entry_dict["hashes"] = list(hashes)
+                    self._entry.hashes = frozenset(hashes)
+        else:
+            # check for any parents, since they depend on this and the current
+            # installed versions are not compatible with the new version, so
+            # we will need to update the top level dependency if possible
+            self.check_flattened_parents()
+
+    def get_constraints(self):
+        """
+        Retrieve all of the relevant constraints, aggregated from the pipfile, resolver,
+        and parent dependencies and their respective conflict resolution where possible.
+
+        :return: A set of **InstallRequirement** instances representing constraints
+        :rtype: Set
+        """
+        constraints = {
+            c for c in self.resolver.parsed_constraints
+            if c and c.name == self.entry.name
+        }
+        pipfile_constraint = self.get_pipfile_constraint()
+        if pipfile_constraint:
+            constraints.add(pipfile_constraint)
+        return constraints
+
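+    # Constraint sources are consulted in order of authority: an explicit
+    # Pipfile pin wins, otherwise a constraint is derived from the parent
+    # dependencies and their conflict resolution (see the two methods below).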
+    def get_pipfile_constraint(self):
+        """
+        Retrieve the version constraint from the pipfile if it is specified there,
+        otherwise check the constraints of the parent dependencies and their conflicts.
+
+        :return: An **InstallRequirement** instance representing a version constraint
+        """
+        if self.is_in_pipfile:
+            return self.pipfile_entry.as_ireq()
+        return self.constraint_from_parent_conflicts()
+
+    def constraint_from_parent_conflicts(self):
+        """
+        Given a resolved entry with multiple parent dependencies with different
+        constraints, search for the resolution that satisfies all of the parent
+        constraints.
+
+        :return: A new **InstallRequirement** satisfying all parent constraints
+        :raises: :exc:`~pipenv.exceptions.DependencyConflict` if resolution is impossible
+        """
+        # ensure that we satisfy the parent dependencies of this dep
+        from pipenv.vendor.packaging.specifiers import Specifier
+        parent_dependencies = set()
+        has_mismatch = False
+        can_use_original = True
+        for p in self.parent_deps:
+            # updated dependencies should be satisfied since they were resolved already
+            if p.is_updated:
+                continue
+            # parents with no requirements can't conflict
+            if not p.requirements:
+                continue
+            needed = p.requirements.get("dependencies", [])
+            entry_ref = p.get_dependency(self.name)
+            required = entry_ref.get("required_version", "*")
+            required = self.clean_specifier(required)
+            parent_requires = self.make_requirement(self.name, required)
+            parent_dependencies.add("{0} => {1} ({2})".format(p.name, self.name, required))
+            if not parent_requires.requirement.specifier.contains(self.original_version):
+                can_use_original = False
+            if not parent_requires.requirement.specifier.contains(self.updated_version):
+                has_mismatch = True
+        if has_mismatch and not can_use_original:
+            from pipenv.exceptions import DependencyConflict
+            msg = (
+                "Cannot resolve {0} ({1}) due to conflicting parent dependencies: "
+                "\n\t{2}".format(
+                    self.name, self.updated_version, "\n\t".join(parent_dependencies)
+                )
+            )
+            raise DependencyConflict(msg)
+        elif can_use_original:
+            return self.lockfile_entry.as_ireq()
+        return self.entry.as_ireq()
+
+    def validate_constraints(self):
+        """
+        Retrieve the full set of available constraints and iterate over them, validating
+        that they exist and that they are not causing unresolvable conflicts.
+
+        :return: True if the constraints are satisfied by the resolution provided
+        :raises: :exc:`pipenv.exceptions.DependencyConflict` if the constraints don't exist
+        """
+        constraints = self.get_constraints()
+        for constraint in constraints:
+            try:
+                constraint.check_if_exists(False)
+            except Exception:
+                from pipenv.exceptions import DependencyConflict
+                msg = (
+                    "Cannot resolve conflicting version {0}{1} while {2}{3} is "
+                    "locked.".format(
+                        self.name, self.updated_specifier, self.old_name, self.old_specifiers
+                    )
+                )
+                raise DependencyConflict(msg)
+        return True
+
+    def check_flattened_parents(self):
+        for parent in self.parents_in_pipfile:
+            if not parent.updated_specifier:
+                continue
+            if not parent.validate_specifiers():
+                from pipenv.exceptions import DependencyConflict
+                msg = (
+                    "Cannot resolve conflicting versions: (Root: {0}) {1}{2} (Pipfile) "
+                    "Incompatible with {3}{4} (resolved)\n".format(
+                        self.name, parent.pipfile_name,
+                        parent.pipfile_entry.requirement.specifiers, parent.name,
+                        parent.updated_specifier
+                    )
+                )
+                raise DependencyConflict(msg)
+
+    def __getattribute__(self, key):
+        result = None
+        old_version = ["was_", "had_", "old_"]
+        new_version = ["is_", "has_", "new_"]
+        if any(key.startswith(v) for v in new_version):
+            entry = Entry.__getattribute__(self, "entry")
+            try:
+                keystart = key.index("_") + 1
+                try:
+                    result = getattr(entry, key[keystart:])
+                except AttributeError:
+                    result = getattr(entry, key)
+            except AttributeError:
+                result = super(Entry, self).__getattribute__(key)
+            return result
+        if any(key.startswith(v) for v in old_version):
+            lockfile_entry = Entry.__getattribute__(self, "lockfile_entry")
+            try:
+                keystart = key.index("_") + 1
+                try:
+                    result = getattr(lockfile_entry, key[keystart:])
+                except AttributeError:
+                    result = getattr(lockfile_entry, key)
+            except AttributeError:
+                result = super(Entry, self).__getattribute__(key)
+            return result
+        return super(Entry, self).__getattribute__(key)
+
+
+def clean_outdated(results, resolver, project, dev=False):
+    from pipenv.vendor.requirementslib.models.requirements import Requirement
+    if not project.lockfile_exists:
+        return results
+    lockfile = project.lockfile_content
+    section = "develop" if dev else "default"
+    pipfile_section = "dev-packages" if dev else "packages"
+    pipfile = project.parsed_pipfile[pipfile_section]
+    reverse_deps = project.environment.reverse_dependencies()
+    deptree = project.environment.get_package_requirements()
+    overlapping_results = [r for r in results if r["name"] in lockfile[section]]
+    new_results = [r for r in results if r["name"] not in lockfile[section]]
+    for result in overlapping_results:
+        name = result.get("name")
+        entry_dict = result.copy()
+        entry = Entry(name, entry_dict, project, resolver, reverse_deps=reverse_deps, dev=dev)
+        # The old entry was editable but this one isn't; prefer the old one
+        # TODO: Should this be the case for all locking?
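+        # (`was_*`/`old_*` attributes proxy to the previous lockfile entry and
+        # `is_*`/`has_*` attributes to the freshly resolved entry, via
+        # Entry.__getattribute__ above.)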
+ if entry.was_editable and not entry.is_editable: + continue + # if the entry has not changed versions since the previous lock, + # don't introduce new markers since that is more restrictive + if entry.has_markers and not entry.had_markers and not entry.is_updated: + del entry.entry_dict["markers"] + entry._entry.req.req.marker = None + entry._entry.markers = "" + # do make sure we retain the original markers for entries that are not changed + elif entry.had_markers and not entry.has_markers and not entry.is_updated: + if entry._entry and entry._entry.req and entry._entry.req.req and ( + entry.lockfile_entry and entry.lockfile_entry.req and + entry.lockfile_entry.req.req and entry.lockfile_entry.req.req.marker + ): + entry._entry.req.req.marker = entry.lockfile_entry.req.req.marker + if entry.lockfile_entry and entry.lockfile_entry.markers: + entry._entry.markers = entry.lockfile_entry.markers + if entry.lockfile_dict and "markers" in entry.lockfile_dict: + entry.entry_dict["markers"] = entry.lockfile_dict["markers"] + entry_dict = entry.get_cleaned_dict() + new_results.append(entry_dict) + return new_results + + def parse_packages(packages, pre, clear, system, requirements_dir=None): from pipenv.vendor.requirementslib.models.requirements import Requirement from pipenv.vendor.vistir.contextmanagers import cd, temp_path @@ -145,8 +599,18 @@ def resolve(packages, pre, project, sources, clear, system, requirements_dir=Non if pypi_mirror_source else project.pipfile_sources ) - results = resolve(packages, pre=pre, project=project, sources=sources, clear=clear, - system=system, requirements_dir=requirements_dir) + keep_outdated = os.environ.get("PIPENV_KEEP_OUTDATED", False) + results, resolver = resolve( + packages, + pre=pre, + project=project, + sources=sources, + clear=clear, + system=system, + requirements_dir=requirements_dir, + ) + if keep_outdated: + results = clean_outdated(results, resolver, project) if write: with open(write, "w") as fh: if not results: diff --git a/pipenv/utils.py b/pipenv/utils.py index 252ed30f92..22c8653b3f 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -30,7 +30,7 @@ import parse from . 
import environments -from .exceptions import PipenvUsageError, PipenvCmdError +from .exceptions import PipenvUsageError, ResolutionFailure, RequirementError, PipenvCmdError from .pep508checker import lookup from .vendor.urllib3 import util as urllib3_util @@ -388,13 +388,21 @@ def parse_line( if indexes: url = indexes[0] line = " ".join(remainder) + req = None # type: Requirement try: req = Requirement.from_line(line) except ValueError: raise ResolutionFailure("Failed to resolve requirement from line: {0!s}".format(line)) if url: - index_lookup[req.normalized_name] = project.get_source( - url=url, refresh=True).get("name") + try: + index_lookup[req.normalized_name] = project.get_source( + url=url, refresh=True).get("name") + except TypeError: + pass + try: + req.normalized_name + except TypeError: + raise RequirementError(req=req) # strip the marker and re-add it later after resolution # but we will need a fallback in case resolution fails # eg pypiwin32 @@ -424,7 +432,10 @@ def get_deps_from_req(cls, req): _, entry = req.pipfile_entry parsed_line = req.req.parsed_line # type: Line setup_info = None # type: Any - name = req.normalized_name + try: + name = req.normalized_name + except TypeError: + raise RequirementError(req=req) setup_info = req.req.setup_info locked_deps[pep423_name(name)] = entry requirements = [v for v in getattr(setup_info, "requires", {}).values()] @@ -684,7 +695,9 @@ def get_hash(self, ireq, ireq_hashes=None): # The entire purpose of this approach is to include missing hashes. # This fixes a race condition in resolution for missing dependency caches # see pypa/pipenv#3289 - if self._should_include_hash(ireq) and ( + if not self._should_include_hash(ireq): + return set() + elif self._should_include_hash(ireq) and ( not ireq_hashes or ireq.link.scheme == "file" ): if not ireq_hashes: @@ -924,7 +937,8 @@ def venv_resolve_deps( pypi_mirror=None, dev=False, pipfile=None, - lockfile=None + lockfile=None, + keep_outdated=False ): """ Resolve dependencies for a pipenv project, acts as a portal to the target environment. 
@@ -945,6 +959,7 @@ def venv_resolve_deps( :param pipfile: A Pipfile section to operate on, defaults to None :type pipfile: Optional[Dict[str, Union[str, Dict[str, bool, List[str]]]]] :param Dict[str, Any] lockfile: A project lockfile to mutate, defaults to None + :param bool keep_outdated: Whether to retain outdated dependencies and resolve with them in mind, defaults to False :raises RuntimeError: Raised on resolution failure :return: Nothing :rtype: None @@ -995,6 +1010,8 @@ def venv_resolve_deps( os.environ["PIPENV_REQ_DIR"] = fs_str(req_dir) os.environ["PIP_NO_INPUT"] = fs_str("1") os.environ["PIPENV_SITE_DIR"] = get_pipenv_sitedir() + if keep_outdated: + os.environ["PIPENV_KEEP_OUTDATED"] = fs_str("1") with create_spinner(text=decode_for_output("Locking...")) as sp: # This conversion is somewhat slow on local and file-type requirements since # we now download those requirements / make temporary folders to perform @@ -1050,8 +1067,9 @@ def resolve_deps( os.environ["PIP_SRC"] = project.virtualenv_src_location backup_python_path = sys.executable results = [] + resolver = None if not deps: - return results + return results, resolver # First (proper) attempt: req_dir = req_dir if req_dir else os.environ.get("req_dir", None) if not req_dir: @@ -1059,7 +1077,7 @@ def resolve_deps( req_dir = create_tracked_tempdir(prefix="pipenv-", suffix="-requirements") with HackedPythonVersion(python_version=python, python_path=python_path): try: - resolved_tree, hashes, markers_lookup, resolver, skipped = actually_resolve_deps( + results, hashes, markers_lookup, resolver, skipped = actually_resolve_deps( deps, index_lookup, markers_lookup, @@ -1071,9 +1089,9 @@ def resolve_deps( ) except RuntimeError: # Don't exit here, like usual. - resolved_tree = None + results = None # Second (last-resort) attempt: - if resolved_tree is None: + if results is None: with HackedPythonVersion( python_version=".".join([str(s) for s in sys.version_info[:3]]), python_path=backup_python_path, @@ -1081,7 +1099,7 @@ def resolve_deps( try: # Attempt to resolve again, with different Python version information, # particularly for particularly particular packages. 
-            resolved_tree, hashes, markers_lookup, resolver, skipped = actually_resolve_deps(
+            results, hashes, markers_lookup, resolver, skipped = actually_resolve_deps(
                 deps,
                 index_lookup,
                 markers_lookup,
@@ -1093,7 +1111,7 @@
                )
            except RuntimeError:
                sys.exit(1)
-    return resolved_tree
+    return results, resolver


 def is_star(val):
@@ -1323,7 +1341,7 @@ def get_canonical_names(packages):
     if not isinstance(packages, Sequence):
         if not isinstance(packages, six.string_types):
             return packages
-        packages = [packages,]
+        packages = [packages]
     return set([canonicalize_name(pkg) for pkg in packages if pkg])


@@ -1752,11 +1770,11 @@ def parse_indexes(line):
     )
     parser.add_argument(
         "--extra-index-url", "--extra-index",
-        metavar="extra_indexes",action="append",
+        metavar="extra_indexes", action="append",
     )
     parser.add_argument("--trusted-host", metavar="trusted_hosts", action="append")
     args, remainder = parser.parse_known_args(line.split())
-    index = [] if not args.index else [args.index,]
+    index = [] if not args.index else [args.index]
     extra_indexes = [] if not args.extra_index_url else args.extra_index_url
     indexes = index + extra_indexes
     trusted_hosts = args.trusted_host if args.trusted_host else []
diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py
index 93b9d4f874..6a0f15f0e7 100644
--- a/tests/integration/test_lock.py
+++ b/tests/integration/test_lock.py
@@ -56,6 +56,7 @@ def test_lock_requirements_file(PipenvInstance, pypi):


 @pytest.mark.lock
+@pytest.mark.keep_outdated
 def test_lock_keep_outdated(PipenvInstance, pypi):

     with PipenvInstance(pypi=pypi) as p:
@@ -92,6 +93,55 @@ def test_lock_keep_outdated(PipenvInstance, pypi):
         assert lock['default']['pytest']['version'] == "==3.1.0"


+@pytest.mark.lock
+@pytest.mark.keep_outdated
+def test_keep_outdated_doesnt_remove_lockfile_entries(PipenvInstance, pypi):
+    with PipenvInstance(chdir=True, pypi=pypi) as p:
+        p._pipfile.add("requests", "==2.18.4")
+        p._pipfile.add("colorama", {"version": "*", "markers": "os_name=='FakeOS'"})
+        p.pipenv("install")
+        p._pipfile.add("six", "*")
+        p.pipenv("lock --keep-outdated")
+        assert "colorama" in p.lockfile["default"]
+        assert p.lockfile["default"]["colorama"]["markers"] == "os_name=='FakeOS'"
+
+
+@pytest.mark.lock
+@pytest.mark.keep_outdated
+def test_keep_outdated_doesnt_upgrade_pipfile_pins(PipenvInstance, pypi):
+    with PipenvInstance(chdir=True, pypi=pypi) as p:
+        p._pipfile.add("urllib3", "==1.21.1")
+        c = p.pipenv("install")
+        assert c.ok
+        p._pipfile.add("requests", "==2.18.4")
+        c = p.pipenv("lock --keep-outdated")
+        assert c.ok
+        assert "requests" in p.lockfile["default"]
+        assert "urllib3" in p.lockfile["default"]
+        assert p.lockfile["default"]["requests"]["version"] == "==2.18.4"
+        assert p.lockfile["default"]["urllib3"]["version"] == "==1.21.1"
+
+
+@pytest.mark.lock
+@pytest.mark.keep_outdated
+def test_keep_outdated_doesnt_update_satisfied_constraints(PipenvInstance, pypi):
+    with PipenvInstance(chdir=True, pypi=pypi) as p:
+        p._pipfile.add("requests", "==2.18.4")
+        c = p.pipenv("install")
+        assert c.ok
+        p._pipfile.add("requests", "*")
+        assert p.pipfile["packages"]["requests"] == "*"
+        c = p.pipenv("lock --keep-outdated")
+        assert c.ok
+        assert "requests" in p.lockfile["default"]
+        assert "urllib3" in p.lockfile["default"]
+        # ensure this didn't update requests
+        assert p.lockfile["default"]["requests"]["version"] == "==2.18.4"
+        c = p.pipenv("lock")
+        assert c.ok
+        assert p.lockfile["default"]["requests"]["version"] != "==2.18.4"
+
+
 @pytest.mark.lock
@pytest.mark.complex @pytest.mark.needs_internet @@ -148,8 +198,8 @@ def test_lock_with_prereleases(PipenvInstance, pypi): @pytest.mark.lock -@pytest.mark.complex @pytest.mark.maya +@pytest.mark.complex @pytest.mark.needs_internet @flaky def test_complex_deps_lock_and_install_properly(PipenvInstance, pip_src_dir, pypi): @@ -169,8 +219,8 @@ def test_complex_deps_lock_and_install_properly(PipenvInstance, pip_src_dir, pyp assert c.return_code == 0 -@pytest.mark.extras @pytest.mark.lock +@pytest.mark.extras def test_lock_extras_without_install(PipenvInstance, pypi): with PipenvInstance(pypi=pypi) as p: with open(p.pipfile_path, 'w') as f: @@ -191,11 +241,11 @@ def test_lock_extras_without_install(PipenvInstance, pypi): assert "extra == 'socks'" not in c.out.strip() -@pytest.mark.extras @pytest.mark.lock +@pytest.mark.extras @pytest.mark.complex -@pytest.mark.skip(reason='Needs numpy to be mocked') @pytest.mark.needs_internet +@pytest.mark.skip(reason='Needs numpy to be mocked') def test_complex_lock_deep_extras(PipenvInstance, pypi): # records[pandas] requires tablib[pandas] which requires pandas. # This uses the real PyPI; Pandas has too many requirements to mock. @@ -216,10 +266,10 @@ def test_complex_lock_deep_extras(PipenvInstance, pypi): assert 'pandas' in p.lockfile['default'] -@pytest.mark.skip_lock @pytest.mark.index -@pytest.mark.needs_internet @pytest.mark.install # private indexes need to be uncached for resolution +@pytest.mark.skip_lock +@pytest.mark.needs_internet def test_private_index_skip_lock(PipenvInstance): with PipenvInstance() as p: with open(p.pipfile_path, 'w') as f: @@ -243,10 +293,10 @@ def test_private_index_skip_lock(PipenvInstance): assert c.return_code == 0 -@pytest.mark.requirements @pytest.mark.lock @pytest.mark.index @pytest.mark.install # private indexes need to be uncached for resolution +@pytest.mark.requirements @pytest.mark.needs_internet def test_private_index_lock_requirements(PipenvInstance): # Don't use the local fake pypi @@ -276,10 +326,10 @@ def test_private_index_lock_requirements(PipenvInstance): assert '--extra-index-url https://test.pypi.org/simple' in c.out.strip() -@pytest.mark.requirements @pytest.mark.lock @pytest.mark.index @pytest.mark.install # private indexes need to be uncached for resolution +@pytest.mark.requirements @pytest.mark.needs_internet def test_private_index_mirror_lock_requirements(PipenvInstance): # Don't use the local fake pypi @@ -316,8 +366,8 @@ def test_private_index_mirror_lock_requirements(PipenvInstance): assert '--extra-index-url {}'.format(mirror_url) not in c.out.strip() -@pytest.mark.install @pytest.mark.index +@pytest.mark.install def test_lock_updated_source(PipenvInstance, pypi): with PipenvInstance(pypi=pypi) as p: @@ -352,8 +402,8 @@ def test_lock_updated_source(PipenvInstance, pypi): assert 'requests' in p.lockfile['default'] -@pytest.mark.lock @pytest.mark.vcs +@pytest.mark.lock @pytest.mark.needs_internet def test_lock_editable_vcs_without_install(PipenvInstance, pypi): with PipenvInstance(pypi=pypi, chdir=True) as p: @@ -371,8 +421,8 @@ def test_lock_editable_vcs_without_install(PipenvInstance, pypi): assert c.return_code == 0 -@pytest.mark.lock @pytest.mark.vcs +@pytest.mark.lock @pytest.mark.needs_internet def test_lock_editable_vcs_with_ref_in_git(PipenvInstance, pypi): with PipenvInstance(pypi=pypi, chdir=True) as p: @@ -389,8 +439,8 @@ def test_lock_editable_vcs_with_ref_in_git(PipenvInstance, pypi): assert c.return_code == 0 -@pytest.mark.lock @pytest.mark.vcs +@pytest.mark.lock 
@pytest.mark.needs_internet def test_lock_editable_vcs_with_ref(PipenvInstance, pypi): with PipenvInstance(pypi=pypi, chdir=True) as p: @@ -407,9 +457,9 @@ def test_lock_editable_vcs_with_ref(PipenvInstance, pypi): assert c.return_code == 0 -@pytest.mark.extras -@pytest.mark.lock @pytest.mark.vcs +@pytest.mark.lock +@pytest.mark.extras @pytest.mark.needs_internet def test_lock_editable_vcs_with_extras_without_install(PipenvInstance, pypi): with PipenvInstance(pypi=pypi, chdir=True) as p: @@ -428,8 +478,8 @@ def test_lock_editable_vcs_with_extras_without_install(PipenvInstance, pypi): assert c.return_code == 0 -@pytest.mark.lock @pytest.mark.vcs +@pytest.mark.lock @pytest.mark.needs_internet def test_lock_editable_vcs_with_markers_without_install(PipenvInstance, pypi): with PipenvInstance(pypi=pypi, chdir=True) as p: @@ -491,8 +541,8 @@ def test_lockfile_with_empty_dict(PipenvInstance): @pytest.mark.lock -@pytest.mark.skip_lock @pytest.mark.install +@pytest.mark.skip_lock def test_lock_with_incomplete_source(PipenvInstance, pypi): with PipenvInstance(pypi=pypi, chdir=True) as p: with open(p.pipfile_path, 'w') as f: @@ -546,8 +596,8 @@ def test_lock_missing_cache_entries_gets_all_hashes(monkeypatch, PipenvInstance, assert len(p.lockfile["default"]["scandir"]["hashes"]) > 1 -@pytest.mark.lock @pytest.mark.vcs +@pytest.mark.lock def test_vcs_lock_respects_top_level_pins(PipenvInstance, pypi): """Test that locking VCS dependencies respects top level packages pinned in Pipfiles"""