diff --git a/.vsts-ci/linux.yml b/.vsts-ci/linux.yml index 7968801c5e..95c62dc51f 100644 --- a/.vsts-ci/linux.yml +++ b/.vsts-ci/linux.yml @@ -1,3 +1,22 @@ +name: Pipenv Build Rules +trigger: + batch: true + branches: + include: + - master + paths: + exclude: + - docs/* + - news/* + - README.md + - pipenv/*.txt + - CHANGELOG.rst + - CONTRIBUTING.md + - CODE_OF_CONDUCT.md + - .gitignore + - .gitattributes + - .editorconfig + phases: - template: phases/test.yml parameters: diff --git a/.vsts-ci/steps/run-tests.yml b/.vsts-ci/steps/run-tests.yml index 4c2640ab37..a7b99a1317 100644 --- a/.vsts-ci/steps/run-tests.yml +++ b/.vsts-ci/steps/run-tests.yml @@ -12,7 +12,7 @@ steps: $env:TEMP='T:\' Write-Host "##vso[task.setvariable variable=TMP]T:\" $env:TEMP='T:\' - D:\.venv\Scripts\pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor -k 'test_get_vcs_refs or test_install_editable_git_tag' --junitxml=test-results.xml tests + D:\.venv\Scripts\pipenv run pytest -ra --ignore=pipenv\patched --ignore=pipenv\vendor --junitxml=test-results.xml tests displayName: Run integration tests - task: PublishTestResults@2 diff --git a/.vsts-ci/windows.yml b/.vsts-ci/windows.yml index a397a23c67..e423c3abba 100644 --- a/.vsts-ci/windows.yml +++ b/.vsts-ci/windows.yml @@ -1,3 +1,22 @@ +name: Pipenv Build Rules +trigger: + batch: true + branches: + include: + - master + paths: + exclude: + - docs/* + - news/* + - README.md + - pipenv/*.txt + - CHANGELOG.rst + - CONTRIBUTING.md + - CODE_OF_CONDUCT.md + - .gitignore + - .gitattributes + - .editorconfig + phases: - template: phases/test.yml parameters: diff --git a/news/3090.bugfix.rst b/news/3090.bugfix.rst new file mode 100644 index 0000000000..af772c3d63 --- /dev/null +++ b/news/3090.bugfix.rst @@ -0,0 +1 @@ +Fixed a bug in ``requirementslib`` which prevented successful installation from mercurial repositories. diff --git a/news/3094.bugfix.rst b/news/3094.bugfix.rst new file mode 100644 index 0000000000..e17b2e3273 --- /dev/null +++ b/news/3094.bugfix.rst @@ -0,0 +1 @@ +Fixed random resource warnings when using pyenv or any other subprocess calls. diff --git a/news/3102.bugfix.rst b/news/3102.bugfix.rst new file mode 100644 index 0000000000..2224ac0bd9 --- /dev/null +++ b/news/3102.bugfix.rst @@ -0,0 +1 @@ +Fixed an issue in ``delegator.py`` related to subprocess calls when using ``PopenSpawn`` to stream output, which sometimes threw unexpected ``EOF`` errors. diff --git a/news/3109.bugfix.rst b/news/3109.bugfix.rst new file mode 100644 index 0000000000..af5718b6ae --- /dev/null +++ b/news/3109.bugfix.rst @@ -0,0 +1 @@ +Fixed issues with broken subprocess calls leaking resource handles and causing random and sporadic failures. diff --git a/news/3113.bugfix.rst b/news/3113.bugfix.rst new file mode 100644 index 0000000000..af43b87df8 --- /dev/null +++ b/news/3113.bugfix.rst @@ -0,0 +1 @@ +Fixed an issue resolving virtualenv paths for users without ``platlib`` values on their systems. diff --git a/news/3114.bugfix.rst b/news/3114.bugfix.rst new file mode 100644 index 0000000000..2224ac0bd9 --- /dev/null +++ b/news/3114.bugfix.rst @@ -0,0 +1 @@ +Fixed an issue in ``delegator.py`` related to subprocess calls when using ``PopenSpawn`` to stream output, which sometimes threw unexpected ``EOF`` errors. 
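(For context on the ``PopenSpawn``/``EOF`` news entries above: reading from a pexpect ``PopenSpawn`` stream after the child process has exited raises ``pexpect.EOF``, so a robust streaming loop treats that exception as normal end-of-output rather than an error. A minimal sketch of that pattern follows; it is not the actual ``delegator.py`` change, and the helper name and command are illustrative only.)

    import pexpect
    from pexpect.popen_spawn import PopenSpawn

    def stream_output(cmd="python --version"):
        # Illustrative command; any subprocess behaves the same way.
        child = PopenSpawn(cmd, encoding="utf-8")
        chunks = []
        while True:
            try:
                # read_nonblocking() raises pexpect.EOF once the child closes
                # its output stream, which is the "unexpected EOF" the fix guards against.
                chunks.append(child.read_nonblocking(size=1024, timeout=5))
            except pexpect.EOF:
                break
        return "".join(chunks)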
diff --git a/news/3117.bugfix.rst b/news/3117.bugfix.rst new file mode 100644 index 0000000000..2224ac0bd9 --- /dev/null +++ b/news/3117.bugfix.rst @@ -0,0 +1 @@ +Fixed an issue in ``delegator.py`` related to subprocess calls when using ``PopenSpawn`` to stream output, which sometimes threw unexpected ``EOF`` errors. diff --git a/news/3121.bugfix.rst b/news/3121.bugfix.rst new file mode 100644 index 0000000000..fb815c428c --- /dev/null +++ b/news/3121.bugfix.rst @@ -0,0 +1 @@ +Updated ``pythonfinder`` to correct an issue with unnesting of nested paths when searching for python versions. diff --git a/news/3121.vendor.rst b/news/3121.vendor.rst new file mode 100644 index 0000000000..fb815c428c --- /dev/null +++ b/news/3121.vendor.rst @@ -0,0 +1 @@ +Updated ``pythonfinder`` to correct an issue with unnesting of nested paths when searching for python versions. diff --git a/pipenv/core.py b/pipenv/core.py index ee1658bf6e..59b9a29c4c 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1355,7 +1355,8 @@ def pip_install( write_to_tmpfile = False if requirement: needs_hashes = not requirement.editable and not ignore_hashes and r is None - write_to_tmpfile = needs_hashes + has_subdir = requirement.is_vcs and requirement.req.subdirectory + write_to_tmpfile = needs_hashes or has_subdir if not trusted_hosts: trusted_hosts = [] diff --git a/pipenv/patched/notpip/_internal/utils/temp_dir.py b/pipenv/patched/notpip/_internal/utils/temp_dir.py index ba472b0d79..893dc975e6 100644 --- a/pipenv/patched/notpip/_internal/utils/temp_dir.py +++ b/pipenv/patched/notpip/_internal/utils/temp_dir.py @@ -3,8 +3,10 @@ import logging import os.path import tempfile +import warnings from pipenv.patched.notpip._internal.utils.misc import rmtree +from pipenv.vendor.vistir.compat import finalize, ResourceWarning logger = logging.getLogger(__name__) @@ -45,6 +47,20 @@ def __init__(self, path=None, delete=None, kind="temp"): self.path = path self.delete = delete self.kind = kind + self._finalizer = None + if path: + self._register_finalizer() + + def _register_finalizer(self): + if self.delete and self.path: + self._finalizer = finalize( + self, + self._cleanup, + self.path, + warn_message=None + ) + else: + self._finalizer = None def __repr__(self): return "<{} {!r}>".format(self.__class__.__name__, self.path) @@ -72,11 +88,27 @@ def create(self): self.path = os.path.realpath( tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) ) + self._register_finalizer() logger.debug("Created temporary directory: {}".format(self.path)) + @classmethod + def _cleanup(cls, name, warn_message=None): + try: + rmtree(name) + except OSError: + pass + else: + if warn_message: + warnings.warn(warn_message, ResourceWarning) + def cleanup(self): """Remove the temporary directory created and reset state """ - if self.path is not None and os.path.exists(self.path): - rmtree(self.path) - self.path = None + if getattr(self._finalizer, "detach", None) and self._finalizer.detach(): + if os.path.exists(self.path): + try: + rmtree(self.path) + except OSError: + pass + else: + self.path = None diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py index c18716706b..2a0743a338 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -19,11 +19,10 @@ InstallRequirement, SafeFileCache ) -os.environ["PIP_SHIMS_BASE_MODULE"] = str("notpip") +os.environ["PIP_SHIMS_BASE_MODULE"] = str("pipenv.patched.notpip") from pip_shims.shims import do_import, VcsSupport, 
WheelCache from packaging.requirements import Requirement from packaging.specifiers import SpecifierSet, Specifier -from packaging.markers import Op, Value, Variable, Marker InstallationError = do_import(("exceptions.InstallationError", "7.0", "9999")) from pipenv.patched.notpip._internal.resolve import Resolver as PipResolver @@ -31,7 +30,7 @@ from pipenv.environments import PIPENV_CACHE_DIR as CACHE_DIR from ..exceptions import NoCandidateFound from ..utils import (fs_str, is_pinned_requirement, lookup_table, dedup, - make_install_requirement, clean_requires_python) + make_install_requirement, clean_requires_python) from .base import BaseRepository try: @@ -243,6 +242,7 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= dist = None ireq.isolated = False ireq._wheel_cache = wheel_cache + try: from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer except ImportError: @@ -295,7 +295,18 @@ def resolve_reqs(self, download_dir, ireq, wheel_cache, setup_requires={}, dist= resolver = PipResolver(**resolver_kwargs) resolver.require_hashes = False results = resolver._resolve_one(reqset, ireq) - reqset.cleanup_files() + + cleanup_fn = getattr(reqset, "cleanup_files", None) + if cleanup_fn is not None: + try: + cleanup_fn() + except OSError: + pass + + if ireq.editable and (not ireq.source_dir or not os.path.exists(ireq.source_dir)): + if ireq.editable: + self._source_dir = TemporaryDirectory(fs_str("source")) + ireq.ensure_has_source_dir(self.source_dir) if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)): # Collect setup_requires info from local eggs. diff --git a/pipenv/project.py b/pipenv/project.py index f7e57245ad..d25dba9a56 100644 --- a/pipenv/project.py +++ b/pipenv/project.py @@ -15,7 +15,6 @@ import pipfile.api import six import vistir -import virtualenv as _virtualenv import toml from .cmdparse import Script @@ -84,6 +83,8 @@ def default(self, obj): if isinstance(obj, (ContainerElement, TokenElement)): return obj.primitive_value + elif isinstance(obj, vistir.compat.Path): + obj = obj.as_posix() return super(_LockFileEncoder, self).default(obj) def encode(self, obj): @@ -988,7 +989,35 @@ def _pyversion(self): def env_paths(self): location = self.virtualenv_location if self.virtualenv_location else sys.prefix prefix = vistir.compat.Path(location) - home, lib, inc, bin_ = _virtualenv.path_locations(prefix) + import importlib + try: + _virtualenv = importlib.import_module("virtualenv") + except ImportError: + with vistir.contextmanagers.temp_path(): + from string import Formatter + formatter = Formatter() + import sysconfig + if getattr(sys, "real_prefix", None): + scheme = sysconfig._get_default_scheme() + sysconfig._INSTALL_SCHEMES["posix_prefix"]["purelib"] + if not scheme: + scheme = "posix_prefix" if not sys.platform == "win32" else "nt" + is_purelib = "purelib" in sysconfig._INSTALL_SCHEMES[scheme] + lib_key = "purelib" if is_purelib else "platlib" + lib = sysconfig._INSTALL_SCHEMES[scheme][lib_key] + fields = [field for _, field, _, _ in formatter.parse() if field] + config = { + "py_version_short": self._pyversion, + } + for field in fields: + if field not in config: + config[field] = prefix + sys.path = [ + os.path.join(sysconfig._INSTALL_SCHEMES[scheme][lib_key], "site-packages"), + ] + sys.path + six.reload_module(importlib) + _virtualenv = importlib.import_module("virtualenv") + home, lib, inc, bin_ = _virtualenv.path_locations(prefix.absolute().as_posix()) paths = { "lib": lib, "include": inc, 
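(The ``env_paths`` fallback above derives library paths from ``sysconfig`` install schemes when ``virtualenv`` cannot be imported directly, which is what the ``platlib`` fix in news/3113 refers to. A simplified sketch of the same idea using the public ``sysconfig`` API; the helper name is hypothetical and this is not the patched function itself.)

    import os
    import sysconfig

    def guess_site_packages(prefix):
        # Pick an install scheme, prefer "purelib" and fall back to "platlib",
        # then expand the scheme template against the given prefix.
        scheme = "nt" if os.name == "nt" else "posix_prefix"
        paths = sysconfig.get_paths(
            scheme=scheme, vars={"base": prefix, "platbase": prefix}
        )
        return paths.get("purelib") or paths.get("platlib")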
diff --git a/pipenv/resolver.py b/pipenv/resolver.py index b5ea41d9ed..9ef46878c9 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -70,7 +70,6 @@ def resolve(packages, pre, project, sources, clear, system, requirements_dir=Non ) from pipenv.core import project - sources = ( replace_pypi_sources(project.pipfile_sources, pypi_mirror_source) if pypi_mirror_source @@ -111,8 +110,8 @@ def main(): parsed, remaining = parser.parse_known_args() # sys.argv = remaining parsed = handle_parsed_args(parsed) - _main(parsed.pre, parsed.clear, parsed.verbose, parsed.system, parsed.requirements_dir, - parsed.packages) + _main(parsed.pre, parsed.clear, parsed.verbose, parsed.system, + parsed.requirements_dir, parsed.packages) if __name__ == "__main__": diff --git a/pipenv/utils.py b/pipenv/utils.py index 9fa6e571f7..c0979863c9 100644 --- a/pipenv/utils.py +++ b/pipenv/utils.py @@ -345,7 +345,6 @@ def venv_resolve_deps( return [] req_dir = create_tracked_tempdir(prefix="pipenv", suffix="requirements") - cmd = [ which("python", allow_global=allow_global), Path(resolver.__file__.rstrip("co")).as_posix() @@ -364,7 +363,6 @@ def venv_resolve_deps( os.environ["PIPENV_VERBOSITY"] = str(environments.PIPENV_VERBOSITY) os.environ["PIPENV_REQ_DIR"] = fs_str(req_dir) os.environ["PIP_NO_INPUT"] = fs_str("1") - out = to_native_string("") EOF.__module__ = "pexpect.exceptions" with spinner(text=fs_str("Locking..."), spinner_name=environments.PIPENV_SPINNER, @@ -430,6 +428,8 @@ def resolve_deps( index_lookup = {} markers_lookup = {} python_path = which("python", allow_global=allow_global) + if not os.environ.get("PIP_SRC"): + os.environ["PIP_SRC"] = project.virtualenv_src_location backup_python_path = sys.executable results = [] if not deps: @@ -1116,23 +1116,19 @@ def get_vcs_deps( packages = getattr(project, section) except AttributeError: return [], [] - if os.environ.get("PIP_SRC"): - src_dir = Path( - os.environ.get("PIP_SRC", os.path.join(project.virtualenv_location, "src")) - ) - src_dir.mkdir(mode=0o775, exist_ok=True) - else: - src_dir = create_tracked_tempdir(prefix="pipenv-lock-dir") for pkg_name, pkg_pipfile in packages.items(): requirement = Requirement.from_pipfile(pkg_name, pkg_pipfile) name = requirement.normalized_name commit_hash = None if requirement.is_vcs: - with locked_repository(requirement) as repo: - commit_hash = repo.get_commit_hash() - lockfile[name] = requirement.pipfile_entry[1] - lockfile[name]['ref'] = commit_hash - reqs.append(requirement) + try: + with locked_repository(requirement) as repo: + commit_hash = repo.get_commit_hash() + lockfile[name] = requirement.pipfile_entry[1] + lockfile[name]['ref'] = commit_hash + reqs.append(requirement) + except OSError: + continue return reqs, lockfile @@ -1257,11 +1253,11 @@ def is_virtual_environment(path): @contextmanager def locked_repository(requirement): from .vendor.vistir.path import create_tracked_tempdir - src_dir = create_tracked_tempdir(prefix="pipenv-src") if not requirement.is_vcs: return original_base = os.environ.pop("PIP_SHIMS_BASE_MODULE", None) os.environ["PIP_SHIMS_BASE_MODULE"] = fs_str("pipenv.patched.notpip") + src_dir = create_tracked_tempdir(prefix="pipenv-", suffix="-src") try: with requirement.req.locked_vcs_repo(src_dir=src_dir) as repo: yield repo diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py index fd5ac99d6f..ca07b42f76 100644 --- a/pipenv/vendor/pythonfinder/utils.py +++ b/pipenv/vendor/pythonfinder/utils.py @@ -1,6 +1,7 @@ # -*- coding=utf-8 -*- from __future__ import 
absolute_import, print_function +import itertools import locale import os import subprocess @@ -21,6 +22,9 @@ except ImportError: from backports.functools_lru_cache import lru_cache +six.add_move(six.MovedAttribute("Iterable", "collections", "collections.abc")) +from six.moves import Iterable + PYTHON_IMPLEMENTATIONS = ( "python", "ironpython", "jython", "pypy", "anaconda", "miniconda", @@ -123,7 +127,21 @@ def filter_pythons(path): return filter(lambda x: path_is_python(x), path.iterdir()) +# def unnest(item): +# if isinstance(next((i for i in item), None), (list, tuple)): +# return chain(*filter(None, item)) +# return chain(filter(None, item)) + + def unnest(item): - if isinstance(next((i for i in item), None), (list, tuple)): - return chain(*filter(None, item)) - return chain(filter(None, item)) + if isinstance(item, Iterable) and not isinstance(item, six.string_types): + item, target = itertools.tee(item, 2) + else: + target = item + for el in target: + if isinstance(el, Iterable) and not isinstance(el, six.string_types): + el, el_copy = itertools.tee(el, 2) + for sub in unnest(el_copy): + yield sub + else: + yield el diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py index ba40e5f51f..8ceccd792d 100644 --- a/pipenv/vendor/requirementslib/__init__.py +++ b/pipenv/vendor/requirementslib/__init__.py @@ -1,5 +1,5 @@ # -*- coding=utf-8 -*- -__version__ = '1.2.1' +__version__ = '1.2.2' import logging diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py index 3a029cbcda..b3bc013239 100644 --- a/pipenv/vendor/requirementslib/models/requirements.py +++ b/pipenv/vendor/requirementslib/models/requirements.py @@ -16,7 +16,7 @@ from packaging.requirements import Requirement as PackagingRequirement from packaging.specifiers import Specifier, SpecifierSet from packaging.utils import canonicalize_name -from pip_shims.shims import _strip_extras, parse_version, path_to_url, url_to_path +from pip_shims.shims import _strip_extras, parse_version, path_to_url, url_to_path, Link from six.moves.urllib import parse as urllib_parse from six.moves.urllib.parse import unquote from vistir.compat import FileNotFoundError, Path @@ -41,11 +41,11 @@ ) -@attr.s +@attr.s(slots=True) class NamedRequirement(BaseRequirement): name = attr.ib() version = attr.ib(validator=attr.validators.optional(validate_specifiers)) - req = attr.ib() + req = attr.ib(type=PkgResourcesRequirement) extras = attr.ib(default=attr.Factory(list)) editable = attr.ib(default=False) @@ -109,21 +109,28 @@ def pipfile_part(self): ) -@attr.s +@attr.s(slots=True) class FileRequirement(BaseRequirement): """File requirements for tar.gz installable files or wheels or setup.py containing directories.""" + #: Path to the relevant `setup.py` location setup_path = attr.ib(default=None) + #: path to hit - without any of the VCS prefixes (like git+ / http+ / etc) path = attr.ib(default=None, validator=attr.validators.optional(validate_path)) - # : path to hit - without any of the VCS prefixes (like git+ / http+ / etc) - editable = attr.ib(default=False, type=bool) - extras = attr.ib(default=attr.Factory(list), type=list) - uri = attr.ib(type=six.string_types) + #: Whether the package is editable + editable = attr.ib(default=False) + #: Extras if applicable + extras = attr.ib(default=attr.Factory(list)) + #: URI of the package + uri = attr.ib() + #: Link object representing the package to clone link = attr.ib() - name = 
attr.ib(type=six.string_types) - req = attr.ib(type=PkgResourcesRequirement) - _has_hashed_name = False + _has_hashed_name = attr.ib(default=False) + #: Package name + name = attr.ib() + #: A :class:`~pkg_resources.Requirement` isntance + req = attr.ib() _uri_scheme = attr.ib(default=None) @classmethod @@ -470,14 +477,19 @@ def pipfile_part(self): return {name: pipfile_dict} -@attr.s +@attr.s(slots=True) class VCSRequirement(FileRequirement): + #: Whether the repository is editable editable = attr.ib(default=None) + #: URI for the repository uri = attr.ib(default=None) + #: path to the repository, if it's local path = attr.ib(default=None, validator=attr.validators.optional(validate_path)) + #: vcs type, i.e. git/hg/svn vcs = attr.ib(validator=attr.validators.optional(validate_vcs), default=None) - # : vcs reference name (branch / commit / tag) + #: vcs reference name (branch / commit / tag) ref = attr.ib(default=None) + #: Subdirectory to use for installation if applicable subdirectory = attr.ib(default=None) _repo = attr.ib(default=None) _base_line = attr.ib(default=None) @@ -585,16 +597,28 @@ def get_checkout_dir(self, src_dir=None): def get_vcs_repo(self, src_dir=None): from .vcs import VCSRepository checkout_dir = self.get_checkout_dir(src_dir=src_dir) - url = "{0}#egg={1}".format(self.vcs_uri, self.name) + url = build_vcs_link( + self.vcs, + self.uri, + name=self.name, + ref=self.ref, + subdirectory=self.subdirectory, + extras=self.extras + ) vcsrepo = VCSRepository( url=url, name=self.name, ref=self.ref if self.ref else None, checkout_directory=checkout_dir, - vcs_type=self.vcs + vcs_type=self.vcs, + subdirectory=self.subdirectory ) if not self.is_local: vcsrepo.obtain() + if self.subdirectory: + self.setup_path = os.path.join(checkout_dir, self.subdirectory, "setup.py") + else: + self.setup_path = os.path.join(checkout_dir, "setup.py") return vcsrepo def get_commit_hash(self): @@ -612,15 +636,15 @@ def update_repo(self, src_dir=None, ref=None): if not self.is_local and ref is not None: self.repo.checkout_ref(ref) repo_hash = self.repo.get_commit_hash() + self.req.revision = repo_hash return repo_hash @contextmanager def locked_vcs_repo(self, src_dir=None): + if not src_dir: + src_dir = create_tracked_tempdir(prefix="requirementslib-", suffix="-src") vcsrepo = self.get_vcs_repo(src_dir=src_dir) - if self.ref and not self.is_local: - vcsrepo.checkout_ref(self.ref) - self.ref = self.get_commit_hash() - self.req.revision = self.ref + self.req.revision = vcsrepo.get_commit_hash() # Remove potential ref in the end of uri after ref is parsed if "@" in self.link.show_url and "@" in self.uri: @@ -1070,7 +1094,7 @@ def as_ireq(self): if self.editable or self.req.editable: if ireq_line.startswith("-e "): ireq_line = ireq_line[len("-e "):] - with ensure_setup_py(self.req.path): + with ensure_setup_py(self.req.setup_path): ireq = ireq_from_editable(ireq_line) else: ireq = ireq_from_line(ireq_line) diff --git a/pipenv/vendor/requirementslib/models/vcs.py b/pipenv/vendor/requirementslib/models/vcs.py index fb2e6bc30f..4efb9bd319 100644 --- a/pipenv/vendor/requirementslib/models/vcs.py +++ b/pipenv/vendor/requirementslib/models/vcs.py @@ -1,7 +1,6 @@ # -*- coding=utf-8 -*- import attr from pip_shims import VcsSupport, parse_version, pip_version -import vistir import os @@ -14,6 +13,7 @@ class VCSRepository(object): name = attr.ib() checkout_directory = attr.ib() vcs_type = attr.ib() + subdirectory = attr.ib(default=None) commit_sha = attr.ib(default=None) ref = attr.ib(default=None) repo_instance 
= attr.ib() @@ -31,35 +31,31 @@ def is_local(self): return url.startswith("file") def obtain(self): - if not os.path.exists(self.checkout_directory): + if (os.path.exists(self.checkout_directory) and not + self.repo_instance.is_repository_directory(self.checkout_directory)): + self.repo_instance.unpack(self.checkout_directory) + elif not os.path.exists(self.checkout_directory): self.repo_instance.obtain(self.checkout_directory) - if self.ref: - self.checkout_ref(self.ref) - self.commit_sha = self.get_commit_hash(self.ref) else: - if not self.commit_sha: - self.commit_sha = self.get_commit_hash() + if self.ref: + self.checkout_ref(self.ref) + if not self.commit_sha: + self.commit_sha = self.get_commit_hash() def checkout_ref(self, ref): if not self.repo_instance.is_commit_id_equal( - self.checkout_directory, self.get_commit_hash(ref) + self.checkout_directory, self.get_commit_hash() ) and not self.repo_instance.is_commit_id_equal(self.checkout_directory, ref): if not self.is_local: self.update(ref) def update(self, ref): target_ref = self.repo_instance.make_rev_options(ref) - sha = self.repo_instance.get_revision_sha(self.checkout_directory, target_ref.arg_rev) - target_rev = target_ref.make_new(sha) if parse_version(pip_version) > parse_version("18.0"): self.repo_instance.update(self.checkout_directory, self.url, target_ref) else: self.repo_instance.update(self.checkout_directory, target_ref) - self.commit_hash = self.get_commit_hash(ref) + self.commit_sha = self.get_commit_hash() def get_commit_hash(self, ref=None): - if ref: - target_ref = self.repo_instance.make_rev_options(ref) - return self.repo_instance.get_revision_sha(self.checkout_directory, target_ref.arg_rev) - # return self.repo_instance.get_revision(self.checkout_directory) return self.repo_instance.get_revision(self.checkout_directory) diff --git a/pipenv/vendor/resolvelib/LICENSE b/pipenv/vendor/resolvelib/LICENSE new file mode 100644 index 0000000000..b9077766e9 --- /dev/null +++ b/pipenv/vendor/resolvelib/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
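(The ``VCSRequirement``/``VCSRepository`` changes above thread ``subdirectory`` through to the generated VCS link and the eventual ``setup.py`` path. For reference, a rough stand-in showing the pip-style URL shape such a link takes, e.g. ``git+https://host/repo.git@ref#egg=name&subdirectory=dir``; this is a sketch, not requirementslib's ``build_vcs_link``.)

    def sketch_vcs_link(vcs, uri, name, ref=None, subdirectory=None):
        # e.g. git+https://github.com/org/repo.git@v1.0#egg=pkg&subdirectory=src/pkg
        link = "{0}+{1}".format(vcs, uri)
        if ref:
            link = "{0}@{1}".format(link, ref)
        fragments = ["egg={0}".format(name)]
        if subdirectory:
            fragments.append("subdirectory={0}".format(subdirectory))
        return "{0}#{1}".format(link, "&".join(fragments))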
diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 35a32eb6ce..18352c2ebe 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -27,7 +27,7 @@ requests==2.20.0 idna==2.7 urllib3==1.24 certifi==2018.10.15 -requirementslib==1.2.1 +requirementslib==1.2.2 attrs==18.2.0 distlib==0.2.8 packaging==18.0 diff --git a/pipenv/vendor/vistir/__init__.py b/pipenv/vendor/vistir/__init__.py index 912ab0a436..c8a995faa0 100644 --- a/pipenv/vendor/vistir/__init__.py +++ b/pipenv/vendor/vistir/__init__.py @@ -1,7 +1,12 @@ # -*- coding=utf-8 -*- from __future__ import absolute_import, unicode_literals -from .compat import NamedTemporaryFile, TemporaryDirectory, partialmethod, to_native_string +from .compat import ( + NamedTemporaryFile, + TemporaryDirectory, + partialmethod, + to_native_string, +) from .contextmanagers import ( atomic_open_for_write, cd, @@ -10,12 +15,23 @@ temp_path, spinner, ) -from .misc import load_path, partialclass, run, shell_escape +from .misc import ( + load_path, + partialclass, + run, + shell_escape, + decode_for_output, + to_text, + to_bytes, + take, + chunked, + divide, +) from .path import mkdir_p, rmtree, create_tracked_tempdir, create_tracked_tempfile from .spin import VistirSpinner, create_spinner -__version__ = '0.2.2' +__version__ = "0.2.3" __all__ = [ @@ -38,5 +54,11 @@ "create_spinner", "create_tracked_tempdir", "create_tracked_tempfile", - "to_native_string" + "to_native_string", + "decode_for_output", + "to_text", + "to_bytes", + "take", + "chunked", + "divide", ] diff --git a/pipenv/vendor/vistir/path.py b/pipenv/vendor/vistir/path.py index 68e6d464df..ba00815996 100644 --- a/pipenv/vendor/vistir/path.py +++ b/pipenv/vendor/vistir/path.py @@ -8,6 +8,7 @@ import posixpath import shutil import stat +import sys import warnings import six @@ -166,11 +167,12 @@ def is_readonly_path(fn): Permissions check is `bool(path.stat & stat.S_IREAD)` or `not os.access(path, os.W_OK)` """ - from .misc import to_bytes + from .compat import to_native_string - fn = to_bytes(fn, encoding="utf-8") + fn = to_native_string(fn) if os.path.exists(fn): - return bool(os.stat(fn).st_mode & stat.S_IREAD) and not os.access(fn, os.W_OK) + file_stat = os.stat(fn).st_mode + return not bool(file_stat & stat.S_IWRITE) or not os.access(fn, os.W_OK) return False @@ -182,9 +184,10 @@ def mkdir_p(newdir, mode=0o777): :raises: OSError if a file is encountered along the way """ # http://code.activestate.com/recipes/82465-a-friendly-mkdir/ - from .misc import to_bytes, to_text + from .misc import to_text + from .compat import to_native_string - newdir = to_bytes(newdir, "utf-8") + newdir = to_native_string(newdir) if os.path.exists(newdir): if not os.path.isdir(newdir): raise OSError( @@ -193,9 +196,9 @@ def mkdir_p(newdir, mode=0o777): ) ) else: - head, tail = os.path.split(to_bytes(newdir, encoding="utf-8")) + head, tail = os.path.split(newdir) # Make sure the tail doesn't point to the asame place as the head - curdir = to_bytes(".", encoding="utf-8") + curdir = to_native_string(".") tail_and_head_match = ( os.path.relpath(tail, start=os.path.basename(head)) == curdir ) @@ -242,7 +245,7 @@ def create_tracked_tempdir(*args, **kwargs): tempdir = TemporaryDirectory(*args, **kwargs) TRACKED_TEMPORARY_DIRECTORIES.append(tempdir) atexit.register(tempdir.cleanup) - warnings.simplefilter("default", ResourceWarning) + warnings.simplefilter("ignore", ResourceWarning) return tempdir.name @@ -266,12 +269,20 @@ def set_write_bit(fn): :param str fn: The target filename or path """ - 
from .misc import to_bytes, locale_encoding + from .compat import to_native_string - fn = to_bytes(fn, encoding=locale_encoding) + fn = to_native_string(fn) if not os.path.exists(fn): return - os.chmod(fn, stat.S_IWRITE | stat.S_IWUSR | stat.S_IRUSR) + file_stat = os.stat(fn).st_mode + os.chmod(fn, file_stat | stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) + if not os.path.isdir(fn): + return + for root, dirs, files in os.walk(fn, topdown=False): + for dir_ in [os.path.join(root,d) for d in dirs]: + set_write_bit(dir_) + for file_ in [os.path.join(root, f) for f in files]: + set_write_bit(file_) def rmtree(directory, ignore_errors=False): @@ -288,14 +299,14 @@ def rmtree(directory, ignore_errors=False): Setting `ignore_errors=True` may cause this to silently fail to delete the path """ - from .misc import locale_encoding, to_bytes + from .compat import to_native_string - directory = to_bytes(directory, encoding=locale_encoding) + directory = to_native_string(directory) try: shutil.rmtree( directory, ignore_errors=ignore_errors, onerror=handle_remove_readonly ) - except (IOError, OSError) as exc: + except (IOError, OSError, FileNotFoundError) as exc: # Ignore removal failures where the file doesn't exist if exc.errno == errno.ENOENT: pass @@ -316,23 +327,24 @@ def handle_remove_readonly(func, path, exc): :func:`set_write_bit` on the target path and try again. """ # Check for read-only attribute - from .compat import ResourceWarning - from .misc import to_bytes + from .compat import ResourceWarning, FileNotFoundError, to_native_string - PERM_ERRORS = (errno.EACCES, errno.EPERM) + PERM_ERRORS = (errno.EACCES, errno.EPERM, errno.ENOENT) default_warning_message = ( "Unable to remove file due to permissions restriction: {!r}" ) # split the initial exception out into its type, exception, and traceback exc_type, exc_exception, exc_tb = exc - path = to_bytes(path, encoding="utf-8") + path = to_native_string(path) if is_readonly_path(path): # Apply write permission and call original function set_write_bit(path) try: func(path) - except (OSError, IOError) as e: - if e.errno in PERM_ERRORS: + except (OSError, IOError, FileNotFoundError) as e: + if e.errno == errno.ENOENT: + return + elif e.errno in PERM_ERRORS: warnings.warn(default_warning_message.format(path), ResourceWarning) return @@ -340,17 +352,20 @@ def handle_remove_readonly(func, path, exc): set_write_bit(path) try: func(path) - except (OSError, IOError) as e: + except (OSError, IOError, FileNotFoundError) as e: if e.errno in PERM_ERRORS: warnings.warn(default_warning_message.format(path), ResourceWarning) + pass elif e.errno == errno.ENOENT: # File already gone - return + pass else: raise - return else: - raise - raise exc + return + elif exc_exception.errno == errno.ENOENT: + pass + else: + raise exc_exception def walk_up(bottom): diff --git a/pipenv/vendor/vistir/spin.py b/pipenv/vendor/vistir/spin.py index 6b6e498f3d..20587d9db7 100644 --- a/pipenv/vendor/vistir/spin.py +++ b/pipenv/vendor/vistir/spin.py @@ -48,7 +48,7 @@ def __exit__(self, exc_type, exc_val, traceback): if exc_type: import traceback from .misc import decode_for_output - self.write_err(decode_for_output(traceback.format_exception(traceback))) + self.write_err(decode_for_output(traceback.format_exception(*sys.exc_info()))) self._close_output_buffer() return False diff --git a/tasks/vendoring/patches/patched/pip18.patch b/tasks/vendoring/patches/patched/pip18.patch index 150ee32f55..f4e607c158 100644 --- a/tasks/vendoring/patches/patched/pip18.patch +++ 
b/tasks/vendoring/patches/patched/pip18.patch @@ -19,6 +19,73 @@ index 96f3b65c..cc5b3d15 100644 "python": platform.python_version(), "implementation": { "name": platform.python_implementation(), +diff --git a/pipenv/patched/pip/_internal/utils/temp_dir.py b/pipenv/patched/pip/_internal/utils/temp_dir.py +index edc506bf..84d57dac 100644 +--- a/pipenv/patched/pip/_internal/utils/temp_dir.py ++++ b/pipenv/patched/pip/_internal/utils/temp_dir.py +@@ -3,8 +3,10 @@ from __future__ import absolute_import + import logging + import os.path + import tempfile ++import warnings + + from pip._internal.utils.misc import rmtree ++from pipenv.vendor.vistir.compat import finalize, ResourceWarning + + logger = logging.getLogger(__name__) + +@@ -45,6 +47,20 @@ class TempDirectory(object): + self.path = path + self.delete = delete + self.kind = kind ++ self._finalizer = None ++ if path: ++ self._register_finalizer() ++ ++ def _register_finalizer(self): ++ if self.delete and self.path: ++ self._finalizer = finalize( ++ self, ++ self._cleanup, ++ self.path, ++ warn_message=None ++ ) ++ else: ++ self._finalizer = None + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.path) +@@ -72,11 +88,27 @@ class TempDirectory(object): + self.path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) + ) ++ self._register_finalizer() + logger.debug("Created temporary directory: {}".format(self.path)) + ++ @classmethod ++ def _cleanup(cls, name, warn_message=None): ++ try: ++ rmtree(name) ++ except OSError: ++ pass ++ else: ++ if warn_message: ++ warnings.warn(warn_message, ResourceWarning) ++ + def cleanup(self): + """Remove the temporary directory created and reset state + """ +- if self.path is not None and os.path.exists(self.path): +- rmtree(self.path) +- self.path = None ++ if getattr(self._finalizer, "detach", None) and self._finalizer.detach(): ++ if os.path.exists(self.path): ++ try: ++ rmtree(self.path) ++ except OSError: ++ pass ++ else: ++ self.path = None diff --git a/pipenv/patched/pip/_internal/index.py b/pipenv/patched/pip/_internal/index.py index 8c2f24f1..cdd48874 100644 --- a/pipenv/patched/pip/_internal/index.py diff --git a/tasks/vendoring/patches/patched/piptools.patch b/tasks/vendoring/patches/patched/piptools.patch index 1db5ef44bd..3799ccf49a 100644 --- a/tasks/vendoring/patches/patched/piptools.patch +++ b/tasks/vendoring/patches/patched/piptools.patch @@ -12,30 +12,56 @@ index 1fa3805..c0ecec8 100644 install_req_from_editable, ) diff --git a/pipenv/patched/piptools/_compat/pip_compat.py b/pipenv/patched/piptools/_compat/pip_compat.py -index 28da51f..de9b435 100644 +index 28da51f..c466ef0 100644 --- a/pipenv/patched/piptools/_compat/pip_compat.py +++ b/pipenv/patched/piptools/_compat/pip_compat.py -@@ -1,12 +1,13 @@ +@@ -1,45 +1,55 @@ # -*- coding=utf-8 -*- - import importlib +-import importlib -import pip -+from pip_shims import pip_version - import pkg_resources +-import pkg_resources ++__all__ = [ ++ "InstallRequirement", ++ "parse_requirements", ++ "RequirementSet", ++ "user_cache_dir", ++ "FAVORITE_HASH", ++ "is_file_url", ++ "url_to_path", ++ "PackageFinder", ++ "FormatControl", ++ "Wheel", ++ "Command", ++ "cmdoptions", ++ "get_installed_distributions", ++ "PyPI", ++ "SafeFileCache", ++ "InstallationError", ++ "parse_version", ++ "pip_version", ++ "install_req_from_editable", ++ "install_req_from_line", ++ "user_cache_dir" ++] -def do_import(module_path, subimport=None, old_path=None): -+def do_import(module_path, subimport=None, 
old_path=None, vendored_name=None): - old_path = old_path or module_path +- old_path = old_path or module_path - prefixes = ["pip._internal", "pip"] -+ prefix = vendored_name if vendored_name else "pip" -+ prefixes = ["{0}._internal".format(prefix), "{0}".format(prefix)] - paths = [module_path, old_path] - search_order = ["{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None] - package = subimport if subimport else None -@@ -21,25 +22,28 @@ def do_import(module_path, subimport=None, old_path=None): - return getattr(imported, package) - - +- paths = [module_path, old_path] +- search_order = ["{0}.{1}".format(p, pth) for p in prefixes for pth in paths if pth is not None] +- package = subimport if subimport else None +- for to_import in search_order: +- if not subimport: +- to_import, _, package = to_import.rpartition(".") +- try: +- imported = importlib.import_module(to_import) +- except ImportError: +- continue +- else: +- return getattr(imported, package) +- +- -InstallRequirement = do_import('req.req_install', 'InstallRequirement') -parse_requirements = do_import('req.req_file', 'parse_requirements') -RequirementSet = do_import('req.req_set', 'RequirementSet') @@ -50,34 +76,38 @@ index 28da51f..de9b435 100644 -cmdoptions = do_import('cli.cmdoptions', old_path='cmdoptions') -get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils') -PyPI = do_import('models.index', 'PyPI') -+InstallRequirement = do_import('req.req_install', 'InstallRequirement', vendored_name="notpip") -+parse_requirements = do_import('req.req_file', 'parse_requirements', vendored_name="notpip") -+RequirementSet = do_import('req.req_set', 'RequirementSet', vendored_name="notpip") -+user_cache_dir = do_import('utils.appdirs', 'user_cache_dir', vendored_name="notpip") -+FAVORITE_HASH = do_import('utils.hashes', 'FAVORITE_HASH', vendored_name="notpip") -+is_file_url = do_import('download', 'is_file_url', vendored_name="notpip") -+url_to_path = do_import('download', 'url_to_path', vendored_name="notpip") -+PackageFinder = do_import('index', 'PackageFinder', vendored_name="notpip") -+FormatControl = do_import('index', 'FormatControl', vendored_name="notpip") -+Wheel = do_import('wheel', 'Wheel', vendored_name="notpip") -+Command = do_import('cli.base_command', 'Command', old_path='basecommand', vendored_name="notpip") -+cmdoptions = do_import('cli.cmdoptions', old_path='cmdoptions', vendored_name="notpip") -+get_installed_distributions = do_import('utils.misc', 'get_installed_distributions', old_path='utils', vendored_name="notpip") -+PyPI = do_import('models.index', 'PyPI', vendored_name='notpip') -+SafeFileCache = do_import('download', 'SafeFileCache', vendored_name='notpip') -+InstallationError = do_import('exceptions', 'InstallationError', vendored_name='notpip') ++from pipenv.vendor.appdirs import user_cache_dir ++from pip_shims.shims import ( ++ InstallRequirement, ++ parse_requirements, ++ RequirementSet, ++ FAVORITE_HASH, ++ is_file_url, ++ url_to_path, ++ PackageFinder, ++ FormatControl, ++ Wheel, ++ Command, ++ cmdoptions, ++ get_installed_distributions, ++ PyPI, ++ SafeFileCache, ++ InstallationError, ++ parse_version, ++ pip_version, ++) # pip 18.1 has refactored InstallRequirement constructors use by pip-tools. 
-if pkg_resources.parse_version(pip.__version__) < pkg_resources.parse_version('18.1'): -+if pkg_resources.parse_version(pip_version) < pkg_resources.parse_version('18.1'): ++if parse_version(pip_version) < parse_version('18.1'): install_req_from_line = InstallRequirement.from_line install_req_from_editable = InstallRequirement.from_editable else: - install_req_from_line = do_import('req.constructors', 'install_req_from_line') - install_req_from_editable = do_import('req.constructors', 'install_req_from_editable') -+ install_req_from_line = do_import('req.constructors', 'install_req_from_line', vendored_name="notpip") -+ install_req_from_editable = do_import('req.constructors', 'install_req_from_editable', vendored_name="notpip") -+ ++ from pip_shims.shims import ( ++ install_req_from_editable, install_req_from_line ++ ) diff --git a/pipenv/patched/piptools/repositories/local.py b/pipenv/patched/piptools/repositories/local.py index 08dabe1..480ad1e 100644 --- a/pipenv/patched/piptools/repositories/local.py @@ -92,7 +122,7 @@ index 08dabe1..480ad1e 100644 else: return self.repository.find_best_match(ireq, prereleases) diff --git a/pipenv/patched/piptools/repositories/pypi.py b/pipenv/patched/piptools/repositories/pypi.py -index bf69803..a1a3906 100644 +index bf69803..31b85b9 100644 --- a/pipenv/patched/piptools/repositories/pypi.py +++ b/pipenv/patched/piptools/repositories/pypi.py @@ -1,7 +1,7 @@ @@ -104,7 +134,7 @@ index bf69803..a1a3906 100644 import hashlib import os from contextlib import contextmanager -@@ -15,13 +15,23 @@ from .._compat import ( +@@ -15,13 +15,22 @@ from .._compat import ( Wheel, FAVORITE_HASH, TemporaryDirectory, @@ -113,11 +143,10 @@ index bf69803..a1a3906 100644 + InstallRequirement, + SafeFileCache ) -+os.environ["PIP_SHIMS_BASE_MODULE"] = str("notpip") ++os.environ["PIP_SHIMS_BASE_MODULE"] = str("pip") +from pip_shims.shims import do_import, VcsSupport, WheelCache +from packaging.requirements import Requirement +from packaging.specifiers import SpecifierSet, Specifier -+from packaging.markers import Op, Value, Variable, Marker +InstallationError = do_import(("exceptions.InstallationError", "7.0", "9999")) +from pip._internal.resolve import Resolver as PipResolver + @@ -128,11 +157,11 @@ index bf69803..a1a3906 100644 -from ..utils import (fs_str, is_pinned_requirement, lookup_table, - make_install_requirement) +from ..utils import (fs_str, is_pinned_requirement, lookup_table, dedup, -+ make_install_requirement, clean_requires_python) ++ make_install_requirement, clean_requires_python) from .base import BaseRepository try: -@@ -31,10 +41,44 @@ except ImportError: +@@ -31,10 +40,44 @@ except ImportError: def RequirementTracker(): yield @@ -181,7 +210,7 @@ index bf69803..a1a3906 100644 class PyPIRepository(BaseRepository): -@@ -46,8 +90,9 @@ class PyPIRepository(BaseRepository): +@@ -46,8 +89,9 @@ class PyPIRepository(BaseRepository): config), but any other PyPI mirror can be used if index_urls is changed/configured on the Finder. 
""" @@ -192,7 +221,7 @@ index bf69803..a1a3906 100644 self.pip_options = pip_options index_urls = [pip_options.index_url] + pip_options.extra_index_urls -@@ -73,6 +118,10 @@ class PyPIRepository(BaseRepository): +@@ -73,6 +117,10 @@ class PyPIRepository(BaseRepository): # of all secondary dependencies for the given requirement, so we # only have to go to disk once for each requirement self._dependencies_cache = {} @@ -203,7 +232,7 @@ index bf69803..a1a3906 100644 # Setup file paths self.freshen_build_caches() -@@ -113,10 +162,13 @@ class PyPIRepository(BaseRepository): +@@ -113,10 +161,13 @@ class PyPIRepository(BaseRepository): if ireq.editable: return ireq # return itself as the best match @@ -219,7 +248,7 @@ index bf69803..a1a3906 100644 # Reuses pip's internal candidate sort key to sort matching_candidates = [candidates_by_version[ver] for ver in matching_versions] -@@ -126,25 +178,86 @@ class PyPIRepository(BaseRepository): +@@ -126,25 +177,87 @@ class PyPIRepository(BaseRepository): # Turn the candidate into a pinned InstallRequirement return make_install_requirement( @@ -236,8 +265,7 @@ index bf69803..a1a3906 100644 + def gen(ireq): + if self.DEFAULT_INDEX_URL not in self.finder.index_urls: + return - -- def resolve_reqs(self, download_dir, ireq, wheel_cache): ++ + url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name) + releases = self.session.get(url).json()['releases'] + @@ -266,7 +294,8 @@ index bf69803..a1a3906 100644 + try: + if ireq not in self._json_dep_cache: + self._json_dep_cache[ireq] = [g for g in gen(ireq)] -+ + +- def resolve_reqs(self, download_dir, ireq, wheel_cache): + return set(self._json_dep_cache[ireq]) + except Exception: + return set() @@ -291,6 +320,7 @@ index bf69803..a1a3906 100644 + dist = None + ireq.isolated = False + ireq._wheel_cache = wheel_cache ++ try: from pip._internal.operations.prepare import RequirementPreparer - from pip._internal.resolve import Resolver as PipResolver @@ -330,7 +360,7 @@ index bf69803..a1a3906 100644 } resolver = None preparer = None -@@ -177,15 +291,98 @@ class PyPIRepository(BaseRepository): +@@ -177,15 +291,109 @@ class PyPIRepository(BaseRepository): resolver_kwargs['preparer'] = preparer reqset = RequirementSet() ireq.is_direct = True @@ -339,9 +369,21 @@ index bf69803..a1a3906 100644 resolver = PipResolver(**resolver_kwargs) resolver.require_hashes = False results = resolver._resolve_one(reqset, ireq) - reqset.cleanup_files() +- reqset.cleanup_files() - return set(results) ++ cleanup_fn = getattr(reqset, "cleanup_files", None) ++ if cleanup_fn is not None: ++ try: ++ cleanup_fn() ++ except OSError: ++ pass ++ ++ if ireq.editable and (not ireq.source_dir or not os.path.exists(ireq.source_dir)): ++ if ireq.editable: ++ self._source_dir = TemporaryDirectory(fs_str("source")) ++ ireq.ensure_has_source_dir(self.source_dir) ++ + if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)): + # Collect setup_requires info from local eggs. + # Do this after we call the preparer on these reqs to make sure their @@ -432,7 +474,7 @@ index bf69803..a1a3906 100644 """ Given a pinned or an editable InstallRequirement, returns a set of dependencies (also InstallRequirements, but not necessarily pinned). 
-@@ -200,6 +397,7 @@ class PyPIRepository(BaseRepository): +@@ -200,6 +408,7 @@ class PyPIRepository(BaseRepository): # If a download_dir is passed, pip will unnecessarely # archive the entire source directory download_dir = None @@ -440,7 +482,7 @@ index bf69803..a1a3906 100644 elif ireq.link and not ireq.link.is_artifact: # No download_dir for VCS sources. This also works around pip # using git-checkout-index, which gets rid of the .git dir. -@@ -214,7 +412,8 @@ class PyPIRepository(BaseRepository): +@@ -214,7 +423,8 @@ class PyPIRepository(BaseRepository): wheel_cache = WheelCache(CACHE_DIR, self.pip_options.format_control) prev_tracker = os.environ.get('PIP_REQ_TRACKER') try: @@ -450,7 +492,7 @@ index bf69803..a1a3906 100644 finally: if 'PIP_REQ_TRACKER' in os.environ: if prev_tracker: -@@ -236,6 +435,10 @@ class PyPIRepository(BaseRepository): +@@ -236,6 +446,10 @@ class PyPIRepository(BaseRepository): if ireq.editable: return set() @@ -461,7 +503,7 @@ index bf69803..a1a3906 100644 if not is_pinned_requirement(ireq): raise TypeError( "Expected pinned requirement, got {}".format(ireq)) -@@ -243,24 +446,22 @@ class PyPIRepository(BaseRepository): +@@ -243,24 +457,22 @@ class PyPIRepository(BaseRepository): # We need to get all of the candidates that match our current version # pin, these will represent all of the files that could possibly # satisfy this constraint. diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 5f5c193019..0ab0ab22ed 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -56,9 +56,6 @@ def check_github_ssh(): return res -WE_HAVE_INTERNET = check_internet() -WE_HAVE_GITHUB_SSH_KEYS = check_github_ssh() - TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi') prepare_pypi_packages(PYPI_VENDOR_DIR) @@ -71,10 +68,13 @@ def pytest_runtest_setup(item): pytest.skip('requires github ssh') -@pytest.yield_fixture +@pytest.fixture def pathlib_tmpdir(request, tmpdir): yield Path(str(tmpdir)) - tmpdir.remove(ignore_errors=True) + try: + tmpdir.remove(ignore_errors=True) + except Exception: + pass # Borrowed from pip's test runner filesystem isolation @@ -102,6 +102,10 @@ def isolate(pathlib_tmpdir): os.environ["WORKON_HOME"] = fs_str(os.path.join(home_dir, ".virtualenvs")) +WE_HAVE_INTERNET = check_internet() +WE_HAVE_GITHUB_SSH_KEYS = check_github_ssh() + + class _PipenvInstance(object): """An instance of a Pipenv Project...""" def __init__(self, pypi=None, pipfile=True, chdir=False, path=None, home_dir=None): @@ -207,14 +211,12 @@ def PipenvInstance(): yield _PipenvInstance -@pytest.fixture(scope='module') -def pip_src_dir(request): +@pytest.fixture(autouse=True) +def pip_src_dir(request, pathlib_tmpdir): old_src_dir = os.environ.get('PIP_SRC', '') - new_src_dir = TemporaryDirectory(prefix='pipenv-', suffix='-testsrc') - os.environ['PIP_SRC'] = fs_str(new_src_dir.name) + os.environ['PIP_SRC'] = pathlib_tmpdir.as_posix() def finalize(): - new_src_dir.cleanup() os.environ['PIP_SRC'] = fs_str(old_src_dir) request.addfinalizer(finalize)
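(For context on the ``pip_src_dir`` change above: making it an ``autouse`` fixture that points ``PIP_SRC`` at a per-test temporary directory keeps VCS checkouts isolated between tests. Below is a yield-style sketch of the same save/point/restore pattern; it is illustrative, not the project's fixture, and it uses pytest's built-in ``tmp_path``.)

    import os
    import pytest

    @pytest.fixture(autouse=True)
    def pip_src_dir_sketch(tmp_path):
        old_src_dir = os.environ.get("PIP_SRC", "")
        # Point pip's VCS checkout location at an isolated per-test directory.
        os.environ["PIP_SRC"] = str(tmp_path)
        yield tmp_path
        # Restore the previous value so tests do not leak environment state.
        os.environ["PIP_SRC"] = old_src_dir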