Refactor operations.prepare.prepare_linked_requirement #8411

Merged
merged 4 commits on Jun 30, 2020
162 changes: 78 additions & 84 deletions src/pip/_internal/operations/prepare.py
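
The diff below splits prepare_linked_requirement into three helpers. As an orientation aid, the resulting structure looks roughly like this (a simplified sketch with bodies abridged; names, types, and comments are paraphrased from the diff, not the verbatim merged code):

class RequirementPreparer:

    def _log_preparing_link(self, req):
        # type: (InstallRequirement) -> None
        # Log "Processing <path>" for local files, "Collecting <req>" otherwise.
        ...

    def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
        # type: (InstallRequirement, Optional[str], bool) -> None
        # Decide whether the unpacked tree should be auto-deleted, create
        # req.source_dir, and raise PreviousBuildDirError on a stale checkout.
        ...

    def _get_linked_req_hashes(self, req):
        # type: (InstallRequirement) -> Hashes
        # Return the Hashes to verify against; raise VcsHashUnsupported,
        # DirectoryUrlHashUnsupported, or HashUnpinned early, and fall back
        # to MissingHashes() when hashes are required but none were given.
        ...

    def prepare_linked_requirement(self, req, parallel_builds=False):
        # type: (InstallRequirement, bool) -> AbstractDistribution
        self._log_preparing_link(req)
        download_dir = self.download_dir
        if req.link.is_wheel and self.wheel_download_dir:
            download_dir = self.wheel_download_dir
        with indent_log():
            self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
            local_file = unpack_url(
                req.link, req.source_dir, self.downloader, download_dir,
                hashes=self._get_linked_req_hashes(req),
            )
            ...  # HTTP error handling, saving downloads, etc. as in the diff
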
@@ -376,109 +376,104 @@ def _download_should_save(self):
"Could not find or access download directory '{}'"
.format(self.download_dir))

def prepare_linked_requirement(
self,
req, # type: InstallRequirement
parallel_builds=False, # type: bool
):
# type: (...) -> AbstractDistribution
"""Prepare a requirement that would be obtained from req.link
"""
assert req.link
link = req.link

# TODO: Breakup into smaller functions
if link.scheme == 'file':
path = link.file_path
def _log_preparing_link(self, req):
# type: (InstallRequirement) -> None
"""Log the way the link prepared."""
if req.link.is_file:
path = req.link.file_path
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req.req or req)

download_dir = self.download_dir
if link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir

if link.is_wheel:
def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
# type: (InstallRequirement, Optional[str], bool) -> None
"""Ensure source_dir of a linked InstallRequirement."""
# Since source_dir is only set for editable requirements.
if req.link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
# When installing a wheel, we use the unpacked wheel.
autodelete_unpacked = False
else:
# We always delete unpacked sdists after pip runs.
autodelete_unpacked = True
assert req.source_dir is None
req.ensure_has_source_dir(
self.build_dir,
autodelete=autodelete_unpacked,
parallel_builds=parallel_builds,
)

with indent_log():
# Since source_dir is only set for editable requirements.
assert req.source_dir is None
req.ensure_has_source_dir(
self.build_dir,
autodelete=autodelete_unpacked,
parallel_builds=parallel_builds,
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
"pre-existing build directory ({}). This is likely "
"due to a previous installation that failed . pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again.".format(req, req.source_dir)
)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
" pre-existing build directory ({}). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
.format(req, req.source_dir)
)

# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if self.require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if link.is_vcs:
raise VcsHashUnsupported()
elif link.is_existing_dir():
raise DirectoryUrlHashUnsupported()
if not req.original_link and not req.is_pinned:
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()

hashes = req.hashes(trust_internet=not self.require_hashes)
if self.require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()
def _get_linked_req_hashes(self, req):
# type: (InstallRequirement) -> Hashes
# By the time this is called, the requirement's link should have
# been checked so we can tell what kind of requirements req is
# and raise some more informative errors than otherwise.
# (For example, we can raise VcsHashUnsupported for a VCS URL
# rather than HashMissing.)
if not self.require_hashes:
return req.hashes(trust_internet=True)

# We could check these first 2 conditions inside unpack_url
# and save repetition of conditions, but then we would
# report less-useful error messages for unhashable
# requirements, complaining that there's no hash provided.
if req.link.is_vcs:
raise VcsHashUnsupported()
if req.link.is_existing_dir():
raise DirectoryUrlHashUnsupported()

# Unpinned packages are asking for trouble when a new version
# is uploaded. This isn't a security check, but it saves users
# a surprising hash mismatch in the future.
# file:/// URLs aren't pinnable, so don't complain about them
# not being pinned.
if req.original_link is None and not req.is_pinned:
raise HashUnpinned()

# If known-good hashes are missing for this requirement,
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
return req.hashes(trust_internet=False) or MissingHashes()

def prepare_linked_requirement(self, req, parallel_builds=False):
# type: (InstallRequirement, bool) -> AbstractDistribution
"""Prepare a requirement to be obtained from req.link."""
assert req.link
link = req.link
self._log_preparing_link(req)
if link.is_wheel and self.wheel_download_dir:
# Download wheels to a dedicated dir when doing `pip wheel`.
download_dir = self.wheel_download_dir
else:
download_dir = self.download_dir

with indent_log():
self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
try:
local_file = unpack_url(
link, req.source_dir, self.downloader, download_dir,
hashes=hashes,
hashes=self._get_linked_req_hashes(req)
)
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement {} because of HTTP '
'error {} for URL {}'.format(req, exc, link)
@@ -502,9 +497,8 @@ def prepare_linked_requirement(
)
if not os.path.exists(download_location):
shutil.copy(local_file.path, download_location)
logger.info(
'Saved %s', display_path(download_location)
)
download_path = display_path(download_location)
logger.info('Saved %s', download_path)

if self._download_should_save:
# Make a .zip of the source_dir we already created.
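
A side note on the MissingHashes() fallback that _get_linked_req_hashes keeps from the original inline code: when hashes are required but no known-good hash exists, the downloaded file is checked against MissingHashes, which always fails and raises HashMissing with the hash the user should pin. A minimal standalone illustration of that pattern (hypothetical usage against an arbitrary local file, not part of this diff):

from pip._internal.exceptions import HashMissing
from pip._internal.utils.hashes import MissingHashes

try:
    # MissingHashes has no allowed digests, so any file fails the check.
    MissingHashes().check_against_path('example-1.0.tar.gz')  # hypothetical path
except HashMissing as exc:
    # The error message includes the sha256 the user could add via --hash.
    print(exc)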