
ansible-galaxy - enforce string versions and fix installing prereleases from scm sources #79112

Closed
13 changes: 13 additions & 0 deletions changelogs/fragments/79112-a-g-version-validation.yml
@@ -0,0 +1,13 @@
minor_changes:
- ansible-galaxy collection - consolidate message when an invalid version is encountered so it's
the same regardless of codepath.
bugfixes:
- >
ansible-galaxy collection - allow pre-releases without --pre if the pre-release is already installed
or if the requirement is a pre-release.
- >
ansible-galaxy collection - enforce str versions when loading user-requested requirements
(https://github.com/ansible/ansible/issues/78067, https://github.com/ansible/ansible/issues/79109).
- >
ansible-galaxy collection - fix installing pre-releases from virtual sources
(https://github.com/ansible/ansible/issues/79168).
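
To make the str-version bugfix concrete (a minimal sketch, not part of this PR; the collection name is illustrative): PyYAML resolves an unquoted `version: 1.10` to the float 1.1, silently changing the requested version and producing a value that is not a SemVer string, which is why non-string versions are now rejected with a consistent message.

```python
# Minimal sketch of the problem behind the str-version enforcement
# (https://github.com/ansible/ansible/issues/78067); not part of this diff.
import yaml

req = yaml.safe_load("name: community.foo\nversion: 1.10\n")
print(type(req["version"]), req["version"])    # <class 'float'> 1.1 -- not what was written

req = yaml.safe_load("name: community.foo\nversion: '1.10'\n")
print(type(req["version"]), req["version"])    # <class 'str'> 1.10 -- what the user meant
```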
137 changes: 72 additions & 65 deletions lib/ansible/galaxy/collection/__init__.py
@@ -674,60 +674,10 @@ def install_collections(
:param force_deps: Re-install a collection as well as its dependencies if they have already been installed.
"""
existing_collections = {
Requirement(coll.fqcn, coll.ver, coll.src, coll.type, None)
for coll in find_existing_collections(output_path, artifacts_manager)
}

unsatisfied_requirements = set(
chain.from_iterable(
(
Requirement.from_dir_path(sub_coll, artifacts_manager)
for sub_coll in (
artifacts_manager.
get_direct_collection_dependencies(install_req).
keys()
)
)
if install_req.is_subdirs else (install_req, )
for install_req in collections
),
)
requested_requirements_names = {req.fqcn for req in unsatisfied_requirements}

# NOTE: Don't attempt to reevaluate already installed deps
# NOTE: unless `--force` or `--force-with-deps` is passed
unsatisfied_requirements -= set() if force or force_deps else {
req
for req in unsatisfied_requirements
for exs in existing_collections
if req.fqcn == exs.fqcn and meets_requirements(exs.ver, req.ver)
}

if not unsatisfied_requirements and not upgrade:
display.display(
'Nothing to do. All requested collections are already '
'installed. If you want to reinstall them, '
'consider using `--force`.'
)
return

# FIXME: This probably needs to be improved to
# FIXME: properly match differing src/type.
existing_non_requested_collections = {
coll for coll in existing_collections
if coll.fqcn not in requested_requirements_names
}

preferred_requirements = (
[] if force_deps
else existing_non_requested_collections if force
else existing_collections
)
preferred_collections = {
# NOTE: No need to include signatures if the collection is already installed
Candidate(coll.fqcn, coll.ver, coll.src, coll.type, None)
for coll in preferred_requirements
for coll in find_existing_collections(output_path, artifacts_manager)
Review comment (Member): We shouldn't start scanning the disk if force_deps — all this is being discarded on the next line in 50% of cases.

}
preferred_collections = existing_collections if not force_deps else set()
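
One way to act on the review comment above would be the following sketch (illustrative only, not what this PR does; it assumes existing_collections is consumed solely through preferred_collections at this point):

```python
# Illustrative follow-up to the review comment: skip the disk scan entirely when
# force_deps is set, since its result would be discarded on the next line anyway.
preferred_collections = set() if force_deps else {
    Candidate(coll.fqcn, coll.ver, coll.src, coll.type, None)
    for coll in find_existing_collections(output_path, artifacts_manager)
}
```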
with _display_progress("Process install dependency map"):
dependency_map = _resolve_depenency_map(
collections,
@@ -739,7 +689,15 @@ def install_collections(
upgrade=upgrade,
include_signatures=not disable_gpg_verify,
offline=offline,
force=force,
)
if not dependency_map:
display.display(
'Nothing to do. All requested collections are already '
'installed. If you want to reinstall them, '
'consider using `--force`.'
)
return

keyring_exists = artifacts_manager.keyring is not None
with _display_progress("Starting collection install process"):
@@ -1793,6 +1751,7 @@ def _resolve_depenency_map(
upgrade, # type: bool
include_signatures, # type: bool
offline, # type: bool
force=False, # type: bool
): # type: (...) -> dict[str, Candidate]
"""Return the resolved dependency map."""
if not HAS_RESOLVELIB:
@@ -1819,20 +1778,59 @@ def _resolve_depenency_map(
elif not req.specifier.contains(RESOLVELIB_VERSION.vstring):
raise AnsibleError(f"ansible-galaxy requires {req.name}{req.specifier}")

collection_dep_resolver = build_collection_dependency_resolver(
galaxy_apis=galaxy_apis,
concrete_artifacts_manager=concrete_artifacts_manager,
user_requirements=requested_requirements,
preferred_candidates=preferred_candidates,
with_deps=not no_deps,
with_pre_releases=allow_pre_release,
upgrade=upgrade,
include_signatures=include_signatures,
offline=offline,
)
try:
return collection_dep_resolver.resolve(
requested_requirements,
collection_dep_resolver = build_collection_dependency_resolver(
galaxy_apis=galaxy_apis,
concrete_artifacts_manager=concrete_artifacts_manager,
user_requirements=requested_requirements,
preferred_candidates=preferred_candidates,
with_deps=not no_deps,
with_pre_releases=allow_pre_release,
upgrade=upgrade,
include_signatures=include_signatures,
offline=offline,
force=force,
)
except TypeError as e:
# malformed requirements
raise AnsibleError(f"{e}") from e

# This is arguably a bug, but this short-circuit is here for backwards compatibility...
# Context: when the dependency resolver runs, tarfile and url requirements are always
# reinstalled because only galaxy type uses the preferred candidates in _find_matches.
# However, the dependency resolver doesn't always run for url/tarfile types,
# only if a collection by that name + version isn't installed at all or if another
# collection is (potentially) missing.
# TL;DR: unrelated requirements (which may not even end up installed) cause all tarfile/url
# requirements to be reinstalled, whereas without this they would always be reinstalled.
if (
# Exclude all conditions which require dependency resolution
preferred_candidates is not None # None for --force-with-deps/download
and not force
and not upgrade
and requested_requirements == collection_dep_resolver.provider.unrolled_user_requirements # no ephemeral deps
):
for req in collection_dep_resolver.provider.unrolled_user_requirements:
installed = False
for exs in collection_dep_resolver.provider._preferred_candidates:
if req.fqcn == exs.fqcn and not meets_requirements(exs.ver, req.ver):
break
elif req.fqcn == exs.fqcn and meets_requirements(exs.ver, req.ver):
installed = True
else:
if installed:
continue
satisfied = False
break
else:
satisfied = True

if satisfied:
return {}

try:
dependency_mapping = collection_dep_resolver.resolve(
collection_dep_resolver.provider.unrolled_user_requirements,
max_rounds=2000000, # NOTE: same constant pip uses
).mapping
except CollectionDependencyResolutionImpossible as dep_exc:
@@ -1884,3 +1882,12 @@ def _resolve_depenency_map(
raise AnsibleError('\n'.join(error_msg_lines)) from dep_exc
except ValueError as exc:
raise AnsibleError(to_native(exc)) from exc

# NOTE: Return {} for a lack of unsatisfied collections.
# All callers pass installed collections as preferred candidates, and don't have access to the finalized preferred candidates.
if dependency_mapping and all(
candidate in collection_dep_resolver.provider._preferred_candidates
for candidate in dependency_mapping.values()
):
return {}
return dependency_mapping
2 changes: 2 additions & 0 deletions lib/ansible/galaxy/dependency_resolution/__init__.py
@@ -34,6 +34,7 @@ def build_collection_dependency_resolver(
upgrade=False, # type: bool
include_signatures=True, # type: bool
offline=False, # type: bool
force=False, # type: bool
): # type: (...) -> CollectionDependencyResolver
"""Return a collection dependency resolver.

@@ -50,6 +51,7 @@
with_pre_releases=with_pre_releases,
upgrade=upgrade,
include_signatures=include_signatures,
force=force,
),
CollectionDependencyReporter(),
)
28 changes: 27 additions & 1 deletion lib/ansible/galaxy/dependency_resolution/dataclasses.py
@@ -32,8 +32,10 @@
from ansible.galaxy.collection import HAS_PACKAGING, PkgReq
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
from ansible.module_utils.six import string_types
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
from ansible.utils.version import SemanticVersion


_ALLOW_CONCRETE_POINTER_IN_SOURCE = False # NOTE: This is a feature flag
@@ -428,11 +430,13 @@ def from_requirement_dict(cls, collection_req, art_mgr, validate_signature_optio
if req_type not in {'galaxy', 'subdirs'} and req_version == '*':
req_version = art_mgr.get_direct_collection_version(tmp_inst_req)

return cls(
req = cls(
req_name, req_version,
req_source, req_type,
req_signature_sources,
)
req.assert_version()
Review comment (Member): Making assertions when an object holding invalid data is already initialized is an anti-pattern. This should be checked in or before init...

return req
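
The reviewer's suggestion could take roughly this shape (a hypothetical stand-in built on a plain namedtuple, not ansible code): reject the invalid value in __new__ so an object holding a bad version is never constructed.

```python
# Hypothetical illustration of "validate at construction"; the field names mirror
# RequirementNamedTuple defined later in this file, but StrictRequirement itself
# is not part of this PR.
from collections import namedtuple

_Base = namedtuple('Requirement', ('fqcn', 'ver', 'src', 'type', 'signature_sources'))

class StrictRequirement(_Base):
    def __new__(cls, fqcn, ver, src, req_type, signature_sources):
        # Reject non-string versions before the object ever exists.
        if not isinstance(ver, str):
            raise TypeError(
                f"Invalid version found for the collection '{fqcn}': {ver!r}. "
                "A string version is required."
            )
        return super().__new__(cls, fqcn, ver, src, req_type, signature_sources)

# StrictRequirement('ns.coll', 1.10, None, 'galaxy', None)    -> TypeError
# StrictRequirement('ns.coll', '1.2.3', None, 'galaxy', None) -> constructed normally
```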

def __repr__(self):
return (
@@ -551,6 +555,28 @@ def is_online_index_pointer(self):
def source_info(self):
return self._source_info

def assert_version(self):
if not self.is_scm:
version_req = "A SemVer-compliant version or '*' is required. See https://semver.org to learn how to compose it correctly."
else:
version_req = "A string version is required."

name = self.fqcn if self.fqcn is not None else self
version_err = f"Invalid version found for the collection '{name}': {self.ver} ({type(self.ver)}). {version_req}"
if not isinstance(self.ver, string_types):
raise TypeError(version_err)
Review comment (Member): How about a less invasive approach? #81694


# FIXME: validate individual requirements are valid semver
req_qualifiers = [">", "<", "!", "="]
if any(qualifier in self.ver for qualifier in req_qualifiers):
return

if not (self.is_scm or self.ver == '*'):
try:
SemanticVersion(self.ver)
except ValueError as ex:
raise TypeError(version_err) from ex
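
Taken together, the checks above amount to roughly these rules (a self-contained sketch; the helper name is illustrative, and it assumes ansible.utils.version.SemanticVersion rejects anything that is not X.Y.Z with optional pre-release/build metadata):

```python
# Illustrative restatement of assert_version()'s rules as a standalone predicate;
# not part of this diff.
from ansible.utils.version import SemanticVersion

def is_acceptable_collection_version(ver, is_scm=False):
    if not isinstance(ver, str):
        return False                                  # e.g. an unquoted YAML float like 1.10
    if any(q in ver for q in ('>', '<', '!', '=')):
        return True                                   # range spec; parts not individually validated yet (see FIXME)
    if is_scm or ver == '*':
        return True                                   # SCM sources and wildcards are exempt
    try:
        SemanticVersion(ver)                          # must be SemVer, e.g. 1.2.3 or 1.2.3-rc1
    except ValueError:
        return False
    return True

assert is_acceptable_collection_version('1.2.3')
assert is_acceptable_collection_version('>=1.0.0,<2.0.0')
assert not is_acceptable_collection_version(1.10)     # not a string
assert not is_acceptable_collection_version('1.2')    # missing patch component
```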


RequirementNamedTuple = namedtuple('Requirement', ('fqcn', 'ver', 'src', 'type', 'signature_sources')) # type: ignore[name-match]
