diff --git a/conda_lock/conda_lock.py b/conda_lock/conda_lock.py index 48b1d7171..2d9ad39a3 100644 --- a/conda_lock/conda_lock.py +++ b/conda_lock/conda_lock.py @@ -69,7 +69,7 @@ TimeMeta, UpdateSpecification, ) -from conda_lock.lookup import set_lookup_location +from conda_lock.lookup import DEFAULT_MAPPING_URL from conda_lock.models.channel import Channel from conda_lock.models.lock_spec import LockSpecification from conda_lock.models.pip_repository import PipRepository @@ -270,6 +270,7 @@ def make_lock_files( # noqa: C901 metadata_yamls: Sequence[pathlib.Path] = (), with_cuda: Optional[str] = None, strip_auth: bool = False, + mapping_url: str, ) -> None: """ Generate a lock file from the src files provided @@ -324,6 +325,7 @@ def make_lock_files( # noqa: C901 channel_overrides=channel_overrides, platform_overrides=platform_overrides, required_categories=required_categories if filter_categories else None, + mapping_url=mapping_url, ) # Load existing lockfile if it exists @@ -403,6 +405,7 @@ def make_lock_files( # noqa: C901 metadata_yamls=metadata_yamls, strip_auth=strip_auth, virtual_package_repo=virtual_package_repo, + mapping_url=mapping_url, ) if not original_lock_content: @@ -733,6 +736,7 @@ def _solve_for_arch( virtual_package_repo: FakeRepoData, update_spec: Optional[UpdateSpecification] = None, strip_auth: bool = False, + mapping_url: str, ) -> List[LockedDependency]: """ Solve specification for a single platform @@ -758,13 +762,14 @@ def _solve_for_arch( update=update_spec.update, platform=platform, channels=channels, + mapping_url=mapping_url, ) if requested_deps_by_name["pip"]: if "python" not in conda_deps: raise ValueError("Got pip specs without Python") pip_deps = solve_pypi( - requested_deps_by_name["pip"], + pip_specs=requested_deps_by_name["pip"], use_latest=update_spec.update, pip_locked={ dep.name: dep for dep in update_spec.locked if dep.manager == "pip" @@ -782,6 +787,7 @@ def _solve_for_arch( pip_repositories=pip_repositories, allow_pypi_requests=spec.allow_pypi_requests, strip_auth=strip_auth, + mapping_url=mapping_url, ) else: pip_deps = {} @@ -828,6 +834,7 @@ def create_lockfile_from_spec( metadata_yamls: Sequence[pathlib.Path] = (), strip_auth: bool = False, virtual_package_repo: FakeRepoData, + mapping_url: str, ) -> Lockfile: """ Solve or update specification @@ -847,6 +854,7 @@ def create_lockfile_from_spec( virtual_package_repo=virtual_package_repo, update_spec=update_spec, strip_auth=strip_auth, + mapping_url=mapping_url, ) for dep in deps: @@ -1132,6 +1140,7 @@ def run_lock( metadata_choices: AbstractSet[MetadataOption] = frozenset(), metadata_yamls: Sequence[pathlib.Path] = (), strip_auth: bool = False, + mapping_url: str, ) -> None: if len(environment_files) == 0: environment_files = handle_no_specified_source_files(lockfile_path) @@ -1158,6 +1167,7 @@ def run_lock( metadata_choices=metadata_choices, metadata_yamls=metadata_yamls, strip_auth=strip_auth, + mapping_url=mapping_url, ) @@ -1365,8 +1375,11 @@ def lock( logging.basicConfig(level=log_level) # Set Pypi <--> Conda lookup file location - if pypi_to_conda_lookup_file: - set_lookup_location(pypi_to_conda_lookup_file) + mapping_url = ( + DEFAULT_MAPPING_URL + if pypi_to_conda_lookup_file is None + else pypi_to_conda_lookup_file + ) metadata_enum_choices = set(MetadataOption(md) for md in metadata_choices) @@ -1408,6 +1421,7 @@ def lock( metadata_choices=metadata_enum_choices, metadata_yamls=[pathlib.Path(path) for path in metadata_yamls], strip_auth=strip_auth, + mapping_url=mapping_url, ) if 
strip_auth: with tempfile.TemporaryDirectory() as tempdir: diff --git a/conda_lock/conda_solver.py b/conda_lock/conda_solver.py index a6468cfb3..165280d44 100644 --- a/conda_lock/conda_solver.py +++ b/conda_lock/conda_solver.py @@ -115,6 +115,7 @@ def solve_conda( update: List[str], platform: str, channels: List[Channel], + mapping_url: str, ) -> Dict[str, LockedDependency]: """ Solve (or update a previous solution of) conda specs for the given platform @@ -205,6 +206,7 @@ def normalize_url(url: str) -> str: apply_categories( requested={k: v for k, v in specs.items() if v.manager == "conda"}, planned=planned, + mapping_url=mapping_url, ) return planned diff --git a/conda_lock/lockfile/__init__.py b/conda_lock/lockfile/__init__.py index 53698cb7b..92c0cddef 100644 --- a/conda_lock/lockfile/__init__.py +++ b/conda_lock/lockfile/__init__.py @@ -66,10 +66,12 @@ def _truncate_main_category( def apply_categories( + *, requested: Dict[str, Dependency], planned: Mapping[str, Union[List[LockedDependency], LockedDependency]], categories: Sequence[str] = ("main", "dev"), convert_to_pip_names: bool = False, + mapping_url: str, ) -> None: """map each package onto the root request the with the highest-priority category""" @@ -98,14 +100,15 @@ def extract_planned_items( return [ item for item in planned_items - if dep_name(item.manager, item.name) not in deps + if dep_name(manager=item.manager, dep=item.name, mapping_url=mapping_url) + not in deps ] - def dep_name(manager: str, dep: str) -> str: + def dep_name(*, manager: str, dep: str, mapping_url: str) -> str: # If we operate on lists of pip names and this is a conda dependency, we # convert the name to a pip name. if convert_to_pip_names and manager == "conda": - return conda_name_to_pypi_name(dep) + return conda_name_to_pypi_name(dep, mapping_url=mapping_url) return dep for name, request in requested.items(): @@ -123,7 +126,9 @@ def dep_name(manager: str, dep: str) -> str: for planned_item in planned_items: todo.extend( - dep_name(planned_item.manager, dep) + dep_name( + manager=planned_item.manager, dep=dep, mapping_url=mapping_url + ) for dep in planned_item.dependencies # exclude virtual packages if not (dep in deps or dep.startswith("__")) diff --git a/conda_lock/lookup.py b/conda_lock/lookup.py index f2a8801c1..7ec96ec5a 100644 --- a/conda_lock/lookup.py +++ b/conda_lock/lookup.py @@ -2,7 +2,7 @@ import logging import time -from functools import cached_property +from functools import lru_cache from pathlib import Path from typing import Dict @@ -11,12 +11,15 @@ from filelock import FileLock, Timeout from packaging.utils import NormalizedName, canonicalize_name +from packaging.utils import canonicalize_name as canonicalize_pypi_name from platformdirs import user_cache_path from typing_extensions import TypedDict logger = logging.getLogger(__name__) +DEFAULT_MAPPING_URL = "https://raw.githubusercontent.com/regro/cf-graph-countyfair/master/mappings/pypi/grayskull_pypi_mapping.yaml" + class MappingEntry(TypedDict): conda_name: str @@ -25,90 +28,74 @@ class MappingEntry(TypedDict): pypi_name: NormalizedName -class _LookupLoader: - _mapping_url: str = "https://raw.githubusercontent.com/regro/cf-graph-countyfair/master/mappings/pypi/grayskull_pypi_mapping.yaml" - - @property - def mapping_url(self) -> str: - return self._mapping_url - - @mapping_url.setter - def mapping_url(self, value: str) -> None: - if self._mapping_url != value: - self._mapping_url = value - # Invalidate cache - try: - del self.pypi_lookup - except AttributeError: - pass - try: - 
-                del self.conda_lookup
-            except AttributeError:
-                pass
-
-    @cached_property
-    def pypi_lookup(self) -> Dict[NormalizedName, MappingEntry]:
-        url = self.mapping_url
-        if url.startswith("http://") or url.startswith("https://"):
-            content = cached_download_file(url)
+@lru_cache(maxsize=None)
+def _get_pypi_lookup(mapping_url: str) -> Dict[NormalizedName, MappingEntry]:
+    url = mapping_url
+    if url.startswith("http://") or url.startswith("https://"):
+        content = cached_download_file(url)
+    else:
+        if url.startswith("file://"):
+            path = url[len("file://") :]
         else:
-            if url.startswith("file://"):
-                path = url[len("file://") :]
-            else:
-                path = url
-            content = Path(path).read_bytes()
-        logger.debug("Parsing PyPI mapping")
-        load_start = time.monotonic()
-        yaml = ruamel.yaml.YAML(typ="safe")
-        lookup = yaml.load(content)
-        load_duration = time.monotonic() - load_start
-        logger.debug(f"Loaded {len(lookup)} entries in {load_duration:.2f}s")
-        # lowercase and kebabcase the pypi names
-        assert lookup is not None
-        lookup = {canonicalize_name(k): v for k, v in lookup.items()}
-        for v in lookup.values():
-            v["pypi_name"] = canonicalize_name(v["pypi_name"])
-        return lookup
-
-    @cached_property
-    def conda_lookup(self) -> Dict[str, MappingEntry]:
-        return {record["conda_name"]: record for record in self.pypi_lookup.values()}
-
-
-LOOKUP_OBJECT = _LookupLoader()
-
-
-def get_forward_lookup() -> Dict[NormalizedName, MappingEntry]:
-    global LOOKUP_OBJECT
-    return LOOKUP_OBJECT.pypi_lookup
-
-
-def get_lookup() -> Dict[str, MappingEntry]:
-    """
-    Reverse grayskull name mapping to map conda names onto PyPI
+            path = url
+        content = Path(path).read_bytes()
+    logger.debug("Parsing PyPI mapping")
+    load_start = time.monotonic()
+    yaml = ruamel.yaml.YAML(typ="safe")
+    lookup = yaml.load(content)
+    load_duration = time.monotonic() - load_start
+    logger.debug(f"Loaded {len(lookup)} entries in {load_duration:.2f}s")
+    # lowercase and kebabcase the pypi names
+    assert lookup is not None
+    lookup = {canonicalize_name(k): v for k, v in lookup.items()}
+    for v in lookup.values():
+        v["pypi_name"] = canonicalize_name(v["pypi_name"])
+    return lookup
+
+
+def pypi_name_to_conda_name(name: str, mapping_url: str) -> str:
+    """Convert a PyPI package name to a conda package name.
+
+    >>> from conda_lock.lookup import DEFAULT_MAPPING_URL
+    >>> pypi_name_to_conda_name("build", mapping_url=DEFAULT_MAPPING_URL)
+    'python-build'
+
+    >>> pypi_name_to_conda_name("zpfqzvrj", mapping_url=DEFAULT_MAPPING_URL)
+    'zpfqzvrj'
     """
-    global LOOKUP_OBJECT
-    return LOOKUP_OBJECT.conda_lookup
+    cname = canonicalize_pypi_name(name)
+    if cname in _get_pypi_lookup(mapping_url):
+        lookup = _get_pypi_lookup(mapping_url)[cname]
+        res = lookup.get("conda_name") or lookup.get("conda_forge")
+        if res is not None:
+            return res
+        else:
+            logging.warning(
+                f"Could not find conda name for {cname}. Assuming identity."
+            )
+            return cname
+    else:
+        return cname
 
 
-def set_lookup_location(lookup_url: str) -> None:
-    global LOOKUP_OBJECT
-    LOOKUP_OBJECT.mapping_url = lookup_url
+@lru_cache(maxsize=None)
+def _get_conda_lookup(mapping_url: str) -> Dict[str, MappingEntry]:
+    """
+    Reverse grayskull name mapping to map conda names onto PyPI
+    """
+    return {
+        record["conda_name"]: record
+        for record in _get_pypi_lookup(mapping_url).values()
+    }
 
 
-def conda_name_to_pypi_name(name: str) -> NormalizedName:
+def conda_name_to_pypi_name(name: str, mapping_url: str) -> NormalizedName:
     """return the pypi name for a conda package"""
-    lookup = get_lookup()
+    lookup = _get_conda_lookup(mapping_url=mapping_url)
     cname = canonicalize_name(name)
     return lookup.get(cname, {"pypi_name": cname})["pypi_name"]
 
 
-def pypi_name_to_conda_name(name: str) -> str:
-    """return the conda name for a pypi package"""
-    cname = canonicalize_name(name)
-    return get_forward_lookup().get(cname, {"conda_name": cname})["conda_name"]
-
-
 def cached_download_file(url: str) -> bytes:
     """Download a file and cache it in the user cache directory.
 
@@ -138,26 +125,25 @@
     destination_etag = destination_mapping.with_suffix(".etag")
     destination_lock = destination_mapping.with_suffix(".lock")
 
-    # Return the contents immediately if the file is fresh
-    try:
-        mtime = destination_mapping.stat().st_mtime
-        age = current_time - mtime
-        if age < DONT_CHECK_IF_NEWER_THAN_SECONDS:
-            contents = destination_mapping.read_bytes()
-            logger.debug(
-                f"Using cached mapping {destination_mapping} without "
-                f"checking for updates"
-            )
-            return contents
-    except FileNotFoundError:
-        pass
-
     # Wait for any other process to finish downloading the file.
     # Use the ETag to avoid downloading the file if it hasn't changed.
     # Otherwise, download the file and cache the contents and ETag.
     while True:
         try:
             with FileLock(destination_lock, timeout=5):
+                # Return the contents immediately if the file is fresh
+                try:
+                    mtime = destination_mapping.stat().st_mtime
+                    age = current_time - mtime
+                    if age < DONT_CHECK_IF_NEWER_THAN_SECONDS:
+                        contents = destination_mapping.read_bytes()
+                        logger.debug(
+                            f"Using cached mapping {destination_mapping} without "
+                            f"checking for updates"
+                        )
+                        return contents
+                except FileNotFoundError:
+                    pass
                 # Get the ETag from the last download, if it exists
                 if destination_mapping.exists() and destination_etag.exists():
                     logger.debug(f"Old ETag found at {destination_etag}")
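The lookup.py changes above replace the mutable `_LookupLoader` singleton, whose cached properties had to be manually invalidated whenever the mapping URL changed, with plain module-level functions memoized by `functools.lru_cache` and keyed on `mapping_url`. A minimal sketch of that pattern follows; `_fetch_mapping` and the URL are illustrative stand-ins, not conda-lock's real download-and-parse step:

```python
from functools import lru_cache
from typing import Dict


def _fetch_mapping(url: str) -> Dict[str, dict]:
    # Illustrative placeholder; the real code downloads and parses the
    # grayskull YAML via cached_download_file() and ruamel.yaml.
    return {"build": {"conda_name": "python-build", "pypi_name": "build"}}


@lru_cache(maxsize=None)
def get_pypi_lookup(mapping_url: str) -> Dict[str, dict]:
    # Parsed once per distinct URL per process; a different URL simply
    # creates a new cache entry, so no manual invalidation is needed.
    return _fetch_mapping(mapping_url)


assert get_pypi_lookup("https://example.invalid/mapping.yaml") is get_pypi_lookup(
    "https://example.invalid/mapping.yaml"
)
```

Because the URL is part of the cache key, pointing different call sites at different mapping files just yields independent cache entries instead of requiring the old setter-plus-invalidation dance.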
diff --git a/conda_lock/pypi_solver.py b/conda_lock/pypi_solver.py
index fedddf952..bf5bd4c09 100644
--- a/conda_lock/pypi_solver.py
+++ b/conda_lock/pypi_solver.py
@@ -433,6 +433,7 @@ def _compute_hash(link: Link, lock_spec_hash: Optional[str]) -> HashModel:
 
 
 def solve_pypi(
+    *,
     pip_specs: Dict[str, lock_spec.Dependency],
     use_latest: List[str],
     pip_locked: Dict[str, LockedDependency],
@@ -444,6 +445,7 @@ def solve_pypi(
     allow_pypi_requests: bool = True,
     verbose: bool = False,
     strip_auth: bool = False,
+    mapping_url: str,
 ) -> Dict[str, LockedDependency]:
     """
     Solve pip dependencies for the given platform
@@ -503,7 +505,9 @@
         if locked_dep.manager != "pip" and "python" not in locked_dep.dependencies:
             continue
         try:
-            pypi_name = conda_name_to_pypi_name(locked_dep.name).lower()
+            pypi_name = conda_name_to_pypi_name(
+                locked_dep.name, mapping_url=mapping_url
+            ).lower()
         except KeyError:
             continue
         # Prefer the Python package when its name collides with the Conda package
@@ -572,13 +576,18 @@
     # is essentially a dictionary of:
     # - pip package name -> list of LockedDependency that are needed for this package
     for conda_name, locked_dep in conda_locked.items():
-        pypi_name = conda_name_to_pypi_name(conda_name)
+        pypi_name = conda_name_to_pypi_name(conda_name, mapping_url=mapping_url)
 
         if pypi_name in planned:
             planned[pypi_name].append(locked_dep)
         else:
             planned[pypi_name] = [locked_dep]
 
-    apply_categories(requested=pip_specs, planned=planned, convert_to_pip_names=True)
+    apply_categories(
+        requested=pip_specs,
+        planned=planned,
+        convert_to_pip_names=True,
+        mapping_url=mapping_url,
+    )
 
     return {dep.name: dep for dep in requirements}
diff --git a/conda_lock/src_parser/__init__.py b/conda_lock/src_parser/__init__.py
index 7fa4f3b98..f659d678b 100644
--- a/conda_lock/src_parser/__init__.py
+++ b/conda_lock/src_parser/__init__.py
@@ -48,8 +48,7 @@ def _parse_platforms_from_srcs(src_files: List[pathlib.Path]) -> List[str]:
 
 
 def _parse_source_files(
-    src_files: List[pathlib.Path],
-    platforms: List[str],
+    src_files: List[pathlib.Path], *, platforms: List[str], mapping_url: str
 ) -> List[LockSpecification]:
     """
     Parse a sequence of dependency specifications from source files
@@ -64,11 +63,19 @@
     desired_envs: List[LockSpecification] = []
     for src_file in src_files:
         if src_file.name == "meta.yaml":
-            desired_envs.append(parse_meta_yaml_file(src_file, platforms))
+            desired_envs.append(parse_meta_yaml_file(src_file, platforms=platforms))
         elif src_file.name == "pyproject.toml":
-            desired_envs.append(parse_pyproject_toml(src_file, platforms))
+            desired_envs.append(
+                parse_pyproject_toml(
+                    src_file, platforms=platforms, mapping_url=mapping_url
+                )
+            )
         else:
-            desired_envs.append(parse_environment_file(src_file, platforms))
+            desired_envs.append(
+                parse_environment_file(
+                    src_file, platforms=platforms, mapping_url=mapping_url
+                )
+            )
     return desired_envs
 
 
@@ -79,6 +86,7 @@ def make_lock_spec(
     pip_repository_overrides: Optional[Sequence[str]] = None,
     platform_overrides: Optional[Sequence[str]] = None,
     required_categories: Optional[AbstractSet[str]] = None,
+    mapping_url: str,
 ) -> LockSpecification:
     """Generate the lockfile specs from a set of input src_files. If required_categories is set filter out specs that do not match those"""
     platforms = (
@@ -87,7 +95,9 @@
         else _parse_platforms_from_srcs(src_files)
     ) or DEFAULT_PLATFORMS
 
-    lock_specs = _parse_source_files(src_files, platforms)
+    lock_specs = _parse_source_files(
+        src_files, platforms=platforms, mapping_url=mapping_url
+    )
 
     aggregated_lock_spec = aggregate_lock_specs(lock_specs, platforms)
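Downstream of this change, `make_lock_spec` (and `_parse_source_files`) can no longer fall back to a module-level mapping location: `mapping_url` is a required keyword argument. A sketch of the new call shape, mirroring how the updated tests later in this diff invoke it (the environment file path here is a placeholder):

```python
from pathlib import Path

from conda_lock.lookup import DEFAULT_MAPPING_URL
from conda_lock.src_parser import make_lock_spec

spec = make_lock_spec(
    src_files=[Path("environment.yml")],  # placeholder path
    mapping_url=DEFAULT_MAPPING_URL,  # now required; no implicit global default
)
```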
diff --git a/conda_lock/src_parser/environment_yaml.py b/conda_lock/src_parser/environment_yaml.py
index b397032f1..43f9b2a25 100644
--- a/conda_lock/src_parser/environment_yaml.py
+++ b/conda_lock/src_parser/environment_yaml.py
@@ -28,8 +28,10 @@ def parse_conda_requirement(req: str) -> Tuple[str, str]:
 
 def _parse_environment_file_for_platform(
     content: str,
+    *,
     category: str,
     platform: str,
+    mapping_url: str,
 ) -> List[Dependency]:
     """
     Parse dependencies from a conda environment specification for an
@@ -71,7 +73,11 @@
                 continue
 
             dependency = parse_python_requirement(
-                spec, manager="pip", category=category, normalize_name=False
+                spec,
+                manager="pip",
+                category=category,
+                normalize_name=False,
+                mapping_url=mapping_url,
             )
             if evaluate_marker(dependency.markers, platform):
                 # The above condition will skip adding the dependency if a
@@ -80,7 +86,9 @@
             dependencies.append(dependency)
 
     # ensure pip is in target env
-    dependencies.append(parse_python_requirement("pip", manager="conda"))
+    dependencies.append(
+        parse_python_requirement("pip", manager="conda", mapping_url=mapping_url)
+    )
 
     return dependencies
 
@@ -102,6 +110,7 @@ def parse_platforms_from_env_file(environment_file: pathlib.Path) -> List[str]:
 
 def parse_environment_file(
     environment_file: pathlib.Path,
     platforms: List[str],
+    mapping_url: str,
 ) -> LockSpecification:
     """Parse a simple environment-yaml file for dependencies assuming the target platforms.
 
@@ -131,7 +140,9 @@
     # Parse with selectors for each target platform
     dep_map = {
-        platform: _parse_environment_file_for_platform(content, category, platform)
+        platform: _parse_environment_file_for_platform(
+            content, category=category, platform=platform, mapping_url=mapping_url
+        )
         for platform in platforms
     }
 
diff --git a/conda_lock/src_parser/meta_yaml.py b/conda_lock/src_parser/meta_yaml.py
index fc6acac6e..1570e9a2f 100644
--- a/conda_lock/src_parser/meta_yaml.py
+++ b/conda_lock/src_parser/meta_yaml.py
@@ -85,6 +85,7 @@ def _return_value(self, value=None):  # type: ignore
 
 def parse_meta_yaml_file(
     meta_yaml_file: pathlib.Path,
+    *,
     platforms: List[str],
 ) -> LockSpecification:
     """Parse a simple meta-yaml file for dependencies assuming the target platforms.
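Note that several of the parser signatures above also gain a bare `*`, making arguments such as `platforms`, `category`, and `mapping_url` keyword-only, so any remaining positional call site fails immediately rather than silently binding a value to the wrong parameter. A generic illustration of the mechanism (not conda-lock's actual code):

```python
def parse(content: str, *, platform: str, mapping_url: str) -> str:
    # Everything after the bare * must be passed by keyword.
    return f"{platform}:{mapping_url}:{len(content)}"


parse("name: demo", platform="linux-64", mapping_url="file:///tmp/mapping.yaml")  # ok
# parse("name: demo", "linux-64", "file:///tmp/mapping.yaml")
# TypeError: parse() takes 1 positional argument but 3 were given
```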
diff --git a/conda_lock/src_parser/pyproject_toml.py b/conda_lock/src_parser/pyproject_toml.py index c170958a7..b398b591b 100644 --- a/conda_lock/src_parser/pyproject_toml.py +++ b/conda_lock/src_parser/pyproject_toml.py @@ -30,7 +30,7 @@ from conda_lock.common import get_in from conda_lock.interfaces.vendored_grayskull import encode_poetry_version -from conda_lock.lookup import get_forward_lookup as get_lookup +from conda_lock.lookup import pypi_name_to_conda_name from conda_lock.models.lock_spec import ( Dependency, LockSpecification, @@ -70,22 +70,6 @@ ) -def normalize_pypi_name(name: str) -> str: - cname = canonicalize_pypi_name(name) - if cname in get_lookup(): - lookup = get_lookup()[cname] - res = lookup.get("conda_name") or lookup.get("conda_forge") - if res is not None: - return res - else: - logging.warning( - f"Could not find conda name for {cname}. Assuming identity." - ) - return cname - else: - return cname - - def poetry_version_to_conda_version(version_string: Optional[str]) -> Optional[str]: """Convert a Poetry-style version string to a Conda-compatible version string. @@ -191,6 +175,7 @@ def parse_poetry_pyproject_toml( path: pathlib.Path, platforms: List[str], contents: Mapping[str, Any], + mapping_url: str, ) -> LockSpecification: """ Parse dependencies from a poetry pyproject.toml file @@ -292,7 +277,7 @@ def parse_poetry_pyproject_toml( ) if manager == "conda": - name = normalize_pypi_name(depname) + name = pypi_name_to_conda_name(depname, mapping_url=mapping_url) version = poetry_version_to_conda_version(poetry_version_spec) else: name = depname @@ -456,39 +441,49 @@ def unpack_git_url(url: str) -> Tuple[str, Optional[str]]: def parse_python_requirement( requirement: str, + *, + mapping_url: str, manager: Literal["conda", "pip"] = "conda", category: str = "main", normalize_name: bool = True, ) -> Dependency: """Parse a requirements.txt like requirement to a conda spec. - >>> parse_python_requirement("my_package") # doctest: +NORMALIZE_WHITESPACE + >>> from conda_lock.lookup import DEFAULT_MAPPING_URL + >>> parse_python_requirement( + ... "my_package", + ... mapping_url=DEFAULT_MAPPING_URL, + ... ) # doctest: +NORMALIZE_WHITESPACE VersionedDependency(name='my-package', manager='conda', category='main', extras=[], markers=None, version='*', build=None, conda_channel=None, hash=None) >>> parse_python_requirement( - ... "My_Package[extra]==1.23" + ... "My_Package[extra]==1.23", + ... mapping_url=DEFAULT_MAPPING_URL, ... ) # doctest: +NORMALIZE_WHITESPACE VersionedDependency(name='my-package', manager='conda', category='main', extras=['extra'], markers=None, version='==1.23', build=None, conda_channel=None, hash=None) >>> parse_python_requirement( - ... "conda-lock @ git+https://github.com/conda/conda-lock.git@v2.4.1" + ... "conda-lock @ git+https://github.com/conda/conda-lock.git@v2.4.1", + ... mapping_url=DEFAULT_MAPPING_URL, ... ) # doctest: +NORMALIZE_WHITESPACE VCSDependency(name='conda-lock', manager='conda', category='main', extras=[], markers=None, source='https://github.com/conda/conda-lock.git', vcs='git', rev='v2.4.1') >>> parse_python_requirement( - ... "some-package @ https://some-repository.org/some-package-1.2.3.tar.gz" + ... "some-package @ https://some-repository.org/some-package-1.2.3.tar.gz", + ... mapping_url=DEFAULT_MAPPING_URL, ... 
) # doctest: +NORMALIZE_WHITESPACE URLDependency(name='some-package', manager='conda', category='main', extras=[], markers=None, url='https://some-repository.org/some-package-1.2.3.tar.gz', hashes=['']) >>> parse_python_requirement( - ... "some-package ; sys_platform == 'darwin'" + ... "some-package ; sys_platform == 'darwin'", + ... mapping_url=DEFAULT_MAPPING_URL, ... ) # doctest: +NORMALIZE_WHITESPACE VersionedDependency(name='some-package', manager='conda', category='main', extras=[], markers="sys_platform == 'darwin'", version='*', build=None, @@ -506,7 +501,7 @@ def parse_python_requirement( conda_version = ",".join(sorted(conda_version.split(","))) if normalize_name: - conda_dep_name = normalize_pypi_name(name) + conda_dep_name = pypi_name_to_conda_name(name, mapping_url=mapping_url) else: conda_dep_name = name extras = list(parsed_req.extras) @@ -548,11 +543,13 @@ def parse_python_requirement( def parse_requirements_pyproject_toml( pyproject_toml_path: pathlib.Path, + *, platforms: List[str], contents: Mapping[str, Any], prefix: Sequence[str], main_tag: str, optional_tag: str, + mapping_url: str, dev_tags: AbstractSet[str] = {"dev", "test"}, ) -> LockSpecification: """ @@ -577,7 +574,10 @@ def parse_requirements_pyproject_toml( for dep in get_in(list(path), contents, []): dependencies.append( parse_python_requirement( - dep, manager=default_non_conda_source, category=category + dep, + manager=default_non_conda_source, + category=category, + mapping_url=mapping_url, ) ) @@ -590,6 +590,7 @@ def parse_pdm_pyproject_toml( path: pathlib.Path, platforms: List[str], contents: Mapping[str, Any], + mapping_url: str, ) -> LockSpecification: """ PDM support. First, a regular PEP621 pass; then, add all dependencies listed @@ -597,8 +598,9 @@ def parse_pdm_pyproject_toml( """ res = parse_requirements_pyproject_toml( path, - platforms, - contents, + platforms=platforms, + contents=contents, + mapping_url=mapping_url, prefix=("project",), main_tag="dependencies", optional_tag="optional-dependencies", @@ -614,7 +616,10 @@ def parse_pdm_pyproject_toml( dev_reqs.extend( [ parse_python_requirement( - dep, manager=default_non_conda_source, category="dev" + dep, + manager=default_non_conda_source, + category="dev", + mapping_url=mapping_url, ) for dep in deps ] @@ -636,7 +641,9 @@ def parse_platforms_from_pyproject_toml( def parse_pyproject_toml( pyproject_toml: pathlib.Path, + *, platforms: List[str], + mapping_url: str, ) -> LockSpecification: with pyproject_toml.open("rb") as fp: contents = toml_load(fp) @@ -696,4 +703,6 @@ def parse_pyproject_toml( "Could not detect build-system in pyproject.toml. 
Assuming poetry" ) - return parse(pyproject_toml, platforms, contents) + return parse( + pyproject_toml, platforms=platforms, contents=contents, mapping_url=mapping_url + ) diff --git a/tests/test_conda_lock.py b/tests/test_conda_lock.py index 45975519a..287537a84 100644 --- a/tests/test_conda_lock.py +++ b/tests/test_conda_lock.py @@ -61,7 +61,7 @@ LockedDependency, MetadataOption, ) -from conda_lock.lookup import _LookupLoader +from conda_lock.lookup import DEFAULT_MAPPING_URL, conda_name_to_pypi_name from conda_lock.models.channel import Channel from conda_lock.models.lock_spec import Dependency, VCSDependency, VersionedDependency from conda_lock.models.pip_repository import PipRepository @@ -391,6 +391,7 @@ def test_lock_poetry_ibis( platforms=["linux-64"], extras={"test", "dev", "docs"}, filter_categories=True, + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file(pyproject.parent / DEFAULT_LOCKFILE_NAME) @@ -406,7 +407,9 @@ def test_lock_poetry_ibis( def test_parse_environment_file(gdal_environment: Path): - res = parse_environment_file(gdal_environment, DEFAULT_PLATFORMS) + res = parse_environment_file( + gdal_environment, platforms=DEFAULT_PLATFORMS, mapping_url=DEFAULT_MAPPING_URL + ) assert all( x in res.dependencies[plat] for x in [ @@ -438,7 +441,9 @@ def test_parse_environment_file(gdal_environment: Path): def test_parse_environment_file_with_pip(pip_environment: Path): - res = parse_environment_file(pip_environment, DEFAULT_PLATFORMS) + res = parse_environment_file( + pip_environment, platforms=DEFAULT_PLATFORMS, mapping_url=DEFAULT_MAPPING_URL + ) for plat in DEFAULT_PLATFORMS: assert [dep for dep in res.dependencies[plat] if dep.manager == "pip"] == [ VersionedDependency( @@ -452,7 +457,9 @@ def test_parse_environment_file_with_pip(pip_environment: Path): def test_parse_environment_file_with_git(git_environment: Path): - res = parse_environment_file(git_environment, DEFAULT_PLATFORMS) + res = parse_environment_file( + git_environment, platforms=DEFAULT_PLATFORMS, mapping_url=DEFAULT_MAPPING_URL + ) for plat in DEFAULT_PLATFORMS: assert [dep for dep in res.dependencies[plat] if dep.manager == "pip"] == [ VCSDependency( @@ -467,7 +474,11 @@ def test_parse_environment_file_with_git(git_environment: Path): def test_parse_environment_file_with_git_tag(git_tag_environment: Path): - res = parse_environment_file(git_tag_environment, DEFAULT_PLATFORMS) + res = parse_environment_file( + git_tag_environment, + platforms=DEFAULT_PLATFORMS, + mapping_url=DEFAULT_MAPPING_URL, + ) for plat in DEFAULT_PLATFORMS: assert [dep for dep in res.dependencies[plat] if dep.manager == "pip"] == [ VCSDependency( @@ -483,13 +494,19 @@ def test_parse_environment_file_with_git_tag(git_tag_environment: Path): def test_parse_env_file_with_no_defaults(nodefaults_environment: Path): - res = parse_environment_file(nodefaults_environment, DEFAULT_PLATFORMS) + res = parse_environment_file( + nodefaults_environment, + platforms=DEFAULT_PLATFORMS, + mapping_url=DEFAULT_MAPPING_URL, + ) assert res.channels == [Channel.from_string("conda-forge")] def test_parse_env_file_with_filters_no_args(filter_conda_environment: Path): platforms = parse_platforms_from_env_file(filter_conda_environment) - res = parse_environment_file(filter_conda_environment, platforms) + res = parse_environment_file( + filter_conda_environment, platforms=platforms, mapping_url=DEFAULT_MAPPING_URL + ) assert all(x in res.platforms for x in ["osx-arm64", "osx-64", "linux-64"]) assert res.channels == 
[Channel.from_string("conda-forge")] @@ -534,7 +551,11 @@ def test_parse_env_file_with_filters_no_args(filter_conda_environment: Path): def test_parse_env_file_with_filters_defaults(filter_conda_environment: Path): - res = parse_environment_file(filter_conda_environment, DEFAULT_PLATFORMS) + res = parse_environment_file( + filter_conda_environment, + platforms=DEFAULT_PLATFORMS, + mapping_url=DEFAULT_MAPPING_URL, + ) assert all(x in res.platforms for x in DEFAULT_PLATFORMS) assert res.channels == [Channel.from_string("conda-forge")] @@ -577,7 +598,7 @@ def test_parse_platforms_from_multi_sources(multi_source_env): def test_choose_wheel() -> None: solution = solve_pypi( - { + pip_specs={ "fastavro": VersionedDependency( name="fastavro", manager="pip", @@ -605,6 +626,7 @@ def test_choose_wheel() -> None: }, python_version="3.9.7", platform="linux-64", + mapping_url=DEFAULT_MAPPING_URL, ) assert len(solution) == 1 assert solution["fastavro"].categories == {"main"} @@ -689,7 +711,7 @@ def test_parse_pip_requirement( def test_parse_meta_yaml_file(meta_yaml_environment: Path): platforms = ["linux-64", "osx-64"] - res = parse_meta_yaml_file(meta_yaml_environment, platforms) + res = parse_meta_yaml_file(meta_yaml_environment, platforms=platforms) for plat in platforms: specs = {dep.name: dep for dep in res.dependencies[plat]} assert all(x in specs for x in ["python", "numpy"]) @@ -701,7 +723,9 @@ def test_parse_meta_yaml_file(meta_yaml_environment: Path): def test_parse_poetry(poetry_pyproject_toml: Path): - res = parse_pyproject_toml(poetry_pyproject_toml, ["linux-64"]) + res = parse_pyproject_toml( + poetry_pyproject_toml, platforms=["linux-64"], mapping_url=DEFAULT_MAPPING_URL + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -723,7 +747,11 @@ def test_parse_poetry(poetry_pyproject_toml: Path): def test_parse_poetry_default_pip(poetry_pyproject_toml_default_pip: Path): - res = parse_pyproject_toml(poetry_pyproject_toml_default_pip, ["linux-64"]) + res = parse_pyproject_toml( + poetry_pyproject_toml_default_pip, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -743,7 +771,11 @@ def test_parse_poetry_default_pip(poetry_pyproject_toml_default_pip: Path): def test_parse_poetry_skip_non_conda_lock( poetry_pyproject_toml_skip_non_conda_lock: Path, ): - res = parse_pyproject_toml(poetry_pyproject_toml_skip_non_conda_lock, ["linux-64"]) + res = parse_pyproject_toml( + poetry_pyproject_toml_skip_non_conda_lock, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -760,7 +792,11 @@ def test_parse_poetry_skip_non_conda_lock( def test_parse_poetry_git(poetry_pyproject_toml_git: Path): - res = parse_pyproject_toml(poetry_pyproject_toml_git, ["linux-64"]) + res = parse_pyproject_toml( + poetry_pyproject_toml_git, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = {dep.name: dep for dep in res.dependencies["linux-64"]} @@ -771,7 +807,11 @@ def test_parse_poetry_git(poetry_pyproject_toml_git: Path): def test_parse_poetry_no_pypi(poetry_pyproject_toml_no_pypi: Path): platforms = parse_platforms_from_pyproject_toml(poetry_pyproject_toml_no_pypi) - res = parse_pyproject_toml(poetry_pyproject_toml_no_pypi, platforms) + res = parse_pyproject_toml( + poetry_pyproject_toml_no_pypi, + platforms=platforms, + mapping_url=DEFAULT_MAPPING_URL, + ) assert res.allow_pypi_requests is False @@ -781,6 +821,7 @@ def 
test_poetry_no_pypi_multiple_pyprojects( ): spec = make_lock_spec( src_files=poetry_pyproject_toml_no_pypi_other_projects, + mapping_url=DEFAULT_MAPPING_URL, ) assert ( spec.allow_pypi_requests is True @@ -790,6 +831,7 @@ def test_poetry_no_pypi_multiple_pyprojects( *poetry_pyproject_toml_no_pypi_other_projects, poetry_pyproject_toml_no_pypi, ], + mapping_url=DEFAULT_MAPPING_URL, ) assert ( spec.allow_pypi_requests is False @@ -809,7 +851,9 @@ def contains_pypi(pool): def test_spec_poetry(poetry_pyproject_toml: Path): - spec = make_lock_spec(src_files=[poetry_pyproject_toml]) + spec = make_lock_spec( + src_files=[poetry_pyproject_toml], mapping_url=DEFAULT_MAPPING_URL + ) for plat in spec.platforms: deps = {d.name for d in spec.dependencies[plat]} assert "tomlkit" in deps @@ -819,6 +863,7 @@ def test_spec_poetry(poetry_pyproject_toml: Path): spec = make_lock_spec( src_files=[poetry_pyproject_toml], required_categories={"main", "dev"}, + mapping_url=DEFAULT_MAPPING_URL, ) for plat in spec.platforms: deps = {d.name for d in spec.dependencies[plat]} @@ -829,6 +874,7 @@ def test_spec_poetry(poetry_pyproject_toml: Path): spec = make_lock_spec( src_files=[poetry_pyproject_toml], required_categories={"main"}, + mapping_url=DEFAULT_MAPPING_URL, ) for plat in spec.platforms: deps = {d.name for d in spec.dependencies[plat]} @@ -838,7 +884,9 @@ def test_spec_poetry(poetry_pyproject_toml: Path): def test_parse_flit(flit_pyproject_toml: Path): - res = parse_pyproject_toml(flit_pyproject_toml, ["linux-64"]) + res = parse_pyproject_toml( + flit_pyproject_toml, platforms=["linux-64"], mapping_url=DEFAULT_MAPPING_URL + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -858,7 +906,11 @@ def test_parse_flit(flit_pyproject_toml: Path): def test_parse_flit_default_pip(flit_pyproject_toml_default_pip: Path): - res = parse_pyproject_toml(flit_pyproject_toml_default_pip, ["linux-64"]) + res = parse_pyproject_toml( + flit_pyproject_toml_default_pip, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -876,7 +928,11 @@ def test_parse_flit_default_pip(flit_pyproject_toml_default_pip: Path): def test_parse_flit_skip_non_conda_lock( flit_pyproject_toml_skip_non_conda_lock: Path, ): - res = parse_pyproject_toml(flit_pyproject_toml_skip_non_conda_lock, ["linux-64"]) + res = parse_pyproject_toml( + flit_pyproject_toml_skip_non_conda_lock, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -893,7 +949,9 @@ def test_parse_flit_skip_non_conda_lock( def test_parse_pdm(pdm_pyproject_toml: Path): - res = parse_pyproject_toml(pdm_pyproject_toml, ["linux-64"]) + res = parse_pyproject_toml( + pdm_pyproject_toml, platforms=["linux-64"], mapping_url=DEFAULT_MAPPING_URL + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -917,7 +975,11 @@ def test_parse_pdm(pdm_pyproject_toml: Path): def test_parse_pdm_default_pip(pdm_pyproject_toml_default_pip: Path): - res = parse_pyproject_toml(pdm_pyproject_toml_default_pip, ["linux-64"]) + res = parse_pyproject_toml( + pdm_pyproject_toml_default_pip, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -936,7 +998,11 @@ def test_parse_pdm_default_pip(pdm_pyproject_toml_default_pip: Path): def test_parse_pdm_skip_non_conda_lock( pdm_pyproject_toml_skip_non_conda_lock: Path, ): - res = parse_pyproject_toml(pdm_pyproject_toml_skip_non_conda_lock, 
["linux-64"]) + res = parse_pyproject_toml( + pdm_pyproject_toml_skip_non_conda_lock, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -954,7 +1020,9 @@ def test_parse_pdm_skip_non_conda_lock( def test_parse_pyproject_channel_toml(pyproject_channel_toml: Path): - res = parse_pyproject_toml(pyproject_channel_toml, ["linux-64"]) + res = parse_pyproject_toml( + pyproject_channel_toml, platforms=["linux-64"], mapping_url=DEFAULT_MAPPING_URL + ) specs = { dep.name: typing.cast(VersionedDependency, dep) @@ -968,7 +1036,11 @@ def test_parse_poetry_invalid_optionals(pyproject_optional_toml: Path): filename = pyproject_optional_toml.name with pytest.warns(Warning) as record: - _ = parse_pyproject_toml(pyproject_optional_toml, ["linux-64"]) + _ = parse_pyproject_toml( + pyproject_optional_toml, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) assert len(record) >= 4 messages = [str(w.message) for w in record] @@ -1060,7 +1132,7 @@ def test_run_lock( monkeypatch.chdir(zlib_environment.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([zlib_environment], conda_exe=conda_exe) + run_lock([zlib_environment], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) def test_run_lock_channel_toml( @@ -1069,7 +1141,9 @@ def test_run_lock_channel_toml( monkeypatch.chdir(pyproject_channel_toml.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([pyproject_channel_toml], conda_exe=conda_exe) + run_lock( + [pyproject_channel_toml], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL + ) def test_run_lock_with_input_metadata( @@ -1087,6 +1161,7 @@ def test_run_lock_with_input_metadata( MetadataOption.InputSha, ] ), + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file(zlib_environment.parent / DEFAULT_LOCKFILE_NAME) @@ -1123,7 +1198,7 @@ def test_msys2_channel_included_in_defaults_on_windows( monkeypatch.chdir(msys2_environment.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([msys2_environment], conda_exe=conda_exe) + run_lock([msys2_environment], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) lockfile = parse_conda_lock_file(msys2_environment.parent / DEFAULT_LOCKFILE_NAME) m2_zlib_packages = [ package for package in lockfile.package if package.name == "m2-zlib" @@ -1154,6 +1229,7 @@ def test_run_lock_with_time_metadata( MetadataOption.TimeStamp, ] ), + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file(TIME_DIR / DEFAULT_LOCKFILE_NAME) @@ -1209,6 +1285,7 @@ def test_run_lock_with_git_metadata( MetadataOption.GitUserEmail, ] ), + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file( git_metadata_zlib_environment.parent / DEFAULT_LOCKFILE_NAME @@ -1247,6 +1324,7 @@ def test_run_lock_with_custom_metadata( [custom_metadata_environment / "environment.yml"], conda_exe=conda_exe, metadata_yamls=[custom_json_metadata, custom_yaml_metadata], + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file( custom_yaml_metadata.parent / DEFAULT_LOCKFILE_NAME @@ -1268,6 +1346,7 @@ def test_run_lock_blas_mkl( [blas_mkl_environment], conda_exe=conda_exe, platforms=["linux-64", "win-64", "osx-64"], + mapping_url=DEFAULT_MAPPING_URL, ) @@ -1333,16 +1412,26 @@ def test_run_lock_with_update( if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") pre_environment = update_environment.parent / "environment-preupdate.yml" - run_lock([pre_environment], 
conda_exe="mamba") + run_lock([pre_environment], conda_exe="mamba", mapping_url=DEFAULT_MAPPING_URL) # files should be ready now - run_lock([pre_environment], conda_exe=conda_exe, update=["pydantic"]) + run_lock( + [pre_environment], + conda_exe=conda_exe, + update=["pydantic"], + mapping_url=DEFAULT_MAPPING_URL, + ) pre_lock = { p.name: p for p in parse_conda_lock_file( update_environment.parent / DEFAULT_LOCKFILE_NAME ).package } - run_lock([update_environment], conda_exe=conda_exe, update=["pydantic"]) + run_lock( + [update_environment], + conda_exe=conda_exe, + update=["pydantic"], + mapping_url=DEFAULT_MAPPING_URL, + ) post_lock = { p.name: p for p in parse_conda_lock_file( @@ -1384,6 +1473,7 @@ def test_run_lock_with_update_filter_platform( conda_exe=conda_exe, update=["zlib"], platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, ) post_lock = { (p.name, p.platform): p for p in parse_conda_lock_file(lockfile_path).package @@ -1405,8 +1495,8 @@ def test_remove_dependency( environment_dir = pre_env.parent monkeypatch.chdir(environment_dir) - run_lock([pre_env], conda_exe=conda_exe) - run_lock([post_env], conda_exe=conda_exe) + run_lock([pre_env], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) + run_lock([post_env], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) post_lock = [ p.name for p in parse_conda_lock_file(environment_dir / DEFAULT_LOCKFILE_NAME).package @@ -1427,8 +1517,8 @@ def test_move_dependency_from_pip_section( environment_dir = pre_env.parent monkeypatch.chdir(environment_dir) - run_lock([pre_env], conda_exe=conda_exe) - run_lock([post_env], conda_exe=conda_exe) + run_lock([pre_env], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) + run_lock([post_env], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) post_lock = [ p.name for p in parse_conda_lock_file(environment_dir / DEFAULT_LOCKFILE_NAME).package @@ -1443,10 +1533,12 @@ def test_run_lock_with_locked_environment_files( """run_lock() with default args uses source files from lock""" monkeypatch.chdir(update_environment.parent) pre_environment = update_environment.parent / "environment-preupdate.yml" - run_lock([pre_environment], conda_exe="mamba") + run_lock([pre_environment], conda_exe="mamba", mapping_url=DEFAULT_MAPPING_URL) make_lock_files = MagicMock() monkeypatch.setattr("conda_lock.conda_lock.make_lock_files", make_lock_files) - run_lock([], conda_exe=conda_exe, update=["pydantic"]) + run_lock( + [], conda_exe=conda_exe, update=["pydantic"], mapping_url=DEFAULT_MAPPING_URL + ) src_files = make_lock_files.call_args.kwargs["src_files"] assert [p.resolve() for p in src_files] == [ @@ -1467,13 +1559,24 @@ def test_run_lock_relative_source_path( monkeypatch.chdir(source_paths) environment = Path("sources/environment.yaml") lockfile = Path("lockfile/conda-lock.yml") - run_lock([environment], lockfile_path=lockfile, conda_exe="mamba") + run_lock( + [environment], + lockfile_path=lockfile, + conda_exe="mamba", + mapping_url=DEFAULT_MAPPING_URL, + ) lock_content = parse_conda_lock_file(lockfile) locked_environment = lock_content.metadata.sources[0] assert Path(locked_environment) == Path("../sources/environment.yaml") make_lock_files = MagicMock() monkeypatch.setattr("conda_lock.conda_lock.make_lock_files", make_lock_files) - run_lock([], lockfile_path=lockfile, conda_exe=conda_exe, update=["pydantic"]) + run_lock( + [], + lockfile_path=lockfile, + conda_exe=conda_exe, + update=["pydantic"], + mapping_url=DEFAULT_MAPPING_URL, + ) src_files = make_lock_files.call_args.kwargs["src_files"] 
assert [p.resolve() for p in src_files] == [environment.resolve()] @@ -1489,7 +1592,11 @@ def test_git_gh_408( monkeypatch.chdir(test_git_package_environment.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([test_git_package_environment], conda_exe=conda_exe) + run_lock( + [test_git_package_environment], + conda_exe=conda_exe, + mapping_url=DEFAULT_MAPPING_URL, + ) def test_run_lock_with_pip( @@ -1498,7 +1605,7 @@ def test_run_lock_with_pip( monkeypatch.chdir(pip_environment.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([pip_environment], conda_exe=conda_exe) + run_lock([pip_environment], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL) @pytest.fixture @@ -1512,7 +1619,11 @@ def test_os_name_marker( monkeypatch.chdir(os_name_marker_environment.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([os_name_marker_environment], conda_exe=conda_exe) + run_lock( + [os_name_marker_environment], + conda_exe=conda_exe, + mapping_url=DEFAULT_MAPPING_URL, + ) lockfile = parse_conda_lock_file( os_name_marker_environment.parent / DEFAULT_LOCKFILE_NAME ) @@ -1528,7 +1639,11 @@ def test_run_lock_with_pip_environment_different_names_same_deps( monkeypatch.chdir(pip_environment_different_names_same_deps.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([pip_environment_different_names_same_deps], conda_exe=conda_exe) + run_lock( + [pip_environment_different_names_same_deps], + conda_exe=conda_exe, + mapping_url=DEFAULT_MAPPING_URL, + ) def test_run_lock_with_pip_hash_checking( @@ -1540,7 +1655,11 @@ def test_run_lock_with_pip_hash_checking( monkeypatch.chdir(work_dir) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([pip_hash_checking_environment], conda_exe=conda_exe) + run_lock( + [pip_hash_checking_environment], + conda_exe=conda_exe, + mapping_url=DEFAULT_MAPPING_URL, + ) lockfile = parse_conda_lock_file(work_dir / DEFAULT_LOCKFILE_NAME) hashes = {package.name: package.hash for package in lockfile.package} @@ -1555,7 +1674,9 @@ def test_run_lock_uppercase_pip( monkeypatch.chdir(env_with_uppercase_pip.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([env_with_uppercase_pip], conda_exe=conda_exe) + run_lock( + [env_with_uppercase_pip], conda_exe=conda_exe, mapping_url=DEFAULT_MAPPING_URL + ) def test_run_lock_with_local_package( @@ -1567,7 +1688,9 @@ def test_run_lock_with_local_package( if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - lock_spec = make_lock_spec(src_files=[pip_local_package_environment]) + lock_spec = make_lock_spec( + src_files=[pip_local_package_environment], mapping_url=DEFAULT_MAPPING_URL + ) assert not any( p.manager == "pip" for platform in lock_spec.platforms @@ -1593,6 +1716,7 @@ def test_run_lock_with_input_hash_check( platforms=["linux-64"], conda_exe=conda_exe, check_input_hash=True, + mapping_url=DEFAULT_MAPPING_URL, ) stat = lockfile.stat() created = stat.st_mtime_ns @@ -1608,6 +1732,7 @@ def test_run_lock_with_input_hash_check( platforms=["linux-64"], conda_exe=conda_exe, check_input_hash=True, + mapping_url=DEFAULT_MAPPING_URL, ) stat = lockfile.stat() assert stat.st_mtime_ns == created @@ -1654,6 +1779,7 @@ def test_poetry_version_parsing_constraints( lockfile_path=Path(DEFAULT_LOCKFILE_NAME), metadata_yamls=(), virtual_package_repo=vpr, + mapping_url=DEFAULT_MAPPING_URL, ) python = next(p for p in lockfile_contents.package 
if p.name == "python") @@ -1671,7 +1797,12 @@ def test_run_with_channel_inversion( the higher priority conda-forge channel. """ monkeypatch.chdir(channel_inversion.parent) - run_lock([channel_inversion], conda_exe=mamba_exe, platforms=["linux-64"]) + run_lock( + [channel_inversion], + conda_exe=mamba_exe, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) lockfile = parse_conda_lock_file(channel_inversion.parent / DEFAULT_LOCKFILE_NAME) for package in lockfile.package: if package.name == "zlib": @@ -1870,6 +2001,7 @@ def test_solve_arch_multiple_categories(): channels=channels, pip_repositories=[], virtual_package_repo=vpr, + mapping_url=DEFAULT_MAPPING_URL, ) python_deps = [dep for dep in locked_deps if dep.name == "python"] assert len(python_deps) == 1 @@ -2559,19 +2691,15 @@ def test_lookup_sources(): Path(__file__).parent / "test-lookup" / "emoji-to-python-dateutil-lookup.yml" ) url = f"file://{lookup.absolute()}" - LOOKUP_OBJECT = _LookupLoader() - LOOKUP_OBJECT.mapping_url = url - assert LOOKUP_OBJECT.conda_lookup["emoji"]["pypi_name"] == "python-dateutil" + assert conda_name_to_pypi_name("emoji", url) == "python-dateutil" # Test that the lookup can be read from a straight filename url = str(lookup.absolute()) - LOOKUP_OBJECT = _LookupLoader() - LOOKUP_OBJECT.mapping_url = url - assert LOOKUP_OBJECT.conda_lookup["emoji"]["pypi_name"] == "python-dateutil" + assert conda_name_to_pypi_name("emoji", url) == "python-dateutil" # Test that the default remote lookup contains expected nontrivial mappings - LOOKUP_OBJECT = _LookupLoader() - assert LOOKUP_OBJECT.conda_lookup["python-build"]["pypi_name"] == "build" + url = DEFAULT_MAPPING_URL + assert conda_name_to_pypi_name("python-build", url) == "build" @pytest.fixture @@ -2679,7 +2807,12 @@ def test_pip_finds_recent_manylinux_wheels( `conda_lock/pypi_solver.py` is out of date. 
""" monkeypatch.chdir(lightgbm_environment.parent) - run_lock([lightgbm_environment], conda_exe=conda_exe, platforms=["linux-64"]) + run_lock( + [lightgbm_environment], + conda_exe=conda_exe, + platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, + ) lockfile = parse_conda_lock_file( lightgbm_environment.parent / DEFAULT_LOCKFILE_NAME ) @@ -2738,6 +2871,7 @@ def test_pip_respects_glibc_version( conda_exe=str(conda_exe), platforms=["linux-64"], virtual_package_spec=env_file.parent / "virtual-packages.yml", + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file(env_file.parent / DEFAULT_LOCKFILE_NAME) @@ -2843,7 +2977,9 @@ def test_platformenv_linux_platforms(): def test_parse_environment_file_with_pip_and_platform_selector(): """See https://github.com/conda/conda-lock/pull/564 for the context.""" env_file = TESTS_DIR / "test-pip-with-platform-selector" / "environment.yml" - spec = parse_environment_file(env_file, platforms=["linux-64", "osx-arm64"]) + spec = parse_environment_file( + env_file, platforms=["linux-64", "osx-arm64"], mapping_url=DEFAULT_MAPPING_URL + ) assert spec.platforms == ["linux-64", "osx-arm64"] assert spec.dependencies["osx-arm64"] == [ VersionedDependency(name="tomli", manager="conda", version="") @@ -2871,6 +3007,7 @@ def test_pip_full_whl_url( [env_file], conda_exe=str(conda_exe), platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, ) lockfile = parse_conda_lock_file(env_file.parent / DEFAULT_LOCKFILE_NAME) @@ -2905,6 +3042,7 @@ def test_when_merging_lockfiles_content_hashes_are_updated( environment_files=[work_path / "environment-preupdate.yml"], conda_exe=str(conda_exe), platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, ) def get_content_hashes_for_lock_file(lock_file: Path) -> typing.Dict[str, str]: @@ -2916,6 +3054,7 @@ def get_content_hashes_for_lock_file(lock_file: Path) -> typing.Dict[str, str]: environment_files=[work_path / "environment-postupdate.yml"], conda_exe=str(conda_exe), platforms=["linux-64"], + mapping_url=DEFAULT_MAPPING_URL, ) postupdate_hashes = get_content_hashes_for_lock_file(work_path / "conda-lock.yml") assert preupdate_hashes != postupdate_hashes diff --git a/tests/test_markers.py b/tests/test_markers.py index 45c61cf8a..07e9de440 100644 --- a/tests/test_markers.py +++ b/tests/test_markers.py @@ -2,6 +2,7 @@ import pytest +from conda_lock.lookup import DEFAULT_MAPPING_URL from conda_lock.src_parser import make_lock_spec @@ -77,7 +78,9 @@ def cowsay_src_file(request, tmp_path: Path): def test_sys_platform_marker(cowsay_src_file): - lock_spec = make_lock_spec(src_files=[cowsay_src_file]) + lock_spec = make_lock_spec( + src_files=[cowsay_src_file], mapping_url=DEFAULT_MAPPING_URL + ) dependencies = lock_spec.dependencies platform_has_cowsay = { platform: any(dep.name == "cowsay" for dep in platform_deps) diff --git a/tests/test_pip_repositories.py b/tests/test_pip_repositories.py index 64e7c2a2b..7d0a29643 100644 --- a/tests/test_pip_repositories.py +++ b/tests/test_pip_repositories.py @@ -13,6 +13,7 @@ from conda_lock.conda_lock import DEFAULT_LOCKFILE_NAME, run_lock from conda_lock.lockfile import parse_conda_lock_file +from conda_lock.lookup import DEFAULT_MAPPING_URL from tests.test_conda_lock import clone_test_dir @@ -143,7 +144,11 @@ def test_it_uses_pip_repositories_with_env_var_substitution( assert environment_file.exists(), list(directory.iterdir()) # WHEN I create the lockfile - run_lock([directory / "environment.yaml"], conda_exe=conda_exe) + run_lock( + [directory / "environment.yaml"], + 
conda_exe=conda_exe, + mapping_url=DEFAULT_MAPPING_URL, + ) # THEN the lockfile is generated correctly lockfile_path = directory / DEFAULT_LOCKFILE_NAME diff --git a/tests/test_regression.py b/tests/test_regression.py index 3d163183c..792d15b0e 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -12,6 +12,7 @@ from conda_lock.conda_lock import run_lock from conda_lock.invoke_conda import is_micromamba +from conda_lock.lookup import DEFAULT_MAPPING_URL from conda_lock.models.lock_spec import VersionedDependency from conda_lock.src_parser import DEFAULT_PLATFORMS from conda_lock.src_parser.environment_yaml import parse_environment_file @@ -47,7 +48,12 @@ def test_pr_436( ) (tmp_path / "environment.yml").write_text(spec) monkeypatch.chdir(tmp_path) - run_lock([tmp_path / "environment.yml"], conda_exe=mamba_exe, platforms=[platform]) + run_lock( + [tmp_path / "environment.yml"], + conda_exe=mamba_exe, + platforms=[platform], + mapping_url=DEFAULT_MAPPING_URL, + ) @pytest.mark.parametrize( @@ -68,7 +74,7 @@ def test_conda_pip_regressions_gh290( """Simple test that asserts that these engieonments can be locked""" spec = clone_test_dir(test_dir, tmp_path).joinpath(filename) monkeypatch.chdir(spec.parent) - run_lock([spec], conda_exe=mamba_exe) + run_lock([spec], conda_exe=mamba_exe, mapping_url=DEFAULT_MAPPING_URL) @pytest.fixture @@ -86,7 +92,11 @@ def test_run_lock_regression_gh155( monkeypatch.chdir(pip_environment_regression_gh155.parent) if is_micromamba(conda_exe): monkeypatch.setenv("CONDA_FLAGS", "-v") - run_lock([pip_environment_regression_gh155], conda_exe=conda_exe) + run_lock( + [pip_environment_regression_gh155], + conda_exe=conda_exe, + mapping_url=DEFAULT_MAPPING_URL, + ) @pytest.fixture @@ -97,7 +107,11 @@ def pip_environment_regression_gh449(tmp_path: Path): def test_pip_environment_regression_gh449(pip_environment_regression_gh449: Path): - res = parse_environment_file(pip_environment_regression_gh449, DEFAULT_PLATFORMS) + res = parse_environment_file( + pip_environment_regression_gh449, + DEFAULT_PLATFORMS, + mapping_url=DEFAULT_MAPPING_URL, + ) for plat in DEFAULT_PLATFORMS: assert [dep for dep in res.dependencies[plat] if dep.manager == "pip"] == [ VersionedDependency(