diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dbf2f77f0..f9428b6f2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,20 +2,20 @@ exclude: ^conda_lock/_vendor/.*$ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v4.3.0 hooks: - id: trailing-whitespace exclude: "^.*\\.patch$" - id: check-ast - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 22.10.0 hooks: - id: black language_version: python3 -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.2 +- repo: https://github.com/pycqa/flake8 + rev: 5.0.4 hooks: - id: flake8 @@ -26,8 +26,8 @@ repos: args: ["--profile", "black", "--filter-files"] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.931 + rev: v0.991 hooks: - id: mypy - additional_dependencies: [types-filelock, types-requests, types-toml, types-PyYAML, types-setuptools, pydantic] + additional_dependencies: [types-filelock, types-requests, types-toml, types-PyYAML, types-freezegun, types-setuptools, pydantic] exclude: ^tests/test-local-pip/setup.py$ \ No newline at end of file diff --git a/conda_lock/conda_lock.py b/conda_lock/conda_lock.py index 1c46f6f35..31b6157ba 100644 --- a/conda_lock/conda_lock.py +++ b/conda_lock/conda_lock.py @@ -8,7 +8,6 @@ import pathlib import posixpath import re -import subprocess import sys import tempfile @@ -34,7 +33,7 @@ import pkg_resources import yaml -from ensureconda import ensureconda +from ensureconda.api import ensureconda from typing_extensions import Literal from conda_lock.click_helpers import OrderedGroup @@ -65,10 +64,14 @@ from conda_lock.lookup import set_lookup_location from conda_lock.src_parser import ( Dependency, + GitMeta, + InputMeta, LockedDependency, Lockfile, LockMeta, LockSpecification, + MetadataOption, + TimeMeta, UpdateSpecification, aggregate_lock_specs, ) @@ -292,6 +295,8 @@ def make_lock_files( filter_categories: bool = True, extras: Optional[AbstractSet[str]] = None, check_input_hash: bool = False, + 
metadata_choices: AbstractSet[MetadataOption] = frozenset(), + metadata_yamls: Sequence[pathlib.Path] = (), ) -> None: """ Generate a lock file from the src files provided @@ -325,6 +330,10 @@ def make_lock_files( Filter out unused categories prior to solving check_input_hash : Do not re-solve for each target platform for which specifications are unchanged + metadata_choices: + Set of selected metadata fields to generate for this lockfile. + metadata_yamls: + YAML or JSON file(s) containing structured metadata to add to metadata section of the lockfile. """ # initialize virtual package fake @@ -401,10 +410,16 @@ def make_lock_files( platforms=platforms_to_lock, lockfile_path=lockfile_path, update_spec=update_spec, + metadata_choices=metadata_choices, + metadata_yamls=metadata_yamls, ) if "lock" in kinds: - write_conda_lock_file(lock_content, lockfile_path) + write_conda_lock_file( + lock_content, + lockfile_path, + metadata_choices=metadata_choices, + ) print( " - Install lock using:", KIND_USE_TEXT["lock"].format(lockfile=str(lockfile_path)), @@ -725,6 +740,34 @@ def _solve_for_arch( return list(conda_deps.values()) + list(pip_deps.values()) +def convert_structured_metadata_yaml(in_path: pathlib.Path) -> Dict[str, Any]: + with in_path.open("r") as infile: + metadata = yaml.safe_load(infile) + return metadata + + +def update_metadata(to_change: Dict[str, Any], change_source: Dict[str, Any]) -> None: + for key in change_source: + if key in to_change: + logger.warning( + f"Custom metadata field {key} provided twice, overwriting value " + + f"{to_change[key]} with {change_source[key]}" + ) + to_change.update(change_source) + + +def get_custom_metadata( + metadata_yamls: Sequence[pathlib.Path], +) -> Optional[Dict[str, str]]: + custom_metadata_dict: Dict[str, str] = {} + for yaml_path in metadata_yamls: + new_metadata = convert_structured_metadata_yaml(yaml_path) + update_metadata(custom_metadata_dict, new_metadata) + if custom_metadata_dict: + return 
custom_metadata_dict + return None + + def create_lockfile_from_spec( *, conda: PathLike, @@ -732,6 +775,8 @@ def create_lockfile_from_spec( platforms: List[str] = [], lockfile_path: pathlib.Path, update_spec: Optional[UpdateSpecification] = None, + metadata_choices: AbstractSet[MetadataOption] = frozenset(), + metadata_yamls: Sequence[pathlib.Path] = (), ) -> Lockfile: """ Solve or update specification @@ -754,6 +799,38 @@ def create_lockfile_from_spec( for dep in deps: locked[(dep.manager, dep.name, dep.platform)] = dep + spec_sources: Dict[str, pathlib.Path] = {} + for source in spec.sources: + try: + path = relative_path(lockfile_path.parent, source) + except ValueError as e: + if "Paths don't have the same drive" not in str(e): + raise e + path = str(source.resolve()) + spec_sources[path] = source + + if MetadataOption.TimeStamp in metadata_choices: + time_metadata = TimeMeta.create() + else: + time_metadata = None + + git_metadata = GitMeta.create( + metadata_choices=metadata_choices, + src_files=spec.sources, + ) + + if metadata_choices & {MetadataOption.InputSha, MetadataOption.InputMd5}: + inputs_metadata: Optional[Dict[str, InputMeta]] = { + relative_path: InputMeta.create( + metadata_choices=metadata_choices, src_file=src_file + ) + for relative_path, src_file in spec_sources.items() + } + else: + inputs_metadata = None + + custom_metadata = get_custom_metadata(metadata_yamls=metadata_yamls) + return Lockfile( package=[locked[k] for k in locked], metadata=LockMeta( @@ -761,6 +838,10 @@ def create_lockfile_from_spec( channels=[c for c in spec.channels], platforms=spec.platforms, sources=[str(source.resolve()) for source in spec.sources], + git_metadata=git_metadata, + time_metadata=time_metadata, + inputs_metadata=inputs_metadata, + custom_metadata=custom_metadata, ), ) @@ -939,6 +1020,8 @@ def run_lock( virtual_package_spec: Optional[pathlib.Path] = None, update: Optional[List[str]] = None, filter_categories: bool = False, + metadata_choices: 
AbstractSet[MetadataOption] = frozenset(), + metadata_yamls: Sequence[pathlib.Path] = (), ) -> None: if environment_files == DEFAULT_FILES: if lockfile_path.exists(): @@ -983,6 +1066,8 @@ def run_lock( extras=extras, check_input_hash=check_input_hash, filter_categories=filter_categories, + metadata_choices=metadata_choices, + metadata_yamls=metadata_yamls, ) @@ -1114,10 +1199,29 @@ def main() -> None: type=str, help="Location of the lookup file containing Pypi package names to conda names.", ) +@click.option( + "--md", + "--metadata", + "metadata_choices", + default=[], + multiple=True, + type=click.Choice([md.value for md in MetadataOption]), + help="Metadata fields to include in lock-file", +) +@click.option( + "--mdy", + "--metadata-yaml", + "--metadata-json", + "metadata_yamls", + default=[], + multiple=True, + type=click.Path(), + help="YAML or JSON file(s) containing structured metadata to add to metadata section of the lockfile.", +) @click.pass_context def lock( ctx: click.Context, - conda: Optional[PathLike], + conda: Optional[str], mamba: bool, micromamba: bool, platform: List[str], @@ -1133,9 +1237,11 @@ def lock( check_input_hash: bool, log_level: TLogLevel, pdb: bool, - virtual_package_spec: Optional[PathLike], + virtual_package_spec: Optional[pathlib.Path], pypi_to_conda_lookup_file: Optional[str], update: Optional[List[str]] = None, + metadata_choices: Sequence[str] = (), + metadata_yamls: Sequence[pathlib.Path] = (), ) -> None: """Generate fully reproducible lock files for conda environments. 
@@ -1155,6 +1261,10 @@ def lock( if pypi_to_conda_lookup_file: set_lookup_location(pypi_to_conda_lookup_file) + metadata_enum_choices = set(MetadataOption(md) for md in metadata_choices) + + metadata_yamls = [pathlib.Path(path) for path in metadata_yamls] + # bail out if we do not encounter the default file if no files were passed if ctx.get_parameter_source("files") == click.core.ParameterSource.DEFAULT: candidates = list(files) @@ -1199,6 +1309,8 @@ def lock( virtual_package_spec=virtual_package_spec, update=update, filter_categories=filter_categories, + metadata_choices=metadata_enum_choices, + metadata_yamls=metadata_yamls, ) if strip_auth: with tempfile.TemporaryDirectory() as tempdir: diff --git a/conda_lock/conda_solver.py b/conda_lock/conda_solver.py index 2f64abf79..c8903ef99 100644 --- a/conda_lock/conda_solver.py +++ b/conda_lock/conda_solver.py @@ -2,7 +2,6 @@ import logging import os import pathlib -import re import shlex import subprocess import sys @@ -420,7 +419,7 @@ def update_specs_for_arch( ) ) } - spec_for_name = {MatchSpec(v).name: v for v in specs} + spec_for_name = {MatchSpec(v).name: v for v in specs} # type: ignore to_update = [ spec_for_name[name] for name in set(installed).intersection(update) ] diff --git a/conda_lock/invoke_conda.py b/conda_lock/invoke_conda.py index b5abfaedc..785cb46f6 100644 --- a/conda_lock/invoke_conda.py +++ b/conda_lock/invoke_conda.py @@ -10,7 +10,7 @@ from distutils.version import LooseVersion from typing import IO, Dict, Iterator, List, Optional, Sequence, Union -import ensureconda +from ensureconda.api import determine_micromamba_version, ensureconda from conda_lock.models.channel import Channel @@ -26,15 +26,17 @@ def _ensureconda( micromamba: bool = False, conda: bool = False, conda_exe: bool = False, -) -> Optional[PathLike]: - _conda_exe = ensureconda.ensureconda( +) -> Optional[pathlib.Path]: + _conda_exe = ensureconda( mamba=mamba, micromamba=micromamba, conda=conda, conda_exe=conda_exe, ) - return 
_conda_exe + if _conda_exe is None: + return None + return pathlib.Path(_conda_exe) def _determine_conda_executable( @@ -54,9 +56,7 @@ def determine_conda_executable( for candidate in _determine_conda_executable(conda_executable, mamba, micromamba): if candidate is not None: if is_micromamba(candidate): - if ensureconda.api.determine_micromamba_version( - str(candidate) - ) < LooseVersion("0.17"): + if determine_micromamba_version(str(candidate)) < LooseVersion("0.17"): mamba_root_prefix() return candidate raise RuntimeError("Could not find conda (or compatible) executable") diff --git a/conda_lock/src_parser/__init__.py b/conda_lock/src_parser/__init__.py index a1d0edccb..8ce156e41 100644 --- a/conda_lock/src_parser/__init__.py +++ b/conda_lock/src_parser/__init__.py @@ -1,21 +1,42 @@ +import datetime +import enum import hashlib import json +import logging import pathlib import typing from collections import defaultdict, namedtuple from itertools import chain -from typing import ClassVar, Dict, List, Optional, Sequence, Set, Tuple, Union +from typing import ( + TYPE_CHECKING, + AbstractSet, + ClassVar, + Dict, + List, + Optional, + Sequence, + Set, + Tuple, + Union, +) + + +if TYPE_CHECKING: + from hashlib import _Hash from pydantic import BaseModel, Field, validator from typing_extensions import Literal -from conda_lock.common import ordered_union, suffix_union +from conda_lock.common import ordered_union, relative_path, suffix_union from conda_lock.errors import ChannelAggregationError from conda_lock.models.channel import Channel from conda_lock.virtual_package import FakeRepoData +logger = logging.getLogger(__name__) + + class StrictModel(BaseModel): class Config: extra = "forbid" @@ -101,6 +122,141 @@ def validate_hash(cls, v: HashModel, values: Dict[str, typing.Any]) -> HashModel return v +class MetadataOption(enum.Enum): + TimeStamp = "timestamp" + GitSha = "git_sha" + GitUserName = "git_user_name" + GitUserEmail = "git_user_email" + InputMd5 = "input_md5" 
+ InputSha = "input_sha" + + +class TimeMeta(StrictModel): + """Stores information about when the lockfile was generated.""" + + created_at: str = Field(..., description="Time stamp of lock-file creation time") + + @classmethod + def create(cls) -> "TimeMeta": + return cls( + created_at=datetime.datetime.utcnow().isoformat(timespec="seconds") + "Z" + ) + + +class GitMeta(StrictModel): + """ + Stores information about the git repo the lockfile is being generated in (if applicable) and + the git user generating the file. + """ + + git_user_name: Optional[str] = Field( + default=None, description="Git user.name field of global config" + ) + git_user_email: Optional[str] = Field( + default=None, description="Git user.email field of global config" + ) + git_sha: Optional[str] = Field( + default=None, + description=( + "sha256 hash of the most recent git commit that modified one of the input files for " + + "this lockfile" + ), + ) + + @classmethod + def create( + cls, + metadata_choices: AbstractSet[MetadataOption], + src_files: List[pathlib.Path], + ) -> "GitMeta | None": + import git + + git_sha: "str | None" = None + git_user_name: "str | None" = None + git_user_email: "str | None" = None + + try: + repo = git.Repo(search_parent_directories=True) # type: ignore + if MetadataOption.GitSha in metadata_choices: + most_recent_datetime: Optional[datetime.datetime] = None + for src_file in src_files: + relative_src_file_path = relative_path( + pathlib.Path(repo.working_tree_dir), src_file # type: ignore + ) + commit = list( + repo.iter_commits(paths=relative_src_file_path, max_count=1) + )[0] + if repo.is_dirty(path=relative_src_file_path): + logger.warning( + "One of the inputs to conda-lock is dirty, using commit hash of head +" + ' "dirty"' + ) + git_sha = f"{repo.head.object.hexsha}-dirty" + break + else: + if ( + most_recent_datetime is None + or most_recent_datetime < commit.committed_datetime + ): + most_recent_datetime = commit.committed_datetime + git_sha = 
commit.hexsha + if MetadataOption.GitUserName in metadata_choices: + git_user_name = repo.config_reader().get_value("user", "name", None) # type: ignore + if MetadataOption.GitUserEmail in metadata_choices: + git_user_email = repo.config_reader().get_value("user", "email", None) # type: ignore + except git.exc.InvalidGitRepositoryError: # type: ignore + pass + + if any([git_sha, git_user_name, git_user_email]): + return cls( + git_sha=git_sha, + git_user_name=git_user_name, + git_user_email=git_user_email, + ) + else: + return None + + +class InputMeta(StrictModel): + """Stores information about an input provided to generate the lockfile.""" + + md5: Optional[str] = Field(..., description="md5 checksum for an input file") + sha256: Optional[str] = Field(..., description="sha256 checksum for an input file") + + @classmethod + def create( + cls, metadata_choices: AbstractSet[MetadataOption], src_file: pathlib.Path + ) -> "InputMeta": + if MetadataOption.InputSha in metadata_choices: + sha256 = cls.get_input_sha256(src_file=src_file) + else: + sha256 = None + if MetadataOption.InputMd5 in metadata_choices: + md5 = cls.get_input_md5(src_file=src_file) + else: + md5 = None + return cls( + md5=md5, + sha256=sha256, + ) + + @classmethod + def get_input_md5(cls, src_file: pathlib.Path) -> str: + hasher = hashlib.md5() + return cls.hash_file(src_file=src_file, hasher=hasher) + + @classmethod + def get_input_sha256(cls, src_file: pathlib.Path) -> str: + hasher = hashlib.sha256() + return cls.hash_file(src_file=src_file, hasher=hasher) + + @staticmethod + def hash_file(src_file: pathlib.Path, hasher: "_Hash") -> str: + with src_file.open("r") as infile: + hasher.update(infile.read().encode("utf-8")) + return hasher.hexdigest() + + class LockMeta(StrictModel): content_hash: Dict[str, str] = Field( ..., description="Hash of dependencies for each target platform" @@ -113,6 +269,23 @@ class LockMeta(StrictModel): ..., description="paths to source files, relative to the parent 
directory of the lockfile", ) + time_metadata: Optional[TimeMeta] = Field( + default=None, description="Metadata dealing with the time the lockfile was created" + ) + git_metadata: Optional[GitMeta] = Field( + default=None, + description=( + "Metadata dealing with the git repo the lockfile was created in and the user that created it" + ), + ) + inputs_metadata: Optional[Dict[str, InputMeta]] = Field( + default=None, + description="Metadata dealing with the input files used to create the lockfile", + ) + custom_metadata: Optional[Dict[str, str]] = Field( + default=None, + description="Custom metadata provided by the user to be added to the lockfile", + ) def __or__(self, other: "LockMeta") -> "LockMeta": """merge other into self""" @@ -121,11 +294,37 @@ class LockMeta(StrictModel): elif not isinstance(other, LockMeta): raise TypeError + if self.inputs_metadata is None: + new_inputs_metadata = other.inputs_metadata + elif other.inputs_metadata is None: + new_inputs_metadata = self.inputs_metadata + else: + new_inputs_metadata = self.inputs_metadata + new_inputs_metadata.update(other.inputs_metadata) + + if self.custom_metadata is None: + new_custom_metadata = other.custom_metadata + elif other.custom_metadata is None: + new_custom_metadata = self.custom_metadata + else: + new_custom_metadata = self.custom_metadata + for key in other.custom_metadata: + if key in new_custom_metadata: + logger.warning( + f"Custom metadata key {key} provided twice, overwriting original value " + + f"({new_custom_metadata[key]}) with new value " + + f"({other.custom_metadata[key]})" + ) + new_custom_metadata.update(other.custom_metadata) return LockMeta( content_hash={**self.content_hash, **other.content_hash}, channels=self.channels, platforms=sorted(set(self.platforms).union(other.platforms)), sources=ordered_union([self.sources, other.sources]), + time_metadata=other.time_metadata, + git_metadata=other.git_metadata, + inputs_metadata=new_inputs_metadata, + 
custom_metadata=new_custom_metadata, ) @validator("channels", pre=True, always=True) @@ -239,7 +438,7 @@ def content_hash(self) -> Dict[str, str]: } def content_hash_for_platform(self, platform: str) -> str: - data: dict = { + data = { "channels": [c.json() for c in self.channels], "specs": [ p.dict() diff --git a/conda_lock/src_parser/conda_common.py b/conda_lock/src_parser/conda_common.py index fc9ad0640..4d3c146d0 100644 --- a/conda_lock/src_parser/conda_common.py +++ b/conda_lock/src_parser/conda_common.py @@ -12,7 +12,7 @@ def conda_spec_to_versioned_dep(spec: str, category: str) -> VersionedDependency """ try: - ms = MatchSpec(spec) + ms = MatchSpec(spec) # type: ignore # This is done in the metaclass for the matchspec except Exception as e: raise RuntimeError(f"Failed to turn `{spec}` into a MatchSpec") from e diff --git a/conda_lock/src_parser/lockfile.py b/conda_lock/src_parser/lockfile.py index 455342fe5..aa37be00e 100644 --- a/conda_lock/src_parser/lockfile.py +++ b/conda_lock/src_parser/lockfile.py @@ -2,9 +2,12 @@ import pathlib from textwrap import dedent +from typing import Collection, Optional import yaml +from conda_lock.src_parser import MetadataOption + from . import Lockfile @@ -24,7 +27,10 @@ def parse_conda_lock_file( def write_conda_lock_file( - content: Lockfile, path: pathlib.Path, include_help_text: bool = True + content: Lockfile, + path: pathlib.Path, + metadata_choices: Optional[Collection[MetadataOption]], + include_help_text: bool = True, ) -> None: content.toposort_inplace() with path.open("w") as f: @@ -38,6 +44,12 @@ def write_section(text: str) -> None: continue print(("# " + line).rstrip(), file=f) + metadata_flags: str = ( + " ".join([f"--md {md.value}" for md in metadata_choices]) + if metadata_choices is not None and len(metadata_choices) != 0 + else "" + ) + write_section( f""" This lock file was generated by conda-lock (https://github.com/conda-incubator/conda-lock). DO NOT EDIT! 
@@ -68,16 +80,18 @@ def write_section(text: str) -> None: write_section( f""" To update a single package to the latest version compatible with the version constraints in the source: - conda-lock lock --lockfile {path.name} --update PACKAGE + conda-lock lock {metadata_flags} --lockfile {path.name} --update PACKAGE To re-solve the entire environment, e.g. after changing a version constraint in the source file: - conda-lock {' '.join('-f '+path for path in content.metadata.sources)} --lockfile {path.name} + conda-lock {metadata_flags}{' '.join('-f '+path for path in content.metadata.sources)} --lockfile {path.name} """ ) yaml.dump( { "version": Lockfile.version, - **json.loads(content.json(by_alias=True, exclude_unset=True)), + **json.loads( + content.json(by_alias=True, exclude_unset=True, exclude_none=True) + ), }, f, ) diff --git a/conda_lock/src_parser/meta_yaml.py b/conda_lock/src_parser/meta_yaml.py index 876050c92..fd377133e 100644 --- a/conda_lock/src_parser/meta_yaml.py +++ b/conda_lock/src_parser/meta_yaml.py @@ -6,13 +6,7 @@ import yaml from conda_lock.common import get_in -from conda_lock.src_parser import ( - Dependency, - LockSpecification, - VersionedDependency, - aggregate_lock_specs, -) -from conda_lock.src_parser.pyproject_toml import parse_python_requirement +from conda_lock.src_parser import Dependency, LockSpecification, aggregate_lock_specs from conda_lock.src_parser.selectors import filter_platform_selectors @@ -38,7 +32,7 @@ class UndefinedNeverFail(jinja2.Undefined): def __init__( # type: ignore self, hint=None, - obj=jinja2.runtime.missing, + obj=jinja2.utils.missing, name=None, exc=jinja2.exceptions.UndefinedError, ) -> None: @@ -61,7 +55,7 @@ def __getattr__(self, k: str) -> "UndefinedNeverFail": try: return object.__getattr__(self, k) # type: ignore except AttributeError: - return self._return_undefined(self._undefined_name + "." + k) + return self._return_undefined(self._undefined_name + "." 
+ k) # type: ignore # Unlike the methods above, Python requires that these # few methods must always return the correct type @@ -140,7 +134,6 @@ def add_spec(spec: str, category: str) -> None: if spec is None: return - from .._vendor.conda.models.match_spec import MatchSpec from .conda_common import conda_spec_to_versioned_dep dep = conda_spec_to_versioned_dep(spec, category) diff --git a/conda_lock/src_parser/pyproject_toml.py b/conda_lock/src_parser/pyproject_toml.py index 1b9f62463..82cc47639 100644 --- a/conda_lock/src_parser/pyproject_toml.py +++ b/conda_lock/src_parser/pyproject_toml.py @@ -2,15 +2,16 @@ import collections.abc import logging import pathlib +import sys from functools import partial from typing import AbstractSet, Any, List, Mapping, Optional, Sequence, Union from urllib.parse import urldefrag -try: +if sys.version_info >= (3, 11): from tomllib import load as toml_load -except ImportError: +else: from tomli import load as toml_load from typing_extensions import Literal diff --git a/pyrightconfig.json b/pyrightconfig.json index ba9f27761..f8fe056a2 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -4,5 +4,6 @@ "conda_lock/_vendor/cleo/**", "conda_lock/_vendor/poetry_core/**", "conda_lock/_vendor/poetry/**", - ] + ], + "reportUnusedImport": "error", } diff --git a/requirements-dev.txt b/requirements-dev.txt index c2aa6602f..5dfc1e918 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,6 +8,7 @@ flake8-builtins flake8-comprehensions flake8-mutable build +freezegun isort mypy pre_commit diff --git a/requirements.txt b/requirements.txt index cec40ffdc..ad390225c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,7 @@ pyyaml >= 5.1 ruamel.yaml tomli; python_version<"3.11" typing-extensions +gitpython toolz >=0.12.0,<1.0.0 # poetry: cachecontrol[filecache] >=0.12.9 @@ -34,4 +35,4 @@ requests >=2.18 # poetry: tomlkit >=0.7.0 # poetry: -virtualenv >=20.0.26 +virtualenv >=20.0.26 \ No newline at end of file diff 
--git a/tests/test_conda_lock.py b/tests/test_conda_lock.py index d2754faa4..2d2135cf8 100644 --- a/tests/test_conda_lock.py +++ b/tests/test_conda_lock.py @@ -1,4 +1,5 @@ import contextlib +import datetime import json import logging import os @@ -22,6 +23,7 @@ import yaml from flaky import flaky +from freezegun import freeze_time from conda_lock import __version__ from conda_lock._vendor.conda.models.match_spec import MatchSpec @@ -61,6 +63,7 @@ HashModel, LockedDependency, LockSpecification, + MetadataOption, Selectors, VersionedDependency, ) @@ -190,6 +193,11 @@ def env_with_uppercase_pip(tmp_path: Path): return clone_test_dir("test-uppercase-pip", tmp_path).joinpath("environment.yml") +@pytest.fixture +def git_metadata_zlib_environment(tmp_path: Path): + return clone_test_dir("zlib", tmp_path).joinpath("environment.yml") + + @pytest.fixture( scope="function", params=[ @@ -215,6 +223,7 @@ def _conda_exe_type(request: Any) -> str: @pytest.fixture(scope="session") +@typing.no_type_check def conda_exe(_conda_exe_type: str) -> PathLike: kwargs = dict( mamba=False, @@ -227,7 +236,41 @@ def conda_exe(_conda_exe_type: str) -> PathLike: if _conda_exe is not None: return _conda_exe - raise pytest.skip(f"{_conda_exe_type} is not installed") + pytest.skip(f"{_conda_exe_type} is not installed") + + +JSON_FIELDS: Dict[str, str] = {"json_unique_field": "test1", "common_field": "test2"} + +YAML_FIELDS: Dict[str, str] = {"yaml_unique_field": "test3", "common_field": "test4"} + +EXPECTED_CUSTOM_FIELDS: Dict[str, str] = { + "json_unique_field": "test1", + "yaml_unique_field": "test3", + "common_field": "test4", +} + + +@pytest.fixture +def custom_metadata_environment(tmp_path: Path): + return clone_test_dir("zlib", tmp_path / "test-custom-metadata") + + +@pytest.fixture +def custom_yaml_metadata(custom_metadata_environment: Path) -> Path: + outfile = custom_metadata_environment / "custom_metadata.yaml" + with outfile.open("w") as out_yaml: + yaml.dump(YAML_FIELDS, out_yaml) + + 
return outfile + + +@pytest.fixture +def custom_json_metadata(custom_metadata_environment: Path) -> Path: + outfile = custom_metadata_environment / "custom_metadata.json" + with outfile.open("w") as out_json: + json.dump(JSON_FIELDS, out_json) + + return outfile def test_parse_environment_file(gdal_environment: Path): @@ -518,6 +561,162 @@ def test_run_lock( run_lock([zlib_environment], conda_exe=conda_exe) +def test_run_lock_with_input_metadata( + monkeypatch: "pytest.MonkeyPatch", zlib_environment: Path, conda_exe: str +): + monkeypatch.chdir(zlib_environment.parent) + if is_micromamba(conda_exe): + monkeypatch.setenv("CONDA_FLAGS", "-v") + run_lock( + [zlib_environment], + conda_exe=conda_exe, + metadata_choices=set( + [ + MetadataOption.InputMd5, + MetadataOption.InputSha, + ] + ), + ) + lockfile = parse_conda_lock_file(zlib_environment.parent / DEFAULT_LOCKFILE_NAME) + + inputs_metadata = lockfile.metadata.inputs_metadata + assert inputs_metadata is not None, "Inputs Metadata was None" + print(inputs_metadata) + assert ( + inputs_metadata["environment.yml"].md5 == "5473161eb8500056d793df7ac720a36f" + ), "Input md5 didn't match expectation" + expected_shasum = "1177fb37f73bebd39bba9e504cb03495136b1961126475a5839da2e878b2afda" + assert ( + inputs_metadata["environment.yml"].sha256 == expected_shasum + ), "Input shasum didn't match expectation" + + +def test_run_lock_with_time_metadata( + monkeypatch: "pytest.MonkeyPatch", zlib_environment: Path, conda_exe: str +): + TIME_DIR = TEST_DIR / "test-time-metadata" + + TIME_DIR.mkdir(exist_ok=True) + monkeypatch.chdir(TIME_DIR) + if is_micromamba(conda_exe): + monkeypatch.setenv("CONDA_FLAGS", "-v") + frozen_datetime = datetime.datetime( + year=1, month=7, day=12, hour=15, minute=6, second=3 + ) + with freeze_time(frozen_datetime): + run_lock( + [zlib_environment], + conda_exe=conda_exe, + metadata_choices=set( + [ + MetadataOption.TimeStamp, + ] + ), + ) + lockfile = parse_conda_lock_file(TIME_DIR / 
DEFAULT_LOCKFILE_NAME) + + time_metadata = lockfile.metadata.time_metadata + assert time_metadata is not None, "Time metadata was None" + assert ( + datetime.datetime.fromisoformat(time_metadata.created_at.rstrip("Z")) + == frozen_datetime + ), ( + "Datetime added to lockfile didn't match expectation based on timestamps at start and end" + + " of test" + ) + + +def test_run_lock_with_git_metadata( + monkeypatch: "pytest.MonkeyPatch", + git_metadata_zlib_environment: Path, + conda_exe: str, +): + monkeypatch.chdir(git_metadata_zlib_environment.parent) + if is_micromamba(conda_exe): + monkeypatch.setenv("CONDA_FLAGS", "-v") + + import git + + try: + repo = git.Repo(search_parent_directories=True) # type: ignore + except git.exc.InvalidGitRepositoryError: # type: ignore + repo = git.Repo.init() # type: ignore + repo.index.add([git_metadata_zlib_environment]) + repo.index.commit( + "temporary commit for running via github actions without failure" + ) + if repo.config_reader().has_section("user"): + current_user_name = repo.config_reader().get_value("user", "name", None) + current_user_email = repo.config_reader().get_value("user", "email", None) + else: + current_user_name = None + current_user_email = None + + if current_user_name is None: + repo.config_writer().set_value("user", "name", "my_test_username").release() + if current_user_email is None: + repo.config_writer().set_value("user", "email", "my_test_email").release() + run_lock( + [git_metadata_zlib_environment], + conda_exe=conda_exe, + metadata_choices=set( + [ + MetadataOption.GitSha, + MetadataOption.GitUserName, + MetadataOption.GitUserEmail, + ] + ), + ) + lockfile = parse_conda_lock_file( + git_metadata_zlib_environment.parent / DEFAULT_LOCKFILE_NAME + ) + + assert ( + lockfile.metadata.git_metadata is not None + ), "Git metadata was None, should be some value" + assert ( + lockfile.metadata.git_metadata.git_user_name is not None + ), "Git metadata user.name was None, should be some value" + assert ( + 
lockfile.metadata.git_metadata.git_user_email is not None + ), "Git metadata user.email was None, should be some value" + if current_user_name is None: + config = repo.config_writer() + config.remove_option("user", "name") + config.release() + if current_user_email is None: + config = repo.config_writer() + config.remove_option("user", "email") + config.release() + + +def test_run_lock_with_custom_metadata( + monkeypatch: "pytest.MonkeyPatch", + custom_metadata_environment: Path, + custom_yaml_metadata: Path, + custom_json_metadata: Path, + conda_exe: str, +): + monkeypatch.chdir(custom_yaml_metadata.parent) + if is_micromamba(conda_exe): + monkeypatch.setenv("CONDA_FLAGS", "-v") + run_lock( + [custom_metadata_environment / "environment.yml"], + conda_exe=conda_exe, + metadata_yamls=[custom_json_metadata, custom_yaml_metadata], + ) + lockfile = parse_conda_lock_file( + custom_yaml_metadata.parent / DEFAULT_LOCKFILE_NAME + ) + + assert ( + lockfile.metadata.custom_metadata is not None + ), "Custom metadata was None unexpectedly" + assert ( + lockfile.metadata.custom_metadata == EXPECTED_CUSTOM_FIELDS + ), "Custom metadata didn't get written as expected" + + def test_run_lock_blas_mkl( monkeypatch: "pytest.MonkeyPatch", blas_mkl_environment: Path, conda_exe: str ): @@ -732,6 +931,7 @@ def test_poetry_version_parsing_constraints( conda=_conda_exe, spec=spec, lockfile_path=Path(DEFAULT_LOCKFILE_NAME), + metadata_yamls=(), ) python = next(p for p in lockfile_contents.package if p.name == "python") @@ -752,7 +952,7 @@ def test_run_with_channel_inversion( lockfile = parse_conda_lock_file(channel_inversion.parent / DEFAULT_LOCKFILE_NAME) for package in lockfile.package: if package.name == "cuda-python": - ms = MatchSpec(package.url) + ms = MatchSpec(package.url) # type: ignore assert ms.get("channel") == "conda-forge" break else: @@ -919,7 +1119,7 @@ def mamba_exe(): _conda_exe = _ensureconda(**kwargs) if _conda_exe is not None: return _conda_exe - raise pytest.skip("mamba 
is not installed") + pytest.skip("mamba is not installed") def _check_package_installed(package: str, prefix: str): @@ -992,7 +1192,7 @@ def test_install( "-p", platform, "-f", - zlib_environment, + str(zlib_environment), "-k", kind, "--filename-template", @@ -1015,7 +1215,7 @@ def invoke_install(*extra_args: str): "--conda", conda_exe, "--prefix", - tmp_path / env_name, + str(tmp_path / env_name), *extra_args, lock_filename, ], @@ -1224,7 +1424,7 @@ def test_virtual_packages( "-k", kind, "--virtual-package-spec", - test_dir / "virtual-packages-old-glibc.yaml", + str(test_dir / "virtual-packages-old-glibc.yaml"), ], )