From 340a768f03ca79eaefe113c4b4ee5a20d00ead2d Mon Sep 17 00:00:00 2001 From: Eric Arellano <14852634+Eric-Arellano@users.noreply.github.com> Date: Wed, 10 Aug 2022 15:30:23 -0500 Subject: [PATCH] Use a list of requirement constraints for lockfile invalidation (#16469) Improves upon https://github.com/pantsbuild/pants/pull/16420. @huonw wisely suggested we avoid using `hash` for constraints files because it makes Git diffs and merge conflicts much worse. https://github.com/python-poetry/poetry/issues/496 https://github.com/rust-lang/cargo/pull/7070 [ci skip-rust] [ci skip-build-wheels] --- .../pants/backend/python/goals/lockfile.py | 50 ++++++++++--------- .../backend/python/goals/lockfile_test.py | 50 +++++++++++-------- .../pants/backend/python/pip_requirement.py | 3 ++ .../python/util_rules/lockfile_metadata.py | 41 +++++++-------- .../util_rules/lockfile_metadata_test.py | 18 +++---- .../pants/backend/python/util_rules/pex.py | 4 +- .../python/util_rules/pex_requirements.py | 47 +++++++++-------- .../util_rules/pex_requirements_test.py | 16 ++++-- .../backend/python/util_rules/pex_test.py | 2 +- .../core/goals/update_build_files_test.py | 2 +- 10 files changed, 125 insertions(+), 108 deletions(-) diff --git a/src/python/pants/backend/python/goals/lockfile.py b/src/python/pants/backend/python/goals/lockfile.py index b1c4c85fe029..175b4e1d1e41 100644 --- a/src/python/pants/backend/python/goals/lockfile.py +++ b/src/python/pants/backend/python/goals/lockfile.py @@ -48,7 +48,6 @@ ) from pants.core.util_rules.lockfile_metadata import calculate_invalidation_digest from pants.engine.fs import CreateDigest, Digest, DigestContents, FileContent, MergeDigests -from pants.engine.internals.native_engine import FileDigest from pants.engine.process import ProcessCacheScope, ProcessResult from pants.engine.rules import Get, MultiGet, collect_rules, rule, rule_helper from pants.engine.target import AllTargets @@ -155,17 +154,23 @@ def warn_python_repos(option: str) -> None: return MaybeWarnPythonRepos() +@dataclass(frozen=True) +class _PipArgsAndConstraintsSetup: + args: tuple[str, ...] 
+ digest: Digest + constraints: FrozenOrderedSet[PipRequirement] + + @rule_helper async def _setup_pip_args_and_constraints_file( python_setup: PythonSetup, *, resolve_name: str -) -> tuple[list[str], Digest, FileDigest | None]: - extra_args = [] - extra_digests = [] - constraints_file_digest: None | FileDigest = None +) -> _PipArgsAndConstraintsSetup: + args = [] + digests = [] if python_setup.no_binary or python_setup.only_binary: pip_args_file = "__pip_args.txt" - extra_args.extend(["-r", pip_args_file]) + args.extend(["-r", pip_args_file]) pip_args_file_content = "\n".join( [f"--no-binary {pkg}" for pkg in python_setup.no_binary] + [f"--only-binary {pkg}" for pkg in python_setup.only_binary] @@ -173,17 +178,17 @@ async def _setup_pip_args_and_constraints_file( pip_args_digest = await Get( Digest, CreateDigest([FileContent(pip_args_file, pip_args_file_content.encode())]) ) - extra_digests.append(pip_args_digest) + digests.append(pip_args_digest) + constraints: FrozenOrderedSet[PipRequirement] = FrozenOrderedSet() resolve_config = await Get(ResolvePexConfig, ResolvePexConfigRequest(resolve_name)) if resolve_config.constraints_file: - _constraints_file_entry = resolve_config.constraints_file[1] - extra_args.append(f"--constraints={_constraints_file_entry.path}") - constraints_file_digest = _constraints_file_entry.file_digest - extra_digests.append(resolve_config.constraints_file[0]) + args.append(f"--constraints={resolve_config.constraints_file.path}") + digests.append(resolve_config.constraints_file.digest) + constraints = resolve_config.constraints_file.constraints - input_digest = await Get(Digest, MergeDigests(extra_digests)) - return extra_args, input_digest, constraints_file_digest + input_digest = await Get(Digest, MergeDigests(digests)) + return _PipArgsAndConstraintsSetup(tuple(args), input_digest, constraints) @rule(desc="Generate Python lockfile", level=LogLevel.DEBUG) @@ -194,16 +199,13 @@ async def generate_lockfile( python_repos: PythonRepos, python_setup: PythonSetup, ) -> GenerateLockfileResult: - constraints_file_hash: str | None = None + requirement_constraints: FrozenOrderedSet[PipRequirement] = FrozenOrderedSet() if req.use_pex: - ( - extra_args, - input_digest, - constraints_file_digest, - ) = await _setup_pip_args_and_constraints_file(python_setup, resolve_name=req.resolve_name) - if constraints_file_digest: - constraints_file_hash = constraints_file_digest.fingerprint + pip_args_setup = await _setup_pip_args_and_constraints_file( + python_setup, resolve_name=req.resolve_name + ) + requirement_constraints = pip_args_setup.constraints header_delimiter = "//" result = await Get( @@ -233,13 +235,13 @@ async def generate_lockfile( "mac", # This makes diffs more readable when lockfiles change. 
"--indent=2", - *extra_args, + *pip_args_setup.args, *python_repos.pex_args, *python_setup.manylinux_pex_args, *req.interpreter_constraints.generate_pex_arg_list(), *req.requirements, ), - additional_input_digest=input_digest, + additional_input_digest=pip_args_setup.digest, output_files=("lock.json",), description=f"Generate lockfile for {req.resolve_name}", # Instead of caching lockfile generation with LMDB, we instead use the invalidation @@ -306,7 +308,7 @@ async def generate_lockfile( metadata = PythonLockfileMetadata.new( valid_for_interpreter_constraints=req.interpreter_constraints, requirements={PipRequirement.parse(i) for i in req.requirements}, - constraints_file_hash=constraints_file_hash, + requirement_constraints=set(requirement_constraints), ) lockfile_with_header = metadata.add_header_to_lockfile( initial_lockfile_digest_contents[0].content, diff --git a/src/python/pants/backend/python/goals/lockfile_test.py b/src/python/pants/backend/python/goals/lockfile_test.py index 5365e6bf72a8..f467271994da 100644 --- a/src/python/pants/backend/python/goals/lockfile_test.py +++ b/src/python/pants/backend/python/goals/lockfile_test.py @@ -44,7 +44,7 @@ def _generate( rule_runner: RuleRunner, use_pex: bool, ansicolors_version: str = "==1.1.8", - constraints_file_hash: str | None = None, + requirement_constraints_str: str = '// "requirement_constraints": []', ) -> str: result = rule_runner.request( GenerateLockfileResult, @@ -64,24 +64,29 @@ def _generate( if not use_pex: return content - constraints_file_hash_str = f'"{constraints_file_hash}"' if constraints_file_hash else "null" - pex_header = dedent( - f"""\ - // This lockfile was autogenerated by Pants. To regenerate, run: - // - // ./pants generate-lockfiles --resolve=test - // - // --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE --- - // {{ - // "version": 3, - // "valid_for_interpreter_constraints": [], - // "generated_with_requirements": [ - // "ansicolors{ansicolors_version}" - // ], - // "constraints_file_hash": {constraints_file_hash_str} - // }} - // --- END PANTS LOCKFILE METADATA --- - """ + pex_header = ( + dedent( + f"""\ + // This lockfile was autogenerated by Pants. 
To regenerate, run: + // + // ./pants generate-lockfiles --resolve=test + // + // --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE --- + // {{ + // "version": 3, + // "valid_for_interpreter_constraints": [], + // "generated_with_requirements": [ + // "ansicolors{ansicolors_version}" + // ], + """ + ) + + requirement_constraints_str + + dedent( + """ + // } + // --- END PANTS LOCKFILE METADATA --- + """ + ) ) assert content.startswith(pex_header) return strip_prefix(content, pex_header) @@ -167,8 +172,11 @@ def test_constraints_file(rule_runner: RuleRunner) -> None: rule_runner=rule_runner, use_pex=True, ansicolors_version=">=1.0", - constraints_file_hash=( - "1999760ce9dd0f82847def308992e3345592fc9e77a937c1e9bbb78a42ae3943" + requirement_constraints_str=dedent( + """\ + // "requirement_constraints": [ + // "ansicolors==1.1.7" + // ]""" ), ) ) diff --git a/src/python/pants/backend/python/pip_requirement.py b/src/python/pants/backend/python/pip_requirement.py index fbd4578b6bbe..ba9ac1dc7f2b 100644 --- a/src/python/pants/backend/python/pip_requirement.py +++ b/src/python/pants/backend/python/pip_requirement.py @@ -82,5 +82,8 @@ def __eq__(self, other): return False return self._req == other._req + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._req})" + def __str__(self): return str(self._req) diff --git a/src/python/pants/backend/python/util_rules/lockfile_metadata.py b/src/python/pants/backend/python/util_rules/lockfile_metadata.py index 9092bee8086b..2623d2eb7bb9 100644 --- a/src/python/pants/backend/python/util_rules/lockfile_metadata.py +++ b/src/python/pants/backend/python/util_rules/lockfile_metadata.py @@ -39,7 +39,7 @@ def new( *, valid_for_interpreter_constraints: InterpreterConstraints, requirements: set[PipRequirement], - constraints_file_hash: str | None, + requirement_constraints: set[PipRequirement], ) -> PythonLockfileMetadata: """Call the most recent version of the `LockfileMetadata` class to construct a concrete instance. @@ -50,7 +50,7 @@ def new( """ return PythonLockfileMetadataV3( - valid_for_interpreter_constraints, requirements, constraints_file_hash + valid_for_interpreter_constraints, requirements, requirement_constraints ) @classmethod @@ -70,7 +70,7 @@ def is_valid_for( user_interpreter_constraints: InterpreterConstraints, interpreter_universe: Iterable[str], user_requirements: Iterable[PipRequirement], - constraints_file_path_and_hash: tuple[str, str] | None, + requirement_constraints: Iterable[PipRequirement], ) -> LockfileMetadataValidation: """Returns Truthy if this `PythonLockfileMetadata` can be used in the current execution context.""" @@ -114,7 +114,7 @@ def is_valid_for( interpreter_universe: Iterable[str], # Everything below is not used by v1. user_requirements: Iterable[PipRequirement], - constraints_file_path_and_hash: tuple[str, str] | None, + requirement_constraints: Iterable[PipRequirement], ) -> LockfileMetadataValidation: failure_reasons: set[InvalidPythonLockfileReason] = set() @@ -168,13 +168,7 @@ def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]: instance = cast(PythonLockfileMetadataV2, instance) # Requirements need to be stringified then sorted so that tests are deterministic. Sorting # followed by stringifying does not produce a meaningful result. 
- return { - "generated_with_requirements": ( - sorted(str(i) for i in instance.requirements) - if instance.requirements is not None - else None - ) - } + return {"generated_with_requirements": (sorted(str(i) for i in instance.requirements))} def is_valid_for( self, @@ -185,7 +179,7 @@ def is_valid_for( interpreter_universe: Iterable[str], user_requirements: Iterable[PipRequirement], # Everything below is not used by V2. - constraints_file_path_and_hash: tuple[str, str] | None, + requirement_constraints: Iterable[PipRequirement], ) -> LockfileMetadataValidation: failure_reasons = set() @@ -210,7 +204,7 @@ def is_valid_for( class PythonLockfileMetadataV3(PythonLockfileMetadataV2): """Lockfile version that considers constraints files.""" - constraints_file_hash: str | None + requirement_constraints: set[PipRequirement] @classmethod def _from_json_dict( @@ -221,19 +215,23 @@ def _from_json_dict( ) -> PythonLockfileMetadataV3: v2_metadata = super()._from_json_dict(json_dict, lockfile_description, error_suffix) metadata = _get_metadata(json_dict, lockfile_description, error_suffix) - constraints_file_hash = metadata( - "constraints_file_hash", str, lambda x: x # type: ignore[no-any-return] + requirement_constraints = metadata( + "requirement_constraints", + Set[PipRequirement], + lambda l: {PipRequirement.parse(i) for i in l}, ) return PythonLockfileMetadataV3( valid_for_interpreter_constraints=v2_metadata.valid_for_interpreter_constraints, requirements=v2_metadata.requirements, - constraints_file_hash=constraints_file_hash, + requirement_constraints=requirement_constraints, ) @classmethod def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]: instance = cast(PythonLockfileMetadataV3, instance) - return {"constraints_file_hash": instance.constraints_file_hash} + return { + "requirement_constraints": (sorted(str(i) for i in instance.requirement_constraints)) + } def is_valid_for( self, @@ -243,7 +241,7 @@ def is_valid_for( user_interpreter_constraints: InterpreterConstraints, interpreter_universe: Iterable[str], user_requirements: Iterable[PipRequirement], - constraints_file_path_and_hash: tuple[str, str] | None, + requirement_constraints: Iterable[PipRequirement], ) -> LockfileMetadataValidation: failure_reasons = ( super() @@ -253,14 +251,11 @@ def is_valid_for( user_interpreter_constraints=user_interpreter_constraints, interpreter_universe=interpreter_universe, user_requirements=user_requirements, - constraints_file_path_and_hash=constraints_file_path_and_hash, + requirement_constraints=requirement_constraints, ) .failure_reasons ) - provided_constraints_file_hash = ( - constraints_file_path_and_hash[1] if constraints_file_path_and_hash else None - ) - if provided_constraints_file_hash != self.constraints_file_hash: + if self.requirement_constraints != set(requirement_constraints): failure_reasons.add(InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH) return LockfileMetadataValidation(failure_reasons) diff --git a/src/python/pants/backend/python/util_rules/lockfile_metadata_test.py b/src/python/pants/backend/python/util_rules/lockfile_metadata_test.py index 39a942998b34..2e7b34b29619 100644 --- a/src/python/pants/backend/python/util_rules/lockfile_metadata_test.py +++ b/src/python/pants/backend/python/util_rules/lockfile_metadata_test.py @@ -32,7 +32,7 @@ def test_metadata_header_round_trip() -> None: ["CPython==2.7.*", "PyPy", "CPython>=3.6,<4,!=3.7.*"] ), requirements=reqset("ansicolors==0.1.0"), - constraints_file_hash="abc", + 
requirement_constraints={PipRequirement.parse("constraint")}, ) serialized_lockfile = input_metadata.add_header_to_lockfile( b"req1==1.0", regenerate_command="./pants lock", delimeter="#" @@ -62,7 +62,7 @@ def test_add_header_to_lockfile() -> None: # "generated_with_requirements": [ # "ansicolors==0.1.0" # ], -# "constraints_file_hash": null +# "requirement_constraints": [] # } # --- END PANTS LOCKFILE METADATA --- dave==3.1.4 \\ @@ -75,7 +75,7 @@ def line_by_line(b: bytes) -> list[bytes]: metadata = PythonLockfileMetadata.new( valid_for_interpreter_constraints=InterpreterConstraints([">=3.7"]), requirements=reqset("ansicolors==0.1.0"), - constraints_file_hash=None, + requirement_constraints=set(), ) result = metadata.add_header_to_lockfile( input_lockfile, regenerate_command="./pants lock", delimeter="#" @@ -158,7 +158,7 @@ def test_is_valid_for_v1(user_digest, expected_digest, user_ic, expected_ic, mat user_interpreter_constraints=InterpreterConstraints(user_ic), interpreter_universe=INTERPRETER_UNIVERSE, user_requirements=set(), - constraints_file_path_and_hash=None, + requirement_constraints=set(), ) ) == matches @@ -232,7 +232,7 @@ def test_is_valid_for_interpreter_constraints_and_requirements( for m in [ PythonLockfileMetadataV2(InterpreterConstraints(lock_ics), reqset(*lock_reqs)), PythonLockfileMetadataV3( - InterpreterConstraints(lock_ics), reqset(*lock_reqs), constraints_file_hash=None + InterpreterConstraints(lock_ics), reqset(*lock_reqs), requirement_constraints=set() ), ]: result = m.is_valid_for( @@ -241,21 +241,21 @@ def test_is_valid_for_interpreter_constraints_and_requirements( user_interpreter_constraints=InterpreterConstraints(user_ics), interpreter_universe=INTERPRETER_UNIVERSE, user_requirements=reqset(*user_reqs), - constraints_file_path_and_hash=None, + requirement_constraints=set(), ) assert result.failure_reasons == set(expected) @pytest.mark.parametrize("is_tool", [True, False]) -def test_is_valid_for_constraints_file_hash(is_tool: bool) -> None: +def test_is_valid_for_requirement_constraints(is_tool: bool) -> None: result = PythonLockfileMetadataV3( - InterpreterConstraints([]), reqset(), constraints_file_hash="abc" + InterpreterConstraints([]), reqset(), requirement_constraints={PipRequirement.parse("c1")} ).is_valid_for( is_tool=is_tool, expected_invalidation_digest="", user_interpreter_constraints=InterpreterConstraints([]), interpreter_universe=INTERPRETER_UNIVERSE, user_requirements=reqset(), - constraints_file_path_and_hash=("c.txt", "xyz"), + requirement_constraints={PipRequirement.parse("c2")}, ) assert result.failure_reasons == {InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH} diff --git a/src/python/pants/backend/python/util_rules/pex.py b/src/python/pants/backend/python/util_rules/pex.py index 13d9d56aa85b..cf51071f533b 100644 --- a/src/python/pants/backend/python/util_rules/pex.py +++ b/src/python/pants/backend/python/util_rules/pex.py @@ -430,7 +430,7 @@ async def _setup_pex_requirements( lockfile.original_lockfile, request.requirements.complete_req_strings, python_setup, - constraints_file_path_and_hash=resolve_config.constraints_file_path_and_hash, + resolve_config.constraints_file, ) return _BuildPexRequirementsSetup( @@ -468,7 +468,7 @@ async def _setup_pex_requirements( loaded_lockfile.original_lockfile, request.requirements.req_strings, python_setup, - constraints_file_path_and_hash=resolve_config.constraints_file_path_and_hash, + resolve_config.constraints_file, ) return _BuildPexRequirementsSetup( diff --git 
a/src/python/pants/backend/python/util_rules/pex_requirements.py b/src/python/pants/backend/python/util_rules/pex_requirements.py index b7e35d3e96ab..0b708901b043 100644 --- a/src/python/pants/backend/python/util_rules/pex_requirements.py +++ b/src/python/pants/backend/python/util_rules/pex_requirements.py @@ -23,9 +23,7 @@ CreateDigest, Digest, DigestContents, - DigestEntries, FileContent, - FileEntry, GlobMatchErrorBehavior, PathGlobs, ) @@ -288,18 +286,18 @@ class GeneratePythonToolLockfileSentinel(GenerateToolLockfileSentinel): pass +@dataclass(frozen=True) +class ResolvePexConstraintsFile: + digest: Digest + path: str + constraints: FrozenOrderedSet[PipRequirement] + + @dataclass(frozen=True) class ResolvePexConfig: """Configuration from `[python]` that impacts how the resolve is created.""" - constraints_file: tuple[Digest, FileEntry] | None - - @property - def constraints_file_path_and_hash(self) -> tuple[str, str] | None: - if self.constraints_file is None: - return None - file_entry = self.constraints_file[1] - return file_entry.path, file_entry.file_digest.fingerprint + constraints_file: ResolvePexConstraintsFile | None @dataclass(frozen=True) @@ -322,7 +320,7 @@ async def determine_resolve_pex_config( if issubclass(sentinel, GeneratePythonToolLockfileSentinel) ) - constraints_file: tuple[Digest, FileEntry] | None = None + constraints_file: ResolvePexConstraintsFile | None = None _constraints_file_path = python_setup.resolves_to_constraints_file( all_python_tool_resolve_names ).get(request.resolve_name) @@ -338,25 +336,29 @@ async def determine_resolve_pex_config( glob_match_error_behavior=GlobMatchErrorBehavior.error, description_of_origin=_constraints_origin, ) - _constraints_digest, _constraints_digest_entries = await MultiGet( + _constraints_digest, _constraints_digest_contents = await MultiGet( Get(Digest, PathGlobs, _constraints_path_globs), - Get(DigestEntries, PathGlobs, _constraints_path_globs), + Get(DigestContents, PathGlobs, _constraints_path_globs), ) - if len(_constraints_digest_entries) != 1: + if len(_constraints_digest_contents) != 1: raise ValueError( softwrap( f""" Expected only one file from {_constraints_origin}, but matched: - {_constraints_digest_entries} + {sorted(fc.path for fc in _constraints_digest_contents)} Did you use a glob like `*`? 
""" ) ) - _constraints_file_entry = next(iter(_constraints_digest_entries)) - assert isinstance(_constraints_file_entry, FileEntry) - constraints_file = (_constraints_digest, _constraints_file_entry) + _constraints_file_content = next(iter(_constraints_digest_contents)) + constraints = parse_requirements_file( + _constraints_file_content.content.decode("utf-8"), rel_path=_constraints_file_path + ) + constraints_file = ResolvePexConstraintsFile( + _constraints_digest, _constraints_file_path, FrozenOrderedSet(constraints) + ) return ResolvePexConfig(constraints_file=constraints_file) @@ -377,8 +379,7 @@ def validate_metadata( lockfile: Lockfile | LockfileContent, consumed_req_strings: Iterable[str], python_setup: PythonSetup, - *, - constraints_file_path_and_hash: tuple[str, str] | None, + constraints_file: ResolvePexConstraintsFile | None, ) -> None: """Given interpreter constraints and requirements to be consumed, validate lockfile metadata.""" @@ -390,7 +391,7 @@ def validate_metadata( user_interpreter_constraints=interpreter_constraints, interpreter_universe=python_setup.interpreter_versions_universe, user_requirements=user_requirements, - constraints_file_path_and_hash=constraints_file_path_and_hash, + requirement_constraints=constraints_file.constraints if constraints_file else set(), ) if validation: return @@ -401,9 +402,7 @@ def validate_metadata( lockfile=lockfile, user_interpreter_constraints=interpreter_constraints, user_requirements=user_requirements, - maybe_constraints_file_path=constraints_file_path_and_hash[0] - if constraints_file_path_and_hash - else None, + maybe_constraints_file_path=(constraints_file.path if constraints_file else None), ) is_tool = isinstance(lockfile, (ToolCustomLockfile, ToolDefaultLockfile)) msg_iter = ( diff --git a/src/python/pants/backend/python/util_rules/pex_requirements_test.py b/src/python/pants/backend/python/util_rules/pex_requirements_test.py index 0e8b3679c991..adfa8bb8c338 100644 --- a/src/python/pants/backend/python/util_rules/pex_requirements_test.py +++ b/src/python/pants/backend/python/util_rules/pex_requirements_test.py @@ -13,6 +13,7 @@ from pants.backend.python.util_rules.lockfile_metadata import PythonLockfileMetadataV3 from pants.backend.python.util_rules.pex_requirements import ( Lockfile, + ResolvePexConstraintsFile, ToolCustomLockfile, ToolDefaultLockfile, _pex_lockfile_requirement_count, @@ -22,13 +23,14 @@ validate_metadata, ) from pants.engine.fs import FileContent +from pants.engine.internals.native_engine import EMPTY_DIGEST from pants.testutil.option_util import create_subsystem from pants.util.ordered_set import FrozenOrderedSet METADATA = PythonLockfileMetadataV3( InterpreterConstraints(["==3.8.*"]), {PipRequirement.parse("ansicolors"), PipRequirement.parse("requests")}, - constraints_file_hash="abc", + requirement_constraints={PipRequirement.parse("abc")}, ) @@ -123,7 +125,11 @@ def test_validate_tool_lockfiles( requirements, req_strings, create_python_setup(InvalidLockfileBehavior.warn), - constraints_file_path_and_hash=("c.txt", "xyz" if invalid_constraints_file else "abc"), + constraints_file=ResolvePexConstraintsFile( + EMPTY_DIGEST, + "c.txt", + FrozenOrderedSet({PipRequirement.parse("xyz" if invalid_constraints_file else "abc")}), + ), ) def contains(msg: str, if_: bool) -> None: @@ -201,7 +207,11 @@ def test_validate_user_lockfiles( lockfile, req_strings, create_python_setup(InvalidLockfileBehavior.warn), - constraints_file_path_and_hash=("c.txt", "xyz" if invalid_constraints_file else "abc"), + 
constraints_file=ResolvePexConstraintsFile( + EMPTY_DIGEST, + "c.txt", + FrozenOrderedSet({PipRequirement.parse("xyz" if invalid_constraints_file else "abc")}), + ), ) def contains(msg: str, if_: bool = True) -> None: diff --git a/src/python/pants/backend/python/util_rules/pex_test.py b/src/python/pants/backend/python/util_rules/pex_test.py index 0b6eecada609..6ca8e5788d50 100644 --- a/src/python/pants/backend/python/util_rules/pex_test.py +++ b/src/python/pants/backend/python/util_rules/pex_test.py @@ -745,7 +745,7 @@ def test_lockfile_validation(rule_runner: RuleRunner) -> None: lock_content = PythonLockfileMetadata.new( valid_for_interpreter_constraints=InterpreterConstraints(), requirements=set(), - constraints_file_hash=None, + requirement_constraints=set(), ).add_header_to_lockfile(b"", regenerate_command="regen", delimeter="#") rule_runner.write_files({"lock.txt": lock_content.decode()}) diff --git a/src/python/pants/core/goals/update_build_files_test.py b/src/python/pants/core/goals/update_build_files_test.py index d60fabd70c46..dd732bdade95 100644 --- a/src/python/pants/core/goals/update_build_files_test.py +++ b/src/python/pants/core/goals/update_build_files_test.py @@ -145,7 +145,7 @@ def test_find_python_interpreter_constraints_from_lockfile() -> None: default_metadata = PythonLockfileMetadata.new( valid_for_interpreter_constraints=InterpreterConstraints(["==2.7.*"]), requirements=set(), - constraints_file_hash=None, + requirement_constraints=set(), ) def assert_ics(
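Reviewer note (not part of the patch): below is a minimal, self-contained sketch of the idea behind this change -- recording and comparing the parsed constraints themselves instead of a hash of the constraints file, so the lockfile header stays readable in diffs and only meaningful edits invalidate it. It uses `packaging.requirements.Requirement` as a stand-in for Pants' `PipRequirement`; the helper name and sample data are illustrative only, not the real Pants API.

    from packaging.requirements import Requirement

    def constraints_changed(lockfile_constraints: set[str], constraints_file_text: str) -> bool:
        """Return True if the constraints file no longer matches the lockfile metadata."""
        # Parse the current constraints file, skipping comments and blank lines.
        current = {
            str(Requirement(line.strip()))
            for line in constraints_file_text.splitlines()
            if line.strip() and not line.lstrip().startswith("#")
        }
        recorded = {str(Requirement(c)) for c in lockfile_constraints}
        # Set comparison ignores ordering, comments, and whitespace, so cosmetic edits
        # to constraints.txt do not invalidate the lockfile -- unlike a file hash,
        # which changes on any byte difference.
        return current != recorded

    # Reordering lines or adding a comment does not trigger invalidation...
    assert not constraints_changed(
        {"ansicolors==1.1.7", "requests==2.28.1"},
        "# pinned by the platform team\nrequests==2.28.1\nansicolors==1.1.7\n",
    )
    # ...but changing a pin does.
    assert constraints_changed({"ansicolors==1.1.7"}, "ansicolors==1.1.8\n")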