From 3d7a2955842f54467a569e4e5b74dd8ba9f5d67e Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Mon, 11 Feb 2019 22:02:21 -0800 Subject: [PATCH 1/8] add a TypedCollection type constraint to reduce boilerplate for datatype tuple fields (#7115) ### Problem *Resolves #6936.* There's been a [TODO in `pants.util.objects.Collection`](https://github.com/pantsbuild/pants/blob/c342fd3432aa0d73e402d2db7e013ecfcc76e9c8/src/python/pants/util/objects.py#L413) for a while to typecheck datatype tuple fields. #6936 has some thoughts on how to do this, but after realizing I could split `TypeConstraint` into a base class and then introduce `TypeOnlyConstraint` for type constraints which act only on the type, I think that ticket is invalidated, as this solution is much cleaner. ### Solution - Split out the logic for basic type checking (without looking at the object itself) into a `TypeOnlyConstraint` class, which `Exactly` and friends inherit from. - Create the `TypedCollection` type constraint, which checks that its argument is iterable and then validates each element of the collection against a `TypeOnlyConstraint` constructor argument. - Note that `TypedCollection` is a `TypeConstraint`, but not a `TypeOnlyConstraint`, as it has to inspect the actual object to determine whether each element matches the provided `TypeOnlyConstraint`. - Move `pants.util.objects.Collection` into `src/python/pants/engine/objects.py`, as it is specifically for engine objects. - Use `TypedCollection` for the `dependencies` field of the datatype returned by `Collection.of()`. ### Result - `datatype` consumers and creators no longer need boilerplate for collection arguments, and those arguments are now typechecked and made hashable for free! ### TODO in followup: `wrapper_type` See #7172.
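Usage sketch (illustrative only, not part of this patch -- `IntColl` is a hypothetical example class, but the imports and the `TypeCheckError` behavior match the tests added below):

```python
from pants.engine.objects import Collection
from pants.util.objects import Exactly, TypeCheckError, TypedCollection, datatype


# A datatype field declared with a TypedCollection constraint is validated on
# construction: the argument must be iterable, and every element must satisfy
# the wrapped TypeOnlyConstraint (here, Exactly(int)).
class IntColl(datatype([('dependencies', TypedCollection(Exactly(int)))])):
  pass


IntColl([1, 2, 3])  # ok: iterable, and each element matches Exactly(int)

try:
  IntColl(3)  # rejected: 3 is not iterable
except TypeCheckError:
  pass

try:
  IntColl([1, 'two'])  # rejected: 'two' does not match Exactly(int)
except TypeCheckError:
  pass

# Collection.of() now gets the same element checking for free on its
# 'dependencies' field:
assert [1, 2] == [x for x in Collection.of(int)([1, 2])]
```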
--- src/python/pants/engine/BUILD | 5 + src/python/pants/engine/addressable.py | 4 +- src/python/pants/engine/fs.py | 3 +- src/python/pants/engine/legacy/BUILD | 1 + src/python/pants/engine/legacy/graph.py | 3 +- src/python/pants/engine/objects.py | 41 ++++ src/python/pants/engine/scheduler.py | 3 +- src/python/pants/util/objects.py | 231 +++++++++++------- tests/python/pants_test/engine/BUILD | 10 + .../engine/legacy/test_graph_integration.py | 6 +- tests/python/pants_test/engine/test_engine.py | 12 +- tests/python/pants_test/engine/test_mapper.py | 2 +- .../python/pants_test/engine/test_objects.py | 33 +++ .../pants_test/engine/test_scheduler.py | 4 +- tests/python/pants_test/util/test_objects.py | 160 ++++++++++-- 15 files changed, 384 insertions(+), 134 deletions(-) create mode 100644 tests/python/pants_test/engine/test_objects.py diff --git a/src/python/pants/engine/BUILD b/src/python/pants/engine/BUILD index 168201a5719..cab6d0cbcee 100644 --- a/src/python/pants/engine/BUILD +++ b/src/python/pants/engine/BUILD @@ -50,6 +50,7 @@ python_library( dependencies=[ '3rdparty/python/twitter/commons:twitter.common.collections', '3rdparty/python:future', + ':objects', ':rules', ':selectors', 'src/python/pants/base:project_tree', @@ -121,7 +122,10 @@ python_library( name='objects', sources=['objects.py'], dependencies=[ + '3rdparty/python:future', 'src/python/pants/util:meta', + 'src/python/pants/util:memo', + 'src/python/pants/util:objects', ] ) @@ -172,6 +176,7 @@ python_library( ':isolated_process', ':native', ':nodes', + ':objects', ':rules', 'src/python/pants/base:exceptions', 'src/python/pants/base:specs', diff --git a/src/python/pants/engine/addressable.py b/src/python/pants/engine/addressable.py index 508a9471c61..25f4233c701 100644 --- a/src/python/pants/engine/addressable.py +++ b/src/python/pants/engine/addressable.py @@ -11,9 +11,9 @@ from future.utils import string_types from pants.build_graph.address import Address, BuildFileAddress -from pants.engine.objects import Resolvable, Serializable +from pants.engine.objects import Collection, Resolvable, Serializable from pants.util.collections_abc_backport import MutableMapping, MutableSequence -from pants.util.objects import Collection, TypeConstraintError +from pants.util.objects import TypeConstraintError Addresses = Collection.of(Address) diff --git a/src/python/pants/engine/fs.py b/src/python/pants/engine/fs.py index c43b41cf858..c290c87aadf 100644 --- a/src/python/pants/engine/fs.py +++ b/src/python/pants/engine/fs.py @@ -7,10 +7,11 @@ from future.utils import binary_type, text_type from pants.base.project_tree import Dir, File +from pants.engine.objects import Collection from pants.engine.rules import RootRule from pants.option.custom_types import GlobExpansionConjunction from pants.option.global_options import GlobMatchErrorBehavior -from pants.util.objects import Collection, datatype +from pants.util.objects import datatype class FileContent(datatype([('path', text_type), ('content', binary_type)])): diff --git a/src/python/pants/engine/legacy/BUILD b/src/python/pants/engine/legacy/BUILD index cb8086944d8..8db00aa36d4 100644 --- a/src/python/pants/engine/legacy/BUILD +++ b/src/python/pants/engine/legacy/BUILD @@ -75,6 +75,7 @@ python_library( 'src/python/pants/build_graph', 'src/python/pants/engine:build_files', 'src/python/pants/engine:mapper', + 'src/python/pants/engine:objects', 'src/python/pants/engine:parser', 'src/python/pants/engine:selectors', 'src/python/pants/option', diff --git 
a/src/python/pants/engine/legacy/graph.py b/src/python/pants/engine/legacy/graph.py index 72368f1a6db..7bd0efbeaf8 100644 --- a/src/python/pants/engine/legacy/graph.py +++ b/src/python/pants/engine/legacy/graph.py @@ -26,13 +26,14 @@ from pants.engine.legacy.address_mapper import LegacyAddressMapper from pants.engine.legacy.structs import BundleAdaptor, BundlesField, SourcesField, TargetAdaptor from pants.engine.mapper import AddressMapper +from pants.engine.objects import Collection from pants.engine.parser import SymbolTable, TargetAdaptorContainer from pants.engine.rules import RootRule, rule from pants.engine.selectors import Get, Select from pants.option.global_options import GlobMatchErrorBehavior from pants.source.filespec import any_matches_filespec from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper -from pants.util.objects import Collection, datatype +from pants.util.objects import datatype logger = logging.getLogger(__name__) diff --git a/src/python/pants/engine/objects.py b/src/python/pants/engine/objects.py index a0b0e784a7c..48b017d688d 100644 --- a/src/python/pants/engine/objects.py +++ b/src/python/pants/engine/objects.py @@ -5,10 +5,16 @@ from __future__ import absolute_import, division, print_function, unicode_literals import inspect +import sys from abc import abstractmethod, abstractproperty +from builtins import object from collections import namedtuple +from future.utils import PY2 + +from pants.util.memo import memoized_classmethod from pants.util.meta import AbstractClass +from pants.util.objects import Exactly, TypedCollection, datatype class SerializationError(Exception): @@ -146,3 +152,38 @@ def validate(self): :raises: :class:`ValidationError` if this object is invalid. """ + + +class Collection(object): + """Constructs classes representing collections of objects of a particular type. + + The produced class will expose its values under a field named dependencies - this is a stable API + which may be consumed e.g. over FFI from the engine. + + Python consumers of a Collection should prefer to use its standard iteration API. + + Note that elements of a Collection are type-checked upon construction. + """ + + @memoized_classmethod + def of(cls, *element_types): + union = '|'.join(element_type.__name__ for element_type in element_types) + type_name = '{}.of({})'.format(cls.__name__, union) + if PY2: + type_name = type_name.encode('utf-8') + type_checked_collection_class = datatype([ + # Create a datatype with a single field 'dependencies' which is type-checked on construction + # to be a collection containing elements of only the exact `element_types` specified. + ('dependencies', TypedCollection(Exactly(*element_types))) + ], superclass_name=cls.__name__) + supertypes = (cls, type_checked_collection_class) + properties = {'element_types': element_types} + collection_of_type = type(type_name, supertypes, properties) + + # Expose the custom class type at the module level to be pickle compatible. 
+ setattr(sys.modules[cls.__module__], type_name, collection_of_type) + + return collection_of_type + + def __iter__(self): + return iter(self.dependencies) diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 72d04e661f8..fa4688c64ab 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -19,12 +19,13 @@ from pants.engine.isolated_process import ExecuteProcessRequest, FallibleExecuteProcessResult from pants.engine.native import Function, TypeConstraint, TypeId from pants.engine.nodes import Return, Throw +from pants.engine.objects import Collection from pants.engine.rules import RuleIndex, SingletonRule, TaskRule from pants.engine.selectors import Params, Select, constraint_for from pants.rules.core.exceptions import GracefulTerminationException from pants.util.contextutil import temporary_file_path from pants.util.dirutil import check_no_overlapping_paths -from pants.util.objects import Collection, datatype +from pants.util.objects import datatype from pants.util.strutil import pluralize diff --git a/src/python/pants/util/objects.py b/src/python/pants/util/objects.py index ddc51f388fb..7c29fe1fefe 100644 --- a/src/python/pants/util/objects.py +++ b/src/python/pants/util/objects.py @@ -4,17 +4,15 @@ from __future__ import absolute_import, division, print_function, unicode_literals -import sys from abc import abstractmethod -from builtins import object, zip +from builtins import zip from collections import namedtuple -from future.utils import PY2 from twitter.common.collections import OrderedSet -from pants.util.collections_abc_backport import OrderedDict -from pants.util.memo import memoized, memoized_classproperty -from pants.util.meta import AbstractClass +from pants.util.collections_abc_backport import Iterable, OrderedDict +from pants.util.memo import memoized_classproperty +from pants.util.meta import AbstractClass, classproperty def datatype(field_decls, superclass_name=None, **kwargs): @@ -266,6 +264,7 @@ def __init__(self, type_name, msg, *args, **kwargs): type_name, formatted_msg, *args, **kwargs) +# TODO: make these members of the `TypeConstraint` class! class TypeConstraintError(TypeError): """Indicates a :class:`TypeConstraint` violation.""" @@ -273,43 +272,99 @@ class TypeConstraintError(TypeError): class TypeConstraint(AbstractClass): """Represents a type constraint. - Not intended for direct use; instead, use one of :class:`SuperclassesOf`, :class:`Exact` or + Not intended for direct use; instead, use one of :class:`SuperclassesOf`, :class:`Exactly` or :class:`SubclassesOf`. """ - def __init__(self, *types, **kwargs): + def __init__(self, description): """Creates a type constraint centered around the given types. The type constraint is satisfied as a whole if satisfied for at least one of the given types. - :param type *types: The focus of this type constraint. - :param str description: A description for this constraint if the list of types is too long. + :param str description: A concise, readable description of what the type constraint represents. + Used directly as the __str__ implementation. """ + self._description = description + + @abstractmethod + def satisfied_by(self, obj): + """Return `True` if the given object satisfies this type constraint. + + :rtype: bool + """ + + def make_type_constraint_error(self, obj, constraint): + return TypeConstraintError( + "value {!r} (with type {!r}) must satisfy this type constraint: {}." 
+ .format(obj, type(obj).__name__, constraint)) + + # TODO: disallow overriding this method with some form of mixin/decorator along with datatype + # __eq__! + def validate_satisfied_by(self, obj): + """Return `obj` if the object satisfies this type constraint, or raise. + + :raises: `TypeConstraintError` if `obj` does not satisfy the constraint. + """ + + if self.satisfied_by(obj): + return obj + + raise self.make_type_constraint_error(obj, self) + + def __ne__(self, other): + return not (self == other) + + def __str__(self): + return self._description + + +class TypeOnlyConstraint(TypeConstraint): + """A `TypeConstraint` predicated only on the object's type. + + `TypeConstraint` subclasses may override `.satisfied_by()` to perform arbitrary validation on the + object itself -- however, this class implements `.satisfied_by()` with a guarantee that it will + only act on the object's `type` via `.satisfied_by_type()`. This kind of type checking is faster + and easier to understand than the more complex validation allowed by `.satisfied_by()`. + """ + + # TODO: make an @abstract_classproperty decorator to do this boilerplate! + @classproperty + def _variance_symbol(cls): + """This is propagated to the `TypeConstraint` constructor.""" + raise NotImplementedError('{} must implement the _variance_symbol classproperty!' + .format(cls.__name__)) + + def __init__(self, *types): + """Creates a type constraint based on some logic to match the given types. + + NB: A `TypeOnlyConstraint` implementation should ensure that the type constraint is satisfied as + a whole if satisfied for at least one of the given `types`. + + :param type *types: The types this constraint will match in some way. + """ + + if not types: raise ValueError('Must supply at least one type') if any(not isinstance(t, type) for t in types): raise TypeError('Supplied types must be types. {!r}'.format(types)) - # NB: `types` is converted to tuple here because self.types's docstring says - # it returns a tuple. Does it matter what type this field is? + if len(types) == 1: + type_list = types[0].__name__ + else: + type_list = ' or '.join(t.__name__ for t in types) + description = '{}({})'.format(type(self).__name__, type_list) + + super(TypeOnlyConstraint, self).__init__(description=description) + + # NB: This is made into a tuple so that we can use self._types in issubclass() and others! self._types = tuple(types) - self._desc = kwargs.get('description', None) + # TODO(#7114): remove this after the engine is converted to use `TypeId` instead of + # `TypeConstraint`! @property def types(self): - """Return the subject types of this type constraint. - - :type: tuple of type - """ return self._types - def satisfied_by(self, obj): - """Return `True` if the given object satisfies this type constraint. - - :rtype: bool - """ - return self.satisfied_by_type(type(obj)) - @abstractmethod def satisfied_by_type(self, obj_type): """Return `True` if the given object satisfies this type constraint. @@ -317,18 +372,8 @@ def satisfied_by_type(self, obj_type): :rtype: bool """ - def validate_satisfied_by(self, obj): - """Return `obj` if the object satisfies this type constraint, or raise. - - :raises: `TypeConstraintError` if `obj` does not satisfy the constraint. - """ - - if self.satisfied_by(obj): - return obj - - raise TypeConstraintError( "value {!r} (with type {!r}) must satisfy this type constraint: {!r}."
- .format(obj, type(obj).__name__, self)) + def satisfied_by(self, obj): + return self.satisfied_by_type(type(obj)) def __hash__(self): return hash((type(self), self._types)) @@ -336,44 +381,23 @@ def __hash__(self): def __eq__(self, other): return type(self) == type(other) and self._types == other._types - def __ne__(self, other): - return not (self == other) - - def __str__(self): - if self._desc: - constrained_type = '({})'.format(self._desc) - else: - if len(self._types) == 1: - constrained_type = self._types[0].__name__ - else: - constrained_type = '({})'.format(', '.join(t.__name__ for t in self._types)) - return '{variance_symbol}{constrained_type}'.format(variance_symbol=self._variance_symbol, - constrained_type=constrained_type) - def __repr__(self): - if self._desc: - constrained_type = self._desc - else: - constrained_type = ', '.join(t.__name__ for t in self._types) + constrained_type = ', '.join(t.__name__ for t in self._types) return ('{type_constraint_type}({constrained_type})' .format(type_constraint_type=type(self).__name__, - constrained_type=constrained_type)) + constrained_type=constrained_type)) -class SuperclassesOf(TypeConstraint): +class SuperclassesOf(TypeOnlyConstraint): """Objects of the exact type as well as any super-types are allowed.""" - _variance_symbol = '-' - def satisfied_by_type(self, obj_type): return any(issubclass(t, obj_type) for t in self._types) -class Exactly(TypeConstraint): +class Exactly(TypeOnlyConstraint): """Only objects of the exact type are allowed.""" - _variance_symbol = '=' - def satisfied_by_type(self, obj_type): return obj_type in self._types @@ -384,41 +408,66 @@ def graph_str(self): return repr(self) -class SubclassesOf(TypeConstraint): +class SubclassesOf(TypeOnlyConstraint): """Objects of the exact type as well as any sub-types are allowed.""" - _variance_symbol = '+' - def satisfied_by_type(self, obj_type): return issubclass(obj_type, self._types) -class Collection(object): - """Constructs classes representing collections of objects of a particular type. +class TypedCollection(TypeConstraint): + """A `TypeConstraint` which accepts a TypeOnlyConstraint and validates a collection.""" - The produced class will expose its values under a field named dependencies - this is a stable API - which may be consumed e.g. over FFI from the engine. + _iterable_constraint = SubclassesOf(Iterable) - Python consumers of a Collection should prefer to use its standard iteration API. - """ - # TODO: could we check that the input is iterable in the ctor? - - @classmethod - @memoized - def of(cls, *element_types): - union = '|'.join(element_type.__name__ for element_type in element_types) - type_name = '{}.of({})'.format(cls.__name__, union) - if PY2: - type_name = type_name.encode('utf-8') - # TODO: could we allow type checking in the datatype() invocation here? - supertypes = (cls, datatype(['dependencies'], superclass_name='Collection')) - properties = {'element_types': element_types} - collection_of_type = type(type_name, supertypes, properties) - - # Expose the custom class type at the module level to be pickle compatible. - setattr(sys.modules[cls.__module__], type_name, collection_of_type) - - return collection_of_type - - def __iter__(self): - return iter(self.dependencies) + def __init__(self, constraint): + """Create a `TypeConstraint` which validates each member of a collection with `constraint`. + + :param TypeOnlyConstraint constraint: the `TypeConstraint` to apply to each element. 
This is + currently required to be a `TypeOnlyConstraint` to avoid + complex prototypal type relationships. + """ + + if not isinstance(constraint, TypeOnlyConstraint): + raise TypeError("constraint for collection must be a {}! was: {}" + .format(TypeOnlyConstraint.__name__, constraint)) + + description = '{}({})'.format(type(self).__name__, constraint) + + self._constraint = constraint + + super(TypedCollection, self).__init__(description=description) + + # TODO: consider making this a private method of TypeConstraint, as it now duplicates the logic in + # self.validate_satisfied_by()! + def satisfied_by(self, obj): + if self._iterable_constraint.satisfied_by(obj): + return all(self._constraint.satisfied_by(el) for el in obj) + return False + + def make_collection_type_constraint_error(self, base_obj, el): + base_error = self.make_type_constraint_error(el, self._constraint) + return TypeConstraintError("in wrapped constraint {} matching iterable object {}: {}" + .format(self, base_obj, base_error)) + + def validate_satisfied_by(self, obj): + if self._iterable_constraint.satisfied_by(obj): + for el in obj: + if not self._constraint.satisfied_by(el): + raise self.make_collection_type_constraint_error(obj, el) + return obj + + base_iterable_error = self.make_type_constraint_error(obj, self._iterable_constraint) + raise TypeConstraintError( + "in wrapped constraint {}: {}".format(self, base_iterable_error)) + + def __hash__(self): + return hash((type(self), self._constraint)) + + def __eq__(self, other): + return type(self) == type(other) and self._constraint == other._constraint + + def __repr__(self): + return ('{type_constraint_type}({constraint!r})' + .format(type_constraint_type=type(self).__name__, + constraint=self._constraint)) diff --git a/tests/python/pants_test/engine/BUILD b/tests/python/pants_test/engine/BUILD index c5849d62d7d..38b4d2cb4ad 100644 --- a/tests/python/pants_test/engine/BUILD +++ b/tests/python/pants_test/engine/BUILD @@ -156,6 +156,7 @@ python_tests( 'src/python/pants/build_graph', 'src/python/pants/engine:build_files', 'src/python/pants/engine:mapper', + 'src/python/pants/engine:objects', 'src/python/pants/engine:struct', 'src/python/pants/util:dirutil', 'tests/python/pants_test/engine/examples:mapper_test', @@ -232,3 +233,12 @@ python_library( 'src/python/pants/util:dirutil', ] ) + +python_tests( + name='objects', + sources=['test_objects.py'], + dependencies=[ + 'src/python/pants/engine:objects', + 'tests/python/pants_test:test_base', + ], +) diff --git a/tests/python/pants_test/engine/legacy/test_graph_integration.py b/tests/python/pants_test/engine/legacy/test_graph_integration.py index dc8751c068e..2f9b9b167d3 100644 --- a/tests/python/pants_test/engine/legacy/test_graph_integration.py +++ b/tests/python/pants_test/engine/legacy/test_graph_integration.py @@ -61,12 +61,12 @@ def _list_target_check_warnings_sources(self, target_name): _ERR_TARGETS = { 'testprojects/src/python/sources:some-missing-some-not': [ "globs('*.txt', '*.rs')", - "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.txt\', {unicode_literal}\'testprojects/src/python/sources/*.rs\'), exclude=(), glob_match_error_behavior<=GlobMatchErrorBehavior>=GlobMatchErrorBehavior(failure_behavior=error), conjunction<=GlobExpansionConjunction>=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), + "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.txt\', 
{unicode_literal}\'testprojects/src/python/sources/*.rs\'), exclude=(), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=error), conjunction=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), "Globs did not match. Excludes were: []. Unmatched globs were: [\"testprojects/src/python/sources/*.rs\"].", ], 'testprojects/src/python/sources:missing-sources': [ "*.scala", - "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.scala\',), exclude=({unicode_literal}\'testprojects/src/python/sources/*Test.scala\', {unicode_literal}\'testprojects/src/python/sources/*Spec.scala\'), glob_match_error_behavior<=GlobMatchErrorBehavior>=GlobMatchErrorBehavior(failure_behavior=error), conjunction<=GlobExpansionConjunction>=GlobExpansionConjunction(conjunction=any_match)))".format(unicode_literal='u' if PY2 else ''), + "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.scala\',), exclude=({unicode_literal}\'testprojects/src/python/sources/*Test.scala\', {unicode_literal}\'testprojects/src/python/sources/*Spec.scala\'), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=error), conjunction=GlobExpansionConjunction(conjunction=any_match)))".format(unicode_literal='u' if PY2 else ''), "Globs did not match. Excludes were: [\"testprojects/src/python/sources/*Test.scala\", \"testprojects/src/python/sources/*Spec.scala\"]. Unmatched globs were: [\"testprojects/src/python/sources/*.scala\"].", ], 'testprojects/src/java/org/pantsbuild/testproject/bundle:missing-bundle-fileset': [ @@ -75,7 +75,7 @@ def _list_target_check_warnings_sources(self, target_name): "Globs('*.aaaa')", "ZGlobs('**/*.abab')", "['file1.aaaa', 'file2.aaaa']", - "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/java/org/pantsbuild/testproject/bundle/*.aaaa\',), exclude=(), glob_match_error_behavior<=GlobMatchErrorBehavior>=GlobMatchErrorBehavior(failure_behavior=error), conjunction<=GlobExpansionConjunction>=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), + "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/java/org/pantsbuild/testproject/bundle/*.aaaa\',), exclude=(), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=error), conjunction=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), "Globs did not match. Excludes were: []. 
Unmatched globs were: [\"testprojects/src/java/org/pantsbuild/testproject/bundle/*.aaaa\"].", ] } diff --git a/tests/python/pants_test/engine/test_engine.py b/tests/python/pants_test/engine/test_engine.py index 1944c2e3260..9ad331b0aa9 100644 --- a/tests/python/pants_test/engine/test_engine.py +++ b/tests/python/pants_test/engine/test_engine.py @@ -123,8 +123,8 @@ def test_include_trace_error_raises_error_with_trace(self): self.assert_equal_with_printing(dedent(''' 1 Exception encountered: - Computing Select(, =A) - Computing Task(nested_raise, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(nested_raise, , Exactly(A), true) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call @@ -175,8 +175,8 @@ def a_from_c_and_d(c, d): self.assert_equal_with_printing(dedent(''' 1 Exception encountered: - Computing Select(, =A) - Computing Task(a_from_c_and_d, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(a_from_c_and_d, , Exactly(A), true) Computing Task(d_from_b_nested_raise, , =D, true) Throw(An exception for B) Traceback (most recent call last): @@ -189,8 +189,8 @@ def a_from_c_and_d(c, d): Exception: An exception for B - Computing Select(, =A) - Computing Task(a_from_c_and_d, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(a_from_c_and_d, , Exactly(A), true) Computing Task(c_from_b_nested_raise, , =C, true) Throw(An exception for B) Traceback (most recent call last): diff --git a/tests/python/pants_test/engine/test_mapper.py b/tests/python/pants_test/engine/test_mapper.py index 3c8ce754d36..716d0bcd5ec 100644 --- a/tests/python/pants_test/engine/test_mapper.py +++ b/tests/python/pants_test/engine/test_mapper.py @@ -17,12 +17,12 @@ from pants.engine.fs import create_fs_rules from pants.engine.mapper import (AddressFamily, AddressMap, AddressMapper, DifferingFamiliesError, DuplicateNameError, UnaddressableObjectError) +from pants.engine.objects import Collection from pants.engine.parser import SymbolTable from pants.engine.rules import rule from pants.engine.selectors import Get, Select from pants.engine.struct import Struct from pants.util.dirutil import safe_open -from pants.util.objects import Collection from pants_test.engine.examples.parsers import JsonParser from pants_test.engine.scheduler_test_base import SchedulerTestBase from pants_test.engine.util import Target, TargetTable diff --git a/tests/python/pants_test/engine/test_objects.py b/tests/python/pants_test/engine/test_objects.py new file mode 100644 index 00000000000..0a194edeab8 --- /dev/null +++ b/tests/python/pants_test/engine/test_objects.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ +from __future__ import absolute_import, division, print_function, unicode_literals + +import re + +from future.utils import PY3, text_type + +from pants.engine.objects import Collection +from pants.util.objects import TypeCheckError +from pants_test.test_base import TestBase + + +class CollectionTest(TestBase): + def test_collection_iteration(self): + self.assertEqual([1, 2], [x for x in Collection.of(int)([1, 2])]) + + def test_element_typechecking(self): + IntColl = Collection.of(int) + with self.assertRaisesRegexp(TypeCheckError, re.escape("""\ +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'hello']: value {u}'hello' (with type '{string_type}') must satisfy this type constraint: Exactly(int).""" + .format(u='' if PY3 else 'u', + string_type='str' if PY3 else 'unicode'))): + IntColl([3, "hello"]) + + IntOrStringColl = Collection.of(int, text_type) + self.assertEqual([3, "hello"], [x for x in IntOrStringColl([3, "hello"])]) + with self.assertRaisesRegexp(TypeCheckError, re.escape("""\ +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int or {string_type})) matching iterable object [()]: value () (with type 'tuple') must satisfy this type constraint: Exactly(int or {string_type}).""" + .format(string_type='str' if PY3 else 'unicode'))): + IntOrStringColl([()]) diff --git a/tests/python/pants_test/engine/test_scheduler.py b/tests/python/pants_test/engine/test_scheduler.py index 108d2bda5cc..586fbfb6e00 100644 --- a/tests/python/pants_test/engine/test_scheduler.py +++ b/tests/python/pants_test/engine/test_scheduler.py @@ -122,8 +122,8 @@ def test_trace_includes_rule_exception_traceback(self): trace = remove_locations_from_traceback(trace) assert_equal_with_printing(self, dedent(''' - Computing Select(, =A) - Computing Task(nested_raise, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(nested_raise, , Exactly(A), true) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call diff --git a/tests/python/pants_test/util/test_objects.py b/tests/python/pants_test/util/test_objects.py index 4fb24ebbf34..9bf68ccb21b 100644 --- a/tests/python/pants_test/util/test_objects.py +++ b/tests/python/pants_test/util/test_objects.py @@ -12,19 +12,23 @@ from future.utils import PY2, PY3, text_type -from pants.util.objects import (Collection, Exactly, SubclassesOf, SuperclassesOf, TypeCheckError, +from pants.util.objects import (Exactly, SubclassesOf, SuperclassesOf, TypeCheckError, + TypeConstraintError, TypedCollection, TypedDatatypeInstanceConstructionError, datatype, enum) from pants_test.test_base import TestBase -class CollectionTest(TestBase): - def test_collection_iteration(self): - self.assertEqual([1, 2], [x for x in Collection.of(int)([1, 2])]) - - class TypeConstraintTestBase(TestBase): class A(object): - pass + + def __repr__(self): + return '{}()'.format(type(self).__name__) + + def __str__(self): + return '(str form): {}'.format(repr(self)) + + def __eq__(self, other): + return type(self) == type(other) class B(A): pass @@ -41,9 +45,17 @@ def test_none(self): with self.assertRaises(ValueError): SubclassesOf() + def test_str_and_repr(self): + superclasses_of_b = SuperclassesOf(self.B) + self.assertEqual("SuperclassesOf(B)", str(superclasses_of_b)) + self.assertEqual("SuperclassesOf(B)", repr(superclasses_of_b)) + + superclasses_of_multiple = SuperclassesOf(self.A, self.B) + self.assertEqual("SuperclassesOf(A or B)", str(superclasses_of_multiple)) + 
self.assertEqual("SuperclassesOf(A, B)", repr(superclasses_of_multiple)) + def test_single(self): superclasses_of_b = SuperclassesOf(self.B) - self.assertEqual((self.B,), superclasses_of_b.types) self.assertTrue(superclasses_of_b.satisfied_by(self.A())) self.assertTrue(superclasses_of_b.satisfied_by(self.B())) self.assertFalse(superclasses_of_b.satisfied_by(self.BPrime())) @@ -51,12 +63,19 @@ def test_single(self): def test_multiple(self): superclasses_of_a_or_b = SuperclassesOf(self.A, self.B) - self.assertEqual((self.A, self.B), superclasses_of_a_or_b.types) self.assertTrue(superclasses_of_a_or_b.satisfied_by(self.A())) self.assertTrue(superclasses_of_a_or_b.satisfied_by(self.B())) self.assertFalse(superclasses_of_a_or_b.satisfied_by(self.BPrime())) self.assertFalse(superclasses_of_a_or_b.satisfied_by(self.C())) + def test_validate(self): + superclasses_of_a_or_b = SuperclassesOf(self.A, self.B) + self.assertEqual(self.A(), superclasses_of_a_or_b.validate_satisfied_by(self.A())) + self.assertEqual(self.B(), superclasses_of_a_or_b.validate_satisfied_by(self.B())) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("value C() (with type 'C') must satisfy this type constraint: SuperclassesOf(A or B).")): + superclasses_of_a_or_b.validate_satisfied_by(self.C()) + class ExactlyTest(TypeConstraintTestBase): def test_none(self): @@ -65,7 +84,6 @@ def test_none(self): def test_single(self): exactly_b = Exactly(self.B) - self.assertEqual((self.B,), exactly_b.types) self.assertFalse(exactly_b.satisfied_by(self.A())) self.assertTrue(exactly_b.satisfied_by(self.B())) self.assertFalse(exactly_b.satisfied_by(self.BPrime())) @@ -73,7 +91,6 @@ def test_single(self): def test_multiple(self): exactly_a_or_b = Exactly(self.A, self.B) - self.assertEqual((self.A, self.B), exactly_a_or_b.types) self.assertTrue(exactly_a_or_b.satisfied_by(self.A())) self.assertTrue(exactly_a_or_b.satisfied_by(self.B())) self.assertFalse(exactly_a_or_b.satisfied_by(self.BPrime())) @@ -84,31 +101,43 @@ def test_disallows_unsplatted_lists(self): Exactly([1]) def test_str_and_repr(self): - exactly_b_types = Exactly(self.B, description='B types') - self.assertEqual("=(B types)", str(exactly_b_types)) - self.assertEqual("Exactly(B types)", repr(exactly_b_types)) - exactly_b = Exactly(self.B) - self.assertEqual("=B", str(exactly_b)) + self.assertEqual("Exactly(B)", str(exactly_b)) self.assertEqual("Exactly(B)", repr(exactly_b)) exactly_multiple = Exactly(self.A, self.B) - self.assertEqual("=(A, B)", str(exactly_multiple)) + self.assertEqual("Exactly(A or B)", str(exactly_multiple)) self.assertEqual("Exactly(A, B)", repr(exactly_multiple)) def test_checking_via_bare_type(self): self.assertTrue(Exactly(self.B).satisfied_by_type(self.B)) self.assertFalse(Exactly(self.B).satisfied_by_type(self.C)) + def test_validate(self): + exactly_a_or_b = Exactly(self.A, self.B) + self.assertEqual(self.A(), exactly_a_or_b.validate_satisfied_by(self.A())) + self.assertEqual(self.B(), exactly_a_or_b.validate_satisfied_by(self.B())) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("value C() (with type 'C') must satisfy this type constraint: Exactly(A or B).")): + exactly_a_or_b.validate_satisfied_by(self.C()) + class SubclassesOfTest(TypeConstraintTestBase): def test_none(self): with self.assertRaises(ValueError): SubclassesOf() + def test_str_and_repr(self): + subclasses_of_b = SubclassesOf(self.B) + self.assertEqual("SubclassesOf(B)", str(subclasses_of_b)) + self.assertEqual("SubclassesOf(B)", repr(subclasses_of_b)) + + 
subclasses_of_multiple = SubclassesOf(self.A, self.B) + self.assertEqual("SubclassesOf(A or B)", str(subclasses_of_multiple)) + self.assertEqual("SubclassesOf(A, B)", repr(subclasses_of_multiple)) + def test_single(self): subclasses_of_b = SubclassesOf(self.B) - self.assertEqual((self.B,), subclasses_of_b.types) self.assertFalse(subclasses_of_b.satisfied_by(self.A())) self.assertTrue(subclasses_of_b.satisfied_by(self.B())) self.assertFalse(subclasses_of_b.satisfied_by(self.BPrime())) @@ -116,12 +145,62 @@ def test_single(self): def test_multiple(self): subclasses_of_b_or_c = SubclassesOf(self.B, self.C) - self.assertEqual((self.B, self.C), subclasses_of_b_or_c.types) self.assertTrue(subclasses_of_b_or_c.satisfied_by(self.B())) self.assertTrue(subclasses_of_b_or_c.satisfied_by(self.C())) self.assertFalse(subclasses_of_b_or_c.satisfied_by(self.BPrime())) self.assertFalse(subclasses_of_b_or_c.satisfied_by(self.A())) + def test_validate(self): + subclasses_of_a_or_b = SubclassesOf(self.A, self.B) + self.assertEqual(self.A(), subclasses_of_a_or_b.validate_satisfied_by(self.A())) + self.assertEqual(self.B(), subclasses_of_a_or_b.validate_satisfied_by(self.B())) + self.assertEqual(self.C(), subclasses_of_a_or_b.validate_satisfied_by(self.C())) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("value 1 (with type 'int') must satisfy this type constraint: SubclassesOf(A or B).")): + subclasses_of_a_or_b.validate_satisfied_by(1) + + +class TypedCollectionTest(TypeConstraintTestBase): + def test_str_and_repr(self): + collection_of_exactly_b = TypedCollection(Exactly(self.B)) + self.assertEqual("TypedCollection(Exactly(B))", str(collection_of_exactly_b)) + self.assertEqual("TypedCollection(Exactly(B))", repr(collection_of_exactly_b)) + + collection_of_multiple_subclasses = TypedCollection( + SubclassesOf(self.A, self.B)) + self.assertEqual("TypedCollection(SubclassesOf(A or B))", + str(collection_of_multiple_subclasses)) + self.assertEqual("TypedCollection(SubclassesOf(A, B))", + repr(collection_of_multiple_subclasses)) + + def test_collection_single(self): + collection_constraint = TypedCollection(Exactly(self.A)) + self.assertTrue(collection_constraint.satisfied_by([self.A()])) + self.assertFalse(collection_constraint.satisfied_by([self.A(), self.B()])) + self.assertTrue(collection_constraint.satisfied_by([self.A(), self.A()])) + + def test_collection_multiple(self): + collection_constraint = TypedCollection(SubclassesOf(self.B, self.BPrime)) + self.assertTrue(collection_constraint.satisfied_by([self.B(), self.C(), self.BPrime()])) + self.assertFalse(collection_constraint.satisfied_by([self.B(), self.A()])) + + def test_no_complex_sub_constraint(self): + sub_collection = TypedCollection(Exactly(self.A)) + with self.assertRaisesRegexp(TypeError, re.escape( + "constraint for collection must be a TypeOnlyConstraint! 
was: {}".format(sub_collection))): + TypedCollection(sub_collection) + + def test_validate(self): + collection_exactly_a_or_b = TypedCollection(Exactly(self.A, self.B)) + self.assertEqual([self.A()], collection_exactly_a_or_b.validate_satisfied_by([self.A()])) + self.assertEqual([self.B()], collection_exactly_a_or_b.validate_satisfied_by([self.B()])) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("in wrapped constraint TypedCollection(Exactly(A or B)): value A() (with type 'A') must satisfy this type constraint: SubclassesOf(Iterable).")): + collection_exactly_a_or_b.validate_satisfied_by(self.A()) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("in wrapped constraint TypedCollection(Exactly(A or B)) matching iterable object [C()]: value C() (with type 'C') must satisfy this type constraint: Exactly(A or B).")): + collection_exactly_a_or_b.validate_satisfied_by([self.C()]) + class ExportedDatatype(datatype(['val'])): pass @@ -175,6 +254,12 @@ def __repr__(self): class WithSubclassTypeConstraint(datatype([('some_value', SubclassesOf(SomeBaseClass))])): pass +class WithCollectionTypeConstraint(datatype([ + ('dependencies', TypedCollection(Exactly(int))), +])): + pass + + class NonNegativeInt(datatype([('an_int', int)])): """Example of overriding __new__() to perform deeper argument checking.""" @@ -392,7 +477,7 @@ def test_instance_construction_by_repr(self): some_val = SomeTypedDatatype(3) self.assertEqual(3, some_val.val) self.assertEqual(repr(some_val), "SomeTypedDatatype(val=3)") - self.assertEqual(str(some_val), "SomeTypedDatatype(val<=int>=3)") + self.assertEqual(str(some_val), "SomeTypedDatatype(val=3)") some_object = WithExplicitTypeConstraint(text_type('asdf'), 45) self.assertEqual(some_object.a_string, 'asdf') @@ -402,7 +487,7 @@ def compare_repr(include_unicode = False): .format(unicode_literal='u' if include_unicode else '') self.assertEqual(repr(some_object), expected_message) def compare_str(unicode_type_name): - expected_message = "WithExplicitTypeConstraint(a_string<={}>=asdf, an_int<=int>=45)".format(unicode_type_name) + expected_message = "WithExplicitTypeConstraint(a_string=asdf, an_int=45)".format(unicode_type_name) self.assertEqual(str(some_object), expected_message) if PY2: compare_str('unicode') @@ -414,7 +499,7 @@ def compare_str(unicode_type_name): some_nonneg_int = NonNegativeInt(an_int=3) self.assertEqual(3, some_nonneg_int.an_int) self.assertEqual(repr(some_nonneg_int), "NonNegativeInt(an_int=3)") - self.assertEqual(str(some_nonneg_int), "NonNegativeInt(an_int<=int>=3)") + self.assertEqual(str(some_nonneg_int), "NonNegativeInt(an_int=3)") wrapped_nonneg_int = CamelCaseWrapper(NonNegativeInt(45)) # test attribute naming for camel-cased types @@ -424,7 +509,7 @@ def compare_str(unicode_type_name): "CamelCaseWrapper(nonneg_int=NonNegativeInt(an_int=45))") self.assertEqual( str(wrapped_nonneg_int), - "CamelCaseWrapper(nonneg_int<=NonNegativeInt>=NonNegativeInt(an_int<=int>=45))") + "CamelCaseWrapper(nonneg_int=NonNegativeInt(an_int=45))") mixed_type_obj = MixedTyping(value=3, name=text_type('asdf')) self.assertEqual(3, mixed_type_obj.value) @@ -433,7 +518,7 @@ def compare_repr(include_unicode = False): .format(unicode_literal='u' if include_unicode else '') self.assertEqual(repr(mixed_type_obj), expected_message) def compare_str(unicode_type_name): - expected_message = "MixedTyping(value=3, name<={}>=asdf)".format(unicode_type_name) + expected_message = "MixedTyping(value=3, name=asdf)".format(unicode_type_name) 
self.assertEqual(str(mixed_type_obj), expected_message) if PY2: compare_str('unicode') @@ -448,7 +533,7 @@ def compare_str(unicode_type_name): "WithSubclassTypeConstraint(some_value=SomeDatatypeClass())") self.assertEqual( str(subclass_constraint_obj), - "WithSubclassTypeConstraint(some_value<+SomeBaseClass>=SomeDatatypeClass())") + "WithSubclassTypeConstraint(some_value=SomeDatatypeClass())") def test_mixin_type_construction(self): obj_with_mixin = TypedWithMixin(text_type(' asdf ')) @@ -457,7 +542,7 @@ def compare_repr(include_unicode = False): .format(unicode_literal='u' if include_unicode else '') self.assertEqual(repr(obj_with_mixin), expected_message) def compare_str(unicode_type_name): - expected_message = "TypedWithMixin(val<={}>= asdf )".format(unicode_type_name) + expected_message = "TypedWithMixin(val= asdf )".format(unicode_type_name) self.assertEqual(str(obj_with_mixin), expected_message) if PY2: compare_str('unicode') @@ -468,6 +553,14 @@ def compare_str(unicode_type_name): self.assertEqual(obj_with_mixin.as_str(), ' asdf ') self.assertEqual(obj_with_mixin.stripped(), 'asdf') + def test_instance_with_collection_construction_str_repr(self): + # TODO: convert the type of the input collection using a `wrapper_type` argument! + obj_with_collection = WithCollectionTypeConstraint([3]) + self.assertEqual("WithCollectionTypeConstraint(dependencies=[3])", + str(obj_with_collection)) + self.assertEqual("WithCollectionTypeConstraint(dependencies=[3])", + repr(obj_with_collection)) + def test_instance_construction_errors(self): with self.assertRaises(TypeError) as cm: SomeTypedDatatype(something=3) @@ -574,6 +667,21 @@ def compare_str(unicode_type_name, include_unicode=False): field 'some_value' was invalid: value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(SomeBaseClass).""") self.assertEqual(str(cm.exception), expected_msg) + with self.assertRaises(TypeCheckError) as cm: + WithCollectionTypeConstraint(3) + expected_msg = """\ +error: in constructor of type WithCollectionTypeConstraint: type check error: +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)): value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(Iterable).""" + self.assertEqual(str(cm.exception), expected_msg) + + with self.assertRaises(TypeCheckError) as cm: + WithCollectionTypeConstraint([3, "asdf"]) + expected_msg = """\ +error: in constructor of type WithCollectionTypeConstraint: type check error: +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'asdf']: value {u}'asdf' (with type '{string_type}') must satisfy this type constraint: Exactly(int).\ +""".format(u='u' if PY2 else '', string_type='unicode' if PY2 else 'str') + self.assertEqual(str(cm.exception), expected_msg) + def test_copy(self): obj = AnotherTypedDatatype(string='some_string', elements=[1, 2, 3]) new_obj = obj.copy(string='another_string') From d0432df3b4ab356c5b9ddf1d7ad5b4aeeab87b66 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 12 Feb 2019 12:50:59 -0800 Subject: [PATCH 2/8] Pin pytest version to avoid induced breakage from more-itertools transitive dep (#7238) ### Problem A floating transitive dependency of pytest, `more-itertools`, dropped support for python 2 in its 6.0.0 release -- see pytest-dev/pytest#4770. This is currently breaking our and our users' CI: see https://travis-ci.org/pantsbuild/pants/jobs/492004734. 
We could pin that dep, but as mentioned in https://github.com/pytest-dev/pytest/issues/4770#issuecomment-462869367, pinning transitive deps of pytest would impose requirement constraints on users of pytest in pants. ### Solution - Pin `pytest==3.0.7` for now. ### Result Python tests should no longer be broken. --- src/python/pants/backend/python/subsystems/pytest.py | 4 +++- .../python/pants_test/rules/test_test_integration.py | 12 ++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/python/pants/backend/python/subsystems/pytest.py b/src/python/pants/backend/python/subsystems/pytest.py index e3eb7f9e1b6..cbf53f953dd 100644 --- a/src/python/pants/backend/python/subsystems/pytest.py +++ b/src/python/pants/backend/python/subsystems/pytest.py @@ -14,7 +14,9 @@ class PyTest(Subsystem): def register_options(cls, register): super(PyTest, cls).register_options(register) # TODO: This is currently bounded below `3.7` due to #6282. - register('--requirements', advanced=True, default='pytest>=3.0.7,<3.7', + # TODO: Additionally, this is temporarily pinned to 3.0.7 due to more-itertools 6.0.0 dropping + # Python 2 support: https://github.com/pytest-dev/pytest/issues/4770. + register('--requirements', advanced=True, default='pytest==3.0.7', help='Requirements string for the pytest library.') register('--timeout-requirements', advanced=True, default='pytest-timeout>=1.2,<1.3', help='Requirements string for the pytest-timeout library.') diff --git a/tests/python/pants_test/rules/test_test_integration.py b/tests/python/pants_test/rules/test_test_integration.py index 6f28e5bf31c..913752de8dc 100644 --- a/tests/python/pants_test/rules/test_test_integration.py +++ b/tests/python/pants_test/rules/test_test_integration.py @@ -72,7 +72,7 @@ def test_passing_python_test(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items testprojects/tests/python/pants/dummies/test_pass.py . [100%] @@ -92,7 +92,7 @@ def test_failing_python_test(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items testprojects/tests/python/pants/dummies/test_fail.py F [100%] @@ -120,7 +120,7 @@ def test_source_dep(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items testprojects/tests/python/pants/dummies/test_with_source_dep.py . [100%] @@ -139,7 +139,7 @@ def test_thirdparty_dep(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py . [100%] @@ -160,7 +160,7 @@ def test_mixed_python_tests(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items testprojects/tests/python/pants/dummies/test_fail.py F [100%] @@ -177,7 +177,7 @@ def test_fail(): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items testprojects/tests/python/pants/dummies/test_pass.py .
[100%] From e382541d70c5b57e682dce998d361c96aed4d8e9 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 12 Feb 2019 12:54:49 -0800 Subject: [PATCH 3/8] Canonicalize enum pattern matching for execution strategy, platform, and elsewhere (#7226) ### Problem In #7092 we added [`NailgunTask#do_for_execution_strategy_variant()`](https://github.com/cosmicexplorer/pants/blob/70977ef064305b78406a627e07f4dae3a60e4ae4/src/python/pants/backend/jvm/tasks/nailgun_task.py#L31-L43), which allowed performing more declarative execution strategy-specific logic in nailgunnable tasks. Further work with rsc will do even more funky things with our nailgunnable task logic, and while we will eventually have a unified story again for nailgun and subprocess invocations with the v2 engine (see #7079), for now, having this check that we have performed the logic we expect for all execution strategy variants is very useful. This PR puts that pattern matching logic into `enum()`: https://github.com/pantsbuild/pants/blob/84cf9a75dbf68cf7126fe8372ab9b2f48720464d/src/python/pants/util/objects.py#L173-L174, among other things. **Note:** `TypeCheckError` and other exceptions are moved up from further down in `objects.py`. ### Solution - add `resolve_for_enum_variant()` method to `enum`, which does the job of the previous `do_for_execution_strategy_variant()`. - make the native backend's `Platform` into an enum. - stop silently converting a `None` argument to the enum's `create()` classmethod into its `default_value`. - add `register_enum_option()` helper method to register options based on enum types. ### Result We have a low-overhead way to convert potentially-tricky conditional logic into a checked pattern matching-style interface with `enum()`, and it is easier to register enum options. --- .../jvm/tasks/jvm_compile/rsc/rsc_compile.py | 8 +- .../pants/backend/jvm/tasks/nailgun_task.py | 39 ++-- .../backend/native/config/environment.py | 35 +-- .../backend/native/subsystems/binaries/gcc.py | 6 +- .../native/subsystems/binaries/llvm.py | 11 +- .../native/subsystems/native_build_step.py | 17 +- .../native/subsystems/native_toolchain.py | 20 +- .../backend/native/targets/native_artifact.py | 6 +- .../pants/backend/native/tasks/conan_fetch.py | 6 +- .../native/tasks/link_shared_libraries.py | 10 +- .../backend/python/tasks/unpack_wheels.py | 6 +- src/python/pants/engine/fs.py | 5 +- src/python/pants/init/engine_initializer.py | 3 +- src/python/pants/option/global_options.py | 16 +- src/python/pants/util/objects.py | 216 +++++++++++++----- .../backend/native/tasks/test_cpp_compile.py | 1 + .../tasks/native/test_ctypes_integration.py | 110 +++++---- tests/python/pants_test/util/test_objects.py | 113 ++++++--- 18 files changed, 369 insertions(+), 259 deletions(-) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py index 441196b86cf..6a5675be561 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py @@ -201,11 +201,11 @@ def _nailgunnable_combined_classpath(self): # Overrides the normal zinc compiler classpath, which only contains zinc.
def get_zinc_compiler_classpath(self): - return self.do_for_execution_strategy_variant({ + return self.execution_strategy_enum.resolve_for_enum_variant({ self.HERMETIC: lambda: super(RscCompile, self).get_zinc_compiler_classpath(), self.SUBPROCESS: lambda: super(RscCompile, self).get_zinc_compiler_classpath(), self.NAILGUN: lambda: self._nailgunnable_combined_classpath, - }) + })() def register_extra_products_from_contexts(self, targets, compile_contexts): super(RscCompile, self).register_extra_products_from_contexts(targets, compile_contexts) @@ -861,7 +861,7 @@ def _runtool_nonhermetic(self, parent_workunit, classpath, main, tool_name, args def _runtool(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None): with self.context.new_workunit(tool_name) as wu: - return self.do_for_execution_strategy_variant({ + return self.execution_strategy_enum.resolve_for_enum_variant({ self.HERMETIC: lambda: self._runtool_hermetic( main, tool_name, args, distribution, tgt=tgt, input_files=input_files, input_digest=input_digest, output_dir=output_dir), @@ -869,7 +869,7 @@ def _runtool(self, main, tool_name, args, distribution, wu, self.tool_classpath(tool_name), main, tool_name, args, distribution), self.NAILGUN: lambda: self._runtool_nonhermetic( wu, self._nailgunnable_combined_classpath, main, tool_name, args, distribution), - }) + })() def _run_metai_tool(self, distribution, diff --git a/src/python/pants/backend/jvm/tasks/nailgun_task.py b/src/python/pants/backend/jvm/tasks/nailgun_task.py index ef6cef73318..dbc3229a5e3 100644 --- a/src/python/pants/backend/jvm/tasks/nailgun_task.py +++ b/src/python/pants/backend/jvm/tasks/nailgun_task.py @@ -15,6 +15,7 @@ from pants.process.subprocess import Subprocess from pants.task.task import Task, TaskBase from pants.util.memo import memoized_property +from pants.util.objects import enum, register_enum_option class NailgunTaskBase(JvmToolTaskMixin, TaskBase): @@ -24,30 +25,16 @@ class NailgunTaskBase(JvmToolTaskMixin, TaskBase): SUBPROCESS = 'subprocess' HERMETIC = 'hermetic' - class InvalidExecutionStrategyMapping(Exception): pass - - _all_execution_strategies = frozenset([NAILGUN, SUBPROCESS, HERMETIC]) - - def do_for_execution_strategy_variant(self, mapping): - """Invoke the method in `mapping` with the key corresponding to the execution strategy. - - `mapping` is a dict mapping execution strategy -> zero-argument lambda. - """ - variants = frozenset(mapping.keys()) - if variants != self._all_execution_strategies: - raise self.InvalidExecutionStrategyMapping( - 'Must specify a mapping with exactly the keys {} (was: {})' - .format(self._all_execution_strategies, variants)) - method_for_variant = mapping[self.execution_strategy] - # The methods need not return a value, but we pass it along if they do. - return method_for_variant() + class ExecutionStrategy(enum([NAILGUN, SUBPROCESS, HERMETIC])): pass @classmethod def register_options(cls, register): super(NailgunTaskBase, cls).register_options(register) - register('--execution-strategy', choices=[cls.NAILGUN, cls.SUBPROCESS, cls.HERMETIC], default=cls.NAILGUN, - help='If set to nailgun, nailgun will be enabled and repeated invocations of this ' - 'task will be quicker. If set to subprocess, then the task will be run without nailgun.') + register_enum_option( + register, cls.ExecutionStrategy, '--execution-strategy', + help='If set to nailgun, nailgun will be enabled and repeated invocations of this ' + 'task will be quicker. 
If set to subprocess, then the task will be run without nailgun. ' + 'Hermetic execution is an experimental subprocess execution framework.') register('--nailgun-timeout-seconds', advanced=True, default=10, type=float, help='Timeout (secs) for nailgun startup.') register('--nailgun-connect-attempts', advanced=True, default=5, type=int, @@ -60,6 +47,13 @@ def register_options(cls, register): rev='0.9.1'), ]) + @memoized_property + def execution_strategy_enum(self): + # TODO: This .create() call can be removed when the enum interface is more stable as the option + # is converted into an instance of self.ExecutionStrategy via the `type` argument through + # register_enum_option(). + return self.ExecutionStrategy.create(self.get_options().execution_strategy) + @classmethod def subsystem_dependencies(cls): return super(NailgunTaskBase, cls).subsystem_dependencies() + (Subprocess.Factory,) @@ -76,9 +70,10 @@ def __init__(self, *args, **kwargs): self._executor_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir, *id_tuple) - @memoized_property + # TODO: eventually deprecate this when we can move all subclasses to use the enum! + @property def execution_strategy(self): - return self.get_options().execution_strategy + return self.execution_strategy_enum.value def create_java_executor(self, dist=None): """Create java executor that uses this task's ng daemon, if allowed. diff --git a/src/python/pants/backend/native/config/environment.py b/src/python/pants/backend/native/config/environment.py index 6d8a55b30ea..eb6d26c8194 100644 --- a/src/python/pants/backend/native/config/environment.py +++ b/src/python/pants/backend/native/config/environment.py @@ -9,37 +9,14 @@ from pants.engine.rules import SingletonRule from pants.util.meta import AbstractClass -from pants.util.objects import datatype +from pants.util.objects import datatype, enum from pants.util.osutil import all_normalized_os_names, get_normalized_os_name from pants.util.strutil import create_path_env_var -class Platform(datatype(['normalized_os_name'])): +class Platform(enum('normalized_os_name', all_normalized_os_names())): - class UnsupportedPlatformError(Exception): - """Thrown if pants is running on an unrecognized platform.""" - - @classmethod - def create(cls): - return Platform(get_normalized_os_name()) - - _NORMALIZED_OS_NAMES = frozenset(all_normalized_os_names()) - - def resolve_platform_specific(self, platform_specific_funs): - arg_keys = frozenset(platform_specific_funs.keys()) - unknown_plats = self._NORMALIZED_OS_NAMES - arg_keys - if unknown_plats: - raise self.UnsupportedPlatformError( - "platform_specific_funs {} must support platforms {}" - .format(platform_specific_funs, list(unknown_plats))) - extra_plats = arg_keys - self._NORMALIZED_OS_NAMES - if extra_plats: - raise self.UnsupportedPlatformError( - "platform_specific_funs {} has unrecognized platforms {}" - .format(platform_specific_funs, list(extra_plats))) - - fun_for_platform = platform_specific_funs[self.normalized_os_name] - return fun_for_platform() + default_value = get_normalized_os_name() class Executable(AbstractClass): @@ -89,9 +66,9 @@ def as_invocation_environment_dict(self): :rtype: dict of string -> string """ - lib_env_var = self._platform.resolve_platform_specific({ - 'darwin': lambda: 'DYLD_LIBRARY_PATH', - 'linux': lambda: 'LD_LIBRARY_PATH', + lib_env_var = self._platform.resolve_for_enum_variant({ + 'darwin': 'DYLD_LIBRARY_PATH', + 'linux': 'LD_LIBRARY_PATH', }) return { 'PATH': create_path_env_var(self.path_entries), diff 
--git a/src/python/pants/backend/native/subsystems/binaries/gcc.py b/src/python/pants/backend/native/subsystems/binaries/gcc.py index 5f48e12fb85..b61bdfcd498 100644 --- a/src/python/pants/backend/native/subsystems/binaries/gcc.py +++ b/src/python/pants/backend/native/subsystems/binaries/gcc.py @@ -44,9 +44,9 @@ def path_entries(self): @memoized_method def _common_lib_dirs(self, platform): - lib64_tuples = platform.resolve_platform_specific({ - 'darwin': lambda: [], - 'linux': lambda: [('lib64',)], + lib64_tuples = platform.resolve_for_enum_variant({ + 'darwin': [], + 'linux': [('lib64',)], }) return self._filemap(lib64_tuples + [ ('lib',), diff --git a/src/python/pants/backend/native/subsystems/binaries/llvm.py b/src/python/pants/backend/native/subsystems/binaries/llvm.py index a49146cbbd1..413d0bd8bd3 100644 --- a/src/python/pants/backend/native/subsystems/binaries/llvm.py +++ b/src/python/pants/backend/native/subsystems/binaries/llvm.py @@ -80,16 +80,13 @@ def _filemap(self, all_components_list): def path_entries(self): return self._filemap([('bin',)]) - _PLATFORM_SPECIFIC_LINKER_NAME = { - 'darwin': lambda: 'ld64.lld', - 'linux': lambda: 'lld', - } - def linker(self, platform): return Linker( path_entries=self.path_entries, - exe_filename=platform.resolve_platform_specific( - self._PLATFORM_SPECIFIC_LINKER_NAME), + exe_filename=platform.resolve_for_enum_variant({ + 'darwin': 'ld64.lld', + 'linux': 'lld', + }), library_dirs=[], linking_library_dirs=[], extra_args=[], diff --git a/src/python/pants/backend/native/subsystems/native_build_step.py b/src/python/pants/backend/native/subsystems/native_build_step.py index b30f51c07e6..55ffea10dd4 100644 --- a/src/python/pants/backend/native/subsystems/native_build_step.py +++ b/src/python/pants/backend/native/subsystems/native_build_step.py @@ -10,14 +10,10 @@ from pants.subsystem.subsystem import Subsystem from pants.util.memo import memoized_property from pants.util.meta import classproperty -from pants.util.objects import enum +from pants.util.objects import enum, register_enum_option -class ToolchainVariant(enum('descriptor', ['gnu', 'llvm'])): - - @property - def is_gnu(self): - return self.descriptor == 'gnu' +class ToolchainVariant(enum(['gnu', 'llvm'])): pass class NativeBuildStep(CompilerOptionSetsMixin, MirroredTargetOptionMixin, Subsystem): @@ -39,11 +35,10 @@ def register_options(cls, register): help='The default for the "compiler_option_sets" argument ' 'for targets of this language.') - register('--toolchain-variant', type=str, fingerprint=True, advanced=True, - choices=ToolchainVariant.allowed_values, - default=ToolchainVariant.default_value, - help="Whether to use gcc (gnu) or clang (llvm) to compile C and C++. Currently all " - "linking is done with binutils ld on Linux, and the XCode CLI Tools on MacOS.") + register_enum_option( + register, ToolchainVariant, '--toolchain-variant', advanced=True, + help="Whether to use gcc (gnu) or clang (llvm) to compile C and C++. 
Currently all " + "linking is done with binutils ld on Linux, and the XCode CLI Tools on MacOS.") def get_compiler_option_sets_for_target(self, target): return self.get_target_mirrored_option('compiler_option_sets', target) diff --git a/src/python/pants/backend/native/subsystems/native_toolchain.py b/src/python/pants/backend/native/subsystems/native_toolchain.py index 7f4dec31efc..dffa151825f 100644 --- a/src/python/pants/backend/native/subsystems/native_toolchain.py +++ b/src/python/pants/backend/native/subsystems/native_toolchain.py @@ -87,10 +87,11 @@ class LLVMCppToolchain(datatype([('cpp_toolchain', CppToolchain)])): pass @rule(LibcObjects, [Select(Platform), Select(NativeToolchain)]) def select_libc_objects(platform, native_toolchain): - paths = platform.resolve_platform_specific({ + # We use lambdas here to avoid searching for libc on osx, where it will fail. + paths = platform.resolve_for_enum_variant({ 'darwin': lambda: [], 'linux': lambda: native_toolchain._libc_dev.get_libc_objects(), - }) + })() yield LibcObjects(paths) @@ -343,8 +344,12 @@ class ToolchainVariantRequest(datatype([ @rule(CToolchain, [Select(ToolchainVariantRequest)]) def select_c_toolchain(toolchain_variant_request): native_toolchain = toolchain_variant_request.toolchain - # TODO: make an enum exhaustiveness checking method that works with `yield Get(...)` statements! - if toolchain_variant_request.variant.is_gnu: + # TODO(#5933): make an enum exhaustiveness checking method that works with `yield Get(...)`! + use_gcc = toolchain_variant_request.variant.resolve_for_enum_variant({ + 'gnu': True, + 'llvm': False, + }) + if use_gcc: toolchain_resolved = yield Get(GCCCToolchain, NativeToolchain, native_toolchain) else: toolchain_resolved = yield Get(LLVMCToolchain, NativeToolchain, native_toolchain) @@ -354,7 +359,12 @@ def select_c_toolchain(toolchain_variant_request): @rule(CppToolchain, [Select(ToolchainVariantRequest)]) def select_cpp_toolchain(toolchain_variant_request): native_toolchain = toolchain_variant_request.toolchain - if toolchain_variant_request.variant.is_gnu: + # TODO(#5933): make an enum exhaustiveness checking method that works with `yield Get(...)`! + use_gcc = toolchain_variant_request.variant.resolve_for_enum_variant({ + 'gnu': True, + 'llvm': False, + }) + if use_gcc: toolchain_resolved = yield Get(GCCCppToolchain, NativeToolchain, native_toolchain) else: toolchain_resolved = yield Get(LLVMCppToolchain, NativeToolchain, native_toolchain) diff --git a/src/python/pants/backend/native/targets/native_artifact.py b/src/python/pants/backend/native/targets/native_artifact.py index dc8461d642c..b6ba3bb132b 100644 --- a/src/python/pants/backend/native/targets/native_artifact.py +++ b/src/python/pants/backend/native/targets/native_artifact.py @@ -22,9 +22,9 @@ def alias(cls): def as_shared_lib(self, platform): # TODO: check that the name conforms to some format in the constructor (e.g. no dots?). 
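The conversions above highlight the one behavioral difference between the old and new APIs: `resolve_for_enum_variant()` evaluates every value in the mapping eagerly, so cheap literals can be passed directly, while side-effecting or expensive branches must still be wrapped in lambdas and the selected lambda invoked with a trailing `()`. A minimal sketch of both styles, using a hypothetical `Shell` enum that is not part of this patch:

```python
from pants.util.objects import enum


class Shell(enum(['bash', 'zsh'])):
  """A hypothetical enum; 'bash' is the default value because it is listed first."""


# Eager dispatch: both values are constructed up front, which is fine for
# cheap literals such as strings.
rc_file = Shell('zsh').resolve_for_enum_variant({
  'bash': '~/.bashrc',
  'zsh': '~/.zshrc',
})
assert rc_file == '~/.zshrc'

# Lazy dispatch: wrap each branch in a lambda and call the selected one, so
# only the matching branch is ever executed.
greeting = Shell.create().resolve_for_enum_variant({
  'bash': lambda: 'hello from bash',
  'zsh': lambda: 'hello from zsh',
})()
assert greeting == 'hello from bash'
```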
- return platform.resolve_platform_specific({ - 'darwin': lambda: 'lib{}.dylib'.format(self.lib_name), - 'linux': lambda: 'lib{}.so'.format(self.lib_name), + return platform.resolve_for_enum_variant({ + 'darwin': 'lib{}.dylib'.format(self.lib_name), + 'linux': 'lib{}.so'.format(self.lib_name), }) def _compute_fingerprint(self): diff --git a/src/python/pants/backend/native/tasks/conan_fetch.py b/src/python/pants/backend/native/tasks/conan_fetch.py index 6ffa7fe4416..5f9eb11a14f 100644 --- a/src/python/pants/backend/native/tasks/conan_fetch.py +++ b/src/python/pants/backend/native/tasks/conan_fetch.py @@ -124,9 +124,9 @@ def _conan_user_home(self, conan, in_workdir=False): @memoized_property def _conan_os_name(self): - return Platform.create().resolve_platform_specific({ - 'darwin': lambda: 'Macos', - 'linux': lambda: 'Linux', + return Platform.create().resolve_for_enum_variant({ + 'darwin': 'Macos', + 'linux': 'Linux', }) @property diff --git a/src/python/pants/backend/native/tasks/link_shared_libraries.py b/src/python/pants/backend/native/tasks/link_shared_libraries.py index 4f3efc0b69f..f49c31bb2f1 100644 --- a/src/python/pants/backend/native/tasks/link_shared_libraries.py +++ b/src/python/pants/backend/native/tasks/link_shared_libraries.py @@ -142,11 +142,6 @@ def _make_link_request(self, vt, compiled_objects_product): return link_request - _SHARED_CMDLINE_ARGS = { - 'darwin': lambda: ['-Wl,-dylib'], - 'linux': lambda: ['-shared'], - } - def _execute_link_request(self, link_request): object_files = link_request.object_files @@ -163,7 +158,10 @@ def _execute_link_request(self, link_request): self.context.log.debug("resulting_shared_lib_path: {}".format(resulting_shared_lib_path)) # We are executing in the results_dir, so get absolute paths for everything. 
cmd = ([linker.exe_filename] + - self.platform.resolve_platform_specific(self._SHARED_CMDLINE_ARGS) + + self.platform.resolve_for_enum_variant({ + 'darwin': ['-Wl,-dylib'], + 'linux': ['-shared'], + }) + linker.extra_args + ['-o', os.path.abspath(resulting_shared_lib_path)] + ['-L{}'.format(lib_dir) for lib_dir in link_request.external_lib_dirs] + diff --git a/src/python/pants/backend/python/tasks/unpack_wheels.py b/src/python/pants/backend/python/tasks/unpack_wheels.py index 632395e0cd3..e66483a584e 100644 --- a/src/python/pants/backend/python/tasks/unpack_wheels.py +++ b/src/python/pants/backend/python/tasks/unpack_wheels.py @@ -105,9 +105,9 @@ def _name_and_platform(whl): @memoized_classproperty def _current_platform_abbreviation(cls): - return NativeBackendPlatform.create().resolve_platform_specific({ - 'darwin': lambda: 'macosx', - 'linux': lambda: 'linux', + return NativeBackendPlatform.create().resolve_for_enum_variant({ + 'darwin': 'macosx', + 'linux': 'linux', }) @classmethod diff --git a/src/python/pants/engine/fs.py b/src/python/pants/engine/fs.py index c290c87aadf..c202bb3d3f3 100644 --- a/src/python/pants/engine/fs.py +++ b/src/python/pants/engine/fs.py @@ -57,8 +57,9 @@ def __new__(cls, include, exclude=(), glob_match_error_behavior=None, conjunctio cls, include=tuple(include), exclude=tuple(exclude), - glob_match_error_behavior=GlobMatchErrorBehavior.create(glob_match_error_behavior), - conjunction=GlobExpansionConjunction.create(conjunction)) + glob_match_error_behavior=GlobMatchErrorBehavior.create(glob_match_error_behavior, + none_is_default=True), + conjunction=GlobExpansionConjunction.create(conjunction, none_is_default=True)) class PathGlobsAndRoot(datatype([('path_globs', PathGlobs), ('root', text_type)])): diff --git a/src/python/pants/init/engine_initializer.py b/src/python/pants/init/engine_initializer.py index 1b35f31692d..41f042c8844 100644 --- a/src/python/pants/init/engine_initializer.py +++ b/src/python/pants/init/engine_initializer.py @@ -346,7 +346,8 @@ def setup_legacy_graph_extended( rules = ( [ RootRule(Console), - SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior)), + SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior, + none_is_default=True)), SingletonRule.from_instance(build_configuration), SingletonRule(SymbolTable, symbol_table), ] + diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 31160095c0e..c346f85c4fd 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -16,7 +16,7 @@ from pants.option.optionable import Optionable from pants.option.scope import ScopeInfo from pants.subsystem.subsystem_client_mixin import SubsystemClientMixin -from pants.util.objects import datatype, enum +from pants.util.objects import datatype, enum, register_enum_option class GlobMatchErrorBehavior(enum('failure_behavior', ['ignore', 'warn', 'error'])): @@ -26,8 +26,6 @@ class GlobMatchErrorBehavior(enum('failure_behavior', ['ignore', 'warn', 'error' be aware of any changes to this object's definition. """ - default_option_value = 'warn' - class ExecutionOptions(datatype([ 'remote_store_server', @@ -197,12 +195,12 @@ def register_bootstrap_options(cls, register): help='Paths to ignore for all filesystem operations performed by pants ' '(e.g. BUILD file scanning, glob matching, etc). 
' 'Patterns use the gitignore syntax (https://git-scm.com/docs/gitignore).') - register('--glob-expansion-failure', type=str, - choices=GlobMatchErrorBehavior.allowed_values, - default=GlobMatchErrorBehavior.default_option_value, - advanced=True, - help="Raise an exception if any targets declaring source files " - "fail to match any glob provided in the 'sources' argument.") + register_enum_option( + # TODO: allow using the attribute `GlobMatchErrorBehavior.warn` for more safety! + register, GlobMatchErrorBehavior, '--glob-expansion-failure', default='warn', + advanced=True, + help="Raise an exception if any targets declaring source files " + "fail to match any glob provided in the 'sources' argument.") register('--exclude-target-regexp', advanced=True, type=list, default=[], daemon=False, metavar='', help='Exclude target roots that match these regexes.') diff --git a/src/python/pants/util/objects.py b/src/python/pants/util/objects.py index 7c29fe1fefe..e223a5d22fc 100644 --- a/src/python/pants/util/objects.py +++ b/src/python/pants/util/objects.py @@ -15,6 +15,22 @@ from pants.util.meta import AbstractClass, classproperty +class TypeCheckError(TypeError): + + # TODO: make some wrapper exception class to make this kind of + # prefixing easy (maybe using a class field format string?). + def __init__(self, type_name, msg, *args, **kwargs): + formatted_msg = "type check error in class {}: {}".format(type_name, msg) + super(TypeCheckError, self).__init__(formatted_msg, *args, **kwargs) + + +class TypedDatatypeInstanceConstructionError(TypeCheckError): + """Raised when a datatype()'s fields fail a type check upon construction.""" + + +# TODO: create a mixin which declares/implements the methods we define on the generated class in +# datatype() and enum() to decouple the class's logic from the way it's created. This may also make +# migration to python 3 dataclasses as per #7074 easier. def datatype(field_decls, superclass_name=None, **kwargs): """A wrapper for `namedtuple` that accounts for the type of the object in equality. @@ -56,9 +72,20 @@ def datatype(field_decls, superclass_name=None, **kwargs): namedtuple_cls = namedtuple(superclass_name, field_names, **kwargs) class DataType(namedtuple_cls): + @classproperty + def type_check_error_type(cls): + """The exception type to use in make_type_error().""" + return TypedDatatypeInstanceConstructionError + @classmethod def make_type_error(cls, msg, *args, **kwargs): - return TypeCheckError(cls.__name__, msg, *args, **kwargs) + """A helper method to generate an exception type for type checking errors. + + This method uses `cls.type_check_error_type` to ensure that type checking errors can be caught + with a reliable exception type. The type returned by `cls.type_check_error_type` should ensure + that the exception messages are prefixed with enough context to be useful and *not* confusing. + """ + return cls.type_check_error_type(cls.__name__, msg, *args, **kwargs) def __new__(cls, *args, **kwargs): # TODO: Ideally we could execute this exactly once per `cls` but it should be a @@ -69,7 +96,8 @@ def __new__(cls, *args, **kwargs): try: this_object = super(DataType, cls).__new__(cls, *args, **kwargs) except TypeError as e: - raise cls.make_type_error(e) + raise cls.make_type_error( + "error in namedtuple() base constructor: {}".format(e)) # TODO: Make this kind of exception pattern (filter for errors then display them all at once) # more ergonomic. 
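The error-collection pattern referenced in the TODO above shows up in the aggregated constructor error in the next hunk. For illustration, a minimal sketch of what a consumer sees when multiple fields fail their constraints (the `Point` datatype is hypothetical, not part of this patch):

```python
from pants.util.objects import TypeCheckError, datatype


class Point(datatype([('x', int), ('y', int)])):
  """A hypothetical typed datatype, used only for illustration."""


try:
  # Both fields fail their type constraints, so both failures are collected
  # and reported in one exception rather than failing on the first field.
  Point(x='1', y=None)
except TypeCheckError as e:
  # The message is prefixed with the class name, e.g.:
  # "type check error in class Point: errors type checking constructor arguments: ..."
  print(e)
```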
@@ -82,7 +110,9 @@ def __new__(cls, *args, **kwargs): type_failure_msgs.append( "field '{}' was invalid: {}".format(field_name, e)) if type_failure_msgs: - raise cls.make_type_error('\n'.join(type_failure_msgs)) + raise cls.make_type_error( + 'errors type checking constructor arguments:\n{}' + .format('\n'.join(type_failure_msgs))) return this_object @@ -168,17 +198,37 @@ def __str__(self): return type(superclass_name.encode('utf-8'), (DataType,), {}) -def enum(field_name, all_values): +class EnumVariantSelectionError(TypeCheckError): + """Raised when an invalid variant for an enum() is constructed or matched against.""" + + +def enum(*args): """A datatype which can take on a finite set of values. This method is experimental and unstable. Any enum subclass can be constructed with its create() classmethod. This method will use the first - element of `all_values` as the enum value if none is specified. - - :param field_name: A string used as the field for the datatype. Note that enum does not yet - support type checking as with datatype. - :param all_values: An iterable of objects representing all possible values for the enum. - NB: `all_values` must be a finite, non-empty iterable with unique values! + element of `all_values` as the default value, but enum classes can override this behavior by + setting `default_value` in the class body. + + NB: Relying on the `field_name` directly is discouraged in favor of using + resolve_for_enum_variant() in Python code. The `field_name` argument is exposed to make enum + instances more readable when printed, and to allow code in another language using an FFI to + reliably extract the value from an enum instance. + + :param string field_name: A string used as the field for the datatype. This positional argument is + optional, and defaults to 'value'. Note that `enum()` does not yet + support type checking as with `datatype()`. + :param Iterable all_values: An iterable of objects representing all possible values for + the enum. This argument must be a finite, non-empty iterable with + unique values. + :raises: :class:`ValueError` """ + if len(args) == 1: + field_name = 'value' + all_values, = args + elif len(args) == 2: + field_name, all_values = args + else: + raise ValueError("enum() accepts only 1 or 2 args! args = {!r}".format(args)) # This call to list() will eagerly evaluate any `all_values` which would otherwise be lazy, such # as a generator. @@ -186,82 +236,124 @@ # `OrderedSet` maintains the order of the input iterable, but is faster to check membership. allowed_values_set = OrderedSet(all_values_realized) - if len(allowed_values_set) < len(all_values_realized): + if len(allowed_values_set) == 0: + raise ValueError("all_values must be a non-empty iterable!") + elif len(allowed_values_set) < len(all_values_realized): raise ValueError("When converting all_values ({}) to a set, at least one duplicate " "was detected. The unique elements of all_values were: {}." - .format(all_values_realized, allowed_values_set)) + .format(all_values_realized, list(allowed_values_set))) class ChoiceDatatype(datatype([field_name])): - allowed_values = allowed_values_set - default_value = next(iter(allowed_values)) + default_value = next(iter(allowed_values_set)) + + # Overridden from datatype() so that providing an invalid variant is catchable as a more + # specific subclass of TypeCheckError.
+ type_check_error_type = EnumVariantSelectionError @memoized_classproperty def _singletons(cls): - """Generate memoized instances of this enum wrapping each of this enum's allowed values.""" - return { value: cls(value) for value in cls.allowed_values } + """Generate memoized instances of this enum wrapping each of this enum's allowed values. + + NB: The implementation of enum() should use this property as the source of truth for allowed + values and enum instances from those values. + """ + return OrderedDict((value, cls._make_singleton(value)) for value in allowed_values_set) @classmethod - def _check_value(cls, value): - if value not in cls.allowed_values: - raise cls.make_type_error( - "Value {!r} for '{}' must be one of: {!r}." - .format(value, field_name, cls.allowed_values)) + def _make_singleton(cls, value): + """ + We convert uses of the constructor to call create(), so we then need to go around __new__ to + bootstrap singleton creation from datatype()'s __new__. + """ + return super(ChoiceDatatype, cls).__new__(cls, value) + + @classproperty + def _allowed_values(cls): + """The values provided to the enum() type constructor, for use in error messages.""" + return list(cls._singletons.keys()) + + def __new__(cls, value): + """Forward `value` to the .create() factory method. + + The .create() factory method is preferred, but forwarding the constructor like this allows us + to use the generated enum type both as a type to check against with isinstance() as well as a + function to create instances with. This makes it easy to use as a pants option type. + """ + return cls.create(value) @classmethod - def create(cls, value=None): + def create(cls, *args, **kwargs): + """Create an instance of this enum, using the default value if specified. + + :param value: Use this as the enum value. If `value` is an instance of this class, return it, + otherwise it is checked against the enum's allowed values. This positional + argument is optional, and if not specified, `cls.default_value` is used. + :param bool none_is_default: If this is True, a None `value` is converted into + `cls.default_value` before being checked against the enum's + allowed values. + """ + none_is_default = kwargs.pop('none_is_default', False) + if kwargs: + raise ValueError('unrecognized keyword arguments for {}.create(): {!r}' + .format(cls.__name__, kwargs)) + + if len(args) == 0: + value = cls.default_value + elif len(args) == 1: + value = args[0] + if none_is_default and value is None: + value = cls.default_value + else: + raise ValueError('{}.create() accepts 0 or 1 positional args! *args = {!r}' + .format(cls.__name__, args)) + # If we get an instance of this enum class, just return it. This means you can call .create() - # on None, an allowed value for the enum, or an existing instance of the enum. + # on an allowed value for the enum, or an existing instance of the enum. if isinstance(value, cls): return value - # Providing an explicit value that is not None will *not* use the default value! - if value is None: - value = cls.default_value - - # We actually circumvent the constructor in this method due to the cls._singletons - # memoized_classproperty, but we want to raise the same error, so we move checking into a - # common method. - cls._check_value(value) - + if value not in cls._singletons: + raise cls.make_type_error( + "Value {!r} for '{}' must be one of: {!r}." 
+ .format(value, field_name, cls._allowed_values)) return cls._singletons[value] - def __new__(cls, *args, **kwargs): - this_object = super(ChoiceDatatype, cls).__new__(cls, *args, **kwargs) + def resolve_for_enum_variant(self, mapping): + """Return the object in `mapping` with the key corresponding to the enum value. - field_value = getattr(this_object, field_name) - - cls._check_value(field_value) - - return this_object - - return ChoiceDatatype + `mapping` is a dict mapping enum variant value -> arbitrary object. All variant values must be + provided. + NB: The objects in `mapping` should be made into lambdas if lazy execution is desired, as this + will "evaluate" all of the values in `mapping`. + """ + keys = frozenset(mapping.keys()) + if keys != frozenset(self._allowed_values): + raise self.make_type_error( + "pattern matching must have exactly the keys {} (was: {})" + .format(self._allowed_values, list(keys))) + match_for_variant = mapping[getattr(self, field_name)] + return match_for_variant -class TypedDatatypeClassConstructionError(Exception): - - # TODO: make some wrapper exception class to make this kind of - # prefixing easy (maybe using a class field format string?). - def __init__(self, type_name, msg, *args, **kwargs): - full_msg = "error: while trying to generate typed datatype {}: {}".format( - type_name, msg) - super(TypedDatatypeClassConstructionError, self).__init__( - full_msg, *args, **kwargs) - - -class TypedDatatypeInstanceConstructionError(TypeError): + @classmethod + def iterate_enum_variants(cls): + """Iterate over all instances of this enum, in the declared order. - def __init__(self, type_name, msg, *args, **kwargs): - full_msg = "error: in constructor of type {}: {}".format(type_name, msg) - super(TypedDatatypeInstanceConstructionError, self).__init__( - full_msg, *args, **kwargs) + NB: This method is exposed for testing enum variants easily. resolve_for_enum_variant() should + be used for performing conditional logic based on an enum instance's value. + """ + # TODO(#7232): use this method to register attributes on the generated type object for each of + # the singletons! + return cls._singletons.values() + return ChoiceDatatype -class TypeCheckError(TypedDatatypeInstanceConstructionError): - def __init__(self, type_name, msg, *args, **kwargs): - formatted_msg = "type check error:\n{}".format(msg) - super(TypeCheckError, self).__init__( - type_name, formatted_msg, *args, **kwargs) +# TODO(#7233): allow usage of the normal register() by using an enum class as the `type` argument! +def register_enum_option(register, enum_cls, *args, **kwargs): + """A helper method for declaring a pants option from an `enum()`.""" + default_value = kwargs.pop('default', enum_cls.default_value) + register(*args, choices=enum_cls._allowed_values, default=default_value, **kwargs) # TODO: make these members of the `TypeConstraint` class! diff --git a/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py b/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py index e9f831ff55a..4389f60b22f 100644 --- a/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py +++ b/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py @@ -70,6 +70,7 @@ def test_target_level_toolchain_variant_llvm(self): task = self.create_task(self.context(target_roots=[cpp_lib_target])) compiler = task.get_compiler(cpp_lib_target) + # TODO(#6866): test specifically which compiler is selected, traversing the PATH if necessary. 
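The new `create()` semantics and the `register_enum_option()` helper introduced above compose as follows; a minimal sketch with a hypothetical `LogLevel` enum that is not part of this patch:

```python
from pants.util.objects import enum, register_enum_option


class LogLevel(enum(['info', 'debug'])):
  """A hypothetical enum; the first element, 'info', is the default value."""


# Instances are memoized singletons, so identity comparison works.
assert LogLevel.create() is LogLevel('info')

# none_is_default=True maps a None value (e.g. an option that was never set)
# to the default variant instead of raising EnumVariantSelectionError.
assert LogLevel.create(None, none_is_default=True) is LogLevel('info')


# A hypothetical task or subsystem would declare the option like this:
def register_options(register):
  register_enum_option(register, LogLevel, '--log-level',
                       help='A hypothetical option declared from an enum.')
```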
self.assertIn('llvm', compiler.path_entries[0]) def test_target_level_toolchain_variant_default_llvm(self): diff --git a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py index 92922794a1e..619ac036d31 100644 --- a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py +++ b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py @@ -7,6 +7,7 @@ import glob import os import re +from functools import wraps from zipfile import ZipFile from pants.backend.native.config.environment import Platform @@ -24,6 +25,14 @@ def invoke_pex_for_output(pex_file_to_run): return subprocess.check_output([pex_file_to_run], stderr=subprocess.STDOUT) +def _toolchain_variants(func): + @wraps(func) + def wrapper(*args, **kwargs): + for variant in ToolchainVariant.iterate_enum_variants(): + func(*args, toolchain_variant=variant, **kwargs) + return wrapper + + class CTypesIntegrationTest(PantsRunIntegrationTest): _binary_target_dir = 'testprojects/src/python/python_distribution/ctypes' @@ -38,32 +47,16 @@ class CTypesIntegrationTest(PantsRunIntegrationTest): 'testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags:bin' ) - def test_ctypes_binary_creation(self): + @_toolchain_variants + def test_ctypes_binary_creation(self, toolchain_variant): """Create a python_binary() with all native toolchain variants, and test the result.""" - # TODO: this pattern could be made more ergonomic for `enum()`, along with exhaustiveness - # checking. - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_binary_creation(variant) - - _compiler_names_for_variant = { - 'gnu': ['gcc', 'g++'], - 'llvm': ['clang', 'clang++'], - } - - # All of our toolchains currently use the C++ compiler's filename as argv[0] for the linker. - _linker_names_for_variant = { - 'gnu': ['g++'], - 'llvm': ['clang++'], - } - - def _assert_ctypes_binary_creation(self, toolchain_variant): with temporary_dir() as tmp_dir: pants_run = self.run_pants(command=['binary', self._binary_target], config={ GLOBAL_SCOPE_CONFIG_SECTION: { 'pants_distdir': tmp_dir, }, 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, }) @@ -71,12 +64,23 @@ def _assert_ctypes_binary_creation(self, toolchain_variant): # Check that we have selected the appropriate compilers for our selected toolchain variant, # for both C and C++ compilation. - # TODO(#6866): don't parse info logs for testing! - for compiler_name in self._compiler_names_for_variant[toolchain_variant]: + # TODO(#6866): don't parse info logs for testing! There is a TODO in test_cpp_compile.py + # in the native backend testing to traverse the PATH to find the selected compiler. + compiler_names_to_check = toolchain_variant.resolve_for_enum_variant({ + 'gnu': ['gcc', 'g++'], + 'llvm': ['clang', 'clang++'], + }) + for compiler_name in compiler_names_to_check: self.assertIn("selected compiler exe name: '{}'".format(compiler_name), pants_run.stdout_data) - for linker_name in self._linker_names_for_variant[toolchain_variant]: + # All of our toolchains currently use the C++ compiler's filename as argv[0] for the linker, + # so there is only one name to check. 
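The `_toolchain_variants` decorator above relies on `iterate_enum_variants()` yielding the memoized singleton instances in declaration order. A minimal sketch with a hypothetical enum:

```python
from pants.util.objects import enum


class Fruit(enum(['apple', 'banana'])):
  """A hypothetical enum, used only for illustration."""


# iterate_enum_variants() yields each singleton instance in declaration
# order, which makes exhaustively exercising every variant in a test trivial.
assert [f.value for f in Fruit.iterate_enum_variants()] == ['apple', 'banana']
```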
+ linker_names_to_check = toolchain_variant.resolve_for_enum_variant({ + 'gnu': ['g++'], + 'llvm': ['clang++'], + }) + for linker_name in linker_names_to_check: self.assertIn("selected linker exe name: '{}'".format(linker_name), pants_run.stdout_data) @@ -92,9 +96,9 @@ def _assert_ctypes_binary_creation(self, toolchain_variant): dist_name, dist_version, wheel_platform = name_and_platform(wheel_dist) self.assertEqual(dist_name, 'ctypes_test') - contains_current_platform = Platform.create().resolve_platform_specific({ - 'darwin': lambda: wheel_platform.startswith('macosx'), - 'linux': lambda: wheel_platform.startswith('linux'), + contains_current_platform = Platform.create().resolve_for_enum_variant({ + 'darwin': wheel_platform.startswith('macosx'), + 'linux': wheel_platform.startswith('linux'), }) self.assertTrue(contains_current_platform) @@ -110,16 +114,8 @@ def _assert_ctypes_binary_creation(self, toolchain_variant): binary_run_output = invoke_pex_for_output(pex) self.assertEqual(b'x=3, f(x)=17\n', binary_run_output) - def test_ctypes_native_language_interop(self): - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_interop_with_mock_buildroot(variant) - - _include_not_found_message_for_variant = { - 'gnu': "fatal error: some_math.h: No such file or directory", - 'llvm': "fatal error: 'some_math.h' file not found" - } - - def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): + @_toolchain_variants + def test_ctypes_native_language_interop(self, toolchain_variant): # TODO: consider making this mock_buildroot/run_pants_with_workdir into a # PantsRunIntegrationTest method! with self.mock_buildroot( @@ -138,7 +134,7 @@ def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): # Explicitly set to True (although this is the default). config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, # TODO(#6848): don't make it possible to forget to add the toolchain_variant option! 'native-build-settings': { @@ -148,19 +144,25 @@ def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): workdir=os.path.join(buildroot.new_buildroot, '.pants.d'), build_root=buildroot.new_buildroot) self.assert_failure(pants_binary_strict_deps_failure) - self.assertIn(self._include_not_found_message_for_variant[toolchain_variant], + self.assertIn(toolchain_variant.resolve_for_enum_variant({ + 'gnu': "fatal error: some_math.h: No such file or directory", + 'llvm': "fatal error: 'some_math.h' file not found", + }), pants_binary_strict_deps_failure.stdout_data) # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the # DYLD_LIBRARY_PATH during the 'run' goal somehow. 
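The `attempt_pants_run` logic in the next hunk nests one `resolve_for_enum_variant()` inside another (platform on the outside, toolchain variant on the inside), which works because the inner dispatch is evaluated eagerly and only produces booleans. Extracted as a standalone helper for clarity, a sketch assuming the `Platform` and `ToolchainVariant` enums from this patch:

```python
from pants.backend.native.config.environment import Platform


def should_attempt_run(toolchain_variant):
  """Whether to 'run' the built binary, given a ToolchainVariant instance."""
  # On Linux we can always run; on macOS we can only run llvm-built binaries
  # until #6848 (gcc's libstdc++.so.6.dylib on the runtime library path) is fixed.
  return Platform.create().resolve_for_enum_variant({
    'darwin': toolchain_variant.resolve_for_enum_variant({
      'gnu': False,
      'llvm': True,
    }),
    'linux': True,
  })
```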
- attempt_pants_run = Platform.create().resolve_platform_specific({ - 'darwin': lambda: toolchain_variant != 'gnu', - 'linux': lambda: True, + attempt_pants_run = Platform.create().resolve_for_enum_variant({ + 'darwin': toolchain_variant.resolve_for_enum_variant({ + 'gnu': False, + 'llvm': True, + }), + 'linux': True, }) if attempt_pants_run: pants_run_interop = self.run_pants(['-q', 'run', self._binary_target_with_interop], config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, 'native-build-settings': { 'strict_deps': True, @@ -169,28 +171,25 @@ self.assert_success(pants_run_interop) self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data) - def test_ctypes_third_party_integration(self): - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_third_party_integration(variant) - - def _assert_ctypes_third_party_integration(self, toolchain_variant): + @_toolchain_variants + def test_ctypes_third_party_integration(self, toolchain_variant): pants_binary = self.run_pants(['binary', self._binary_target_with_third_party], config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, }) self.assert_success(pants_binary) # TODO(#6848): this fails when run with gcc on osx as it requires gcc's libstdc++.so.6.dylib to # be available on the runtime library path. - attempt_pants_run = Platform.create().resolve_platform_specific({ - 'darwin': lambda: toolchain_variant != 'gnu', - 'linux': lambda: True, + attempt_pants_run = Platform.create().resolve_for_enum_variant({ + 'darwin': toolchain_variant.resolve_for_enum_variant({'gnu': False, 'llvm': True}), + 'linux': True, }) if attempt_pants_run: pants_run = self.run_pants(['-q', 'run', self._binary_target_with_third_party], config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, }) self.assert_success(pants_run) @@ -220,23 +219,20 @@ def test_pants_native_source_detection_for_local_ctypes_dists_for_current_platfo self.assert_success(pants_run) self.assertIn('x=3, f(x)=17', pants_run.stdout_data) - def test_native_compiler_option_sets_integration(self): + @_toolchain_variants + def test_native_compiler_option_sets_integration(self, toolchain_variant): """Test that native compilation includes extra compiler flags from target definitions. This target uses the ndebug and asdf option sets. If either of these is not present (disabled), this test will fail.
""" - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_third_party_integration(variant) - - def _assert_native_compiler_option_sets_integration(self, toolchain_variant): command = [ 'run', self._binary_target_with_compiler_option_sets ] pants_run = self.run_pants(command=command, config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, 'native-build-step.cpp-compile-settings': { 'compiler_option_sets_enabled_args': { diff --git a/tests/python/pants_test/util/test_objects.py b/tests/python/pants_test/util/test_objects.py index 9bf68ccb21b..6fff0e54aa7 100644 --- a/tests/python/pants_test/util/test_objects.py +++ b/tests/python/pants_test/util/test_objects.py @@ -12,9 +12,11 @@ from future.utils import PY2, PY3, text_type -from pants.util.objects import (Exactly, SubclassesOf, SuperclassesOf, TypeCheckError, - TypeConstraintError, TypedCollection, - TypedDatatypeInstanceConstructionError, datatype, enum) +from pants.util.collections_abc_backport import OrderedDict +from pants.util.objects import (EnumVariantSelectionError, Exactly, SubclassesOf, + SuperclassesOf, TypeCheckError, TypeConstraintError, + TypedCollection, TypedDatatypeInstanceConstructionError, datatype, + enum) from pants_test.test_base import TestBase @@ -564,7 +566,7 @@ def test_instance_with_collection_construction_str_repr(self): def test_instance_construction_errors(self): with self.assertRaises(TypeError) as cm: SomeTypedDatatype(something=3) - expected_msg = "error: in constructor of type SomeTypedDatatype: type check error:\n__new__() got an unexpected keyword argument 'something'" + expected_msg = "type check error in class SomeTypedDatatype: error in namedtuple() base constructor: __new__() got an unexpected keyword argument 'something'" self.assertEqual(str(cm.exception), expected_msg) # not providing all the fields @@ -575,7 +577,7 @@ def test_instance_construction_errors(self): if PY3 else "__new__() takes exactly 2 arguments (1 given)" ) - expected_msg = "error: in constructor of type SomeTypedDatatype: type check error:\n" + expected_msg_ending + expected_msg = "type check error in class SomeTypedDatatype: error in namedtuple() base constructor: {}".format(expected_msg_ending) self.assertEqual(str(cm.exception), expected_msg) # unrecognized fields @@ -586,20 +588,20 @@ def test_instance_construction_errors(self): if PY3 else "__new__() takes exactly 2 arguments (3 given)" ) - expected_msg = "error: in constructor of type SomeTypedDatatype: type check error:\n" + expected_msg_ending + expected_msg = "type check error in class SomeTypedDatatype: error in namedtuple() base constructor: {}".format(expected_msg_ending) self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypedDatatypeInstanceConstructionError) as cm: CamelCaseWrapper(nonneg_int=3) expected_msg = ( - """error: in constructor of type CamelCaseWrapper: type check error: + """type check error in class CamelCaseWrapper: errors type checking constructor arguments: field 'nonneg_int' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly(NonNegativeInt).""") self.assertEqual(str(cm.exception), expected_msg) # test that kwargs with keywords that aren't field names fail the same way with self.assertRaises(TypeError) as cm: CamelCaseWrapper(4, a=3) - expected_msg = "error: in constructor of type CamelCaseWrapper: type check error:\n__new__() got an unexpected keyword argument 'a'" + expected_msg = "type check error in class 
CamelCaseWrapper: error in namedtuple() base constructor: __new__() got an unexpected keyword argument 'a'" self.assertEqual(str(cm.exception), expected_msg) def test_type_check_errors(self): @@ -607,7 +609,7 @@ def test_type_check_errors(self): with self.assertRaises(TypeCheckError) as cm: SomeTypedDatatype([]) expected_msg = ( - """error: in constructor of type SomeTypedDatatype: type check error: + """type check error in class SomeTypedDatatype: errors type checking constructor arguments: field 'val' was invalid: value [] (with type 'list') must satisfy this type constraint: Exactly(int).""") self.assertEqual(str(cm.exception), expected_msg) @@ -616,7 +618,7 @@ def test_type_check_errors(self): AnotherTypedDatatype(text_type('correct'), text_type('should be list')) def compare_str(unicode_type_name, include_unicode=False): expected_message = ( - """error: in constructor of type AnotherTypedDatatype: type check error: + """type check error in class AnotherTypedDatatype: errors type checking constructor arguments: field 'elements' was invalid: value {unicode_literal}'should be list' (with type '{type_name}') must satisfy this type constraint: Exactly(list).""" .format(type_name=unicode_type_name, unicode_literal='u' if include_unicode else '')) self.assertEqual(str(cm.exception), expected_message) @@ -630,7 +632,7 @@ def compare_str(unicode_type_name, include_unicode=False): AnotherTypedDatatype(3, text_type('should be list')) def compare_str(unicode_type_name, include_unicode=False): expected_message = ( - """error: in constructor of type AnotherTypedDatatype: type check error: + """type check error in class AnotherTypedDatatype: errors type checking constructor arguments: field 'string' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly({type_name}). field 'elements' was invalid: value {unicode_literal}'should be list' (with type '{type_name}') must satisfy this type constraint: Exactly(list).""" .format(type_name=unicode_type_name, unicode_literal='u' if include_unicode else '')) @@ -644,7 +646,7 @@ def compare_str(unicode_type_name, include_unicode=False): NonNegativeInt(text_type('asdf')) def compare_str(unicode_type_name, include_unicode=False): expected_message = ( - """error: in constructor of type NonNegativeInt: type check error: + """type check error in class NonNegativeInt: errors type checking constructor arguments: field 'an_int' was invalid: value {unicode_literal}'asdf' (with type '{type_name}') must satisfy this type constraint: Exactly(int).""" .format(type_name=unicode_type_name, unicode_literal='u' if include_unicode else '')) self.assertEqual(str(cm.exception), expected_message) @@ -655,31 +657,28 @@ def compare_str(unicode_type_name, include_unicode=False): with self.assertRaises(TypeCheckError) as cm: NonNegativeInt(-3) - expected_msg = ( - """error: in constructor of type NonNegativeInt: type check error: -value is negative: -3.""") + expected_msg = "type check error in class NonNegativeInt: value is negative: -3." 
self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypeCheckError) as cm: WithSubclassTypeConstraint(3) expected_msg = ( - """error: in constructor of type WithSubclassTypeConstraint: type check error: + """type check error in class WithSubclassTypeConstraint: errors type checking constructor arguments: field 'some_value' was invalid: value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(SomeBaseClass).""") self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypeCheckError) as cm: WithCollectionTypeConstraint(3) expected_msg = """\ -error: in constructor of type WithCollectionTypeConstraint: type check error: +type check error in class WithCollectionTypeConstraint: errors type checking constructor arguments: field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)): value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(Iterable).""" self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypeCheckError) as cm: WithCollectionTypeConstraint([3, "asdf"]) expected_msg = """\ -error: in constructor of type WithCollectionTypeConstraint: type check error: -field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'asdf']: value {u}'asdf' (with type '{string_type}') must satisfy this type constraint: Exactly(int).\ -""".format(u='u' if PY2 else '', string_type='unicode' if PY2 else 'str') +type check error in class WithCollectionTypeConstraint: errors type checking constructor arguments: +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, u'asdf']: value u'asdf' (with type 'unicode') must satisfy this type constraint: Exactly(int).""".format(u='u' if PY2 else '', string_type='unicode' if PY2 else 'str') self.assertEqual(str(cm.exception), expected_msg) def test_copy(self): @@ -696,21 +695,20 @@ def test_copy_failure(self): with self.assertRaises(TypeCheckError) as cm: obj.copy(nonexistent_field=3) expected_msg = ( - """error: in constructor of type AnotherTypedDatatype: type check error: -__new__() got an unexpected keyword argument 'nonexistent_field'""") + """type check error in class AnotherTypedDatatype: error in namedtuple() base constructor: __new__() got an unexpected keyword argument 'nonexistent_field'""") self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypeCheckError) as cm: obj.copy(elements=3) expected_msg = ( - """error: in constructor of type AnotherTypedDatatype: type check error: + """type check error in class AnotherTypedDatatype: errors type checking constructor arguments: field 'elements' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly(list).""") self.assertEqual(str(cm.exception), expected_msg) def test_enum_class_creation_errors(self): expected_rx = re.escape( "When converting all_values ([1, 2, 3, 1]) to a set, at least one duplicate " - "was detected. The unique elements of all_values were: OrderedSet([1, 2, 3]).") + "was detected. 
The unique elements of all_values were: [1, 2, 3].") with self.assertRaisesRegexp(ValueError, expected_rx): class DuplicateAllowedValues(enum('x', [1, 2, 3, 1])): pass @@ -718,20 +716,71 @@ def test_enum_instance_creation(self): self.assertEqual(1, SomeEnum.create().x) self.assertEqual(2, SomeEnum.create(2).x) self.assertEqual(1, SomeEnum(1).x) - self.assertEqual(2, SomeEnum(x=2).x) def test_enum_instance_creation_errors(self): expected_rx = re.escape( - "Value 3 for 'x' must be one of: OrderedSet([1, 2]).") - with self.assertRaisesRegexp(TypeCheckError, expected_rx): + "Value 3 for 'x' must be one of: [1, 2].") + with self.assertRaisesRegexp(EnumVariantSelectionError, expected_rx): SomeEnum.create(3) - with self.assertRaisesRegexp(TypeCheckError, expected_rx): + with self.assertRaisesRegexp(EnumVariantSelectionError, expected_rx): SomeEnum(3) - with self.assertRaisesRegexp(TypeCheckError, expected_rx): + + # Specifying the value by keyword argument is not allowed. + with self.assertRaisesRegexp(TypeError, re.escape("__new__() got an unexpected keyword argument 'x'")): SomeEnum(x=3) + # Test that None is not used as the default unless none_is_default=True. + with self.assertRaisesRegexp(EnumVariantSelectionError, re.escape( + "Value None for 'x' must be one of: [1, 2]." + )): + SomeEnum.create(None) + self.assertEqual(1, SomeEnum.create(None, none_is_default=True).x) + expected_rx_falsy_value = re.escape( - "Value {}'' for 'x' must be one of: OrderedSet([1, 2])." + "Value {}'' for 'x' must be one of: [1, 2]." .format('u' if PY2 else '')) - with self.assertRaisesRegexp(TypeCheckError, expected_rx_falsy_value): - SomeEnum(x='') + with self.assertRaisesRegexp(EnumVariantSelectionError, expected_rx_falsy_value): + SomeEnum('') + + def test_enum_resolve_variant(self): + one_enum_instance = SomeEnum(1) + two_enum_instance = SomeEnum(2) + self.assertEqual(3, one_enum_instance.resolve_for_enum_variant({ + 1: 3, + 2: 4, + })) + self.assertEqual(4, two_enum_instance.resolve_for_enum_variant({ + 1: 3, + 2: 4, + })) + + # Test that an unrecognized variant raises an error. + with self.assertRaisesRegexp(EnumVariantSelectionError, re.escape("""\ +type check error in class SomeEnum: pattern matching must have exactly the keys [1, 2] (was: [1, 2, 3])""", + )): + one_enum_instance.resolve_for_enum_variant({ + 1: 3, + 2: 4, + 3: 5, + }) + + # Test that not providing all the variants raises an error. + with self.assertRaisesRegexp(EnumVariantSelectionError, re.escape("""\ +type check error in class SomeEnum: pattern matching must have exactly the keys [1, 2] (was: [1])""")): + one_enum_instance.resolve_for_enum_variant({ + 1: 3, + }) + + # Test that the ordering of the values in the enum constructor is not relevant for testing + # whether all variants are provided. + class OutOfOrderEnum(enum([2, 1, 3])): pass + two_out_of_order_instance = OutOfOrderEnum(2) + # This OrderedDict mapping is in a different order than in the enum constructor. This test means + # we can rely on providing simply a literal dict to resolve_for_enum_variant() and not worry + # that the dict ordering will cause an error. 
+ letter = two_out_of_order_instance.resolve_for_enum_variant(OrderedDict([ + (1, 'b'), + (2, 'a'), + (3, 'c'), + ])) + self.assertEqual(letter, 'a') From e4456fd16de322a06088f45b19ac3b809c055579 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 12 Feb 2019 18:09:35 -0800 Subject: [PATCH 4/8] fix expected pytest output for pytest integration after pinning to 3.0.7 (#7240) ### Problem #7238 attempted to fix an upstream pytest issue (and therefore unbreak our CI) by pinning the default pytest version in our pytest subsystem to `pytest==3.0.7`. This worked, but broke a few of our other tests which relied on specific pytest output, and master is broken now (sorry!). I also hastily merged #7226, which introduced another test failure, which I have fixed. These are the only failing tests, and these all now pass locally on my laptop. ### Solution - Fix expected pytest output in pytest runner testing. ### Result I think it's still a good idea to string match pytest output unless we suddenly have to change pytest versions drastically like this again. --- .../python/pants_test/rules/test_test_integration.py | 12 ++++++------ tests/python/pants_test/util/test_objects.py | 9 ++++----- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/python/pants_test/rules/test_test_integration.py b/tests/python/pants_test/rules/test_test_integration.py index 913752de8dc..60127dadf22 100644 --- a/tests/python/pants_test/rules/test_test_integration.py +++ b/tests/python/pants_test/rules/test_test_integration.py @@ -74,7 +74,7 @@ def test_passing_python_test(self): plugins: SOME_TEXT collected 1 items -testprojects/tests/python/pants/dummies/test_pass.py . [100%] +testprojects/tests/python/pants/dummies/test_pass.py . =========================== 1 passed in SOME_TEXT =========================== @@ -94,7 +94,7 @@ def test_failing_python_test(self): plugins: SOME_TEXT collected 1 items -testprojects/tests/python/pants/dummies/test_fail.py F [100%] +testprojects/tests/python/pants/dummies/test_fail.py F =================================== FAILURES =================================== __________________________________ test_fail ___________________________________ @@ -122,7 +122,7 @@ def test_source_dep(self): plugins: SOME_TEXT collected 1 items -testprojects/tests/python/pants/dummies/test_with_source_dep.py . [100%] +testprojects/tests/python/pants/dummies/test_with_source_dep.py . =========================== 1 passed in SOME_TEXT =========================== @@ -141,7 +141,7 @@ def test_thirdparty_dep(self): plugins: SOME_TEXT collected 1 items -testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py . [100%] +testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py . =========================== 1 passed in SOME_TEXT =========================== @@ -162,7 +162,7 @@ def test_mixed_python_tests(self): plugins: SOME_TEXT collected 1 items -testprojects/tests/python/pants/dummies/test_fail.py F [100%] +testprojects/tests/python/pants/dummies/test_fail.py F =================================== FAILURES =================================== __________________________________ test_fail ___________________________________ @@ -179,7 +179,7 @@ def test_fail(): plugins: SOME_TEXT collected 1 items -testprojects/tests/python/pants/dummies/test_pass.py . [100%] +testprojects/tests/python/pants/dummies/test_pass.py . 
=========================== 1 passed in SOME_TEXT =========================== diff --git a/tests/python/pants_test/util/test_objects.py b/tests/python/pants_test/util/test_objects.py index 6fff0e54aa7..4813e0cd5cf 100644 --- a/tests/python/pants_test/util/test_objects.py +++ b/tests/python/pants_test/util/test_objects.py @@ -13,10 +13,9 @@ from future.utils import PY2, PY3, text_type from pants.util.collections_abc_backport import OrderedDict -from pants.util.objects import (EnumVariantSelectionError, Exactly, SubclassesOf, - SuperclassesOf, TypeCheckError, TypeConstraintError, - TypedCollection, TypedDatatypeInstanceConstructionError, datatype, - enum) +from pants.util.objects import (EnumVariantSelectionError, Exactly, SubclassesOf, SuperclassesOf, + TypeCheckError, TypeConstraintError, TypedCollection, + TypedDatatypeInstanceConstructionError, datatype, enum) from pants_test.test_base import TestBase @@ -678,7 +677,7 @@ def compare_str(unicode_type_name, include_unicode=False): WithCollectionTypeConstraint([3, "asdf"]) expected_msg = """\ type check error in class WithCollectionTypeConstraint: errors type checking constructor arguments: -field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, u'asdf']: value u'asdf' (with type 'unicode') must satisfy this type constraint: Exactly(int).""".format(u='u' if PY2 else '', string_type='unicode' if PY2 else 'str') +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'asdf']: value {u}'asdf' (with type '{string_type}') must satisfy this type constraint: Exactly(int).""".format(u='u' if PY2 else '', string_type='unicode' if PY2 else 'str') self.assertEqual(str(cm.exception), expected_msg) def test_copy(self): From bc0536c86c41219d5f3aa1f19fea04e7c2b777ef Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Thu, 14 Feb 2019 13:59:26 -0800 Subject: [PATCH 5/8] Remove deprecated test classes (#7243) ### Problem `BaseTest` and the v1-aware `TaskTestBase` are long deprecated. Additionally, the `GraphTest` classes used v2 APIs that existed before `TestBase` came around. ### Solution Delete deprecated classes, and port `GraphTest` to `TestBase`. 
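For reference, a `GraphTest` case ported to `TestBase` might look like the following sketch (the target paths and test name are hypothetical, and `TestBase` is assumed to expose the same `make_target` helper as the deleted `BaseTest`):

```python
from pants_test.test_base import TestBase


class ExampleGraphTest(TestBase):

  def test_dependency_injection(self):
    # make_target() creates a target and injects it into the test's build graph.
    a = self.make_target('src/example:a')
    b = self.make_target('src/example:b', dependencies=[a])
    self.assertEqual([a], list(b.dependencies))
```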
--- .../testproject/buildfile_path/BUILD | 3 - .../org/pantsbuild/testproject/ordering/BUILD | 11 - .../org/pantsbuild/testproject/ordering/a | 0 .../org/pantsbuild/testproject/ordering/b | 0 .../org/pantsbuild/testproject/ordering/d | 0 .../org/pantsbuild/testproject/ordering/i | 0 .../org/pantsbuild/testproject/ordering/l | 0 .../org/pantsbuild/testproject/ordering/n | 0 .../org/pantsbuild/testproject/ordering/p | 0 .../org/pantsbuild/testproject/ordering/s | 0 .../org/pantsbuild/testproject/ordering/t | 0 .../org/pantsbuild/testproject/ordering/u | 0 tests/python/pants_test/BUILD | 25 +- .../pants_test/backend/python/tasks/BUILD | 1 - .../backend/python/tasks/native/BUILD | 1 - tests/python/pants_test/base_test.py | 504 ------------------ tests/python/pants_test/engine/legacy/BUILD | 1 + .../pants_test/engine/legacy/test_graph.py | 186 ++----- .../engine/legacy/test_graph_integration.py | 8 + tests/python/pants_test/tasks/BUILD | 14 - .../python/pants_test/tasks/task_test_base.py | 305 ----------- tests/python/pants_test/test_base.py | 14 +- 22 files changed, 52 insertions(+), 1021 deletions(-) delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/a delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/b delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/d delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/i delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/l delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/n delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/p delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/s delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/t delete mode 100644 testprojects/src/resources/org/pantsbuild/testproject/ordering/u delete mode 100644 tests/python/pants_test/base_test.py delete mode 100644 tests/python/pants_test/tasks/task_test_base.py diff --git a/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD b/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD index 861ff069618..7e66776349a 100644 --- a/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD +++ b/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD @@ -1,6 +1,3 @@ target( - dependencies=[ - 'testprojects/src/resources/org/pantsbuild/testproject/ordering:literal' - ], description='''This target exists at path {}.'''.format(buildfile_path()), ) diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD b/testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD deleted file mode 100644 index af2b0e8d07f..00000000000 --- a/testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD +++ /dev/null @@ -1,11 +0,0 @@ -SOURCES=['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd', 'p', 'a', 'n', 't', 's'] - -resources( - name='literal', - sources=SOURCES, -) - -resources( - name='globs', - sources=globs(*SOURCES), -) diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/a b/testprojects/src/resources/org/pantsbuild/testproject/ordering/a deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/b 
b/testprojects/src/resources/org/pantsbuild/testproject/ordering/b deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/d b/testprojects/src/resources/org/pantsbuild/testproject/ordering/d deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/i b/testprojects/src/resources/org/pantsbuild/testproject/ordering/i deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/l b/testprojects/src/resources/org/pantsbuild/testproject/ordering/l deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/n b/testprojects/src/resources/org/pantsbuild/testproject/ordering/n deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/p b/testprojects/src/resources/org/pantsbuild/testproject/ordering/p deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/s b/testprojects/src/resources/org/pantsbuild/testproject/ordering/s deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/t b/testprojects/src/resources/org/pantsbuild/testproject/ordering/t deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/u b/testprojects/src/resources/org/pantsbuild/testproject/ordering/u deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/python/pants_test/BUILD b/tests/python/pants_test/BUILD index fb9a4961b25..af7bd0b8845 100644 --- a/tests/python/pants_test/BUILD +++ b/tests/python/pants_test/BUILD @@ -5,7 +5,6 @@ python_library( name='test_infra', dependencies=[ '3rdparty/python:future', - ':base_test', 'tests/python/pants_test:int-test-for-export', 'tests/python/pants_test:test_base', 'tests/python/pants_test/jvm:jar_task_test_base', @@ -24,28 +23,6 @@ python_library( ) ) -python_library( - name = 'base_test', - sources = ['base_test.py'], - dependencies = [ - '3rdparty/python:future', - 'src/python/pants/base:build_file', - 'src/python/pants/base:build_root', - 'src/python/pants/base:cmd_line_spec_parser', - 'src/python/pants/base:deprecated', - 'src/python/pants/base:exceptions', - 'src/python/pants/base:project_tree', - 'src/python/pants/build_graph', - 'src/python/pants/init', - 'src/python/pants/source', - 'src/python/pants/subsystem', - 'src/python/pants/task', - 'src/python/pants/util:dirutil', - 'tests/python/pants_test/base:context_utils', - 'tests/python/pants_test/option/util', - ] -) - python_library( name = 'int-test-for-export', sources = [ @@ -119,7 +96,7 @@ python_tests( name = 'test_maven_layout', sources = ['test_maven_layout.py'], dependencies = [ - ':base_test', + ':test_base', 'src/python/pants/backend/jvm/subsystems:junit', 'src/python/pants/build_graph', 'src/python/pants/source', diff --git a/tests/python/pants_test/backend/python/tasks/BUILD b/tests/python/pants_test/backend/python/tasks/BUILD index 072b3380e38..cf8ef3dcd88 100644 --- a/tests/python/pants_test/backend/python/tasks/BUILD +++ b/tests/python/pants_test/backend/python/tasks/BUILD @@ -54,7 +54,6 @@ python_tests( 'tests/python/pants_test/backend/python/tasks/util', 'tests/python/pants_test/engine:scheduler_test_base', 
'tests/python/pants_test/subsystem:subsystem_utils', - 'tests/python/pants_test/tasks:task_test_base', 'tests/python/pants_test:task_test_base', ], timeout=600 diff --git a/tests/python/pants_test/backend/python/tasks/native/BUILD b/tests/python/pants_test/backend/python/tasks/native/BUILD index 7bbccd25921..278a8f16ecd 100644 --- a/tests/python/pants_test/backend/python/tasks/native/BUILD +++ b/tests/python/pants_test/backend/python/tasks/native/BUILD @@ -33,7 +33,6 @@ python_tests( 'tests/python/pants_test/backend/python/tasks/util', 'tests/python/pants_test/engine:scheduler_test_base', 'tests/python/pants_test/subsystem:subsystem_utils', - 'tests/python/pants_test/tasks:task_test_base', 'tests/python/pants_test:task_test_base', ], tags={'platform_specific_behavior'}, diff --git a/tests/python/pants_test/base_test.py b/tests/python/pants_test/base_test.py deleted file mode 100644 index 4fef0bc5cee..00000000000 --- a/tests/python/pants_test/base_test.py +++ /dev/null @@ -1,504 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import absolute_import, division, print_function, unicode_literals - -import itertools -import logging -import os -import unittest -from builtins import object, open -from collections import defaultdict -from contextlib import contextmanager -from tempfile import mkdtemp -from textwrap import dedent - -from future.utils import PY2 - -from pants.base.build_file import BuildFile -from pants.base.build_root import BuildRoot -from pants.base.cmd_line_spec_parser import CmdLineSpecParser -from pants.base.deprecated import deprecated_module -from pants.base.exceptions import TaskError -from pants.base.file_system_project_tree import FileSystemProjectTree -from pants.build_graph.address import Address -from pants.build_graph.build_configuration import BuildConfiguration -from pants.build_graph.build_file_address_mapper import BuildFileAddressMapper -from pants.build_graph.build_file_aliases import BuildFileAliases -from pants.build_graph.build_file_parser import BuildFileParser -from pants.build_graph.mutable_build_graph import MutableBuildGraph -from pants.build_graph.target import Target -from pants.init.util import clean_global_runtime_state -from pants.option.options_bootstrapper import OptionsBootstrapper -from pants.option.scope import GLOBAL_SCOPE -from pants.source.source_root import SourceRootConfig -from pants.subsystem.subsystem import Subsystem -from pants.task.goal_options_mixin import GoalOptionsMixin -from pants.util.dirutil import safe_mkdir, safe_open, safe_rmtree -from pants_test.base.context_utils import create_context_from_options -from pants_test.option.util.fakes import create_options_for_optionables - - -# Fix this during a dev release -deprecated_module('1.13.0.dev2', 'Use pants_test.test_base instead') - - -class TestGenerator(object): - """A mixin that facilitates test generation at runtime.""" - - @classmethod - def generate_tests(cls): - """Generate tests for a given class. - - This should be called against the composing class in it's defining module, e.g. - - class ThingTest(TestGenerator): - ... - - ThingTest.generate_tests() - - """ - raise NotImplementedError() - - @classmethod - def add_test(cls, method_name, method): - """A classmethod that adds dynamic test methods to a given class. - - :param string method_name: The name of the test method (e.g. `test_thing_x`). 
- :param callable method: A callable representing the method. This should take a 'self' argument - as its first parameter for instance method binding. - """ - assert not hasattr(cls, method_name), ( - 'a test with name `{}` already exists on `{}`!'.format(method_name, cls.__name__) - ) - assert method_name.startswith('test_'), '{} is not a valid test name!'.format(method_name) - setattr(cls, method_name, method) - - -class BaseTest(unittest.TestCase): - """A baseclass useful for tests requiring a temporary buildroot. - - :API: public - - """ - - def build_path(self, relpath): - """Returns the canonical BUILD file path for the given relative build path. - - :API: public - """ - if os.path.basename(relpath).startswith('BUILD'): - return relpath - else: - return os.path.join(relpath, 'BUILD') - - def create_dir(self, relpath): - """Creates a directory under the buildroot. - - :API: public - - relpath: The relative path to the directory from the build root. - """ - path = os.path.join(self.build_root, relpath) - safe_mkdir(path) - return path - - def create_workdir_dir(self, relpath): - """Creates a directory under the work directory. - - :API: public - - relpath: The relative path to the directory from the work directory. - """ - path = os.path.join(self.pants_workdir, relpath) - safe_mkdir(path) - return path - - def create_file(self, relpath, contents='', mode='w'): - """Writes to a file under the buildroot. - - :API: public - - relpath: The relative path to the file from the build root. - contents: A string containing the contents of the file - '' by default.. - mode: The mode to write to the file in - over-write by default. - """ - path = os.path.join(self.build_root, relpath) - with safe_open(path, mode=mode) as fp: - fp.write(contents) - return path - - def create_workdir_file(self, relpath, contents='', mode='w'): - """Writes to a file under the work directory. - - :API: public - - relpath: The relative path to the file from the work directory. - contents: A string containing the contents of the file - '' by default.. - mode: The mode to write to the file in - over-write by default. - """ - path = os.path.join(self.pants_workdir, relpath) - with safe_open(path, mode=mode) as fp: - fp.write(contents) - return path - - def add_to_build_file(self, relpath, target): - """Adds the given target specification to the BUILD file at relpath. - - :API: public - - relpath: The relative path to the BUILD file from the build root. - target: A string containing the target definition as it would appear in a BUILD file. - """ - self.create_file(self.build_path(relpath), target, mode='a') - return BuildFile(self.address_mapper._project_tree, relpath=self.build_path(relpath)) - - def make_target(self, - spec='', - target_type=Target, - dependencies=None, - derived_from=None, - synthetic=False, - **kwargs): - """Creates a target and injects it into the test's build graph. - - :API: public - - :param string spec: The target address spec that locates this target. - :param type target_type: The concrete target subclass to create this new target from. - :param list dependencies: A list of target instances this new target depends on. - :param derived_from: The target this new target was derived from. 
- :type derived_from: :class:`pants.build_graph.target.Target` - """ - address = Address.parse(spec) - target = target_type(name=address.target_name, - address=address, - build_graph=self.build_graph, - **kwargs) - dependencies = dependencies or [] - - self.build_graph.apply_injectables([target]) - self.build_graph.inject_target(target, - dependencies=[dep.address for dep in dependencies], - derived_from=derived_from, - synthetic=synthetic) - - # TODO(John Sirois): This re-creates a little bit too much work done by the BuildGraph. - # Fixup the BuildGraph to deal with non BuildFileAddresses better and just leverage it. - traversables = [target.compute_dependency_specs(payload=target.payload)] - - for dependency_spec in itertools.chain(*traversables): - dependency_address = Address.parse(dependency_spec, relative_to=address.spec_path) - dependency_target = self.build_graph.get_target(dependency_address) - if not dependency_target: - raise ValueError('Tests must make targets for dependency specs ahead of them ' - 'being traversed, {} tried to traverse {} which does not exist.' - .format(target, dependency_address)) - if dependency_target not in target.dependencies: - self.build_graph.inject_dependency(dependent=target.address, - dependency=dependency_address) - target.mark_transitive_invalidation_hash_dirty() - - return target - - @property - def alias_groups(self): - """ - :API: public - """ - return BuildFileAliases(targets={'target': Target}) - - @property - def build_ignore_patterns(self): - """ - :API: public - """ - return None - - def setUp(self): - """ - :API: public - """ - super(BaseTest, self).setUp() - # Avoid resetting the Runtracker here, as that is specific to fork'd process cleanup. - clean_global_runtime_state(reset_subsystem=True) - - self.real_build_root = BuildRoot().path - - self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT')) - self.subprocess_dir = os.path.join(self.build_root, '.pids') - self.addCleanup(safe_rmtree, self.build_root) - - self.pants_workdir = os.path.join(self.build_root, '.pants.d') - safe_mkdir(self.pants_workdir) - - self.options = defaultdict(dict) # scope -> key-value mapping. - self.options[GLOBAL_SCOPE] = { - 'pants_workdir': self.pants_workdir, - 'pants_supportdir': os.path.join(self.build_root, 'build-support'), - 'pants_distdir': os.path.join(self.build_root, 'dist'), - 'pants_configdir': os.path.join(self.build_root, 'config'), - 'pants_subprocessdir': self.subprocess_dir, - 'cache_key_gen_version': '0-test', - } - self.options['cache'] = { - 'read_from': [], - 'write_to': [], - } - - BuildRoot().path = self.build_root - self.addCleanup(BuildRoot().reset) - - self._build_configuration = BuildConfiguration() - self._build_configuration.register_aliases(self.alias_groups) - self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root) - self.project_tree = FileSystemProjectTree(self.build_root) - self.reset_build_graph() - - def buildroot_files(self, relpath=None): - """Returns the set of all files under the test build root. - - :API: public - - :param string relpath: If supplied, only collect files from this subtree. - :returns: All file paths found. 
- :rtype: set - """ - def scan(): - for root, dirs, files in os.walk(os.path.join(self.build_root, relpath or '')): - for f in files: - yield os.path.relpath(os.path.join(root, f), self.build_root) - return set(scan()) - - def reset_build_graph(self): - """Start over with a fresh build graph with no targets in it.""" - self.address_mapper = BuildFileAddressMapper(self.build_file_parser, self.project_tree, - build_ignore_patterns=self.build_ignore_patterns) - self.build_graph = MutableBuildGraph(address_mapper=self.address_mapper) - - def set_options_for_scope(self, scope, **kwargs): - self.options[scope].update(kwargs) - - def context(self, for_task_types=None, for_subsystems=None, options=None, - target_roots=None, console_outstream=None, workspace=None, - scheduler=None, **kwargs): - """ - :API: public - - :param dict **kwargs: keyword arguments passed in to `create_options_for_optionables`. - """ - # Many tests use source root functionality via the SourceRootConfig.global_instance(). - # (typically accessed via Target.target_base), so we always set it up, for convenience. - for_subsystems = set(for_subsystems or ()) - for subsystem in for_subsystems: - if subsystem.options_scope is None: - raise TaskError('You must set a scope on your subsystem type before using it in tests.') - - optionables = {SourceRootConfig} | self._build_configuration.optionables() | for_subsystems - - for_task_types = for_task_types or () - for task_type in for_task_types: - scope = task_type.options_scope - if scope is None: - raise TaskError('You must set a scope on your task type before using it in tests.') - optionables.add(task_type) - # If task is expected to inherit goal-level options, register those directly on the task, - # by subclassing the goal options registrar and settings its scope to the task scope. - if issubclass(task_type, GoalOptionsMixin): - subclass_name = 'test_{}_{}_{}'.format( - task_type.__name__, task_type.goal_options_registrar_cls.options_scope, - task_type.options_scope) - if PY2: - subclass_name = subclass_name.encode('utf-8') - optionables.add(type(subclass_name, (task_type.goal_options_registrar_cls, ), - {'options_scope': task_type.options_scope})) - - # Now expand to all deps. - all_optionables = set() - for optionable in optionables: - all_optionables.update(si.optionable_cls for si in optionable.known_scope_infos()) - - # Now default the option values and override with any caller-specified values. - # TODO(benjy): Get rid of the options arg, and require tests to call set_options. - options = options.copy() if options else {} - for s, opts in self.options.items(): - scoped_opts = options.setdefault(s, {}) - scoped_opts.update(opts) - - fake_options = create_options_for_optionables( - all_optionables, options=options, **kwargs) - - Subsystem.reset(reset_options=True) - Subsystem.set_options(fake_options) - - context = create_context_from_options(fake_options, - target_roots=target_roots, - build_graph=self.build_graph, - build_file_parser=self.build_file_parser, - address_mapper=self.address_mapper, - console_outstream=console_outstream, - workspace=workspace, - scheduler=scheduler) - return context - - def tearDown(self): - """ - :API: public - """ - super(BaseTest, self).tearDown() - BuildFile.clear_cache() - Subsystem.reset() - - def target(self, spec): - """Resolves the given target address to a Target object. - - :API: public - - address: The BUILD target address to resolve. - - Returns the corresponding Target or else None if the address does not point to a defined Target. 
- """ - address = Address.parse(spec) - self.build_graph.inject_address_closure(address) - return self.build_graph.get_target(address) - - def targets(self, spec): - """Resolves a target spec to one or more Target objects. - - :API: public - - spec: Either BUILD target address or else a target glob using the siblings ':' or - descendants '::' suffixes. - - Returns the set of all Targets found. - """ - - spec = CmdLineSpecParser(self.build_root).parse_spec(spec) - addresses = list(self.address_mapper.scan_specs([spec])) - for address in addresses: - self.build_graph.inject_address_closure(address) - targets = [self.build_graph.get_target(address) for address in addresses] - return targets - - def create_files(self, path, files): - """Writes to a file under the buildroot with contents same as file name. - - :API: public - - path: The relative path to the file from the build root. - files: List of file names. - """ - for f in files: - self.create_file(os.path.join(path, f), contents=f) - - def create_library(self, path, target_type, name, sources=None, **kwargs): - """Creates a library target of given type at the BUILD file at path with sources - - :API: public - - path: The relative path to the BUILD file from the build root. - target_type: valid pants target type. - name: Name of the library target. - sources: List of source file at the path relative to path. - **kwargs: Optional attributes that can be set for any library target. - Currently it includes support for resources, java_sources, provides - and dependencies. - """ - if sources: - self.create_files(path, sources) - self.add_to_build_file(path, dedent(''' - %(target_type)s(name='%(name)s', - %(sources)s - %(java_sources)s - %(provides)s - %(dependencies)s - ) - ''' % dict(target_type=target_type, - name=name, - sources=('sources=%s,' % repr(sources) - if sources else ''), - java_sources=('java_sources=[%s],' - % ','.join('"%s"' % str_target for str_target in kwargs.get('java_sources')) - if 'java_sources' in kwargs else ''), - provides=('provides=%s,' % kwargs.get('provides') - if 'provides' in kwargs else ''), - dependencies=('dependencies=%s,' % kwargs.get('dependencies') - if 'dependencies' in kwargs else ''), - ))) - return self.target('%s:%s' % (path, name)) - - def create_resources(self, path, name, *sources): - """ - :API: public - """ - return self.create_library(path, 'resources', name, sources) - - def assertUnorderedPrefixEqual(self, expected, actual_iter): - """Consumes len(expected) items from the given iter, and asserts that they match, unordered. - - :API: public - """ - actual = list(itertools.islice(actual_iter, len(expected))) - self.assertEqual(sorted(expected), sorted(actual)) - - def assertPrefixEqual(self, expected, actual_iter): - """Consumes len(expected) items from the given iter, and asserts that they match, in order. - - :API: public - """ - self.assertEqual(expected, list(itertools.islice(actual_iter, len(expected)))) - - def assertInFile(self, string, file_path): - """Verifies that a string appears in a file - - :API: public - """ - - with open(file_path, 'r') as f: - content = f.read() - self.assertIn(string, content, '"{}" is not in the file {}:\n{}'.format(string, f.name, content)) - - def get_bootstrap_options(self, cli_options=()): - """Retrieves bootstrap options. - - :param cli_options: An iterable of CLI flags to pass as arguments to `OptionsBootstrapper`. - """ - # Can't parse any options without a pants.ini. 
- self.create_file('pants.ini') - return OptionsBootstrapper.create(args=cli_options).bootstrap_options.for_global_scope() - - class LoggingRecorder(object): - """Simple logging handler to record warnings.""" - - def __init__(self): - self._records = [] - self.level = logging.DEBUG - - def handle(self, record): - self._records.append(record) - - def _messages_for_level(self, levelname): - return ['{}: {}'.format(record.name, record.getMessage()) - for record in self._records if record.levelname == levelname] - - def infos(self): - return self._messages_for_level('INFO') - - def warnings(self): - return self._messages_for_level('WARNING') - - @contextmanager - def captured_logging(self, level=None): - root_logger = logging.getLogger() - - old_level = root_logger.level - root_logger.setLevel(level or logging.NOTSET) - - handler = self.LoggingRecorder() - root_logger.addHandler(handler) - try: - yield handler - finally: - root_logger.setLevel(old_level) - root_logger.removeHandler(handler) diff --git a/tests/python/pants_test/engine/legacy/BUILD b/tests/python/pants_test/engine/legacy/BUILD index 367f31219c2..308bd57441a 100644 --- a/tests/python/pants_test/engine/legacy/BUILD +++ b/tests/python/pants_test/engine/legacy/BUILD @@ -129,6 +129,7 @@ python_tests( 'src/python/pants/engine/legacy:graph', 'src/python/pants/init', 'tests/python/pants_test/engine:util', + 'tests/python/pants_test:test_base' ] ) diff --git a/tests/python/pants_test/engine/legacy/test_graph.py b/tests/python/pants_test/engine/legacy/test_graph.py index 18fb8e997e6..959612a9d63 100644 --- a/tests/python/pants_test/engine/legacy/test_graph.py +++ b/tests/python/pants_test/engine/legacy/test_graph.py @@ -6,23 +6,12 @@ import functools import os -import unittest from builtins import str -from contextlib import contextmanager -import mock - -from pants.build_graph.address import Address from pants.build_graph.address_lookup_error import AddressLookupError from pants.build_graph.build_file_aliases import BuildFileAliases, TargetMacro -from pants.build_graph.target import Target -from pants.init.engine_initializer import EngineInitializer -from pants.init.options_initializer import BuildConfigInitializer -from pants.init.target_roots_calculator import TargetRootsCalculator -from pants.option.options_bootstrapper import OptionsBootstrapper -from pants.subsystem.subsystem import Subsystem -from pants.util.contextutil import temporary_dir -from pants_test.engine.util import init_native +from pants.build_graph.files import Files +from pants_test.test_base import TestBase # Macro that adds the specified tag. 
@@ -32,164 +21,59 @@ def macro(target_cls, tag, parse_context, tags=None, **kwargs): parse_context.create_object(target_cls, tags=tags, **kwargs) -class GraphTestBase(unittest.TestCase): - - _native = init_native() - - def _make_setup_args(self, specs): - options = mock.Mock(target_specs=specs) - options.for_scope.return_value = mock.Mock(diffspec=None, changes_since=None) - options.for_global_scope.return_value = mock.Mock(owner_of=None) - return options - - def _default_build_config(self, options_bootstrapper, build_file_aliases=None): - # TODO: Get default BuildFileAliases by extending BaseTest post - # https://github.com/pantsbuild/pants/issues/4401 - build_config = BuildConfigInitializer.get(options_bootstrapper) - if build_file_aliases: - build_config.register_aliases(build_file_aliases) - return build_config - - @contextmanager - def graph_helper(self, - build_configuration=None, - build_file_imports_behavior='allow', - include_trace_on_error=True, - path_ignore_patterns=None): - - with temporary_dir() as work_dir: - with temporary_dir() as local_store_dir: - path_ignore_patterns = path_ignore_patterns or [] - options_bootstrapper = OptionsBootstrapper.create() - build_config = build_configuration or self._default_build_config(options_bootstrapper) - # TODO: This test should be swapped to using TestBase. - graph_helper = EngineInitializer.setup_legacy_graph_extended( - path_ignore_patterns, - work_dir, - local_store_dir, - build_file_imports_behavior, - options_bootstrapper=options_bootstrapper, - build_configuration=build_config, - native=self._native, - include_trace_on_error=include_trace_on_error - ) - yield graph_helper - - @contextmanager - def open_scheduler(self, specs, build_configuration=None): - with self.graph_helper(build_configuration=build_configuration) as graph_helper: - graph, target_roots = self.create_graph_from_specs(graph_helper, specs) - addresses = tuple(graph.inject_roots_closure(target_roots)) - yield graph, addresses, graph_helper.scheduler.new_session() - - def create_graph_from_specs(self, graph_helper, specs): - Subsystem.reset() - session = graph_helper.new_session() - target_roots = self.create_target_roots(specs, session, session.symbol_table) - graph = session.create_build_graph(target_roots)[0] - return graph, target_roots - - def create_target_roots(self, specs, session, symbol_table): - return TargetRootsCalculator.create(self._make_setup_args(specs), session, symbol_table) - - -class GraphTargetScanFailureTests(GraphTestBase): +class GraphTest(TestBase): + + _TAG = 'tag_added_by_macro' + + @classmethod + def alias_groups(cls): + return super(GraphTest, cls).alias_groups().merge( + BuildFileAliases(targets={ + 'files': Files, + 'tagged_files': TargetMacro.Factory.wrap(functools.partial(macro, Files, cls._TAG), Files), + })) def test_with_missing_target_in_existing_build_file(self): + self.create_library('3rdparty/python', 'target', 'Markdown') + self.create_library('3rdparty/python', 'target', 'Pygments') # When a target is missing, # the suggestions should be in order # and there should only be one copy of the error if tracing is off. - with self.assertRaises(AddressLookupError) as cm: - with self.graph_helper(include_trace_on_error=False) as graph_helper: - self.create_graph_from_specs(graph_helper, ['3rdparty/python:rutabaga']) - self.fail('Expected an exception.') - - error_message = str(cm.exception) expected_message = '"rutabaga" was not found in namespace "3rdparty/python".' 
\ - ' Did you mean one of:\n' \ - ' :Markdown\n' \ - ' :Pygments\n' - self.assertIn(expected_message, error_message) - self.assertTrue(error_message.count(expected_message) == 1) + '.*Did you mean one of:\n' \ + '.*:Markdown\n' \ + '.*:Pygments\n' + with self.assertRaisesRegexp(AddressLookupError, expected_message): + self.targets('3rdparty/python:rutabaga') def test_with_missing_directory_fails(self): with self.assertRaises(AddressLookupError) as cm: - with self.graph_helper() as graph_helper: - self.create_graph_from_specs(graph_helper, ['no-such-path:']) + self.targets('no-such-path:') self.assertIn('Path "no-such-path" does not contain any BUILD files', str(cm.exception)) - def test_with_existing_directory_with_no_build_files_fails(self): - with self.assertRaises(AddressLookupError) as cm: - path_ignore_patterns=[ - # This is a symlink that points out of the build root. - '/build-support/bin/native/src' - ] - with self.graph_helper(path_ignore_patterns=path_ignore_patterns) as graph_helper: - self.create_graph_from_specs(graph_helper, ['build-support/bin::']) - - self.assertIn('does not match any targets.', str(cm.exception)) - - def test_inject_bad_dir(self): - with self.assertRaises(AddressLookupError) as cm: - with self.graph_helper() as graph_helper: - graph, target_roots = self.create_graph_from_specs(graph_helper, ['3rdparty/python:']) - - graph.inject_address_closure(Address('build-support/bin', 'wat')) - - self.assertIn('Path "build-support/bin" does not contain any BUILD files', - str(cm.exception)) - - -class GraphInvalidationTest(GraphTestBase): - def test_invalidate_fsnode(self): # NB: Invalidation is now more directly tested in unit tests in the `graph` crate. - with self.open_scheduler(['3rdparty/python::']) as (_, _, scheduler): - invalidated_count = scheduler.invalidate_files(['3rdparty/python/BUILD']) - self.assertGreater(invalidated_count, 0) + self.create_library('src/example', 'target', 'things') + self.targets('src/example::') + invalidated_count = self.invalidate_for('src/example/BUILD') + self.assertGreater(invalidated_count, 0) - def test_invalidate_fsnode_incremental(self): - # NB: Invalidation is now more directly tested in unit tests in the `graph` crate. - with self.open_scheduler(['//:', '3rdparty/::']) as (_, _, scheduler): - # Invalidate the '3rdparty/python' DirectoryListing, the `3rdparty` DirectoryListing, - # and then the root DirectoryListing by "touching" files/dirs. 
- for filename in ('3rdparty/python/BUILD', '3rdparty/jvm', 'non_existing_file'): - invalidated_count = scheduler.invalidate_files([filename]) - self.assertGreater(invalidated_count, - 0, - 'File {} did not invalidate any Nodes.'.format(filename)) - - def _ordering_test(self, spec, expected_sources=None): - expected_sources = expected_sources or ['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'] - with self.open_scheduler([spec]) as (graph, _, _): - target = graph.get_target(Address.parse(spec)) - sources = [os.path.basename(s) for s in target.sources_relative_to_buildroot()] - self.assertEqual(expected_sources, sources) - - def test_sources_ordering_literal(self): - self._ordering_test('testprojects/src/resources/org/pantsbuild/testproject/ordering:literal') - - def test_sources_ordering_glob(self): - self._ordering_test('testprojects/src/resources/org/pantsbuild/testproject/ordering:globs') + def test_sources_ordering(self): + expected_sources = ['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'] + self.create_library('src/example', 'files', 'things', sources=expected_sources) + + target = self.target('src/example:things') + sources = [os.path.basename(s) for s in target.sources_relative_to_buildroot()] + self.assertEqual(expected_sources, sources) def test_target_macro_override(self): """Tests that we can "wrap" an existing target type with additional functionality. Installs an additional TargetMacro that wraps `target` aliases to add a tag to all definitions. """ - spec = 'testprojects/tests/python/pants/build_parsing:' - - tag = 'tag_added_by_macro' - target_cls = Target - tag_macro = functools.partial(macro, target_cls, tag) - target_symbols = {'target': TargetMacro.Factory.wrap(tag_macro, target_cls)} - - build_config = self._default_build_config(OptionsBootstrapper.create(), BuildFileAliases(targets=target_symbols)) - # Confirm that python_tests in a small directory are marked. - with self.open_scheduler([spec], build_configuration=build_config) as (graph, addresses, _): - self.assertTrue(len(addresses) > 0, 'No targets matched by {}'.format(addresses)) - for address in addresses: - self.assertIn(tag, graph.get_target(address).tags) + files = self.create_library('src/example', 'tagged_files', 'things') + self.assertIn(self._TAG, files.tags) + self.assertEqual(type(files), Files) diff --git a/tests/python/pants_test/engine/legacy/test_graph_integration.py b/tests/python/pants_test/engine/legacy/test_graph_integration.py index 2f9b9b167d3..f517d3953c0 100644 --- a/tests/python/pants_test/engine/legacy/test_graph_integration.py +++ b/tests/python/pants_test/engine/legacy/test_graph_integration.py @@ -138,6 +138,14 @@ def test_existing_bundles(self): self.assert_success(pants_run) self.assertNotIn("WARN]", pants_run.stderr_data) + def test_existing_directory_with_no_build_files_fails(self): + options = [ + '--pants-ignore=+["/build-support/bin/native/src"]', + ] + pants_run = self.run_pants(options + ['list', 'build-support/bin::']) + self.assert_failure(pants_run) + self.assertIn("does not match any targets.", pants_run.stderr_data) + def test_error_message(self): for k in self._ERR_TARGETS: self._list_target_check_error(k) diff --git a/tests/python/pants_test/tasks/BUILD b/tests/python/pants_test/tasks/BUILD index 44cf4c6b7e6..52705df8233 100644 --- a/tests/python/pants_test/tasks/BUILD +++ b/tests/python/pants_test/tasks/BUILD @@ -1,20 +1,6 @@ # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). 
-python_library( - name = 'task_test_base', - sources = ['task_test_base.py'], - dependencies = [ - 'src/python/pants/base:deprecated', - 'src/python/pants/goal:context', - 'src/python/pants/ivy', - 'src/python/pants/task', - 'src/python/pants/util:contextutil', - 'src/python/pants/util:process_handler', - 'tests/python/pants_test:base_test', - ] -) - python_tests( name = 'scalastyle_integration', sources = ['test_scalastyle_integration.py'], diff --git a/tests/python/pants_test/tasks/task_test_base.py b/tests/python/pants_test/tasks/task_test_base.py deleted file mode 100644 index 0a634187aa7..00000000000 --- a/tests/python/pants_test/tasks/task_test_base.py +++ /dev/null @@ -1,305 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import absolute_import, division, print_function, unicode_literals - -import glob -import os -from contextlib import closing, contextmanager -from io import BytesIO - -from future.utils import PY2 - -from pants.base.deprecated import deprecated_module -from pants.goal.goal import Goal -from pants.ivy.bootstrapper import Bootstrapper -from pants.task.console_task import ConsoleTask -from pants.util.contextutil import temporary_dir -from pants.util.process_handler import subprocess -from pants_test.base_test import BaseTest - - -deprecated_module('1.12.0.dev2', 'Use pants_test.TaskTestBase instead') - - -# TODO: Find a better home for this? -def is_exe(name): - result = subprocess.call(['which', name], stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT) - return result == 0 - - -def ensure_cached(task_cls, expected_num_artifacts=None): - """Decorator for a task-executing unit test. Asserts that after running the - decorated test function, the cache for task_cls contains - expected_num_artifacts. - - Uses a new temp dir for the artifact cache, and uses a glob based on the - task's synthesized subtype to find the cache directories within the new temp - dir which were generated by the actions performed within the test method. - - :API: public - - :param task_cls: Class of the task to check the artifact cache - for (e.g. JarCreate). - :param expected_num_artifacts: Expected number of artifacts to be in the - task's cache after running the test. If - unspecified, will assert that the number of - artifacts in the cache is non-zero. - """ - def decorator(test_fn): - def wrapper(self, *args, **kwargs): - with self.cache_check(expected_num_artifacts=expected_num_artifacts): - test_fn(self, *args, **kwargs) - return wrapper - return decorator - - -class TaskTestBase(BaseTest): - """A baseclass useful for testing a single Task type. - - :API: public - """ - - options_scope = 'test_scope' - - @classmethod - def task_type(cls): - """Subclasses must return the type of the Task subclass under test. - - :API: public - """ - raise NotImplementedError() - - def setUp(self): - """ - :API: public - """ - super(TaskTestBase, self).setUp() - self._testing_task_type = self.synthesize_task_subtype(self.task_type(), self.options_scope) - # We locate the workdir below the pants_workdir, which BaseTest locates within the BuildRoot. - # BaseTest cleans this up, so we don't need to. We give it a stable name, so that we can - # use artifact caching to speed up tests. - self._test_workdir = os.path.join(self.pants_workdir, self.task_type().stable_name()) - os.mkdir(self._test_workdir) - # TODO: Push this down to JVM-related tests only? 
Seems wrong to have an ivy-specific - # action in this non-JVM-specific, high-level base class. - Bootstrapper.reset_instance() - - @property - def test_workdir(self): - """ - :API: public - """ - return self._test_workdir - - def synthesize_task_subtype(self, task_type, options_scope): - """Creates a synthetic subclass of the task type. - - Note that passing in a stable options scope will speed up some tests, as the scope may appear - in the paths of tools used by the task, and if these are stable, tests can get artifact - cache hits when bootstrapping these tools. This doesn't hurt test isolation, as we reset - class-level state between each test. - - # TODO: Use the task type directly once we re-do the Task lifecycle. - - :API: public - - :param task_type: The task type to subtype. - :param options_scope: The scope to give options on the generated task type. - :return: A pair (type, options_scope) - """ - subclass_name = 'test_{0}_{1}'.format(task_type.__name__, options_scope) - if PY2: - subclass_name = subclass_name.encode('utf-8') - return type(subclass_name, (task_type,), {'_stable_name': task_type._compute_stable_name(), - 'options_scope': options_scope}) - - def set_options(self, **kwargs): - """ - :API: public - """ - self.set_options_for_scope(self.options_scope, **kwargs) - - def context(self, for_task_types=None, **kwargs): - """ - :API: public - """ - # Add in our task type. - for_task_types = [self._testing_task_type] + (for_task_types or []) - return super(TaskTestBase, self).context(for_task_types=for_task_types, **kwargs) - - def create_task(self, context, workdir=None): - """ - :API: public - """ - if workdir is None: - workdir = self.test_workdir - return self._testing_task_type(context, workdir) - - @contextmanager - def cache_check(self, expected_num_artifacts=None): - """Sets up a temporary artifact cache and checks that the yielded-to code populates it. - - :param expected_num_artifacts: Expected number of artifacts to be in the cache after yielding. - If unspecified, will assert that the number of artifacts in the - cache is non-zero. - """ - with temporary_dir() as artifact_cache: - self.set_options_for_scope('cache.{}'.format(self.options_scope), - write_to=[artifact_cache]) - - yield - - cache_subdir_glob_str = os.path.join(artifact_cache, '*/') - cache_subdirs = glob.glob(cache_subdir_glob_str) - - if expected_num_artifacts == 0: - self.assertEqual(len(cache_subdirs), 0) - return - - self.assertEqual(len(cache_subdirs), 1) - task_cache = cache_subdirs[0] - - num_artifacts = 0 - for (_, _, files) in os.walk(task_cache): - num_artifacts += len(files) - - if expected_num_artifacts is None: - self.assertNotEqual(num_artifacts, 0) - else: - self.assertEqual(num_artifacts, expected_num_artifacts) - - -class ConsoleTaskTestBase(TaskTestBase): - """A base class useful for testing ConsoleTasks. - - :API: public - """ - - def setUp(self): - """ - :API: public - """ - Goal.clear() - super(ConsoleTaskTestBase, self).setUp() - - task_type = self.task_type() - assert issubclass(task_type, ConsoleTask), \ - 'task_type() must return a ConsoleTask subclass, got %s' % task_type - - def execute_task(self, targets=None, options=None): - """Creates a new task and executes it with the given config, command line args and targets. - - :API: public - - :param targets: Optional list of Target objects passed on the command line. - Returns the text output of the task. 
- """ - options = options or {} - with closing(BytesIO()) as output: - self.set_options(**options) - context = self.context(target_roots=targets, console_outstream=output) - task = self.create_task(context) - task.execute() - return output.getvalue() - - def execute_console_task(self, targets=None, extra_targets=None, options=None, - passthru_args=None, workspace=None, scheduler=None): - """Creates a new task and executes it with the given config, command line args and targets. - - :API: public - - :param options: option values. - :param targets: optional list of Target objects passed on the command line. - :param extra_targets: optional list of extra targets in the context in addition to those - passed on the command line. - :param passthru_args: optional list of passthru_args - :param workspace: optional Workspace to pass into the context. - - Returns the list of items returned from invoking the console task's console_output method. - """ - options = options or {} - self.set_options(**options) - context = self.context( - target_roots=targets, - passthru_args=passthru_args, - workspace=workspace, - scheduler=scheduler - ) - return self.execute_console_task_given_context(context, extra_targets=extra_targets) - - def execute_console_task_given_context(self, context, extra_targets=None): - """Creates a new task and executes it with the context and extra targets. - - :API: public - - :param context: The pants run context to use. - :param extra_targets: An optional list of extra targets in the context in addition to those - passed on the command line. - :returns: The list of items returned from invoking the console task's console_output method. - :rtype: list of strings - """ - task = self.create_task(context) - return list(task.console_output(list(task.context.targets()) + list(extra_targets or ()))) - - def assert_entries(self, sep, *output, **kwargs): - """Verifies the expected output text is flushed by the console task under test. - - NB: order of entries is not tested, just presence. - - :API: public - - sep: the expected output separator. - *output: the output entries expected between the separators - **options: additional options passed to execute_task. - """ - # We expect each output line to be suffixed with the separator, so for , and [1,2,3] we expect: - # '1,2,3,' - splitting this by the separator we should get ['1', '2', '3', ''] - always an extra - # empty string if the separator is properly always a suffix and not applied just between - # entries. - self.assertEqual(sorted(list(output) + ['']), sorted((self.execute_task(**kwargs)).split(sep))) - - def assert_console_output(self, *output, **kwargs): - """Verifies the expected output entries are emitted by the console task under test. - - NB: order of entries is not tested, just presence. - - :API: public - - *output: the expected output entries - **kwargs: additional kwargs passed to execute_console_task. - """ - self.assertEqual(sorted(output), sorted(self.execute_console_task(**kwargs))) - - def assert_console_output_contains(self, output, **kwargs): - """Verifies the expected output string is emitted by the console task under test. - - :API: public - - output: the expected output entry(ies) - **kwargs: additional kwargs passed to execute_console_task. - """ - self.assertIn(output, self.execute_console_task(**kwargs)) - - def assert_console_output_ordered(self, *output, **kwargs): - """Verifies the expected output entries are emitted by the console task under test. - - NB: order of entries is tested. 
- - :API: public - - *output: the expected output entries in expected order - **kwargs: additional kwargs passed to execute_console_task. - """ - self.assertEqual(list(output), self.execute_console_task(**kwargs)) - - def assert_console_raises(self, exception, **kwargs): - """Verifies the expected exception is raised by the console task under test. - - :API: public - - **kwargs: additional kwargs are passed to execute_console_task. - """ - with self.assertRaises(exception): - self.execute_console_task(**kwargs) diff --git a/tests/python/pants_test/test_base.py b/tests/python/pants_test/test_base.py index 38653ea25b5..abe240977a2 100644 --- a/tests/python/pants_test/test_base.py +++ b/tests/python/pants_test/test_base.py @@ -107,7 +107,7 @@ def create_dir(self, relpath): """ path = os.path.join(self.build_root, relpath) safe_mkdir(path) - self._invalidate_for(relpath) + self.invalidate_for(relpath) return path def create_workdir_dir(self, relpath): @@ -119,10 +119,10 @@ def create_workdir_dir(self, relpath): """ path = os.path.join(self.pants_workdir, relpath) safe_mkdir(path) - self._invalidate_for(relpath) + self.invalidate_for(relpath) return path - def _invalidate_for(self, *relpaths): + def invalidate_for(self, *relpaths): """Invalidates all files from the relpath, recursively up to the root. Many python operations implicitly create parent directories, so we assume that touching a @@ -131,7 +131,7 @@ def _invalidate_for(self, *relpaths): if self._scheduler is None: return files = {f for relpath in relpaths for f in recursive_dirname(relpath)} - self._scheduler.invalidate_files(files) + return self._scheduler.invalidate_files(files) def create_link(self, relsrc, reldst): """Creates a symlink within the buildroot. @@ -144,7 +144,7 @@ def create_link(self, relsrc, reldst): src = os.path.join(self.build_root, relsrc) dst = os.path.join(self.build_root, reldst) relative_symlink(src, dst) - self._invalidate_for(reldst) + self.invalidate_for(reldst) def create_file(self, relpath, contents='', mode='w'): """Writes to a file under the buildroot. @@ -158,7 +158,7 @@ def create_file(self, relpath, contents='', mode='w'): path = os.path.join(self.build_root, relpath) with safe_open(path, mode=mode) as fp: fp.write(contents) - self._invalidate_for(relpath) + self.invalidate_for(relpath) return path def create_files(self, path, files): @@ -432,7 +432,7 @@ def reset_build_graph(self, reset_build_files=False, delete_build_files=False): if delete_build_files: for f in files: os.remove(os.path.join(self.build_root, f)) - self._invalidate_for(*files) + self.invalidate_for(*files) if self._build_graph is not None: self._build_graph.reset() From 594f91fddc03010717d5c8aae7b2889335943987 Mon Sep 17 00:00:00 2001 From: Ekaterina Tyurina Date: Fri, 15 Feb 2019 01:54:51 +0000 Subject: [PATCH 6/8] Add checks if values of flags zipkin-trace-id and zipkin-parent-id are valid (#7242) ### Problem When Pants is invoked with the zipkin-trace-id and zipkin-parent-id flags, an assertion error is raised if the flag values are in the wrong format, and the error is not informative. ### Solution The values of the zipkin-trace-id and zipkin-parent-id flags are now validated, with a clearer error explanation: users are asked to supply a 16-character or 32-character hex string. Tests are also added for these checks.
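For illustration, the added validation boils down to the standalone sketch below. The checks and error messages mirror the diff that follows; `validate_zipkin_ids` is a hypothetical wrapper introduced only for this example, and the character test is simplified to a membership check rather than the patch's `ord()` comparisons.

```python
def is_hex_string(id_value):
  # True if every character is a hex digit (0-9, a-f, A-F).
  return all(ch in '0123456789abcdefABCDEF' for ch in id_value)


def validate_zipkin_ids(trace_id, parent_id):
  # A Zipkin trace id must be a 16-character or 32-character hex string.
  if trace_id and (len(trace_id) not in (16, 32) or not is_hex_string(trace_id)):
    raise ValueError(
      "Value of the flag zipkin-trace-id must be a 16-character or 32-character "
      "hex string. Got {}.".format(trace_id))
  # A parent span id must be a 16-character hex string.
  if parent_id and (len(parent_id) != 16 or not is_hex_string(parent_id)):
    raise ValueError(
      "Value of the flag zipkin-parent-id must be a 16-character hex string. "
      "Got {}.".format(parent_id))


# e.g. validate_zipkin_ids('aa', 'ff' * 8) raises, while
# validate_zipkin_ids('ab' * 16, 'ff' * 8) passes.
```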
--- src/python/pants/reporting/reporting.py | 35 ++++++-- .../pants_test/reporting/test_reporting.py | 84 +++++++++++++++++++ 2 files changed, 111 insertions(+), 8 deletions(-) diff --git a/src/python/pants/reporting/reporting.py b/src/python/pants/reporting/reporting.py index 76f07e84e87..245f4eef4cc 100644 --- a/src/python/pants/reporting/reporting.py +++ b/src/python/pants/reporting/reporting.py @@ -51,15 +51,15 @@ def register_options(cls, register): help='The full HTTP URL of a zipkin server to which traces should be posted. ' 'No traces will be made if this is not set.') register('--zipkin-trace-id', advanced=True, default=None, - help='The overall 64 or 128-bit ID of the trace. ' - 'Set if Pants trace should be a part of larger trace ' - 'for systems that invoke Pants. If zipkin-trace-id ' - 'and zipkin-parent-id are not set, a trace_id value is randomly generated for a ' - 'Zipkin trace') + help='The overall 64 or 128-bit ID of the trace (the format is 16-character or ' + '32-character hex string). Set if the Pants trace should be a part of a larger ' + 'trace for systems that invoke Pants. If flags zipkin-trace-id and ' + 'zipkin-parent-id are not set, a trace_id value is randomly generated ' + 'for a Zipkin trace.') register('--zipkin-parent-id', advanced=True, default=None, - help='The 64-bit ID for a parent span that invokes Pants. ' - 'zipkin-trace-id and zipkin-parent-id must both either be set or not set ' - 'when run Pants command') + help='The 64-bit ID for a parent span that invokes Pants (the format is 16-character ' + 'hex string). Flags zipkin-trace-id and zipkin-parent-id must both either be set ' + 'or not set when running a Pants command.') register('--zipkin-sample-rate', advanced=True, default=100.0, help='Rate at which to sample Zipkin traces. Value 0.0 - 100.0.') @@ -112,6 +112,16 @@ def initialize(self, run_tracker, all_options, start_time=None): raise ValueError( "Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set." ) + if trace_id and (len(trace_id) != 16 and len(trace_id) != 32 or not is_hex_string(trace_id)): + raise ValueError( + "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + + "Got {}.".format(trace_id) + ) + if parent_id and (len(parent_id) != 16 or not is_hex_string(parent_id)): + raise ValueError( + "Value of the flag zipkin-parent-id must be a 16-character hex string. " + + "Got {}.".format(parent_id) + ) if zipkin_endpoint is not None: zipkin_reporter_settings = ZipkinReporter.Settings(log_level=Report.INFO) @@ -195,3 +205,12 @@ def update_reporting(self, global_options, is_quiet, run_tracker): invalidation_report.set_filename(outfile) return invalidation_report + + +def is_hex_string(id_value): + return all(is_hex_ch(ch) for ch in id_value) + + +def is_hex_ch(ch): + num = ord(ch) + return ord('0') <= num <= ord('9') or ord('a') <= num <= ord('f') or ord('A') <= num <= ord('F') diff --git a/tests/python/pants_test/reporting/test_reporting.py b/tests/python/pants_test/reporting/test_reporting.py index 3df1144481c..b221ed0a97a 100644 --- a/tests/python/pants_test/reporting/test_reporting.py +++ b/tests/python/pants_test/reporting/test_reporting.py @@ -94,3 +94,87 @@ def test_raise_if_no_parent_id_and_zipkin_endpoint_set(self): "Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set." 
in str(result.exception) ) + + def test_raise_if_parent_id_is_of_wrong_len_format(self): + parent_id = 'ff' + options = {'reporting': { + 'zipkin_trace_id': self.trace_id, + 'zipkin_parent_id': parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-parent-id must be a 16-character hex string. " + + "Got {}.".format(parent_id) + in str(result.exception) + ) + + def test_raise_if_trace_id_is_of_wrong_len_format(self): + trace_id = 'aa' + options = {'reporting': { + 'zipkin_trace_id': trace_id, + 'zipkin_parent_id': self.parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + + "Got {}.".format(trace_id) + in str(result.exception) + ) + + def test_raise_if_parent_id_is_of_wrong_ch_format(self): + parent_id = 'gggggggggggggggg' + options = {'reporting': { + 'zipkin_trace_id': self.trace_id, + 'zipkin_parent_id': parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-parent-id must be a 16-character hex string. " + + "Got {}.".format(parent_id) + in str(result.exception) + ) + + def test_raise_if_trace_id_is_of_wrong_ch_format(self): + trace_id = 'gggggggggggggggg' + options = {'reporting': { + 'zipkin_trace_id': trace_id, + 'zipkin_parent_id': self.parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + + "Got {}.".format(trace_id) + in str(result.exception) + ) From fedc91cb2e1455b7a8dca9c843bbd8b553a04241 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Fri, 15 Feb 2019 10:01:40 -0800 Subject: [PATCH 7/8] Avoid capturing Snapshots for previously digested codegen outputs (#7241) ### Problem As described in #7229, re-capturing `Snapshots` on noop runs in `SimpleCodegenTask` caused a performance regression for larger codegen use cases. ### Solution Remove features from the python-exposed `Snapshot` API that would prevent them from being roundtrippable via a `Digest` (including preservation of canonical paths, and preservation of literal ordering... i.e. #5802), add support for optimistically loading a `Snapshot` from a `Digest`, and then reuse code to dump/load a `Digest` for the codegen directories to skip `Snapshot` capturing in cases where the `Digest` had already been stored.
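Conceptually, the dump/load roundtrip that lets codegen skip re-capturing is sketched below. This is a minimal standalone approximation: the patch implements this as `dump`/`load` methods on the `Digest` datatype in `fs.py` using pants' dirutil helpers, whereas this sketch uses plain file IO and free functions.

```python
import os


def _digest_path(directory):
  # The digest for `path/to/out` is persisted adjacent to it, at `path/to/out.digest`.
  return '{}.digest'.format(directory.rstrip(os.sep))


def dump_digest(directory, fingerprint, serialized_bytes_length):
  # Persist `fingerprint:length` so a later run can reuse the stored Digest.
  with open(_digest_path(directory), 'w') as f:
    f.write('{}:{}'.format(fingerprint, serialized_bytes_length))


def load_digest(directory):
  # Return (fingerprint, length), or None if no digest was previously dumped.
  try:
    with open(_digest_path(directory)) as f:
      fingerprint, length = f.read().split(':')
    return fingerprint, int(length)
  except (IOError, OSError, ValueError):
    return None
```

A task first tries `load_digest(output_dir)` and only captures a fresh `Snapshot` when that returns None, which is what turns the noop case into a cheap file read.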
### Result Very large codegen noop usecase runtimes reduced from `~15.2` seconds to `~3.05` seconds. Fixes #7229, and fixes #5802. --- .../wire/org/pantsbuild/example/element/BUILD | 4 +- .../codegen/wire/java/java_wire_library.py | 5 + .../backend/codegen/wire/java/wire_gen.py | 43 +++- .../pants/backend/graph_info/tasks/cloc.py | 2 +- .../tasks/jvm_compile/javac/javac_compile.py | 4 +- .../jvm/tasks/jvm_compile/rsc/rsc_compile.py | 25 +- .../python/rules/python_test_runner.py | 2 +- src/python/pants/engine/build_files.py | 2 +- src/python/pants/engine/fs.py | 77 ++++-- src/python/pants/engine/native.py | 8 - src/python/pants/engine/scheduler.py | 7 +- src/python/pants/source/filespec.py | 3 + src/python/pants/source/wrapped_globs.py | 6 +- src/python/pants/task/simple_codegen_task.py | 76 +++--- src/rust/engine/fs/src/snapshot.rs | 118 +++++++-- src/rust/engine/fs/src/store.rs | 225 ++++++++++-------- src/rust/engine/src/lib.rs | 26 +- src/rust/engine/src/nodes.rs | 55 ++--- src/rust/engine/src/types.rs | 4 - .../codegen/antlr/java/test_antlr_java_gen.py | 12 +- .../backend/codegen/protobuf/java/BUILD | 1 + .../java/test_protobuf_integration.py | 13 - .../backend/codegen/wire/java/BUILD | 2 + .../codegen/wire/java/test_wire_gen.py | 35 ++- .../pants_test/engine/legacy/test_graph.py | 5 +- .../pants_test/engine/test_build_files.py | 14 +- tests/python/pants_test/engine/test_fs.py | 6 +- .../engine/test_isolated_process.py | 6 +- tests/python/pants_test/reporting/BUILD | 2 +- .../pants_test/source/test_payload_fields.py | 8 +- .../pants_test/source/test_wrapped_globs.py | 6 +- .../task/test_simple_codegen_task.py | 16 +- 32 files changed, 461 insertions(+), 357 deletions(-) diff --git a/examples/src/wire/org/pantsbuild/example/element/BUILD b/examples/src/wire/org/pantsbuild/example/element/BUILD index 916a7f943a1..0afde70a649 100644 --- a/examples/src/wire/org/pantsbuild/example/element/BUILD +++ b/examples/src/wire/org/pantsbuild/example/element/BUILD @@ -3,10 +3,12 @@ java_wire_library( sources=[ - 'elements.proto', # Order matters here. + # NB: Order matters for these two paths, so we set `ordered_sources=True` below. + 'elements.proto', 'compound.proto', ], dependencies=[ 'examples/src/wire/org/pantsbuild/example/temperature', ], + ordered_sources=True, ) diff --git a/src/python/pants/backend/codegen/wire/java/java_wire_library.py b/src/python/pants/backend/codegen/wire/java/java_wire_library.py index d5cbd9b3fbe..93391af49c4 100644 --- a/src/python/pants/backend/codegen/wire/java/java_wire_library.py +++ b/src/python/pants/backend/codegen/wire/java/java_wire_library.py @@ -32,6 +32,7 @@ def __init__(self, registry_class=None, enum_options=None, no_options=None, + ordered_sources=None, **kwargs): """ :param string service_writer: the name of the class to pass as the --service_writer option to @@ -43,6 +44,9 @@ def __init__(self, doubt, specify com.squareup.wire.SimpleServiceWriter :param list enum_options: list of enums to pass to as the --enum-enum_options option, # optional :param boolean no_options: boolean that determines if --no_options flag is passed + :param boolean ordered_sources: boolean that declares whether the sources argument represents + literal ordered sources to be passed directly to the compiler. If false, no ordering is + guaranteed for the sources passed to an individual compiler invoke. 
""" if not service_writer and service_writer_options: @@ -59,6 +63,7 @@ def __init__(self, 'registry_class': PrimitiveField(registry_class or None), 'enum_options': PrimitiveField(enum_options or []), 'no_options': PrimitiveField(no_options or False), + 'ordered_sources': PrimitiveField(ordered_sources or False), }) super(JavaWireLibrary, self).__init__(payload=payload, **kwargs) diff --git a/src/python/pants/backend/codegen/wire/java/wire_gen.py b/src/python/pants/backend/codegen/wire/java/wire_gen.py index 84b378d9136..acfb8d58f11 100644 --- a/src/python/pants/backend/codegen/wire/java/wire_gen.py +++ b/src/python/pants/backend/codegen/wire/java/wire_gen.py @@ -13,10 +13,12 @@ from pants.backend.jvm.targets.java_library import JavaLibrary from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TaskError +from pants.base.exceptions import TargetDefinitionException, TaskError from pants.base.workunit import WorkUnitLabel from pants.java.jar.jar_dependency import JarDependency +from pants.source.filespec import globs_matches from pants.task.simple_codegen_task import SimpleCodegenTask +from pants.util.dirutil import fast_relpath logger = logging.getLogger(__name__) @@ -61,24 +63,47 @@ def synthetic_target_extra_dependencies(self, target, target_workdir): wire_runtime_deps_spec = self.get_options().javadeps return self.resolve_deps([wire_runtime_deps_spec]) - def format_args_for_target(self, target, target_workdir): - """Calculate the arguments to pass to the command line for a single target.""" - sources = OrderedSet(target.sources_relative_to_buildroot()) - + def _compute_sources(self, target): relative_sources = OrderedSet() - source_roots = set() - for source in sources: + source_roots = OrderedSet() + + def capture_and_relativize_to_source_root(source): source_root = self.context.source_roots.find_by_path(source) if not source_root: source_root = self.context.source_roots.find(target) source_roots.add(source_root.path) - relative_source = os.path.relpath(source, source_root.path) - relative_sources.add(relative_source) + return fast_relpath(source, source_root.path) + + if target.payload.get_field_value('ordered_sources'): + # Re-match the filespecs against the sources in order to apply them in the literal order + # they were specified in. + filespec = target.globs_relative_to_buildroot() + excludes = filespec.get('excludes', []) + for filespec in filespec.get('globs', []): + sources = [s for s in target.sources_relative_to_buildroot() + if globs_matches([s], [filespec], excludes)] + if len(sources) != 1: + raise TargetDefinitionException( + target, + 'With `ordered_sources=True`, expected one match for each file literal, ' + 'but got: {} for literal `{}`.'.format(sources, filespec) + ) + relative_sources.add(capture_and_relativize_to_source_root(sources[0])) + else: + # Otherwise, use the default (unspecified) snapshot ordering. 
+ for source in target.sources_relative_to_buildroot(): + relative_sources.add(capture_and_relativize_to_source_root(source)) + return relative_sources, source_roots + + def format_args_for_target(self, target, target_workdir): + """Calculate the arguments to pass to the command line for a single target.""" args = ['--java_out={0}'.format(target_workdir)] # Add all params in payload to args + relative_sources, source_roots = self._compute_sources(target) + if target.payload.get_field_value('no_options'): args.append('--no_options') diff --git a/src/python/pants/backend/graph_info/tasks/cloc.py b/src/python/pants/backend/graph_info/tasks/cloc.py index 6019ef33792..e1903d9dfd5 100644 --- a/src/python/pants/backend/graph_info/tasks/cloc.py +++ b/src/python/pants/backend/graph_info/tasks/cloc.py @@ -40,7 +40,7 @@ def console_output(self, targets): input_snapshots = tuple( target.sources_snapshot(scheduler=self.context._scheduler) for target in targets ) - input_files = {f.path for snapshot in input_snapshots for f in snapshot.files} + input_files = {f for snapshot in input_snapshots for f in snapshot.files} # TODO: Work out a nice library-like utility for writing an argfile, as this will be common. with temporary_dir() as tmpdir: diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py index f9f783947ab..dee55bc492f 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py @@ -209,8 +209,8 @@ def _execute_hermetic_compile(self, cmd, ctx): # Assume no extra .class files to grab. We'll fix up that case soon. # Drop the source_root from the file path. # Assumes `-d .` has been put in the command. 
- os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base) - for f in input_snapshot.files if f.path.endswith('.java') + os.path.relpath(f.replace('.java', '.class'), ctx.target.target_base) + for f in input_snapshot.files if f.endswith('.java') ) exec_process_request = ExecuteProcessRequest( argv=tuple(cmd), diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py index 6a5675be561..4266f4f5e02 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py @@ -33,8 +33,7 @@ from pants.java.jar.jar_dependency import JarDependency from pants.reporting.reporting_utils import items_to_report_element from pants.util.contextutil import Timer -from pants.util.dirutil import (fast_relpath, fast_relpath_optional, maybe_read_file, - safe_file_dump, safe_mkdir) +from pants.util.dirutil import fast_relpath, fast_relpath_optional, safe_mkdir from pants.util.memo import memoized_property @@ -60,22 +59,6 @@ def stdout_contents(wu): return f.read().rstrip() -def write_digest(output_dir, digest): - safe_file_dump( - '{}.digest'.format(output_dir), - mode='w', - payload='{}:{}'.format(digest.fingerprint, digest.serialized_bytes_length)) - - -def load_digest(output_dir): - read_file = maybe_read_file('{}.digest'.format(output_dir), binary_mode=False) - if read_file: - fingerprint, length = read_file.split(':') - return Digest(fingerprint, int(length)) - else: - return None - - def _create_desandboxify_fn(possible_path_patterns): # Takes a collection of possible canonical prefixes, and returns a function that # if it finds a matching prefix, strips the path prior to the prefix and returns it @@ -132,7 +115,7 @@ def __init__(self, *args, **kwargs): @classmethod def implementation_version(cls): - return super(RscCompile, cls).implementation_version() + [('RscCompile', 171)] + return super(RscCompile, cls).implementation_version() + [('RscCompile', 172)] @classmethod def register_options(cls, register): @@ -218,7 +201,7 @@ def pathglob_for(filename): def to_classpath_entries(paths, scheduler): # list of path -> # list of (path, optional) -> - path_and_digests = [(p, load_digest(os.path.dirname(p))) for p in paths] + path_and_digests = [(p, Digest.load(os.path.dirname(p))) for p in paths] # partition: list of path, list of tuples paths_without_digests = [p for (p, d) in path_and_digests if not d] if paths_without_digests: @@ -825,7 +808,7 @@ def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input raise TaskError(res.stderr) if output_dir: - write_digest(output_dir, res.output_directory_digest) + res.output_directory_digest.dump(output_dir) self.context._scheduler.materialize_directories(( DirectoryToMaterialize( # NB the first element here is the root to materialize into, not the dir to snapshot diff --git a/src/python/pants/backend/python/rules/python_test_runner.py b/src/python/pants/backend/python/rules/python_test_runner.py index 70c6457f77c..26ac23c39e5 100644 --- a/src/python/pants/backend/python/rules/python_test_runner.py +++ b/src/python/pants/backend/python/rules/python_test_runner.py @@ -64,7 +64,7 @@ def run_python_test(transitive_hydrated_target, pytest): # pex27, where it should be hermetically provided in some way. 
output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex' requirements_pex_argv = [ - './{}'.format(pex_snapshot.files[0].path), + './{}'.format(pex_snapshot.files[0]), '--python', python_binary, '-e', 'pytest:main', '-o', output_pytest_requirements_pex_filename, diff --git a/src/python/pants/engine/build_files.py b/src/python/pants/engine/build_files.py index ee65bd92119..84430073f35 100644 --- a/src/python/pants/engine/build_files.py +++ b/src/python/pants/engine/build_files.py @@ -219,7 +219,7 @@ def addresses_from_address_families(address_mapper, specs): """ # Capture a Snapshot covering all paths for these Specs, then group by directory. snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs)) - dirnames = {dirname(f.stat.path) for f in snapshot.files} + dirnames = {dirname(f) for f in snapshot.files} address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames] address_family_by_directory = {af.namespace: af for af in address_families} diff --git a/src/python/pants/engine/fs.py b/src/python/pants/engine/fs.py index c202bb3d3f3..8d009f01c06 100644 --- a/src/python/pants/engine/fs.py +++ b/src/python/pants/engine/fs.py @@ -4,14 +4,16 @@ from __future__ import absolute_import, division, print_function, unicode_literals +import os + from future.utils import binary_type, text_type -from pants.base.project_tree import Dir, File from pants.engine.objects import Collection from pants.engine.rules import RootRule from pants.option.custom_types import GlobExpansionConjunction from pants.option.global_options import GlobMatchErrorBehavior -from pants.util.objects import datatype +from pants.util.dirutil import maybe_read_file, safe_delete, safe_file_dump +from pants.util.objects import Exactly, datatype class FileContent(datatype([('path', text_type), ('content', binary_type)])): @@ -62,10 +64,6 @@ def __new__(cls, include, exclude=(), glob_match_error_behavior=None, conjunctio conjunction=GlobExpansionConjunction.create(conjunction, none_is_default=True)) -class PathGlobsAndRoot(datatype([('path_globs', PathGlobs), ('root', text_type)])): - pass - - class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', int)])): """A Digest is a content-digest fingerprint, and a length of underlying content. @@ -84,6 +82,33 @@ class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', i https://github.com/pantsbuild/pants/issues/5802 """ + @classmethod + def _path(cls, directory): + return '{}.digest'.format(directory.rstrip(os.sep)) + + @classmethod + def clear(cls, directory): + """Clear any existing Digest file adjacent to the given directory.""" + safe_delete(cls._path(directory)) + + @classmethod + def load(cls, directory): + """Load a Digest from a `.digest` file adjacent to the given directory. + + :return: A Digest, or None if the Digest did not exist. 
+ """ + read_file = maybe_read_file(cls._path(directory), binary_mode=False) + if read_file: + fingerprint, length = read_file.split(':') + return Digest(fingerprint, int(length)) + else: + return None + + def dump(self, directory): + """Dump this Digest object adjacent to the given directory.""" + payload = '{}:{}'.format(self.fingerprint, self.serialized_bytes_length) + safe_file_dump(self._path(directory), payload=payload, mode='w') + def __repr__(self): return '''Digest(fingerprint={}, serialized_bytes_length={})'''.format( self.fingerprint, @@ -94,8 +119,25 @@ def __str__(self): return repr(self) -class Snapshot(datatype([('directory_digest', Digest), ('path_stats', tuple)])): - """A Snapshot is a collection of Files and Dirs fingerprinted by their names/content. +class PathGlobsAndRoot(datatype([ + ('path_globs', PathGlobs), + ('root', text_type), + ('digest_hint', Exactly(Digest, type(None))), +])): + """A set of PathGlobs to capture relative to some root (which may exist outside of the buildroot). + + If the `digest_hint` is set, it must be the Digest that we would expect to get if we were to + expand and Digest the globs. The hint is an optimization that allows for bypassing filesystem + operations in cases where the expected Digest is known, and the content for the Digest is already + stored. + """ + + def __new__(cls, path_globs, root, digest_hint=None): + return super(PathGlobsAndRoot, cls).__new__(cls, path_globs, root, digest_hint) + + +class Snapshot(datatype([('directory_digest', Digest), ('files', tuple), ('dirs', tuple)])): + """A Snapshot is a collection of file paths and dir paths fingerprinted by their names/content. Snapshots are used to make it easier to isolate process execution by fixing the contents of the files being operated on and easing their movement to and from isolated execution @@ -106,22 +148,6 @@ class Snapshot(datatype([('directory_digest', Digest), ('path_stats', tuple)])): def is_empty(self): return self == EMPTY_SNAPSHOT - @property - def dirs(self): - return [p for p in self.path_stats if type(p.stat) == Dir] - - @property - def dir_stats(self): - return [p.stat for p in self.dirs] - - @property - def files(self): - return [p for p in self.path_stats if type(p.stat) == File] - - @property - def file_stats(self): - return [p.stat for p in self.files] - class MergedDirectories(datatype([('directories', tuple)])): pass @@ -150,7 +176,8 @@ class UrlToFetch(datatype([('url', text_type), ('digest', Digest)])): EMPTY_SNAPSHOT = Snapshot( directory_digest=EMPTY_DIRECTORY_DIGEST, - path_stats=(), + files=(), + dirs=() ) diff --git a/src/python/pants/engine/native.py b/src/python/pants/engine/native.py index 5026b055382..de32a5ab155 100644 --- a/src/python/pants/engine/native.py +++ b/src/python/pants/engine/native.py @@ -690,10 +690,6 @@ def new_scheduler(self, construct_snapshot, construct_file_content, construct_files_content, - construct_path_stat, - construct_dir, - construct_file, - construct_link, construct_process_result, constraint_address, constraint_path_globs, @@ -722,10 +718,6 @@ def tc(constraint): func(construct_snapshot), func(construct_file_content), func(construct_files_content), - func(construct_path_stat), - func(construct_dir), - func(construct_file), - func(construct_link), func(construct_process_result), # TypeConstraints. 
tc(constraint_address), diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index fa4688c64ab..5727c22b135 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -14,8 +14,7 @@ from pants.base.project_tree import Dir, File, Link from pants.build_graph.address import Address from pants.engine.fs import (Digest, DirectoryToMaterialize, FileContent, FilesContent, - MergedDirectories, Path, PathGlobs, PathGlobsAndRoot, Snapshot, - UrlToFetch) + MergedDirectories, PathGlobs, PathGlobsAndRoot, Snapshot, UrlToFetch) from pants.engine.isolated_process import ExecuteProcessRequest, FallibleExecuteProcessResult from pants.engine.native import Function, TypeConstraint, TypeId from pants.engine.nodes import Return, Throw @@ -101,10 +100,6 @@ def __init__( construct_snapshot=Snapshot, construct_file_content=FileContent, construct_files_content=FilesContent, - construct_path_stat=Path, - construct_dir=Dir, - construct_file=File, - construct_link=Link, construct_process_result=FallibleExecuteProcessResult, constraint_address=constraint_for(Address), constraint_path_globs=constraint_for(PathGlobs), diff --git a/src/python/pants/source/filespec.py b/src/python/pants/source/filespec.py index c36d37176a4..77e967043f5 100644 --- a/src/python/pants/source/filespec.py +++ b/src/python/pants/source/filespec.py @@ -9,6 +9,9 @@ def glob_to_regex(pattern): """Given a glob pattern, return an equivalent regex expression. + + TODO: Replace with implementation in `fs.rs`. See https://github.com/pantsbuild/pants/issues/6795. + :param string glob: The glob pattern. "**" matches 0 or more dirs recursively. "*" only matches patterns in a single dir. :returns: A regex string that matches same paths as the input glob does. 
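
The `.digest` sidecar convention established by the new `Digest.load`/`Digest.dump` classmethods above is easiest to see in isolation. Below is a minimal, self-contained sketch of the round-trip; plain `open` stands in for pants' `safe_file_dump`/`maybe_read_file` helpers, and the `clear` classmethod (which just deletes the sidecar via `safe_delete`) is omitted:

```python
import os
import tempfile


class Digest(object):
  """Stand-in for pants.engine.fs.Digest: a fingerprint plus a serialized length."""

  def __init__(self, fingerprint, serialized_bytes_length):
    self.fingerprint = fingerprint
    self.serialized_bytes_length = serialized_bytes_length

  @classmethod
  def _path(cls, directory):
    # The sidecar lives adjacent to the directory: `<directory>.digest`.
    return '{}.digest'.format(directory.rstrip(os.sep))

  @classmethod
  def load(cls, directory):
    try:
      with open(cls._path(directory)) as f:
        fingerprint, length = f.read().split(':')
      return cls(fingerprint, int(length))
    except (IOError, OSError):
      return None  # No sidecar: the caller falls back to walking the filesystem.

  def dump(self, directory):
    with open(self._path(directory), 'w') as f:
      f.write('{}:{}'.format(self.fingerprint, self.serialized_bytes_length))


results_dir = tempfile.mkdtemp()
Digest('deadbeef', 42).dump(results_dir)
reloaded = Digest.load(results_dir)
assert (reloaded.fingerprint, reloaded.serialized_bytes_length) == ('deadbeef', 42)
```

A missing or garbage-collected sidecar simply yields `None`, which is what lets `PathGlobsAndRoot.digest_hint` consumers fall back to re-walking the filesystem.
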
diff --git a/src/python/pants/source/wrapped_globs.py b/src/python/pants/source/wrapped_globs.py index 2e68472e748..1deab4c9dda 100644 --- a/src/python/pants/source/wrapped_globs.py +++ b/src/python/pants/source/wrapped_globs.py @@ -99,8 +99,10 @@ def files(self): @memoized_property def files_relative_to_buildroot(self): - fds = self._snapshot.path_stats if self._include_dirs else self._snapshot.files - return tuple(fd.path for fd in fds) + res = self._snapshot.files + if self._include_dirs: + res += self._snapshot.dirs + return res @property def files_hash(self): diff --git a/src/python/pants/task/simple_codegen_task.py b/src/python/pants/task/simple_codegen_task.py index 276c50fe370..162b8dd4d2e 100644 --- a/src/python/pants/task/simple_codegen_task.py +++ b/src/python/pants/task/simple_codegen_task.py @@ -18,11 +18,11 @@ from pants.base.workunit import WorkUnitLabel from pants.build_graph.address import Address from pants.build_graph.address_lookup_error import AddressLookupError -from pants.engine.fs import PathGlobs, PathGlobsAndRoot +from pants.engine.fs import Digest, PathGlobs, PathGlobsAndRoot from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper from pants.task.task import Task from pants.util.collections_abc_backport import OrderedDict -from pants.util.dirutil import safe_delete +from pants.util.dirutil import fast_relpath, safe_delete logger = logging.getLogger(__name__) @@ -113,6 +113,10 @@ def synthetic_target_extra_dependencies(self, target, target_workdir): """ return [] + @classmethod + def implementation_version(cls): + return super(SimpleCodegenTask, cls).implementation_version() + [('SimpleCodegenTask', 2)] + def synthetic_target_extra_exports(self, target, target_workdir): """Gets any extra exports generated synthetic targets should have. @@ -206,7 +210,7 @@ def _do_validate_sources_present(self, target): def _get_synthetic_address(self, target, target_workdir): synthetic_name = target.id - sources_rel_path = os.path.relpath(target_workdir, get_buildroot()) + sources_rel_path = fast_relpath(target_workdir, get_buildroot()) synthetic_address = Address(sources_rel_path, synthetic_name) return synthetic_address @@ -230,32 +234,26 @@ def execute(self): with self.context.new_workunit(name='execute', labels=[WorkUnitLabel.MULTITOOL]): vts_to_sources = OrderedDict() for vt in invalidation_check.all_vts: - synthetic_target_dir = self.synthetic_target_dir(vt.target, vt.results_dir) - key = (vt, synthetic_target_dir) - vts_to_sources[key] = None + vts_to_sources[vt] = None # Build the target and handle duplicate sources. if not vt.valid: if self._do_validate_sources_present(vt.target): - self.execute_codegen(vt.target, vt.results_dir) - sources = self._capture_sources((key,))[0] + self.execute_codegen(vt.target, vt.current_results_dir) + sources = self._capture_sources((vt,))[0] # _handle_duplicate_sources may delete files from the filesystem, so we need to # re-capture the sources. 
- if not self._handle_duplicate_sources(vt.target, vt.results_dir, sources): - vts_to_sources[key] = sources + if not self._handle_duplicate_sources(vt, sources): + vts_to_sources[vt] = sources vt.update() vts_to_capture = tuple(key for key, sources in vts_to_sources.items() if sources is None) filesets = self._capture_sources(vts_to_capture) for key, fileset in zip(vts_to_capture, filesets): vts_to_sources[key] = fileset - for (vt, synthetic_target_dir), fileset in vts_to_sources.items(): - self._inject_synthetic_target( - vt.target, - synthetic_target_dir, - fileset, - ) + for vt, fileset in vts_to_sources.items(): + self._inject_synthetic_target(vt, fileset) self._mark_transitive_invalidation_hashes_dirty( vt.target.address for vt in invalidation_check.all_vts ) @@ -280,17 +278,23 @@ def synthetic_target_dir(self, target, target_workdir): """ return target_workdir - # Accepts tuple of tuples of (target, synthetic_target_dir) + # Accepts tuple of VersionedTarget instances. # Returns tuple of EagerFilesetWithSpecs in matching order. - def _capture_sources(self, targets_and_dirs): + def _capture_sources(self, vts): to_capture = [] results_dirs = [] filespecs = [] - for target, synthetic_target_dir in targets_and_dirs: + for vt in vts: + target = vt.target + # Compute the (optional) subdirectory of the results_dir to generate code to. This + # path will end up in the generated FilesetWithSpec and target, and thus needs to be + # located below the stable/symlinked `vt.results_dir`. + synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir) + files = self.sources_globs - results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot()) + results_dir_relpath = fast_relpath(synthetic_target_dir, get_buildroot()) buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files) buildroot_relative_excludes = tuple( os.path.join(results_dir_relpath, file) @@ -300,6 +304,8 @@ def _capture_sources(self, targets_and_dirs): PathGlobsAndRoot( PathGlobs(buildroot_relative_globs, buildroot_relative_excludes), text_type(get_buildroot()), + # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`. + Digest.load(vt.current_results_dir), ) ) results_dirs.append(results_dir_relpath) @@ -307,33 +313,35 @@ def _capture_sources(self, targets_and_dirs): snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture)) + for snapshot, vt in zip(snapshots, vts): + snapshot.directory_digest.dump(vt.current_results_dir) + return tuple(EagerFilesetWithSpec( results_dir_relpath, filespec, snapshot, ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots)) - def _inject_synthetic_target( - self, - target, - target_workdir, - sources, - ): + def _inject_synthetic_target(self, vt, sources): """Create, inject, and return a synthetic target for the given target and workdir. - :param target: The target to inject a synthetic target for. - :param target_workdir: The work directory containing the generated code for the target. + :param vt: A codegen input VersionedTarget to inject a synthetic target for. + :param sources: A FilesetWithSpec to inject for the target. """ + target = vt.target + # NB: For stability, the injected target exposes the stable-symlinked `vt.results_dir`, + # rather than the hash-named `vt.current_results_dir`. 
+ synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir) synthetic_target_type = self.synthetic_target_type(target) - synthetic_extra_dependencies = self.synthetic_target_extra_dependencies(target, target_workdir) + synthetic_extra_dependencies = self.synthetic_target_extra_dependencies(target, synthetic_target_dir) copied_attributes = {} for attribute in self._copy_target_attributes: copied_attributes[attribute] = getattr(target, attribute) if self._supports_exports(synthetic_target_type): - extra_exports = self.synthetic_target_extra_exports(target, target_workdir) + extra_exports = self.synthetic_target_extra_exports(target, synthetic_target_dir) extra_exports_not_in_extra_dependencies = set(extra_exports).difference( set(synthetic_extra_dependencies)) @@ -349,7 +357,7 @@ def _inject_synthetic_target( copied_attributes['exports'] = sorted(union) synthetic_target = self.context.add_new_target( - address=self._get_synthetic_address(target, target_workdir), + address=self._get_synthetic_address(target, synthetic_target_dir), target_type=synthetic_target_type, dependencies=synthetic_extra_dependencies, sources=sources, @@ -405,7 +413,7 @@ def execute_codegen(self, target, target_workdir): :param target_workdir: A clean directory into which to generate code """ - def _handle_duplicate_sources(self, target, target_workdir, sources): + def _handle_duplicate_sources(self, vt, sources): """Handles duplicate sources generated by the given gen target by either failure or deletion. This method should be called after all dependencies have been injected into the graph, but @@ -420,6 +428,8 @@ def _handle_duplicate_sources(self, target, target_workdir, sources): default, this behavior is disabled, and duplication in generated sources will raise a TaskError. This is controlled by the --allow-dups flag. """ + target = vt.target + target_workdir = vt.results_dir # Walk dependency gentargets and record any sources owned by those targets that are also # owned by this target. @@ -457,6 +467,8 @@ def record_duplicates(dep): for duped_source in duped_sources: safe_delete(os.path.join(target_workdir, duped_source)) did_modify = True + if did_modify: + Digest.clear(vt.current_results_dir) return did_modify class DuplicateSourceError(TaskError): diff --git a/src/rust/engine/fs/src/snapshot.rs b/src/rust/engine/fs/src/snapshot.rs index 1badce3abc7..3701fd2278e 100644 --- a/src/rust/engine/fs/src/snapshot.rs +++ b/src/rust/engine/fs/src/snapshot.rs @@ -3,7 +3,7 @@ use crate::glob_matching::GlobMatching; use crate::pool::ResettablePool; -use crate::{File, PathGlobs, PathStat, PosixFS, Store}; +use crate::{Dir, File, PathGlobs, PathStat, PosixFS, Store}; use bazel_protos; use boxfuture::{try_future, BoxFuture, Boxable}; use futures::future::{self, join_all}; @@ -44,27 +44,57 @@ impl Snapshot { >( store: Store, file_digester: &S, - path_stats: Vec, + mut path_stats: Vec, ) -> BoxFuture { - let mut sorted_path_stats = path_stats.clone(); - sorted_path_stats.sort_by(|a, b| a.path().cmp(b.path())); + path_stats.sort_by(|a, b| a.path().cmp(b.path())); // The helper assumes that if a Path has multiple children, it must be a directory. // Proactively error if we run into identically named files, because otherwise we will treat // them like empty directories. 
- sorted_path_stats.dedup_by(|a, b| a.path() == b.path()); - if sorted_path_stats.len() != path_stats.len() { + let pre_dedupe_len = path_stats.len(); + path_stats.dedup_by(|a, b| a.path() == b.path()); + if path_stats.len() != pre_dedupe_len { return future::err(format!( "Snapshots must be constructed from unique path stats; got duplicates in {:?}", path_stats )) .to_boxed(); } - Snapshot::ingest_directory_from_sorted_path_stats(store, file_digester, &sorted_path_stats) + Snapshot::ingest_directory_from_sorted_path_stats(store, file_digester, &path_stats) .map(|digest| Snapshot { digest, path_stats }) .to_boxed() } + pub fn from_digest(store: Store, digest: Digest) -> BoxFuture { + store + .walk(digest, |_, path_so_far, _, directory| { + let mut path_stats = Vec::new(); + path_stats.extend(directory.get_directories().iter().map(move |dir_node| { + let path = path_so_far.join(dir_node.get_name()); + PathStat::dir(path.clone(), Dir(path)) + })); + path_stats.extend(directory.get_files().iter().map(move |file_node| { + let path = path_so_far.join(file_node.get_name()); + PathStat::file( + path.clone(), + File { + path, + is_executable: file_node.is_executable, + }, + ) + })); + future::ok(path_stats).to_boxed() + }) + .map(move |path_stats_per_directory| { + let mut path_stats = + Iterator::flatten(path_stats_per_directory.into_iter().map(|v| v.into_iter())) + .collect::>(); + path_stats.sort_by(|l, r| l.path().cmp(&r.path())); + Snapshot { digest, path_stats } + }) + .to_boxed() + } + pub fn digest_from_path_stats< S: StoreFileByDigest + Sized + Clone, Error: fmt::Debug + 'static + Send, @@ -312,29 +342,44 @@ impl Snapshot { .to_boxed() } - pub fn capture_snapshot_from_arbitrary_root>( + /// + /// Capture a Snapshot of a presumed-immutable piece of the filesystem. + /// + /// Note that we don't use a Graph here, and don't cache any intermediate steps, we just place + /// the resultant Snapshot into the store and return it. This is important, because we're reading + /// things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch + /// them for changes. + /// + /// If the `digest_hint` is given, first attempt to load the Snapshot using that Digest, and only + /// fall back to actually walking the filesystem if we don't have it (either due to garbage + /// collection or Digest-oblivious legacy caching). + /// + pub fn capture_snapshot_from_arbitrary_root + Send + 'static>( store: Store, fs_pool: Arc, root_path: P, path_globs: PathGlobs, + digest_hint: Option, ) -> BoxFuture { - // Note that we don't use a Graph here, and don't cache any intermediate steps, we just place - // the resultant Snapshot into the store and return it. This is important, because we're reading - // things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch - // them for changes. - // We assume that this Snapshot is of an immutable piece of the filesystem. - - let posix_fs = Arc::new(try_future!(PosixFS::new(root_path, fs_pool, &[]))); - - posix_fs - .expand(path_globs) - .map_err(|err| format!("Error expanding globs: {:?}", err)) - .and_then(|path_stats| { - Snapshot::from_path_stats( - store.clone(), - &OneOffStoreFileByDigest::new(store, posix_fs), - path_stats, - ) + // Attempt to use the digest hint to load a Snapshot without expanding the globs; otherwise, + // expand the globs to capture a Snapshot. 
+ let store2 = store.clone(); + future::result(digest_hint.ok_or_else(|| "No digest hint provided.".to_string())) + .and_then(move |digest| Snapshot::from_digest(store, digest)) + .or_else(|_| { + let posix_fs = Arc::new(try_future!(PosixFS::new(root_path, fs_pool, &[]))); + + posix_fs + .expand(path_globs) + .map_err(|err| format!("Error expanding globs: {:?}", err)) + .and_then(|path_stats| { + Snapshot::from_path_stats( + store2.clone(), + &OneOffStoreFileByDigest::new(store2, posix_fs), + path_stats, + ) + }) + .to_boxed() }) .to_boxed() } @@ -507,6 +552,27 @@ mod tests { ); } + #[test] + fn snapshot_from_digest() { + let (store, dir, posix_fs, digester) = setup(); + + let cats = PathBuf::from("cats"); + let roland = cats.join("roland"); + std::fs::create_dir_all(&dir.path().join(cats)).unwrap(); + make_file(&dir.path().join(&roland), STR.as_bytes(), 0o600); + + let path_stats = expand_all_sorted(posix_fs); + let expected_snapshot = Snapshot::from_path_stats(store.clone(), &digester, path_stats.clone()) + .wait() + .unwrap(); + assert_eq!( + expected_snapshot, + Snapshot::from_digest(store, expected_snapshot.digest) + .wait() + .unwrap(), + ); + } + #[test] fn snapshot_recursive_directories_including_empty() { let (store, dir, posix_fs, digester) = setup(); @@ -535,7 +601,7 @@ mod tests { .unwrap(), 232, ), - path_stats: unsorted_path_stats, + path_stats: sorted_path_stats, } ); } diff --git a/src/rust/engine/fs/src/store.rs b/src/rust/engine/fs/src/store.rs index 1e1f5a2fbcf..412be087044 100644 --- a/src/rust/engine/fs/src/store.rs +++ b/src/rust/engine/fs/src/store.rs @@ -461,51 +461,22 @@ impl Store { } pub fn expand_directory(&self, digest: Digest) -> BoxFuture, String> { - let accumulator = Arc::new(Mutex::new(HashMap::new())); - - self - .expand_directory_helper(digest, accumulator.clone()) - .map(|()| { - Arc::try_unwrap(accumulator) - .expect("Arc should have been unwrappable") - .into_inner() - }) - .to_boxed() - } - - fn expand_directory_helper( - &self, - digest: Digest, - accumulator: Arc>>, - ) -> BoxFuture<(), String> { - let store = self.clone(); self - .load_directory(digest) - .and_then(move |maybe_directory| match maybe_directory { - Some(directory) => { - { - let mut accumulator = accumulator.lock(); - accumulator.insert(digest, EntryType::Directory); - for file in directory.get_files() { - accumulator.insert(try_future!(file.get_digest().into()), EntryType::File); - } - } - future::join_all( - directory - .get_directories() - .iter() - .map(move |subdir| { - store.clone().expand_directory_helper( - try_future!(subdir.get_digest().into()), - accumulator.clone(), - ) - }) - .collect::>(), - ) - .map(|_| ()) - .to_boxed() + .walk(digest, |_, _, digest, directory| { + let mut digest_types = Vec::new(); + digest_types.push((digest, EntryType::Directory)); + for file in directory.get_files() { + digest_types.push((try_future!(file.get_digest().into()), EntryType::File)); } - None => future::err(format!("Could not expand unknown directory: {:?}", digest)).to_boxed(), + future::ok(digest_types).to_boxed() + }) + .map(|digest_pairs_per_directory| { + Iterator::flatten( + digest_pairs_per_directory + .into_iter() + .map(|v| v.into_iter()), + ) + .collect() }) .to_boxed() } @@ -579,78 +550,124 @@ impl Store { } // Returns files sorted by their path. 
- pub fn contents_for_directory( - &self, - directory: &bazel_protos::remote_execution::Directory, - ) -> BoxFuture, String> { - let accumulator = Arc::new(Mutex::new(HashMap::new())); + pub fn contents_for_directory(&self, digest: Digest) -> BoxFuture, String> { self - .contents_for_directory_helper(directory, PathBuf::new(), accumulator.clone()) - .map(|()| { - let map = Arc::try_unwrap(accumulator).unwrap().into_inner(); - let mut vec: Vec = map - .into_iter() - .map(|(path, content)| FileContent { path, content }) - .collect(); + .walk(digest, |store, path_so_far, _, directory| { + future::join_all( + directory + .get_files() + .iter() + .map(move |file_node| { + let path = path_so_far.join(file_node.get_name()); + store + .load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b) + .and_then(move |maybe_bytes| { + maybe_bytes + .ok_or_else(|| format!("Couldn't find file contents for {:?}", path)) + .map(|content| FileContent { path, content }) + }) + .to_boxed() + }) + .collect::>(), + ) + .to_boxed() + }) + .map(|file_contents_per_directory| { + let mut vec = Iterator::flatten( + file_contents_per_directory + .into_iter() + .map(|v| v.into_iter()), + ) + .collect::>(); vec.sort_by(|l, r| l.path.cmp(&r.path)); vec }) .to_boxed() } - // Assumes that all fingerprints it encounters are valid. - fn contents_for_directory_helper( + /// + /// Given the Digest for a Directory, recursively walk the Directory, calling the given function + /// with the path so far, and the new Directory. + /// + /// The recursive walk will proceed concurrently, so if order matters, a caller should sort the + /// output after the call. + /// + pub fn walk< + T: Send + 'static, + F: Fn( + &Store, + &PathBuf, + Digest, + &bazel_protos::remote_execution::Directory, + ) -> BoxFuture + + Send + + Sync + + 'static, + >( &self, - directory: &bazel_protos::remote_execution::Directory, + digest: Digest, + f: F, + ) -> BoxFuture, String> { + let f = Arc::new(f); + let accumulator = Arc::new(Mutex::new(Vec::new())); + self + .walk_helper(digest, PathBuf::new(), f, accumulator.clone()) + .map(|()| { + Arc::try_unwrap(accumulator) + .unwrap_or_else(|_| panic!("walk_helper violated its contract.")) + .into_inner() + }) + .to_boxed() + } + + fn walk_helper< + T: Send + 'static, + F: Fn( + &Store, + &PathBuf, + Digest, + &bazel_protos::remote_execution::Directory, + ) -> BoxFuture + + Send + + Sync + + 'static, + >( + &self, + digest: Digest, path_so_far: PathBuf, - contents_wrapped: Arc>>, + f: Arc, + accumulator: Arc>>, ) -> BoxFuture<(), String> { - let contents_wrapped_copy = contents_wrapped.clone(); - let path_so_far_copy = path_so_far.clone(); - let store_copy = self.clone(); - let file_futures = future::join_all( - directory - .get_files() - .iter() - .map(move |file_node| { - let path = path_so_far_copy.join(file_node.get_name()); - let contents_wrapped_copy = contents_wrapped_copy.clone(); - store_copy - .load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b) - .and_then(move |maybe_bytes| { - maybe_bytes - .ok_or_else(|| format!("Couldn't find file contents for {:?}", path)) - .map(move |bytes| { - let mut contents = contents_wrapped_copy.lock(); - contents.insert(path, bytes); - }) - }) - .to_boxed() - }) - .collect::>(), - ); let store = self.clone(); - let dir_futures = future::join_all( - directory - .get_directories() - .iter() - .map(move |dir_node| { - let digest = try_future!(dir_node.get_digest().into()); - let path = path_so_far.join(dir_node.get_name()); - let store = 
store.clone(); - let contents_wrapped = contents_wrapped.clone(); - store - .load_directory(digest) - .and_then(move |maybe_dir| { - maybe_dir - .ok_or_else(|| format!("Could not find sub-directory with digest {:?}", digest)) + self + .load_directory(digest) + .and_then(move |maybe_directory| match maybe_directory { + Some(directory) => { + let result_for_directory = f(&store, &path_so_far, digest, &directory); + result_for_directory + .and_then(move |r| { + { + let mut accumulator = accumulator.lock(); + accumulator.push(r); + } + future::join_all( + directory + .get_directories() + .iter() + .map(move |dir_node| { + let subdir_digest = try_future!(dir_node.get_digest().into()); + let path = path_so_far.join(dir_node.get_name()); + store.walk_helper(subdir_digest, path, f.clone(), accumulator.clone()) + }) + .collect::>(), + ) + .map(|_| ()) }) - .and_then(move |dir| store.contents_for_directory_helper(&dir, path, contents_wrapped)) .to_boxed() - }) - .collect::>(), - ); - file_futures.join(dir_futures).map(|(_, _)| ()).to_boxed() + } + None => future::err(format!("Could not walk unknown directory: {:?}", digest)).to_boxed(), + }) + .to_boxed() } } @@ -3501,7 +3518,7 @@ mod tests { let store = new_local_store(store_dir.path()); let file_contents = store - .contents_for_directory(&TestDirectory::empty().directory()) + .contents_for_directory(TestDirectory::empty().digest()) .wait() .expect("Getting FileContents"); @@ -3535,7 +3552,7 @@ mod tests { .expect("Error saving catnip file bytes"); let file_contents = store - .contents_for_directory(&recursive_testdir.directory()) + .contents_for_directory(recursive_testdir.digest()) .wait() .expect("Getting FileContents"); diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs index 54a0ed4af0c..84cc83c1fd4 100644 --- a/src/rust/engine/src/lib.rs +++ b/src/rust/engine/src/lib.rs @@ -177,10 +177,6 @@ pub extern "C" fn scheduler_create( construct_snapshot: Function, construct_file_content: Function, construct_files_content: Function, - construct_path_stat: Function, - construct_dir: Function, - construct_file: Function, - construct_link: Function, construct_process_result: Function, type_address: TypeConstraint, type_path_globs: TypeConstraint, @@ -224,10 +220,6 @@ pub extern "C" fn scheduler_create( construct_snapshot: construct_snapshot, construct_file_content: construct_file_content, construct_files_content: construct_files_content, - construct_path_stat: construct_path_stat, - construct_dir: construct_dir, - construct_file: construct_file, - construct_link: construct_link, construct_process_result: construct_process_result, address: type_address, path_globs: type_path_globs, @@ -666,15 +658,24 @@ pub extern "C" fn capture_snapshots( path_globs_and_root_tuple_wrapper: Handle, ) -> PyResult { let values = externs::project_multi(&path_globs_and_root_tuple_wrapper.into(), "dependencies"); - let path_globs_and_roots_result: Result, String> = values + let path_globs_and_roots_result = values .iter() .map(|value| { let root = PathBuf::from(externs::project_str(&value, "root")); let path_globs = nodes::Snapshot::lift_path_globs(&externs::project_ignoring_type(&value, "path_globs")); - path_globs.map(|path_globs| (path_globs, root)) + let digest_hint = { + let maybe_digest = externs::project_ignoring_type(&value, "digest_hint"); + // TODO: Extract a singleton Key for None. + if maybe_digest == externs::eval("None").unwrap() { + None + } else { + Some(nodes::lift_digest(&maybe_digest)?) 
+ } + }; + path_globs.map(|path_globs| (path_globs, root, digest_hint)) }) - .collect(); + .collect::, _>>(); let path_globs_and_roots = match path_globs_and_roots_result { Ok(v) => v, @@ -689,13 +690,14 @@ pub extern "C" fn capture_snapshots( futures::future::join_all( path_globs_and_roots .into_iter() - .map(|(path_globs, root)| { + .map(|(path_globs, root, digest_hint)| { let core = core.clone(); fs::Snapshot::capture_snapshot_from_arbitrary_root( core.store(), core.fs_pool.clone(), root, path_globs, + digest_hint, ) .map(move |snapshot| nodes::Snapshot::store_snapshot(&core, &snapshot)) }) diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs index 3a9587e741e..6220326cf83 100644 --- a/src/rust/engine/src/nodes.rs +++ b/src/rust/engine/src/nodes.rs @@ -216,22 +216,11 @@ impl WrappedNode for Select { lift_digest(&directory_digest_val).map_err(|str| throw(&str)) }) .and_then(move |digest| { - let store = context.core.store(); context .core .store() - .load_directory(digest) + .contents_for_directory(digest) .map_err(|str| throw(&str)) - .and_then(move |maybe_directory| { - maybe_directory - .ok_or_else(|| format!("Could not find directory with digest {:?}", digest)) - .map_err(|str| throw(&str)) - }) - .and_then(move |directory| { - store - .contents_for_directory(&directory) - .map_err(|str| throw(&str)) - }) .map(move |files_content| Snapshot::store_files_content(&context, &files_content)) }) .to_boxed() @@ -540,16 +529,24 @@ impl Snapshot { } pub fn store_snapshot(core: &Arc, item: &fs::Snapshot) -> Value { - let path_stats: Vec<_> = item - .path_stats - .iter() - .map(|ps| Self::store_path_stat(core, ps)) - .collect(); + let mut files = Vec::new(); + let mut dirs = Vec::new(); + for ps in &item.path_stats { + match ps { + &PathStat::File { ref path, .. } => { + files.push(Self::store_path(path)); + } + &PathStat::Dir { ref path, .. 
} => { + dirs.push(Self::store_path(path)); + } + } + } externs::unsafe_call( &core.types.construct_snapshot, &[ Self::store_directory(core, &item.digest), - externs::store_tuple(&path_stats), + externs::store_tuple(&files), + externs::store_tuple(&dirs), ], ) } @@ -558,28 +555,6 @@ impl Snapshot { externs::store_utf8_osstr(item.as_os_str()) } - fn store_dir(core: &Arc, item: &Dir) -> Value { - let args = [Self::store_path(item.0.as_path())]; - externs::unsafe_call(&core.types.construct_dir, &args) - } - - fn store_file(core: &Arc, item: &File) -> Value { - let args = [Self::store_path(item.path.as_path())]; - externs::unsafe_call(&core.types.construct_file, &args) - } - - fn store_path_stat(core: &Arc, item: &PathStat) -> Value { - let args = match item { - &PathStat::Dir { ref path, ref stat } => { - vec![Self::store_path(path), Self::store_dir(core, stat)] - } - &PathStat::File { ref path, ref stat } => { - vec![Self::store_path(path), Self::store_file(core, stat)] - } - }; - externs::unsafe_call(&core.types.construct_path_stat, &args) - } - fn store_file_content(context: &Context, item: &FileContent) -> Value { externs::unsafe_call( &context.core.types.construct_file_content, diff --git a/src/rust/engine/src/types.rs b/src/rust/engine/src/types.rs index c4333deb737..3b517e7919c 100644 --- a/src/rust/engine/src/types.rs +++ b/src/rust/engine/src/types.rs @@ -5,10 +5,6 @@ pub struct Types { pub construct_snapshot: Function, pub construct_file_content: Function, pub construct_files_content: Function, - pub construct_path_stat: Function, - pub construct_dir: Function, - pub construct_file: Function, - pub construct_link: Function, pub construct_process_result: Function, pub address: TypeConstraint, pub path_globs: TypeConstraint, diff --git a/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py b/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py index c775a60e2a7..d7406a6d71a 100644 --- a/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py +++ b/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py @@ -17,9 +17,16 @@ from pants.base.exceptions import TaskError from pants.build_graph.build_file_aliases import BuildFileAliases from pants.util.dirutil import safe_mkdtemp +from pants.util.objects import datatype from pants_test.jvm.nailgun_task_test_base import NailgunTaskTestBase +class DummyVersionedTarget(datatype(['target', 'results_dir'])): + @property + def current_results_dir(self): + return self.results_dir + + class AntlrJavaGenTest(NailgunTaskTestBase): @classmethod def task_type(cls): @@ -74,11 +81,12 @@ def execute_antlr_test(self, expected_package, target_workdir_fun=None): # Do not use task.workdir here, because when we calculating hash for synthetic target # we need persistent source paths in terms of relative position to build root. target_workdir = target_workdir_fun(self.build_root) + vt = DummyVersionedTarget(target, target_workdir) # Generate code, then create a synthetic target. 
task.execute_codegen(target, target_workdir) - sources = task._capture_sources(((target, target_workdir),))[0] - syn_target = task._inject_synthetic_target(target, target_workdir, sources) + sources = task._capture_sources((vt,))[0] + syn_target = task._inject_synthetic_target(vt, sources) actual_sources = [s for s in Fileset.rglobs('*.java', root=target_workdir)] expected_sources = syn_target.sources_relative_to_source_root() diff --git a/tests/python/pants_test/backend/codegen/protobuf/java/BUILD b/tests/python/pants_test/backend/codegen/protobuf/java/BUILD index da9c887c158..90451d248da 100644 --- a/tests/python/pants_test/backend/codegen/protobuf/java/BUILD +++ b/tests/python/pants_test/backend/codegen/protobuf/java/BUILD @@ -26,4 +26,5 @@ python_tests( 'tests/python/pants_test:int-test', ], tags = {'integration'}, + timeout = 240, ) diff --git a/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py b/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py index 4c1c944cfa7..4075f0ea228 100644 --- a/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py +++ b/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py @@ -112,9 +112,7 @@ def find_protoc_blocks(lines): out=pants_run.stderr_data, blocks=block_text)) - biggest_proto = -1 for block in all_blocks: - last_proto = -1 seen_extracted = False for line in block: # Make sure import bases appear after the bases for actual sources. @@ -124,14 +122,3 @@ def find_protoc_blocks(lines): else: self.assertFalse(seen_extracted, 'Local protoc bases must be ordered before imported bases!') - continue - # Check to make sure, eg, testproto4.proto never precedes testproto2.proto. - match = re.search(r'(?P\d+)\.proto[\\.]?$', line) - if match: - number = int(match.group('sequence')) - self.assertTrue(number > last_proto, '{proto} succeeded proto #{number}!\n{blocks}' - .format(proto=line, number=last_proto, blocks=block_text)) - last_proto = number - if last_proto > biggest_proto: - biggest_proto = last_proto - self.assertEqual(biggest_proto, 6, 'Not all protos were seen!\n{}'.format(block_text)) diff --git a/tests/python/pants_test/backend/codegen/wire/java/BUILD b/tests/python/pants_test/backend/codegen/wire/java/BUILD index 99e8abec771..03c82fe8d84 100644 --- a/tests/python/pants_test/backend/codegen/wire/java/BUILD +++ b/tests/python/pants_test/backend/codegen/wire/java/BUILD @@ -6,6 +6,7 @@ python_tests( sources = globs('*.py', exclude=[globs('*_integration.py')]), dependencies = [ '3rdparty/python/twitter/commons:twitter.common.collections', + '3rdparty/python:parameterized', 'src/python/pants/backend/codegen/wire/java', 'src/python/pants/java/jar', 'src/python/pants/backend/jvm/targets:jvm', @@ -29,4 +30,5 @@ python_tests( 'tests/python/pants_test:int-test', ], tags = {'integration'}, + timeout = 300, ) diff --git a/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py b/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py index e05d37b0f4e..b297af56110 100644 --- a/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py +++ b/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py @@ -4,6 +4,8 @@ from __future__ import absolute_import, division, print_function, unicode_literals +from parameterized import parameterized + from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary from pants.backend.codegen.wire.java.register import build_file_aliases as 
register_codegen from pants.backend.codegen.wire.java.wire_gen import WireGen @@ -59,28 +61,35 @@ def test_compiler_args_wirev1(self): 'bar.proto'], task.format_args_for_target(wire_targetv1, self.TARGET_WORKDIR)) - def test_compiler_args_all(self): + @parameterized.expand([(True,), (False,)]) + def test_compiler_args_all(self, ordered_sources): self._create_fake_wire_tool(version='1.8.0') kitchen_sink = self.make_target('src/wire:kitchen-sink', JavaWireLibrary, sources=['foo.proto', 'bar.proto', 'baz.proto'], registry_class='org.pantsbuild.Registry', service_writer='org.pantsbuild.DummyServiceWriter', no_options=True, + ordered_sources=ordered_sources, roots=['root1', 'root2', 'root3'], enum_options=['enum1', 'enum2', 'enum3'],) task = self.create_task(self.context(target_roots=[kitchen_sink])) - self.assertEqual([ - '--java_out={}'.format(self.TARGET_WORKDIR), - '--no_options', - '--service_writer=org.pantsbuild.DummyServiceWriter', - '--registry_class=org.pantsbuild.Registry', - '--roots=root1,root2,root3', - '--enum_options=enum1,enum2,enum3', - '--proto_path={}/src/wire'.format(self.build_root), - 'foo.proto', - 'bar.proto', - 'baz.proto'], - task.format_args_for_target(kitchen_sink, self.TARGET_WORKDIR)) + expected = [ + '--java_out={}'.format(self.TARGET_WORKDIR), + '--no_options', + '--service_writer=org.pantsbuild.DummyServiceWriter', + '--registry_class=org.pantsbuild.Registry', + '--roots=root1,root2,root3', + '--enum_options=enum1,enum2,enum3', + '--proto_path={}/src/wire'.format(self.build_root), + 'foo.proto', + 'bar.proto', + 'baz.proto', + ] + actual = task.format_args_for_target(kitchen_sink, self.TARGET_WORKDIR) + if not ordered_sources: + expected = set(expected) + actual = set(actual) + self.assertEqual(expected, actual) def test_compiler_args_proto_paths(self): self._create_fake_wire_tool() diff --git a/tests/python/pants_test/engine/legacy/test_graph.py b/tests/python/pants_test/engine/legacy/test_graph.py index 959612a9d63..0ca73b013bc 100644 --- a/tests/python/pants_test/engine/legacy/test_graph.py +++ b/tests/python/pants_test/engine/legacy/test_graph.py @@ -61,8 +61,9 @@ def test_invalidate_fsnode(self): self.assertGreater(invalidated_count, 0) def test_sources_ordering(self): - expected_sources = ['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'] - self.create_library('src/example', 'files', 'things', sources=expected_sources) + input_sources = ['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'] + expected_sources = sorted(input_sources) + self.create_library('src/example', 'files', 'things', sources=input_sources) target = self.target('src/example:things') sources = [os.path.basename(s) for s in target.sources_relative_to_buildroot()] diff --git a/tests/python/pants_test/engine/test_build_files.py b/tests/python/pants_test/engine/test_build_files.py index 1c5c94850f8..625d3c7ab57 100644 --- a/tests/python/pants_test/engine/test_build_files.py +++ b/tests/python/pants_test/engine/test_build_files.py @@ -8,14 +8,13 @@ import re import unittest -from pants.base.project_tree import Dir, File +from pants.base.project_tree import Dir from pants.base.specs import SiblingAddresses, SingleAddress, Specs from pants.build_graph.address import Address from pants.engine.addressable import addressable, addressable_dict from pants.engine.build_files import (ResolvedTypeMismatchError, addresses_from_address_families, create_graph_rules, parse_address_family) -from pants.engine.fs import (Digest, FileContent, FilesContent, Path, PathGlobs, Snapshot, - create_fs_rules) +from 
pants.engine.fs import Digest, FileContent, FilesContent, PathGlobs, Snapshot, create_fs_rules from pants.engine.legacy.structs import TargetAdaptor from pants.engine.mapper import AddressFamily, AddressMapper, ResolveError from pants.engine.nodes import Return, Throw @@ -34,7 +33,7 @@ def test_empty(self): """Test that parsing an empty BUILD file results in an empty AddressFamily.""" address_mapper = AddressMapper(JsonParser(TestTable())) af = run_rule(parse_address_family, address_mapper, Dir('/dev/null'), { - (Snapshot, PathGlobs): lambda _: Snapshot(Digest('abc', 10), (File('/dev/null/BUILD'),)), + (Snapshot, PathGlobs): lambda _: Snapshot(Digest('abc', 10), ('/dev/null/BUILD',), ()), (FilesContent, Digest): lambda _: FilesContent([FileContent('/dev/null/BUILD', b'')]), }) self.assertEqual(len(af.objects_by_name), 0) @@ -46,9 +45,7 @@ def _address_mapper(self): return AddressMapper(JsonParser(TestTable())) def _snapshot(self): - return Snapshot( - Digest('xx', 2), - (Path('root/BUILD', File('root/BUILD')),)) + return Snapshot(Digest('xx', 2), ('root/BUILD',), ()) def _resolve_build_file_addresses(self, specs, address_family, snapshot, address_mapper): return run_rule(addresses_from_address_families, address_mapper, specs, { @@ -59,8 +56,7 @@ def _resolve_build_file_addresses(self, specs, address_family, snapshot, address def test_duplicated(self): """Test that matching the same Spec twice succeeds.""" address = SingleAddress('a', 'a') - snapshot = Snapshot(Digest('xx', 2), - (Path('a/BUILD', File('a/BUILD')),)) + snapshot = Snapshot(Digest('xx', 2), ('a/BUILD',), ()) address_family = AddressFamily('a', {'a': ('a/BUILD', 'this is an object!')}) specs = Specs([address, address]) diff --git a/tests/python/pants_test/engine/test_fs.py b/tests/python/pants_test/engine/test_fs.py index 4f6c8cfaa8d..ecfdf29afc6 100644 --- a/tests/python/pants_test/engine/test_fs.py +++ b/tests/python/pants_test/engine/test_fs.py @@ -61,7 +61,7 @@ def assert_walk_snapshot(self, field, filespecs_or_globs, paths, ignore_patterns if prepare: prepare(project_tree) result = self.execute(scheduler, Snapshot, self.specs(filespecs_or_globs))[0] - self.assertEqual(sorted([p.path for p in getattr(result, field)]), sorted(paths)) + self.assertEqual(sorted(getattr(result, field)), sorted(paths)) def assert_content(self, filespecs_or_globs, expected_content): with self.mk_project_tree() as project_tree: @@ -76,7 +76,7 @@ def assert_digest(self, filespecs_or_globs, expected_files): scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) result = self.execute(scheduler, Snapshot, self.specs(filespecs_or_globs))[0] # Confirm all expected files were digested. 
- self.assertEqual(set(expected_files), {f.path for f in result.files}) + self.assertEqual(set(expected_files), set(result.files)) self.assertTrue(result.directory_digest.fingerprint is not None) def test_walk_literal(self): @@ -270,7 +270,7 @@ def test_snapshot_from_outside_buildroot_failure(self): self.assertIn("doesnotexist", str(cm.exception)) def assert_snapshot_equals(self, snapshot, files, directory_digest): - self.assertEqual([file.path for file in snapshot.files], files) + self.assertEqual(list(snapshot.files), files) self.assertEqual(snapshot.directory_digest, directory_digest) def test_merge_zero_directories(self): diff --git a/tests/python/pants_test/engine/test_isolated_process.py b/tests/python/pants_test/engine/test_isolated_process.py index 5a47b1412d0..3e62fda72f7 100644 --- a/tests/python/pants_test/engine/test_isolated_process.py +++ b/tests/python/pants_test/engine/test_isolated_process.py @@ -53,7 +53,7 @@ def bin_path(self): return self.binary_location.bin_path def argv_from_snapshot(self, snapshot): - cat_file_paths = [f.path for f in snapshot.files] + cat_file_paths = snapshot.files option_like_files = [p for p in cat_file_paths if p.startswith('-')] if option_like_files: @@ -138,9 +138,7 @@ def bin_path(self): return self.binary_location.bin_path def argv_from_source_snapshot(self, snapshot): - snapshot_file_paths = [f.path for f in snapshot.files] - - return (self.bin_path,) + tuple(snapshot_file_paths) + return (self.bin_path,) + snapshot.files class JavacCompileResult(datatype([ diff --git a/tests/python/pants_test/reporting/BUILD b/tests/python/pants_test/reporting/BUILD index e5614eb33a5..eaaf806cc1b 100644 --- a/tests/python/pants_test/reporting/BUILD +++ b/tests/python/pants_test/reporting/BUILD @@ -22,7 +22,7 @@ python_tests( 'tests/python/pants_test:int-test', ], tags = {'integration'}, - timeout = 240, + timeout = 600, ) python_tests( diff --git a/tests/python/pants_test/source/test_payload_fields.py b/tests/python/pants_test/source/test_payload_fields.py index 7bb5ff42694..189bf1f9ff2 100644 --- a/tests/python/pants_test/source/test_payload_fields.py +++ b/tests/python/pants_test/source/test_payload_fields.py @@ -6,8 +6,7 @@ from future.utils import text_type -from pants.base.project_tree import File -from pants.engine.fs import Digest, Path, Snapshot +from pants.engine.fs import Digest, Snapshot from pants.source.payload_fields import SourcesField from pants.source.wrapped_globs import Globs, LazyFilesetWithSpec from pants_test.test_base import TestBase @@ -83,10 +82,7 @@ def test_passes_eager_fileset_with_spec_through(self): self.assertEqual(['foo/foo/a.txt'], list(sf.relative_to_buildroot())) digest = '56001a7e48555f156420099a99da60a7a83acc90853046709341bf9f00a6f944' - want_snapshot = Snapshot( - Digest(text_type(digest), 77), - (Path('foo/foo/a.txt', stat=File('foo/foo/a.txt')),) - ) + want_snapshot = Snapshot(Digest(text_type(digest), 77), ('foo/foo/a.txt',), ()) # We explicitly pass a None scheduler because we expect no scheduler lookups to be required # in order to get a Snapshot. 
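
The test churn above is mechanical fallout from the new `Snapshot` shape: `files` and `dirs` are now tuples of plain path strings rather than `PathStat`/`File`/`Dir` wrappers, so call sites drop their `.path` and `.stat.path` projections. A rough sketch of the before/after, using `namedtuple` as a simplified stand-in for the `datatype` declarations:

```python
import os
from collections import namedtuple

# Simplified stand-ins for the engine datatypes.
Digest = namedtuple('Digest', ['fingerprint', 'serialized_bytes_length'])
Snapshot = namedtuple('Snapshot', ['directory_digest', 'files', 'dirs'])

snapshot = Snapshot(
  directory_digest=Digest('abc', 10),
  files=('a/BUILD', 'a/b/BUILD'),  # plain, buildroot-relative path strings
  dirs=('a', 'a/b'),
)

# Before: dirnames = {dirname(f.stat.path) for f in snapshot.files}
# After: no projection through a stat object is needed.
dirnames = {os.path.dirname(f) for f in snapshot.files}
assert dirnames == {'a', 'a/b'}
```
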
diff --git a/tests/python/pants_test/source/test_wrapped_globs.py b/tests/python/pants_test/source/test_wrapped_globs.py index c4a0a01500e..331f72af843 100644 --- a/tests/python/pants_test/source/test_wrapped_globs.py +++ b/tests/python/pants_test/source/test_wrapped_globs.py @@ -266,10 +266,6 @@ def test_source_snapshot(self): self.add_to_build_file('package/dir', 'files(name = "target", sources = ["foo"])') target = self.target('package/dir:target') snapshot = target.sources_snapshot(scheduler=self.scheduler) - snapshot_paths = tuple(file.path for file in snapshot.path_stats) - self.assertEqual( - ('package/dir/foo',), - snapshot_paths - ) + self.assertEqual(('package/dir/foo',), snapshot.files) self.assertEqual(target.sources_relative_to_target_base().files, ('foo',)) self.assertEqual(target.sources_relative_to_buildroot(), ['package/dir/foo']) diff --git a/tests/python/pants_test/task/test_simple_codegen_task.py b/tests/python/pants_test/task/test_simple_codegen_task.py index 01d8143bd5a..3aecf4fd82f 100644 --- a/tests/python/pants_test/task/test_simple_codegen_task.py +++ b/tests/python/pants_test/task/test_simple_codegen_task.py @@ -14,6 +14,7 @@ from pants.build_graph.target import Target from pants.task.simple_codegen_task import SimpleCodegenTask from pants.util.dirutil import safe_mkdtemp +from pants.util.objects import datatype from pants_test.task_test_base import TaskTestBase, ensure_cached @@ -113,6 +114,12 @@ def _copy_target_attributes(self): return ['copied'] +class DummyVersionedTarget(datatype(['target', 'results_dir'])): + @property + def current_results_dir(self): + return self.results_dir + + class SimpleCodegenTaskTest(TaskTestBase): @classmethod @@ -212,12 +219,13 @@ def _do_test_duplication(self, targets, allow_dups, should_fail): def execute(): for target in targets: target_workdir = target_workdirs[target] + vt = DummyVersionedTarget(target, target_workdir) task.execute_codegen(target, target_workdir) - sources = task._capture_sources(((target, target_workdir),))[0] - task._handle_duplicate_sources(target, target_workdir, sources) + sources = task._capture_sources((vt,))[0] + task._handle_duplicate_sources(vt, sources) # _handle_duplicate_sources may delete files from the filesystem, so we need to re-capture. - sources = task._capture_sources(((target, target_workdir),))[0] - syn_targets.append(task._inject_synthetic_target(target, target_workdir, sources)) + sources = task._capture_sources((vt,))[0] + syn_targets.append(task._inject_synthetic_target(vt, sources)) if should_fail: # If we're expected to fail, validate the resulting message. From 1ece4616e78a03318334284b11de26480941ac2a Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Fri, 15 Feb 2019 10:07:23 -0800 Subject: [PATCH 8/8] Prepare 1.14.0 (#7246) --- src/python/pants/notes/1.14.x.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/python/pants/notes/1.14.x.rst b/src/python/pants/notes/1.14.x.rst index 9945bef401f..9914a2bb1f0 100644 --- a/src/python/pants/notes/1.14.x.rst +++ b/src/python/pants/notes/1.14.x.rst @@ -3,6 +3,11 @@ This document describes releases leading up to the ``1.14.x`` ``stable`` series. +1.14.0 (2/15/2019) +------------------ + +The first stable release of the ``1.14.x`` series, with no changes since ``rc1``. + 1.14.0rc1 (2/06/2019) ---------------------
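
To close the loop on the `ordered_sources` flag exercised by the parameterized wire test above: because `Snapshot.files` arrives in sorted order, `wire_gen` re-matches each source glob literal against the snapshot to recover the order the BUILD file author wrote. A self-contained approximation of that re-matching (`order_sources` is a hypothetical helper, and `fnmatch` stands in for `pants.source.filespec.globs_matches`):

```python
import fnmatch


def order_sources(source_literals, snapshot_sources, excludes=()):
  """Apply each glob literal in its written order, requiring exactly one match apiece."""
  ordered = []
  for literal in source_literals:
    matches = [s for s in snapshot_sources
               if fnmatch.fnmatch(s, literal) and
               not any(fnmatch.fnmatch(s, e) for e in excludes)]
    if len(matches) != 1:
      raise ValueError('With `ordered_sources=True`, expected one match for each file '
                       'literal, but got: {} for literal `{}`.'.format(matches, literal))
    ordered.append(matches[0])
  return ordered


# The snapshot reports sources sorted, but the BUILD file listed foo before bar:
assert order_sources(['foo.proto', 'bar.proto'],
                     ['bar.proto', 'foo.proto']) == ['foo.proto', 'bar.proto']
```

This is also why the task raises `TargetDefinitionException` when a literal matches zero or several files: a literal source ordering is only well-defined when each entry matches unambiguously.
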