From b059d50fe9f938b5cc4be607da43c98b1e871421 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 22 Feb 2019 10:53:38 -0800 Subject: [PATCH 1/5] move the declarative task stuff out of the python backend testing --- .../python/tasks/native/test_ctypes.py | 6 +- .../test_build_local_python_distributions.py | 2 +- .../tasks/util/build_local_dists_test_base.py | 117 ++--------------- .../pants_test/engine/scheduler_test_base.py | 120 ++++++++++++++++++ 4 files changed, 134 insertions(+), 111 deletions(-) diff --git a/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py b/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py index d1b014a45ea..802e3cbd67b 100644 --- a/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py +++ b/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py @@ -23,14 +23,14 @@ class TestBuildLocalDistsWithCtypesNativeSources(BuildLocalPythonDistributionsTestBase): @classproperty - def _run_before_task_types(cls): + def run_before_task_types(cls): return [ CCompile, CppCompile, LinkSharedLibraries, - ] + super(TestBuildLocalDistsWithCtypesNativeSources, cls)._run_before_task_types + ] + super(TestBuildLocalDistsWithCtypesNativeSources, cls).run_before_task_types - _dist_specs = OrderedDict([ + dist_specs = OrderedDict([ ('src/python/plat_specific_c_dist:ctypes_c_library', { 'key': 'ctypes_c_library', diff --git a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py index 1e92cdf6aaa..510193ed13f 100644 --- a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py +++ b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py @@ -19,7 +19,7 @@ class TestBuildLocalDistsNativeSources(BuildLocalPythonDistributionsTestBase): - _dist_specs = OrderedDict([ + dist_specs = OrderedDict([ ('src/python/dist:universal_dist', { 'key': 'universal', diff --git a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py index 0251efd28c7..6a397e024c2 100644 --- a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py +++ b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py @@ -4,7 +4,6 @@ from __future__ import absolute_import, division, print_function, unicode_literals -import os import re from builtins import next, str @@ -15,101 +14,34 @@ BuildLocalPythonDistributions from pants.backend.python.tasks.resolve_requirements import ResolveRequirements from pants.backend.python.tasks.select_interpreter import SelectInterpreter -from pants.build_graph.address import Address from pants.util.collections import assert_single_element -from pants.util.memo import memoized_method from pants.util.meta import classproperty from pants_test.backend.python.tasks.python_task_test_base import (PythonTaskTestBase, name_and_platform) -from pants_test.engine.scheduler_test_base import SchedulerTestBase +from pants_test.engine.scheduler_test_base import DeclarativeTaskTestBase -class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, SchedulerTestBase): +class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, DeclarativeTaskTestBase): @classmethod def task_type(cls): return BuildLocalPythonDistributions @classproperty - def _dist_specs(cls): - """ - 
This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special - keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the - specified relative paths). The rest of the keys are fed into `self.make_target()`. An - `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if - they have dependencies on each other). - """ - raise NotImplementedError('_dist_specs must be implemented!') - - @classproperty - def _run_before_task_types(cls): - """ - By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets, - we want to compile and link them as well to get the resulting dist to build, so we add those - task types here and execute them beforehand. - """ + def run_before_task_types(cls): return [SelectInterpreter] @classproperty - def _run_after_task_types(cls): - """Tasks to run after local dists are built, similar to `_run_before_task_types`.""" + def run_after_task_types(cls): return [ResolveRequirements] - @memoized_method - def _synthesize_task_types(self, task_types=()): - return [ - self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__)) - # TODO: make @memoized_method convert lists to tuples for hashing! - for tsk in task_types - ] + @classmethod + def rules(cls): + return super(BuildLocalPythonDistributionsTestBase, cls).rules() + native_backend_rules() def setUp(self): super(BuildLocalPythonDistributionsTestBase, self).setUp() - - self.target_dict = {} - - # Create a target from each specification and insert it into `self.target_dict`. - for target_spec, target_kwargs in self._dist_specs.items(): - unprocessed_kwargs = target_kwargs.copy() - - target_base = Address.parse(target_spec).spec_path - - # Populate the target's owned files from the specification. - filemap = unprocessed_kwargs.pop('filemap', {}) - for rel_path, content in filemap.items(): - buildroot_path = os.path.join(target_base, rel_path) - self.create_file(buildroot_path, content) - - # Ensure any dependencies exist in the target dict (`_dist_specs` must then be an - # OrderedDict). - # The 'key' is used to access the target in `self.target_dict`. - key = unprocessed_kwargs.pop('key') - dep_targets = [] - for dep_spec in unprocessed_kwargs.pop('dependencies', []): - existing_tgt_key = self._dist_specs[dep_spec]['key'] - dep_targets.append(self.target_dict[existing_tgt_key]) - - # Register the generated target. - generated_target = self.make_target( - spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs) - self.target_dict[key] = generated_target - - def _all_specified_targets(self): - return list(self.target_dict.values()) - - def _scheduling_context(self, **kwargs): - scheduler = self.mk_scheduler(rules=native_backend_rules()) - return self.context(scheduler=scheduler, **kwargs) - - def _retrieve_single_product_at_target_base(self, product_mapping, target): - product = product_mapping.get(target) - base_dirs = list(product.keys()) - self.assertEqual(1, len(base_dirs)) - single_base_dir = base_dirs[0] - all_products = product[single_base_dir] - self.assertEqual(1, len(all_products)) - single_product = all_products[0] - return single_product + self.populate_target_dict() def _get_dist_snapshot_version(self, task, python_dist_target): """Get the target's fingerprint, and guess the resulting version string of the built dist. @@ -132,20 +64,9 @@ def _get_dist_snapshot_version(self, task, python_dist_target): # --tag-build option. 
return re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower()) - def _create_task(self, task_type, context): - return task_type(context, self.test_workdir) - def _create_distribution_synthetic_target(self, python_dist_target, extra_targets=[]): - run_before_synthesized_task_types = self._synthesize_task_types(tuple(self._run_before_task_types)) - python_create_distributions_task_type = self._testing_task_type - run_after_synthesized_task_types = self._synthesize_task_types(tuple(self._run_after_task_types)) - all_synthesized_task_types = run_before_synthesized_task_types + [ - python_create_distributions_task_type, - ] + run_after_synthesized_task_types - - context = self._scheduling_context( - target_roots=([python_dist_target] + extra_targets), - for_task_types=all_synthesized_task_types, + context, _, python_create_distributions_task_instance, _ = self.invoke_tasks( + target_roots=[python_dist_target] + extra_targets, for_subsystems=[PythonRepos, LibcDev], # TODO(#6848): we should be testing all of these with both of our toolchains. options={ @@ -153,24 +74,6 @@ def _create_distribution_synthetic_target(self, python_dist_target, extra_target 'toolchain_variant': 'llvm', }, }) - self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets())) - - run_before_task_instances = [ - self._create_task(task_type, context) - for task_type in run_before_synthesized_task_types - ] - python_create_distributions_task_instance = self._create_task( - python_create_distributions_task_type, context) - run_after_task_instances = [ - self._create_task(task_type, context) - for task_type in run_after_synthesized_task_types - ] - all_task_instances = run_before_task_instances + [ - python_create_distributions_task_instance - ] + run_after_task_instances - - for tsk in all_task_instances: - tsk.execute() synthetic_tgts = set(context.build_graph.targets()) - set(self._all_specified_targets()) self.assertEqual(1, len(synthetic_tgts)) diff --git a/tests/python/pants_test/engine/scheduler_test_base.py b/tests/python/pants_test/engine/scheduler_test_base.py index 200bc6c778b..5497565bd02 100644 --- a/tests/python/pants_test/engine/scheduler_test_base.py +++ b/tests/python/pants_test/engine/scheduler_test_base.py @@ -9,11 +9,14 @@ from builtins import object from pants.base.file_system_project_tree import FileSystemProjectTree +from pants.build_graph.address import Address from pants.engine.nodes import Throw from pants.engine.scheduler import Scheduler from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS from pants.util.contextutil import temporary_file_path from pants.util.dirutil import safe_mkdtemp, safe_rmtree +from pants.util.memo import memoized_method +from pants.util.meta import classproperty from pants_test.engine.util import init_native @@ -97,3 +100,120 @@ def execute_raising_throw(self, scheduler, product, subject): self.assertTrue(type(resulting_value) is Throw) raise resulting_value.exc + + +class DeclarativeTaskTestBase(SchedulerTestBase): + """???/experimental blah, makes things declarative, whatever""" + + @classproperty + def dist_specs(cls): + """ + This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special + keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the + specified relative paths). The rest of the keys are fed into `self.make_target()`. An + `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. 
if + they have dependencies on each other). + """ + raise NotImplementedError('dist_specs must be implemented!') + + @classproperty + def run_before_task_types(cls): + """ + By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets, + we want to compile and link them as well to get the resulting dist to build, so we add those + task types here and execute them beforehand. + """ + return [] + + @classproperty + def run_after_task_types(cls): + """Tasks to run after local dists are built, similar to `run_before_task_types`.""" + return [] + + def populate_target_dict(self): + self.target_dict = {} + + # Create a target from each specification and insert it into `self.target_dict`. + for target_spec, target_kwargs in self.dist_specs.items(): + unprocessed_kwargs = target_kwargs.copy() + + target_base = Address.parse(target_spec).spec_path + + # Populate the target's owned files from the specification. + filemap = unprocessed_kwargs.pop('filemap', {}) + for rel_path, content in filemap.items(): + buildroot_path = os.path.join(target_base, rel_path) + self.create_file(buildroot_path, content) + + # Ensure any dependencies exist in the target dict (`dist_specs` must then be an + # OrderedDict). + # The 'key' is used to access the target in `self.target_dict`. + key = unprocessed_kwargs.pop('key') + dep_targets = [] + for dep_spec in unprocessed_kwargs.pop('dependencies', []): + existing_tgt_key = self.dist_specs[dep_spec]['key'] + dep_targets.append(self.target_dict[existing_tgt_key]) + + # Register the generated target. + generated_target = self.make_target( + spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs) + self.target_dict[key] = generated_target + + @memoized_method + def _synthesize_task_types(self, task_types=()): + return [ + self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__)) + # TODO: make @memoized_method convert lists to tuples for hashing! 
+ for tsk in task_types + ] + + def _all_specified_targets(self): + return list(self.target_dict.values()) + + def _scheduling_context(self, **kwargs): + scheduler = self.mk_scheduler(rules=self.rules()) + return self.context(scheduler=scheduler, **kwargs) + + def _retrieve_single_product_at_target_base(self, product_mapping, target): + product = product_mapping.get(target) + base_dirs = list(product.keys()) + self.assertEqual(1, len(base_dirs)) + single_base_dir = base_dirs[0] + all_products = product[single_base_dir] + self.assertEqual(1, len(all_products)) + single_product = all_products[0] + return single_product + + def _create_task(self, task_type, context): + return task_type(context, self.test_workdir) + + def invoke_tasks(self, **context_kwargs): + run_before_synthesized_task_types = self._synthesize_task_types(tuple(self.run_before_task_types)) + run_after_synthesized_task_types = self._synthesize_task_types(tuple(self.run_after_task_types)) + all_synthesized_task_types = run_before_synthesized_task_types + [ + self._testing_task_type, + ] + run_after_synthesized_task_types + + context = self._scheduling_context( + for_task_types=all_synthesized_task_types, + **context_kwargs) + self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets())) + + run_before_task_instances = [ + self._create_task(task_type, context) + for task_type in run_before_synthesized_task_types + ] + current_task_instance = self._create_task( + self._testing_task_type, context) + run_after_task_instances = [ + self._create_task(task_type, context) + for task_type in run_after_synthesized_task_types + ] + all_task_instances = run_before_task_instances + [ + current_task_instance + ] + run_after_task_instances + + for tsk in all_task_instances: + tsk.execute() + + return (context, run_before_task_instances, current_task_instance, run_after_task_instances) From 9be7fde004f3269e5cee34991292d38f748d0544 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 1 Mar 2019 00:06:09 -0800 Subject: [PATCH 2/5] move DeclarativeTaskTestMixin to TaskTestBase --- .../python/tasks/native/test_ctypes.py | 4 +- .../tasks/util/build_local_dists_test_base.py | 6 +- .../pants_test/engine/scheduler_test_base.py | 120 ------------------ tests/python/pants_test/task_test_base.py | 108 ++++++++++++++++ tests/python/pants_test/test_base.py | 7 + 5 files changed, 120 insertions(+), 125 deletions(-) diff --git a/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py b/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py index 802e3cbd67b..4b0548e033f 100644 --- a/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py +++ b/tests/python/pants_test/backend/python/tasks/native/test_ctypes.py @@ -111,7 +111,7 @@ def test_ctypes_c_dist(self): self.assertEqual(['platform_specific_ctypes_c_dist==0.0.0+{}'.format(snapshot_version)], [str(x.requirement) for x in synthetic_target.requirements.value]) local_wheel_products = context.products.get('local_wheels') - local_wheel = self._retrieve_single_product_at_target_base( + local_wheel = self.retrieve_single_product_at_target_base( local_wheel_products, platform_specific_dist) self.assertTrue(check_wheel_platform_matches_host(local_wheel)) @@ -123,6 +123,6 @@ def test_ctypes_cpp_dist(self): [str(x.requirement) for x in synthetic_target.requirements.value]) local_wheel_products = context.products.get('local_wheels') - local_wheel = self._retrieve_single_product_at_target_base( + 
local_wheel = self.retrieve_single_product_at_target_base( local_wheel_products, platform_specific_dist) self.assertTrue(check_wheel_platform_matches_host(local_wheel)) diff --git a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py index 6a397e024c2..21b8a384878 100644 --- a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py +++ b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py @@ -18,10 +18,10 @@ from pants.util.meta import classproperty from pants_test.backend.python.tasks.python_task_test_base import (PythonTaskTestBase, name_and_platform) -from pants_test.engine.scheduler_test_base import DeclarativeTaskTestBase +from pants_test.task_test_base import DeclarativeTaskTestMixin -class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, DeclarativeTaskTestBase): +class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, DeclarativeTaskTestMixin): @classmethod def task_type(cls): @@ -95,7 +95,7 @@ def _assert_dist_and_wheel_identity(self, expected_name, expected_version, expec str(resulting_dist_req.requirement)) local_wheel_products = context.products.get('local_wheels') - local_wheel = self._retrieve_single_product_at_target_base(local_wheel_products, dist_target) + local_wheel = self.retrieve_single_product_at_target_base(local_wheel_products, dist_target) dist, version, platform = name_and_platform(local_wheel) self.assertEquals(dist, expected_name) self.assertEquals(version, expected_snapshot_version) diff --git a/tests/python/pants_test/engine/scheduler_test_base.py b/tests/python/pants_test/engine/scheduler_test_base.py index 5497565bd02..200bc6c778b 100644 --- a/tests/python/pants_test/engine/scheduler_test_base.py +++ b/tests/python/pants_test/engine/scheduler_test_base.py @@ -9,14 +9,11 @@ from builtins import object from pants.base.file_system_project_tree import FileSystemProjectTree -from pants.build_graph.address import Address from pants.engine.nodes import Throw from pants.engine.scheduler import Scheduler from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS from pants.util.contextutil import temporary_file_path from pants.util.dirutil import safe_mkdtemp, safe_rmtree -from pants.util.memo import memoized_method -from pants.util.meta import classproperty from pants_test.engine.util import init_native @@ -100,120 +97,3 @@ def execute_raising_throw(self, scheduler, product, subject): self.assertTrue(type(resulting_value) is Throw) raise resulting_value.exc - - -class DeclarativeTaskTestBase(SchedulerTestBase): - """???/experimental blah, makes things declarative, whatever""" - - @classproperty - def dist_specs(cls): - """ - This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special - keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the - specified relative paths). The rest of the keys are fed into `self.make_target()`. An - `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if - they have dependencies on each other). - """ - raise NotImplementedError('dist_specs must be implemented!') - - @classproperty - def run_before_task_types(cls): - """ - By default, we just use a `BuildLocalPythonDistributions` task. 
When testing with C/C++ targets, - we want to compile and link them as well to get the resulting dist to build, so we add those - task types here and execute them beforehand. - """ - return [] - - @classproperty - def run_after_task_types(cls): - """Tasks to run after local dists are built, similar to `run_before_task_types`.""" - return [] - - def populate_target_dict(self): - self.target_dict = {} - - # Create a target from each specification and insert it into `self.target_dict`. - for target_spec, target_kwargs in self.dist_specs.items(): - unprocessed_kwargs = target_kwargs.copy() - - target_base = Address.parse(target_spec).spec_path - - # Populate the target's owned files from the specification. - filemap = unprocessed_kwargs.pop('filemap', {}) - for rel_path, content in filemap.items(): - buildroot_path = os.path.join(target_base, rel_path) - self.create_file(buildroot_path, content) - - # Ensure any dependencies exist in the target dict (`dist_specs` must then be an - # OrderedDict). - # The 'key' is used to access the target in `self.target_dict`. - key = unprocessed_kwargs.pop('key') - dep_targets = [] - for dep_spec in unprocessed_kwargs.pop('dependencies', []): - existing_tgt_key = self.dist_specs[dep_spec]['key'] - dep_targets.append(self.target_dict[existing_tgt_key]) - - # Register the generated target. - generated_target = self.make_target( - spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs) - self.target_dict[key] = generated_target - - @memoized_method - def _synthesize_task_types(self, task_types=()): - return [ - self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__)) - # TODO: make @memoized_method convert lists to tuples for hashing! - for tsk in task_types - ] - - def _all_specified_targets(self): - return list(self.target_dict.values()) - - def _scheduling_context(self, **kwargs): - scheduler = self.mk_scheduler(rules=self.rules()) - return self.context(scheduler=scheduler, **kwargs) - - def _retrieve_single_product_at_target_base(self, product_mapping, target): - product = product_mapping.get(target) - base_dirs = list(product.keys()) - self.assertEqual(1, len(base_dirs)) - single_base_dir = base_dirs[0] - all_products = product[single_base_dir] - self.assertEqual(1, len(all_products)) - single_product = all_products[0] - return single_product - - def _create_task(self, task_type, context): - return task_type(context, self.test_workdir) - - def invoke_tasks(self, **context_kwargs): - run_before_synthesized_task_types = self._synthesize_task_types(tuple(self.run_before_task_types)) - run_after_synthesized_task_types = self._synthesize_task_types(tuple(self.run_after_task_types)) - all_synthesized_task_types = run_before_synthesized_task_types + [ - self._testing_task_type, - ] + run_after_synthesized_task_types - - context = self._scheduling_context( - for_task_types=all_synthesized_task_types, - **context_kwargs) - self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets())) - - run_before_task_instances = [ - self._create_task(task_type, context) - for task_type in run_before_synthesized_task_types - ] - current_task_instance = self._create_task( - self._testing_task_type, context) - run_after_task_instances = [ - self._create_task(task_type, context) - for task_type in run_after_synthesized_task_types - ] - all_task_instances = run_before_task_instances + [ - current_task_instance - ] + run_after_task_instances - - for tsk in all_task_instances: - tsk.execute() - - return (context, run_before_task_instances, 
current_task_instance, run_after_task_instances) diff --git a/tests/python/pants_test/task_test_base.py b/tests/python/pants_test/task_test_base.py index 633faee3afe..501c36b0241 100644 --- a/tests/python/pants_test/task_test_base.py +++ b/tests/python/pants_test/task_test_base.py @@ -6,15 +6,19 @@ import glob import os +from builtins import object from contextlib import closing, contextmanager from io import BytesIO from future.utils import PY2 +from pants.build_graph.address import Address from pants.goal.goal import Goal from pants.ivy.bootstrapper import Bootstrapper from pants.task.console_task import ConsoleTask from pants.util.contextutil import temporary_dir +from pants.util.memo import memoized_method +from pants.util.meta import classproperty from pants.util.process_handler import subprocess from pants_test.test_base import TestBase @@ -299,3 +303,107 @@ def assert_console_raises(self, exception, **kwargs): """ with self.assertRaises(exception): self.execute_console_task(**kwargs) + + +class DeclarativeTaskTestMixin(object): + """???/experimental blah, makes things declarative, whatever""" + + @classproperty + def dist_specs(cls): + """ + This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special + keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the + specified relative paths). The rest of the keys are fed into `self.make_target()`. An + `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if + they have dependencies on each other). + """ + raise NotImplementedError('dist_specs must be implemented!') + + @classproperty + def run_before_task_types(cls): + """ + By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets, + we want to compile and link them as well to get the resulting dist to build, so we add those + task types here and execute them beforehand. + """ + return [] + + @classproperty + def run_after_task_types(cls): + """Tasks to run after local dists are built, similar to `run_before_task_types`.""" + return [] + + def populate_target_dict(self): + self.target_dict = {} + + # Create a target from each specification and insert it into `self.target_dict`. + for target_spec, target_kwargs in self.dist_specs.items(): + unprocessed_kwargs = target_kwargs.copy() + + target_base = Address.parse(target_spec).spec_path + + # Populate the target's owned files from the specification. + filemap = unprocessed_kwargs.pop('filemap', {}) + for rel_path, content in filemap.items(): + buildroot_path = os.path.join(target_base, rel_path) + self.create_file(buildroot_path, content) + + # Ensure any dependencies exist in the target dict (`dist_specs` must then be an + # OrderedDict). + # The 'key' is used to access the target in `self.target_dict`. + key = unprocessed_kwargs.pop('key') + dep_targets = [] + for dep_spec in unprocessed_kwargs.pop('dependencies', []): + existing_tgt_key = self.dist_specs[dep_spec]['key'] + dep_targets.append(self.target_dict[existing_tgt_key]) + + # Register the generated target. + generated_target = self.make_target( + spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs) + self.target_dict[key] = generated_target + + @memoized_method + def _synthesize_task_types(self, task_types=()): + return [ + self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__)) + # TODO: make @memoized_method convert lists to tuples for hashing! 
+ for tsk in task_types + ] + + def _all_specified_targets(self): + return list(self.target_dict.values()) + + def _create_task(self, task_type, context): + """Helper method to instantiate tasks besides self._testing_task_type in the test workdir.""" + return task_type(context, self.test_workdir) + + def invoke_tasks(self, **context_kwargs): + run_before_synthesized_task_types = self._synthesize_task_types(tuple(self.run_before_task_types)) + run_after_synthesized_task_types = self._synthesize_task_types(tuple(self.run_after_task_types)) + all_synthesized_task_types = run_before_synthesized_task_types + [ + self._testing_task_type, + ] + run_after_synthesized_task_types + + context = self.context( + for_task_types=all_synthesized_task_types, + **context_kwargs) + self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets())) + + run_before_task_instances = [ + self._create_task(task_type, context) + for task_type in run_before_synthesized_task_types + ] + current_task_instance = self._create_task( + self._testing_task_type, context) + run_after_task_instances = [ + self._create_task(task_type, context) + for task_type in run_after_synthesized_task_types + ] + all_task_instances = run_before_task_instances + [ + current_task_instance + ] + run_after_task_instances + + for tsk in all_task_instances: + tsk.execute() + + return (context, run_before_task_instances, current_task_instance, run_after_task_instances) diff --git a/tests/python/pants_test/test_base.py b/tests/python/pants_test/test_base.py index abe240977a2..58063ea0d21 100644 --- a/tests/python/pants_test/test_base.py +++ b/tests/python/pants_test/test_base.py @@ -35,6 +35,7 @@ from pants.source.source_root import SourceRootConfig from pants.subsystem.subsystem import Subsystem from pants.task.goal_options_mixin import GoalOptionsMixin +from pants.util.collections import assert_single_element from pants.util.contextutil import temporary_dir from pants.util.dirutil import (recursive_dirname, relative_symlink, safe_file_dump, safe_mkdir, safe_mkdtemp, safe_open, safe_rmtree) @@ -674,3 +675,9 @@ def captured_logging(self, level=None): finally: root_logger.setLevel(old_level) root_logger.removeHandler(handler) + + def retrieve_single_product_at_target_base(self, product_mapping, target): + mapping_for_target = product_mapping.get(target) + single_base_dir = assert_single_element(list(mapping_for_target.keys())) + single_product = assert_single_element(mapping_for_target[single_base_dir]) + return single_product From d4321552d30f7f2d5cebec1f71da365411bfb20f Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 1 Mar 2019 00:16:25 -0800 Subject: [PATCH 3/5] flesh out docstrings! --- tests/python/pants_test/task_test_base.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/tests/python/pants_test/task_test_base.py b/tests/python/pants_test/task_test_base.py index 501c36b0241..881ee46ffaf 100644 --- a/tests/python/pants_test/task_test_base.py +++ b/tests/python/pants_test/task_test_base.py @@ -306,14 +306,23 @@ def assert_console_raises(self, exception, **kwargs): class DeclarativeTaskTestMixin(object): - """???/experimental blah, makes things declarative, whatever""" + """Experimental mixin combining target descriptions with a file path dict. + + This class should be mixed in to subclasses of `TaskTestBase`! 
+ + NB: `self.populate_target_dict()` should be called in the `setUp()` method to use the target specs + specified in `dist_specs`! + + This mixin also allows specifying tasks to be run before or after the task_type() is executed when + calling `self.invoke_tasks()`. + """ @classproperty def dist_specs(cls): - """ - This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special - keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the - specified relative paths). The rest of the keys are fed into `self.make_target()`. An + """This is an informally-specified nested dict. + + Special keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at + the specified relative paths). The rest of the keys are fed into `self.make_target()`. An `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if they have dependencies on each other). """ From 543e62746092e14c3d4851420e1ab0919e7a79ec Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Sat, 2 Mar 2019 17:06:04 -0800 Subject: [PATCH 4/5] move target dict to TestBase and clean up DeclarativeTaskTestMixin --- .../test_build_local_python_distributions.py | 2 +- .../tasks/util/build_local_dists_test_base.py | 18 +++- tests/python/pants_test/task_test_base.py | 95 +++++++------------ tests/python/pants_test/test_base.py | 48 ++++++++++ 4 files changed, 97 insertions(+), 66 deletions(-) diff --git a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py index 510193ed13f..a069ed5045a 100644 --- a/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py +++ b/tests/python/pants_test/backend/python/tasks/test_build_local_python_distributions.py @@ -17,7 +17,7 @@ BuildLocalPythonDistributionsTestBase -class TestBuildLocalDistsNativeSources(BuildLocalPythonDistributionsTestBase): +class TestBuildLocalPythonDistributions(BuildLocalPythonDistributionsTestBase): dist_specs = OrderedDict([ diff --git a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py index 21b8a384878..249c573115f 100644 --- a/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py +++ b/tests/python/pants_test/backend/python/tasks/util/build_local_dists_test_base.py @@ -39,9 +39,15 @@ def run_after_task_types(cls): def rules(cls): return super(BuildLocalPythonDistributionsTestBase, cls).rules() + native_backend_rules() + @classproperty + def dist_specs(cls): + """Fed into `self.populate_target_dict()`.""" + raise NotImplementedError('dist_specs must be implemented!') + def setUp(self): super(BuildLocalPythonDistributionsTestBase, self).setUp() - self.populate_target_dict() + # Share the target mapping across all test cases. + self.target_dict = self.populate_target_dict(self.dist_specs) def _get_dist_snapshot_version(self, task, python_dist_target): """Get the target's fingerprint, and guess the resulting version string of the built dist. 
@@ -65,7 +71,11 @@ def _get_dist_snapshot_version(self, task, python_dist_target): return re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower()) def _create_distribution_synthetic_target(self, python_dist_target, extra_targets=[]): - context, _, python_create_distributions_task_instance, _ = self.invoke_tasks( + all_specified_targets = list(self.target_dict.values()) + result = self.invoke_tasks( + # We set `target_closure` to check that all the targets in the build graph are exactly the + # ones we've just created before building python_dist()s (which creates further targets). + target_closure=all_specified_targets, target_roots=[python_dist_target] + extra_targets, for_subsystems=[PythonRepos, LibcDev], # TODO(#6848): we should be testing all of these with both of our toolchains. @@ -74,8 +84,10 @@ def _create_distribution_synthetic_target(self, python_dist_target, extra_target 'toolchain_variant': 'llvm', }, }) + context = result.context + python_create_distributions_task_instance = result.this_task - synthetic_tgts = set(context.build_graph.targets()) - set(self._all_specified_targets()) + synthetic_tgts = set(context.build_graph.targets()) - set(all_specified_targets) self.assertEqual(1, len(synthetic_tgts)) synthetic_target = next(iter(synthetic_tgts)) diff --git a/tests/python/pants_test/task_test_base.py b/tests/python/pants_test/task_test_base.py index 881ee46ffaf..b0ed37e68d1 100644 --- a/tests/python/pants_test/task_test_base.py +++ b/tests/python/pants_test/task_test_base.py @@ -12,13 +12,14 @@ from future.utils import PY2 -from pants.build_graph.address import Address from pants.goal.goal import Goal from pants.ivy.bootstrapper import Bootstrapper from pants.task.console_task import ConsoleTask +from pants.task.task import Task from pants.util.contextutil import temporary_dir from pants.util.memo import memoized_method from pants.util.meta import classproperty +from pants.util.objects import SubclassesOf, TypedCollection, datatype from pants.util.process_handler import subprocess from pants_test.test_base import TestBase @@ -306,87 +307,52 @@ def assert_console_raises(self, exception, **kwargs): class DeclarativeTaskTestMixin(object): - """Experimental mixin combining target descriptions with a file path dict. + """Experimental mixin for task tests allows specifying tasks to be run before or after the task. - This class should be mixed in to subclasses of `TaskTestBase`! - - NB: `self.populate_target_dict()` should be called in the `setUp()` method to use the target specs - specified in `dist_specs`! - - This mixin also allows specifying tasks to be run before or after the task_type() is executed when - calling `self.invoke_tasks()`. + Calling `self.invoke_tasks()` will create instances of and execute task types in + `self.run_before_task_types()`, then `task_type()`, then `self.run_after_task_types()`. """ - @classproperty - def dist_specs(cls): - """This is an informally-specified nested dict. - - Special keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at - the specified relative paths). The rest of the keys are fed into `self.make_target()`. An - `OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if - they have dependencies on each other). - """ - raise NotImplementedError('dist_specs must be implemented!') - @classproperty def run_before_task_types(cls): - """ - By default, we just use a `BuildLocalPythonDistributions` task. 
When testing with C/C++ targets, - we want to compile and link them as well to get the resulting dist to build, so we add those - task types here and execute them beforehand. - """ return [] @classproperty def run_after_task_types(cls): - """Tasks to run after local dists are built, similar to `run_before_task_types`.""" return [] - def populate_target_dict(self): - self.target_dict = {} - - # Create a target from each specification and insert it into `self.target_dict`. - for target_spec, target_kwargs in self.dist_specs.items(): - unprocessed_kwargs = target_kwargs.copy() - - target_base = Address.parse(target_spec).spec_path - - # Populate the target's owned files from the specification. - filemap = unprocessed_kwargs.pop('filemap', {}) - for rel_path, content in filemap.items(): - buildroot_path = os.path.join(target_base, rel_path) - self.create_file(buildroot_path, content) - - # Ensure any dependencies exist in the target dict (`dist_specs` must then be an - # OrderedDict). - # The 'key' is used to access the target in `self.target_dict`. - key = unprocessed_kwargs.pop('key') - dep_targets = [] - for dep_spec in unprocessed_kwargs.pop('dependencies', []): - existing_tgt_key = self.dist_specs[dep_spec]['key'] - dep_targets.append(self.target_dict[existing_tgt_key]) - - # Register the generated target. - generated_target = self.make_target( - spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs) - self.target_dict[key] = generated_target - @memoized_method def _synthesize_task_types(self, task_types=()): return [ self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__)) - # TODO: make @memoized_method convert lists to tuples for hashing! + # TODO(#7127): make @memoized_method convert lists to tuples for hashing! for tsk in task_types ] - def _all_specified_targets(self): - return list(self.target_dict.values()) - def _create_task(self, task_type, context): """Helper method to instantiate tasks besides self._testing_task_type in the test workdir.""" return task_type(context, self.test_workdir) - def invoke_tasks(self, **context_kwargs): + class TaskInvocationResult(datatype([ + 'context', + ('before_tasks', TypedCollection(SubclassesOf(Task))), + ('this_task', SubclassesOf(Task)), + ('after_tasks', TypedCollection(SubclassesOf(Task))), + ])): pass + + def invoke_tasks(self, target_closure=None, **context_kwargs): + """Create and execute the declaratively specified tasks in order. + + Create instances of and execute task types in `self.run_before_task_types()`, then + `task_type()`, then `self.run_after_task_types()`. + + :param Iterable target_closure: If not None, check that the build graph contains exactly these + targets before executing the tasks. + :param **context_kwargs: kwargs passed to `self.context()`. Note that this method already sets + `for_task_types`. + :return: A datatype containing the created context and the task instances which were executed. 
+ :rtype: :class:`DeclarativeTaskTestMixin.TaskInvocationResult` + """ run_before_synthesized_task_types = self._synthesize_task_types(tuple(self.run_before_task_types)) run_after_synthesized_task_types = self._synthesize_task_types(tuple(self.run_after_task_types)) all_synthesized_task_types = run_before_synthesized_task_types + [ @@ -396,7 +362,8 @@ def invoke_tasks(self, **context_kwargs): context = self.context( for_task_types=all_synthesized_task_types, **context_kwargs) - self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets())) + if target_closure is not None: + self.assertEqual(set(target_closure), set(context.build_graph.targets())) run_before_task_instances = [ self._create_task(task_type, context) @@ -415,4 +382,8 @@ def invoke_tasks(self, **context_kwargs): for tsk in all_task_instances: tsk.execute() - return (context, run_before_task_instances, current_task_instance, run_after_task_instances) + return self.TaskInvocationResult( + context=context, + before_tasks=run_before_task_instances, + this_task=current_task_instance, + after_tasks=run_after_task_instances) diff --git a/tests/python/pants_test/test_base.py b/tests/python/pants_test/test_base.py index 58063ea0d21..66fc2e1d2f4 100644 --- a/tests/python/pants_test/test_base.py +++ b/tests/python/pants_test/test_base.py @@ -681,3 +681,51 @@ def retrieve_single_product_at_target_base(self, product_mapping, target): single_base_dir = assert_single_element(list(mapping_for_target.keys())) single_product = assert_single_element(mapping_for_target[single_base_dir]) return single_product + + def populate_target_dict(self, target_map): + """Return a dict containing targets with files generated according to `target_map`. + + The values of `target_map` should themselves be a flat dict, which contains keyword arguments + fed into `self.make_target()`, along with a few special keys. Special keys are: + - 'key' (required -- used to index into the returned dict). + - 'filemap' (creates files at the specified relative paths to the target). + + An `OrderedDict` of 2-tuples must be used with the targets topologically ordered, if + they have dependencies on each other. Note that dependency cycles are not currently supported + with this method. + + :param target_map: Dict mapping each target address to generate -> kwargs for + `self.make_target()`, along with a 'key' and optionally a 'filemap' argument. + :return: Dict mapping the required 'key' argument -> target instance for each element of + `target_map`. + :rtype: dict + """ + target_dict = {} + + # Create a target from each specification and insert it into `target_dict`. + for target_spec, target_kwargs in target_map.items(): + unprocessed_kwargs = target_kwargs.copy() + + target_base = Address.parse(target_spec).spec_path + + # Populate the target's owned files from the specification. + filemap = unprocessed_kwargs.pop('filemap', {}) + for rel_path, content in filemap.items(): + buildroot_path = os.path.join(target_base, rel_path) + self.create_file(buildroot_path, content) + + # Ensure any dependencies exist in the target dict (`target_map` must then be an + # OrderedDict). + # The 'key' is used to access the target in `target_dict`. + key = unprocessed_kwargs.pop('key') + dep_targets = [] + for dep_spec in unprocessed_kwargs.pop('dependencies', []): + existing_tgt_key = target_map[dep_spec]['key'] + dep_targets.append(target_dict[existing_tgt_key]) + + # Register the generated target. 
+ generated_target = self.make_target( + spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs) + target_dict[key] = generated_target + + return target_dict From c6a403144fc5c4d3f860626b5c4681046c8d4608 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Sat, 2 Mar 2019 19:07:14 -0800 Subject: [PATCH 5/5] make 'key' default to the target spec! --- tests/python/pants_test/test_base.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/python/pants_test/test_base.py b/tests/python/pants_test/test_base.py index 66fc2e1d2f4..3e5ee0f57f2 100644 --- a/tests/python/pants_test/test_base.py +++ b/tests/python/pants_test/test_base.py @@ -685,10 +685,11 @@ def retrieve_single_product_at_target_base(self, product_mapping, target): def populate_target_dict(self, target_map): """Return a dict containing targets with files generated according to `target_map`. - The values of `target_map` should themselves be a flat dict, which contains keyword arguments - fed into `self.make_target()`, along with a few special keys. Special keys are: - - 'key' (required -- used to index into the returned dict). - - 'filemap' (creates files at the specified relative paths to the target). + The keys of `target_map` are target address strings, while the values of `target_map` should be + a dict which contains keyword arguments fed into `self.make_target()`, along with a few special + keys. Special keys are: + - 'key': used to access the target in the returned dict. Defaults to the target address spec. + - 'filemap': creates files at the specified relative paths to the target. An `OrderedDict` of 2-tuples must be used with the targets topologically ordered, if they have dependencies on each other. Note that dependency cycles are not currently supported @@ -716,8 +717,8 @@ def populate_target_dict(self, target_map): # Ensure any dependencies exist in the target dict (`target_map` must then be an # OrderedDict). - # The 'key' is used to access the target in `target_dict`. - key = unprocessed_kwargs.pop('key') + # The 'key' is used to access the target in `target_dict`, and defaults to `target_spec`. + key = unprocessed_kwargs.pop('key', target_spec) dep_targets = [] for dep_spec in unprocessed_kwargs.pop('dependencies', []): existing_tgt_key = target_map[dep_spec]['key']
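
As a quick illustration of the API this series ends up with (a sketch only, not part of any patch above; MyTask, PrepTask, PythonLibrary, and the target spec below are assumptions made for the example), a TaskTestBase subclass mixing in DeclarativeTaskTestMixin could be wired up roughly as follows:

    from collections import OrderedDict

    from pants.backend.python.targets.python_library import PythonLibrary
    from pants.task.task import Task
    from pants.util.meta import classproperty
    from pants_test.task_test_base import DeclarativeTaskTestMixin, TaskTestBase


    class PrepTask(Task):
      """Hypothetical no-op prerequisite task, just to show run_before_task_types."""
      def execute(self):
        pass


    class MyTask(Task):
      """Hypothetical task under test."""
      def execute(self):
        pass


    class MyTaskTest(TaskTestBase, DeclarativeTaskTestMixin):

      @classmethod
      def task_type(cls):
        # The task under test (hypothetical).
        return MyTask

      @classproperty
      def run_before_task_types(cls):
        # Executed before the task under test, mirroring the test_ctypes.py pattern.
        return [PrepTask]

      # Fed into populate_target_dict(): 'key' indexes the returned dict (and defaults to the
      # target spec after PATCH 5/5), 'filemap' creates files relative to the target's spec_path,
      # and the remaining entries are passed through to self.make_target().
      dist_specs = OrderedDict([
        ('src/python/foo:foo', {
          'key': 'foo',
          'target_type': PythonLibrary,  # illustrative target type
          'sources': ['foo.py'],
          'filemap': {'foo.py': 'print("hello")\n'},
        }),
      ])

      def setUp(self):
        super(MyTaskTest, self).setUp()
        # Create the targets (and their owned files) declared in dist_specs.
        self.target_dict = self.populate_target_dict(self.dist_specs)

      def test_pipeline_runs(self):
        # Runs a synthesized PrepTask subtype, then the MyTask subtype, in a shared context.
        result = self.invoke_tasks(
          target_roots=[self.target_dict['foo']],
          target_closure=list(self.target_dict.values()))
        # result is a TaskInvocationResult: (context, before_tasks, this_task, after_tasks).
        self.assertIn(self.target_dict['foo'], result.context.build_graph.targets())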