move the declarative task stuff out of the python backend testing #7279

Merged
@@ -23,14 +23,14 @@
class TestBuildLocalDistsWithCtypesNativeSources(BuildLocalPythonDistributionsTestBase):

@classproperty
def _run_before_task_types(cls):
def run_before_task_types(cls):
return [
CCompile,
CppCompile,
LinkSharedLibraries,
] + super(TestBuildLocalDistsWithCtypesNativeSources, cls)._run_before_task_types
] + super(TestBuildLocalDistsWithCtypesNativeSources, cls).run_before_task_types

_dist_specs = OrderedDict([
dist_specs = OrderedDict([

('src/python/plat_specific_c_dist:ctypes_c_library', {
'key': 'ctypes_c_library',
@@ -111,7 +111,7 @@ def test_ctypes_c_dist(self):
self.assertEqual(['platform_specific_ctypes_c_dist==0.0.0+{}'.format(snapshot_version)],
[str(x.requirement) for x in synthetic_target.requirements.value])
local_wheel_products = context.products.get('local_wheels')
local_wheel = self._retrieve_single_product_at_target_base(
local_wheel = self.retrieve_single_product_at_target_base(
local_wheel_products, platform_specific_dist)
self.assertTrue(check_wheel_platform_matches_host(local_wheel))

@@ -123,6 +123,6 @@ def test_ctypes_cpp_dist(self):
[str(x.requirement) for x in synthetic_target.requirements.value])

local_wheel_products = context.products.get('local_wheels')
local_wheel = self._retrieve_single_product_at_target_base(
local_wheel = self.retrieve_single_product_at_target_base(
local_wheel_products, platform_specific_dist)
self.assertTrue(check_wheel_platform_matches_host(local_wheel))
@@ -17,9 +17,9 @@
BuildLocalPythonDistributionsTestBase


class TestBuildLocalDistsNativeSources(BuildLocalPythonDistributionsTestBase):
class TestBuildLocalPythonDistributions(BuildLocalPythonDistributionsTestBase):

_dist_specs = OrderedDict([
dist_specs = OrderedDict([

('src/python/dist:universal_dist', {
'key': 'universal',
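For reference, the dist_specs entry above is cut off by the diff view. The following is a hedged sketch of what a complete entry might look like, based only on the shape described in the base class docstring (the special keys 'key' and 'filemap', with every remaining kwarg passed to self.make_target()); the target type, import path, source names, and file contents are illustrative assumptions rather than code from this PR.

# Hypothetical sketch of a complete dist_specs entry (not from this PR).
# 'key' indexes the created target in self.target_dict, 'filemap' creates the
# listed files under the target's base directory, and every other kwarg is
# forwarded to self.make_target().
from collections import OrderedDict

from pants.backend.python.targets.python_distribution import PythonDistribution  # assumed import path


dist_specs = OrderedDict([
  ('src/python/dist:universal_dist', {
    'key': 'universal',                 # lookup key for self.target_dict
    'target_type': PythonDistribution,  # assumed target type
    'sources': ['foo.py', 'setup.py'],
    'filemap': {
      'foo.py': 'print("hello")\n',
      'setup.py': (
        "from setuptools import setup\n"
        "setup(name='universal_dist', version='0.0.0', py_modules=['foo'])\n"
      ),
    },
  }),
])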
@@ -4,7 +4,6 @@

from __future__ import absolute_import, division, print_function, unicode_literals

import os
import re
from builtins import next, str

@@ -15,101 +14,40 @@
BuildLocalPythonDistributions
from pants.backend.python.tasks.resolve_requirements import ResolveRequirements
from pants.backend.python.tasks.select_interpreter import SelectInterpreter
from pants.build_graph.address import Address
from pants.util.collections import assert_single_element
from pants.util.memo import memoized_method
from pants.util.meta import classproperty
from pants_test.backend.python.tasks.python_task_test_base import (PythonTaskTestBase,
name_and_platform)
from pants_test.engine.scheduler_test_base import SchedulerTestBase
from pants_test.task_test_base import DeclarativeTaskTestMixin


class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, SchedulerTestBase):
class BuildLocalPythonDistributionsTestBase(PythonTaskTestBase, DeclarativeTaskTestMixin):

@classmethod
def task_type(cls):
return BuildLocalPythonDistributions

@classproperty
def _dist_specs(cls):
"""
This is an informally-specified nested dict -- see ../test_ctypes.py for an example. Special
keys are 'key' (used to index into `self.target_dict`) and 'filemap' (creates files at the
specified relative paths). The rest of the keys are fed into `self.make_target()`. An
`OrderedDict` of 2-tuples may be used if targets need to be created in a specific order (e.g. if
they have dependencies on each other).
"""
raise NotImplementedError('_dist_specs must be implemented!')

@classproperty
def _run_before_task_types(cls):
"""
By default, we just use a `BuildLocalPythonDistributions` task. When testing with C/C++ targets,
we want to compile and link them as well to get the resulting dist to build, so we add those
task types here and execute them beforehand.
"""
def run_before_task_types(cls):
return [SelectInterpreter]

@classproperty
def _run_after_task_types(cls):
"""Tasks to run after local dists are built, similar to `_run_before_task_types`."""
def run_after_task_types(cls):
return [ResolveRequirements]

@memoized_method
def _synthesize_task_types(self, task_types=()):
return [
self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__))
# TODO: make @memoized_method convert lists to tuples for hashing!
for tsk in task_types
]
@classmethod
def rules(cls):
return super(BuildLocalPythonDistributionsTestBase, cls).rules() + native_backend_rules()

@classproperty
def dist_specs(cls):
"""Fed into `self.populate_target_dict()`."""
raise NotImplementedError('dist_specs must be implemented!')

def setUp(self):
super(BuildLocalPythonDistributionsTestBase, self).setUp()

self.target_dict = {}

# Create a target from each specification and insert it into `self.target_dict`.
for target_spec, target_kwargs in self._dist_specs.items():
unprocessed_kwargs = target_kwargs.copy()

target_base = Address.parse(target_spec).spec_path

# Populate the target's owned files from the specification.
filemap = unprocessed_kwargs.pop('filemap', {})
for rel_path, content in filemap.items():
buildroot_path = os.path.join(target_base, rel_path)
self.create_file(buildroot_path, content)

# Ensure any dependencies exist in the target dict (`_dist_specs` must then be an
# OrderedDict).
# The 'key' is used to access the target in `self.target_dict`.
key = unprocessed_kwargs.pop('key')
dep_targets = []
for dep_spec in unprocessed_kwargs.pop('dependencies', []):
existing_tgt_key = self._dist_specs[dep_spec]['key']
dep_targets.append(self.target_dict[existing_tgt_key])

# Register the generated target.
generated_target = self.make_target(
spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs)
self.target_dict[key] = generated_target

def _all_specified_targets(self):
return list(self.target_dict.values())

def _scheduling_context(self, **kwargs):
scheduler = self.mk_scheduler(rules=native_backend_rules())
return self.context(scheduler=scheduler, **kwargs)

def _retrieve_single_product_at_target_base(self, product_mapping, target):
product = product_mapping.get(target)
base_dirs = list(product.keys())
self.assertEqual(1, len(base_dirs))
single_base_dir = base_dirs[0]
all_products = product[single_base_dir]
self.assertEqual(1, len(all_products))
single_product = all_products[0]
return single_product
# Share the target mapping across all test cases.
self.target_dict = self.populate_target_dict(self.dist_specs)

def _get_dist_snapshot_version(self, task, python_dist_target):
"""Get the target's fingerprint, and guess the resulting version string of the built dist.
@@ -132,47 +70,24 @@ def _get_dist_snapshot_version(self, task, python_dist_target):
# --tag-build option.
return re.sub(r'[^a-zA-Z0-9]', '.', versioned_target_fingerprint.lower())

def _create_task(self, task_type, context):
return task_type(context, self.test_workdir)

def _create_distribution_synthetic_target(self, python_dist_target, extra_targets=[]):
run_before_synthesized_task_types = self._synthesize_task_types(tuple(self._run_before_task_types))
python_create_distributions_task_type = self._testing_task_type
run_after_synthesized_task_types = self._synthesize_task_types(tuple(self._run_after_task_types))
all_synthesized_task_types = run_before_synthesized_task_types + [
python_create_distributions_task_type,
] + run_after_synthesized_task_types

context = self._scheduling_context(
target_roots=([python_dist_target] + extra_targets),
for_task_types=all_synthesized_task_types,
all_specified_targets = list(self.target_dict.values())
result = self.invoke_tasks(
# We set `target_closure` to check that all the targets in the build graph are exactly the
# ones we've just created before building python_dist()s (which creates further targets).
target_closure=all_specified_targets,
target_roots=[python_dist_target] + extra_targets,
for_subsystems=[PythonRepos, LibcDev],
# TODO(#6848): we should be testing all of these with both of our toolchains.
options={
'native-build-step': {
'toolchain_variant': 'llvm',
},
})
self.assertEqual(set(self._all_specified_targets()), set(context.build_graph.targets()))

run_before_task_instances = [
self._create_task(task_type, context)
for task_type in run_before_synthesized_task_types
]
python_create_distributions_task_instance = self._create_task(
python_create_distributions_task_type, context)
run_after_task_instances = [
self._create_task(task_type, context)
for task_type in run_after_synthesized_task_types
]
all_task_instances = run_before_task_instances + [
python_create_distributions_task_instance
] + run_after_task_instances

for tsk in all_task_instances:
tsk.execute()

synthetic_tgts = set(context.build_graph.targets()) - set(self._all_specified_targets())
context = result.context
python_create_distributions_task_instance = result.this_task

synthetic_tgts = set(context.build_graph.targets()) - set(all_specified_targets)
self.assertEqual(1, len(synthetic_tgts))
synthetic_target = next(iter(synthetic_tgts))

@@ -192,7 +107,7 @@ def _assert_dist_and_wheel_identity(self, expected_name, expected_version, expec
str(resulting_dist_req.requirement))

local_wheel_products = context.products.get('local_wheels')
local_wheel = self._retrieve_single_product_at_target_base(local_wheel_products, dist_target)
local_wheel = self.retrieve_single_product_at_target_base(local_wheel_products, dist_target)
dist, version, platform = name_and_platform(local_wheel)
self.assertEquals(dist, expected_name)
self.assertEquals(version, expected_snapshot_version)
tests/python/pants_test/task_test_base.py (88 additions & 0 deletions)
@@ -6,6 +6,7 @@

import glob
import os
from builtins import object
from contextlib import closing, contextmanager
from io import BytesIO

@@ -14,7 +15,11 @@
from pants.goal.goal import Goal
from pants.ivy.bootstrapper import Bootstrapper
from pants.task.console_task import ConsoleTask
from pants.task.task import Task
from pants.util.contextutil import temporary_dir
from pants.util.memo import memoized_method
from pants.util.meta import classproperty
from pants.util.objects import SubclassesOf, TypedCollection, datatype
from pants.util.process_handler import subprocess
from pants_test.test_base import TestBase

@@ -299,3 +304,86 @@ def assert_console_raises(self, exception, **kwargs):
"""
with self.assertRaises(exception):
self.execute_console_task(**kwargs)


class DeclarativeTaskTestMixin(object):
"""Experimental mixin for task tests allows specifying tasks to be run before or after the task.

Calling `self.invoke_tasks()` will create instances of and execute task types in
`self.run_before_task_types()`, then `task_type()`, then `self.run_after_task_types()`.
"""

@classproperty
def run_before_task_types(cls):
Review comment (Member): The run_before and run_after tasks look very important for correct usage of this class, but it's not clear to me what they would do (without reading the implementation of this class).

Reply (Contributor, Author): Added lots more documentation (and removed the previous docstrings)!
return []

@classproperty
def run_after_task_types(cls):
return []

@memoized_method
def _synthesize_task_types(self, task_types=()):
return [
self.synthesize_task_subtype(tsk, '__tmp_{}'.format(tsk.__name__))
# TODO(#7127): make @memoized_method convert lists to tuples for hashing!
for tsk in task_types
]

def _create_task(self, task_type, context):
"""Helper method to instantiate tasks besides self._testing_task_type in the test workdir."""
return task_type(context, self.test_workdir)

class TaskInvocationResult(datatype([
'context',
('before_tasks', TypedCollection(SubclassesOf(Task))),
('this_task', SubclassesOf(Task)),
('after_tasks', TypedCollection(SubclassesOf(Task))),
])): pass

def invoke_tasks(self, target_closure=None, **context_kwargs):
"""Create and execute the declaratively specified tasks in order.

Create instances of and execute task types in `self.run_before_task_types()`, then
`task_type()`, then `self.run_after_task_types()`.

:param Iterable target_closure: If not None, check that the build graph contains exactly these
targets before executing the tasks.
:param **context_kwargs: kwargs passed to `self.context()`. Note that this method already sets
`for_task_types`.
:return: A datatype containing the created context and the task instances which were executed.
:rtype: :class:`DeclarativeTaskTestMixin.TaskInvocationResult`
"""
run_before_synthesized_task_types = self._synthesize_task_types(tuple(self.run_before_task_types))
run_after_synthesized_task_types = self._synthesize_task_types(tuple(self.run_after_task_types))
all_synthesized_task_types = run_before_synthesized_task_types + [
self._testing_task_type,
] + run_after_synthesized_task_types

context = self.context(
for_task_types=all_synthesized_task_types,
**context_kwargs)
if target_closure is not None:
self.assertEqual(set(target_closure), set(context.build_graph.targets()))

run_before_task_instances = [
self._create_task(task_type, context)
for task_type in run_before_synthesized_task_types
]
current_task_instance = self._create_task(
self._testing_task_type, context)
run_after_task_instances = [
self._create_task(task_type, context)
for task_type in run_after_synthesized_task_types
]
all_task_instances = run_before_task_instances + [
current_task_instance
] + run_after_task_instances

for tsk in all_task_instances:
tsk.execute()

return self.TaskInvocationResult(
context=context,
before_tasks=run_before_task_instances,
this_task=current_task_instance,
after_tasks=run_after_task_instances)
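
To make the run_before/run_after hooks concrete (the question raised in the review comment above), here is a hedged sketch of a task test built on the new mixin. Only the mixin API (run_before_task_types, run_after_task_types, invoke_tasks, and TaskInvocationResult) comes from this PR; the chosen task types, target spec, and assertions are illustrative assumptions, and a real python_dist() test would also need the rules/subsystem setup shown in BuildLocalPythonDistributionsTestBase above.

# Hypothetical usage sketch of DeclarativeTaskTestMixin (not part of this PR).
from pants.backend.python.tasks.build_local_python_distributions import \
  BuildLocalPythonDistributions
from pants.backend.python.tasks.resolve_requirements import ResolveRequirements
from pants.backend.python.tasks.select_interpreter import SelectInterpreter
from pants.util.meta import classproperty
from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase
from pants_test.task_test_base import DeclarativeTaskTestMixin


class ExampleDeclarativeTaskTest(PythonTaskTestBase, DeclarativeTaskTestMixin):

  @classmethod
  def task_type(cls):
    # The task under test; it executes between the before- and after-tasks.
    return BuildLocalPythonDistributions

  @classproperty
  def run_before_task_types(cls):
    # Instantiated and executed, in order, before the task under test.
    return [SelectInterpreter]

  @classproperty
  def run_after_task_types(cls):
    # Instantiated and executed, in order, after the task under test.
    return [ResolveRequirements]

  def test_pipeline_runs_in_order(self):
    target = self.make_target(spec='src/python/example:example')
    result = self.invoke_tasks(
      target_roots=[target],
      # Optionally verify the build graph holds exactly these targets up front.
      target_closure=[target])
    # invoke_tasks returns a TaskInvocationResult datatype holding the created
    # context plus the executed task instances.
    self.assertIsInstance(result.this_task, BuildLocalPythonDistributions)
    self.assertEqual(1, len(result.before_tasks))
    self.assertEqual(1, len(result.after_tasks))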