diff --git a/src/python/pants/backend/jvm/argfile.py b/src/python/pants/backend/jvm/argfile.py index fc377477fec..488f8eaa3bd 100644 --- a/src/python/pants/backend/jvm/argfile.py +++ b/src/python/pants/backend/jvm/argfile.py @@ -12,6 +12,7 @@ from pants.util.contextutil import temporary_file from pants.util.dirutil import safe_open + logger = logging.getLogger(__name__) diff --git a/src/python/pants/cache/cache_setup.py b/src/python/pants/cache/cache_setup.py index 1b644c41236..4adc51fd65e 100644 --- a/src/python/pants/cache/cache_setup.py +++ b/src/python/pants/cache/cache_setup.py @@ -19,6 +19,7 @@ from pants.cache.resolver import NoopResolver, Resolver, RESTfulResolver from pants.cache.restful_artifact_cache import RESTfulArtifactCache from pants.subsystem.subsystem import Subsystem +from pants.util.memo import memoized_property class EmptyCacheSpecError(ArtifactCacheError): pass @@ -87,26 +88,26 @@ def register_options(cls, register): help='Permissions to use when writing artifacts to a local cache, in octal.') @classmethod - def create_cache_factory_for_task(cls, task, pinger=None, resolver=None): - return CacheFactory(cls.scoped_instance(task).get_options(), - task.context.log, task.stable_name(), pinger=pinger, resolver=resolver) + def create_cache_factory_for_task(cls, task, **kwargs): + scoped_options = cls.scoped_instance(task).get_options() + return CacheFactory(scoped_options, task.context.log, task, **kwargs) class CacheFactory(object): - def __init__(self, options, log, stable_name, pinger=None, resolver=None): + def __init__(self, options, log, task, pinger=None, resolver=None): """Create a cache factory from settings. :param options: Task's scoped options. :param log: Task's context log. - :param stable_name: Task's stable name. + :param task: Task to cache results for. :param pinger: Pinger to choose the best remote artifact cache URL. :param resolver: Resolver to look up remote artifact cache URLs. :return: cache factory. """ self._options = options self._log = log - self._stable_name = stable_name + self._task = task # Created on-demand. 
self._read_cache = None @@ -130,6 +131,16 @@ def __init__(self, options, log, stable_name, pinger=None, resolver=None): else: self._resolver = NoopResolver() + @staticmethod + def make_task_cache_dirname(task): + """Use the task fingerprint as the name of the cache subdirectory to store + results from the task.""" + return task.fingerprint + + @memoized_property + def _cache_dirname(self): + return self.make_task_cache_dirname(self._task) + @property def ignore(self): return self._options.ignore @@ -261,9 +272,9 @@ def _do_create_artifact_cache(self, spec, action): artifact_root = self._options.pants_workdir def create_local_cache(parent_path): - path = os.path.join(parent_path, self._stable_name) + path = os.path.join(parent_path, self._cache_dirname) self._log.debug('{0} {1} local artifact cache at {2}' - .format(self._stable_name, action, path)) + .format(self._task.stable_name(), action, path)) return LocalArtifactCache(artifact_root, path, compression, self._options.max_entries_per_target, permissions=self._options.write_permissions, @@ -273,8 +284,9 @@ def create_remote_cache(remote_spec, local_cache): urls = self.get_available_urls(remote_spec.split('|')) if len(urls) > 0: - best_url_selector = BestUrlSelector(['{}/{}'.format(url.rstrip('/'), self._stable_name) - for url in urls]) + best_url_selector = BestUrlSelector( + ['{}/{}'.format(url.rstrip('/'), self._cache_dirname) for url in urls] + ) local_cache = local_cache or TempLocalArtifactCache(artifact_root, compression) return RESTfulArtifactCache(artifact_root, best_url_selector, local_cache) diff --git a/src/python/pants/invalidation/build_invalidator.py b/src/python/pants/invalidation/build_invalidator.py index ebf96c6c761..7280e31a579 100644 --- a/src/python/pants/invalidation/build_invalidator.py +++ b/src/python/pants/invalidation/build_invalidator.py @@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) - import errno import hashlib import os diff --git a/src/python/pants/option/options.py b/src/python/pants/option/options.py index 1c655bc6f4a..e9a1eb7fcc4 100644 --- a/src/python/pants/option/options.py +++ b/src/python/pants/option/options.py @@ -13,7 +13,7 @@ from pants.option.global_options import GlobalOptionsRegistrar from pants.option.option_util import is_list_option from pants.option.option_value_container import OptionValueContainer -from pants.option.parser_hierarchy import ParserHierarchy, enclosing_scope +from pants.option.parser_hierarchy import ParserHierarchy, all_enclosing_scopes, enclosing_scope from pants.option.scope import ScopeInfo @@ -86,11 +86,9 @@ def complete_scopes(cls, scope_infos): # components) we can replace this line with `for si in scope_infos:`, because it will # not be possible for a deprecated_scope to introduce any new intermediate scopes. 
for si in copy.copy(ret): - scope = si.scope - while scope != '': + for scope in all_enclosing_scopes(si.scope, allow_global=False): if scope not in original_scopes: ret.add(ScopeInfo(scope, ScopeInfo.INTERMEDIATE)) - scope = enclosing_scope(scope) return ret @classmethod @@ -312,15 +310,18 @@ def for_scope(self, scope, inherit_from_enclosing_scope=True): return values - def get_fingerprintable_for_scope(self, scope, include_passthru=False, fingerprint_key=None, - invert=False): + def get_fingerprintable_for_scope(self, bottom_scope, include_passthru=False, + fingerprint_key=None, invert=False): """Returns a list of fingerprintable (option type, option value) pairs for the given scope. Fingerprintable options are options registered via a "fingerprint=True" kwarg. This flag can be parameterized with `fingerprint_key` for special cases. - :param str scope: The scope to gather fingerprintable options for. - :param bool include_passthru: Whether to include passthru args captured by `scope` in the + This method also searches enclosing options scopes of `bottom_scope` to determine the set of + fingerprintable pairs. + + :param str bottom_scope: The scope to gather fingerprintable options for. + :param bool include_passthru: Whether to include passthru args captured by `bottom_scope` in the fingerprintable options. :param string fingerprint_key: The option kwarg to match against (defaults to 'fingerprint'). :param bool invert: Whether or not to invert the boolean check for the fingerprint_key value. @@ -328,39 +329,35 @@ def get_fingerprintable_for_scope(self, scope, include_passthru=False, fingerpri :API: public """ fingerprint_key = fingerprint_key or 'fingerprint' - fingerprint_default = False if invert else None pairs = [] if include_passthru: # Passthru args can only be sent to outermost scopes so we gather them once here up-front. - passthru_args = self.passthru_args_for_scope(scope) + passthru_args = self.passthru_args_for_scope(bottom_scope) # NB: We can't sort passthru args, the underlying consumer may be order-sensitive. - pairs.extend((str, passthru_arg) for passthru_arg in passthru_args) + pairs.extend((str, pass_arg) for pass_arg in passthru_args) - # Note that we iterate over options registered at `scope` and at all enclosing scopes, since - # option-using code can read those values indirectly via its own OptionValueContainer, so - # they can affect that code's output. - registration_scope = scope - while registration_scope is not None: + # Note that we iterate over options registered at `bottom_scope` and at all + # enclosing scopes, since option-using code can read those values indirectly + # via its own OptionValueContainer, so they can affect that code's output. + for registration_scope in all_enclosing_scopes(bottom_scope): parser = self._parser_hierarchy.get_parser_by_scope(registration_scope) # Sort the arguments, so that the fingerprint is consistent. for (_, kwargs) in sorted(parser.option_registrations_iter()): - if kwargs.get('recursive') and not kwargs.get('recursive_root'): + if kwargs.get('recursive', False) and not kwargs.get('recursive_root', False): continue # We only need to fprint recursive options once. - if kwargs.get(fingerprint_key, fingerprint_default) is not (False if invert else True): + if bool(invert) == bool(kwargs.get(fingerprint_key, False)): continue # Note that we read the value from scope, even if the registration was on an enclosing # scope, to get the right value for recursive options (and because this mirrors what # option-using code does). 
- val = self.for_scope(scope)[kwargs['dest']] + val = self.for_scope(bottom_scope)[kwargs['dest']] # If we have a list then we delegate to the fingerprinting implementation of the members. if is_list_option(kwargs): val_type = kwargs.get('member_type', str) else: val_type = kwargs.get('type', str) pairs.append((val_type, val)) - registration_scope = (None if registration_scope == '' - else enclosing_scope(registration_scope)) return pairs def __getitem__(self, scope): diff --git a/src/python/pants/option/options_fingerprinter.py b/src/python/pants/option/options_fingerprinter.py index b93f38274bf..78d1306c7a9 100644 --- a/src/python/pants/option/options_fingerprinter.py +++ b/src/python/pants/option/options_fingerprinter.py @@ -37,24 +37,23 @@ class OptionsFingerprinter(object): """ @classmethod - def combined_options_fingerprint_for_scope(cls, scope, options, fingerprint_key=None, - invert=None, build_graph=None): + def combined_options_fingerprint_for_scope(cls, scope, options, + build_graph=None, **kwargs): """Given options and a scope, compute a combined fingerprint for the scope. :param string scope: The scope to fingerprint. :param Options options: The `Options` object to fingerprint. - :param string fingerprint_key: The options kwarg to filter against. - :param bool invert: Whether or not to invert the boolean check for the fingerprint_key value. :param BuildGraph build_graph: A `BuildGraph` instance, only needed if fingerprinting target options. + :param dict **kwargs: Keyword parameters passed on to + `Options#get_fingerprintable_for_scope`. + :return: Hexadecimal string representing the fingerprint for all `options` + values in `scope`. """ fingerprinter = cls(build_graph) hasher = sha1() - for (option_type, option_value) in options.get_fingerprintable_for_scope( - scope, - fingerprint_key=fingerprint_key, - invert=invert - ): + pairs = options.get_fingerprintable_for_scope(scope, **kwargs) + for (option_type, option_value) in pairs: hasher.update( # N.B. `OptionsFingerprinter.fingerprint()` can return `None`, # so we always cast to bytes here. diff --git a/src/python/pants/option/parser_hierarchy.py b/src/python/pants/option/parser_hierarchy.py index 62e28182c27..09c60f14bd5 100644 --- a/src/python/pants/option/parser_hierarchy.py +++ b/src/python/pants/option/parser_hierarchy.py @@ -5,16 +5,51 @@ from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) +import re + from pants.option.arg_splitter import GLOBAL_SCOPE from pants.option.config import Config from pants.option.parser import Parser +class InvalidScopeError(Exception): + pass + +_empty_scope_component_re = re.compile(r'\.\.') + + +def _validate_full_scope(scope): + if _empty_scope_component_re.search(scope): + raise InvalidScopeError( + "full scope '{}' has at least one empty component".format(scope)) + + def enclosing_scope(scope): """Utility function to return the scope immediately enclosing a given scope.""" + _validate_full_scope(scope) return scope.rpartition('.')[0] +def all_enclosing_scopes(scope, allow_global=True): + """Utility function to return all scopes up to the global scope enclosing a + given scope.""" + + _validate_full_scope(scope) + + # TODO(cosmicexplorer): validate scopes here and/or in `enclosing_scope()` + # instead of assuming correctness. 
+ def scope_within_range(tentative_scope): + if tentative_scope is None: + return False + if not allow_global and tentative_scope == GLOBAL_SCOPE: + return False + return True + + while scope_within_range(scope): + yield scope + scope = (None if scope == GLOBAL_SCOPE else enclosing_scope(scope)) + + class ParserHierarchy(object): """A hierarchy of scoped Parser instances. diff --git a/src/python/pants/task/task.py b/src/python/pants/task/task.py index c300b5901c7..916e669d0d1 100644 --- a/src/python/pants/task/task.py +++ b/src/python/pants/task/task.py @@ -179,12 +179,11 @@ def __init__(self, *args, **kwargs): self._task_name = type(self).__name__ self._cache_key_errors = set() self._cache_factory = CacheSetup.create_cache_factory_for_task(self) - self._options_fingerprinter = OptionsFingerprinter(self.context.build_graph) self._force_invalidated = False @memoized_method def _build_invalidator(self, root=False): - build_task = None if root else self.stable_name() + build_task = None if root else self.fingerprint return BuildInvalidator.Factory.create(build_task=build_task) def get_options(self): @@ -216,16 +215,16 @@ def workdir(self): return self._workdir def _options_fingerprint(self, scope): - pairs = self.context.options.get_fingerprintable_for_scope( + options_hasher = sha1() + options_hasher.update(scope) + options_fp = OptionsFingerprinter.combined_options_fingerprint_for_scope( scope, - include_passthru=self.supports_passthru_args() + self.context.options, + build_graph=self.context.build_graph, + include_passthru=self.supports_passthru_args(), ) - hasher = sha1() - for (option_type, option_val) in pairs: - fp = self._options_fingerprinter.fingerprint(option_type, option_val) - if fp is not None: - hasher.update(fp) - return hasher.hexdigest() + options_hasher.update(options_fp) + return options_hasher.hexdigest() @memoized_property def fingerprint(self): @@ -238,6 +237,7 @@ def fingerprint(self): A task's fingerprint is only valid afer the task has been fully initialized. """ hasher = sha1() + hasher.update(self.stable_name()) hasher.update(self._options_fingerprint(self.options_scope)) hasher.update(self.implementation_version_str()) # TODO: this is not recursive, but should be: see #2739 @@ -364,10 +364,10 @@ def invalidated(self, targets.extend(vt.targets) if len(targets): - msg_elements = ['Invalidated ', - items_to_report_element([t.address.reference() for t in targets], 'target'), - '.'] - self.context.log.info(*msg_elements) + msg, detail = items_to_report_element( + [t.address.reference() for t in targets], + 'target') + self.context.log.info('Invalidated {}: {}.'.format(msg, detail)) self._update_invalidation_report(invalidation_check, 'pre-check') @@ -582,11 +582,10 @@ def _get_update_artifact_cache_work(self, vts_artifactfiles_pairs): def _report_targets(self, prefix, targets, suffix, logger=None): logger = logger or self.context.log.info - logger( - prefix, - items_to_report_element([t.address.reference() for t in targets], 'target'), - suffix, - ) + msg, detail = items_to_report_element( + [t.address.reference() for t in targets], + 'target') + logger(prefix + '{}: {}'.format(msg, detail) + suffix) def require_single_root_target(self): """If a single target was specified on the cmd line, returns that target. 
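The traversal introduced by `all_enclosing_scopes` above can be exercised standalone. A minimal usage sketch of the new helper, assuming `GLOBAL_SCOPE` is the empty string as `pants.option.arg_splitter` defines it; the `compile.zinc` scope name is illustrative only:

from pants.option.parser_hierarchy import all_enclosing_scopes

# A dotted scope yields itself, then each enclosing scope, and finally the
# global scope (the empty string):
assert list(all_enclosing_scopes('compile.zinc')) == ['compile.zinc', 'compile', '']

# With allow_global=False the trailing global scope is dropped, which is the
# form Options.complete_scopes() uses above:
assert list(all_enclosing_scopes('compile.zinc', allow_global=False)) == ['compile.zinc', 'compile']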
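The combined effect of the `CacheFactory.make_task_cache_dirname` and `Task.fingerprint` changes above is that a task's cache subdirectory is now named by a sha1 over the task's identity instead of by `stable_name()` alone. A rough sketch of that derivation; the three input values below are made-up stand-ins, and the real `fingerprint` property may format the digest differently:

from hashlib import sha1

def task_cache_dirname(stable_name, options_fingerprint, impl_version_str):
    # Loosely mirrors Task.fingerprint after this change: the stable name,
    # the options fingerprint for the task's scope, and the implementation
    # version string all feed the hash that names the cache subdirectory.
    hasher = sha1()
    hasher.update(stable_name)
    hasher.update(options_fingerprint)
    hasher.update(impl_version_str)
    return hasher.hexdigest()

# Changing any fingerprinted option or bumping implementation_version() now
# selects a fresh cache subdirectory instead of reusing a fixed name:
print(task_cache_dirname(b'zinc', b'deadbeef', b'[("ZincCompile", 0)]'))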
diff --git a/tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py b/tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py index fc9c66d2018..67fd5d72bf4 100644 --- a/tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py +++ b/tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py @@ -11,7 +11,6 @@ from pants.backend.codegen.jaxb.jaxb_library import JaxbLibrary from pants.backend.codegen.jaxb.register import build_file_aliases as register_codegen from pants.build_graph.register import build_file_aliases as register_core - from pants_test.jvm.nailgun_task_test_base import NailgunTaskTestBase diff --git a/tests/python/pants_test/backend/jvm/tasks/coverage/test_cobertura.py b/tests/python/pants_test/backend/jvm/tasks/coverage/test_cobertura.py index 365892e7d3f..569411f007e 100644 --- a/tests/python/pants_test/backend/jvm/tasks/coverage/test_cobertura.py +++ b/tests/python/pants_test/backend/jvm/tasks/coverage/test_cobertura.py @@ -236,6 +236,3 @@ def test_coverage_forced(self): no_force_cobertura._nothing_to_instrument = False self.assertEquals(no_force_cobertura.should_report(exception), False, 'Don\'t report after a failure if coverage isn\'t forced.') - - - diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py index 1c2cecb6c89..0c1c8d8e741 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py @@ -9,7 +9,6 @@ from collections import namedtuple from textwrap import dedent -from pants.backend.jvm.tasks.jvm_compile.zinc.zinc_compile import ZincCompile from pants.base.build_environment import get_buildroot from pants.util.contextutil import temporary_dir from pants.util.dirutil import safe_mkdir, safe_open, safe_rmtree @@ -202,9 +201,7 @@ def complete_config(config): buildfile = os.path.join(src_dir, 'BUILD') spec = os.path.join(src_dir, ':cachetest') - artifact_dir = os.path.join(cache_dir, - ZincCompile.stable_name(), - '{}.cachetest'.format(os.path.basename(src_dir))) + artifact_dir = None for c in compiles: # Clear the src directory and recreate the files. @@ -216,7 +213,13 @@ def complete_config(config): # Compile, and confirm that we have the right count of artifacts. 
self.run_compile(spec, complete_config(c.config), workdir) - self.assertEquals(c.artifact_count, len(os.listdir(artifact_dir))) + + artifact_dir = self.get_cache_subdir(cache_dir) + cache_test_subdir = os.path.join( + artifact_dir, + '{}.cachetest'.format(os.path.basename(src_dir)), + ) + self.assertEquals(c.artifact_count, len(os.listdir(cache_test_subdir))) class CacheCompileIntegrationWithZjarsTest(CacheCompileIntegrationTest): diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py index ffc0fc59775..1274c72c2da 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py @@ -7,7 +7,6 @@ import os -from pants.backend.jvm.tasks.jvm_compile.zinc.zinc_compile import ZincCompile from pants.fs.archive import TarArchiver from pants.util.contextutil import temporary_dir from pants.util.dirutil import safe_walk @@ -31,19 +30,17 @@ def test_basic_binary(self): def test_nocache(self): with temporary_dir() as cache_dir: - bad_artifact_dir = os.path.join(cache_dir, - ZincCompile.stable_name(), - 'testprojects.src.java.org.pantsbuild.testproject.nocache.nocache') - good_artifact_dir = os.path.join(cache_dir, - ZincCompile.stable_name(), - 'testprojects.src.java.org.pantsbuild.testproject.nocache.cache_me') config = {'cache.compile.zinc': {'write_to': [cache_dir]}} - pants_run = self.run_pants(['compile', - 'testprojects/src/java/org/pantsbuild/testproject/nocache::'], - config) - self.assert_success(pants_run) + self.assert_success(self.run_pants([ + 'compile', + 'testprojects/src/java/org/pantsbuild/testproject/nocache::', + ], config=config)) + zinc_task_dir = self.get_cache_subdir(cache_dir) + + bad_artifact_dir = os.path.join(zinc_task_dir, 'testprojects.src.java.org.pantsbuild.testproject.nocache.nocache') + good_artifact_dir = os.path.join(zinc_task_dir, 'testprojects.src.java.org.pantsbuild.testproject.nocache.cache_me') # The nocache target is labeled with no_cache so it should not be written to the # artifact cache. self.assertFalse(os.path.exists(bad_artifact_dir)) @@ -62,42 +59,57 @@ def test_java_compile_produces_different_artifact_depending_on_java_version(self # produces two different artifacts. 
with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), - 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main') config = {'cache.compile.zinc': {'write_to': [cache_dir]}} - pants_run = self.run_pants(self.create_platform_args(6) + - ['compile', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'], - config) - self.assert_success(pants_run) - + java_6_args = self.create_platform_args(6) + [ + 'compile', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + ] + self.assert_success(self.run_pants(java_6_args, config)) + + java_6_artifact_dir = self.get_cache_subdir(cache_dir) + main_java_6_dir = os.path.join( + java_6_artifact_dir, + 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main', + ) # One artifact for java 6 - self.assertEqual(len(os.listdir(artifact_dir)), 1) + self.assertEqual(len(os.listdir(main_java_6_dir)), 1) # Rerun for java 7 - pants_run = self.run_pants(self.create_platform_args(7) + - ['compile', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'], - config) - self.assert_success(pants_run) - - # One artifact for java 6 and one for 7 - self.assertEqual(len(os.listdir(artifact_dir)), 2) + java_7_args = self.create_platform_args(7) + [ + 'compile', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + ] + self.assert_success(self.run_pants(java_7_args, config)) + + java_7_artifact_dir = self.get_cache_subdir( + cache_dir, + other_dirs=[java_6_artifact_dir], + ) + main_java_7_dir = os.path.join( + java_7_artifact_dir, + 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main', + ) + # java 7 platform args should change the result dir name + self.assertEqual(len(os.listdir(main_java_7_dir)), 1) def test_java_compile_reads_resource_mapping(self): # Ensure that if an annotation processor produces a resource-mapping, # the artifact contains that resource mapping. 
with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), - 'testprojects.src.java.org.pantsbuild.testproject.annotation.main.main') config = {'cache.compile.zinc': {'write_to': [cache_dir]}} - pants_run = self.run_pants(['compile', - 'testprojects/src/java/org/pantsbuild/testproject/annotation/main'], - config) - self.assert_success(pants_run) + self.assert_success(self.run_pants([ + 'compile', + 'testprojects/src/java/org/pantsbuild/testproject/annotation/main', + ], config=config)) + + base_artifact_dir = self.get_cache_subdir(cache_dir) + artifact_dir = os.path.join( + base_artifact_dir, + 'testprojects.src.java.org.pantsbuild.testproject.annotation.main.main', + ) self.assertTrue(os.path.exists(artifact_dir)) artifacts = os.listdir(artifact_dir) @@ -151,8 +163,7 @@ def test_java_compile_with_different_resolved_jars_produce_different_artifacts(s with self.temporary_workdir() as workdir, temporary_dir() as cache_dir: path_prefix = 'testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility' dotted_path = path_prefix.replace(os.path.sep, '.') - artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), - '{}.jarversionincompatibility'.format(dotted_path)) + config = { 'cache.compile.zinc': { 'write_to': [cache_dir], @@ -163,24 +174,36 @@ def test_java_compile_with_different_resolved_jars_produce_different_artifacts(s }, } - pants_run = self.run_pants_with_workdir(['compile', - ('{}:only-15-directly'.format(path_prefix))], - workdir, - config) - self.assert_success(pants_run) + self.assert_success(self.run_pants_with_workdir([ + 'compile', + '{}:only-15-directly'.format(path_prefix), + ], workdir, config)) + guava_15_base_dir = self.get_cache_subdir(cache_dir) + guava_15_artifact_dir = os.path.join( + guava_15_base_dir, + '{}.jarversionincompatibility'.format(dotted_path), + ) # One artifact for guava 15 - self.assertEqual(len(os.listdir(artifact_dir)), 1) + self.assertEqual(len(os.listdir(guava_15_artifact_dir)), 1) # Rerun for guava 16 - pants_run = self.run_pants_with_workdir(['compile', - (u'{}:alongside-16'.format(path_prefix))], - workdir, - config) - self.assert_success(pants_run) + self.assert_success(self.run_pants_with_workdir([ + 'compile', + (u'{}:alongside-16'.format(path_prefix)), + ], workdir, config)) + + guava_16_base_dir = self.get_cache_subdir(cache_dir) + # the zinc compile task has the same option values in both runs, so the + # results directory should be the same + guava_16_artifact_dir = os.path.join( + guava_16_base_dir, + '{}.jarversionincompatibility'.format(dotted_path), + ) # One artifact for guava 15 and one for guava 16 - self.assertEqual(len(os.listdir(artifact_dir)), 2) + self.assertEqual(guava_16_artifact_dir, guava_15_artifact_dir) + self.assertEqual(len(os.listdir(guava_16_artifact_dir)), 2) def test_java_compile_with_corrupt_remote(self): """Tests that a corrupt artifact in the remote cache still results in a successful compile.""" @@ -205,7 +228,7 @@ def test_java_compile_with_corrupt_remote(self): self.assertFalse("Compiling" in second_run.stdout_data) # Corrupt the remote artifact. - self.assertTrue(server.corrupt_artifacts(r'.*zinc.*matcher.*') == 1) + self.assertEqual(server.corrupt_artifacts(r'.*'), 1) # Ensure that the third run succeeds, despite a failed attempt to fetch. 
third_run = self.run_pants_with_workdir(['clean-all', 'test', target], workdir, config) diff --git a/tests/python/pants_test/backend/python/tasks/test_pytest_run.py b/tests/python/pants_test/backend/python/tasks/test_pytest_run.py index d3623538da8..c78d327e17b 100644 --- a/tests/python/pants_test/backend/python/tasks/test_pytest_run.py +++ b/tests/python/pants_test/backend/python/tasks/test_pytest_run.py @@ -269,12 +269,14 @@ def test_red(self): self.run_failing_tests(targets=[self.red], failed_targets=[self.red]) def test_fail_fast_skips_second_red_test_with_single_chroot(self): - self.run_failing_tests(targets=[self.red, self.red_in_class], failed_targets=[self.red], + self.run_failing_tests(targets=[self.red, self.red_in_class], + failed_targets=[self.red], fail_fast=True, fast=False) def test_fail_fast_skips_second_red_test_with_isolated_chroot(self): - self.run_failing_tests(targets=[self.red, self.red_in_class], failed_targets=[self.red], + self.run_failing_tests(targets=[self.red, self.red_in_class], + failed_targets=[self.red], fail_fast=True, fast=True) diff --git a/tests/python/pants_test/backend/python/tasks2/test_pytest_run.py b/tests/python/pants_test/backend/python/tasks2/test_pytest_run.py index 2d5bb2126fa..e3c3355125f 100644 --- a/tests/python/pants_test/backend/python/tasks2/test_pytest_run.py +++ b/tests/python/pants_test/backend/python/tasks2/test_pytest_run.py @@ -459,14 +459,17 @@ def test_red(self): @ensure_cached(PytestRun, expected_num_artifacts=0) def test_fail_fast_skips_second_red_test_with_single_chroot(self): - self.run_failing_tests(targets=[self.red, self.red_in_class], failed_targets=[self.red], - fail_fast=True, fast=False) + self.run_failing_tests(targets=[self.red, self.red_in_class], + failed_targets=[self.red], + fail_fast=True, + fast=False) @ensure_cached(PytestRun, expected_num_artifacts=0) def test_fail_fast_skips_second_red_test_with_isolated_chroot(self): self.run_failing_tests(targets=[self.red, self.red_in_class], failed_targets=[self.red_in_class], - fail_fast=True, fast=True) + fail_fast=True, + fast=True) @ensure_cached(PytestRun, expected_num_artifacts=0) def test_red_test_in_class(self): diff --git a/tests/python/pants_test/base_test.py b/tests/python/pants_test/base_test.py index 344b2e3979f..d2480c90e2b 100644 --- a/tests/python/pants_test/base_test.py +++ b/tests/python/pants_test/base_test.py @@ -275,10 +275,13 @@ def reset_build_graph(self): def set_options_for_scope(self, scope, **kwargs): self.options[scope].update(kwargs) - def context(self, for_task_types=None, options=None, passthru_args=None, target_roots=None, - console_outstream=None, workspace=None, for_subsystems=None): + def context(self, for_task_types=None, for_subsystems=None, options=None, + target_roots=None, console_outstream=None, workspace=None, + **kwargs): """ :API: public + + :param dict **kwargs: keyword arguments passed in to `create_options_for_optionables`. """ # Many tests use source root functionality via the SourceRootConfig.global_instance(). # (typically accessed via Target.target_base), so we always set it up, for convenience. 
@@ -309,10 +312,8 @@ def context(self, for_task_types=None, options=None, passthru_args=None, target_ scoped_opts = options.setdefault(s, {}) scoped_opts.update(opts) - options = create_options_for_optionables(optionables, - extra_scopes=extra_scopes, - options=options, - passthru_args=passthru_args) + options = create_options_for_optionables( + optionables, extra_scopes=extra_scopes, options=options, **kwargs) Subsystem.reset(reset_options=True) Subsystem.set_options(options) diff --git a/tests/python/pants_test/cache/test_cache_cleanup_integration.py b/tests/python/pants_test/cache/test_cache_cleanup_integration.py index a5ddd6de159..5e0eaee5499 100644 --- a/tests/python/pants_test/cache/test_cache_cleanup_integration.py +++ b/tests/python/pants_test/cache/test_cache_cleanup_integration.py @@ -5,57 +5,84 @@ from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) +import glob import os import time -from pants.backend.jvm.tasks.jvm_compile.zinc.zinc_compile import ZincCompile from pants.util.contextutil import temporary_dir -from pants.util.dirutil import safe_mkdir, touch +from pants.util.dirutil import safe_delete, safe_mkdir, touch from pants_test.pants_run_integration_test import PantsRunIntegrationTest class CacheCleanupIntegrationTest(PantsRunIntegrationTest): - def create_platform_args(self, version): + def _create_platform_args(self, version): return [("""--jvm-platform-platforms={{'default': {{'target': '{version}'}}}}""" .format(version=version)), '--jvm-platform-default-platform=default'] + def _run_pants_get_artifact_dir(self, args, cache_dir, subdir, num_files_to_insert, expected_num_files, config=None, prev_dirs=[]): + """Run Pants with the given `args` and `config`, delete the results, add + some files, then run Pants again and ensure there are exactly + `expected_num_files` files in the output directory. + + Pants needs to be run twice because we don't know what the results directory + will be named before we run Pants, and we want to insert files into that + specific directory to test cache cleanup procedures. 
+ """ + self.assert_success(self.run_pants(args, config=config)) + + artifact_base_dir = self.get_cache_subdir(cache_dir, other_dirs=prev_dirs) + artifact_dir = os.path.join(artifact_base_dir, subdir) + + for tgz in glob.glob(os.path.join(artifact_dir, '*.tgz')): + safe_delete(tgz) + for i in range(0, num_files_to_insert): + touch(os.path.join(artifact_dir, 'old_cache_test{}'.format(i + 1))) + + self.assert_success(self.run_pants(args, config=config)) + self.assertEqual(len(os.listdir(artifact_dir)), expected_num_files) + + return artifact_base_dir + def test_buildcache_leave_one(self): """Ensure that max-old of 1 removes all but one files""" with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), - 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main') - - touch(os.path.join(artifact_dir, 'old_cache_test1')) - touch(os.path.join(artifact_dir, 'old_cache_test2')) - touch(os.path.join(artifact_dir, 'old_cache_test3')) - touch(os.path.join(artifact_dir, 'old_cache_test4')) - touch(os.path.join(artifact_dir, 'old_cache_test5')) - config = {'cache.compile.zinc': {'write_to': [cache_dir]}} - pants_run = self.run_pants(self.create_platform_args(6) + - ['compile.zinc', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', - '--cache-max-entries-per-target=1'], - config=config) - self.assert_success(pants_run) - - # One artifact for java 6 - self.assertEqual(len(os.listdir(artifact_dir)), 1) + java_6_args = self._create_platform_args(6) + [ + 'compile.zinc', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + '--cache-max-entries-per-target=1', + ] + java_6_artifact_base_dir = self._run_pants_get_artifact_dir( + java_6_args, + cache_dir, + 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main', + num_files_to_insert=5, + # One artifact for java 6 + expected_num_files=1, + config=config, + ) # Rerun for java 7 - pants_run = self.run_pants(self.create_platform_args(7) + - ['compile.zinc', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', - '--cache-max-entries-per-target=1'], - config) - self.assert_success(pants_run) - - # One artifact for java 7 - self.assertEqual(len(os.listdir(artifact_dir)), 1) + java_7_args = self._create_platform_args(7) + [ + 'compile.zinc', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + '--cache-max-entries-per-target=1', + ] + self._run_pants_get_artifact_dir( + java_7_args, + cache_dir, + 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main', + num_files_to_insert=2, + # One artifact for java 7 + expected_num_files=1, + config=config, + # java 7 platform args should change the name of the cache directory + prev_dirs=[java_6_artifact_base_dir], + ) def test_buildcache_leave_none(self): """Ensure that max-old of zero removes all files @@ -65,38 +92,40 @@ def test_buildcache_leave_none(self): """ with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), - 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main') - - touch(os.path.join(artifact_dir, 'old_cache_test1')) - touch(os.path.join(artifact_dir, 'old_cache_test2')) - touch(os.path.join(artifact_dir, 'old_cache_test3')) - touch(os.path.join(artifact_dir, 'old_cache_test4')) - touch(os.path.join(artifact_dir, 'old_cache_test5')) - config = {'cache.compile.zinc': {'write_to': [cache_dir]}} - pants_run = self.run_pants(self.create_platform_args(6) + - ['compile.zinc', - 
'testprojects/src/java/org/pantsbuild/testproject/unicode/main', - '--cache-max-entries-per-target=0'], - config=config) - self.assert_success(pants_run) - - # Cache cleanup disabled for 0 - - self.assertEqual(len(os.listdir(artifact_dir)), 6) + java_6_args = self._create_platform_args(6) + [ + 'compile.zinc', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + '--cache-max-entries-per-target=0', + ] + java_6_artifact_base_dir = self._run_pants_get_artifact_dir( + java_6_args, + cache_dir, + 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main', + num_files_to_insert=5, + # Cache cleanup disabled for 0 + expected_num_files=6, + config=config, + ) # Rerun for java 7 - pants_run = self.run_pants(self.create_platform_args(7) + - ['compile.zinc', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', - '--cache-max-entries-per-target=0'], - config) - self.assert_success(pants_run) - - # Cache cleanup disabled for 0 - self.assertEqual(len(os.listdir(artifact_dir)), 7) + java_7_args = self._create_platform_args(7) + [ + 'compile.zinc', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + '--cache-max-entries-per-target=0', + ] + self._run_pants_get_artifact_dir( + java_7_args, + cache_dir, + 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main', + num_files_to_insert=2, + # Cache cleanup disabled for 0 + expected_num_files=3, + config=config, + # java 7 platform args should change the name of the cache directory + prev_dirs=[java_6_artifact_base_dir], + ) def test_workdir_stale_builds_cleanup(self): """Ensure that current and previous build result_dirs and the newest `--workdir-max-build-entries` number of dirs @@ -105,11 +134,12 @@ def test_workdir_stale_builds_cleanup(self): with temporary_dir() as tmp_dir: workdir = os.path.join(tmp_dir, '.pants.d') - pants_run = self.run_pants_with_workdir(['compile', - 'export-classpath', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', - ], workdir) - self.assert_success(pants_run) + + self.assert_success(self.run_pants_with_workdir([ + 'compile', + 'export-classpath', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + ], workdir)) # Use the static exported classpath symlink to access the artifact in workdir # in order to avoid computing hashed task version used in workdir. 
@@ -121,32 +151,62 @@ def test_workdir_stale_builds_cleanup(self): # /compile/zinc/d4600a981d5d/testprojects.src.java.org.pantsbuild.testproject.unicode.main.main/ target_dir_in_pantsd = os.path.dirname(os.path.dirname(jar_path_in_pantsd)) - safe_mkdir(os.path.join(target_dir_in_pantsd, 'old_cache_test1_dir')) - safe_mkdir(os.path.join(target_dir_in_pantsd, 'old_cache_test2_dir')) - safe_mkdir(os.path.join(target_dir_in_pantsd, 'old_cache_test3_dir')) + old_cache_dirnames = set([ + 'old_cache_test1_dir/', + 'old_cache_test2_dir/', + 'old_cache_test3_dir/', + ]) + new_cache_dirnames = set([ + 'old_cache_test4_dir/', + 'old_cache_test5_dir/', + ]) + old_cache_entries = {os.path.join(target_dir_in_pantsd, subdir) for subdir in old_cache_dirnames} + new_cache_entries = {os.path.join(target_dir_in_pantsd, subdir) for subdir in new_cache_dirnames} + for old_entry in old_cache_entries: + safe_mkdir(old_entry) + # sleep for a bit so these files are all newer than the other ones time.sleep(1.1) - safe_mkdir(os.path.join(target_dir_in_pantsd, 'old_cache_test4_dir')) - safe_mkdir(os.path.join(target_dir_in_pantsd, 'old_cache_test5_dir')) + for new_entry in new_cache_entries: + safe_mkdir(new_entry) + expected_dirs = set([os.path.join(target_dir_in_pantsd, 'current/')]) | old_cache_entries | new_cache_entries # stable symlink, current version directory, and synthetically created directories. - self.assertTrue(os.path.exists(os.path.join(target_dir_in_pantsd, 'current'))) - self.assertEqual(len(os.listdir(target_dir_in_pantsd)), 7) + remaining_cache_dir_fingerprinted = self.get_cache_subdir(target_dir_in_pantsd, other_dirs=expected_dirs) + fingerprinted_realdir = os.path.realpath(os.path.join(target_dir_in_pantsd, 'current')) + self.assertEqual( + fingerprinted_realdir, + remaining_cache_dir_fingerprinted.rstrip('/')) max_entries_per_target = 2 - # 2nd run with --compile-zinc-debug-symbols will invalidate previous build thus triggering the clean up. 
- pants_run_2 = self.run_pants_with_workdir(['compile', - 'export-classpath', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', - '--compile-zinc-debug-symbols', - '--workdir-max-build-entries={}'.format(max_entries_per_target) - ], workdir) - self.assert_success(pants_run_2) - # stable, current, previous builds stay, and 2 newest dirs - self.assertEqual(len(os.listdir(target_dir_in_pantsd)), 5) - self.assertTrue(os.path.exists(os.path.join(target_dir_in_pantsd, 'current'))) - self.assertTrue(os.path.exists(os.path.join(target_dir_in_pantsd, 'old_cache_test4_dir'))) - self.assertTrue(os.path.exists(os.path.join(target_dir_in_pantsd, 'old_cache_test5_dir'))) - - self.assertFalse(os.path.exists(os.path.join(target_dir_in_pantsd, 'old_cache_test1_dir'))) - self.assertFalse(os.path.exists(os.path.join(target_dir_in_pantsd, 'old_cache_test2_dir'))) - self.assertFalse(os.path.exists(os.path.join(target_dir_in_pantsd, 'old_cache_test3_dir'))) + self.assert_success(self.run_pants_with_workdir([ + 'compile', + 'export-classpath', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + '--workdir-max-build-entries={}'.format(max_entries_per_target) + ], workdir)) + + # stable (same as before), current, and 2 newest dirs + self.assertEqual(os.path.dirname(os.path.dirname(os.path.realpath(classpath))), target_dir_in_pantsd) + newest_expected_dirs = expected_dirs - old_cache_entries + other_cache_dir_fingerprinted = self.get_cache_subdir(target_dir_in_pantsd, other_dirs=newest_expected_dirs) + self.assertEqual(other_cache_dir_fingerprinted, remaining_cache_dir_fingerprinted) + self.assertEqual( + os.path.realpath(os.path.join(target_dir_in_pantsd, 'current')), + fingerprinted_realdir) + + self.assert_success(self.run_pants_with_workdir([ + 'compile', + 'export-classpath', + 'testprojects/src/java/org/pantsbuild/testproject/unicode/main', + '--compile-zinc-debug-symbols', + '--workdir-max-build-entries={}'.format(max_entries_per_target) + ], workdir)) + + # stable, current, and 2 newest dirs + self.assertEqual(os.path.dirname(os.path.dirname(os.path.realpath(classpath))), target_dir_in_pantsd) + new_cache_dir_fingerprinted = self.get_cache_subdir(target_dir_in_pantsd, other_dirs=newest_expected_dirs) + # subsequent run with --compile-zinc-debug-symbols will invalidate previous build thus triggering the clean up. 
+ self.assertNotEqual(new_cache_dir_fingerprinted, remaining_cache_dir_fingerprinted) + new_fingerprinted_realdir = os.path.realpath(os.path.join(target_dir_in_pantsd, 'current')) + self.assertEqual(new_fingerprinted_realdir, + new_cache_dir_fingerprinted.rstrip('/')) diff --git a/tests/python/pants_test/cache/test_cache_setup.py b/tests/python/pants_test/cache/test_cache_setup.py index bafe156053c..b574b51a691 100644 --- a/tests/python/pants_test/cache/test_cache_setup.py +++ b/tests/python/pants_test/cache/test_cache_setup.py @@ -24,19 +24,16 @@ from pants_test.testutils.mock_logger import MockLogger -class DummyContext(object): - log = MockLogger() - - class DummyTask(Task): options_scope = 'dummy' - - context = DummyContext() + _stable_name = 'test' @classmethod def subsystem_dependencies(cls): return super(DummyTask, cls).subsystem_dependencies() + (CacheSetup, ) + def execute(self): pass + class MockPinger(object): @@ -64,6 +61,10 @@ class TestCacheSetup(BaseTest): CACHE_SPEC_RESOLVE_ONLY = CacheSpec(local=None, remote=TEST_RESOLVED_FROM) CACHE_SPEC_LOCAL_RESOLVE = CacheSpec(local=LOCAL_URI, remote=TEST_RESOLVED_FROM) + def create_task(self): + return DummyTask(self.context(for_task_types=[DummyTask]), + self.pants_workdir) + def setUp(self): super(TestCacheSetup, self).setUp() @@ -89,9 +90,9 @@ def cache_factory(self, **options): 'pants_workdir': self.pants_workdir } cache_options.update(**options) - return CacheFactory(options=create_options(options={'test': cache_options}).for_scope('test'), - log=MockLogger(), - stable_name='test', + return CacheFactory(create_options(options={'test': cache_options}).for_scope('test'), + MockLogger(), + self.create_task(), resolver=self.resolver) def test_sanitize_cache_spec(self): @@ -166,9 +167,10 @@ def mk_cache(spec, resolver=None): self.set_options_for_scope(CacheSetup.subscope(DummyTask.options_scope), read_from=spec, compression=1) self.context(for_task_types=[DummyTask]) # Force option initialization. 
- cache_factory = CacheSetup.create_cache_factory_for_task(DummyTask, - pinger=self.pinger, - resolver=resolver) + cache_factory = CacheSetup.create_cache_factory_for_task( + self.create_task(), + pinger=self.pinger, + resolver=resolver) return cache_factory.get_read_cache() def check(expected_type, spec, resolver=None): diff --git a/tests/python/pants_test/cache/test_caching.py b/tests/python/pants_test/cache/test_caching.py index ae633dd7c75..a18c038f676 100644 --- a/tests/python/pants_test/cache/test_caching.py +++ b/tests/python/pants_test/cache/test_caching.py @@ -9,7 +9,7 @@ from pants.base.payload import Payload from pants.build_graph.target import Target -from pants.cache.cache_setup import CacheSetup +from pants.cache.cache_setup import CacheFactory, CacheSetup from pants.task.task import Task from pants_test.tasks.task_test_base import TaskTestBase @@ -62,9 +62,11 @@ def test_cache_written_to(self): all_vts, invalid_vts = self.task.execute() self.assertGreater(len(invalid_vts), 0) for vt in invalid_vts: - artifact_address = "{}{}".format( - os.path.join(self.artifact_cache, self.task.stable_name(), self.target.id, vt.cache_key.hash), - '.tgz', + artifact_address = os.path.join( + self.artifact_cache, + CacheFactory.make_task_cache_dirname(self.task), + self.target.id, + '{}.tgz'.format(vt.cache_key.hash), ) self.assertTrue(os.path.isfile(artifact_address)) diff --git a/tests/python/pants_test/cache/test_caching_tarball_deference.py b/tests/python/pants_test/cache/test_caching_tarball_deference.py index f7623bc47f8..87c0db6a50f 100644 --- a/tests/python/pants_test/cache/test_caching_tarball_deference.py +++ b/tests/python/pants_test/cache/test_caching_tarball_deference.py @@ -9,7 +9,7 @@ from pants.base.payload import Payload from pants.build_graph.target import Target -from pants.cache.cache_setup import CacheSetup +from pants.cache.cache_setup import CacheFactory, CacheSetup from pants.task.task import Task from pants.util.contextutil import open_tar, temporary_dir from pants.util.dirutil import safe_open @@ -80,8 +80,11 @@ def _find_first_file_in_path(path, file_name): return None def _get_artifact_path(self, vt): - return "{}.tgz".format( - os.path.join(self.artifact_cache, self.task.stable_name(), self.target.id, vt.cache_key.hash) + return os.path.join( + self.artifact_cache, + CacheFactory.make_task_cache_dirname(self.task), + self.target.id, + '{}.tgz'.format(vt.cache_key.hash), ) def _prepare_task(self, deference, regular_file, regular_file_in_results_dir): diff --git a/tests/python/pants_test/jvm/jvm_tool_task_test_base.py b/tests/python/pants_test/jvm/jvm_tool_task_test_base.py index ddc8d2e74fb..4ab8f8dcd4f 100644 --- a/tests/python/pants_test/jvm/jvm_tool_task_test_base.py +++ b/tests/python/pants_test/jvm/jvm_tool_task_test_base.py @@ -19,7 +19,6 @@ from pants.java.jar.exclude import Exclude from pants.java.jar.jar_dependency import JarDependency from pants.util.dirutil import safe_mkdir - from pants_test.jvm.jvm_task_test_base import JvmTaskTestBase @@ -89,20 +88,13 @@ def setUp(self): Bootstrapper.reset_instance() - def context(self, for_task_types=None, options=None, passthru_args=None, target_roots=None, - console_outstream=None, workspace=None, for_subsystems=None): + def context(self, for_task_types=None, **kwargs): """ :API: public """ # Add in the bootstrapper task type, so its options get registered and set. 
for_task_types = [self.bootstrap_task_type] + (for_task_types or []) - return super(JvmToolTaskTestBase, self).context(for_task_types=for_task_types, - options=options, - passthru_args=passthru_args, - target_roots=target_roots, - console_outstream=console_outstream, - workspace=workspace, - for_subsystems=for_subsystems) + return super(JvmToolTaskTestBase, self).context(for_task_types=for_task_types, **kwargs) def prepare_execute(self, context): """Prepares a jvm tool-using task for execution, first bootstrapping any required jvm tools. @@ -114,8 +106,8 @@ def prepare_execute(self, context): :returns: The prepared Task instance. """ - task = self.create_task(context) - task.invalidate() + # test_workdir is an @property + workdir = self.test_workdir # Bootstrap the tools needed by the task under test. # We need the bootstrap task's workdir to be under the test's .pants.d, so that it can @@ -123,8 +115,10 @@ def prepare_execute(self, context): self.bootstrap_task_type.get_alternate_target_roots(context.options, self.address_mapper, self.build_graph) - bootstrap_workdir = os.path.join(os.path.dirname(task.workdir), 'bootstrap_jvm_tools') + bootstrap_workdir = os.path.join(os.path.dirname(workdir), 'bootstrap_jvm_tools') self.bootstrap_task_type(context, bootstrap_workdir).execute() + + task = self.create_task(context, workdir) return task def execute(self, context): diff --git a/tests/python/pants_test/option/test_enclosing_scopes.py b/tests/python/pants_test/option/test_enclosing_scopes.py new file mode 100644 index 00000000000..e585f610596 --- /dev/null +++ b/tests/python/pants_test/option/test_enclosing_scopes.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import (absolute_import, division, generators, nested_scopes, print_function, + unicode_literals, with_statement) + +from pants.option.arg_splitter import GLOBAL_SCOPE +from pants.option.parser_hierarchy import InvalidScopeError, all_enclosing_scopes, enclosing_scope +from pants_test.base_test import BaseTest + + +class TestEnclosingScopeTraversal(BaseTest): + def test_enclosing_scope(self): + """The enclosing scope of any non-nested scope should be the global scope, + and the enclosing scope of a nested scope should be the scope without its + final component.""" + self.assertEqual(GLOBAL_SCOPE, enclosing_scope(GLOBAL_SCOPE)) + self.assertEqual(GLOBAL_SCOPE, enclosing_scope('scope')) + self.assertEqual('base', enclosing_scope('base.subscope')) + + def test_all_enclosing_scopes(self): + """`all_enclosing_scopes` should repeatedly apply `enclosing_scope` to any + valid single- or multiple- component scope. 
`all_enclosing_scopes` should + not yield the global scope if `allow_global=False`.""" + global_closure = list(all_enclosing_scopes(GLOBAL_SCOPE, allow_global=True)) + self.assertEqual(global_closure, [GLOBAL_SCOPE]) + + global_closure_excluded = list(all_enclosing_scopes(GLOBAL_SCOPE, allow_global=False)) + self.assertEqual(global_closure_excluded, []) + + base_scope = 'scope' + base_scope_closure = list(all_enclosing_scopes(base_scope)) + self.assertEqual(base_scope_closure, [base_scope, GLOBAL_SCOPE]) + + subscope = 'subscope' + compound_scope = '{}.{}'.format(base_scope, subscope) + compound_scope_closure = list(all_enclosing_scopes(compound_scope)) + self.assertEqual(compound_scope_closure, [compound_scope, base_scope, GLOBAL_SCOPE]) + + compound_scope_closure_no_global = list(all_enclosing_scopes(compound_scope, allow_global=False)) + self.assertEqual(compound_scope_closure_no_global, [compound_scope, base_scope]) + + def test_valid_invalid_scope(self): + """Scopes with dashes or underscores are treated as a single component, and + scopes with empty components raise an InvalidScopeError.""" + base_dashed_scope = 'base-scope' + self.assertEqual(enclosing_scope(base_dashed_scope), GLOBAL_SCOPE) + + subscope_underscore = 'sub_scope' + self.assertEqual(enclosing_scope(subscope_underscore), GLOBAL_SCOPE) + + compound_scope = '{}.{}'.format(base_dashed_scope, subscope_underscore) + self.assertEqual(enclosing_scope(compound_scope), base_dashed_scope) + self.assertEqual(list(all_enclosing_scopes(compound_scope)), [ + compound_scope, + base_dashed_scope, + GLOBAL_SCOPE, + ]) + + invalid_scope = 'a.b..c.d' + with self.assertRaises(InvalidScopeError): + enclosing_scope(invalid_scope) + with self.assertRaises(InvalidScopeError): + # need to bounce to list to get it to error since this is a generator + list(all_enclosing_scopes(invalid_scope)) diff --git a/tests/python/pants_test/option/util/fakes.py b/tests/python/pants_test/option/util/fakes.py index 42615eb6dd7..7b861abd3d8 100644 --- a/tests/python/pants_test/option/util/fakes.py +++ b/tests/python/pants_test/option/util/fakes.py @@ -109,23 +109,37 @@ def items(self): def scope_to_flags(self): return {} - def get_fingerprintable_for_scope(self, scope, include_passthru=False): + def get_fingerprintable_for_scope(self, bottom_scope, include_passthru=False): + """Returns a list of fingerprintable (option type, option value) pairs for + the given scope. + + Note that this method only collects values for a single scope, NOT from + all enclosing scopes as in the Options class! + + :param str bottom_scope: The scope to gather fingerprintable options for. + :param bool include_passthru: Whether to include passthru args captured by `bottom_scope` in the + fingerprintable options. 
+ """ pairs = [] - if include_passthru and passthru_args: - pairs.extend((str, passthru_arg) for passthru_arg in passthru_args) - option_values = self.for_scope(scope) - pairs.extend((option_type, option_values[option_name]) - for option_name, option_type in fingerprintable[scope].items()) + if include_passthru: + passthru_args = self.passthru_args_for_scope(bottom_scope) + pairs.extend((str, arg) for arg in passthru_args) + + option_values = self.for_scope(bottom_scope) + for option_name, option_type in fingerprintable[bottom_scope].items(): + pairs.append((option_type, option_values[option_name])) return pairs - def __getitem__(self, key): - return self.for_scope(key) + def __getitem__(self, scope): + return self.for_scope(scope) + return FakeOptions() def create_options_for_optionables(optionables, extra_scopes=None, options=None, + options_fingerprintable=None, passthru_args=None): """Create a fake Options object for testing with appropriate defaults for the given optionables. @@ -135,6 +149,9 @@ def create_options_for_optionables(optionables, :param iterable extra_scopes: An optional series of extra known scopes in play. :param dict options: A dict of scope -> (dict of option name -> value) representing option values explicitly set via the command line. + :param dict options_fingerprintable: A dict of scope -> (dict of option name -> option type) + representing the fingerprintable options + and the scopes they are registered for. :param list passthru_args: A list of passthrough args (specified after `--` on the command line). :returns: A fake `Options` object with defaults populated for the given `optionables` and any explicitly set `options` overlayed. @@ -143,6 +160,15 @@ def create_options_for_optionables(optionables, fingerprintable_options = defaultdict(dict) bootstrap_option_values = None + # NB(cosmicexplorer): we do this again for all_options after calling + # register_func below, this is a hack + if options: + for scope, opts in options.items(): + all_options[scope].update(opts) + if options_fingerprintable: + for scope, opts in options_fingerprintable.items(): + fingerprintable_options[scope].update(opts) + def complete_scopes(scopes): """Return all enclosing scopes. @@ -151,7 +177,7 @@ def complete_scopes(scopes): """ completed_scopes = set(scopes) for scope in scopes: - while scope != '': + while scope != GLOBAL_SCOPE: if scope not in completed_scopes: completed_scopes.add(scope) scope = enclosing_scope(scope) diff --git a/tests/python/pants_test/pants_run_integration_test.py b/tests/python/pants_test/pants_run_integration_test.py index a01275de6c5..df659539c15 100644 --- a/tests/python/pants_test/pants_run_integration_test.py +++ b/tests/python/pants_test/pants_run_integration_test.py @@ -6,6 +6,7 @@ unicode_literals, with_statement) import ConfigParser +import glob import os import shutil import unittest @@ -176,6 +177,30 @@ def _get_profile_disambiguator(cls): cls._profile_disambiguator += 1 return ret + def get_cache_subdir(self, cache_dir, subdir_glob='*/', other_dirs=[]): + """Check that there is only one entry of `cache_dir` which matches the glob + specified by `subdir_glob`, excluding `other_dirs`, and + return it. + + :param str cache_dir: absolute path to some directory. + :param str subdir_glob: string specifying a glob for (one level down) + subdirectories of `cache_dir`. + :param list other_dirs: absolute paths to subdirectories of `cache_dir` + which must exist and match `subdir_glob`. 
+ :return: Assert that there is a single remaining directory entry matching + `subdir_glob` after removing `other_dirs`, and return it. + + This method does not check if its arguments or return values are + files or directories. If `subdir_glob` has a trailing slash, so + will the return value of this method. + """ + subdirs = set(glob.glob(os.path.join(cache_dir, subdir_glob))) + other_dirs = set(other_dirs) + self.assertTrue(other_dirs.issubset(subdirs)) + remaining_dirs = subdirs - other_dirs + self.assertEqual(len(remaining_dirs), 1) + return list(remaining_dirs)[0] + def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, extra_env=None, build_root=None, tee_output=False, **kwargs): diff --git a/tests/python/pants_test/task/test_task.py b/tests/python/pants_test/task/test_task.py index 060b6160056..53815c24a1e 100644 --- a/tests/python/pants_test/task/test_task.py +++ b/tests/python/pants_test/task/test_task.py @@ -11,6 +11,9 @@ from pants.base.exceptions import TaskError from pants.build_graph.files import Files from pants.cache.cache_setup import CacheSetup +from pants.option.arg_splitter import GLOBAL_SCOPE +from pants.subsystem.subsystem import Subsystem +from pants.subsystem.subsystem_client_mixin import SubsystemDependency from pants.task.task import Task from pants.util.dirutil import safe_rmtree from pants_test.tasks.task_test_base import TaskTestBase @@ -55,6 +58,73 @@ def execute(self): return vt, was_valid +class FakeTask(Task): + _impls = [] + + @classmethod + def implementation_version(cls): + return super(FakeTask, cls).implementation_version() + cls._impls + + @classmethod + def supports_passthru_args(cls): + return True + + options_scope = 'fake-task' + + _deps = () + + @classmethod + def subsystem_dependencies(cls): + return super(FakeTask, cls).subsystem_dependencies() + cls._deps + + def execute(self): pass + + +class OtherFakeTask(FakeTask): + _other_impls = [] + + @classmethod + def supports_passthru_args(cls): + return False + + @classmethod + def implementation_version(cls): + return super(OtherFakeTask, cls).implementation_version() + cls._other_impls + + options_scope = 'other-fake-task' + + +class FakeSubsystem(Subsystem): + options_scope = 'fake-subsystem' + + @classmethod + def register_options(cls, register): + super(FakeSubsystem, cls).register_options(register) + register('--fake-option', type=bool) + + +class AnotherFakeTask(Task): + options_scope = 'another-fake-task' + + @classmethod + def supports_passthru_args(cls): + return True + + @classmethod + def subsystem_dependencies(cls): + return super(AnotherFakeTask, cls).subsystem_dependencies() + (FakeSubsystem.scoped(cls),) + + def execute(self): pass + + +class YetAnotherFakeTask(AnotherFakeTask): + options_scope = 'yet-another-fake-task' + + @classmethod + def supports_passthru_args(cls): + return False + + class TaskTest(TaskTestBase): _filename = 'f' @@ -99,6 +169,39 @@ def _create_clean_file(self, target, content): self.create_file(self._filename, content) target.mark_invalidation_hash_dirty() + def _cache_ignore_options(self, globally=False): + return { + 'cache' + ('' if globally else '.' 
+ self.options_scope): { + 'ignore': True + } + } + + def _synthesize_subtype(self, name='A', scope=None, cls=FakeTask, **kwargs): + """Generate a synthesized subtype of `cls`.""" + if scope is None: + scope = cls.options_scope + subclass_name = b'test_{0}_{1}_{2}'.format(cls.__name__, scope, name) + kwargs['options_scope'] = scope + return type(subclass_name, (cls,), kwargs) + + def _instantiate_synthesized_type(self, task_type, **kwargs): + """Generate a new instance of the synthesized type `task_type`.""" + ctx = super(TaskTestBase, self).context(for_task_types=[task_type], **kwargs) + return task_type(ctx, self.test_workdir) + + def _task_type_to_fp(self, task_type, **kwargs): + """Instantiate the `task_type` and return its fingerprint.""" + task_object = self._instantiate_synthesized_type(task_type, **kwargs) + return task_object.fingerprint + + def _synth_fp(self, scope=None, cls=FakeTask, options_fingerprintable=None, **kwargs): + """Synthesize a subtype of `cls`, instantiate it, and take its + fingerprint. `options_fingerprintable` describes the registered options in + their respective scopes which can contribute to the task fingerprint.""" + task_type = self._synthesize_subtype(scope=scope, cls=cls, **kwargs) + return self._task_type_to_fp( + task_type, options_fingerprintable=options_fingerprintable) + def test_revert_after_failure(self): # Regression test to catch the following scenario: # @@ -309,13 +412,6 @@ def test_live_dirs(self): self.assertNotIn(vtB.current_results_dir, vtC_live) self.assertEqual(len(vtC_live), 2) - def _cache_ignore_options(self, globally=False): - return { - 'cache' + ('' if globally else '.' + self.options_scope): { - 'ignore': True - } - } - def test_ignore_global(self): _, vtA, was_valid = self._run_fixture() self.assertFalse(was_valid) @@ -345,3 +441,121 @@ def test_ignore(self): _, vtA, was_valid = self._run_fixture(options=self._cache_ignore_options()) self.assertFalse(was_valid) self.assertFalse(vtA.cacheable) + + def test_fingerprint_identity(self): + """Tasks formed with the same parameters should have the same fingerprint + (smoke test).""" + x = self._synth_fp() + y = self._synth_fp() + self.assertEqual(y, x) + + def test_fingerprint_implementation_version_single(self): + """Tasks with a different implementation_version() should have different + fingerprints.""" + empty_impls = self._synth_fp(_impls=[]) + zero_version = self._synth_fp(_impls=[('asdf', 0)]) + self.assertNotEqual(zero_version, empty_impls) + one_version = self._synth_fp(_impls=[('asdf', 1)]) + self.assertNotEqual(one_version, empty_impls) + alt_name_version = self._synth_fp(_impls=[('xxx', 0)]) + self.assertNotEqual(alt_name_version, zero_version) + zero_one_version = self._synth_fp(_impls=[('asdf', 0), ('asdf', 1)]) + self.assertNotEqual(zero_one_version, zero_version) + self.assertNotEqual(zero_one_version, one_version) + + def test_fingerprint_implementation_version_inheritance(self): + """The implementation_version() of superclasses of the task should affect + the task fingerprint.""" + versioned_fake = self._synth_fp(_impls=[('asdf', 0)]) + base_version_other_fake = self._synth_fp( + cls=OtherFakeTask, + _impls=[('asdf', 0)], + _other_impls=[], + ) + self.assertNotEqual(base_version_other_fake, versioned_fake) + extended_version_other_fake = self._synth_fp( + cls=OtherFakeTask, + _impls=[('asdf', 0)], + _other_impls=[('xxx', 0)], + ) + self.assertNotEqual(extended_version_other_fake, base_version_other_fake) + + extended_version_copy = self._synth_fp( + cls=OtherFakeTask, 
+ _impls=[('asdf', 1)], + _other_impls=[('xxx', 0)], + ) + self.assertNotEqual(extended_version_copy, extended_version_other_fake) + + def test_stable_name(self): + """The stable_name() should be used to form the task fingerprint.""" + a_fingerprint = self._synth_fp(name='some_name', _stable_name='xxx') + b_fingerprint = self._synth_fp(name='some_name', _stable_name='yyy') + self.assertNotEqual(b_fingerprint, a_fingerprint) + + def test_fingerprint_changing_options_scope(self): + """The options_scope of the task and any of its subsystem_dependencies + should affect the task fingerprint.""" + task_fp = self._synth_fp(scope='xxx') + other_task_fp = self._synth_fp(scope='yyy') + self.assertNotEqual(other_task_fp, task_fp) + + subsystem_deps_fp = self._synth_fp(scope='xxx', _deps=(SubsystemDependency(FakeSubsystem, GLOBAL_SCOPE),)) + self.assertNotEqual(subsystem_deps_fp, task_fp) + + scoped_subsystems_fp = self._synth_fp(scope='xxx', _deps=(SubsystemDependency(FakeSubsystem, 'xxx'),)) + self.assertNotEqual(scoped_subsystems_fp, subsystem_deps_fp) + + def test_fingerprint_options_on_registered_scopes_only(self): + """Changing or setting an option value should only affect the task + fingerprint if it is registered as a fingerprintable option.""" + default_fp = self._synth_fp(cls=AnotherFakeTask, options_fingerprintable={}) + self.set_options_for_scope( + AnotherFakeTask.options_scope, **{'fake-option': False}) + unregistered_option_fp = self._synth_fp(cls=AnotherFakeTask, options_fingerprintable={}) + self.assertEqual(unregistered_option_fp, default_fp) + + registered_option_fp = self._synth_fp(cls=AnotherFakeTask, options_fingerprintable={ + AnotherFakeTask.options_scope: {'fake-option': bool}, + }) + self.assertNotEqual(registered_option_fp, default_fp) + + def test_fingerprint_changing_option_value(self): + """Changing an option value in some scope should affect the task + fingerprint.""" + cur_option_spec = { + AnotherFakeTask.options_scope: {'fake-option': bool}, + } + self.set_options_for_scope( + AnotherFakeTask.options_scope, **{'fake-option': False}) + task_opt_false_fp = self._synth_fp(cls=AnotherFakeTask, options_fingerprintable=cur_option_spec) + self.set_options_for_scope( + AnotherFakeTask.options_scope, **{'fake-option': True}) + task_opt_true_fp = self._synth_fp(cls=AnotherFakeTask, options_fingerprintable=cur_option_spec) + self.assertNotEqual(task_opt_true_fp, task_opt_false_fp) + + def test_fingerprint_passthru_args(self): + """Passthrough arguments should affect fingerprints iff the task + supports passthrough args.""" + task_type_base = self._synthesize_subtype(cls=AnotherFakeTask) + empty_passthru_args_fp = self._task_type_to_fp( + task_type_base, + passthru_args=[], + ) + non_empty_passthru_args_fp = self._task_type_to_fp( + task_type_base, + passthru_args=['something'], + ) + self.assertNotEqual(non_empty_passthru_args_fp, empty_passthru_args_fp) + + # YetAnotherFakeTask.supports_passthru_args() returns False + task_type_derived_ignore_passthru = self._synthesize_subtype(cls=YetAnotherFakeTask) + different_task_with_same_opts_fp = self._task_type_to_fp( + task_type_derived_ignore_passthru, + passthru_args=[], + ) + different_task_with_passthru_fp = self._task_type_to_fp( + task_type_derived_ignore_passthru, + passthru_args=['asdf'], + ) + self.assertEqual(different_task_with_passthru_fp, different_task_with_same_opts_fp) diff --git a/tests/python/pants_test/tasks/task_test_base.py b/tests/python/pants_test/tasks/task_test_base.py index 17e12b30c95..fa5bdb4f2f4 100644 --- 
a/tests/python/pants_test/tasks/task_test_base.py +++ b/tests/python/pants_test/tasks/task_test_base.py @@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) +import glob import os from contextlib import closing from StringIO import StringIO @@ -24,17 +25,22 @@ def is_exe(name): def ensure_cached(task_cls, expected_num_artifacts=None): - """Decorator for a task-executing unit test. Asserts that after running - the decorated test function, the cache for task_cls contains expected_num_artifacts. - Clears the task's cache before running the test. + """Decorator for a task-executing unit test. Asserts that after running the + decorated test function, the cache for task_cls contains + expected_num_artifacts. + + Uses a new temp dir for the artifact cache, and uses a glob based on the + task's synthesized subtype to find the cache directories within the new temp + dir which were generated by the actions performed within the test method. :API: public - :param task_cls: Class of the task to check the artifact cache for. (e.g. JarCreate) - :param expected_num_artifacts: Expected number of artifacts to be in the task's - cache after running the test. If unspecified, will - assert that the number of artifacts in the cache is - non-zero. + :param task_cls: Class of the task to check the artifact cache + for (e.g. JarCreate). + :param expected_num_artifacts: Expected number of artifacts to be in the + task's cache after running the test. If + unspecified, will assert that the number of + artifacts in the cache is non-zero. """ def decorator(test_fn): @@ -42,11 +48,19 @@ def wrapper(self, *args, **kwargs): with temporary_dir() as artifact_cache: self.set_options_for_scope('cache.{}'.format(self.options_scope), write_to=[artifact_cache]) - task_cache = os.path.join(artifact_cache, task_cls.stable_name()) - os.mkdir(task_cache) test_fn(self, *args, **kwargs) + cache_subdir_glob_str = os.path.join(artifact_cache, '*/') + cache_subdirs = glob.glob(cache_subdir_glob_str) + + if expected_num_artifacts == 0: + self.assertEqual(len(cache_subdirs), 0) + return + + self.assertEqual(len(cache_subdirs), 1) + task_cache = cache_subdirs[0] + num_artifacts = 0 for (_, _, files) in os.walk(task_cache): num_artifacts += len(files) @@ -125,26 +139,21 @@ def set_options(self, **kwargs): """ self.set_options_for_scope(self.options_scope, **kwargs) - def context(self, for_task_types=None, options=None, passthru_args=None, target_roots=None, - console_outstream=None, workspace=None, for_subsystems=None): + def context(self, for_task_types=None, **kwargs): """ :API: public """ # Add in our task type. for_task_types = [self._testing_task_type] + (for_task_types or []) - return super(TaskTestBase, self).context(for_task_types=for_task_types, - options=options, - passthru_args=passthru_args, - target_roots=target_roots, - console_outstream=console_outstream, - workspace=workspace, - for_subsystems=for_subsystems) + return super(TaskTestBase, self).context(for_task_types=for_task_types, **kwargs) def create_task(self, context, workdir=None): """ :API: public """ - return self._testing_task_type(context, workdir or self._test_workdir) + if workdir is None: + workdir = self.test_workdir + return self._testing_task_type(context, workdir) class ConsoleTaskTestBase(TaskTestBase):
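Since the local cache path now ends in the task fingerprint rather than stable_name(), integration tests can no longer hard-code the cache subdirectory; get_cache_subdir() recovers it by globbing. A minimal usage sketch, assuming a hypothetical goal, target spec, and cache scope (none of these names come from this change):

import os

from pants.util.contextutil import temporary_dir
from pants_test.pants_run_integration_test import PantsRunIntegrationTest


class CacheDirnameIntegrationTest(PantsRunIntegrationTest):

  def test_finds_fingerprinted_cache_subdir(self):
    with temporary_dir() as cache_dir:
      # 'cache.compile.zinc' and the target spec are placeholders.
      config = {'cache.compile.zinc': {'write_to': [cache_dir]}}
      run = self.run_pants(['compile', 'examples/src/java::'], config=config)
      self.assert_success(run)
      # Only one task wrote to cache_dir, so exactly one fingerprint-named
      # subdirectory should remain after excluding no other dirs.
      task_cache = self.get_cache_subdir(cache_dir)
      self.assertTrue(os.path.isdir(task_cache))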
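The fingerprint tests synthesize throwaway Task subtypes so that each variation in implementation_version(), options_scope, or subsystem dependencies produces a distinct type. The underlying trick is plain type() with class attributes supplied as the class dict; a standalone sketch of the same technique, with invented names and no Pants test harness:

class Base(object):
  options_scope = 'base'
  _impls = []

  @classmethod
  def implementation_version(cls):
    return list(cls._impls)


def synthesize_subtype(cls, name='A', scope=None, **kwargs):
  # Mirrors _synthesize_subtype(): build a one-off subclass whose class-level
  # attributes (e.g. _impls, options_scope) override the parent's.
  if scope is None:
    scope = cls.options_scope
  kwargs['options_scope'] = scope
  subclass_name = b'test_{0}_{1}_{2}'.format(cls.__name__, scope, name)
  return type(subclass_name, (cls,), kwargs)


versioned = synthesize_subtype(Base, _impls=[('asdf', 0)])
unversioned = synthesize_subtype(Base)
assert versioned.implementation_version() != unversioned.implementation_version()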
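With the cache directory no longer pre-created under stable_name(), ensure_cached() now finds the task's cache by globbing for the single fingerprint-named subdirectory after the test body runs. Caller usage is unchanged; a hypothetical example (ExampleTask, its import path, and the target are invented for illustration):

from pants_test.tasks.task_test_base import TaskTestBase, ensure_cached

from my_plugin.tasks.example_task import ExampleTask  # hypothetical import


class ExampleTaskTest(TaskTestBase):

  @classmethod
  def task_type(cls):
    return ExampleTask

  @ensure_cached(ExampleTask, expected_num_artifacts=1)
  def test_execute_writes_one_artifact(self):
    # The decorator points the task's cache at a temp dir; executing the task
    # should leave exactly one artifact in its fingerprint-named subdirectory.
    task = self.create_task(self.context(target_roots=[self.make_target('a')]))
    task.execute()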