diff --git a/3rdparty/BUILD b/3rdparty/BUILD index 107553cc58b..435b11fbd59 100644 --- a/3rdparty/BUILD +++ b/3rdparty/BUILD @@ -96,8 +96,13 @@ jar_library(name='slf4j-api', jar_library(name = 'spindle-runtime', jars = [ jar(org = 'com.foursquare', name = 'spindle-runtime_2.10', rev = '3.0.0-M7'), + ], + dependencies = [ + 'contrib/spindle/3rdparty:rogue', ]) +# NB: we have two versions of thrift here due to scrooge requiring one version while apache +# thrift requires another. this is not usually recommended jar_library(name='libthrift-0.9.2', jars = [ jar(org='org.apache.thrift', name='libthrift', rev='0.9.2') @@ -110,7 +115,17 @@ target(name='thrift-0.9.2', ':slf4j-api', ]) -target(name='thrift', dependencies = [ ':thrift-0.9.2' ]) +jar_library(name='libthrift-0.6.1', + jars = [ + jar(org='org.apache.thrift', name='libthrift', rev='0.6.1') + ]) + +target(name='thrift-0.6.1', + dependencies = [ + ':commons-lang', + ':libthrift-0.6.1', + ':slf4j-api', + ]) ############### diff --git a/3rdparty/jvm/com/twitter/BUILD b/3rdparty/jvm/com/twitter/BUILD new file mode 100644 index 00000000000..d75cd618224 --- /dev/null +++ b/3rdparty/jvm/com/twitter/BUILD @@ -0,0 +1,22 @@ +# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +jar_library(name='finagle-thrift', + jars=[ + jar(org='com.twitter', name='finagle-thrift_2.10', rev='6.28.0') + .exclude(org = 'org.apache.thrift', name = 'libthrift'), + ], + dependencies=[ + '3rdparty:thrift-0.6.1', + ], +) + +jar_library(name='scrooge-core', + jars=[ + jar(org='com.twitter', name='scrooge-core_2.10', rev='3.20.0') + .exclude(org = 'org.apache.thrift', name = 'libthrift'), + ], + dependencies=[ + '3rdparty:thrift-0.6.1', + ], +) diff --git a/BUILD.tools b/BUILD.tools index e9a0b9e809c..1979aa4ebb8 100644 --- a/BUILD.tools +++ b/BUILD.tools @@ -32,12 +32,12 @@ jar_library(name = 'scrooge-gen', jars = [ jar(org='com.twitter', name='scrooge-generator_2.10', rev='3.20.0') # scrooge requires libthrift 0.5.0-1 which is not available on - # the default maven repos. Force scrooge to use the lib thirft used + # the default maven repos. 
Force scrooge to use the lib thrift used # by pants: 3rdparty:thrift .exclude(org = 'org.apache.thrift', name = 'libthrift') ], dependencies = [ - '3rdparty:thrift', + '3rdparty:thrift-0.6.1', ]) jar_library(name = 'scrooge-linter', @@ -46,5 +46,5 @@ jar_library(name = 'scrooge-linter', .exclude(org = 'org.apache.thrift', name = 'libthrift') ], dependencies = [ - '3rdparty:thrift', + '3rdparty:thrift-0.6.1', ]) diff --git a/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/android_generator/BUILD b/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/android_generator/BUILD index 89ea2e715aa..ce09a25ce3c 100644 --- a/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/android_generator/BUILD +++ b/contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/android_generator/BUILD @@ -4,7 +4,7 @@ java_thrift_library( compiler='scrooge', language='android', rpc_style='finagle', - dependencies=['3rdparty:thrift'], + dependencies=['3rdparty:thrift-0.6.1'], provides=artifact( org='org.archimedes', name='android_generator', diff --git a/examples/src/python/example/pants_publish_plugin/extra_test_jar_example.py b/examples/src/python/example/pants_publish_plugin/extra_test_jar_example.py index 0c507f35f19..6336059b7f0 100644 --- a/examples/src/python/example/pants_publish_plugin/extra_test_jar_example.py +++ b/examples/src/python/example/pants_publish_plugin/extra_test_jar_example.py @@ -25,6 +25,7 @@ class ExtraTestJarExample(JarTask): will create an 'example.txt' file, which will be placed in an additional jar. During publishing, this additional jar will be published along with the target. """ + def __init__(self, context, workdir): # Constructor for custom task. Setup things that you need at pants initialization time. super(ExtraTestJarExample, self).__init__(context, workdir) diff --git a/examples/tests/python/example_test/usethriftpy/use_thrift_test.py b/examples/tests/python/example_test/usethriftpy/use_thrift_test.py index 62445e64940..3d73f2315e6 100644 --- a/examples/tests/python/example_test/usethriftpy/use_thrift_test.py +++ b/examples/tests/python/example_test/usethriftpy/use_thrift_test.py @@ -12,8 +12,6 @@ # Illustrate using Thrift-generated code from Python. - - class UseThriftTest(unittest.TestCase): def test_make_it_rain(self): distance = Distance() diff --git a/migrations/options/src/python/migrate_config.py b/migrations/options/src/python/migrate_config.py index a997eace4f5..0b9bab46bb0 100644 --- a/migrations/options/src/python/migrate_config.py +++ b/migrations/options/src/python/migrate_config.py @@ -325,8 +325,22 @@ ('test.junit', 'junit'): None, ('thrift-linter', 'nailgun-server'): None, ('thrift-linter', 'scrooge-linter'): None, + + # Global strategy removal. + ('compile.apt', 'changed-targets-heuristic-limit'): None, + ('compile.apt', 'partition-size-hint'): None, + ('compile.apt', 'strategy'): None, + ('compile.java', 'changed-targets-heuristic-limit'): None, + ('compile.java', 'partition-size-hint'): None, + ('compile.java', 'strategy'): None, + ('compile.zinc', 'changed-targets-heuristic-limit'): None, + ('compile.zinc', 'partition-size-hint'): None, + ('compile.zinc', 'strategy'): None, } +jvm_global_strategy_removal = ('The JVM global compile strategy was removed in favor of the ' + 'isolated strategy, which uses a different set of options.') + ng_daemons_note = ('The global "ng_daemons" option has been replaced by a "use_nailgun" option ' 'local to each task that can use a nailgun. 
A default can no longer be ' 'specified at intermediate scopes; ie: "compile" when the option is present in ' @@ -499,6 +513,17 @@ ('test.junit', 'junit'): jvm_tool_spec_override, ('thrift-linter', 'nailgun-server'): jvm_tool_spec_override, ('thrift-linter', 'scrooge-linter'): jvm_tool_spec_override, + + # Global strategy removal. + ('compile.apt', 'changed-targets-heuristic-limit'): jvm_global_strategy_removal, + ('compile.apt', 'partition-size-hint'): jvm_global_strategy_removal, + ('compile.apt', 'strategy'): jvm_global_strategy_removal, + ('compile.java', 'changed-targets-heuristic-limit'): jvm_global_strategy_removal, + ('compile.java', 'partition-size-hint'): jvm_global_strategy_removal, + ('compile.java', 'strategy'): jvm_global_strategy_removal, + ('compile.zinc', 'changed-targets-heuristic-limit'): jvm_global_strategy_removal, + ('compile.zinc', 'partition-size-hint'): jvm_global_strategy_removal, + ('compile.zinc', 'strategy'): jvm_global_strategy_removal, } diff --git a/pants.ini b/pants.ini index af8ee53c724..25f6b9e0909 100644 --- a/pants.ini +++ b/pants.ini @@ -74,9 +74,36 @@ ivy_settings: %(pants_supportdir)s/ivy/ivysettings.xml ivy_profile: %(pants_supportdir)s/ivy/ivy.xml +[gen.scrooge] +service_deps: { + 'java': [ + '//:scala-library', + '3rdparty:slf4j-api', + '3rdparty:thrift-0.6.1', + '3rdparty/jvm/com/twitter:finagle-thrift', + '3rdparty/jvm/com/twitter:scrooge-core', + ], + 'scala': [ + '3rdparty:thrift-0.6.1', + '3rdparty/jvm/com/twitter:finagle-thrift', + '3rdparty/jvm/com/twitter:scrooge-core', + ], + } +structs_deps: { + 'java': [ + '3rdparty:thrift-0.6.1', + '3rdparty/jvm/com/twitter:scrooge-core', + ], + 'scala': [ + '3rdparty:thrift-0.6.1', + '3rdparty/jvm/com/twitter:scrooge-core', + ], + } + + [gen.thrift] gen_options: hashcode -deps: ["3rdparty:thrift"] +deps: ["3rdparty:thrift-0.9.2"] [compile.checkstyle] @@ -116,14 +143,28 @@ skip: True [pycheck-except-statement] skip: True -[compile.java] -partition_size_hint: 1000000000 -compiler-bootstrap-tools: ["//:java-compiler"] -jvm_options: ["-Xmx2G"] - [compile.zinc] -jvm_options: ["-Xmx2g", "-XX:MaxPermSize=256m", "-Dzinc.analysis.cache.limit=0"] +worker_count: 4 +jvm_options: [ + '-Xmx4g', '-XX:MaxPermSize=512m', '-XX:+UseConcMarkSweepGC', '-XX:ParallelGCThreads=4', + # bigger cache size for our big projects (default is just 5) + '-Dzinc.analysis.cache.limit=1000', + ] + +args: [ + '-S-encoding', '-SUTF-8', + '-S-g:vars', + ] +warning_args: [ + '-S-deprecation', + '-S-unchecked', + # request warnings for http://www.scala-lang.org/api/2.10.4/index.html#scala.language$ + '-S-feature', + ] +no_warning_args: [ + '-S-nowarn', + ] [jvm.run.jvm] diff --git a/pants.ini.isolated b/pants.ini.isolated deleted file mode 100644 index 2ab83f29f33..00000000000 --- a/pants.ini.isolated +++ /dev/null @@ -1,33 +0,0 @@ -# An override config example that uses the `isolated` compile strategy for JVM languages. 
-# ./pants --config-override=pants.ini.isolated - -[compile.apt] -strategy: isolated - - -[compile.java] -use_jmake: False - - -[compile.zinc] -strategy: isolated -worker_count: 4 -jvm_options: [ - '-Xmx4g', '-XX:MaxPermSize=512m', '-XX:+UseConcMarkSweepGC', '-XX:ParallelGCThreads=4', - # bigger cache size for our big projects (default is just 5) - '-Dzinc.analysis.cache.limit=1000', - ] - -args: [ - '-S-encoding', '-SUTF-8', - '-S-g:vars', - ] -warning_args: [ - '-S-deprecation', - '-S-unchecked', - # request warnings for http://www.scala-lang.org/api/2.10.4/index.html#scala.language$ - '-S-feature', - ] -no_warning_args: [ - '-S-nowarn', - ] diff --git a/src/python/pants/backend/codegen/tasks/BUILD b/src/python/pants/backend/codegen/tasks/BUILD index 5cf977748e5..35ac6fe229c 100644 --- a/src/python/pants/backend/codegen/tasks/BUILD +++ b/src/python/pants/backend/codegen/tasks/BUILD @@ -132,6 +132,7 @@ python_library( 'src/python/pants/base:address', 'src/python/pants/base:address_lookup_error', 'src/python/pants/base:build_environment', + 'src/python/pants/base:dep_lookup_error', 'src/python/pants/base:workunit', 'src/python/pants/util:memo', ], diff --git a/src/python/pants/backend/codegen/tasks/simple_codegen_task.py b/src/python/pants/backend/codegen/tasks/simple_codegen_task.py index cf8c332248a..b23d06608e1 100644 --- a/src/python/pants/backend/codegen/tasks/simple_codegen_task.py +++ b/src/python/pants/backend/codegen/tasks/simple_codegen_task.py @@ -16,6 +16,7 @@ from pants.base.address_lookup_error import AddressLookupError from pants.base.build_environment import get_buildroot from pants.base.build_graph import sort_targets +from pants.base.dep_lookup_error import DepLookupError from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnitLabel from pants.util.dirutil import safe_rmtree, safe_walk @@ -284,7 +285,7 @@ def resolve_deps(self, unresolved_deps): try: deps.update(self.context.resolve(dep)) except AddressLookupError as e: - raise self.DepLookupError('{message}\n on dependency {dep}'.format(message=e, dep=dep)) + raise DepLookupError('{message}\n on dependency {dep}'.format(message=e, dep=dep)) return deps class CodegenStrategy(AbstractClass): diff --git a/src/python/pants/backend/jvm/subsystems/BUILD b/src/python/pants/backend/jvm/subsystems/BUILD index f7865eefe53..39c4693c4e2 100644 --- a/src/python/pants/backend/jvm/subsystems/BUILD +++ b/src/python/pants/backend/jvm/subsystems/BUILD @@ -5,7 +5,7 @@ python_library( name = 'jvm_tool_mixin', sources = ['jvm_tool_mixin.py'], dependencies = [ - 'src/python/pants/base:address_lookup_error', + 'src/python/pants/base:dep_lookup_error', 'src/python/pants/base:exceptions', ], ) diff --git a/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py b/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py index 2c14d17f085..be7cc60853a 100644 --- a/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py +++ b/src/python/pants/backend/jvm/subsystems/jvm_tool_mixin.py @@ -8,7 +8,7 @@ from collections import namedtuple from textwrap import dedent -from pants.base.address_lookup_error import AddressLookupError +from pants.base.dep_lookup_error import DepLookupError from pants.base.exceptions import TaskError @@ -17,10 +17,6 @@ class JvmToolMixin(object): Must be mixed in to something that can register and use options, e.g., a Task or a Subsystem. 
""" - class DepLookupError(AddressLookupError): - """Thrown when a dependency can't be found.""" - pass - class InvalidToolClasspath(TaskError): """Indicates an invalid jvm tool classpath.""" diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/BUILD b/src/python/pants/backend/jvm/tasks/jvm_compile/BUILD index b459fd3bd0a..86e2a494add 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/BUILD +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/BUILD @@ -66,54 +66,25 @@ python_library( python_library( name = 'jvm_compile', sources = ['jvm_compile.py'], - dependencies = [ - ':jvm_compile_global_strategy', - ':jvm_compile_isolated_strategy', - 'src/python/pants/backend/core/tasks:group_task', - 'src/python/pants/backend/jvm/subsystems:jvm_platform', - 'src/python/pants/backend/jvm/tasks:nailgun_task', - 'src/python/pants/base:exceptions', - 'src/python/pants/base:fingerprint_strategy', - 'src/python/pants/base:workunit', - 'src/python/pants/goal:products', - 'src/python/pants/option', - 'src/python/pants/reporting', - ], -) - -python_library( - name = 'jvm_compile_global_strategy', - sources = ['jvm_compile_global_strategy.py'], dependencies = [ '3rdparty/python/twitter/commons:twitter.common.collections', ':compile_context', - ':jvm_compile_strategy', + ':execution_graph', ':resource_mapping', - 'src/python/pants/backend/jvm/targets:jvm', + 'src/python/pants/backend/core/tasks:group_task', + 'src/python/pants/backend/jvm/subsystems:jvm_platform', 'src/python/pants/backend/jvm/tasks:classpath_util', + 'src/python/pants/backend/jvm/tasks:nailgun_task', 'src/python/pants/base:build_environment', 'src/python/pants/base:exceptions', + 'src/python/pants/base:fingerprint_strategy', 'src/python/pants/base:target', 'src/python/pants/base:worker_pool', + 'src/python/pants/goal:products', 'src/python/pants/option', + 'src/python/pants/reporting', 'src/python/pants/util:contextutil', 'src/python/pants/util:dirutil', - ], -) - -python_library( - name = 'jvm_compile_isolated_strategy', - sources = ['jvm_compile_isolated_strategy.py'], - dependencies = [ - ':compile_context', - ':execution_graph', - ':jvm_compile_strategy', - ':resource_mapping', - 'src/python/pants/backend/jvm/tasks:classpath_util', - 'src/python/pants/base:build_environment', - 'src/python/pants/base:target', - 'src/python/pants/base:worker_pool', - 'src/python/pants/util:dirutil', 'src/python/pants/util:fileutil', ], ) @@ -126,18 +97,6 @@ python_library( ], ) -python_library( - name = 'jvm_compile_strategy', - sources = ['jvm_compile_strategy.py'], - dependencies = [ - '3rdparty/python/twitter/commons:twitter.common.collections', - 'src/python/pants/base:exceptions', - 'src/python/pants/base:build_environment', - 'src/python/pants/util:dirutil', - 'src/python/pants/util:contextutil', - ], -) - python_library( name = 'resource_mapping', sources = ['resource_mapping.py'], diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/compile_context.py b/src/python/pants/backend/jvm/tasks/jvm_compile/compile_context.py index 92aa5559595..cd7549c38ec 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/compile_context.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/compile_context.py @@ -18,12 +18,18 @@ class CompileContext(object): and a finalized compile in its permanent location. 
""" - def __init__(self, target, analysis_file, classes_dir, sources): + def __init__(self, target, analysis_file, classes_dir, jar_file, sources): self.target = target self.analysis_file = analysis_file self.classes_dir = classes_dir + self.jar_file = jar_file self.sources = sources + @contextmanager + def open_jar(self, mode): + with open_zip(self.jar_file, mode=mode, compression=zipfile.ZIP_STORED) as jar: + yield jar + @property def _id(self): return (self.target, self.analysis_file, self.classes_dir) @@ -36,16 +42,3 @@ def __ne__(self, other): def __hash__(self): return hash(self._id) - - -class IsolatedCompileContext(CompileContext): - """Extends CompileContext to add a jar location.""" - - def __init__(self, target, analysis_file, classes_dir, jar_file, sources): - super(IsolatedCompileContext, self).__init__(target, analysis_file, classes_dir, sources) - self.jar_file = jar_file - - @contextmanager - def open_jar(self, mode): - with open_zip(self.jar_file, mode=mode, compression=zipfile.ZIP_STORED) as jar: - yield jar diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py index 50a884795a9..fe05e489fd7 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/java/java_compile.py @@ -74,7 +74,7 @@ def get_no_warning_args_default(cls): @classmethod def register_options(cls, register): super(JmakeCompile, cls).register_options(register) - register('--use-jmake', advanced=True, action='store_true', default=True, + register('--use-jmake', advanced=True, action='store_true', default=False, fingerprint=True, help='Use jmake to compile Java targets') cls.register_jvm_tool(register, diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py index b36dec04965..56db9aff5a3 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py @@ -5,24 +5,49 @@ from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) -import hashlib +import functools import itertools +import os +import shutil import sys -from collections import defaultdict +from collections import OrderedDict, defaultdict +from hashlib import sha1 from pants.backend.core.tasks.group_task import GroupMember from pants.backend.jvm.subsystems.jvm_platform import JvmPlatform from pants.backend.jvm.targets.jar_library import JarLibrary -from pants.backend.jvm.tasks.jvm_compile.jvm_compile_global_strategy import JvmCompileGlobalStrategy -from pants.backend.jvm.tasks.jvm_compile.jvm_compile_isolated_strategy import \ - JvmCompileIsolatedStrategy +from pants.backend.jvm.tasks.classpath_util import ClasspathUtil +from pants.backend.jvm.tasks.jvm_compile.compile_context import CompileContext +from pants.backend.jvm.tasks.jvm_compile.execution_graph import (ExecutionFailure, ExecutionGraph, + Job) +from pants.backend.jvm.tasks.jvm_compile.resource_mapping import ResourceMapping from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase +from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.base.fingerprint_strategy import TaskIdentityFingerprintStrategy +from pants.base.worker_pool import Work, WorkerPool from pants.base.workunit import WorkUnitLabel from pants.goal.products import 
MultipleRootedProducts from pants.option.custom_types import list_option from pants.reporting.reporting_utils import items_to_report_element +from pants.util.dirutil import fast_relpath, safe_mkdir, safe_rmtree, safe_walk +from pants.util.fileutil import atomic_copy, create_size_estimators + + +class CacheHitCallback(object): + """A serializable cache hit callback that cleans the class directory prior to cache extraction. + + This class holds onto class directories rather than CompileContexts because CompileContexts + aren't picklable. + """ + + def __init__(self, cache_key_to_class_dir): + self._key_to_classes_dir = cache_key_to_class_dir + + def __call__(self, cache_key): + class_dir = self._key_to_classes_dir.get(cache_key) + if class_dir: + safe_mkdir(class_dir, clean=True) class ResolvedJarAwareTaskIdentityFingerprintStrategy(TaskIdentityFingerprintStrategy): @@ -61,14 +86,23 @@ class JvmCompile(NailgunTaskBase, GroupMember): mentioned below under "Subclasses must implement". """ + size_estimators = create_size_estimators() + + @classmethod + def size_estimator_by_name(cls, estimation_strategy_name): + return cls.size_estimators[estimation_strategy_name] + + @staticmethod + def _analysis_for_target(analysis_dir, target): + return os.path.join(analysis_dir, target.id + '.analysis') + + @staticmethod + def _portable_analysis_for_target(analysis_dir, target): + return JvmCompile._analysis_for_target(analysis_dir, target) + '.portable' + @classmethod def register_options(cls, register): super(JvmCompile, cls).register_options(register) - register('--partition-size-hint', advanced=True, type=int, default=sys.maxint, - metavar='<# source files>', - help='Roughly how many source files to attempt to compile together. Set to a large ' - 'number to compile all sources together. Set to 0 to compile target-by-target.') - register('--jvm-options', advanced=True, type=list_option, default=[], help='Run the compiler with these JVM options.') @@ -96,17 +130,20 @@ def register_options(cls, register): default=list(cls.get_no_warning_args_default()), help='Extra compiler args to use when warnings are disabled.') - register('--strategy', advanced=True, choices=['global', 'isolated'], default='global', - fingerprint=True, - help='Selects the compilation strategy to use. 
The "global" strategy uses a shared ' - 'global classpath for all compiled classes, and the "isolated" strategy uses ' - 'per-target classpaths.') - register('--delete-scratch', advanced=True, default=True, action='store_true', help='Leave intermediate scratch files around, for debugging build problems.') - JvmCompileGlobalStrategy.register_options(register, cls._name, cls._supports_concurrent_execution) - JvmCompileIsolatedStrategy.register_options(register, cls._name, cls._supports_concurrent_execution) + register('--worker-count', advanced=True, type=int, default=1, + help='The number of concurrent workers to use when ' + 'compiling with {task}.'.format(task=cls._name)) + + register('--size-estimator', advanced=True, + choices=list(cls.size_estimators.keys()), default='filesize', + help='The method of target size estimation.') + + register('--capture-log', advanced=True, action='store_true', default=False, + fingerprint=True, + help='Capture compilation output to per-target logs.') @classmethod def product_types(cls): @@ -175,7 +212,7 @@ def select(self, target): return target.has_sources(self._file_suffix) def select_source(self, source_file_path): - """Source predicate for the strategy.""" + """Source predicate for this task.""" return source_file_path.endswith(self._file_suffix) def create_analysis_tools(self): @@ -243,28 +280,67 @@ def __init__(self, *args, **kwargs): # The ivy confs for which we're building. self._confs = self.get_options().confs - # The compile strategy to use for analysis and classfile placement. - if self.get_options().strategy == 'global': - strategy_constructor = JvmCompileGlobalStrategy - else: - assert self.get_options().strategy == 'isolated' - strategy_constructor = JvmCompileIsolatedStrategy - self._strategy = strategy_constructor(self.context, - self.get_options(), - self.workdir, - self.create_analysis_tools(), - self._name, - self.select_source) # Maps CompileContext --> dict of upstream class to paths. self._upstream_class_to_paths = {} + # Mapping of relevant (as selected by the predicate) sources by target. + self._sources_by_target = None + self._sources_predicate = self.select_source + + # Various working directories. + self._analysis_dir = os.path.join(self.workdir, 'isolated-analysis') + self._classes_dir = os.path.join(self.workdir, 'isolated-classes') + self._logs_dir = os.path.join(self.workdir, 'isolated-logs') + self._jars_dir = os.path.join(self.workdir, 'jars') + + self._capture_log = self.get_options().capture_log + self._delete_scratch = self.get_options().delete_scratch + self._clear_invalid_analysis = self.get_options().clear_invalid_analysis + + try: + worker_count = self.get_options().worker_count + except AttributeError: + # tasks that don't support concurrent execution have no worker_count registered + worker_count = 1 + self._worker_count = worker_count + + self._size_estimator = self.size_estimator_by_name(self.get_options().size_estimator) + + self._worker_pool = None + + self._analysis_tools = self.create_analysis_tools() + + @property + def _analysis_parser(self): + return self._analysis_tools.parser + def _fingerprint_strategy(self, classpath_products): return ResolvedJarAwareTaskIdentityFingerprintStrategy(self, classpath_products) + def ensure_analysis_tmpdir(self): + """Work in a tmpdir so we don't stomp the main analysis files on error. + + A temporary, but well-known, dir in which to munge analysis/dependency files in before + caching. 
It must be well-known so we know where to find the files when we retrieve them from + the cache. The tmpdir is cleaned up in a shutdown hook, because background work + may need to access files we create there even after this method returns + :return: path of temporary analysis directory + """ + analysis_tmpdir = os.path.join(self._workdir, 'analysis_tmpdir') + if self._delete_scratch: + self.context.background_worker_pool().add_shutdown_hook( + lambda: safe_rmtree(analysis_tmpdir)) + safe_mkdir(analysis_tmpdir) + return analysis_tmpdir + def pre_execute(self): # Only create these working dirs during execution phase, otherwise, they # would be wiped out by clean-all goal/task if it's specified. - self._strategy.pre_compile() + self.analysis_tmpdir = self.ensure_analysis_tmpdir() + safe_mkdir(self._analysis_dir) + safe_mkdir(self._classes_dir) + safe_mkdir(self._logs_dir) + safe_mkdir(self._jars_dir) # TODO(John Sirois): Ensuring requested product maps are available - if empty - should probably # be lifted to Task infra. @@ -273,13 +349,43 @@ def pre_execute(self): self._create_empty_products() def prepare_execute(self, chunks): - targets_in_chunks = list(itertools.chain(*chunks)) - - classpath_product = self.context.products.get_data('compile_classpath') - # Invoke the strategy's prepare_compile to prune analysis. - cache_manager = self.create_cache_manager(invalidate_dependents=True, - fingerprint_strategy=self._fingerprint_strategy(classpath_product)) - self._strategy.prepare_compile(cache_manager, self.context.targets(), targets_in_chunks) + relevant_targets = list(itertools.chain(*chunks)) + + # Target -> sources (relative to buildroot). + # TODO(benjy): Should sources_by_target be available in all Tasks? + self._sources_by_target = self._compute_sources_by_target(relevant_targets) + + # Update the classpath by adding relevant target's classes directories to its classpath. + compile_classpaths = self.context.products.get_data('compile_classpath') + + with self.context.new_workunit('validate-{}-analysis'.format(self._name)): + for target in relevant_targets: + cc = self.compile_context(target) + safe_mkdir(cc.classes_dir) + compile_classpaths.add_for_target(target, [(conf, cc.classes_dir) for conf in self._confs]) + self.validate_analysis(cc.analysis_file) + + # This ensures the workunit for the worker pool is set + with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self._name)) \ + as workunit: + # This uses workunit.parent as the WorkerPool's parent so that child workunits + # of different pools will show up in order in the html output. This way the current running + # workunit is on the bottom of the page rather than possibly in the middle. + self._worker_pool = WorkerPool(workunit.parent, + self.context.run_tracker, + self._worker_count) + + def compile_context(self, target): + analysis_file = JvmCompile._analysis_for_target(self._analysis_dir, target) + classes_dir = os.path.join(self._classes_dir, target.id) + # Generate a short unique path for the jar to allow for shorter classpaths. 
+ # TODO: likely unnecessary after https://github.com/pantsbuild/pants/issues/1988 + jar_file = os.path.join(self._jars_dir, '{}.jar'.format(sha1(target.id).hexdigest()[:12])) + return CompileContext(target, + analysis_file, + classes_dir, + jar_file, + self._sources_for_target(target)) def execute_chunk(self, relevant_targets): if not relevant_targets: @@ -289,7 +395,7 @@ def execute_chunk(self, relevant_targets): fingerprint_strategy = self._fingerprint_strategy(classpath_product) # Invalidation check. Everything inside the with block must succeed for the # invalid targets to become valid. - partition_size_hint, locally_changed_targets = self._strategy.invalidation_hints(relevant_targets) + partition_size_hint, locally_changed_targets = (0, None) with self.invalidated(relevant_targets, invalidate_dependents=True, partition_size_hint=partition_size_hint, @@ -303,30 +409,73 @@ def execute_chunk(self, relevant_targets): # Register products for all the valid targets. # We register as we go, so dependency checking code can use this data. valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid] - valid_compile_contexts = [self._strategy.compile_context(t) for t in valid_targets] + valid_compile_contexts = [self.compile_context(t) for t in valid_targets] self._register_vts(valid_compile_contexts) - # Invoke the strategy to execute compilations for invalid targets. + # Execute compilations for invalid targets. check_vts = (self.check_artifact_cache if self.artifact_cache_reads_enabled() else None) update_artifact_cache_vts_work = (self.get_update_artifact_cache_work if self.artifact_cache_writes_enabled() else None) - self._strategy.compile_chunk(invalidation_check, - self.context.targets(), - relevant_targets, + self.compile_chunk(invalidation_check, + self.context.targets(), + relevant_targets, + invalid_targets, + self.extra_compile_time_classpath_elements(), + check_vts, + self._compile_vts, + self._register_vts, + update_artifact_cache_vts_work) + else: + # Nothing to build. Register products for all the targets in one go. + self._register_vts([self.compile_context(t) for t in relevant_targets]) + + def compile_chunk(self, + invalidation_check, + all_targets, + relevant_targets, + invalid_targets, + extra_compile_time_classpath_elements, + check_vts, + compile_vts, + register_vts, + update_artifact_cache_vts_work): + """Executes compilations for the invalid targets contained in a single chunk.""" + assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets." + # Get the classpath generated by upstream JVM tasks and our own prepare_compile(). + compile_classpaths = self.context.products.get_data('compile_classpath') + + extra_compile_time_classpath = self._compute_extra_classpath( + extra_compile_time_classpath_elements) + + compile_contexts = self._create_compile_contexts_for_targets(all_targets) + + # Now create compile jobs for each invalid target one by one. + jobs = self._create_compile_jobs(compile_classpaths, + compile_contexts, + extra_compile_time_classpath, invalid_targets, - self.extra_compile_time_classpath_elements(), + invalidation_check.invalid_vts_partitioned, check_vts, - self._compile_vts, - self._register_vts, + compile_vts, + register_vts, update_artifact_cache_vts_work) - else: - # Nothing to build. Register products for all the targets in one go. 
- self._register_vts([self._strategy.compile_context(t) for t in relevant_targets]) + + exec_graph = ExecutionGraph(jobs) + try: + exec_graph.execute(self._worker_pool, self.context.log) + except ExecutionFailure as e: + raise TaskError("Compilation failure: {}".format(e)) def finalize_execute(self, chunks): - targets_in_chunks = list(itertools.chain(*chunks)) - self._strategy.finalize_compile(targets_in_chunks) + targets = list(itertools.chain(*chunks)) + # Replace the classpath entry for each target with its jar'd representation. + compile_classpaths = self.context.products.get_data('compile_classpath') + for target in targets: + cc = self.compile_context(target) + for conf in self._confs: + compile_classpaths.remove_for_target(target, [(conf, cc.classes_dir)]) + compile_classpaths.add_for_target(target, [(conf, cc.jar_file)]) def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath, outdir, log_file, progress_message, settings): @@ -365,11 +514,33 @@ def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath log_file, settings) def check_artifact_cache(self, vts): - post_process_cached_vts = lambda cvts: self._strategy.post_process_cached_vts(cvts) - cache_hit_callback=self._strategy.create_cache_hit_callback(vts) - return self.do_check_artifact_cache(vts, post_process_cached_vts=post_process_cached_vts, + post_process_cached_vts = lambda cvts: self.post_process_cached_vts(cvts) + cache_hit_callback = self.create_cache_hit_callback(vts) + return self.do_check_artifact_cache(vts, + post_process_cached_vts=post_process_cached_vts, cache_hit_callback=cache_hit_callback) + def create_cache_hit_callback(self, vts): + cache_key_to_classes_dir = {v.cache_key: self.compile_context(v.target).classes_dir + for v in vts} + return CacheHitCallback(cache_key_to_classes_dir) + + def post_process_cached_vts(self, cached_vts): + """Localizes the fetched analysis for targets we found in the cache. + + This is the complement of `_write_to_artifact_cache`. + """ + compile_contexts = [] + for vt in cached_vts: + for target in vt.targets: + compile_contexts.append(self.compile_context(target)) + + for compile_context in compile_contexts: + portable_analysis_file = JvmCompile._portable_analysis_for_target( + self._analysis_dir, compile_context.target) + if os.path.exists(portable_analysis_file): + self._analysis_tools.localize(portable_analysis_file, compile_context.analysis_file) + def _create_empty_products(self): make_products = lambda: defaultdict(MultipleRootedProducts) if self.context.products.is_required_data('classes_by_source'): @@ -384,6 +555,48 @@ def _create_empty_products(self): self.context.products.safe_create_data('product_deps_by_src', dict) + def compute_classes_by_source(self, compile_contexts): + """Compute a map of (context->(src->classes)) for the given compile_contexts. + + It's possible (although unfortunate) for multiple targets to own the same sources, hence + the top level division. Srcs are relative to buildroot. Classes are absolute paths. + + Returning classes with 'None' as their src indicates that the compiler analysis indicated + that they were un-owned. This case is triggered when annotation processors generate + classes (or due to bugs in classfile tracking in zinc/jmake.) + """ + buildroot = get_buildroot() + # Build a mapping of srcs to classes for each context. + classes_by_src_by_context = defaultdict(dict) + for compile_context in compile_contexts: + # Walk the context's jar to build a set of unclaimed classfiles. 
+ unclaimed_classes = set() + with compile_context.open_jar(mode='r') as jar: + for name in jar.namelist(): + if not name.endswith('/'): + unclaimed_classes.add(os.path.join(compile_context.classes_dir, name)) + + # Grab the analysis' view of which classfiles were generated. + classes_by_src = classes_by_src_by_context[compile_context] + if os.path.exists(compile_context.analysis_file): + products = self._analysis_parser.parse_products_from_path(compile_context.analysis_file, + compile_context.classes_dir) + for src, classes in products.items(): + relsrc = os.path.relpath(src, buildroot) + classes_by_src[relsrc] = classes + unclaimed_classes.difference_update(classes) + + # Any remaining classfiles were unclaimed by sources/analysis. + classes_by_src[None] = list(unclaimed_classes) + return classes_by_src_by_context + + def class_name_for_class_file(self, compile_context, class_file_name): + if not class_file_name.endswith(".class"): + return None + assert class_file_name.startswith(compile_context.classes_dir) + class_file_name = class_file_name[len(compile_context.classes_dir) + 1:-len(".class")] + return class_file_name.replace("/", ".") + def _register_vts(self, compile_contexts): classes_by_source = self.context.products.get_data('classes_by_source') classes_by_target = self.context.products.get_data('classes_by_target') @@ -392,9 +605,9 @@ def _register_vts(self, compile_contexts): product_deps_by_src = self.context.products.get_data('product_deps_by_src') # Register class products (and resources generated by annotation processors.) - computed_classes_by_source_by_context = self._strategy.compute_classes_by_source( + computed_classes_by_source_by_context = self.compute_classes_by_source( compile_contexts) - resource_mapping = self._strategy.compute_resource_mapping(compile_contexts) + resource_mapping = ResourceMapping(self._classes_dir) for compile_context in compile_contexts: computed_classes_by_source = computed_classes_by_source_by_context[compile_context] target = compile_context.target @@ -402,7 +615,7 @@ def _register_vts(self, compile_contexts): def add_products_by_target(files): for f in files: - clsname = self._strategy.class_name_for_class_file(compile_context, f) + clsname = self.class_name_for_class_file(compile_context, f) if clsname: # Is a class. classes_by_target[target].add_abs_paths(classes_dir, [f]) @@ -444,4 +657,251 @@ def add_products_by_target(files): compile_classpath.add_for_target(compile_context.target, entries) if self.context.products.is_required_data('product_deps_by_src'): - product_deps_by_src[compile_context.target] = self._strategy.parse_deps(compile_context.analysis_file) + product_deps_by_src[compile_context.target] = \ + self._analysis_parser.parse_deps_from_path(compile_context.analysis_file) + + def _create_compile_contexts_for_targets(self, targets): + compile_contexts = OrderedDict() + for target in targets: + compile_context = self.compile_context(target) + compile_contexts[target] = compile_context + return compile_contexts + + def _compute_classpath_entries(self, compile_classpaths, + target_closure, + compile_context, + extra_compile_time_classpath): + # Generate a classpath specific to this compile and target. 
+ return ClasspathUtil.compute_classpath_for_target(compile_context.target, compile_classpaths, + extra_compile_time_classpath, self._confs, + target_closure) + + def _upstream_analysis(self, compile_contexts, classpath_entries): + """Returns tuples of classes_dir->analysis_file for the closure of the target.""" + # Reorganize the compile_contexts by class directory. + compile_contexts_by_directory = {} + for compile_context in compile_contexts.values(): + compile_contexts_by_directory[compile_context.classes_dir] = compile_context + # If we have a compile context for the target, include it. + for entry in classpath_entries: + if not entry.endswith('.jar'): + compile_context = compile_contexts_by_directory.get(entry) + if not compile_context: + self.context.log.debug('Missing upstream analysis for {}'.format(entry)) + else: + yield compile_context.classes_dir, compile_context.analysis_file + + def _capture_log_file(self, target): + if self._capture_log: + return os.path.join(self._logs_dir, "{}.log".format(target.id)) + return None + + def exec_graph_key_for_target(self, compile_target): + return "compile({})".format(compile_target.address.spec) + + def _create_compile_jobs(self, compile_classpaths, compile_contexts, extra_compile_time_classpath, + invalid_targets, invalid_vts_partitioned, check_vts, compile_vts, + register_vts, update_artifact_cache_vts_work): + def check_cache(vts): + """Manually checks the artifact cache (usually immediately before compilation.) + + Returns true if the cache was hit successfully, indicating that no compilation is necessary. + """ + if not check_vts: + return False + cached_vts, uncached_vts = check_vts([vts]) + if not cached_vts: + self.context.log.debug('Missed cache during double check for {}'.format(vts.target.address.spec)) + return False + assert cached_vts == [vts], ( + 'Cache returned unexpected target: {} vs {}'.format(cached_vts, [vts]) + ) + self.context.log.info('Hit cache during double check for {}'.format(vts.target.address.spec)) + return True + + def work_for_vts(vts, compile_context, target_closure): + progress_message = compile_context.target.address.spec + cp_entries = self._compute_classpath_entries(compile_classpaths, + target_closure, + compile_context, + extra_compile_time_classpath) + + upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) + + # Capture a compilation log if requested. + log_file = self._capture_log_file(compile_context.target) + + # Double check the cache before beginning compilation + if not check_cache(vts): + # Mutate analysis within a temporary directory, and move it to the final location + # on success. + tmpdir = os.path.join(self.analysis_tmpdir, compile_context.target.id) + safe_mkdir(tmpdir) + tmp_analysis_file = self._analysis_for_target( + tmpdir, compile_context.target) + if os.path.exists(compile_context.analysis_file): + shutil.copy(compile_context.analysis_file, tmp_analysis_file) + target, = vts.targets + compile_vts(vts, + compile_context.sources, + tmp_analysis_file, + upstream_analysis, + cp_entries, + compile_context.classes_dir, + log_file, + progress_message, + target.platform) + atomic_copy(tmp_analysis_file, compile_context.analysis_file) + + # Jar the compiled output. + self._create_context_jar(compile_context) + + # Update the products with the latest classes. + register_vts([compile_context]) + + # Kick off the background artifact cache write. 
+ if update_artifact_cache_vts_work: + self._write_to_artifact_cache(vts, compile_context, update_artifact_cache_vts_work) + + jobs = [] + invalid_target_set = set(invalid_targets) + for vts in invalid_vts_partitioned: + assert len(vts.targets) == 1, ("Requested one target per partition, got {}".format(vts)) + + # Invalidated targets are a subset of relevant targets: get the context for this one. + compile_target = vts.targets[0] + compile_context = compile_contexts[compile_target] + compile_target_closure = compile_target.closure() + + # dependencies of the current target which are invalid for this chunk + invalid_dependencies = (compile_target_closure & invalid_target_set) - [compile_target] + + jobs.append(Job(self.exec_graph_key_for_target(compile_target), + functools.partial(work_for_vts, vts, compile_context, compile_target_closure), + [self.exec_graph_key_for_target(target) for target in invalid_dependencies], + self._size_estimator(compile_context.sources), + # If compilation and analysis work succeeds, validate the vts. + # Otherwise, fail it. + on_success=vts.update, + on_failure=vts.force_invalidate)) + return jobs + + def _create_context_jar(self, compile_context): + """Jar up the compile_context to its output jar location. + + TODO(stuhood): In the medium term, we hope to add compiler support for this step, which would + allow the jars to be used as compile _inputs_ as well. Currently using jar'd compile outputs as + compile inputs would make the compiler's analysis useless. + see https://github.com/twitter-forks/sbt/tree/stuhood/output-jars + """ + root = compile_context.classes_dir + with compile_context.open_jar(mode='w') as jar: + for abs_sub_dir, dirnames, filenames in safe_walk(root): + for name in dirnames + filenames: + abs_filename = os.path.join(abs_sub_dir, name) + arcname = fast_relpath(abs_filename, root) + jar.write(abs_filename, arcname) + + def _write_to_artifact_cache(self, vts, compile_context, get_update_artifact_cache_work): + assert len(vts.targets) == 1 + assert vts.targets[0] == compile_context.target + + # Noop if the target is uncacheable. + if (compile_context.target.has_label('no_cache')): + return + vt = vts.versioned_targets[0] + + # Set up args to relativize analysis in the background. + portable_analysis_file = self._portable_analysis_for_target( + self._analysis_dir, compile_context.target) + relativize_args_tuple = (compile_context.analysis_file, portable_analysis_file) + + # Collect the artifacts for this target. + artifacts = [] + + def add_abs_products(p): + if p: + for _, paths in p.abs_paths(): + artifacts.extend(paths) + # Resources. + resources_by_target = self.context.products.get_data('resources_by_target') + add_abs_products(resources_by_target.get(compile_context.target)) + # Classes. + classes_by_target = self.context.products.get_data('classes_by_target') + add_abs_products(classes_by_target.get(compile_context.target)) + # Log file. + log_file = self._capture_log_file(compile_context.target) + if log_file and os.path.exists(log_file): + artifacts.append(log_file) + # Jar. + artifacts.append(compile_context.jar_file) + + # Get the 'work' that will publish these artifacts to the cache. + # NB: the portable analysis_file won't exist until we finish. + vts_artifactfiles_pair = (vt, artifacts + [portable_analysis_file]) + update_artifact_cache_work = get_update_artifact_cache_work([vts_artifactfiles_pair]) + + # And execute it. 
+ if update_artifact_cache_work: + work_chain = [ + Work(self._analysis_tools.relativize, [relativize_args_tuple], 'relativize'), + update_artifact_cache_work + ] + self.context.submit_background_work_chain(work_chain, parent_workunit_name='cache') + + def validate_analysis(self, path): + """Throws a TaskError for invalid analysis files.""" + try: + self._analysis_parser.validate_analysis(path) + except Exception as e: + if self._clear_invalid_analysis: + self.context.log.warn("Invalid analysis detected at path {} ... pants will remove these " + "automatically, but\nyou may experience spurious warnings until " + "clean-all is executed.\n{}".format(path, e)) + safe_delete(path) + else: + raise TaskError("An internal build directory contains invalid/mismatched analysis: please " + "run `clean-all` if your tools versions changed recently:\n{}".format(e)) + + def _compute_sources_by_target(self, targets): + """Computes and returns a map target->sources (relative to buildroot).""" + def resolve_target_sources(target_sources): + resolved_sources = [] + for target in target_sources: + if target.has_sources(): + resolved_sources.extend(target.sources_relative_to_buildroot()) + return resolved_sources + + def calculate_sources(target): + sources = [s for s in target.sources_relative_to_buildroot() if self._sources_predicate(s)] + # TODO: Make this less hacky. Ideally target.java_sources will point to sources, not targets. + if hasattr(target, 'java_sources') and target.java_sources: + sources.extend(resolve_target_sources(target.java_sources)) + return sources + return {t: calculate_sources(t) for t in targets} + + def _sources_for_targets(self, targets): + """Returns a cached map of target->sources for the specified targets.""" + if self._sources_by_target is None: + raise TaskError('self._sources_by_target not computed yet.') + return {t: self._sources_by_target.get(t, []) for t in targets} + + def _sources_for_target(self, target): + """Returns the cached sources for the given target.""" + if self._sources_by_target is None: + raise TaskError('self._sources_by_target not computed yet.') + return self._sources_by_target.get(target, []) + + def _compute_extra_classpath(self, extra_compile_time_classpath_elements): + """Compute any extra compile-time-only classpath elements. + + TODO(benjy): Model compile-time vs. runtime classpaths more explicitly. + TODO(benjy): Add a pre-execute goal for injecting deps into targets, so e.g., + we can inject a dep on the scala runtime library and still have it ivy-resolve. + """ + def extra_compile_classpath_iter(): + for conf in self._confs: + for jar in extra_compile_time_classpath_elements: + yield (conf, jar) + + return list(extra_compile_classpath_iter()) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py deleted file mode 100644 index 5dceab182ec..00000000000 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_global_strategy.py +++ /dev/null @@ -1,621 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from __future__ import (absolute_import, division, generators, nested_scopes, print_function, - unicode_literals, with_statement) - -import itertools -import os -import shutil -import uuid -from collections import defaultdict - -from twitter.common.collections import OrderedSet - -from pants.backend.jvm.targets.jvm_target import JvmTarget -from pants.backend.jvm.tasks.classpath_util import ClasspathUtil -from pants.backend.jvm.tasks.jvm_compile.compile_context import CompileContext -from pants.backend.jvm.tasks.jvm_compile.jvm_compile_strategy import JvmCompileStrategy -from pants.backend.jvm.tasks.jvm_compile.resource_mapping import ResourceMapping -from pants.base.build_environment import get_buildroot, get_scm -from pants.base.exceptions import TaskError -from pants.base.target import Target -from pants.base.worker_pool import Work -from pants.option.custom_types import list_option -from pants.util.contextutil import temporary_dir -from pants.util.dirutil import safe_mkdir - - -class JvmCompileGlobalStrategy(JvmCompileStrategy): - """A strategy for JVM compilation that uses a global classpath and analysis.""" - - class InternalTargetPartitioningError(Exception): - """Error partitioning targets by jvm platform settings.""" - - @classmethod - def register_options(cls, register, compile_task_name, supports_concurrent_execution): - register('--changed-targets-heuristic-limit', advanced=True, type=int, default=0, - help='If non-zero, and we have fewer than this number of locally-changed targets, ' - 'partition them separately, to preserve stability when compiling repeatedly.') - - def __init__(self, context, options, workdir, analysis_tools, compile_task_name, - sources_predicate): - super(JvmCompileGlobalStrategy, self).__init__(context, options, workdir, analysis_tools, - compile_task_name, sources_predicate) - - # Various working directories. - # NB: These are grandfathered in with non-strategy-specific names, but to prevent - # collisions within the buildcache, strategies should use strategy-specific subdirectories. - self._analysis_dir = os.path.join(workdir, 'analysis') - self._classes_dir = os.path.join(workdir, 'classes') - - self._analysis_file = os.path.join(self._analysis_dir, 'global_analysis.valid') - self._invalid_analysis_file = os.path.join(self._analysis_dir, 'global_analysis.invalid') - - self._target_sources_dir = os.path.join(workdir, 'target_sources') - - # The rough number of source files to build in each compiler pass. - self._partition_size_hint = options.partition_size_hint - - # Computed lazily as needed. - self._upstream_class_to_path = None - - # If non-zero, and we have fewer than this number of locally-changed targets, - # then we partition them separately, to preserve stability in the face of repeated - # compilations. - self._changed_targets_heuristic_limit = options.changed_targets_heuristic_limit - - # Sources (relative to buildroot) present in the last analysis that have since been deleted. - # Populated in prepare_compile(). - self._deleted_sources = None - - self._upstream_class_to_path = None - - def name(self): - return 'global' - - def compile_context(self, target): - """Returns the default/stable compile context for the given target. - - Temporary compile contexts are private to the strategy. 
- """ - return CompileContext(target, - self._analysis_file, - self._classes_dir, - self._sources_for_target(target)) - - def move(self, src, dst): - if self.delete_scratch: - shutil.move(src, dst) - else: - shutil.copy(src, dst) - - def pre_compile(self): - super(JvmCompileGlobalStrategy, self).pre_compile() - - # Only create these working dirs during execution phase, otherwise, they - # would be wiped out by clean-all goal/task if it's specified. - safe_mkdir(self._target_sources_dir) - safe_mkdir(self._analysis_dir) - safe_mkdir(self._classes_dir) - - # Look for invalid analysis files. - for f in (self._invalid_analysis_file, self._analysis_file): - self.validate_analysis(f) - - def prepare_compile(self, cache_manager, all_targets, relevant_targets): - super(JvmCompileGlobalStrategy, self).prepare_compile(cache_manager, all_targets, - relevant_targets) - - # Update the classpath for us and for downstream tasks. - compile_classpaths = self.context.products.get_data('compile_classpath') - for conf in self._confs: - compile_classpaths.add_for_targets(all_targets, [(conf, self._classes_dir)]) - - # Split the global analysis file into valid and invalid parts. - invalidation_check = cache_manager.check(relevant_targets) - if invalidation_check.invalid_vts: - # The analysis for invalid and deleted sources is no longer valid. - invalid_targets = [vt.target for vt in invalidation_check.invalid_vts] - invalid_sources_by_target = {} - for tgt in invalid_targets: - invalid_sources_by_target[tgt] = self._sources_for_target(tgt) - invalid_sources = list(itertools.chain.from_iterable(invalid_sources_by_target.values())) - self._deleted_sources = self._compute_deleted_sources() - - tmpdir = os.path.join(self.analysis_tmpdir, str(uuid.uuid4())) - os.mkdir(tmpdir) - valid_analysis_tmp = os.path.join(tmpdir, 'valid_analysis') - newly_invalid_analysis_tmp = os.path.join(tmpdir, 'newly_invalid_analysis') - invalid_analysis_tmp = os.path.join(tmpdir, 'invalid_analysis') - if self._analysis_parser.is_nonempty_analysis(self._analysis_file): - with self.context.new_workunit(name='prepare-analysis'): - self._analysis_tools.split_to_paths(self._analysis_file, - [(invalid_sources + self._deleted_sources, newly_invalid_analysis_tmp)], - valid_analysis_tmp) - if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file): - self._analysis_tools.merge_from_paths( - [self._invalid_analysis_file, newly_invalid_analysis_tmp], invalid_analysis_tmp) - else: - invalid_analysis_tmp = newly_invalid_analysis_tmp - - # Now it's OK to overwrite the main analysis files with the new state. - self.move(valid_analysis_tmp, self._analysis_file) - self.move(invalid_analysis_tmp, self._invalid_analysis_file) - else: - self._deleted_sources = [] - - def invalidation_hints(self, relevant_targets): - # If needed, find targets that we've changed locally (as opposed to - # changes synced in from the SCM). - # TODO(benjy): Should locally_changed_targets be available in all Tasks? - locally_changed_targets = None - if self._changed_targets_heuristic_limit: - locally_changed_targets = self._find_locally_changed_targets(relevant_targets) - if (locally_changed_targets and - len(locally_changed_targets) > self._changed_targets_heuristic_limit): - locally_changed_targets = None - - return (self._partition_size_hint, locally_changed_targets) - - def ordered_compile_settings_and_targets(self, relevant_targets): - """Groups the targets into ordered chunks, dependencies before dependees. 
- - Each chunk is of the form (compile_setting, targets). Attempts to create as few chunks as - possible, under the constraint that targets with different compile settings cannot be in the - same chunk, and dependencies must be in the same chunk or an earlier chunk than their - dependees. - - Detects impossible combinations/dependency relationships with respect to the java target and - source level, and raising errors as necessary (see targets_to_compile and - infer_and_validate_java_target_levels). - - :return: a list of tuples of the form (compile_settings, list of targets) - """ - relevant_targets = set(relevant_targets) - - def get_platform(target): - return getattr(target, 'platform', None) - - # NB(gmalmquist): Short-circuit if we only have one platform. Asymptotically, this only gives us - # O(|V|) time instead of O(|V|+|E|) if we have only one platform, which doesn't seem like much, - # but in practice we save a lot of time because the runtime for the non-short-circuited code is - # multiplied by a higher constant, because we have to iterate over all the targets several - # times. - platform_counts = defaultdict(int) - for target in relevant_targets: - platform_counts[target.platform] += 1 - if len(platform_counts) == 1: - settings, = platform_counts - return [(settings, relevant_targets)] - - # Map of target -> dependees. - outgoing = defaultdict(set) - # Map of target -> dependencies. - incoming = defaultdict(set) - - transitive_targets = set() - - def add_edges(target): - transitive_targets.add(target) - if target.dependencies: - for dependency in target.dependencies: - outgoing[dependency].add(target) - incoming[target].add(dependency) - - self.context.build_graph.walk_transitive_dependency_graph([t.address for t in relevant_targets], - work=add_edges) - # Topological sort. - sorted_targets = [] - frontier = defaultdict(set) - - def add_node(node): - frontier[get_platform(node)].add(node) - - def next_node(): - next_setting = None - if sorted_targets: - # Prefer targets with the same settings as whatever we just added to the sorted list, to - # greedily create chains that are as long as possible. - next_setting = get_platform(sorted_targets[-1]) - if next_setting not in frontier: - if None in frontier: - # NB(gmalmquist): compile_settings=None indicates a target that is not actually a - # jvm_target, which mean's it's an intermediate dependency. We want to expand these - # whenever we can, because they give us more options we can use to create longer chains. 
- next_setting = None - else: - next_setting = max(frontier.keys(), key=lambda setting: len(frontier[setting])) - node = frontier[next_setting].pop() - if not frontier[next_setting]: - frontier.pop(next_setting) - return node - - for target in transitive_targets: - if not incoming[target]: - add_node(target) - - while frontier: - node = next_node() - sorted_targets.append(node) - if node in outgoing: - for dependee in tuple(outgoing[node]): - outgoing[node].remove(dependee) - incoming[dependee].remove(node) - if not incoming[dependee]: - add_node(dependee) - - sorted_targets = [target for target in sorted_targets if target in relevant_targets] - - if set(sorted_targets) != relevant_targets: - added = '\n '.join(t.address.spec for t in (set(sorted_targets) - relevant_targets)) - removed = '\n '.join(t.address.spec for t in (set(relevant_targets) - sorted_targets)) - raise self.InternalTargetPartitioningError( - 'Internal partitioning targets:\nSorted targets =/= original targets!\n' - 'Added:\n {}\nRemoved:\n {}'.format(added, removed) - ) - - unconsumed_edges = any(len(edges) > 0 for edges in outgoing.values()) - if unconsumed_edges: - raise self.InternalTargetPartitioningError( - 'Cycle detected while ordering jvm_targets for compilation. This should have been detected ' - 'when constructing the build_graph, so the presence of this error means there is probably ' - 'a bug in this method.' - ) - - chunks = [] - for target in sorted_targets: - if not isinstance(target, JvmTarget): - continue - if chunks and chunks[-1][0] == get_platform(target): - chunks[-1][1].append(target) - else: - chunks.append((get_platform(target), [target])) - return chunks - - def compile_chunk(self, - invalidation_check, - all_targets, - relevant_targets, - invalid_targets, - extra_compile_time_classpath_elements, - check_vts, - compile_vts, - register_vts, - update_artifact_cache_vts_work): - assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets." - settings_and_targets = self.ordered_compile_settings_and_targets(invalid_targets) - for settings, targets in settings_and_targets: - if targets: - self.compile_sub_chunk(invalidation_check, - all_targets, - targets, - extra_compile_time_classpath_elements, - compile_vts, - register_vts, - update_artifact_cache_vts_work, - settings) - - def compile_sub_chunk(self, - invalidation_check, - all_targets, - invalid_targets, - extra_compile_time_classpath_elements, - compile_vts, - register_vts, - update_artifact_cache_vts_work, - settings): - """Executes compilations for the invalid targets contained in a single chunk. - - Has the side effects of populating: - # valid/invalid analysis files - # classes_by_source product - # classes_by_target product - # resources_by_target product - """ - extra_classpath_tuples = self._compute_extra_classpath(extra_compile_time_classpath_elements) - - # Get the classpath generated by upstream JVM tasks and our own prepare_compile(). - # NB: The global strategy uses the aggregated classpath (for all targets) to compile each - # chunk, which avoids needing to introduce compile-time dependencies between annotation - # processors and the classes they annotate. - compile_classpath = ClasspathUtil.compute_classpath(all_targets, self.context.products.get_data( - 'compile_classpath'), extra_classpath_tuples, self._confs) - - # Find the invalid sources for this chunk. 
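
# Illustrative sketch of the greedy, platform-aware ordering that the deleted
# ordered_compile_settings_and_targets() implemented. `deps` maps a target to its direct
# dependencies and `platform_of` returns its compile settings; both are hypothetical
# stand-ins for the pants build graph.
from collections import defaultdict

def chunk_by_platform(targets, deps, platform_of):
  targets = set(targets)
  incoming = {t: {d for d in deps.get(t, ()) if d in targets} for t in targets}
  dependees = defaultdict(set)
  for t, ds in incoming.items():
    for d in ds:
      dependees[d].add(t)

  frontier = {t for t, ds in incoming.items() if not ds}
  ordered = []
  while frontier:
    # Prefer a target with the same settings as the previous pick, to keep chains long.
    current = platform_of(ordered[-1]) if ordered else None
    same = [t for t in frontier if platform_of(t) == current]
    node = same[0] if same else next(iter(frontier))
    frontier.remove(node)
    ordered.append(node)
    for dependee in dependees[node]:
      incoming[dependee].discard(node)
      if not incoming[dependee]:
        frontier.add(dependee)

  if len(ordered) != len(targets):
    raise ValueError('Cycle detected while ordering targets for compilation.')

  # Collapse consecutive targets with identical settings into (settings, [targets]) chunks.
  chunks = []
  for t in ordered:
    if chunks and chunks[-1][0] == platform_of(t):
      chunks[-1][1].append(t)
    else:
      chunks.append((platform_of(t), [t]))
  return chunks
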
- invalid_sources_by_target = {t: self._sources_for_target(t) for t in invalid_targets} - - tmpdir = os.path.join(self.analysis_tmpdir, str(uuid.uuid4())) - os.mkdir(tmpdir) - - # Figure out the sources and analysis belonging to each partition. - partitions = [] # Each element is a triple (vts, sources_by_target, analysis). - for vts in invalidation_check.invalid_vts_partitioned: - partition_tmpdir = os.path.join(tmpdir, Target.maybe_readable_identify(vts.targets)) - os.mkdir(partition_tmpdir) - sources = list(itertools.chain.from_iterable( - [invalid_sources_by_target.get(t, []) for t in vts.targets])) - de_duped_sources = list(OrderedSet(sources)) - if len(sources) != len(de_duped_sources): - counts = [(src, len(list(srcs))) for src, srcs in itertools.groupby(sorted(sources))] - self.context.log.warn( - 'De-duped the following sources:\n\t{}' - .format('\n\t'.join(sorted('{} {}'.format(cnt, src) for src, cnt in counts if cnt > 1)))) - analysis_file = os.path.join(partition_tmpdir, 'analysis') - partitions.append((vts, de_duped_sources, analysis_file)) - - # Split per-partition files out of the global invalid analysis. - if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file) and partitions: - with self.context.new_workunit(name='partition-analysis'): - splits = [(x[1], x[2]) for x in partitions] - # We have to pass the analysis for any deleted files through zinc, to give it - # a chance to delete the relevant class files. - if splits: - splits[0] = (splits[0][0] + self._deleted_sources, splits[0][1]) - self._analysis_tools.split_to_paths(self._invalid_analysis_file, splits) - - # Now compile partitions one by one. - for partition_index, partition in enumerate(partitions): - (vts, sources, analysis_file) = partition - - progress_message = 'partition {} of {}'.format(partition_index + 1, len(partitions)) - # We have to treat the global output dir as an upstream element, so compilers can - # find valid analysis for previous partitions. We use the global valid analysis - # for the upstream. - upstream_analysis = ({self._classes_dir: self._analysis_file} - if os.path.exists(self._analysis_file) else {}) - compile_vts(vts, - sources, - analysis_file, - upstream_analysis, - compile_classpath, - self._classes_dir, - None, - progress_message, - settings) - - # No exception was thrown, therefore the compile succeeded and analysis_file is now valid. - if os.path.exists(analysis_file): # The compilation created an analysis. - # Merge the newly-valid analysis with our global valid analysis. - new_valid_analysis = analysis_file + '.valid.new' - if self._analysis_parser.is_nonempty_analysis(self._analysis_file): - with self.context.new_workunit(name='update-upstream-analysis'): - self._analysis_tools.merge_from_paths([self._analysis_file, analysis_file], - new_valid_analysis) - else: # We need to keep analysis_file around. Background tasks may need it. - shutil.copy(analysis_file, new_valid_analysis) - - # Move the merged valid analysis to its proper location. - # We do this before checking for missing dependencies, so that we can still - # enjoy an incremental compile after fixing missing deps. - self.move(new_valid_analysis, self._analysis_file) - - # Update the products with the latest classes. Must happen before the - # missing dependencies check. - register_vts([self.compile_context(t) for t in vts.targets]) - - # Kick off the background artifact cache write. 
- if update_artifact_cache_vts_work: - self._write_to_artifact_cache(analysis_file, - vts, - update_artifact_cache_vts_work) - - if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file): - with self.context.new_workunit(name='trim-downstream-analysis'): - # Trim out the newly-valid sources from our global invalid analysis. - new_invalid_analysis = analysis_file + '.invalid.new' - discarded_invalid_analysis = analysis_file + '.invalid.discard' - self._analysis_tools.split_to_paths(self._invalid_analysis_file, - [(sources, discarded_invalid_analysis)], new_invalid_analysis) - self.move(new_invalid_analysis, self._invalid_analysis_file) - - # Record the built target -> sources mapping for future use. - for target, sources in self._sources_for_targets(vts.targets).items(): - self._record_previous_sources_by_target(target, sources) - - # Now that all the analysis accounting is complete, and we have no missing deps, - # we can safely mark the targets as valid. - vts.update() - - def compute_resource_mapping(self, compile_contexts): - return ResourceMapping(self._classes_dir) - - def compute_classes_by_source(self, compile_contexts): - if not compile_contexts: - return {} - - # This implementation requires that all contexts use the same analysis file and global classes. - analysis_file = None - for compile_context in compile_contexts: - if compile_context.classes_dir != self._classes_dir: - raise TaskError('Unrecognized classes directory for the global strategy: {}'.format( - compile_context.classes_dir)) - if not analysis_file: - analysis_file = compile_context.analysis_file - else: - if compile_context.analysis_file != analysis_file: - raise TaskError('Inconsistent analysis file for the global strategy: {} vs {}'.format( - compile_context.analysis_file, analysis_file)) - - classes_by_src_by_context = defaultdict(dict) - if os.path.exists(analysis_file): - # Parse the global analysis once. - buildroot = get_buildroot() - products = self._analysis_parser.parse_products_from_path(analysis_file, - self._classes_dir) - - # Then iterate over contexts (targets), and add the classes for their sources. - for compile_context in compile_contexts: - classes_by_src = classes_by_src_by_context[compile_context] - for source in compile_context.sources: - absolute_source = os.path.join(buildroot, source) - classes_by_src[source] = products.get(absolute_source, []) - return classes_by_src_by_context - - def post_process_cached_vts(self, cached_vts): - """Special post processing for global scala analysis files. - - Class files are retrieved directly into their final locations in the global classes dir. - """ - - # Get all the targets whose artifacts we found in the cache. - cached_targets = [] - for vt in cached_vts: - for target in vt.targets: - cached_targets.append(target) - - # The current global analysis may contain old data for modified targets for - # which we got cache hits. We need to strip out this old analysis, to ensure - # that the new data incoming from the cache doesn't collide with it during the merge. - sources_to_strip = [] - if os.path.exists(self._analysis_file): - for target in cached_targets: - sources_to_strip.extend(self._get_previous_sources_by_target(target)) - - # Localize the cached analyses. 
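
# Toy sketch of the valid/invalid analysis bookkeeping performed above, modelling an
# analysis file as a plain dict of source -> analysis entries. The real strategy delegated
# splitting and merging to zinc's analysis tools; the names here are illustrative only.
def split_analysis(analysis, sources):
  """Splits `analysis` into (entries for `sources`, remaining entries)."""
  sources = set(sources)
  matching = {src: data for src, data in analysis.items() if src in sources}
  remaining = {src: data for src, data in analysis.items() if src not in sources}
  return matching, remaining

def merge_analysis(*analyses):
  merged = {}
  for analysis in analyses:
    merged.update(analysis)
  return merged

# After a partition compiles cleanly, its fresh analysis is merged into the global valid
# analysis and the newly compiled sources are trimmed out of the invalid analysis:
#   valid_analysis = merge_analysis(valid_analysis, fresh_analysis)
#   _, invalid_analysis = split_analysis(invalid_analysis, compiled_sources)
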
- analyses_to_merge = [] - for target in cached_targets: - analysis_file = JvmCompileStrategy._analysis_for_target(self.analysis_tmpdir, target) - portable_analysis_file = JvmCompileStrategy._portable_analysis_for_target( - self.analysis_tmpdir, target) - if os.path.exists(portable_analysis_file): - self._analysis_tools.localize(portable_analysis_file, analysis_file) - if os.path.exists(analysis_file): - analyses_to_merge.append(analysis_file) - - # Merge them into the global analysis. - if analyses_to_merge: - with temporary_dir() as tmpdir: - if sources_to_strip: - throwaway = os.path.join(tmpdir, 'throwaway') - trimmed_analysis = os.path.join(tmpdir, 'trimmed') - self._analysis_tools.split_to_paths(self._analysis_file, - [(sources_to_strip, throwaway)], - trimmed_analysis) - else: - trimmed_analysis = self._analysis_file - if os.path.exists(trimmed_analysis): - analyses_to_merge.append(trimmed_analysis) - tmp_analysis = os.path.join(tmpdir, 'analysis') - with self.context.new_workunit(name='merge_analysis'): - self._analysis_tools.merge_from_paths(analyses_to_merge, tmp_analysis) - - sources_by_cached_target = self._sources_for_targets(cached_targets) - - # Record the cached target -> sources mapping for future use. - for target, sources in sources_by_cached_target.items(): - self._record_previous_sources_by_target(target, sources) - - # Everything's good so move the merged analysis to its final location. - if os.path.exists(tmp_analysis): - self.move(tmp_analysis, self._analysis_file) - - def _write_to_artifact_cache(self, analysis_file, vts, get_update_artifact_cache_work): - vt_by_target = dict([(vt.target, vt) for vt in vts.versioned_targets]) - - vts_targets = [t for t in vts.targets if not t.has_label('no_cache')] - - # Determine locations for analysis files that will be split in the background. - split_analysis_files = [ - JvmCompileStrategy._analysis_for_target(self.analysis_tmpdir, t) for t in vts_targets] - portable_split_analysis_files = [ - JvmCompileStrategy._portable_analysis_for_target(self.analysis_tmpdir, t) for t in vts_targets] - - # Set up args for splitting the analysis into per-target files. - splits = zip([self._sources_for_target(t) for t in vts_targets], split_analysis_files) - splits_args_tuples = [(analysis_file, splits)] - - # Set up args for rebasing the splits. - relativize_args_tuples = zip(split_analysis_files, portable_split_analysis_files) - - # Compute the classes and resources for each vts. - compile_contexts = [self.compile_context(t) for t in vts_targets] - vts_artifactfiles_pairs = [] - classes_by_source_by_context = self.compute_classes_by_source(compile_contexts) - resources_by_target = self.context.products.get_data('resources_by_target') - for compile_context in compile_contexts: - target = compile_context.target - if target.has_label('no_cache'): - continue - artifacts = [] - if resources_by_target is not None: - for _, paths in resources_by_target[target].abs_paths(): - artifacts.extend(paths) - classes_by_source = classes_by_source_by_context[compile_context] - for source in compile_context.sources: - classes = classes_by_source.get(source, []) - artifacts.extend(classes) - - vt = vt_by_target.get(target) - if vt is not None: - # NOTE: analysis_file doesn't exist yet. 
- vts_artifactfiles_pairs.append( - (vt, artifacts + [JvmCompileStrategy._portable_analysis_for_target( - self.analysis_tmpdir, target)])) - - update_artifact_cache_work = get_update_artifact_cache_work(vts_artifactfiles_pairs) - if update_artifact_cache_work: - work_chain = [ - Work(self._analysis_tools.split_to_paths, splits_args_tuples, 'split'), - Work(self._analysis_tools.relativize, relativize_args_tuples, 'relativize'), - update_artifact_cache_work - ] - self.context.submit_background_work_chain(work_chain, parent_workunit_name='cache') - - def _get_previous_sources_by_target(self, target): - """Returns the target's sources as recorded on the last successful build of target. - - Returns a list of absolute paths. - """ - path = os.path.join(self._target_sources_dir, target.identifier) - if os.path.exists(path): - with open(path, 'r') as infile: - return [s.rstrip() for s in infile.readlines()] - else: - return [] - - def _record_previous_sources_by_target(self, target, sources): - # Record target -> source mapping for future use. - with open(os.path.join(self._target_sources_dir, target.identifier), 'w') as outfile: - for src in sources: - outfile.write(os.path.join(get_buildroot(), src)) - outfile.write(b'\n') - - def _compute_deleted_sources(self): - """Computes the list of sources present in the last analysis that have since been deleted. - - This is a global list. We have no way of associating them to individual targets. - Paths are relative to buildroot. - """ - with self.context.new_workunit('find-deleted-sources'): - if os.path.exists(self._analysis_file): - products = self._analysis_parser.parse_products_from_path(self._analysis_file, - self._classes_dir) - buildroot = get_buildroot() - old_srcs = products.keys() # Absolute paths. - return [os.path.relpath(src, buildroot) for src in old_srcs if not os.path.exists(src)] - else: - return [] - - def _find_locally_changed_targets(self, relevant_targets): - """Finds the targets whose sources have been modified locally. - - Returns a list of targets, or None if no SCM is available. - """ - # Compute the src->targets mapping. There should only be one target per source, - # but that's not yet a hard requirement, so the value is a list of targets. - # TODO(benjy): Might this inverse mapping be needed elsewhere too? - targets_by_source = defaultdict(list) - for tgt, srcs in self._sources_for_targets(relevant_targets).items(): - for src in srcs: - targets_by_source[src].append(tgt) - - ret = OrderedSet() - scm = get_scm() - if not scm: - return None - changed_files = scm.changed_files(include_untracked=True, relative_to=get_buildroot()) - for f in changed_files: - ret.update(targets_by_source.get(f, [])) - return list(ret) diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py deleted file mode 100644 index e57eab0d779..00000000000 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_isolated_strategy.py +++ /dev/null @@ -1,419 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -from __future__ import (absolute_import, division, generators, nested_scopes, print_function, - unicode_literals, with_statement) - -import functools -import os -import shutil -from collections import OrderedDict, defaultdict -from hashlib import sha1 - -from pants.backend.jvm.tasks.classpath_util import ClasspathUtil -from pants.backend.jvm.tasks.jvm_compile.compile_context import IsolatedCompileContext -from pants.backend.jvm.tasks.jvm_compile.execution_graph import (ExecutionFailure, ExecutionGraph, - Job) -from pants.backend.jvm.tasks.jvm_compile.jvm_compile_strategy import JvmCompileStrategy -from pants.backend.jvm.tasks.jvm_compile.resource_mapping import ResourceMapping -from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TaskError -from pants.base.worker_pool import Work, WorkerPool -from pants.util.dirutil import fast_relpath, safe_mkdir, safe_walk -from pants.util.fileutil import atomic_copy, create_size_estimators - - -# This class holds onto class directories rather than CompileContexts because -# CompileContext aren't picklable. -class IsolationCacheHitCallback(object): - """A serializable cache hit callback that cleans the class directory prior to cache extraction.""" - - def __init__(self, cache_key_to_class_dir): - self._key_to_classes_dir = cache_key_to_class_dir - - def __call__(self, cache_key): - class_dir = self._key_to_classes_dir.get(cache_key) - if class_dir: - safe_mkdir(class_dir, clean=True) - - -class JvmCompileIsolatedStrategy(JvmCompileStrategy): - """A strategy for JVM compilation that uses per-target classpaths and analysis.""" - - size_estimators = create_size_estimators() - - @classmethod - def size_estimator_by_name(cls, estimation_strategy_name): - return cls.size_estimators[estimation_strategy_name] - - @classmethod - def register_options(cls, register, compile_task_name, supports_concurrent_execution): - if supports_concurrent_execution: - register('--worker-count', advanced=True, type=int, default=1, - help='The number of concurrent workers to use compiling with {task} with the ' - 'isolated strategy.'.format(task=compile_task_name)) - register('--size-estimator', advanced=True, - choices=list(cls.size_estimators.keys()), default='filesize', - help='The method of target size estimation.') - register('--capture-log', advanced=True, action='store_true', default=False, - fingerprint=True, - help='Capture compilation output to per-target logs.') - - def __init__(self, context, options, workdir, analysis_tools, compile_task_name, - sources_predicate): - super(JvmCompileIsolatedStrategy, self).__init__(context, options, workdir, analysis_tools, - compile_task_name, sources_predicate) - - # Various working directories. 
- self._analysis_dir = os.path.join(workdir, 'isolated-analysis') - self._classes_dir = os.path.join(workdir, 'isolated-classes') - self._logs_dir = os.path.join(workdir, 'isolated-logs') - self._jars_dir = os.path.join(workdir, 'jars') - - self._capture_log = options.capture_log - - try: - worker_count = options.worker_count - except AttributeError: - # tasks that don't support concurrent execution have no worker_count registered - worker_count = 1 - self._worker_count = worker_count - - self._size_estimator = self.size_estimator_by_name(options.size_estimator) - - self._worker_pool = None - - def name(self): - return 'isolated' - - def compile_context(self, target): - analysis_file = JvmCompileStrategy._analysis_for_target(self._analysis_dir, target) - classes_dir = os.path.join(self._classes_dir, target.id) - # Generate a short unique path for the jar to allow for shorter classpaths. - # TODO: likely unnecessary after https://github.com/pantsbuild/pants/issues/1988 - jar_file = os.path.join(self._jars_dir, '{}.jar'.format(sha1(target.id).hexdigest()[:12])) - return IsolatedCompileContext(target, - analysis_file, - classes_dir, - jar_file, - self._sources_for_target(target)) - - def _create_compile_contexts_for_targets(self, targets): - compile_contexts = OrderedDict() - for target in targets: - compile_context = self.compile_context(target) - compile_contexts[target] = compile_context - return compile_contexts - - def pre_compile(self): - super(JvmCompileIsolatedStrategy, self).pre_compile() - safe_mkdir(self._analysis_dir) - safe_mkdir(self._classes_dir) - safe_mkdir(self._logs_dir) - safe_mkdir(self._jars_dir) - - def prepare_compile(self, cache_manager, all_targets, relevant_targets): - super(JvmCompileIsolatedStrategy, self).prepare_compile(cache_manager, all_targets, - relevant_targets) - - # Update the classpath by adding relevant target's classes directories to its classpath. - compile_classpaths = self.context.products.get_data('compile_classpath') - - with self.context.new_workunit('validate-{}-analysis'.format(self._compile_task_name)): - for target in relevant_targets: - cc = self.compile_context(target) - safe_mkdir(cc.classes_dir) - compile_classpaths.add_for_target(target, [(conf, cc.classes_dir) for conf in self._confs]) - self.validate_analysis(cc.analysis_file) - - # This ensures the workunit for the worker pool is set - with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self._compile_task_name)) \ - as workunit: - # This uses workunit.parent as the WorkerPool's parent so that child workunits - # of different pools will show up in order in the html output. This way the current running - # workunit is on the bottom of the page rather than possibly in the middle. - self._worker_pool = WorkerPool(workunit.parent, - self.context.run_tracker, - self._worker_count) - - def finalize_compile(self, targets): - # Replace the classpath entry for each target with its jar'd representation. - compile_classpaths = self.context.products.get_data('compile_classpath') - for target in targets: - cc = self.compile_context(target) - for conf in self._confs: - compile_classpaths.remove_for_target(target, [(conf, cc.classes_dir)]) - compile_classpaths.add_for_target(target, [(conf, cc.jar_file)]) - - def invalidation_hints(self, relevant_targets): - # No partitioning. - return (0, None) - - def compute_classes_by_source(self, compile_contexts): - buildroot = get_buildroot() - # Build a mapping of srcs to classes for each context. 
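
# Illustrative sketch of the short, stable per-target jar path used by compile_context()
# above, which keeps classpaths short even when target ids are long. The argument names
# are illustrative.
import hashlib
import os

def context_jar_path(jars_dir, target_id):
  digest = hashlib.sha1(target_id.encode('utf-8')).hexdigest()[:12]
  return os.path.join(jars_dir, '{}.jar'.format(digest))

# context_jar_path('<workdir>/jars', 'some.very.long.target.id')
#   -> '<workdir>/jars/<12 hex chars>.jar'
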
- classes_by_src_by_context = defaultdict(dict) - for compile_context in compile_contexts: - # Walk the context's jar to build a set of unclaimed classfiles. - unclaimed_classes = set() - with compile_context.open_jar(mode='r') as jar: - for name in jar.namelist(): - unclaimed_classes.add(os.path.join(compile_context.classes_dir, name)) - - # Grab the analysis' view of which classfiles were generated. - classes_by_src = classes_by_src_by_context[compile_context] - if os.path.exists(compile_context.analysis_file): - products = self._analysis_parser.parse_products_from_path(compile_context.analysis_file, - compile_context.classes_dir) - for src, classes in products.items(): - relsrc = fast_relpath(src, buildroot) - classes_by_src[relsrc] = classes - unclaimed_classes.difference_update(classes) - - # Any remaining classfiles were unclaimed by sources/analysis. - classes_by_src[None] = list(unclaimed_classes) - return classes_by_src_by_context - - def _compute_classpath_entries(self, compile_classpaths, - target_closure, - compile_context, - extra_compile_time_classpath): - # Generate a classpath specific to this compile and target. - return ClasspathUtil.compute_classpath_for_target(compile_context.target, compile_classpaths, - extra_compile_time_classpath, self._confs, - target_closure) - - def _upstream_analysis(self, compile_contexts, classpath_entries): - """Returns tuples of classes_dir->analysis_file for the closure of the target.""" - # Reorganize the compile_contexts by class directory. - compile_contexts_by_directory = {} - for compile_context in compile_contexts.values(): - compile_contexts_by_directory[compile_context.classes_dir] = compile_context - # If we have a compile context for the target, include it. - for entry in classpath_entries: - if not entry.endswith('.jar'): - compile_context = compile_contexts_by_directory.get(entry) - if not compile_context: - self.context.log.debug('Missing upstream analysis for {}'.format(entry)) - else: - yield compile_context.classes_dir, compile_context.analysis_file - - def _capture_log_file(self, target): - if self._capture_log: - return os.path.join(self._logs_dir, "{}.log".format(target.id)) - return None - - def exec_graph_key_for_target(self, compile_target): - return "compile({})".format(compile_target.address.spec) - - def _create_compile_jobs(self, compile_classpaths, compile_contexts, extra_compile_time_classpath, - invalid_targets, invalid_vts_partitioned, check_vts, compile_vts, - register_vts, update_artifact_cache_vts_work): - def check_cache(vts): - """Manually checks the artifact cache (usually immediately before compilation.) - - Returns true if the cache was hit successfully, indicating that no compilation is necessary. 
- """ - if not check_vts: - return False - cached_vts, uncached_vts = check_vts([vts]) - if not cached_vts: - self.context.log.debug('Missed cache during double check for {}'.format(vts.target.address.spec)) - return False - assert cached_vts == [vts], ( - 'Cache returned unexpected target: {} vs {}'.format(cached_vts, [vts]) - ) - self.context.log.info('Hit cache during double check for {}'.format(vts.target.address.spec)) - return True - - def work_for_vts(vts, compile_context, target_closure): - progress_message = compile_context.target.address.spec - cp_entries = self._compute_classpath_entries(compile_classpaths, - target_closure, - compile_context, - extra_compile_time_classpath) - - upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) - - # Capture a compilation log if requested. - log_file = self._capture_log_file(compile_context.target) - - # Double check the cache before beginning compilation - if not check_cache(vts): - # Mutate analysis within a temporary directory, and move it to the final location - # on success. - tmpdir = os.path.join(self.analysis_tmpdir, compile_context.target.id) - safe_mkdir(tmpdir) - tmp_analysis_file = JvmCompileStrategy._analysis_for_target( - tmpdir, compile_context.target) - if os.path.exists(compile_context.analysis_file): - shutil.copy(compile_context.analysis_file, tmp_analysis_file) - target, = vts.targets - compile_vts(vts, - compile_context.sources, - tmp_analysis_file, - upstream_analysis, - cp_entries, - compile_context.classes_dir, - log_file, - progress_message, - target.platform) - atomic_copy(tmp_analysis_file, compile_context.analysis_file) - - # Jar the compiled output. - self._create_context_jar(compile_context) - - # Update the products with the latest classes. - register_vts([compile_context]) - - # Kick off the background artifact cache write. - if update_artifact_cache_vts_work: - self._write_to_artifact_cache(vts, compile_context, update_artifact_cache_vts_work) - - jobs = [] - invalid_target_set = set(invalid_targets) - for vts in invalid_vts_partitioned: - assert len(vts.targets) == 1, ("Requested one target per partition, got {}".format(vts)) - - # Invalidated targets are a subset of relevant targets: get the context for this one. - compile_target = vts.targets[0] - compile_context = compile_contexts[compile_target] - compile_target_closure = compile_target.closure() - - # dependencies of the current target which are invalid for this chunk - invalid_dependencies = (compile_target_closure & invalid_target_set) - [compile_target] - - jobs.append(Job(self.exec_graph_key_for_target(compile_target), - functools.partial(work_for_vts, vts, compile_context, compile_target_closure), - [self.exec_graph_key_for_target(target) for target in invalid_dependencies], - self._size_estimator(compile_context.sources), - # If compilation and analysis work succeeds, validate the vts. - # Otherwise, fail it. - on_success=vts.update, - on_failure=vts.force_invalidate)) - return jobs - - def compile_chunk(self, - invalidation_check, - all_targets, - relevant_targets, - invalid_targets, - extra_compile_time_classpath_elements, - check_vts, - compile_vts, - register_vts, - update_artifact_cache_vts_work): - """Executes compilations for the invalid targets contained in a single chunk.""" - assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets." - # Get the classpath generated by upstream JVM tasks and our own prepare_compile(). 
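
# Illustrative sketch of the per-target job graph assembled in _create_compile_jobs()
# above: one job per invalid target, keyed by its spec and depending only on its invalid
# dependencies. SketchJob is a hypothetical record, not the pants ExecutionGraph Job class.
import functools
from collections import namedtuple

SketchJob = namedtuple('SketchJob', ['key', 'fn', 'dependencies', 'size'])

def create_compile_jobs(invalid_targets, closure_of, sources_of, compile_one):
  invalid = set(invalid_targets)
  jobs = []
  for target in invalid_targets:
    invalid_deps = (set(closure_of(target)) & invalid) - {target}
    jobs.append(SketchJob(
        key='compile({})'.format(target),
        fn=functools.partial(compile_one, target),
        dependencies=sorted('compile({})'.format(dep) for dep in invalid_deps),
        size=len(sources_of(target))))
  return jobs
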
- compile_classpaths = self.context.products.get_data('compile_classpath') - - extra_compile_time_classpath = self._compute_extra_classpath( - extra_compile_time_classpath_elements) - - compile_contexts = self._create_compile_contexts_for_targets(all_targets) - - # Now create compile jobs for each invalid target one by one. - jobs = self._create_compile_jobs(compile_classpaths, - compile_contexts, - extra_compile_time_classpath, - invalid_targets, - invalidation_check.invalid_vts_partitioned, - check_vts, - compile_vts, - register_vts, - update_artifact_cache_vts_work) - - exec_graph = ExecutionGraph(jobs) - try: - exec_graph.execute(self._worker_pool, self.context.log) - except ExecutionFailure as e: - raise TaskError("Compilation failure: {}".format(e)) - - def compute_resource_mapping(self, compile_contexts): - return ResourceMapping(self._classes_dir) - - def create_cache_hit_callback(self, vts): - cache_key_to_classes_dir = {v.cache_key: self.compile_context(v.target).classes_dir - for v in vts} - return IsolationCacheHitCallback(cache_key_to_classes_dir) - - def post_process_cached_vts(self, cached_vts): - """Localizes the fetched analysis for targets we found in the cache. - - This is the complement of `_write_to_artifact_cache`. - """ - compile_contexts = [] - for vt in cached_vts: - for target in vt.targets: - compile_contexts.append(self.compile_context(target)) - - for compile_context in compile_contexts: - portable_analysis_file = JvmCompileStrategy._portable_analysis_for_target( - self._analysis_dir, compile_context.target) - if os.path.exists(portable_analysis_file): - self._analysis_tools.localize(portable_analysis_file, compile_context.analysis_file) - - def _create_context_jar(self, compile_context): - """Jar up the compile_context to its output jar location. - - TODO(stuhood): In the medium term, we hope to add compiler support for this step, which would - allow the jars to be used as compile _inputs_ as well. Currently using jar'd compile outputs as - compile inputs would make the compiler's analysis useless. - see https://github.com/twitter-forks/sbt/tree/stuhood/output-jars - """ - root = compile_context.classes_dir - with compile_context.open_jar(mode='w') as jar: - for abs_sub_dir, _, filenames in safe_walk(root): - for name in filenames: - abs_filename = os.path.join(abs_sub_dir, name) - arcname = os.path.relpath(abs_filename, root) - jar.write(abs_filename, arcname) - - def _write_to_artifact_cache(self, vts, compile_context, get_update_artifact_cache_work): - assert len(vts.targets) == 1 - assert vts.targets[0] == compile_context.target - - # Noop if the target is uncacheable. - if (compile_context.target.has_label('no_cache')): - return - vt = vts.versioned_targets[0] - - # Set up args to relativize analysis in the background. - portable_analysis_file = JvmCompileStrategy._portable_analysis_for_target( - self._analysis_dir, compile_context.target) - relativize_args_tuple = (compile_context.analysis_file, portable_analysis_file) - - # Collect the artifacts for this target. - artifacts = [] - - def add_abs_products(p): - if p: - for _, paths in p.abs_paths(): - artifacts.extend(paths) - # Resources. - resources_by_target = self.context.products.get_data('resources_by_target') - add_abs_products(resources_by_target.get(compile_context.target)) - # Classes. - classes_by_target = self.context.products.get_data('classes_by_target') - add_abs_products(classes_by_target.get(compile_context.target)) - # Log file. 
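
# Illustrative sketch of jarring a per-target classes directory as _create_context_jar()
# does above, using the stdlib zipfile module rather than the pants open_jar() helper.
import os
import zipfile

def jar_classes_dir(classes_dir, jar_path):
  with zipfile.ZipFile(jar_path, 'w') as jar:
    for dirpath, _, filenames in os.walk(classes_dir):
      for filename in filenames:
        abs_path = os.path.join(dirpath, filename)
        jar.write(abs_path, os.path.relpath(abs_path, classes_dir))
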
- log_file = self._capture_log_file(compile_context.target) - if log_file and os.path.exists(log_file): - artifacts.append(log_file) - # Jar. - artifacts.append(compile_context.jar_file) - - # Get the 'work' that will publish these artifacts to the cache. - # NB: the portable analysis_file won't exist until we finish. - vts_artifactfiles_pair = (vt, artifacts + [portable_analysis_file]) - update_artifact_cache_work = get_update_artifact_cache_work([vts_artifactfiles_pair]) - - # And execute it. - if update_artifact_cache_work: - work_chain = [ - Work(self._analysis_tools.relativize, [relativize_args_tuple], 'relativize'), - update_artifact_cache_work - ] - self.context.submit_background_work_chain(work_chain, parent_workunit_name='cache') diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_strategy.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_strategy.py deleted file mode 100644 index 29516b7e0f7..00000000000 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile_strategy.py +++ /dev/null @@ -1,244 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import (absolute_import, division, generators, nested_scopes, print_function, - unicode_literals, with_statement) - -import os -from abc import ABCMeta, abstractmethod -from collections import defaultdict - -from twitter.common.collections import OrderedSet - -from pants.base.build_environment import get_buildroot, get_scm -from pants.base.exceptions import TaskError -from pants.util.dirutil import safe_delete, safe_mkdir, safe_rmtree - - -class JvmCompileStrategy(object): - """An abstract base strategy for JVM compilation.""" - - __metaclass__ = ABCMeta - # Common code. - # ------------ - - @staticmethod - def _analysis_for_target(analysis_dir, target): - return os.path.join(analysis_dir, target.id + '.analysis') - - @staticmethod - def _portable_analysis_for_target(analysis_dir, target): - return JvmCompileStrategy._analysis_for_target(analysis_dir, target) + '.portable' - - @classmethod - @abstractmethod - def register_options(cls, register, compile_task_name, supports_concurrent_execution): - """Registration for strategy-specific options. - - The abstract base class does not register any options itself: those are left to JvmCompile. - """ - - def __init__(self, context, options, workdir, analysis_tools, compile_task_name, - sources_predicate): - self._compile_task_name = compile_task_name - self.context = context - self._analysis_tools = analysis_tools - - self._workdir = workdir - self.delete_scratch = options.delete_scratch - - # Mapping of relevant (as selected by the predicate) sources by target. - self._sources_by_target = None - self._sources_predicate = sources_predicate - - # The ivy confs for which we're building. - self._confs = options.confs - self._clear_invalid_analysis = options.clear_invalid_analysis - - @abstractmethod - def name(self): - """A readable, unique name for this strategy.""" - - @abstractmethod - def invalidation_hints(self, relevant_targets): - """A tuple of partition_size_hint and locally_changed targets for the given inputs.""" - - @abstractmethod - def compile_context(self, target): - """Returns the default/stable compile context for the given target. - - Temporary compile contexts are private to the strategy. 
- """ - - @abstractmethod - def compute_classes_by_source(self, compile_contexts): - """Compute a map of (context->(src->classes)) for the given compile_contexts. - - It's possible (although unfortunate) for multiple targets to own the same sources, hence - the top level division. Srcs are relative to buildroot. Classes are absolute paths. - - Strategies may also return classes with 'None' as their src, to indicate that compiler - analysis indicated that they were un-owned. This case is triggered when annotation - processors generate classes (or due to bugs in classfile tracking in zinc/jmake.) - - """ - - @abstractmethod - def compile_chunk(self, - invalidation_check, - all_targets, - relevant_targets, - invalid_targets, - extra_compile_time_classpath_elements, - check_vts, - compile_vts, - register_vts, - update_artifact_cache_vts_work): - """Executes compilations for the invalid targets contained in a single language chunk.""" - - @abstractmethod - def post_process_cached_vts(self, cached_vts): - """Post processes VTS that have been fetched from the cache.""" - - def create_cache_hit_callback(self, vts): - """Factory method for artifact cache hit callback functions. - - Return value must be serializable. - """ - return None - - @abstractmethod - def compute_resource_mapping(self, compile_contexts): - """Computes a merged ResourceMapping for the given compile contexts. - - Since classes should live in exactly one context, a merged mapping is unambiguous. - """ - - def pre_compile(self): - """Executed once before any compiles.""" - self.analysis_tmpdir = self.ensure_analysis_tmpdir() - - def validate_analysis(self, path): - """Throws a TaskError for invalid analysis files.""" - try: - self._analysis_parser.validate_analysis(path) - except Exception as e: - if self._clear_invalid_analysis: - self.context.log.warn("Invalid analysis detected at path {} ... pants will remove these " - "automatically, but\nyou may experience spurious warnings until " - "clean-all is executed.\n{}".format(path, e)) - safe_delete(path) - else: - raise TaskError("An internal build directory contains invalid/mismatched analysis: please " - "run `clean-all` if your tools versions changed recently:\n{}".format(e)) - - def prepare_compile(self, cache_manager, all_targets, relevant_targets): - """Prepares to compile the given set of targets. - - Has the side effects of pruning old analysis, and computing deleted sources. - """ - # Target -> sources (relative to buildroot). - # TODO(benjy): Should sources_by_target be available in all Tasks? 
- self._sources_by_target = self._compute_sources_by_target(relevant_targets) - - def finalize_compile(self, relevant_targets): - """Executed once after all targets have been compiled.""" - pass - - def class_name_for_class_file(self, compile_context, class_file_name): - if not class_file_name.endswith(".class"): - return None - assert class_file_name.startswith(compile_context.classes_dir) - class_file_name = class_file_name[len(compile_context.classes_dir) + 1:-len(".class")] - return class_file_name.replace("/", ".") - - def _compute_sources_by_target(self, targets): - """Computes and returns a map target->sources (relative to buildroot).""" - def resolve_target_sources(target_sources): - resolved_sources = [] - for target in target_sources: - if target.has_sources(): - resolved_sources.extend(target.sources_relative_to_buildroot()) - return resolved_sources - - def calculate_sources(target): - sources = [s for s in target.sources_relative_to_buildroot() if self._sources_predicate(s)] - # TODO: Make this less hacky. Ideally target.java_sources will point to sources, not targets. - if hasattr(target, 'java_sources') and target.java_sources: - sources.extend(resolve_target_sources(target.java_sources)) - return sources - return {t: calculate_sources(t) for t in targets} - - def _sources_for_targets(self, targets): - """Returns a cached map of target->sources for the specified targets.""" - if self._sources_by_target is None: - raise TaskError('self._sources_by_target not computed yet.') - return {t: self._sources_by_target.get(t, []) for t in targets} - - def _sources_for_target(self, target): - """Returns the cached sources for the given target.""" - if self._sources_by_target is None: - raise TaskError('self._sources_by_target not computed yet.') - return self._sources_by_target.get(target, []) - - def _find_locally_changed_targets(self, sources_by_target): - """Finds the targets whose sources have been modified locally. - - Returns a list of targets, or None if no SCM is available. - """ - # Compute the src->targets mapping. There should only be one target per source, - # but that's not yet a hard requirement, so the value is a list of targets. - # TODO(benjy): Might this inverse mapping be needed elsewhere too? - targets_by_source = defaultdict(list) - for tgt, srcs in sources_by_target.items(): - for src in srcs: - targets_by_source[src].append(tgt) - - ret = OrderedSet() - scm = get_scm() - if not scm: - return None - changed_files = scm.changed_files(include_untracked=True, relative_to=get_buildroot()) - for f in changed_files: - ret.update(targets_by_source.get(f, [])) - return list(ret) - - @property - def _analysis_parser(self): - return self._analysis_tools.parser - - # Compute any extra compile-time-only classpath elements. - # TODO(benjy): Model compile-time vs. runtime classpaths more explicitly. - # TODO(benjy): Add a pre-execute goal for injecting deps into targets, so e.g., - # we can inject a dep on the scala runtime library and still have it ivy-resolve. - def _compute_extra_classpath(self, extra_compile_time_classpath_elements): - def extra_compile_classpath_iter(): - for conf in self._confs: - for jar in extra_compile_time_classpath_elements: - yield (conf, jar) - - return list(extra_compile_classpath_iter()) - - def ensure_analysis_tmpdir(self): - """Work in a tmpdir so we don't stomp the main analysis files on error. - - A temporary, but well-known, dir in which to munge analysis/dependency files in before - caching. 
It must be well-known so we know where to find the files when we retrieve them from - the cache. The tmpdir is cleaned up in a shutdown hook, because background work - may need to access files we create there even after this method returns - :return: path of temporary analysis directory - """ - analysis_tmpdir = os.path.join(self._workdir, 'analysis_tmpdir') - if self.delete_scratch: - self.context.background_worker_pool().add_shutdown_hook( - lambda: safe_rmtree(analysis_tmpdir)) - safe_mkdir(analysis_tmpdir) - return analysis_tmpdir - - def parse_deps(self, analysis_file): - """Parses the actual source dependencies given an analysis file. - - The dependencies are returned as relative paths. - """ - return self._analysis_parser.parse_deps_from_path(analysis_file) diff --git a/src/python/pants/backend/jvm/tasks/jvm_dependency_usage.py b/src/python/pants/backend/jvm/tasks/jvm_dependency_usage.py index 33cdbebcf08..d2f697deba0 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_dependency_usage.py +++ b/src/python/pants/backend/jvm/tasks/jvm_dependency_usage.py @@ -13,7 +13,6 @@ from pants.backend.core.targets.dependencies import Dependencies from pants.backend.core.targets.resources import Resources from pants.backend.jvm.targets.jar_library import JarLibrary -from pants.backend.jvm.tasks.jvm_compile.jvm_compile_isolated_strategy import create_size_estimators from pants.backend.jvm.tasks.jvm_dependency_analyzer import JvmDependencyAnalyzer from pants.base.build_environment import get_buildroot from pants.base.target import Target diff --git a/src/python/pants/base/BUILD b/src/python/pants/base/BUILD index e9ec1eac18b..137ab6947a9 100644 --- a/src/python/pants/base/BUILD +++ b/src/python/pants/base/BUILD @@ -161,6 +161,14 @@ python_library( ] ) +python_library( + name = 'dep_lookup_error', + dependencies = [ + ':address_lookup_error', + ], + sources = ['dep_lookup_error.py'], +) + python_library( name = 'deprecated', sources = ['deprecated.py'], diff --git a/src/python/pants/base/dep_lookup_error.py b/src/python/pants/base/dep_lookup_error.py new file mode 100644 index 00000000000..edb3550fd6a --- /dev/null +++ b/src/python/pants/base/dep_lookup_error.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ +from __future__ import (absolute_import, division, generators, nested_scopes, print_function, + unicode_literals, with_statement) + +from pants.base.address_lookup_error import AddressLookupError + + +class DepLookupError(AddressLookupError): + """Thrown when a dependency can't be found.""" diff --git a/testprojects/src/java/org/pantsbuild/testproject/shading/BUILD b/testprojects/src/java/org/pantsbuild/testproject/shading/BUILD index 87f3753e491..0003d26ed9c 100644 --- a/testprojects/src/java/org/pantsbuild/testproject/shading/BUILD +++ b/testprojects/src/java/org/pantsbuild/testproject/shading/BUILD @@ -6,8 +6,6 @@ jvm_binary(name='shading', basename='shading', dependencies=[ ':lib', - 'testprojects/src/java/org/pantsbuild/testproject/shadingdep', - 'testprojects/src/java/org/pantsbuild/testproject/shadingdep:other', ], shading_rules=[ shading_exclude('org.pantsbuild.testproject.shadingdep.PleaseDoNotShadeMe'), @@ -20,6 +18,10 @@ jvm_binary(name='shading', ) java_library(name='lib', + dependencies=[ + 'testprojects/src/java/org/pantsbuild/testproject/shadingdep', + 'testprojects/src/java/org/pantsbuild/testproject/shadingdep:other', + ], sources=[ 'Main.java', 'ShadeSelf.java', diff --git a/testprojects/tests/scala/org/pantsbuild/testproject/cp-directories/BUILD b/testprojects/tests/scala/org/pantsbuild/testproject/cp-directories/BUILD new file mode 100644 index 00000000000..36b1baebf33 --- /dev/null +++ b/testprojects/tests/scala/org/pantsbuild/testproject/cp-directories/BUILD @@ -0,0 +1,11 @@ +# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +junit_tests( + name = 'cp-directories', + dependencies = [ + '3rdparty:junit', + '3rdparty:scalatest', + ], + sources=globs('*.scala'), +) diff --git a/testprojects/tests/scala/org/pantsbuild/testproject/cp-directories/ClasspathDirectories.scala b/testprojects/tests/scala/org/pantsbuild/testproject/cp-directories/ClasspathDirectories.scala new file mode 100644 index 00000000000..e88156f7315 --- /dev/null +++ b/testprojects/tests/scala/org/pantsbuild/testproject/cp-directories/ClasspathDirectories.scala @@ -0,0 +1,26 @@ +// Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). +// Licensed under the Apache License, Version 2.0 (see LICENSE). + +package org.pantsbuild.testproject.cp_directories + +import org.junit.runner.RunWith +import org.scalatest.WordSpec +import org.scalatest.junit.JUnitRunner +import org.scalatest.MustMatchers + +/** + * A test that confirms it can fetch the directory entry for its own package. This confirms + * that the classpath provided to the test contains directories, which may not always be true + * for jars. 
+ */ +@RunWith(classOf[JUnitRunner]) +class ClasspathDirectories extends WordSpec with MustMatchers { + val thisPackage = this.getClass.getCanonicalName.split('.').dropRight(1).mkString(".") + + "ClasspathDirectories" should { + "see its own package as a directory on the classpath" in { + val packageResource = "/" + thisPackage.replace('.', '/') + Option(this.getClass.getResource(packageResource)) mustBe defined + } + } +} diff --git a/tests/python/pants_test/backend/core/tasks/BUILD b/tests/python/pants_test/backend/core/tasks/BUILD index 0e9616d4789..235ca641cbc 100644 --- a/tests/python/pants_test/backend/core/tasks/BUILD +++ b/tests/python/pants_test/backend/core/tasks/BUILD @@ -30,7 +30,7 @@ python_tests( sources=['test_cache_cleanup.py'], coverage=['pants.backend.core.tasks.bash_completion'], dependencies=[ - 'src/python/pants/backend/jvm/tasks/jvm_compile:java', + 'src/python/pants/backend/jvm/tasks/jvm_compile:zinc', 'src/python/pants/util:dirutil', 'tests/python/pants_test:int-test', 'src/python/pants/util:contextutil', @@ -127,4 +127,4 @@ python_tests( 'src/python/pants/base:target', 'tests/python/pants_test/tasks:task_test_base', ] -) \ No newline at end of file +) diff --git a/tests/python/pants_test/backend/core/tasks/test_cache_cleanup.py b/tests/python/pants_test/backend/core/tasks/test_cache_cleanup.py index 63ac98f863c..0bb484e3483 100644 --- a/tests/python/pants_test/backend/core/tasks/test_cache_cleanup.py +++ b/tests/python/pants_test/backend/core/tasks/test_cache_cleanup.py @@ -7,7 +7,7 @@ import os -from pants.backend.jvm.tasks.jvm_compile.java.java_compile import JmakeCompile +from pants.backend.jvm.tasks.jvm_compile.zinc.zinc_compile import ZincCompile from pants.util.contextutil import temporary_dir from pants.util.dirutil import touch from pants_test.pants_run_integration_test import PantsRunIntegrationTest @@ -21,10 +21,10 @@ def create_platform_args(self, version): '--jvm-platform-default-platform=default'] def test_leave_one(self): - """ Ensure that max-old of 1 removes all but one files""" + """Ensure that max-old of 1 removes all but one files""" with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, JmakeCompile.stable_name(), + artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main') touch(os.path.join(artifact_dir, 'old_cache_test1')) @@ -33,7 +33,7 @@ def test_leave_one(self): touch(os.path.join(artifact_dir, 'old_cache_test4')) touch(os.path.join(artifact_dir, 'old_cache_test5')) - config = {'cache.compile.java': {'write_to': [cache_dir]}} + config = {'cache.compile.zinc': {'write_to': [cache_dir]}} pants_run = self.run_pants(self.create_platform_args(6) + ['compile.java', @@ -57,14 +57,14 @@ def test_leave_one(self): self.assertEqual(len(os.listdir(artifact_dir)), 1) def test_leave_none(self): - """ Ensure that max-old of zero removes all files + """Ensure that max-old of zero removes all files This test should ensure that conditional doesn't change to the simpler test of if max_old since we need to handle zero as well. 
""" with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, JmakeCompile.stable_name(), + artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main') touch(os.path.join(artifact_dir, 'old_cache_test1')) @@ -73,7 +73,7 @@ def test_leave_none(self): touch(os.path.join(artifact_dir, 'old_cache_test4')) touch(os.path.join(artifact_dir, 'old_cache_test5')) - config = {'cache.compile.java': {'write_to': [cache_dir]}} + config = {'cache.compile.zinc': {'write_to': [cache_dir]}} pants_run = self.run_pants(self.create_platform_args(6) + ['compile.java', diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/BUILD b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/BUILD index 6a12f446118..4b95b12ebda 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/BUILD +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/BUILD @@ -10,8 +10,8 @@ target( ) python_tests( - name='isolation_cache_hit_callback', - sources=['test_isolation_cache_hit_callback.py'], + name='cache_hit_callback', + sources=['test_cache_hit_callback.py'], dependencies=[ 'src/python/pants/backend/jvm/tasks/jvm_compile', 'src/python/pants/util:contextutil', diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/base_compile_integration_test.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/base_compile_integration_test.py index e8c3b5dad1e..61c7c0594a4 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/base_compile_integration_test.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/base_compile_integration_test.py @@ -16,7 +16,7 @@ class BaseCompileIT(PantsRunIntegrationTest): @contextmanager - def do_test_compile(self, target, strategy, + def do_test_compile(self, target, expected_files=None, iterations=2, expect_failure=False, extra_args=None): """Runs a configurable number of iterations of compilation for the given target. @@ -26,7 +26,7 @@ def do_test_compile(self, target, strategy, with temporary_dir(root_dir=self.workdir_root()) as cachedir: for i in xrange(0, iterations): pants_run = self.run_test_compile(workdir, cachedir, target, - strategy, clean_all=(i == 0), + clean_all=(i == 0), extra_args=extra_args) if expect_failure: self.assert_failure(pants_run) @@ -48,18 +48,12 @@ def do_test_compile(self, target, strategy, to_find, '\n'.join(sorted(workdir_files)))) yield found - def run_test_compile(self, workdir, cachedir, target, strategy, clean_all=False, extra_args=None): + def run_test_compile(self, workdir, cachedir, target, clean_all=False, extra_args=None): global_args = [ '--cache-write', '--cache-write-to=[\'{}\']'.format(cachedir), ] - args = [ - 'compile', - '--compile-apt-strategy={}'.format(strategy), - '--compile-java-strategy={}'.format(strategy), - '--compile-zinc-strategy={}'.format(strategy), - target, - ] + (extra_args if extra_args else []) + args = ['compile', target] + (extra_args if extra_args else []) # Clean-all on the first iteration. 
if clean_all: args.insert(0, 'clean-all') diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/BUILD b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/BUILD index 53fdab04fce..7175e94804c 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/BUILD +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/BUILD @@ -51,7 +51,7 @@ python_tests( name='java_compile_integration', sources=['test_java_compile_integration.py'], dependencies=[ - 'src/python/pants/backend/jvm/tasks/jvm_compile:java', + 'src/python/pants/backend/jvm/tasks/jvm_compile:zinc', 'src/python/pants/fs', 'src/python/pants/util:contextutil', 'src/python/pants/util:dirutil', diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/jvm_platform_integration_mixin.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/jvm_platform_integration_mixin.py index b4d402b61be..309b8a53efd 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/jvm_platform_integration_mixin.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/jvm_platform_integration_mixin.py @@ -12,7 +12,6 @@ from pants.fs.archive import ZIP from pants.util.contextutil import temporary_dir -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies class JvmPlatformIntegrationMixin(object): @@ -57,7 +56,7 @@ def _get_jar_class_versions(self, jarname): class_to_version[os.path.relpath(path, tempdir)] = self.determine_version(path) return class_to_version - def _get_compiled_class_versions(self, strategy, spec, more_args=None): + def _get_compiled_class_versions(self, spec, more_args=None): more_args = more_args or [] jar_name = os.path.basename(spec) while jar_name.endswith(':'): @@ -69,7 +68,7 @@ def _get_compiled_class_versions(self, strategy, spec, more_args=None): with temporary_dir(root_dir=self.workdir_root()) as workdir: pants_run = self.run_pants_with_workdir( ['binary'] + self.get_pants_compile_args() - + ['--strategy={}'.format(strategy), 'compile.checkstyle', '--skip', spec] + + ['compile.checkstyle', '--skip', spec] + more_args, workdir, config) self.assert_success(pants_run) @@ -82,40 +81,36 @@ def format_dict(d): 'Compiled class versions differed.\n expected: {}\n received: {}' .format(format_dict(expected), format_dict(received))) - @provide_compile_strategies - def test_compile_java6(self, strategy): + def test_compile_java6(self): target_spec = 'testprojects/src/java/org/pantsbuild/testproject/targetlevels/java6' self.assert_class_versions({ 'org/pantsbuild/testproject/targetlevels/java6/Six.class': '1.6', - }, self._get_compiled_class_versions(strategy, target_spec)) + }, self._get_compiled_class_versions(target_spec)) - @provide_compile_strategies - def test_compile_java7(self, strategy): + def test_compile_java7(self): target_spec = 'testprojects/src/java/org/pantsbuild/testproject/targetlevels/java7' self.assert_class_versions({ 'org/pantsbuild/testproject/targetlevels/java7/Seven.class': '1.7', - }, self._get_compiled_class_versions(strategy, target_spec)) + }, self._get_compiled_class_versions(target_spec)) - @provide_compile_strategies - def test_compile_java7on6(self, strategy): + def test_compile_java7on6(self): target_spec = 'testprojects/src/java/org/pantsbuild/testproject/targetlevels/java7on6' self.assert_class_versions({ 'org/pantsbuild/testproject/targetlevels/java7on6/SevenOnSix.class': '1.7', 'org/pantsbuild/testproject/targetlevels/java6/Six.class': '1.6', - }, self._get_compiled_class_versions(strategy, 
target_spec)) + }, self._get_compiled_class_versions(target_spec)) - @provide_compile_strategies - def test_compile_target_coercion(self, strategy): + def test_compile_target_coercion(self): target_spec = 'testprojects/src/java/org/pantsbuild/testproject/targetlevels/unspecified' self.assert_class_versions({ 'org/pantsbuild/testproject/targetlevels/unspecified/Unspecified.class': '1.7', 'org/pantsbuild/testproject/targetlevels/unspecified/Six.class': '1.6', - }, self._get_compiled_class_versions(strategy, target_spec, more_args=[ + }, self._get_compiled_class_versions(target_spec, more_args=[ '--jvm-platform-validate-check=warn', '--jvm-platform-default-platform=java7', ])) - def _test_compile(self, target_level, class_name, source_contents, strategy, platform_args=None): + def _test_compile(self, target_level, class_name, source_contents, platform_args=None): with temporary_dir(root_dir=os.path.abspath('.')) as tmpdir: with open(os.path.join(tmpdir, 'BUILD'), 'w') as f: f.write(dedent(''' @@ -139,42 +134,39 @@ def _test_compile(self, target_level, class_name, source_contents, strategy, pla command.extend(['--jvm-platform-platforms={}'.format(platforms), '--jvm-platform-default-platform={}'.format(target_level)]) command.extend(self.get_pants_compile_args()) - command.extend(['--strategy={}'.format(strategy), tmpdir]) + command.extend([tmpdir]) pants_run = self.run_pants(command) return pants_run - @provide_compile_strategies - def test_compile_diamond_operator_java7_works(self, strategy): + def test_compile_diamond_operator_java7_works(self): pants_run = self._test_compile('1.7', 'Diamond', dedent(''' public class Diamond { public static void main(String[] args) { Diamond diamond = new Diamond<>(); } } - '''), strategy) + ''')) self.assert_success(pants_run) - @provide_compile_strategies - def test_compile_diamond_operator_java6_fails(self, strategy): + def test_compile_diamond_operator_java6_fails(self): pants_run = self._test_compile('1.6', 'Diamond', dedent(''' public class Diamond { public static void main(String[] args) { Diamond diamond = new Diamond<>(); } } - '''), strategy) + ''')) self.assert_failure(pants_run) - @provide_compile_strategies - def test_compile_with_javac_args(self, strategy): + def test_compile_with_javac_args(self): pants_run = self._test_compile('1.7', 'LintyDiamond', dedent(''' public class LintyDiamond { public static void main(String[] args) { LintyDiamond diamond = new LintyDiamond<>(); } } - '''), strategy, platform_args=['-C-Xlint:cast']) + '''), platform_args=['-C-Xlint:cast']) self.assert_success(pants_run) def test_compile_stale_platform_settings(self): @@ -209,8 +201,7 @@ def compile_diamond(platform): '--jvm-platform-default-platform={}'.format(platform), '-ldebug', 'compile'] + self.get_pants_compile_args() + - ['--strategy=isolated', - '{}:diamond'.format(tmpdir)], workdir=workdir) + ['{}:diamond'.format(tmpdir)], workdir=workdir) # We shouldn't be able to compile this with -source=6. 
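
# Illustrative sketch of how a test can check the bytecode level a class was compiled to,
# as the platform assertions above rely on: a classfile's header stores its minor and
# major versions directly after the magic number (major 50 -> '1.6', 51 -> '1.7'). This is
# a guess at what determine_version() could do, not its actual implementation.
import struct

def classfile_version(path):
  with open(path, 'rb') as f:
    magic, minor, major = struct.unpack('>IHH', f.read(8))
  if magic != 0xCAFEBABE:
    raise ValueError('{} is not a classfile'.format(path))
  return '1.{}'.format(major - 44)
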
self.assert_failure(compile_diamond('java6'), 'Diamond.java was compiled successfully with ' diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_apt_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_apt_compile_integration.py index afa3840dece..31e7f8bbdba 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_apt_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_apt_compile_integration.py @@ -7,15 +7,12 @@ from pants.util.contextutil import temporary_dir from pants_test.backend.jvm.tasks.jvm_compile.base_compile_integration_test import BaseCompileIT -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies class AptCompileIntegrationTest(BaseCompileIT): - @provide_compile_strategies - def test_apt_compile(self, strategy): + def test_apt_compile(self): with self.do_test_compile('testprojects/src/java/org/pantsbuild/testproject/annotation/processor', - strategy, expected_files=['ResourceMappingProcessor.class', 'javax.annotation.processing.Processor']) as found: @@ -33,10 +30,8 @@ def test_apt_compile(self, strategy): self.assertEqual('org.pantsbuild.testproject.annotation.processor.ResourceMappingProcessor', fp.read().strip()) - @provide_compile_strategies - def test_apt_compile_and_run(self, strategy): + def test_apt_compile_and_run(self): with self.do_test_compile('testprojects/src/java/org/pantsbuild/testproject/annotation/main', - strategy, expected_files=['Main.class', 'deprecation_report.txt']) as found: @@ -50,8 +45,7 @@ def test_apt_compile_and_run(self, strategy): with open(self.get_only(found, 'deprecation_report.txt')) as fp: self.assertIn('org.pantsbuild.testproject.annotation.main.Main', fp.read().splitlines()) - @provide_compile_strategies - def test_stale_apt_with_deps(self, strategy): + def test_stale_apt_with_deps(self): """An annotation processor with a dependency doesn't pollute other annotation processors. 
At one point, when you added an annotation processor, it stayed configured for all subsequent @@ -62,7 +56,6 @@ def test_stale_apt_with_deps(self, strategy): # Demonstrate that the annotation processor is working with self.do_test_compile( 'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep/main', - strategy, expected_files=['Main.class', 'Main_HelloWorld.class', 'Main_HelloWorld.java']) as found: gen_file = self.get_only(found, 'Main_HelloWorld.java') self.assertTrue(gen_file.endswith( @@ -77,8 +70,7 @@ def test_stale_apt_with_deps(self, strategy): self.assert_success(self.run_test_compile( workdir, cachedir, - 'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep::', - strategy)) + 'testprojects/src/java/org/pantsbuild/testproject/annotation/processorwithdep::')) # When we run a second compile with annotation processors, make sure the previous annotation # processor doesn't stick around to spoil the compile @@ -86,5 +78,4 @@ def test_stale_apt_with_deps(self, strategy): workdir, cachedir, 'testprojects/src/java/org/pantsbuild/testproject/annotation/processor::', - strategy, clean_all=False)) diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py index a92efa7b7d3..be9a6224865 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_cache_compile_integration.py @@ -15,10 +15,8 @@ class CacheCompileIntegrationTest(BaseCompileIT): - def run_compile(self, target_spec, config, strategy, workdir, tool_name): - args = ['compile', - 'compile.{}'.format(tool_name), '--strategy={}'.format(strategy), - '--partition-size-hint=1', target_spec] + def run_compile(self, target_spec, config, workdir, tool_name): + args = ['compile', target_spec] if tool_name == 'zinc': args.append('--no-compile-java-use-jmake') @@ -44,6 +42,7 @@ def _do_test_stale_artifacts_rmd_when_cache_used(self, tool_name): config = { 'cache.compile.{}'.format(tool_name): {'write_to': [cache_dir], 'read_from': [cache_dir]}, + 'compile.java': {'use_jmake': tool_name == 'java' }, } self.create_file(os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'A.java'), @@ -59,14 +58,14 @@ class Main {}""")) 'cachetest:cachetest') # Caches values A.class, Main.class - self.run_compile(cachetest_spec, config, 'isolated', workdir, tool_name) + self.run_compile(cachetest_spec, config, workdir, tool_name) self.create_file(os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'A.java'), dedent("""package org.pantsbuild.cachetest; class A {} class NotMain {}""")) # Caches values A.class, NotMain.class and leaves them on the filesystem - self.run_compile(cachetest_spec, config, 'isolated', workdir, tool_name) + self.run_compile(cachetest_spec, config, workdir, tool_name) self.create_file(os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'A.java'), dedent("""package org.pantsbuild.cachetest; @@ -74,7 +73,7 @@ class A {} class Main {}""")) # Should cause NotMain.class to be removed - self.run_compile(cachetest_spec, config, 'isolated', workdir, tool_name) + self.run_compile(cachetest_spec, config, workdir, tool_name) cachetest_id = cachetest_spec.replace(':', '.').replace(os.sep, '.') diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py 
b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py index 32ea237997f..5929205fbbc 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_integration.py @@ -7,43 +7,16 @@ import os -from pants.backend.jvm.tasks.jvm_compile.java.java_compile import JmakeCompile -from pants.backend.jvm.tasks.jvm_compile.java.jmake_analysis_parser import JMakeAnalysisParser +from pants.backend.jvm.tasks.jvm_compile.zinc.zinc_compile import ZincCompile from pants.fs.archive import TarArchiver from pants.util.contextutil import temporary_dir from pants.util.dirutil import safe_walk from pants_test.backend.jvm.tasks.jvm_compile.base_compile_integration_test import BaseCompileIT -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies class JavaCompileIntegrationTest(BaseCompileIT): - def _java_compile_produces_valid_analysis_file(self, workdir): - # A bug was introduced where if a java compile was run twice, the second - # time the global_analysis.valid file would incorrectly be empty. - - pants_run = self.run_pants_with_workdir([ - 'compile', - 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'], - workdir) - self.assert_success(pants_run) - - # Parse the analysis file from the compilation. - analysis_file = os.path.join(workdir, 'compile', 'jvm', 'java', 'analysis', - 'global_analysis.valid') - parser = JMakeAnalysisParser() - analysis = parser.parse_from_path(analysis_file) - - # Ensure we have entries in the analysis file. - self.assertEquals(len(analysis.pcd_entries), 2) - - def test_java_compile_produces_valid_analysis_file_second_time(self): - # Run the test above twice to ensure it works both times. 
- with temporary_dir(root_dir=self.workdir_root()) as workdir: - self._java_compile_produces_valid_analysis_file(workdir) - - @provide_compile_strategies - def test_resources_by_target_and_partitions(self, strategy): + def test_resources_by_target_and_partitions(self): """ This tests that resources_by_target interacts correctly with partitions; we want to make sure that even targets that are outside @@ -55,25 +28,23 @@ def test_resources_by_target_and_partitions(self, strategy): with temporary_dir(root_dir=self.workdir_root()) as workdir: pants_run = self.run_pants_with_workdir( - ['compile', 'compile.java', '--strategy={}'.format(strategy), '--partition-size-hint=1', + ['compile', 'compile.java', 'testprojects/src/java/org/pantsbuild/testproject/publish/hello/main:', ], workdir, config) self.assert_success(pants_run) - @provide_compile_strategies - def test_nocache(self, strategy): + def test_nocache(self): with temporary_dir() as cache_dir: bad_artifact_dir = os.path.join(cache_dir, - JmakeCompile.stable_name(), + ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.nocache.nocache') good_artifact_dir = os.path.join(cache_dir, - JmakeCompile.stable_name(), + ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.nocache.cache_me') - config = {'cache.compile.java': {'write_to': [cache_dir]}} + config = {'cache.compile.zinc': {'write_to': [cache_dir]}} pants_run = self.run_pants(['compile.java', - '--strategy={}'.format(strategy), 'testprojects/src/java/org/pantsbuild/testproject/nocache::'], config) self.assert_success(pants_run) @@ -91,19 +62,17 @@ def create_platform_args(self, version): .format(version=version)), '--jvm-platform-default-platform=default'] - @provide_compile_strategies - def test_java_compile_produces_different_artifact_depending_on_java_version(self, strategy): + def test_java_compile_produces_different_artifact_depending_on_java_version(self): # Ensure that running java compile with java 6 and then java 7 # produces two different artifacts. with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, JmakeCompile.stable_name(), + artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.unicode.main.main') - config = {'cache.compile.java': {'write_to': [cache_dir]}} + config = {'cache.compile.zinc': {'write_to': [cache_dir]}} pants_run = self.run_pants(self.create_platform_args(6) + ['compile.java', - '--strategy={}'.format(strategy), 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'], config) self.assert_success(pants_run) @@ -114,7 +83,6 @@ def test_java_compile_produces_different_artifact_depending_on_java_version(self # Rerun for java 7 pants_run = self.run_pants(self.create_platform_args(7) + ['compile.java', - '--strategy={}'.format(strategy), 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'], config) self.assert_success(pants_run) @@ -122,21 +90,16 @@ def test_java_compile_produces_different_artifact_depending_on_java_version(self # One artifact for java 6 and one for 7 self.assertEqual(len(os.listdir(artifact_dir)), 2) - @provide_compile_strategies - def test_java_compile_reads_resource_mapping(self, strategy): + def test_java_compile_reads_resource_mapping(self): # Ensure that if an annotation processor produces a resource-mapping, # the artifact contains that resource mapping. 
with temporary_dir() as cache_dir: - artifact_dir = os.path.join(cache_dir, - JmakeCompile.stable_name(), + artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), 'testprojects.src.java.org.pantsbuild.testproject.annotation.main.main') - config = {'cache.compile.java': {'write_to': [cache_dir]}} + config = {'cache.compile.zinc': {'write_to': [cache_dir]}} - pants_run = self.run_pants(['compile.java', - '--strategy={}'.format(strategy), - 'compile.apt', - '--strategy={}'.format(strategy), + pants_run = self.run_pants(['compile', 'testprojects/src/java/org/pantsbuild/testproject/annotation/main'], config) self.assert_success(pants_run) @@ -167,12 +130,12 @@ def test_java_compile_reads_resource_mapping(self, strategy): 'org.pantsbuild.testproject.annotation.main.Main$TestInnerClass'}, set(annotated_classes)) - def _whitelist_test(self, target, whitelist_target, strategy, fatal_flag, args=None): + def _whitelist_test(self, target, whitelist_target, fatal_flag, args=None): """Ensure that a project missing dependencies fails if it is not whitelisted.""" # First check that without the whitelist we do break the build. extra_args = (args if args else []) + [fatal_flag] - with self.do_test_compile(target, strategy, extra_args=extra_args, expect_failure=True): + with self.do_test_compile(target, extra_args=extra_args, expect_failure=True): # run failed as expected pass @@ -181,52 +144,36 @@ def _whitelist_test(self, target, whitelist_target, strategy, fatal_flag, args=N fatal_flag, '--compile-jvm-dep-check-missing-deps-whitelist=["{}"]'.format(whitelist_target) ] - with self.do_test_compile(target, strategy, extra_args=extra_args): + with self.do_test_compile(target, extra_args=extra_args): # run succeeded as expected pass - def test_java_compile_missing_dep_analysis_whitelist(self): - self._whitelist_test( - 'testprojects/src/java/org/pantsbuild/testproject/missingdepswhitelist', - 'testprojects/src/java/org/pantsbuild/testproject/missingdepswhitelist2', - # NB: missing transitive deps are only possible with the global strategy - 'global', - '--compile-jvm-dep-check-missing-deps=fatal' - ) - - @provide_compile_strategies - def test_java_compile_missing_direct_dep_analysis_whitelist_jmake(self, strategy): + def test_java_compile_missing_direct_dep_analysis_whitelist_jmake(self): self._whitelist_test( 'testprojects/src/java/org/pantsbuild/testproject/missingdirectdepswhitelist', 'testprojects/src/java/org/pantsbuild/testproject/missingdirectdepswhitelist', - strategy, - '--compile-jvm-dep-check-missing-direct-deps=fatal' + '--compile-jvm-dep-check-missing-direct-deps=fatal', + # Use jmake. + args=['--compile-java-use-jmake'] ) - @provide_compile_strategies - def test_java_compile_missing_direct_dep_analysis_whitelist_zinc(self, strategy): + def test_java_compile_missing_direct_dep_analysis_whitelist_zinc(self): self._whitelist_test( 'testprojects/src/java/org/pantsbuild/testproject/missingdirectdepswhitelist', 'testprojects/src/java/org/pantsbuild/testproject/missingdirectdepswhitelist', - strategy, - '--compile-jvm-dep-check-missing-direct-deps=fatal', - # Use zinc. 
- args=['--no-compile-java-use-jmake'] + '--compile-jvm-dep-check-missing-direct-deps=fatal' ) - @provide_compile_strategies - def test_java_compile_missing_jar_dep_analysis_whitelist_zinc(self, strategy): + def test_java_compile_missing_jar_dep_analysis_whitelist_zinc(self): self._whitelist_test( 'testprojects/src/java/org/pantsbuild/testproject/missingjardepswhitelist', 'testprojects/src/java/org/pantsbuild/testproject/missingjardepswhitelist', - strategy, '--compile-jvm-dep-check-missing-direct-deps=fatal', # Use zinc. args=['--no-compile-java-use-jmake'] ) - @provide_compile_strategies - def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self, strategy): + def test_java_compile_with_different_resolved_jars_produce_different_artifacts(self): # Since unforced dependencies resolve to the highest version including transitive jars, # We want to ensure that running java compile with binary incompatible libraries will # produces two different artifacts. @@ -234,12 +181,11 @@ def test_java_compile_with_different_resolved_jars_produce_different_artifacts(s with temporary_dir(root_dir=self.workdir_root()) as workdir, temporary_dir() as cache_dir: path_prefix = 'testprojects/src/java/org/pantsbuild/testproject/jarversionincompatibility' dotted_path = path_prefix.replace(os.path.sep, '.') - artifact_dir = os.path.join(cache_dir, JmakeCompile.stable_name(), + artifact_dir = os.path.join(cache_dir, ZincCompile.stable_name(), '{}.jarversionincompatibility'.format(dotted_path)) - config = {'cache.compile.java': {'write_to': [cache_dir], 'read_from': [cache_dir]}} + config = {'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]}} pants_run = self.run_pants_with_workdir(['compile.java', - '--strategy={}'.format(strategy), ('{}:only-15-directly'.format(path_prefix))], workdir, config) @@ -250,7 +196,6 @@ def test_java_compile_with_different_resolved_jars_produce_different_artifacts(s # Rerun for guava 16 pants_run = self.run_pants_with_workdir(['compile.java', - '--strategy={}'.format(strategy), (u'{}:alongside-16'.format(path_prefix)), '-ldebug'], workdir, config) diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_settings_partitioning.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_settings_partitioning.py index 1b6115b7f68..8164a8c8aad 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_settings_partitioning.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_java_compile_settings_partitioning.py @@ -44,11 +44,7 @@ def _task_setup(self, targets, platforms=None, default_platform=None, **options) context = self.context(target_roots=targets) return self.create_task(context) - def _settings_and_targets(self, targets, ordered=False, **options): - if ordered: - # Only works in global! Isolated doesn't need this ordering. 
- task = self._task_setup(targets, strategy='global', **options) - return task._strategy.ordered_compile_settings_and_targets(targets) + def _settings_and_targets(self, targets, **options): self._task_setup(targets, **options) settings_and_targets = defaultdict(set) for target in targets: @@ -162,52 +158,3 @@ def test_compile_setting_inequivalence(self): self.assertNotEqual(JvmPlatformSettings('1.4', '1.6', ['-Xfoo:bar']), JvmPlatformSettings('1.6', '1.6', ['-Xfoo:bar'])) - - def test_chunks_respect_dependencies(self): - # Create the dependency tree: - # a <- b <-| - # c <-|<- e <- f <- g <- | <- i <- j <- k - # d <-| h <- | - a = self._java('a', platform='1.6') - b = self._java('b', platform='1.6', deps=[a]) - c = self._java('c', platform='1.6') - d = self._java('d', platform='1.7') - e = self._java('e', platform='1.7', deps=[b, c, d]) - f = self._java('f', platform='1.7', deps=[e]) - g = self._java('g', platform='1.8', deps=[f]) - h = self._java('h', platform='1.6') - i = self._java('i', platform='1.8', deps=[g]) - j = self._java('j', platform='1.8-bar', deps=[i]) - k = self._java('k', platform='1.8', deps=[j]) - - platforms = { - '1.6': {'source': '1.6'}, - '1.7': {'source': '1.7'}, - '1.8': {'source': '1.8'}, - '1.8-bar': {'source': '1.8', 'args': ['-Xfoo:bar']} - } - - def _settings(target_level, args=None): - return JvmPlatformSettings(target_level, target_level, args) - - def _format_chunk(chunk): - settings, targets = chunk - return '{}: ({})'.format(str(settings), ', '.join(sorted(t.address.spec for t in targets))) - - expected = [ - (_settings('1.6'), {a, b, c, h}), - (_settings('1.7'), {d, e, f}), - (_settings('1.8'), {g, i}), - (_settings('1.8', ['-Xfoo:bar']), {j}), - (_settings('1.8'), {k}) - ] - - settings_and_targets = self._settings_and_targets([a, b, c, d, e, f, g, h, i, j, k], - ordered=True, - platforms=platforms) - received = [(settings, set(targets)) for settings, targets in settings_and_targets] - - self.assertEqual(expected, received, 'Expected: {}\n\nReceived: {}'.format( - ''.join('\n {}'.format(_format_chunk(s)) for s in expected), - ''.join('\n {}'.format(_format_chunk(s)) for s in received), - )) diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py index 9cb546c6182..8647dce6374 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py @@ -7,41 +7,36 @@ from pants.util.contextutil import temporary_dir from pants_test.backend.jvm.tasks.jvm_compile.base_compile_integration_test import BaseCompileIT -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies class ZincCompileIntegrationTest(BaseCompileIT): def test_java_src_zinc_compile(self): - # TODO(stuhood): doesn't succeed stably with global+zinc - with self.do_test_compile('examples/src/java/::', 'isolated', extra_args=['--no-compile-java-use-jmake']): + with self.do_test_compile('examples/src/java/::', extra_args=['--no-compile-java-use-jmake']): # run succeeded as expected pass - @provide_compile_strategies - def test_java_tests_zinc_compile(self, strategy): - with self.do_test_compile('examples/tests/java/::', strategy, extra_args=['--no-compile-java-use-jmake']): + def test_java_tests_zinc_compile(self): + with self.do_test_compile('examples/tests/java/::', 
extra_args=['--no-compile-java-use-jmake']): # run succeeded as expected pass - @provide_compile_strategies - def test_in_process(self, strategy): + def test_in_process(self): with temporary_dir(root_dir=self.workdir_root()) as workdir: with temporary_dir(root_dir=self.workdir_root()) as cachedir: pants_run = self.run_test_compile( - workdir, cachedir, 'examples/src/java/org/pantsbuild/example/hello/main', strategy, + workdir, cachedir, 'examples/src/java/org/pantsbuild/example/hello/main', extra_args=['--no-compile-java-use-jmake', '-ldebug'], clean_all=True ) self.assertIn('Attempting to call com.sun.tools.javac.api.JavacTool', pants_run.stdout_data) self.assertNotIn('Forking javac', pants_run.stdout_data) - @provide_compile_strategies - def test_log_level(self, strategy): + def test_log_level(self): with temporary_dir(root_dir=self.workdir_root()) as workdir: with temporary_dir(root_dir=self.workdir_root()) as cachedir: target = 'testprojects/src/java/org/pantsbuild/testproject/dummies:compilation_failure_target' pants_run = self.run_test_compile( - workdir, cachedir, target, strategy, + workdir, cachedir, target, extra_args=['--no-compile-java-use-jmake', '--no-color'], clean_all=True ) self.assertIn('[warn] import sun.security.x509.X500Name;', pants_run.stdout_data) @@ -52,10 +47,9 @@ def test_unicode_source_symbol(self): with temporary_dir(root_dir=self.workdir_root()) as cachedir: target = 'testprojects/src/scala/org/pantsbuild/testproject/unicode/unicodedep/consumer' pants_run = self.run_test_compile( - workdir, cachedir, target, 'isolated', + workdir, cachedir, target, extra_args=[ '--compile-zinc-name-hashing', - '--config-override=pants.ini.isolated', '--cache-compile-zinc-write-to=["{}/dummy_artifact_cache_dir"]'.format(cachedir), ], clean_all=True, diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/test_isolation_cache_hit_callback.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/test_cache_hit_callback.py similarity index 75% rename from tests/python/pants_test/backend/jvm/tasks/jvm_compile/test_isolation_cache_hit_callback.py rename to tests/python/pants_test/backend/jvm/tasks/jvm_compile/test_cache_hit_callback.py index 5e610e2c2e3..bfe7256a3c7 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/test_isolation_cache_hit_callback.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/test_cache_hit_callback.py @@ -8,13 +8,12 @@ import os import unittest -from pants.backend.jvm.tasks.jvm_compile.jvm_compile_isolated_strategy import \ - IsolationCacheHitCallback +from pants.backend.jvm.tasks.jvm_compile.jvm_compile import CacheHitCallback from pants.util.contextutil import temporary_dir from pants.util.dirutil import touch -class IsolationCacheHitCallbackTest(unittest.TestCase): +class CacheHitCallbackTest(unittest.TestCase): def test_when_key_has_associated_directory_cleans_dir(self): with temporary_dir() as tmpdir: filename = os.path.join(tmpdir, 'deleted') @@ -22,5 +21,5 @@ def test_when_key_has_associated_directory_cleans_dir(self): key = 'some-key' cache_key_to_class_dir = {key: tmpdir} - IsolationCacheHitCallback(cache_key_to_class_dir)(key) + CacheHitCallback(cache_key_to_class_dir)(key) self.assertFalse(os.path.exists(filename)) diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/zinc/test_zinc_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/zinc/test_zinc_compile_integration.py index efa5e612f34..2451c4a7a5d 100644 --- 
a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/zinc/test_zinc_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/zinc/test_zinc_compile_integration.py @@ -9,38 +9,26 @@ from pants.util.contextutil import open_tar, open_zip, temporary_dir from pants_test.backend.jvm.tasks.jvm_compile.base_compile_integration_test import BaseCompileIT -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies -SHAPELESS_CLSNAME = 'ShapelessExample.class' SHAPELESS_CLSFILE = 'org/pantsbuild/testproject/unicode/shapeless/ShapelessExample.class' SHAPELESS_TARGET = 'testprojects/src/scala/org/pantsbuild/testproject/unicode/shapeless' class ZincCompileIntegrationTest(BaseCompileIT): - def test_scala_global_compile(self): - with self.do_test_compile(SHAPELESS_TARGET, - 'global', - expected_files=[SHAPELESS_CLSNAME]) as found: - - self.assertTrue(self.get_only(found, SHAPELESS_CLSNAME).endswith(SHAPELESS_CLSFILE)) - - def test_scala_isolated_compile_jar(self): + def test_scala_compile_jar(self): # NB: generated with: # hashlib.sha1('testprojects.src.scala.org.pantsbuild.testproject.unicode.shapeless.shapeless').hexdigest()[:12] jar_suffix = 'fd9f49e1153b.jar' with self.do_test_compile(SHAPELESS_TARGET, - 'isolated', expected_files=[jar_suffix]) as found: with open_zip(self.get_only(found, jar_suffix), 'r') as jar: self.assertTrue(jar.getinfo(SHAPELESS_CLSFILE), 'Expected a jar containing the expected class.') - @provide_compile_strategies - def test_scala_empty_compile(self, strategy): + def test_scala_empty_compile(self): with self.do_test_compile('testprojects/src/scala/org/pantsbuild/testproject/emptyscala', - strategy, expected_files=[]) as found: # no classes generated by this target pass @@ -48,35 +36,25 @@ def test_scala_empty_compile(self, strategy): def test_scala_shared_sources(self): clsname = 'SharedSources.class' - def test(strategy, count): - with self.do_test_compile('testprojects/src/scala/org/pantsbuild/testproject/sharedsources::', - strategy, - expected_files=[clsname]) as found: - classes = found[clsname] - self.assertEqual(count, len(classes)) - for cls in classes: - self.assertTrue(cls.endswith( - 'org/pantsbuild/testproject/sharedsources/SharedSources.class')) - - # We expect a single output class for the global strategy. - test('global', 1) - # But the isolated strategy should result in a class per target. - test('isolated', 2) - - def test_scala_isolated_failure(self): + with self.do_test_compile('testprojects/src/scala/org/pantsbuild/testproject/sharedsources::', + expected_files=[clsname]) as found: + classes = found[clsname] + self.assertEqual(2, len(classes)) + for cls in classes: + self.assertTrue(cls.endswith( + 'org/pantsbuild/testproject/sharedsources/SharedSources.class')) + + def test_scala_failure(self): """With no initial analysis, a failed compilation shouldn't leave anything behind.""" analysis_file = 'testprojects.src.scala.' 
\ 'org.pantsbuild.testproject.compilation_failure.compilation_failure.analysis' with self.do_test_compile('testprojects/src/scala/org/pantsbuild/testprojects/compilation_failure', - 'isolated', expected_files=[analysis_file], expect_failure=True) as found: self.assertEqual(0, len(found[analysis_file])) - @provide_compile_strategies - def test_scala_with_java_sources_compile(self, strategy): + def test_scala_with_java_sources_compile(self): with self.do_test_compile('testprojects/src/scala/org/pantsbuild/testproject/javasources', - strategy, expected_files=['ScalaWithJavaSources.class', 'JavaSource.class']) as found: @@ -88,10 +66,8 @@ def test_scala_with_java_sources_compile(self, strategy): self.get_only(found, 'JavaSource.class').endswith( 'org/pantsbuild/testproject/javasources/JavaSource.class')) - @provide_compile_strategies - def test_scalac_plugin_compile(self, strategy): + def test_scalac_plugin_compile(self): with self.do_test_compile('testprojects/src/scala/org/pantsbuild/testproject/scalac/plugin', - strategy, expected_files=['HelloScalac.class', 'scalac-plugin.xml']) as found: self.assertTrue( @@ -113,7 +89,6 @@ def test_zinc_unsupported_option(self): workdir, cachedir, 'testprojects/src/scala/org/pantsbuild/testproject/emptyscala', - 'isolated', extra_args=[ '--compile-zinc-args=-recompile-all-fraction', '--compile-zinc-args=0.5', diff --git a/tests/python/pants_test/backend/jvm/tasks/test_binary_create_integration.py b/tests/python/pants_test/backend/jvm/tasks/test_binary_create_integration.py index 28475f7c5b5..47fe6b42a1d 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_binary_create_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_binary_create_integration.py @@ -14,10 +14,9 @@ class BinaryCreateIntegrationTest(PantsRunIntegrationTest): - def test_autovalue_isolated_classfiles(self): + def test_autovalue_classfiles(self): self.build_and_run( - pants_args=['binary', '--compile-java-strategy=isolated', - 'examples/src/java/org/pantsbuild/example/autovalue'], + pants_args=['binary', 'examples/src/java/org/pantsbuild/example/autovalue'], rel_out_path='dist', java_args=['-jar', 'autovalue.jar'], expected_output='Hello Autovalue!' diff --git a/tests/python/pants_test/option/test_options_bootstrapper.py b/tests/python/pants_test/option/test_options_bootstrapper.py index 7cc3fbbe69f..ef619cc1696 100644 --- a/tests/python/pants_test/option/test_options_bootstrapper.py +++ b/tests/python/pants_test/option/test_options_bootstrapper.py @@ -125,11 +125,11 @@ def test_create_bootstrapped_options(self): def test_create_bootstrapped_multiple_config_override(self): # check with multiple config files, the latest values always get taken - # in this case strategy will be overwritten, while fruit stays the same + # in this case worker_count will be overwritten, while fruit stays the same with temporary_file() as fp: fp.write(dedent(""" [compile.apt] - strategy: global + worker_count: 1 [fruit] apple: red @@ -145,16 +145,16 @@ def test_create_bootstrapped_multiple_config_override(self): ScopeInfo('fruit', ScopeInfo.TASK), ]) opts_single_config.register('', '--config-override') # So we don't choke on it on the cmd line. 
- opts_single_config.register('compile.apt', '--strategy') + opts_single_config.register('compile.apt', '--worker-count') opts_single_config.register('fruit', '--apple') - self.assertEquals('global', opts_single_config.for_scope('compile.apt').strategy) + self.assertEquals('1', opts_single_config.for_scope('compile.apt').worker_count) self.assertEquals('red', opts_single_config.for_scope('fruit').apple) with temporary_file() as fp2: fp2.write(dedent(""" [compile.apt] - strategy: isolated + worker_count: 2 """)) fp2.close() @@ -169,10 +169,10 @@ def test_create_bootstrapped_multiple_config_override(self): ScopeInfo('fruit', ScopeInfo.TASK), ]) opts_double_config.register('', '--config-override') # So we don't choke on it on the cmd line. - opts_double_config.register('compile.apt', '--strategy') + opts_double_config.register('compile.apt', '--worker-count') opts_double_config.register('fruit', '--apple') - self.assertEquals('isolated', opts_double_config.for_scope('compile.apt').strategy) + self.assertEquals('2', opts_double_config.for_scope('compile.apt').worker_count) self.assertEquals('red', opts_double_config.for_scope('fruit').apple) def test_full_options_caching(self): diff --git a/tests/python/pants_test/projects/base_project_integration_test.py b/tests/python/pants_test/projects/base_project_integration_test.py index 7c6655974fb..4d1d01b1cae 100644 --- a/tests/python/pants_test/projects/base_project_integration_test.py +++ b/tests/python/pants_test/projects/base_project_integration_test.py @@ -16,10 +16,5 @@ def _android_flags(): exclude_android = os.environ.get('SKIP_ANDROID') == "true" or not os.environ.get('ANDROID_HOME') return ['--exclude-target-regexp=.*android.*'] if exclude_android else [] - def pants_test(self, strategy, command): - return self.run_pants([ - 'test' - '--compile-apt-strategy={}'.format(strategy), - '--compile-java-strategy={}'.format(strategy), - '--compile-zinc-strategy={}'.format(strategy), - ] + command + self._android_flags()) + def pants_test(self, command): + return self.run_pants(['test'] + command + self._android_flags()) diff --git a/tests/python/pants_test/projects/test_examples_integration.py b/tests/python/pants_test/projects/test_examples_integration.py index d9c77dbfaad..c1faf7b7da1 100644 --- a/tests/python/pants_test/projects/test_examples_integration.py +++ b/tests/python/pants_test/projects/test_examples_integration.py @@ -6,11 +6,9 @@ unicode_literals, with_statement) from pants_test.projects.base_project_integration_test import ProjectIntegrationTest -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies class ExamplesIntegrationTest(ProjectIntegrationTest): - @provide_compile_strategies - def tests_examples(self, strategy): - pants_run = self.pants_test(strategy, ['examples::']) + def tests_examples(self): + pants_run = self.pants_test(['examples::']) self.assert_success(pants_run) diff --git a/tests/python/pants_test/projects/test_testprojects_integration.py b/tests/python/pants_test/projects/test_testprojects_integration.py index 8feed951938..20813c3bc08 100644 --- a/tests/python/pants_test/projects/test_testprojects_integration.py +++ b/tests/python/pants_test/projects/test_testprojects_integration.py @@ -6,13 +6,11 @@ unicode_literals, with_statement) from pants_test.projects.base_project_integration_test import ProjectIntegrationTest -from pants_test.testutils.compile_strategy_utils import provide_compile_strategies class TestProjectsIntegrationTest(ProjectIntegrationTest): - @provide_compile_strategies 
- def tests_testprojects(self, strategy): + def tests_testprojects(self): # TODO(Eric Ayers) find a better way to deal with tests that are known to fail. # right now, just split them into two categories and ignore them. @@ -30,16 +28,25 @@ def tests_testprojects(self, strategy): 'testprojects/src/java/org/pantsbuild/testproject/bundle:missing-files', 'testprojects/src/java/org/pantsbuild/testproject/cycle1', 'testprojects/src/java/org/pantsbuild/testproject/cycle2', + 'testprojects/src/java/org/pantsbuild/testproject/dummies:compilation_failure_target', 'testprojects/src/java/org/pantsbuild/testproject/missingdepswhitelist.*', 'testprojects/src/python/antlr:test_antlr_failure', 'testprojects/src/scala/org/pantsbuild/testproject/compilation_failure', 'testprojects/src/thrift/org/pantsbuild/thrift_linter:', - 'testprojects/tests/java/org/pantsbuild/testproject/empty:', 'testprojects/tests/java/org/pantsbuild/testproject/dummies:failing_target', + 'testprojects/tests/java/org/pantsbuild/testproject/empty:', 'testprojects/tests/python/pants/dummies:failing_target', ] - targets_to_exclude = known_failing_targets + negative_test_targets + # May not succeed without java8 installed + need_java_8 = [ + 'testprojects/src/java/org/pantsbuild/testproject/targetlevels/java8', + 'testprojects/tests/java/org/pantsbuild/testproject/testjvms', + 'testprojects/tests/java/org/pantsbuild/testproject/testjvms:eight', + 'testprojects/tests/java/org/pantsbuild/testproject/testjvms:eight-test-platform', + ] + + targets_to_exclude = known_failing_targets + negative_test_targets + need_java_8 exclude_opts = map(lambda target: '--exclude-target-regexp={}'.format(target), targets_to_exclude) - pants_run = self.pants_test(strategy, ['testprojects::'] + exclude_opts) + pants_run = self.pants_test(['testprojects::'] + exclude_opts) self.assert_success(pants_run) diff --git a/tests/python/pants_test/reporting/test_reporting_integration.py b/tests/python/pants_test/reporting/test_reporting_integration.py index b9c0924aca3..6c1f7edf3f2 100644 --- a/tests/python/pants_test/reporting/test_reporting_integration.py +++ b/tests/python/pants_test/reporting/test_reporting_integration.py @@ -17,9 +17,9 @@ _REPORT_LOCATION = 'reports/latest/invalidation-report.csv' _ENTRY = re.compile(ur'^\d+,\S+,(init|pre-check|post-check),(True|False)') -_INIT = re.compile(ur'^\d+,JmakeCompile,\w+,\S+,init,(True|False)') -_POST = re.compile(ur'^\d+,JmakeCompile,\w+,\S+,post-check,(True|False)') -_PRE = re.compile(ur'^\d+,JmakeCompile,\w+,\S+,pre-check,(True|False)') +_INIT = re.compile(ur'^\d+,ZincCompile,\w+,\S+,init,(True|False)') +_POST = re.compile(ur'^\d+,ZincCompile,\w+,\S+,post-check,(True|False)') +_PRE = re.compile(ur'^\d+,ZincCompile,\w+,\S+,pre-check,(True|False)') class TestReportingIntegrationTest(PantsRunIntegrationTest, unittest.TestCase): @@ -49,8 +49,9 @@ def test_invalidation_report_output(self): DEBUG_LEVEL_COMPILE_MSG='compile(examples/src/java/org/pantsbuild/example/hello/simple:simple) finished with status Successful' def test_ouput_level_warn(self): - command = ['compile', '--compile-java-strategy=isolated', + command = ['compile', 'examples/src/java/org/pantsbuild/example/hello/simple', + '--compile-java-use-jmake', '--compile-java-level=warn'] pants_run = self.run_pants(command) self.assert_success(pants_run) @@ -58,8 +59,9 @@ def test_ouput_level_warn(self): self.assertFalse(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data) def test_output_level_info(self): - command = ['compile', '--compile-java-strategy=isolated', 
+ command = ['compile', 'examples/src/java/org/pantsbuild/example/hello/simple', + '--compile-java-use-jmake', '--compile-java-level=info'] pants_run = self.run_pants(command) self.assert_success(pants_run) @@ -67,8 +69,9 @@ def test_output_level_info(self): self.assertFalse(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data) def test_output_level_debug(self): - command = ['compile', '--compile-java-strategy=isolated', + command = ['compile', 'examples/src/java/org/pantsbuild/example/hello/simple', + '--compile-java-use-jmake', '--compile-java-level=debug'] pants_run = self.run_pants(command) self.assert_success(pants_run) @@ -76,8 +79,9 @@ def test_output_level_debug(self): self.assertTrue(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data) def test_output_color_enabled(self): - command = ['compile', '--compile-java-strategy=isolated', + command = ['compile', 'examples/src/java/org/pantsbuild/example/hello/simple', + '--compile-java-use-jmake', '--compile-java-colors'] pants_run = self.run_pants(command) self.assert_success(pants_run) @@ -85,8 +89,9 @@ def test_output_color_enabled(self): def test_output_level_group_compile(self): """Set level with the scope 'compile' and see that it propagates to the task level.""" - command = ['compile', '--compile-java-strategy=isolated', + command = ['compile', 'examples/src/java/org/pantsbuild/example/hello/simple', + '--compile-java-use-jmake', '--compile-level=debug'] pants_run = self.run_pants(command) self.assert_success(pants_run) @@ -96,7 +101,7 @@ def test_output_level_group_compile(self): def test_default_console(self): command = ['compile', 'examples/src/java/org/pantsbuild/example/hello::', - '--compile-java-strategy=isolated'] + '--compile-java-use-jmake'] pants_run = self.run_pants(command) self.assert_success(pants_run) self.assertIn('Compiling 1 java source in 1 target (examples/src/java/org/pantsbuild/example/hello/greet:greet)', @@ -110,7 +115,7 @@ def test_suppress_compiler_output(self): 'examples/src/java/org/pantsbuild/example/hello::', '--reporting-console-label-format={ "COMPILER" : "SUPPRESS" }', '--reporting-console-tool-output-format={ "COMPILER" : "CHILD_SUPPRESS"}', - '--compile-java-strategy=isolated'] + '--compile-java-use-jmake'] pants_run = self.run_pants(command) self.assert_success(pants_run) self.assertIn('Compiling 1 java source in 1 target (examples/src/java/org/pantsbuild/example/hello/greet:greet)', @@ -125,7 +130,7 @@ def test_invalid_config(self): 'examples/src/java/org/pantsbuild/example/hello::', '--reporting-console-label-format={ "FOO" : "BAR" }', '--reporting-console-tool-output-format={ "BAZ" : "QUX"}', - '--compile-java-strategy=isolated'] + '--compile-java-use-jmake'] pants_run = self.run_pants(command) self.assert_success(pants_run) self.assertIn('*** Got invalid key FOO for --reporting-console-label-format. 
Expected one of [', pants_run.stdout_data) diff --git a/tests/python/pants_test/tasks/test_jar_publish.py b/tests/python/pants_test/tasks/test_jar_publish.py index e063821d44b..c747bd08e01 100644 --- a/tests/python/pants_test/tasks/test_jar_publish.py +++ b/tests/python/pants_test/tasks/test_jar_publish.py @@ -25,7 +25,6 @@ from pants.util.contextutil import temporary_dir from pants.util.dirutil import safe_mkdir, safe_walk from pants_test.tasks.task_test_base import TaskTestBase -from pants_test.testutils.compile_strategy_utils import set_compile_strategies class JarPublishTest(TaskTestBase): @@ -121,7 +120,6 @@ def test_publish_unlisted_repo(self): assert "Repository internal has no" in str(e) raise e - @set_compile_strategies def test_publish_local_dryrun(self): targets = self._prepare_for_publishing() diff --git a/tests/python/pants_test/tasks/test_junit_tests_integration.py b/tests/python/pants_test/tasks/test_junit_tests_integration.py index 3aac668ed50..5a5713239b4 100644 --- a/tests/python/pants_test/tasks/test_junit_tests_integration.py +++ b/tests/python/pants_test/tasks/test_junit_tests_integration.py @@ -8,6 +8,8 @@ import os from xml.etree import ElementTree +import pytest + from pants.util.contextutil import temporary_dir from pants_test.pants_run_integration_test import PantsRunIntegrationTest @@ -115,6 +117,8 @@ def test_junit_test_with_emma(self): self.assertIn('org.pantsbuild.example.hello.welcome', package_report) self.assertIn('org.pantsbuild.example.hello.greet', package_report) + # NB: fix in process over here: https://rbcommons.com/s/twitter/r/2803/ + @pytest.mark.xfail def test_junit_test_with_coberta(self): with temporary_dir(root_dir=self.workdir_root()) as workdir: pants_run = self.run_pants_with_workdir([ @@ -185,7 +189,6 @@ def test_junit_test_target_cwd(self): def test_junit_test_annotation_processor(self): pants_run = self.run_pants([ 'test', - '--compile-java-strategy=isolated', 'testprojects/tests/java/org/pantsbuild/testproject/annotation', ]) self.assert_success(pants_run) diff --git a/tests/python/pants_test/testutils/compile_strategy_utils.py b/tests/python/pants_test/testutils/compile_strategy_utils.py deleted file mode 100644 index 71d80b9d5b6..00000000000 --- a/tests/python/pants_test/testutils/compile_strategy_utils.py +++ /dev/null @@ -1,46 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import (absolute_import, division, generators, nested_scopes, print_function, - unicode_literals, with_statement) - -import sys - - -"""Helpers to provide compile strategies to unit tests. - -These methods work around the fact that pytest.mark.parametrize does not support methods. -""" - -_STRATEGIES = ['global', 'isolated'] -_SCOPES = ['apt', 'java', 'scala'] - - -def _wrap(testmethod, setupfun): - def wrapped(self): - for strategy in _STRATEGIES: - try: - setupfun(self, testmethod, strategy) - except Exception: - print("failed for strategy '{}'".format(strategy), file=sys.stderr) - raise - return wrapped - - -def provide_compile_strategies(testmethod): - """ A decorator for test methods that provides the compilation strategy as a parameter. - - Invokes the test multiple times, once for each built-in strategy in _STRATEGIES. 
- """ - return _wrap(testmethod, lambda self, testmethod, strategy: testmethod(self, strategy)) - - -def set_compile_strategies(testmethod): - """A decorator for BaseTests which sets strategy options differently for each invoke.""" - - def setup(self, testmethod, strategy): - for scope in _SCOPES: - self.set_options_for_scope('compile.{}'.format(scope), strategy=strategy) - testmethod(self) - return _wrap(testmethod, setup)