Skip to content

Commit

Permalink
Add log capture to isolated zinc compiles
Browse files Browse the repository at this point in the history
This change adds an optional debug log file to zinc, and includes it in the artifact cache to help determine the provenance of the classfiles and analysis in a cache entry.

- Drop analysis manipulation code from zinc (can split this out into a separate change if need be)
- Add the `capture-log` option to zinc
- Split out named Regex and File loggers
- Remove LoggerRaw in favor of writing directly to stdout
- Add (admittedly limited) support for log capture in the isolated strategy with zinc
- Log exception messages when jobs fail in the execution graph
- Switch scala tests in the pants codebase from `specs1` (deprecated) to `scalatest`
- Add a unit test for compound logging

Testing Done:
Added the first unit test for zinc; verified the remaining changes with local testing.
pantsbuild#1721

Reviewed at https://rbcommons.com/s/twitter/r/2404/
  • Loading branch information
stuhood committed Jun 24, 2015
1 parent b862d13 commit 98c86e0
Show file tree
Hide file tree
Showing 28 changed files with 288 additions and 593 deletions.
4 changes: 2 additions & 2 deletions 3rdparty/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ jar_library(name='junit',
jar('junit', 'junit', '4.12')
])

jar_library(name='specs',
jar_library(name='scalatest',
jars=[
jar('org.scala-tools.testing', 'specs_2.10', '1.6.9')
jar('org.scalatest', 'scalatest_2.10', '2.2.4')
])
2 changes: 1 addition & 1 deletion build-support/bin/ci.sh
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ fi
if [[ "${skip_jvm:-false}" == "false" ]]; then
banner "Running core jvm tests"
(
./pants.pex ${PANTS_ARGS[@]} test tests/java:: src/{java,scala}:: zinc::
./pants.pex ${PANTS_ARGS[@]} test {src,tests}/{java,scala}:: zinc::
) || die "Core jvm test failure"
fi

Expand Down
2 changes: 1 addition & 1 deletion examples/src/java/org/pantsbuild/example/3rdparty_jvm.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ In the appropriate `BUILD` file, you want to find a
<a pantsref="bdict_jar_library">`jar_library`</a>
with the <a pantsref="bdict_jar">`jar`</a>s you want:

!inc[start-at=junit&end-before=specs](../../../../../../3rdparty/BUILD)
!inc[start-at=junit&end-before=scalatest](../../../../../../3rdparty/BUILD)

Here, the
<a pantsref="bdict_jar_library">`jar_library`</a>'s name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ junit_tests(name='welcome',
dependencies=[
'examples/src/scala/org/pantsbuild/example/hello/welcome:welcome',
'3rdparty:junit',
'3rdparty:specs',
'3rdparty:scalatest',
],
sources=globs('*.scala'),
)
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,12 @@
package org.pantsbuild.example.hello.welcome

import org.junit.runner.RunWith
import org.specs._
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import org.scalatest.matchers.MustMatchers

@RunWith(classOf[runner.JUnitSuiteRunner])
class WelSpec extends Specification with runner.JUnit {
@RunWith(classOf[JUnitRunner])
class WelSpec extends WordSpec with MustMatchers {
"Welcome" should {
"greet nobody" in {
WelcomeEverybody(List()).size mustEqual 0
Expand Down
4 changes: 3 additions & 1 deletion pants.ini.isolated
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ enabled: True
runtime-deps: []
scalac: [':scala-compile']
strategy: isolated
capture_log: True
worker_count: 4
zinc: [':zinc']
jvm_options: [
Expand All @@ -36,7 +37,8 @@ no_warning_args: [
[compile.scala]
runtime-deps: [':scala-library']
scalac: [':scala-compiler']
strategy: isolated
strategy: isolated
capture_log: True
worker_count: 4
zinc: [':zinc']
jvm_options: [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,7 @@ def worker(worker_key, work):
direct_dependees = self._dependees[finished_key]
status_table.mark_as(result_status, finished_key)

# Queue downstream tasks.
if result_status is SUCCESSFUL:
try:
finished_job.run_success_callback()
Expand All @@ -230,19 +231,22 @@ def worker(worker_key, work):
if status_table.are_all_successful(self._jobs[dependee].dependencies)]

submit_jobs(ready_dependees)
else: # failed or canceled
else: # Failed or canceled.
try:
finished_job.run_failure_callback()
except Exception as e:
log.debug(traceback.format_exc())
raise ExecutionFailure("Error in on_failure for {}".format(finished_key), e)

# propagate failures downstream
# Propagate failures downstream.
for dependee in direct_dependees:
finished_queue.put((dependee, CANCELED, None))

log.debug("{} finished with status {}".format(finished_key,
status_table.get(finished_key)))
# Log success or failure for this job.
if result_status is FAILED:
log.error("{} failed: {}".format(finished_key, value))
else:
log.debug("{} finished with status {}".format(finished_key, result_status))
except ExecutionFailure:
raise
except Exception as e:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def create_analysis_tools(self):
def _language_platform_version_info(self):
return [self.get_options().target] if self.get_options().target else []

def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file):
def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file):
relative_classpath = relativize_paths(classpath, self._buildroot)
jmake_classpath = self.tool_classpath('jmake')
args = [
Expand Down
6 changes: 3 additions & 3 deletions src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def create_analysis_tools(self):
"""
raise NotImplementedError()

def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file):
def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file):
"""Invoke the compiler.
Must raise TaskError on compile failure.
Expand Down Expand Up @@ -278,7 +278,7 @@ def execute_chunk(self, relevant_targets):
# Nothing to build. Register products for all the targets in one go.
self._register_vts([self._strategy.compile_context(t) for t in relevant_targets])

def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath, outdir, progress_message):
def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath, outdir, log_file, progress_message):
"""Compiles sources for the given vts into the given output dir.
vts - versioned target set
Expand Down Expand Up @@ -310,7 +310,7 @@ def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath
# change triggering the error is reverted, we won't rebuild to restore the missing
# classfiles. So we force-invalidate here, to be on the safe side.
vts.force_invalidate()
self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file)
self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file, log_file)

def check_artifact_cache(self, vts):
post_process_cached_vts = lambda vts: self._strategy.post_process_cached_vts(vts)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,7 @@ def compile_chunk(self,
upstream_analysis,
compile_classpath,
self._classes_dir,
None,
progress_message)

# No exception was thrown, therefore the compile succeeded and analysis_file is now valid.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,9 @@ def register_options(cls, register, language, supports_concurrent_execution):
if supports_concurrent_execution:
register('--worker-count', type=int, default=1, advanced=True,
help='The number of concurrent workers to use compiling {lang} sources with the isolated'
' strategy. This is a beta feature.'.format(lang=language))
' strategy.'.format(lang=language))
register('--capture-log', action='store_true', default=False, advanced=True,
help='Capture compilation output to per-target logs.')

def __init__(self, context, options, workdir, analysis_tools, language, sources_predicate):
super(JvmCompileIsolatedStrategy, self).__init__(context, options, workdir, analysis_tools,
Expand All @@ -38,14 +40,17 @@ def __init__(self, context, options, workdir, analysis_tools, language, sources_
# Various working directories.
self._analysis_dir = os.path.join(workdir, 'isolated-analysis')
self._classes_dir = os.path.join(workdir, 'isolated-classes')
self._logs_dir = os.path.join(workdir, 'isolated-logs')

self._capture_log = options.capture_log

try:
worker_count = options.worker_count
except AttributeError:
# tasks that don't support concurrent execution have no worker_count registered
worker_count = 1

self._worker_count = worker_count

self._worker_pool = None

def name(self):
Expand All @@ -70,6 +75,7 @@ def pre_compile(self):
super(JvmCompileIsolatedStrategy, self).pre_compile()
safe_mkdir(self._analysis_dir)
safe_mkdir(self._classes_dir)
safe_mkdir(self._logs_dir)

def prepare_compile(self, cache_manager, all_targets, relevant_targets):
super(JvmCompileIsolatedStrategy, self).prepare_compile(cache_manager, all_targets,
Expand Down Expand Up @@ -137,24 +143,34 @@ def _upstream_analysis(self, compile_contexts, classpath_entries):
else:
yield compile_context.classes_dir, compile_context.analysis_file

def _capture_log_file(self, target):
if self._capture_log:
return os.path.join(self._logs_dir, "{}.log".format(target.id))
return None

def exec_graph_key_for_target(self, compile_target):
return "compile-{}".format(compile_target.address.spec)
return "compile({})".format(compile_target.address.spec)

def _create_compile_jobs(self, compile_classpaths, compile_contexts, extra_compile_time_classpath,
invalid_targets, invalid_vts_partitioned, compile_vts, register_vts,
update_artifact_cache_vts_work):
def create_work_for_vts(vts, compile_context, target_closure):
def work():
progress_message = vts.targets[0].address.spec
progress_message = compile_context.target.address.spec
cp_entries = self._compute_classpath_entries(compile_classpaths,
target_closure,
compile_context,
extra_compile_time_classpath)

upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries))
tmpdir = os.path.join(self.analysis_tmpdir, vts.targets[0].id)
safe_mkdir(tmpdir)

# Capture a compilation log if requested.
log_file = self._capture_log_file(compile_context.target)

# Mutate analysis within a temporary directory, and move it to the final location
# on success.
tmpdir = os.path.join(self.analysis_tmpdir, compile_context.target.id)
safe_mkdir(tmpdir)
tmp_analysis_file = JvmCompileStrategy._analysis_for_target(
tmpdir, compile_context.target)
if os.path.exists(compile_context.analysis_file):
Expand All @@ -165,6 +181,7 @@ def work():
upstream_analysis,
cp_entries,
compile_context.classes_dir,
log_file,
progress_message)
atomic_copy(tmp_analysis_file, compile_context.analysis_file)

Expand Down Expand Up @@ -267,14 +284,17 @@ def _write_to_artifact_cache(self, vts, compile_context, get_update_artifact_cac
self._analysis_dir, compile_context.target)
relativize_args_tuple = (compile_context.analysis_file, portable_analysis_file)

# Compute the classes and resources for this target.
# Collect the artifacts for this target.
artifacts = []
resources_by_target = self.context.products.get_data('resources_by_target')
if resources_by_target is not None:
for _, paths in resources_by_target[compile_context.target].abs_paths():
artifacts.extend(paths)
for dirpath, _, filenames in safe_walk(compile_context.classes_dir):
artifacts.extend([os.path.join(dirpath, f) for f in filenames])
log_file = self._capture_log_file(compile_context.target)
if log_file and os.path.exists(log_file):
artifacts.append(log_file)

# Get the 'work' that will publish these artifacts to the cache.
# NB: the portable analysis_file won't exist until we finish.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ def extra_products(self, target):
ret.append((root, [plugin_info_file]))
return ret

def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file):
def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file, log_file):
# We add compiler_classpath to ensure the scala-library jar is on the classpath.
# TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't
# usually need. Be more selective?
Expand All @@ -236,6 +236,8 @@ def compile(self, args, classpath, sources, classes_output_dir, upstream_analysi
zinc_args.append('-no-color')
if not self.get_options().name_hashing:
zinc_args.append('-no-name-hashing')
if log_file:
zinc_args.extend(['-capture-log', log_file])

zinc_args.extend(['-compiler-interface', self.tool_jar('compiler-interface')])
zinc_args.extend(['-sbt-interface', self.tool_jar('sbt-interface')])
Expand Down
9 changes: 1 addition & 8 deletions src/scala/org/pantsbuild/zinc/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,7 @@ scala_library(
'3rdparty/jvm/com/typesafe/sbt:incremental-compiler',
'3rdparty:guava',
'3rdparty:jsr305',
'src/scala/org/pantsbuild/zinc/logging',
],
sources=globs('*.scala'),
)

jvm_binary(
name='main',
main='org.pantsbuild.zinc.Main',
dependencies=[
':zinc',
]
)
13 changes: 4 additions & 9 deletions src/scala/org/pantsbuild/zinc/Compiler.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,11 @@ package org.pantsbuild.zinc
import java.io.File
import java.net.URLClassLoader
import sbt.compiler.javac
import sbt.{ ClasspathOptions, CompileOptions, CompileSetup, LoggerReporter, ScalaInstance }
import sbt.{ ClasspathOptions, CompileOptions, CompileSetup, Logger, LoggerReporter, ScalaInstance }
import sbt.compiler.{ AggressiveCompile, AnalyzingCompiler, CompilerCache, CompileOutput, IC }
import sbt.inc.{ Analysis, AnalysisStore, FileBasedStore }
import sbt.Path._
import xsbti.compile.{ JavaCompiler, GlobalsCache }
import org.pantsbuild.zinc.{LoggerRaw => Logger}

import org.pantsbuild.zinc.Cache.Implicits

Expand Down Expand Up @@ -196,7 +195,7 @@ class Compiler(scalac: AnalyzingCompiler, javac: JavaCompiler, setup: Setup) {
* Note: This variant does not report progress updates
*/
def compile(inputs: Inputs, cwd: Option[File], reporter: xsbti.Reporter)(log: Logger): Analysis = {
val progress = Some(new SimpleCompileProgress(setup.logOptions.logPhases, setup.logOptions.printProgress, setup.logOptions.heartbeatSecs)(log))
val progress = Some(new SimpleCompileProgress(setup.consoleLog.logPhases, setup.consoleLog.printProgress, setup.consoleLog.heartbeatSecs)(log))
compile(inputs, cwd, reporter, progress)(log)
}

Expand All @@ -215,12 +214,8 @@ class Compiler(scalac: AnalyzingCompiler, javac: JavaCompiler, setup: Setup) {
val incOpts = incOptions.options
val compileSetup = new CompileSetup(compileOutput, new CompileOptions(scalacOptions, javacOptions), scalac.scalaInstance.actualVersion, compileOrder, incOpts.nameHashing)
val analysisStore = Compiler.analysisStore(cacheFile)
val analysis = aggressive.compile1(sources, cp, compileSetup, progress, analysisStore, getAnalysis, definesClass, scalac, javac, reporter, skip, globalsCache, incOpts)(log)
if (mirrorAnalysis) {
SbtAnalysis.printRelations(analysis, Some(new File(cacheFile.getPath() + ".relations")), cwd)
}
SbtAnalysis.printOutputs(analysis, outputRelations, outputProducts, cwd, classesDirectory)
analysis

aggressive.compile1(sources, cp, compileSetup, progress, analysisStore, getAnalysis, definesClass, scalac, javac, reporter, skip, globalsCache, incOpts)(log)
}

/**
Expand Down
Loading

0 comments on commit 98c86e0

Please sign in to comment.