def _render_jar_tool_args(self, options):
  """Format the arguments to jar-tool.

  :param Options options:
  """
  tool_args = []
  with temporary_dir() as stage_dir:
    # Relativize urls in the canonical classpath; this needs to be stable too, therefore
    # do not follow symlinks because symlinks may vary from platform to platform.
    classpath = relativize_classpath(self.classpath,
                                     os.path.dirname(self._path),
                                     followlinks=False)

    def entry_to_cli(entry):
      # Materialize the entry under the staging dir; append `=dest` only when a
      # destination inside the jar was specified.
      materialized = entry.materialize(stage_dir)
      if entry.dest:
        return '{}={}'.format(materialized, entry.dest)
      return materialized

    file_specs = [entry_to_cli(e) for e in self._entries] if self._entries else []
    jar_specs = self._jars or []

    with safe_args(classpath, options, delimiter=',') as classpath_args, \
         safe_args(file_specs, options, delimiter=',') as files_args, \
         safe_args(jar_specs, options, delimiter=',') as jars_args:

      # If you specify --manifest to jar-tool you cannot specify --main.
      manifest_file = (self._manifest_entry.materialize(stage_dir)
                       if self._manifest_entry else None)

      main_arg = self._main
      if self._main and manifest_file:
        # Both a manifest and a main class: fold the main class into the
        # manifest rather than passing -main alongside -manifest.
        main_arg = None
        with open(manifest_file, 'a') as f:
          f.write("Main-Class: {}\n".format(self._main))

      if main_arg:
        tool_args.append('-main={}'.format(self._main))
      if classpath_args:
        tool_args.append('-classpath={}'.format(','.join(classpath_args)))
      if manifest_file:
        tool_args.append('-manifest={}'.format(manifest_file))
      if files_args:
        tool_args.append('-files={}'.format(','.join(files_args)))
      if jars_args:
        tool_args.append('-jars={}'.format(','.join(jars_args)))
      yield tool_args
def _render_jar_tool_args(self, options):
  """Format the arguments to jar-tool.

  :param Options options:
  """
  args = []
  with temporary_dir() as manifest_stage_dir:
    # relativize urls in canonical classpath, this needs to be stable too therefore
    # do not follow the symlinks because symlinks may vary from platform to platform.
    classpath = relativize_classpath(self.classpath,
                                     os.path.dirname(self._path),
                                     followlinks=False)

    def as_cli_entry(entry):
      src = entry.materialize(manifest_stage_dir)
      return '{}={}'.format(src, entry.dest) if entry.dest else src

    # Materialize eagerly with a list comprehension: under Python 3, `map` returns a
    # lazy single-use iterator with no len(), which breaks sequence-expecting callees
    # such as safe_args. (This also matches the sibling implementation of this method.)
    files = [as_cli_entry(entry) for entry in self._entries] if self._entries else []
    jars = self._jars or []

    with safe_args(classpath, options, delimiter=',') as classpath_args:
      with safe_args(files, options, delimiter=',') as files_args:
        with safe_args(jars, options, delimiter=',') as jars_args:

          # If you specify --manifest to jar-tool you cannot specify --main.
          if self._manifest_entry:
            manifest_file = self._manifest_entry.materialize(manifest_stage_dir)
          else:
            manifest_file = None

          if self._main and manifest_file:
            # Fold the main class into the manifest instead of passing -main.
            main_arg = None
            with open(manifest_file, 'a') as f:
              f.write("Main-Class: {}\n".format(self._main))
          else:
            main_arg = self._main

          if main_arg:
            args.append('-main={}'.format(self._main))
          if classpath_args:
            args.append('-classpath={}'.format(','.join(classpath_args)))
          if manifest_file:
            args.append('-manifest={}'.format(manifest_file))
          if files_args:
            args.append('-files={}'.format(','.join(files_args)))
          if jars_args:
            args.append('-jars={}'.format(','.join(jars_args)))
          yield args
def test_safe_args_below_max_arg(self):
  # len(args) < max_args, so args should pass through unchanged.
  args = ["1", "2", "3", "4"]
  with argfile.safe_args(args, options=None, max_args=10, quoter=lambda x: x, delimiter="") as safe_args:
    # Fixed: assertTrue(args, safe_args) treated safe_args as the failure *message*
    # and only checked that args was truthy; assert the pass-through equality instead.
    self.assertEqual(args, safe_args)
def test_safe_args_over_max_arg(self):
  # With len(args) > max_args, a single argument file should be yielded instead.
  original_args = ['1', '2', '3', '4']
  with argfile.safe_args(original_args, options=None, max_args=2,
                         quoter=lambda x: x, delimiter='') as yielded:
    self.assertEqual(1, len(yielded))
    argfile_path = yielded[0]
    self.assertTrue(os.path.isfile(argfile_path))
    with open(argfile_path) as fp:
      self.assertEqual(['1234'], fp.readlines())
def errorprone(self, target):
  """Run the Error Prone checker over ``target``'s sources.

  Builds the runtime classpath for the target closure, selects a compatible JDK,
  assembles the Error Prone command line (classpath via an @-file to keep the
  command short), and invokes it through ``self.runjava``.

  :param target: the JVM target to check.
  :returns: the Error Prone process exit code (0 on success).
  """
  runtime_classpaths = self.context.products.get_data('runtime_classpath')
  runtime_classpath = [jar for conf, jar in runtime_classpaths.get_for_targets(target.closure(bfs=True))]
  output_dir = os.path.join(self.workdir, target.id)
  safe_mkdir(output_dir)
  runtime_classpath.append(output_dir)

  # Try to run errorprone with the same java version as the target
  # The minimum JDK for errorprone is JDK 1.8
  min_jdk_version = max(target.platform.target_level, Revision.lenient('1.8'))
  if min_jdk_version.components[0] == 1:
    # Legacy "1.x" version scheme: pin the major.minor and allow any patch level.
    max_jdk_version = Revision(min_jdk_version.components[0], min_jdk_version.components[1], '9999')
  else:
    # Post-JDK-9 version scheme: pin only the major version.
    max_jdk_version = Revision(min_jdk_version.components[0], '9999')
  self.set_distribution(minimum_version=min_jdk_version, maximum_version=max_jdk_version, jdk=True)

  jvm_options = self.get_options().jvm_options[:]
  if self.dist.version < Revision.lenient('9'):
    # For Java 8 we need to add the errorprone javac jar to the bootclasspath to
    # avoid the "java.lang.NoSuchFieldError: ANNOTATION_PROCESSOR_MODULE_PATH" error
    # See https://github.com/google/error-prone/issues/653 for more information
    jvm_options.extend(['-Xbootclasspath/p:{}'.format(self.tool_classpath('errorprone-javac')[0])])

  args = [
    '-d', output_dir,
  ]

  # Errorprone does not recognize source or target 10 yet
  if target.platform.source_level < Revision.lenient('10'):
    args.extend(['-source', str(target.platform.source_level)])

  if target.platform.target_level < Revision.lenient('10'):
    args.extend(['-target', str(target.platform.target_level)])

  # Pass the (potentially very long) classpath via an @-file to avoid OS
  # command-line length limits.
  errorprone_classpath_file = os.path.join(self.workdir, '{}.classpath'.format(os.path.basename(output_dir)))
  with open(errorprone_classpath_file, 'w') as f:
    f.write('-classpath ')
    f.write(':'.join(runtime_classpath))
  args.append('@{}'.format(errorprone_classpath_file))

  for opt in self.get_options().command_line_options:
    args.extend(safe_shlex_split(opt))

  # Batch sources through an arg file as well, in case they exceed platform limits.
  with argfile.safe_args(self.calculate_sources(target), self.get_options()) as batched_sources:
    args.extend(batched_sources)
    result = self.runjava(classpath=self.tool_classpath('errorprone'),
                          main=self._ERRORPRONE_MAIN,
                          jvm_options=jvm_options,
                          args=args,
                          workunit_name='errorprone',
                          workunit_labels=[WorkUnitLabel.LINT])

    self.context.log.debug('java {main} ... exited with result ({result})'.format(
      main=self._ERRORPRONE_MAIN, result=result))

  return result
def test_safe_args_below_max_arg(self):
  # len(args) < max_args, so args should pass through unchanged.
  args = ['1', '2', '3', '4']
  with argfile.safe_args(args, options=None, max_args=10, quoter=lambda x: x, delimiter='') as safe_args:
    # Fixed: assertTrue(args, safe_args) treated safe_args as the failure *message*
    # and only checked that args was truthy; assert the pass-through equality instead.
    self.assertEqual(args, safe_args)
def compile(self, ctx, args, classpath, upstream_analysis, settings, fatal_warnings,
            zinc_file_manager, javac_plugin_map, scalac_plugin_map):
  """Compile ``ctx.sources`` with ``javac`` from the preferred JVM distribution.

  Assembles the javac command line (classpath, platform args, plugin args,
  warning flags), batches the sources through an arg file, then runs javac in a
  subprocess inside a workunit.

  :raises TaskError: if javac exits with a non-zero return code.
  """
  # Prefer a strict version match for the platform; fall back to a loose match.
  try:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
  except DistributionLocator.Error:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

  javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

  javac_cmd.extend([
    '-classpath', ':'.join(classpath),
  ])

  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    javac_cmd.extend(settings_args)

  javac_cmd.extend([
    '-d', ctx.classes_dir,
    # TODO: support -release
    '-source', str(settings.source_level),
    '-target', str(settings.target_level),
  ])

  javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

  javac_cmd.extend(args)

  if fatal_warnings:
    javac_cmd.extend(self.get_options().fatal_warnings_enabled_args)
  else:
    javac_cmd.extend(self.get_options().fatal_warnings_disabled_args)

  # Batch sources through an arg file to stay under OS command-length limits.
  with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
    javac_cmd.extend(batched_sources)

    with self.context.new_workunit(name='javac',
                                   cmd=' '.join(javac_cmd),
                                   labels=[WorkUnitLabel.COMPILER]) as workunit:
      self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
      p = subprocess.Popen(javac_cmd,
                           stdout=workunit.output('stdout'),
                           stderr=workunit.output('stderr'))
      return_code = p.wait()
      workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
      if return_code:
        raise TaskError('javac exited with return code {rc}'.format(rc=return_code))
def compile(self, ctx, args, dependency_classpath, upstream_analysis, settings,
            compiler_option_sets, zinc_file_manager, javac_plugin_map, scalac_plugin_map):
  """Compile ``ctx.sources`` with ``javac``, supporting a hermetic execution strategy.

  Builds the full javac argument list (platform args, plugin args, option sets,
  classpath, sources), splits out ``-J`` launcher options that cannot go in an
  arg file, and runs javac either hermetically or as a local subprocess.

  :raises TaskError: if the subprocess javac exits non-zero.
  """
  classpath = (ctx.classes_dir.path,) + tuple(ce.path for ce in dependency_classpath)

  if self.get_options().capture_classpath:
    self._record_compile_classpath(classpath, ctx.target, ctx.classes_dir.path)

  # Prefer a strict version match for the platform; fall back to a loose match.
  try:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
  except DistributionLocator.Error:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

  javac_args = []

  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    javac_args.extend(settings_args)

  javac_args.extend([
    # TODO: support -release
    '-source', str(settings.source_level),
    '-target', str(settings.target_level),
  ])

  if self.execution_strategy == self.HERMETIC:
    javac_args.extend([
      # We need to strip the source root from our output files. Outputting to a directory, and
      # capturing that directory, does the job.
      # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
      # have a convenient way of making a directory in the output tree, so let's just use the
      # working directory as our output dir.
      # This also has the benefit of not needing to strip leading directories from the returned
      # snapshot.
      '-d', '.',
    ])
  else:
    javac_args.extend([
      '-d', ctx.classes_dir.path,
    ])

  javac_args.extend(self._javac_plugin_args(javac_plugin_map))

  javac_args.extend(args)

  compiler_option_sets_args = self.get_merged_args_for_compiler_option_sets(compiler_option_sets)
  javac_args.extend(compiler_option_sets_args)

  javac_args.extend([
    '-classpath', ':'.join(classpath),
  ])
  javac_args.extend(ctx.sources)

  # From https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#BHCJEIBB
  # Wildcards (*) aren’t allowed in these lists (such as for specifying *.java).
  # Use of the at sign (@) to recursively interpret files isn’t supported.
  # The -J options aren’t supported because they’re passed to the launcher,
  # which doesn’t support argument files.
  j_args = [j_arg for j_arg in javac_args if j_arg.startswith('-J')]
  safe_javac_args = list(filter(lambda x: x not in j_args, javac_args))

  # Batch the arg-file-safe args; -J args go directly on the command line.
  with argfile.safe_args(safe_javac_args, self.get_options()) as batched_args:
    javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]
    javac_cmd.extend(j_args)
    javac_cmd.extend(batched_args)

    if self.execution_strategy == self.HERMETIC:
      self._execute_hermetic_compile(javac_cmd, ctx)
    else:
      with self.context.new_workunit(name='javac',
                                     cmd=' '.join(javac_cmd),
                                     labels=[WorkUnitLabel.COMPILER]) as workunit:
        self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
        p = subprocess.Popen(javac_cmd,
                             stdout=workunit.output('stdout'),
                             stderr=workunit.output('stderr'))
        return_code = p.wait()
        workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
        if return_code:
          raise TaskError('javac exited with return code {rc}'.format(rc=return_code))
def _run_tests(self, tests_to_targets):
  """Run the given tests in batches grouped by shared execution properties.

  Tests are grouped by (workdir, platform, jvm options, env vars, concurrency,
  threads), partitioned into batches, and each batch is run in a JUnit runner
  subprocess. Failures are accumulated across batches.

  :param tests_to_targets: mapping from test to its owning target.
  :raises TestFailedTaskError: if any batch exits non-zero.
  """
  if self._coverage:
    extra_jvm_options = self._coverage.extra_jvm_options
    classpath_prepend = self._coverage.classpath_prepend
    classpath_append = self._coverage.classpath_append
  else:
    extra_jvm_options = []
    classpath_prepend = ()
    classpath_append = ()

  tests_by_properties = self._tests_by_properties(
    tests_to_targets,
    self._infer_workdir,
    lambda target: target.test_platform,
    lambda target: target.payload.extra_jvm_options,
    lambda target: target.payload.extra_env_vars,
    lambda target: target.concurrency,
    lambda target: target.threads
  )

  # the below will be None if not set, and we'll default back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for properties, tests in tests_by_properties.items():
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
    for batch in self._partition(tests):
      # Batches of test classes will likely exist within the same targets: dedupe them.
      relevant_targets = set(map(tests_to_targets.get, batch))
      complete_classpath = OrderedSet()
      complete_classpath.update(classpath_prepend)
      complete_classpath.update(self.tool_classpath('junit'))
      complete_classpath.update(self.classpath(relevant_targets,
                                               classpath_product=classpath_product))
      complete_classpath.update(classpath_append)
      distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

      # Override cmdline args with values from junit_test() target that specify concurrency:
      args = self._args + [u'-xmlreport']

      if concurrency is not None:
        args = remove_arg(args, '-default-parallel')
        if concurrency == junit_tests.CONCURRENCY_SERIAL:
          args = ensure_arg(args, '-default-concurrency', param='SERIAL')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

      if threads is not None:
        args = remove_arg(args, '-parallel-threads', has_param=True)
        args += ['-parallel-threads', str(threads)]

      # Batch the test specs through an arg file to avoid command-length limits.
      with argfile.safe_args(batch, self.get_options()) as batch_tests:
        self.context.log.debug('CWD = {}'.format(workdir))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          # abs() folds negative exit codes (signals) into the failure count.
          result += abs(self._spawn_and_wait(
            executor=SubprocessExecutor(distribution),
            distribution=distribution,
            classpath=complete_classpath,
            main=JUnitRun._MAIN,
            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
            args=args + batch_tests,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=workdir,
            synthetic_jar_dir=self.workdir,
            create_synthetic_jar=self.synthetic_classpath,
          ))

        if result != 0 and self._fail_fast:
          break

  if result != 0:
    failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
    failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
    error_message_lines = []
    if self._failure_summary:
      for target in failed_targets:
        error_message_lines.append('\n{0}{1}'.format(' '*4, target.address.spec))
        for test in sorted(failed_targets_and_tests[target]):
          error_message_lines.append('{0}{1}'.format(' '*8, test))
    error_message_lines.append(
      '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
      .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
              targets=pluralize(len(failed_targets), 'target'))
    )
    raise TestFailedTaskError('\n'.join(error_message_lines),
                              failed_targets=list(failed_targets))
def run_tests(self, fail_fast, test_targets, output_dir, coverage):
  """Run JUnit tests for ``test_targets``, batching and reporting per batch.

  :param fail_fast: stop after the first failing batch when true.
  :param test_targets: the test targets to run.
  :param output_dir: directory for reports and other outputs.
  :param coverage: coverage engine used to instrument and modify each run.
  :returns: a TestResult with rc 0 on success, or the failure summary otherwise.
  """
  test_registry = self._collect_test_targets(test_targets)
  if test_registry.empty:
    return TestResult.rc(0)

  coverage.instrument(output_dir)

  def parse_error_handler(parse_error):
    # Just log and move on since the result is only used to characterize failures, and raising
    # an error here would just distract from the underlying test failures.
    self.context.log.error('Error parsing test result file {path}: {cause}'.format(
      path=parse_error.xml_path, cause=parse_error.cause))

  # The 'instrument_classpath' product below below will be `None` if not set, and we'll default
  # back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for batch_id, (properties, batch) in enumerate(self._iter_batches(test_registry)):
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties

    batch_output_dir = output_dir
    if self._batched:
      # Give each batch its own output subdirectory so reports don't collide.
      batch_output_dir = os.path.join(batch_output_dir, 'batch-{}'.format(batch_id))

    run_modifications = coverage.run_modifications(batch_output_dir)

    extra_jvm_options = run_modifications.extra_jvm_options

    # Batches of test classes will likely exist within the same targets: dedupe them.
    relevant_targets = {test_registry.get_owning_target(t) for t in batch}

    complete_classpath = OrderedSet()
    complete_classpath.update(run_modifications.classpath_prepend)
    complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
    complete_classpath.update(self.classpath(relevant_targets,
                                             classpath_product=classpath_product))

    distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

    # Override cmdline args with values from junit_test() target that specify concurrency:
    args = self._args(fail_fast, batch_output_dir) + [u'-xmlreport']

    if concurrency is not None:
      args = remove_arg(args, '-default-parallel')
      if concurrency == JUnitTests.CONCURRENCY_SERIAL:
        args = ensure_arg(args, '-default-concurrency', param='SERIAL')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

    if threads is not None:
      args = remove_arg(args, '-parallel-threads', has_param=True)
      args += ['-parallel-threads', str(threads)]

    batch_test_specs = [test.render_test_spec() for test in batch]
    # Batch the specs through an arg file to avoid command-length limits.
    with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
      with self._chroot(relevant_targets, workdir) as chroot:
        self.context.log.debug('CWD = {}'.format(chroot))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          subprocess_result = self._spawn_and_wait(
            executor=SubprocessExecutor(distribution),
            distribution=distribution,
            classpath=complete_classpath,
            main=JUnit.RUNNER_MAIN,
            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
            args=args + batch_tests,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=chroot,
            synthetic_jar_dir=batch_output_dir,
            create_synthetic_jar=self.synthetic_classpath,
          )
          self.context.log.debug('JUnit subprocess exited with result ({})'.format(
            subprocess_result))
          # abs() folds negative exit codes (signals) into the failure count.
          result += abs(subprocess_result)

        # Report per-test info for this batch regardless of pass/fail.
        tests_info = self.parse_test_info(batch_output_dir, parse_error_handler, ['classname'])
        for test_name, test_info in tests_info.items():
          test_item = Test(test_info['classname'], test_name)
          test_target = test_registry.get_owning_target(test_item)
          self.report_all_info_for_single_test(self.options_scope, test_target,
                                               test_name, test_info)

        if result != 0 and fail_fast:
          break

  if result == 0:
    return TestResult.rc(0)

  target_to_failed_test = parse_failed_targets(test_registry, output_dir, parse_error_handler)

  def sort_owning_target(t):
    return t.address.spec if t else None

  failed_targets = sorted(target_to_failed_test, key=sort_owning_target)
  error_message_lines = []
  if self._failure_summary:
    def render_owning_target(t):
      return t.address.reference() if t else '<Unknown Target>'

    for target in failed_targets:
      error_message_lines.append('\n{indent}{owner}'.format(
        indent=' ' * 4, owner=render_owning_target(target)))
      for test in sorted(target_to_failed_test[target]):
        error_message_lines.append('{indent}{classname}#{methodname}'.format(
          indent=' ' * 8, classname=test.classname, methodname=test.methodname))

  error_message_lines.append(
    '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
    .format(main=JUnit.RUNNER_MAIN, code=result, failed=len(failed_targets),
            targets=pluralize(len(failed_targets), 'target')))

  return TestResult(msg='\n'.join(error_message_lines), rc=result,
                    failed_targets=failed_targets)
def run_tests(self, fail_fast, test_targets, output_dir, coverage):
  """Run JUnit tests for ``test_targets`` in batches and summarize failures.

  :param fail_fast: stop after the first failing batch when true.
  :param test_targets: the test targets to run.
  :param output_dir: directory for reports and other outputs.
  :param coverage: coverage engine used to instrument and modify each run.
  :returns: a TestResult with rc 0 on success, or the failure summary otherwise.
  """
  test_registry = self._collect_test_targets(test_targets)
  if test_registry.empty:
    return TestResult.rc(0)

  coverage.instrument(output_dir)

  def parse_error_handler(parse_error):
    # Just log and move on since the result is only used to characterize failures, and raising
    # an error here would just distract from the underlying test failures.
    self.context.log.error('Error parsing test result file {path}: {cause}'
                           .format(path=parse_error.xml_path, cause=parse_error.cause))

  # The 'instrument_classpath' product below below will be `None` if not set, and we'll default
  # back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for batch_id, (properties, batch) in enumerate(self._iter_batches(test_registry)):
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties

    batch_output_dir = output_dir
    if self._batched:
      # Give each batch its own output subdirectory so reports don't collide.
      batch_output_dir = os.path.join(batch_output_dir, 'batch-{}'.format(batch_id))

    run_modifications = coverage.run_modifications(batch_output_dir)

    extra_jvm_options = run_modifications.extra_jvm_options

    # Batches of test classes will likely exist within the same targets: dedupe them.
    relevant_targets = {test_registry.get_owning_target(t) for t in batch}

    complete_classpath = OrderedSet()
    complete_classpath.update(run_modifications.classpath_prepend)
    complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
    complete_classpath.update(self.classpath(relevant_targets,
                                             classpath_product=classpath_product))

    distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

    # Override cmdline args with values from junit_test() target that specify concurrency:
    args = self._args(fail_fast, batch_output_dir) + [u'-xmlreport']

    if concurrency is not None:
      args = remove_arg(args, '-default-parallel')
      if concurrency == JUnitTests.CONCURRENCY_SERIAL:
        args = ensure_arg(args, '-default-concurrency', param='SERIAL')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

    if threads is not None:
      args = remove_arg(args, '-parallel-threads', has_param=True)
      args += ['-parallel-threads', str(threads)]

    batch_test_specs = [test.render_test_spec() for test in batch]
    # Batch the specs through an arg file to avoid command-length limits.
    with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
      with self._chroot(relevant_targets, workdir) as chroot:
        self.context.log.debug('CWD = {}'.format(chroot))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          subprocess_result = self._spawn_and_wait(
            executor=SubprocessExecutor(distribution),
            distribution=distribution,
            classpath=complete_classpath,
            main=JUnit.RUNNER_MAIN,
            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
            args=args + batch_tests,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=chroot,
            synthetic_jar_dir=batch_output_dir,
            create_synthetic_jar=self.synthetic_classpath,
          )
          self.context.log.debug('JUnit subprocess exited with result ({})'
                                 .format(subprocess_result))
          # abs() folds negative exit codes (signals) into the failure count.
          result += abs(subprocess_result)

        # Report per-test info for this batch regardless of pass/fail.
        tests_info = self.parse_test_info(batch_output_dir, parse_error_handler, ['classname'])
        for test_name, test_info in tests_info.items():
          test_item = Test(test_info['classname'], test_name)
          test_target = test_registry.get_owning_target(test_item)
          self.report_all_info_for_single_test(self.options_scope, test_target,
                                               test_name, test_info)

        if result != 0 and fail_fast:
          break

  if result == 0:
    return TestResult.rc(0)

  target_to_failed_test = parse_failed_targets(test_registry, output_dir, parse_error_handler)

  def sort_owning_target(t):
    return t.address.spec if t else None

  failed_targets = sorted(target_to_failed_test, key=sort_owning_target)
  error_message_lines = []
  if self._failure_summary:
    def render_owning_target(t):
      return t.address.reference() if t else '<Unknown Target>'

    for target in failed_targets:
      error_message_lines.append('\n{indent}{owner}'.format(indent=' ' * 4,
                                                            owner=render_owning_target(target)))
      for test in sorted(target_to_failed_test[target]):
        error_message_lines.append('{indent}{classname}#{methodname}'
                                   .format(indent=' ' * 8,
                                           classname=test.classname,
                                           methodname=test.methodname))

  error_message_lines.append(
    '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
    .format(main=JUnit.RUNNER_MAIN, code=result, failed=len(failed_targets),
            targets=pluralize(len(failed_targets), 'target'))
  )

  return TestResult(msg='\n'.join(error_message_lines), rc=result,
                    failed_targets=failed_targets)
def compile(self, ctx, args, classpath, upstream_analysis, settings, fatal_warnings,
            zinc_file_manager, javac_plugin_map, scalac_plugin_map):
  """Compile ``ctx.sources`` with ``javac``, supporting a hermetic execution strategy.

  Assembles the javac command line, batches the sources through an arg file, and
  runs javac either hermetically or as a local subprocess inside a workunit.

  :raises TaskError: if the subprocess javac exits non-zero.
  """
  # Prefer a strict version match for the platform; fall back to a loose match.
  try:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
  except DistributionLocator.Error:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

  javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

  javac_cmd.extend([
    '-classpath', ':'.join(classpath),
  ])

  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    javac_cmd.extend(settings_args)

  javac_cmd.extend([
    # TODO: support -release
    '-source', str(settings.source_level),
    '-target', str(settings.target_level),
  ])

  if self.execution_strategy == self.HERMETIC:
    javac_cmd.extend([
      # We need to strip the source root from our output files. Outputting to a directory, and
      # capturing that directory, does the job.
      # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
      # have a convenient way of making a directory in the output tree, so let's just use the
      # working directory as our output dir.
      # This also has the benefit of not needing to strip leading directories from the returned
      # snapshot.
      '-d', '.',
    ])
  else:
    javac_cmd.extend([
      '-d', ctx.classes_dir,
    ])

  javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

  javac_cmd.extend(args)

  if fatal_warnings:
    javac_cmd.extend(self.get_options().fatal_warnings_enabled_args)
  else:
    javac_cmd.extend(self.get_options().fatal_warnings_disabled_args)

  # Batch sources through an arg file to stay under OS command-length limits.
  with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
    javac_cmd.extend(batched_sources)

    if self.execution_strategy == self.HERMETIC:
      self._execute_hermetic_compile(javac_cmd, ctx)
    else:
      with self.context.new_workunit(name='javac',
                                     cmd=' '.join(javac_cmd),
                                     labels=[WorkUnitLabel.COMPILER]) as workunit:
        self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
        p = subprocess.Popen(javac_cmd,
                             stdout=workunit.output('stdout'),
                             stderr=workunit.output('stderr'))
        return_code = p.wait()
        workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
        if return_code:
          raise TaskError('javac exited with return code {rc}'.format(rc=return_code))
def _run_tests(self, test_registry, output_dir, coverage=None):
  """Run registered tests in batches grouped by shared execution properties.

  :param test_registry: registry mapping tests to their owning targets.
  :param output_dir: directory for reports and the synthetic jar.
  :param coverage: optional coverage engine supplying jvm options and classpath
    modifications.
  :raises ErrorWhileTesting: if any batch exits non-zero.
  """
  if coverage:
    extra_jvm_options = coverage.extra_jvm_options
    classpath_prepend = coverage.classpath_prepend
    classpath_append = coverage.classpath_append
  else:
    extra_jvm_options = []
    classpath_prepend = ()
    classpath_append = ()

  tests_by_properties = test_registry.index(
    lambda tgt: tgt.cwd if tgt.cwd is not None else self._working_dir,
    lambda tgt: tgt.test_platform,
    lambda tgt: tgt.payload.extra_jvm_options,
    lambda tgt: tgt.payload.extra_env_vars,
    lambda tgt: tgt.concurrency,
    lambda tgt: tgt.threads)

  # the below will be None if not set, and we'll default back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for properties, tests in tests_by_properties.items():
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
    for batch in self._partition(tests):
      # Batches of test classes will likely exist within the same targets: dedupe them.
      relevant_targets = {test_registry.get_owning_target(t) for t in batch}
      complete_classpath = OrderedSet()
      complete_classpath.update(classpath_prepend)
      complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
      complete_classpath.update(self.classpath(relevant_targets,
                                               classpath_product=classpath_product))
      complete_classpath.update(classpath_append)
      distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

      # Override cmdline args with values from junit_test() target that specify concurrency:
      args = self._args(output_dir) + [u'-xmlreport']

      if concurrency is not None:
        args = remove_arg(args, '-default-parallel')
        if concurrency == JUnitTests.CONCURRENCY_SERIAL:
          args = ensure_arg(args, '-default-concurrency', param='SERIAL')
        elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
        elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
        elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

      if threads is not None:
        args = remove_arg(args, '-parallel-threads', has_param=True)
        args += ['-parallel-threads', str(threads)]

      batch_test_specs = [test.render_test_spec() for test in batch]
      # Batch the specs through an arg file to avoid command-length limits.
      with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
        self.context.log.debug('CWD = {}'.format(workdir))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          # abs() folds negative exit codes (signals) into the failure count.
          result += abs(
            self._spawn_and_wait(
              executor=SubprocessExecutor(distribution),
              distribution=distribution,
              classpath=complete_classpath,
              main=JUnit.RUNNER_MAIN,
              jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
              args=args + batch_tests,
              workunit_factory=self.context.new_workunit,
              workunit_name='run',
              workunit_labels=[WorkUnitLabel.TEST],
              cwd=workdir,
              synthetic_jar_dir=output_dir,
              create_synthetic_jar=self.synthetic_classpath,
            ))

        if result != 0 and self._fail_fast:
          break

  if result != 0:
    def error_handler(parse_error):
      # Just log and move on since the result is only used to characterize failures, and raising
      # an error here would just distract from the underlying test failures.
      self.context.log.error('Error parsing test result file {path}: {cause}'.format(
        path=parse_error.junit_xml_path, cause=parse_error.cause))

    target_to_failed_test = parse_failed_targets(test_registry, output_dir, error_handler)
    failed_targets = sorted(target_to_failed_test, key=lambda t: t.address.spec)
    error_message_lines = []
    if self._failure_summary:
      for target in failed_targets:
        error_message_lines.append('\n{indent}{address}'.format(
          indent=' ' * 4, address=target.address.spec))
        for test in sorted(target_to_failed_test[target]):
          error_message_lines.append('{indent}{classname}#{methodname}'.format(
            indent=' ' * 8, classname=test.classname, methodname=test.methodname))
    error_message_lines.append(
      '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
      .format(main=JUnit.RUNNER_MAIN, code=result, failed=len(failed_targets),
              targets=pluralize(len(failed_targets), 'target')))
    raise ErrorWhileTesting('\n'.join(error_message_lines),
                            failed_targets=list(failed_targets))
def compile(
    self,
    ctx,
    args,
    dependency_classpath,
    upstream_analysis,
    settings,
    compiler_option_sets,
    zinc_file_manager,
    javac_plugin_map,
    scalac_plugin_map,
):
    """Compile the context's sources by invoking javac directly.

    Builds the javac argument list (platform args, plugin args, option sets,
    classpath, sources), batches the safe arguments through an argfile, and then
    either runs the compile hermetically or spawns javac as a subprocess.

    :param ctx: Compile context for the target being compiled; provides
        ``classes_dir``, ``sources`` and ``target``.
    :param args: Extra compiler args to append.
    :param dependency_classpath: Classpath entries for dependencies (each with a
        ``path`` attribute).
    :param settings: JVM platform settings (``args``, ``source_level``,
        ``target_level``) used to pick a distribution and source/target flags.
    :param compiler_option_sets: Option-set names merged into compiler args.
    :param javac_plugin_map: Mapping used to render ``-Xplugin`` style args.
    :param zinc_file_manager: Unused here; kept for interface compatibility.
    :param scalac_plugin_map: Unused here; kept for interface compatibility.
    :raises TaskError: if the subprocess javac exits non-zero.
    """
    # The output dir itself is a valid classpath entry, ahead of dependencies.
    classpath = (ctx.classes_dir.path, ) + tuple(ce.path for ce in dependency_classpath)

    if self.get_options().capture_classpath:
        self._record_compile_classpath(classpath, ctx.target, ctx.classes_dir.path)

    # Prefer a strict distribution match for the platform; fall back to a
    # non-strict match rather than failing outright.
    try:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
    except DistributionLocator.Error:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

    javac_args = []

    if settings.args:
        settings_args = settings.args
        # Platform args may reference $JAVA_HOME symbolically; substitute the
        # selected distribution's home before passing them to javac.
        if any("$JAVA_HOME" in a for a in settings.args):
            logger.debug(
                'Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'.format(
                    distribution.home))
            settings_args = (a.replace("$JAVA_HOME", distribution.home) for a in settings.args)
        javac_args.extend(settings_args)

    javac_args.extend([
        # TODO: support -release
        "-source",
        str(settings.source_level),
        "-target",
        str(settings.target_level),
    ])

    if self.execution_strategy == self.ExecutionStrategy.hermetic:
        javac_args.extend([
            # We need to strip the source root from our output files. Outputting to a directory, and
            # capturing that directory, does the job.
            # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
            # have a convenient way of making a directory in the output tree, so let's just use the
            # working directory as our output dir.
            # This also has the benefit of not needing to strip leading directories from the returned
            # snapshot.
            "-d",
            ".",
        ])
    else:
        javac_args.extend(["-d", ctx.classes_dir.path])

    javac_args.extend(self._javac_plugin_args(javac_plugin_map))

    javac_args.extend(args)

    compiler_option_sets_args = self.get_merged_args_for_compiler_option_sets(
        compiler_option_sets)
    javac_args.extend(compiler_option_sets_args)

    javac_args.extend(["-classpath", ":".join(classpath)])
    javac_args.extend(ctx.sources)

    # From https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#BHCJEIBB
    # Wildcards (*) aren’t allowed in these lists (such as for specifying *.java).
    # Use of the at sign (@) to recursively interpret files isn’t supported.
    # The -J options aren’t supported because they’re passed to the launcher,
    # which doesn’t support argument files.
    # Hence: -J args stay on the command line; everything else may go through
    # the argfile batching below.
    j_args = [j_arg for j_arg in javac_args if j_arg.startswith("-J")]
    safe_javac_args = list(filter(lambda x: x not in j_args, javac_args))

    with argfile.safe_args(safe_javac_args, self.get_options()) as batched_args:
        javac_cmd = [f"{distribution.real_home}/bin/javac"]
        javac_cmd.extend(j_args)
        javac_cmd.extend(batched_args)

        if self.execution_strategy == self.ExecutionStrategy.hermetic:
            self._execute_hermetic_compile(javac_cmd, ctx)
        else:
            with self.context.new_workunit(name="javac",
                                           cmd=" ".join(javac_cmd),
                                           labels=[WorkUnitLabel.COMPILER]) as workunit:
                self.context.log.debug(f"Executing {' '.join(javac_cmd)}")
                p = subprocess.Popen(
                    javac_cmd,
                    stdout=workunit.output("stdout"),
                    stderr=workunit.output("stderr"),
                )
                return_code = p.wait()
                workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
                if return_code:
                    raise TaskError(f"javac exited with return code {return_code}")
            # NOTE(review): the statements below are nested under the
            # non-hermetic branch here as reconstructed — confirm against the
            # original layout. They materialize extra post-compile resources
            # into the classes dir via the engine scheduler.
            classes_directory = Path(ctx.classes_dir.path).relative_to(get_buildroot())
            self.context._scheduler.materialize_directory(
                DirectoryToMaterialize(
                    self.post_compile_extra_resources_digest(
                        ctx, prepend_post_merge_relative_path=False),
                    path_prefix=str(classes_directory),
                ),
            )
    self._create_context_jar(ctx)
def run_tests(self, fail_fast, test_targets, output_dir, coverage, complete_test_registry):
    """Run JUnit over the given test targets in batches and report results.

    Filters the registry down to ``test_targets``, instruments for coverage,
    then spawns one JUnit runner subprocess per batch (grouped by workdir,
    platform, jvm options, env vars, concurrency and threads). Accumulates the
    subprocess exit codes; on any failure, parses the XML reports to attribute
    failed tests to owning targets and builds a failure summary.

    :param fail_fast: If true, stop after the first failing batch.
    :param test_targets: The candidate test targets to run.
    :param output_dir: Directory for XML reports and synthetic jars.
    :param coverage: Coverage engine; supplies instrumentation, extra jvm
        options and classpath prepends per batch.
    :param complete_test_registry: Registry of all known tests, filtered here.
    :returns: ``TestResult.successful`` on success, otherwise a ``TestResult``
        carrying the failure message, return code and failed targets.
    """
    test_registry = complete_test_registry.filter(test_targets)
    if test_registry.empty:
        return TestResult.successful

    coverage.instrument(output_dir)

    def parse_error_handler(parse_error):
        # Just log and move on since the result is only used to characterize failures, and raising
        # an error here would just distract from the underlying test failures.
        self.context.log.error(
            "Error parsing test result file {path}: {cause}".format(
                path=parse_error.xml_path, cause=parse_error.cause
            )
        )

    # The 'instrument_classpath' product below will be `None` if not set, and we'll default
    # back to runtime_classpath
    classpath_product = self.context.products.get_data("instrument_classpath")

    result = 0
    for batch_id, (properties, batch) in enumerate(self._iter_batches(test_registry)):
        (
            workdir,
            platform,
            target_jvm_options,
            target_env_vars,
            concurrency,
            threads,
        ) = properties

        # When batched, give each batch its own report/output subdirectory.
        batch_output_dir = output_dir
        if self._batched:
            batch_output_dir = os.path.join(batch_output_dir, f"batch-{batch_id}")

        run_modifications = coverage.run_modifications(batch_output_dir)
        self.context.log.debug(f"run_modifications: {run_modifications}")
        extra_jvm_options = run_modifications.extra_jvm_options

        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = {test_registry.get_owning_target(t) for t in batch}

        # Classpath order: coverage prepends, then the JUnit runner itself,
        # then the targets' own classpath.
        complete_classpath = OrderedSet()
        complete_classpath.update(run_modifications.classpath_prepend)
        complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
        complete_classpath.update(
            self.classpath(relevant_targets, classpath_product=classpath_product)
        )

        distribution = self.preferred_jvm_distribution([platform], self._strict_jvm_version)

        # Override cmdline args with values from junit_test() target that specify concurrency:
        args = self._args(fail_fast, batch_output_dir) + ["-xmlreport"]

        if concurrency is not None:
            args = remove_arg(args, "-default-parallel")
            if concurrency == JUnitTests.CONCURRENCY_SERIAL:
                args = ensure_arg(args, "-default-concurrency", param="SERIAL")
            elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
                args = ensure_arg(args, "-default-concurrency", param="PARALLEL_CLASSES")
            elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
                args = ensure_arg(args, "-default-concurrency", param="PARALLEL_METHODS")
            elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
                args = ensure_arg(
                    args, "-default-concurrency", param="PARALLEL_CLASSES_AND_METHODS"
                )

        if threads is not None:
            args = remove_arg(args, "-parallel-threads", has_param=True)
            args += ["-parallel-threads", str(threads)]

        batch_test_specs = [test.render_test_spec() for test in batch]
        with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
            with self.chroot(relevant_targets, workdir) as chroot:
                self.context.log.debug(f"CWD = {chroot}")
                self.context.log.debug(f"platform = {platform}")
                with environment_as(**dict(target_env_vars)):
                    subprocess_result = self.spawn_and_wait(
                        relevant_targets,
                        executor=SubprocessExecutor(distribution),
                        distribution=distribution,
                        classpath=complete_classpath,
                        main=JUnit.RUNNER_MAIN,
                        jvm_options=self.jvm_options
                        + list(platform.jvm_options)
                        + extra_jvm_options
                        + list(target_jvm_options),
                        args=args + batch_tests,
                        workunit_factory=self.context.new_workunit,
                        workunit_name="run",
                        workunit_labels=[WorkUnitLabel.TEST],
                        cwd=chroot,
                        synthetic_jar_dir=batch_output_dir,
                        create_synthetic_jar=self.synthetic_classpath,
                    )
                    self.context.log.debug(
                        "JUnit subprocess exited with result ({})".format(subprocess_result)
                    )
                    # abs() so that negative (signal-killed) exit codes still
                    # accumulate as failures.
                    result += abs(subprocess_result)

                # Report per-test timing/info keyed by classname.
                tests_info = self.parse_test_info(
                    batch_output_dir, parse_error_handler, ["classname"]
                )
                for test_name, test_info in tests_info.items():
                    test_item = Test(test_info["classname"], test_name)
                    test_target = test_registry.get_owning_target(test_item)
                    self.report_all_info_for_single_test(
                        self.options_scope, test_target, test_name, test_info
                    )

                if result != 0 and fail_fast:
                    break

    if result == 0:
        return TestResult.successful

    # NB: If the TestRegistry fails to find the owning target of a failed test, the target key in
    # this dictionary will be None: helper methods in this block account for that.
    target_to_failed_test = parse_failed_targets(test_registry, output_dir, parse_error_handler)

    def sort_owning_target(t):
        # None keys (unknown owner) sort first via the empty string.
        return t.address.spec if t else ""

    failed_targets = sorted(target_to_failed_test, key=sort_owning_target)
    error_message_lines = []
    if self._failure_summary:

        def render_owning_target(t):
            return t.address.reference() if t else "<Unknown Target>"

        for target in failed_targets:
            error_message_lines.append(f"\n{(' ' * 4)}{render_owning_target(target)}")
            for test in sorted(target_to_failed_test[target]):
                error_message_lines.append(f"{' ' * 8}{test.classname}#{test.methodname}")
    error_message_lines.append(
        "\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.".format(
            main=JUnit.RUNNER_MAIN,
            code=result,
            failed=len(failed_targets),
            targets=pluralize(len(failed_targets), "target"),
        )
    )
    return TestResult(
        msg="\n".join(error_message_lines), rc=result, failed_targets=failed_targets
    )
def compile(self, ctx, args, dependency_classpath, upstream_analysis, settings,
            compiler_option_sets, zinc_file_manager, javac_plugin_map,
            scalac_plugin_map):
    """Compile the context's sources by invoking javac directly.

    Builds the javac command (classpath, platform args, source/target levels,
    output dir, plugin args, option sets), batches the sources through an
    argfile, then either runs hermetically or spawns javac as a subprocess.

    :param ctx: Compile context; provides ``classes_dir``, ``sources``,
        ``target``.
    :param args: Extra compiler args to append.
    :param dependency_classpath: Dependency classpath entries (each with a
        ``path`` attribute).
    :param settings: JVM platform settings (``args``, ``source_level``,
        ``target_level``).
    :param compiler_option_sets: Option-set names merged into compiler args.
    :param javac_plugin_map: Mapping used to render javac plugin args.
    :param upstream_analysis: Unused here; kept for interface compatibility.
    :param zinc_file_manager: Unused here; kept for interface compatibility.
    :param scalac_plugin_map: Unused here; kept for interface compatibility.
    :raises TaskError: if the subprocess javac exits non-zero.
    """
    # The output dir itself is a valid classpath entry, ahead of dependencies.
    classpath = (ctx.classes_dir.path, ) + tuple(ce.path for ce in dependency_classpath)

    if self.get_options().capture_classpath:
        self._record_compile_classpath(classpath, ctx.target, ctx.classes_dir.path)

    # Prefer a strict distribution match; fall back to non-strict rather than fail.
    try:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
    except DistributionLocator.Error:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

    javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

    javac_cmd.extend([
        '-classpath', ':'.join(classpath),
    ])

    if settings.args:
        settings_args = settings.args
        # Platform args may reference $JAVA_HOME symbolically; substitute the
        # selected distribution's home before passing them to javac.
        if any('$JAVA_HOME' in a for a in settings.args):
            logger.debug(
                'Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'.format(
                    distribution.home))
            settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
        javac_cmd.extend(settings_args)

    javac_cmd.extend([
        # TODO: support -release
        '-source', str(settings.source_level),
        '-target', str(settings.target_level),
    ])

    if self.execution_strategy == self.HERMETIC:
        javac_cmd.extend([
            # We need to strip the source root from our output files. Outputting to a directory, and
            # capturing that directory, does the job.
            # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
            # have a convenient way of making a directory in the output tree, so let's just use the
            # working directory as our output dir.
            # This also has the benefit of not needing to strip leading directories from the returned
            # snapshot.
            '-d', '.',
        ])
    else:
        javac_cmd.extend([
            '-d', ctx.classes_dir.path,
        ])

    javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

    javac_cmd.extend(args)

    compiler_option_sets_args = self.get_merged_args_for_compiler_option_sets(
        compiler_option_sets)
    javac_cmd.extend(compiler_option_sets_args)

    with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
        javac_cmd.extend(batched_sources)

        if self.execution_strategy == self.HERMETIC:
            self._execute_hermetic_compile(javac_cmd, ctx)
        else:
            with self.context.new_workunit(name='javac',
                                           cmd=' '.join(javac_cmd),
                                           labels=[WorkUnitLabel.COMPILER]) as workunit:
                self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
                p = subprocess.Popen(javac_cmd,
                                     stdout=workunit.output('stdout'),
                                     stderr=workunit.output('stderr'))
                return_code = p.wait()
                workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
                if return_code:
                    raise TaskError('javac exited with return code {rc}'.format(
                        rc=return_code))
def compile(self, ctx, args, classpath, upstream_analysis, settings,
            fatal_warnings, zinc_file_manager, javac_plugin_map,
            scalac_plugin_map):
    """Compile the context's sources by spawning javac as a subprocess.

    Builds the javac command (classpath, platform args, output dir,
    source/target levels, plugin args, fatal-warnings flags), batches the
    sources through an argfile, and runs javac inside a compiler workunit.

    :param ctx: Compile context; provides ``classes_dir`` and ``sources``.
    :param args: Extra compiler args to append.
    :param classpath: Classpath entries joined with ':' for ``-classpath``.
    :param settings: JVM platform settings (``args``, ``source_level``,
        ``target_level``).
    :param fatal_warnings: If true, append the configured
        fatal-warnings-enabled args, otherwise the disabled args.
    :param javac_plugin_map: Mapping used to render javac plugin args.
    :param upstream_analysis: Unused here; kept for interface compatibility.
    :param zinc_file_manager: Unused here; kept for interface compatibility.
    :param scalac_plugin_map: Unused here; kept for interface compatibility.
    :raises TaskError: if javac exits non-zero.
    """
    # Prefer a strict distribution match; fall back to non-strict rather than fail.
    try:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
    except DistributionLocator.Error:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

    javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

    javac_cmd.extend([
        '-classpath', ':'.join(classpath),
    ])

    if settings.args:
        settings_args = settings.args
        # Platform args may reference $JAVA_HOME symbolically; substitute the
        # selected distribution's home before passing them to javac.
        if any('$JAVA_HOME' in a for a in settings.args):
            logger.debug(
                'Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'.format(
                    distribution.home))
            settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
        javac_cmd.extend(settings_args)

    javac_cmd.extend([
        '-d', ctx.classes_dir,
        # TODO: support -release
        '-source', str(settings.source_level),
        '-target', str(settings.target_level),
    ])

    javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

    javac_cmd.extend(args)

    if fatal_warnings:
        javac_cmd.extend(self.get_options().fatal_warnings_enabled_args)
    else:
        javac_cmd.extend(self.get_options().fatal_warnings_disabled_args)

    with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
        javac_cmd.extend(batched_sources)

        with self.context.new_workunit(name='javac',
                                       cmd=' '.join(javac_cmd),
                                       labels=[WorkUnitLabel.COMPILER]) as workunit:
            self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
            p = subprocess.Popen(javac_cmd,
                                 stdout=workunit.output('stdout'),
                                 stderr=workunit.output('stderr'))
            return_code = p.wait()
            workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
            if return_code:
                raise TaskError('javac exited with return code {rc}'.format(
                    rc=return_code))
def _run_tests(self, test_registry, output_dir, coverage=None):
    """Run JUnit over the registered tests, partitioned by target properties.

    Indexes tests by (cwd, platform, jvm options, env vars, concurrency,
    threads), partitions each group into batches, and spawns one JUnit runner
    subprocess per batch. Exit codes are accumulated; if any batch failed, the
    XML reports are parsed to attribute failures to targets and a summary
    error is raised.

    :param test_registry: Registry mapping tests to owning targets.
    :param output_dir: Directory for XML reports and synthetic jars.
    :param coverage: Optional coverage engine supplying extra jvm options and
        classpath prepend/append entries.
    :raises TestFailedTaskError: if any test batch exited non-zero.
    """
    if coverage:
        extra_jvm_options = coverage.extra_jvm_options
        classpath_prepend = coverage.classpath_prepend
        classpath_append = coverage.classpath_append
    else:
        extra_jvm_options = []
        classpath_prepend = ()
        classpath_append = ()

    tests_by_properties = test_registry.index(
        lambda tgt: tgt.cwd if tgt.cwd is not None else self._working_dir,
        lambda tgt: tgt.test_platform,
        lambda tgt: tgt.payload.extra_jvm_options,
        lambda tgt: tgt.payload.extra_env_vars,
        lambda tgt: tgt.concurrency,
        lambda tgt: tgt.threads)

    # the below will be None if not set, and we'll default back to runtime_classpath
    classpath_product = self.context.products.get_data('instrument_classpath')

    result = 0
    for properties, tests in tests_by_properties.items():
        (workdir, platform, target_jvm_options, target_env_vars, concurrency,
         threads) = properties
        for batch in self._partition(tests):
            # Batches of test classes will likely exist within the same targets: dedupe them.
            relevant_targets = {test_registry.get_owning_target(t) for t in batch}
            # Classpath order: coverage prepends, the JUnit runner itself, the
            # targets' classpath, then coverage appends.
            complete_classpath = OrderedSet()
            complete_classpath.update(classpath_prepend)
            complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
            complete_classpath.update(self.classpath(relevant_targets,
                                                     classpath_product=classpath_product))
            complete_classpath.update(classpath_append)
            distribution = JvmPlatform.preferred_jvm_distribution([platform],
                                                                  self._strict_jvm_version)

            # Override cmdline args with values from junit_test() target that specify concurrency:
            args = self._args(output_dir) + [u'-xmlreport']

            if concurrency is not None:
                args = remove_arg(args, '-default-parallel')
                if concurrency == JUnitTests.CONCURRENCY_SERIAL:
                    args = ensure_arg(args, '-default-concurrency', param='SERIAL')
                elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
                    args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
                elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
                    args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
                elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
                    args = ensure_arg(args, '-default-concurrency',
                                      param='PARALLEL_CLASSES_AND_METHODS')

            if threads is not None:
                args = remove_arg(args, '-parallel-threads', has_param=True)
                args += ['-parallel-threads', str(threads)]

            batch_test_specs = [test.render_test_spec() for test in batch]
            with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
                self.context.log.debug('CWD = {}'.format(workdir))
                self.context.log.debug('platform = {}'.format(platform))
                with environment_as(**dict(target_env_vars)):
                    # abs() so that negative (signal-killed) exit codes still
                    # accumulate as failures.
                    result += abs(self._spawn_and_wait(
                        executor=SubprocessExecutor(distribution),
                        distribution=distribution,
                        classpath=complete_classpath,
                        main=JUnit.RUNNER_MAIN,
                        jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
                        args=args + batch_tests,
                        workunit_factory=self.context.new_workunit,
                        workunit_name='run',
                        workunit_labels=[WorkUnitLabel.TEST],
                        cwd=workdir,
                        synthetic_jar_dir=output_dir,
                        create_synthetic_jar=self.synthetic_classpath,
                    ))

                if result != 0 and self._fail_fast:
                    break

    if result != 0:
        def error_handler(parse_error):
            # Just log and move on since the result is only used to characterize failures, and raising
            # an error here would just distract from the underlying test failures.
            self.context.log.error('Error parsing test result file {path}: {cause}'
                                   .format(path=parse_error.junit_xml_path,
                                           cause=parse_error.cause))

        target_to_failed_test = parse_failed_targets(test_registry, output_dir, error_handler)
        failed_targets = sorted(target_to_failed_test, key=lambda t: t.address.spec)
        error_message_lines = []
        if self._failure_summary:
            for target in failed_targets:
                error_message_lines.append('\n{indent}{address}'.format(
                    indent=' ' * 4, address=target.address.spec))
                for test in sorted(target_to_failed_test[target]):
                    error_message_lines.append('{indent}{classname}#{methodname}'
                                               .format(indent=' ' * 8,
                                                       classname=test.classname,
                                                       methodname=test.methodname))
        error_message_lines.append(
            '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
            .format(main=JUnit.RUNNER_MAIN, code=result, failed=len(failed_targets),
                    targets=pluralize(len(failed_targets), 'target'))
        )
        raise TestFailedTaskError('\n'.join(error_message_lines),
                                  failed_targets=list(failed_targets))