def _local_jvm_distribution(settings=None):
  settings_args = [settings] if settings else []
  try:
    local_distribution = JvmPlatform.preferred_jvm_distribution(settings_args, strict=True)
  except DistributionLocator.Error:
    local_distribution = JvmPlatform.preferred_jvm_distribution(settings_args, strict=False)
  return local_distribution
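The strict-then-lenient lookup above recurs throughout this section (see also _get_zinc_arguments and the javac compile methods below). A minimal sketch of a shared helper that could deduplicate the pattern; the helper name is hypothetical, while JvmPlatform.preferred_jvm_distribution and DistributionLocator.Error are the same calls already used above.

def _preferred_distribution_with_fallback(settings_args):
  # Prefer a JVM that strictly matches the requested platform; if none is
  # installed, fall back to any compatible (non-strict) match.
  try:
    return JvmPlatform.preferred_jvm_distribution(settings_args, strict=True)
  except DistributionLocator.Error:
    return JvmPlatform.preferred_jvm_distribution(settings_args, strict=False)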
def _get_zinc_arguments(settings):
  """Extracts and formats the zinc arguments given in the jvm platform settings.

  This is responsible for the symbol substitution which replaces $JAVA_HOME with the path to an
  appropriate jvm distribution.

  :param settings: The jvm platform settings from which to extract the arguments.
  :type settings: :class:`JvmPlatformSettings`
  """
  zinc_args = [
    '-C-source', '-C{}'.format(settings.source_level),
    '-C-target', '-C{}'.format(settings.target_level),
  ]
  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      try:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
      except DistributionLocator.Error:
        distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    zinc_args.extend(settings_args)
  return zinc_args
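A quick illustration of the output, assuming a hypothetical stub standing in for JvmPlatformSettings: with no extra args, only the -C-prefixed source/target flags are produced, and no distribution lookup happens.

class _StubSettings(object):
  # Hypothetical stand-in for JvmPlatformSettings; only illustrates the shape.
  source_level = '1.8'
  target_level = '1.8'
  args = []

assert _get_zinc_arguments(_StubSettings()) == ['-C-source', '-C1.8', '-C-target', '-C1.8']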
def compile(self, ctx, args, classpath, upstream_analysis, settings, fatal_warnings,
            zinc_file_manager, javac_plugin_map, scalac_plugin_map):
  try:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
  except DistributionLocator.Error:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

  javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

  javac_cmd.extend([
    '-classpath', ':'.join(classpath),
  ])

  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    javac_cmd.extend(settings_args)

  javac_cmd.extend([
    '-d', ctx.classes_dir,
    # TODO: support -release
    '-source', str(settings.source_level),
    '-target', str(settings.target_level),
  ])

  javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

  javac_cmd.extend(args)

  if fatal_warnings:
    javac_cmd.extend(self.get_options().fatal_warnings_enabled_args)
  else:
    javac_cmd.extend(self.get_options().fatal_warnings_disabled_args)

  with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
    javac_cmd.extend(batched_sources)

    with self.context.new_workunit(name='javac',
                                   cmd=' '.join(javac_cmd),
                                   labels=[WorkUnitLabel.COMPILER]) as workunit:
      self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
      p = subprocess.Popen(javac_cmd,
                           stdout=workunit.output('stdout'),
                           stderr=workunit.output('stderr'))
      return_code = p.wait()
      workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
      if return_code:
        raise TaskError('javac exited with return code {rc}'.format(rc=return_code))
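For orientation, the command this method assembles looks roughly like the following; paths and levels are hypothetical, and argfile batching may place the source list behind an @argfile.

# /path/to/jdk/bin/javac -classpath dep1.jar:dep2.jar -d <classes_dir> \
#   -source 1.8 -target 1.8 <plugin args> <extra args> Foo.java Bar.java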
def platform(self):
  """Platform associated with this target.

  :return: The jvm platform object.
  :rtype: JvmPlatformSettings
  """
  return JvmPlatform.global_instance().get_platform_for_target(self)
def execute(self):
  if JvmPlatform.global_instance().get_options().compiler != self.compiler_name:
    # If the requested compiler is not the one supported by this task, bail early.
    return

  # In case we have no relevant targets and return early, create the requested product maps.
  self.create_empty_extra_products()

  relevant_targets = list(self.context.targets(predicate=self.select))

  if not relevant_targets:
    return

  # Clone the compile_classpath to the runtime_classpath.
  classpath_product = self.create_runtime_classpath()

  fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
    classpath_product)
  # Note, JVM targets are validated (`vts.update()`) as they succeed. As a result,
  # we begin writing artifacts out to the cache immediately instead of waiting for
  # all targets to finish.
  with self.invalidated(relevant_targets,
                        invalidate_dependents=True,
                        fingerprint_strategy=fingerprint_strategy,
                        topological_order=True) as invalidation_check:
    compile_contexts = {vt.target: self.create_compile_context(vt.target, vt.results_dir)
                        for vt in invalidation_check.all_vts}

    self.do_compile(
      invalidation_check,
      compile_contexts,
      classpath_product,
    )

    if not self.get_options().use_classpath_jars:
      # Once compilation has completed, replace the classpath entry for each target with
      # its jar'd representation.
      for ccs in compile_contexts.values():
        cc = self.select_runtime_context(ccs)
        for conf in self._confs:
          classpath_product.remove_for_target(cc.target, [(conf, cc.classes_dir.path)])
          classpath_product.add_for_target(cc.target, [(conf, cc.jar_file.path)])
def execute(self):
  if self.goal not in JvmPrepCommand.goals():
    raise AssertionError('Got goal "{}". Expected goal to be one of {}'.format(
      self.goal, JvmPrepCommand.goals()))

  targets = self.context.targets(postorder=True, predicate=self.runnable_prep_cmd)

  compile_classpath = self.context.products.get_data('compile_classpath')
  classpath_products = self.context.products.get_data('runtime_classpath',
                                                      compile_classpath.copy)

  with self.context.new_workunit(name='jvm_prep_command',
                                 labels=[WorkUnitLabel.PREP]) as workunit:
    for target in targets:
      distribution = JvmPlatform.preferred_jvm_distribution([target.platform])
      executor = SubprocessExecutor(distribution)

      mainclass = target.payload.get_field_value('mainclass')
      args = target.payload.get_field_value('args', [])
      target_jvm_options = target.payload.get_field_value('jvm_options', [])
      cp = list(ClasspathUtil.classpath(target.closure(), classpath_products))
      if not cp:
        raise TaskError('target {} has no classpath. (Add dependencies= parameter?)'
                        .format(target.address.spec))
      self.context.log.info('Running prep command for {}'.format(target.address.spec))
      returncode = distribution.execute_java(
        executor=executor,
        classpath=cp,
        main=mainclass,
        jvm_options=target_jvm_options,
        args=args,
        workunit_factory=self.context.new_workunit,
        workunit_name='run',
        workunit_labels=[WorkUnitLabel.PREP],
      )

      workunit.set_outcome(WorkUnit.FAILURE if returncode else WorkUnit.SUCCESS)
      if returncode:
        raise TaskError('RunJvmPrepCommand failed to run {}'.format(mainclass))
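An illustrative BUILD definition this task would consume; the target name, class, and values are hypothetical, but the fields (goal, mainclass, args, jvm_options, dependencies) are exactly the payload fields read above.

# jvm_prep_command(
#   name='warm-up-db',
#   goal='test',
#   mainclass='com.example.prep.Main',
#   args=['--create-schema'],
#   jvm_options=['-Xmx256m'],
#   dependencies=[':prep-lib'],
# )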
def _run_tests(self, tests_to_targets):
  if self._coverage:
    extra_jvm_options = self._coverage.extra_jvm_options
    classpath_prepend = self._coverage.classpath_prepend
    classpath_append = self._coverage.classpath_append
  else:
    extra_jvm_options = []
    classpath_prepend = ()
    classpath_append = ()

  tests_by_properties = self._tests_by_properties(
    tests_to_targets,
    self._infer_workdir,
    lambda target: target.test_platform,
    lambda target: target.payload.extra_jvm_options,
    lambda target: target.payload.extra_env_vars,
    lambda target: target.concurrency,
    lambda target: target.threads)

  # the below will be None if not set, and we'll default back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for properties, tests in tests_by_properties.items():
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
    for batch in self._partition(tests):
      # Batches of test classes will likely exist within the same targets: dedupe them.
      relevant_targets = set(map(tests_to_targets.get, batch))
      complete_classpath = OrderedSet()
      complete_classpath.update(classpath_prepend)
      complete_classpath.update(self.tool_classpath('junit'))
      complete_classpath.update(self.classpath(relevant_targets,
                                               classpath_product=classpath_product))
      complete_classpath.update(classpath_append)
      distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

      # Override cmdline args with values from junit_test() target that specify concurrency:
      args = self._args + [u'-xmlreport']
      if concurrency is not None:
        args = remove_arg(args, '-default-parallel')
        if concurrency == junit_tests.CONCURRENCY_SERIAL:
          args = ensure_arg(args, '-default-concurrency', param='SERIAL')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
        elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

      if threads is not None:
        args = remove_arg(args, '-parallel-threads', has_param=True)
        args += ['-parallel-threads', str(threads)]

      with argfile.safe_args(batch, self.get_options()) as batch_tests:
        self.context.log.debug('CWD = {}'.format(workdir))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          result += abs(self._spawn_and_wait(
            executor=SubprocessExecutor(distribution),
            distribution=distribution,
            classpath=complete_classpath,
            main=JUnitRun._MAIN,
            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
            args=args + batch_tests,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=workdir,
            synthetic_jar_dir=self.workdir,
            create_synthetic_jar=self.synthetic_classpath,
          ))

      if result != 0 and self._fail_fast:
        break

  if result != 0:
    failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
    failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
    error_message_lines = []
    if self._failure_summary:
      for target in failed_targets:
        error_message_lines.append('\n{0}{1}'.format(' ' * 4, target.address.spec))
        for test in sorted(failed_targets_and_tests[target]):
          error_message_lines.append('{0}{1}'.format(' ' * 8, test))
    error_message_lines.append(
      '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
      .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
              targets=pluralize(len(failed_targets), 'target'))
    )
    raise TestFailedTaskError('\n'.join(error_message_lines),
                              failed_targets=list(failed_targets))
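To clarify the concurrency overrides above, here is a behavioral sketch of the two argument-list helpers used there; the real implementations live elsewhere in pants, so these bodies are assumptions that only capture the observable behavior relied on above.

def remove_arg(args, arg, has_param=False):
  # Return a copy of args with `arg` removed (and its parameter, when has_param=True).
  out, i = [], 0
  while i < len(args):
    if args[i] == arg:
      i += 2 if has_param else 1
    else:
      out.append(args[i])
      i += 1
  return out

def ensure_arg(args, arg, param):
  # Return a copy of args where `arg` appears exactly once, with the value `param`.
  return remove_arg(args, arg, has_param=True) + [arg, param]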
def console_output(self, targets):
  targets_map = {}
  resource_target_map = {}
  classpath_products = (self.context.products.get_data("compile_classpath")
                        if self.get_options().libraries else None)
  python_interpreter_targets_mapping = defaultdict(list)

  def process_target(current_target):
    """
    :type current_target:pants.build_graph.target.Target
    """

    def get_target_type(target):
      if target.is_test:
        return Export.SourceRootTypes.TEST
      else:
        if (isinstance(target, Resources) and
            target in resource_target_map and
            resource_target_map[target].is_test):
          return Export.SourceRootTypes.TEST_RESOURCE
        elif isinstance(target, Resources):
          return Export.SourceRootTypes.RESOURCE
        else:
          return Export.SourceRootTypes.SOURCE

    info = {
      "targets": [],
      "libraries": [],
      "roots": [],
      "target_type": get_target_type(current_target),
      "is_code_gen": current_target.is_codegen,
      "pants_target_type": self._get_pants_target_alias(type(current_target)),
    }

    if not current_target.is_synthetic:
      info["globs"] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info["sources"] = list(current_target.sources_relative_to_buildroot())

    if isinstance(current_target, PythonRequirementLibrary):
      reqs = current_target.payload.get_field_value("requirements", set())
      """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
      info["requirements"] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
      interpreter_for_target = self.select_interpreter_for_targets([current_target])
      if interpreter_for_target is None:
        raise TaskError("Unable to find suitable interpreter for {}"
                        .format(current_target.address))
      python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
      info["python_interpreter"] = str(interpreter_for_target.identity)

    def iter_transitive_jars(jar_lib):
      """
      :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
      :rtype: :class:`collections.Iterator` of
              :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
      """
      if classpath_products:
        jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
        for _, jar_entry in jar_products:
          coordinate = jar_entry.coordinate
          # We drop classifier and type_ since those fields are represented in the global
          # libraries dict and here we just want the key into that dict (see `_jar_id`).
          yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = OrderedSet(iter_transitive_jars(current_target))
    for dep in current_target.dependencies:
      info["targets"].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(iter_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info["targets"].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info["excludes"] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info["platform"] = current_target.platform.name

    info["roots"] = map(
      lambda (source_root, package_prefix): {"source_root": source_root,
                                             "package_prefix": package_prefix},
      self._source_roots_for_target(current_target),
    )

    if classpath_products:
      info["libraries"] = [self._jar_id(lib) for lib in target_libraries]
    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  jvm_platforms_map = {
    "default_platform": JvmPlatform.global_instance().default_platform.name,
    "platforms": {
      str(platform_name): {
        "target_level": str(platform.target_level),
        "source_level": str(platform.source_level),
        "args": platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    },
  }

  graph_info = {
    "version": self.DEFAULT_EXPORT_VERSION,
    "targets": targets_map,
    "jvm_platforms": jvm_platforms_map,
  }

  jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
  if jvm_distributions:
    graph_info["jvm_distributions"] = jvm_distributions

  if classpath_products:
    graph_info["libraries"] = self._resolve_jars_info(targets, classpath_products)

  if python_interpreter_targets_mapping:
    interpreters = self.interpreter_cache.select_interpreter(
      python_interpreter_targets_mapping.keys())
    default_interpreter = interpreters[0]
    interpreters_info = {}
    for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
      chroot = self.cached_chroot(interpreter=interpreter,
                                  pex_info=PexInfo.default(),
                                  targets=targets)
      interpreters_info[str(interpreter.identity)] = {"binary": interpreter.binary,
                                                      "chroot": chroot.path()}
    graph_info["python_setup"] = {
      "default_interpreter": str(default_interpreter.identity),
      "interpreters": interpreters_info,
    }

  if self.format:
    return json.dumps(graph_info, indent=4, separators=(",", ": ")).splitlines()
  else:
    return [json.dumps(graph_info)]
def preferred_jvm_distribution_for_targets(self, targets):
  return JvmPlatform.preferred_jvm_distribution([target.platform for target in targets
                                                 if isinstance(target, JvmTarget)],
                                                self._strict_jvm_version)
def get_preferred_distribution(platform, strict):
  try:
    return JvmPlatform.preferred_jvm_distribution([platform], strict=strict)
  except DistributionLocator.Error:
    return None
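Since this variant returns None instead of raising, callers can chain the two modes to reproduce the strict-then-lenient behavior seen elsewhere in this section, e.g. (platform object hypothetical):

# dist = (get_preferred_distribution(platform, strict=True)
#         or get_preferred_distribution(platform, strict=False))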
def run_tests(self, fail_fast, test_targets, output_dir, coverage):
  test_registry = self._collect_test_targets(test_targets)
  if test_registry.empty:
    return TestResult.rc(0)

  coverage.instrument(output_dir)

  def parse_error_handler(parse_error):
    # Just log and move on since the result is only used to characterize failures, and raising
    # an error here would just distract from the underlying test failures.
    self.context.log.error('Error parsing test result file {path}: {cause}'
                           .format(path=parse_error.xml_path, cause=parse_error.cause))

  # The 'instrument_classpath' product below will be `None` if not set, and we'll default
  # back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for batch_id, (properties, batch) in enumerate(self._iter_batches(test_registry)):
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties

    batch_output_dir = output_dir
    if self._batched:
      batch_output_dir = os.path.join(batch_output_dir, 'batch-{}'.format(batch_id))

    run_modifications = coverage.run_modifications(batch_output_dir)

    extra_jvm_options = run_modifications.extra_jvm_options

    # Batches of test classes will likely exist within the same targets: dedupe them.
    relevant_targets = {test_registry.get_owning_target(t) for t in batch}

    complete_classpath = OrderedSet()
    complete_classpath.update(run_modifications.classpath_prepend)
    complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
    complete_classpath.update(self.classpath(relevant_targets,
                                             classpath_product=classpath_product))

    distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

    # Override cmdline args with values from junit_test() target that specify concurrency:
    args = self._args(fail_fast, batch_output_dir) + [u'-xmlreport']

    if concurrency is not None:
      args = remove_arg(args, '-default-parallel')
      if concurrency == JUnitTests.CONCURRENCY_SERIAL:
        args = ensure_arg(args, '-default-concurrency', param='SERIAL')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
      elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
        args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

    if threads is not None:
      args = remove_arg(args, '-parallel-threads', has_param=True)
      args += ['-parallel-threads', str(threads)]

    batch_test_specs = [test.render_test_spec() for test in batch]
    with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
      with self._chroot(relevant_targets, workdir) as chroot:
        self.context.log.debug('CWD = {}'.format(chroot))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          subprocess_result = self._spawn_and_wait(
            executor=SubprocessExecutor(distribution),
            distribution=distribution,
            classpath=complete_classpath,
            main=JUnit.RUNNER_MAIN,
            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
            args=args + batch_tests,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=chroot,
            synthetic_jar_dir=batch_output_dir,
            create_synthetic_jar=self.synthetic_classpath,
          )
          self.context.log.debug('JUnit subprocess exited with result ({})'
                                 .format(subprocess_result))
          result += abs(subprocess_result)

    tests_info = self.parse_test_info(batch_output_dir, parse_error_handler, ['classname'])
    for test_name, test_info in tests_info.items():
      test_item = Test(test_info['classname'], test_name)
      test_target = test_registry.get_owning_target(test_item)
      self.report_all_info_for_single_test(self.options_scope, test_target,
                                           test_name, test_info)

    if result != 0 and fail_fast:
      break

  if result == 0:
    return TestResult.rc(0)

  target_to_failed_test = parse_failed_targets(test_registry, output_dir, parse_error_handler)

  def sort_owning_target(t):
    return t.address.spec if t else None

  failed_targets = sorted(target_to_failed_test, key=sort_owning_target)
  error_message_lines = []
  if self._failure_summary:
    def render_owning_target(t):
      return t.address.reference() if t else '<Unknown Target>'

    for target in failed_targets:
      error_message_lines.append('\n{indent}{owner}'.format(indent=' ' * 4,
                                                            owner=render_owning_target(target)))
      for test in sorted(target_to_failed_test[target]):
        error_message_lines.append('{indent}{classname}#{methodname}'
                                   .format(indent=' ' * 8,
                                           classname=test.classname,
                                           methodname=test.methodname))
  error_message_lines.append(
    '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
    .format(main=JUnit.RUNNER_MAIN, code=result, failed=len(failed_targets),
            targets=pluralize(len(failed_targets), 'target'))
  )
  return TestResult(msg='\n'.join(error_message_lines), rc=result,
                    failed_targets=failed_targets)
def console_output(self, targets):
  targets_map = {}
  resource_target_map = {}
  ivy_info = None
  if self.get_options().libraries:
    ivy_jar_products = self.context.products.get_data('ivy_jar_products') or {}
    # This product is a list for historical reasons (exclusives groups) but in practice should
    # have either 0 or 1 entries.
    ivy_info_list = ivy_jar_products.get('default')
    if ivy_info_list:
      assert len(ivy_info_list) == 1, (
        'The values in ivy_jar_products should always be length 1,'
        ' since we no longer have exclusives groups.'
      )
      ivy_info = ivy_info_list[0]

  ivy_jar_memo = {}

  def process_target(current_target):
    """
    :type current_target:pants.base.target.Target
    """

    def get_target_type(target):
      if target.is_test:
        return Export.SourceRootTypes.TEST
      else:
        if (isinstance(target, Resources) and
            target in resource_target_map and
            resource_target_map[target].is_test):
          return Export.SourceRootTypes.TEST_RESOURCE
        elif isinstance(target, Resources):
          return Export.SourceRootTypes.RESOURCE
        else:
          return Export.SourceRootTypes.SOURCE

    def get_transitive_jars(jar_lib):
      """
      :type jar_lib: pants.backend.jvm.targets.jar_library.JarLibrary
      :rtype: twitter.common.collections.orderedset.OrderedSet
      """
      if not ivy_info or not self.get_options().libraries:
        return OrderedSet()
      transitive_jars = OrderedSet()
      for jar in jar_lib.jar_dependencies:
        transitive_jars.update(ivy_info.get_jars_for_ivy_module(jar, memo=ivy_jar_memo))
      return transitive_jars

    info = {
      'targets': [],
      'libraries': [],
      'roots': [],
      'target_type': get_target_type(current_target),
      'is_code_gen': current_target.is_codegen,
      'pants_target_type': self._get_pants_target_alias(type(current_target))
    }

    if not current_target.is_synthetic:
      info['globs'] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info['sources'] = list(current_target.sources_relative_to_buildroot())

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = get_transitive_jars(current_target)
    for dep in current_target.dependencies:
      info['targets'].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(IvyModuleRef(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(get_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info['targets'].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info['platform'] = current_target.platform.name

    info['roots'] = map(lambda (source_root, package_prefix): {
      'source_root': source_root,
      'package_prefix': package_prefix
    }, self._source_roots_for_target(current_target))

    if self.get_options().libraries:
      info['libraries'] = [self._jar_id(lib) for lib in target_libraries]

    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  jvm_platforms_map = {
    'default_platform': JvmPlatform.global_instance().default_platform.name,
    'platforms': {
      str(platform_name): {
        'target_level': str(platform.target_level),
        'source_level': str(platform.source_level),
        'args': platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    }
  }

  graph_info = {
    'targets': targets_map,
    'jvm_platforms': jvm_platforms_map,
  }

  jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
  if jvm_distributions:
    graph_info['jvm_distributions'] = jvm_distributions

  if self.get_options().libraries:
    graph_info['libraries'] = self._resolve_jars_info()

  graph_info['version'] = self.DEFAULT_EXPORT_VERSION

  if self.format:
    return json.dumps(graph_info, indent=4, separators=(',', ': ')).splitlines()
  else:
    return [json.dumps(graph_info)]
def compile(self, ctx, args, classpath, upstream_analysis, settings, fatal_warnings,
            zinc_file_manager, javac_plugin_map, scalac_plugin_map):
  try:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
  except DistributionLocator.Error:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

  javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

  javac_cmd.extend([
    '-classpath', ':'.join(classpath),
  ])

  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    javac_cmd.extend(settings_args)

  javac_cmd.extend([
    # TODO: support -release
    '-source', str(settings.source_level),
    '-target', str(settings.target_level),
  ])

  if self.execution_strategy == self.HERMETIC:
    javac_cmd.extend([
      # We need to strip the source root from our output files. Outputting to a directory, and
      # capturing that directory, does the job.
      # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
      # have a convenient way of making a directory in the output tree, so let's just use the
      # working directory as our output dir.
      # This also has the benefit of not needing to strip leading directories from the returned
      # snapshot.
      '-d', '.',
    ])
  else:
    javac_cmd.extend([
      '-d', ctx.classes_dir,
    ])

  javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

  javac_cmd.extend(args)

  if fatal_warnings:
    javac_cmd.extend(self.get_options().fatal_warnings_enabled_args)
  else:
    javac_cmd.extend(self.get_options().fatal_warnings_disabled_args)

  with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
    javac_cmd.extend(batched_sources)

    if self.execution_strategy == self.HERMETIC:
      self._execute_hermetic_compile(javac_cmd, ctx)
    else:
      with self.context.new_workunit(name='javac',
                                     cmd=' '.join(javac_cmd),
                                     labels=[WorkUnitLabel.COMPILER]) as workunit:
        self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
        p = subprocess.Popen(javac_cmd,
                             stdout=workunit.output('stdout'),
                             stderr=workunit.output('stderr'))
        return_code = p.wait()
        workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
        if return_code:
          raise TaskError('javac exited with return code {rc}'.format(rc=return_code))
def compile(self, ctx, args, dependency_classpath, upstream_analysis, settings,
            compiler_option_sets, zinc_file_manager, javac_plugin_map, scalac_plugin_map):
  classpath = (ctx.classes_dir.path,) + tuple(ce.path for ce in dependency_classpath)

  if self.get_options().capture_classpath:
    self._record_compile_classpath(classpath, ctx.target, ctx.classes_dir.path)

  try:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
  except DistributionLocator.Error:
    distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

  javac_args = []

  if settings.args:
    settings_args = settings.args
    if any('$JAVA_HOME' in a for a in settings.args):
      logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                   .format(distribution.home))
      settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
    javac_args.extend(settings_args)

  javac_args.extend([
    # TODO: support -release
    '-source', str(settings.source_level),
    '-target', str(settings.target_level),
  ])

  if self.execution_strategy == self.HERMETIC:
    javac_args.extend([
      # We need to strip the source root from our output files. Outputting to a directory, and
      # capturing that directory, does the job.
      # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
      # have a convenient way of making a directory in the output tree, so let's just use the
      # working directory as our output dir.
      # This also has the benefit of not needing to strip leading directories from the returned
      # snapshot.
      '-d', '.',
    ])
  else:
    javac_args.extend([
      '-d', ctx.classes_dir.path,
    ])

  javac_args.extend(self._javac_plugin_args(javac_plugin_map))

  javac_args.extend(args)

  compiler_option_sets_args = self.get_merged_args_for_compiler_option_sets(compiler_option_sets)
  javac_args.extend(compiler_option_sets_args)

  javac_args.extend([
    '-classpath', ':'.join(classpath),
  ])
  javac_args.extend(ctx.sources)

  # From https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#BHCJEIBB
  # Wildcards (*) aren’t allowed in these lists (such as for specifying *.java).
  # Use of the at sign (@) to recursively interpret files isn’t supported.
  # The -J options aren’t supported because they’re passed to the launcher,
  # which doesn’t support argument files.
  j_args = [j_arg for j_arg in javac_args if j_arg.startswith('-J')]
  safe_javac_args = list(filter(lambda x: x not in j_args, javac_args))

  with argfile.safe_args(safe_javac_args, self.get_options()) as batched_args:
    javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]
    javac_cmd.extend(j_args)
    javac_cmd.extend(batched_args)

    if self.execution_strategy == self.HERMETIC:
      self._execute_hermetic_compile(javac_cmd, ctx)
    else:
      with self.context.new_workunit(name='javac',
                                     cmd=' '.join(javac_cmd),
                                     labels=[WorkUnitLabel.COMPILER]) as workunit:
        self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
        p = subprocess.Popen(javac_cmd,
                             stdout=workunit.output('stdout'),
                             stderr=workunit.output('stderr'))
        return_code = p.wait()
        workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
        if return_code:
          raise TaskError('javac exited with return code {rc}'.format(rc=return_code))
def platform(self):
  return JvmPlatform.global_instance().get_platform_for_target(self)
def test_platform(self):
  if self.payload.test_platform:
    return JvmPlatform.global_instance().get_platform_by_name(self.payload.test_platform)
  return self.platform
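In other words, a junit_tests target may pin its own test JVM via the test_platform payload field, falling back to the compile-time platform otherwise. An illustrative BUILD usage (names hypothetical; the value must match a platform configured under the jvm-platform options):

# junit_tests(
#   name='tests',
#   sources=['FooTest.java'],
#   test_platform='java8',  # resolved via JvmPlatform.get_platform_by_name above
# )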
def execute(self):
  if JvmPlatform.global_instance().get_options().compiler == 'javac':
    return super(JavacCompile, self).execute()
def _run_tests(self, test_registry, output_dir, coverage=None):
  if coverage:
    extra_jvm_options = coverage.extra_jvm_options
    classpath_prepend = coverage.classpath_prepend
    classpath_append = coverage.classpath_append
  else:
    extra_jvm_options = []
    classpath_prepend = ()
    classpath_append = ()

  tests_by_properties = test_registry.index(
    lambda tgt: tgt.cwd if tgt.cwd is not None else self._working_dir,
    lambda tgt: tgt.test_platform,
    lambda tgt: tgt.payload.extra_jvm_options,
    lambda tgt: tgt.payload.extra_env_vars,
    lambda tgt: tgt.concurrency,
    lambda tgt: tgt.threads)

  # the below will be None if not set, and we'll default back to runtime_classpath
  classpath_product = self.context.products.get_data('instrument_classpath')

  result = 0
  for properties, tests in tests_by_properties.items():
    (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
    for batch in self._partition(tests):
      # Batches of test classes will likely exist within the same targets: dedupe them.
      relevant_targets = {test_registry.get_owning_target(t) for t in batch}
      complete_classpath = OrderedSet()
      complete_classpath.update(classpath_prepend)
      complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
      complete_classpath.update(self.classpath(relevant_targets,
                                               classpath_product=classpath_product))
      complete_classpath.update(classpath_append)
      distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)

      # Override cmdline args with values from junit_test() target that specify concurrency:
      args = self._args(output_dir) + [u'-xmlreport']
      if concurrency is not None:
        args = remove_arg(args, '-default-parallel')
        if concurrency == JUnitTests.CONCURRENCY_SERIAL:
          args = ensure_arg(args, '-default-concurrency', param='SERIAL')
        elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES')
        elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_METHODS')
        elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
          args = ensure_arg(args, '-default-concurrency', param='PARALLEL_CLASSES_AND_METHODS')

      if threads is not None:
        args = remove_arg(args, '-parallel-threads', has_param=True)
        args += ['-parallel-threads', str(threads)]

      batch_test_specs = [test.render_test_spec() for test in batch]
      with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
        self.context.log.debug('CWD = {}'.format(workdir))
        self.context.log.debug('platform = {}'.format(platform))
        with environment_as(**dict(target_env_vars)):
          result += abs(self._spawn_and_wait(
            executor=SubprocessExecutor(distribution),
            distribution=distribution,
            classpath=complete_classpath,
            main=JUnit.RUNNER_MAIN,
            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
            args=args + batch_tests,
            workunit_factory=self.context.new_workunit,
            workunit_name='run',
            workunit_labels=[WorkUnitLabel.TEST],
            cwd=workdir,
            synthetic_jar_dir=output_dir,
            create_synthetic_jar=self.synthetic_classpath,
          ))

      if result != 0 and self._fail_fast:
        break

  if result != 0:
    def error_handler(parse_error):
      # Just log and move on since the result is only used to characterize failures, and raising
      # an error here would just distract from the underlying test failures.
      self.context.log.error('Error parsing test result file {path}: {cause}'
                             .format(path=parse_error.junit_xml_path, cause=parse_error.cause))

    target_to_failed_test = parse_failed_targets(test_registry, output_dir, error_handler)
    failed_targets = sorted(target_to_failed_test, key=lambda t: t.address.spec)
    error_message_lines = []
    if self._failure_summary:
      for target in failed_targets:
        error_message_lines.append('\n{indent}{address}'.format(indent=' ' * 4,
                                                                address=target.address.spec))
        for test in sorted(target_to_failed_test[target]):
          error_message_lines.append('{indent}{classname}#{methodname}'
                                     .format(indent=' ' * 8,
                                             classname=test.classname,
                                             methodname=test.methodname))
    error_message_lines.append(
      '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
      .format(main=JUnit.RUNNER_MAIN, code=result, failed=len(failed_targets),
              targets=pluralize(len(failed_targets), 'target'))
    )
    raise TestFailedTaskError('\n'.join(error_message_lines),
                              failed_targets=list(failed_targets))
def generate_targets_map(self, targets, classpath_products=None):
  """Generates a dictionary containing all pertinent information about the target graph.

  The return dictionary is suitable for serialization by json.dumps.

  :param targets: The list of targets to generate the map for.
  :param classpath_products: Optional classpath_products. If not provided when the --libraries
    option is `True`, this task will perform its own jar resolution.
  """
  targets_map = {}
  resource_target_map = {}
  python_interpreter_targets_mapping = defaultdict(list)

  if self.get_options().libraries:
    # NB(gmalmquist): This supports mocking the classpath_products in tests.
    if classpath_products is None:
      classpath_products = self.resolve_jars(targets)
  else:
    classpath_products = None

  target_roots_set = set(self.context.target_roots)

  def process_target(current_target):
    """
    :type current_target:pants.build_graph.target.Target
    """

    def get_target_type(tgt):
      def is_test(t):
        return isinstance(t, JUnitTests) or isinstance(t, PythonTests)

      if is_test(tgt):
        return ExportTask.SourceRootTypes.TEST
      else:
        if (isinstance(tgt, Resources) and
            tgt in resource_target_map and
            is_test(resource_target_map[tgt])):
          return ExportTask.SourceRootTypes.TEST_RESOURCE
        elif isinstance(tgt, Resources):
          return ExportTask.SourceRootTypes.RESOURCE
        else:
          return ExportTask.SourceRootTypes.SOURCE

    info = {
      'targets': [],
      'libraries': [],
      'roots': [],
      'id': current_target.id,
      'target_type': get_target_type(current_target),
      # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
      'is_code_gen': current_target.is_synthetic,
      'is_synthetic': current_target.is_synthetic,
      'pants_target_type': self._get_pants_target_alias(type(current_target)),
    }

    if not current_target.is_synthetic:
      info['globs'] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info['sources'] = list(current_target.sources_relative_to_buildroot())

    info['transitive'] = current_target.transitive
    info['scope'] = str(current_target.scope)
    info['is_target_root'] = current_target in target_roots_set

    if isinstance(current_target, PythonRequirementLibrary):
      reqs = current_target.payload.get_field_value('requirements', set())
      """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
      info['requirements'] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
      interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
        [current_target])
      if interpreter_for_target is None:
        raise TaskError('Unable to find suitable interpreter for {}'
                        .format(current_target.address))
      python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
      info['python_interpreter'] = str(interpreter_for_target.identity)

    def iter_transitive_jars(jar_lib):
      """
      :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
      :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
      """
      if classpath_products:
        jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
        for _, jar_entry in jar_products:
          coordinate = jar_entry.coordinate
          # We drop classifier and type_ since those fields are represented in the global
          # libraries dict and here we just want the key into that dict (see `_jar_id`).
          yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = OrderedSet(iter_transitive_jars(current_target))
    for dep in current_target.dependencies:
      info['targets'].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(iter_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info['targets'].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info['platform'] = current_target.platform.name
      if hasattr(current_target, 'test_platform'):
        info['test_platform'] = current_target.test_platform.name

    info['roots'] = [{
      'source_root': source_root_package_prefix[0],
      'package_prefix': source_root_package_prefix[1]
    } for source_root_package_prefix in self._source_roots_for_target(current_target)]

    if classpath_products:
      info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  jvm_platforms_map = {
    'default_platform': JvmPlatform.global_instance().default_platform.name,
    'platforms': {
      str(platform_name): {
        'target_level': str(platform.target_level),
        'source_level': str(platform.source_level),
        'args': platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    },
  }

  graph_info = {
    'version': self.DEFAULT_EXPORT_VERSION,
    'targets': targets_map,
    'jvm_platforms': jvm_platforms_map,
    # `jvm_distributions` are static distribution settings from config,
    # `preferred_jvm_distributions` are distributions that pants actually uses for the
    # given platform setting.
    'preferred_jvm_distributions': {}
  }

  for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items():
    preferred_distributions = {}
    for strict, strict_key in [(True, 'strict'), (False, 'non_strict')]:
      try:
        dist = JvmPlatform.preferred_jvm_distribution([platform], strict=strict)
        preferred_distributions[strict_key] = dist.home
      except DistributionLocator.Error:
        pass
    if preferred_distributions:
      graph_info['preferred_jvm_distributions'][platform_name] = preferred_distributions

  if classpath_products:
    graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

  if python_interpreter_targets_mapping:
    # NB: We've selected a python interpreter compatible with each python target individually into
    # the `python_interpreter_targets_mapping`. These python targets may not be compatible, ie: we
    # could have a python target requiring 'CPython>=2.7<3' (ie: CPython-2.7.x) and another
    # requiring 'CPython>=3.6'. To pick a default interpreter then from among these two choices
    # is arbitrary and not to be relied on to work as a default interpreter if ever needed by the
    # export consumer.
    #
    # TODO(John Sirois): consider either eliminating the 'default_interpreter' field and pressing
    # export consumers to make their own choice of a default (if needed) or else use
    # `select.select_interpreter_for_targets` and fail fast if there is no interpreter compatible
    # across all the python targets in-play.
    #
    # For now, make our arbitrary historical choice of a default interpreter explicit and use the
    # lowest version.
    default_interpreter = min(python_interpreter_targets_mapping.keys())

    interpreters_info = {}
    for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
      req_libs = [target for target in Target.closure_for_targets(targets)
                  if has_python_requirements(target)]
      chroot = self.resolve_requirements(interpreter, req_libs)
      interpreters_info[str(interpreter.identity)] = {
        'binary': interpreter.binary,
        'chroot': chroot.path()
      }

    graph_info['python_setup'] = {
      'default_interpreter': str(default_interpreter.identity),
      'interpreters': interpreters_info
    }

  return graph_info
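The preferred_jvm_distributions entry built above ends up shaped like the following sketch (platform names and paths hypothetical); either key is omitted when the corresponding lookup raises, and a platform is dropped entirely when neither lookup succeeds.

# 'preferred_jvm_distributions': {
#   'java8': {
#     'strict': '/usr/lib/jvm/java-8-openjdk',
#     'non_strict': '/usr/lib/jvm/java-8-openjdk',
#   },
# }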
def console_output(self, targets):
  targets_map = {}
  resource_target_map = {}
  ivy_info = None
  if self.get_options().libraries:
    ivy_jar_products = self.context.products.get_data("ivy_jar_products") or {}
    # This product is a list for historical reasons (exclusives groups) but in practice should
    # have either 0 or 1 entries.
    ivy_info_list = ivy_jar_products.get("default")
    if ivy_info_list:
      assert len(ivy_info_list) == 1, (
        "The values in ivy_jar_products should always be length 1,"
        " since we no longer have exclusives groups."
      )
      ivy_info = ivy_info_list[0]

  ivy_jar_memo = {}
  python_interpreter_targets_mapping = defaultdict(list)

  def process_target(current_target):
    """
    :type current_target:pants.base.target.Target
    """

    def get_target_type(target):
      if target.is_test:
        return Export.SourceRootTypes.TEST
      else:
        if (isinstance(target, Resources) and
            target in resource_target_map and
            resource_target_map[target].is_test):
          return Export.SourceRootTypes.TEST_RESOURCE
        elif isinstance(target, Resources):
          return Export.SourceRootTypes.RESOURCE
        else:
          return Export.SourceRootTypes.SOURCE

    def get_transitive_jars(jar_lib):
      """
      :type jar_lib: pants.backend.jvm.targets.jar_library.JarLibrary
      :rtype: twitter.common.collections.orderedset.OrderedSet
      """
      if not ivy_info or not self.get_options().libraries:
        return OrderedSet()
      transitive_jars = OrderedSet()
      for jar in jar_lib.jar_dependencies:
        transitive_jars.update(ivy_info.get_jars_for_ivy_module(jar, memo=ivy_jar_memo))
      return transitive_jars

    info = {
      "targets": [],
      "libraries": [],
      "roots": [],
      "target_type": get_target_type(current_target),
      "is_code_gen": current_target.is_codegen,
      "pants_target_type": self._get_pants_target_alias(type(current_target)),
    }

    if not current_target.is_synthetic:
      info["globs"] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info["sources"] = list(current_target.sources_relative_to_buildroot())

    if isinstance(current_target, PythonRequirementLibrary):
      reqs = current_target.payload.get_field_value("requirements", set())
      """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
      info["requirements"] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
      interpreter_for_target = self.select_interpreter_for_targets([current_target])
      if interpreter_for_target is None:
        raise TaskError("Unable to find suitable interpreter for {}"
                        .format(current_target.address))
      python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
      info["python_interpreter"] = str(interpreter_for_target.identity)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = get_transitive_jars(current_target)
    for dep in current_target.dependencies:
      info["targets"].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(IvyModuleRef(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(get_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info["targets"].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info["excludes"] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info["platform"] = current_target.platform.name

    info["roots"] = map(
      lambda (source_root, package_prefix): {"source_root": source_root,
                                             "package_prefix": package_prefix},
      self._source_roots_for_target(current_target),
    )

    if self.get_options().libraries:
      info["libraries"] = [self._jar_id(lib) for lib in target_libraries]

    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  jvm_platforms_map = {
    "default_platform": JvmPlatform.global_instance().default_platform.name,
    "platforms": {
      str(platform_name): {
        "target_level": str(platform.target_level),
        "source_level": str(platform.source_level),
        "args": platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    },
  }

  graph_info = {
    "version": self.DEFAULT_EXPORT_VERSION,
    "targets": targets_map,
    "jvm_platforms": jvm_platforms_map,
  }

  jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
  if jvm_distributions:
    graph_info["jvm_distributions"] = jvm_distributions

  if self.get_options().libraries:
    graph_info["libraries"] = self._resolve_jars_info()

  if python_interpreter_targets_mapping:
    default_interpreter = self.interpreter_cache.select_interpreter(
      python_interpreter_targets_mapping.keys())[0]
    interpreters_info = {}
    for interpreter, targets in python_interpreter_targets_mapping.iteritems():
      chroot = self.cached_chroot(interpreter=interpreter,
                                  pex_info=PexInfo.default(),
                                  targets=targets)
      interpreters_info[str(interpreter.identity)] = {"binary": interpreter.binary,
                                                      "chroot": chroot.path()}
    graph_info["python_setup"] = {
      "default_interpreter": str(default_interpreter.identity),
      "interpreters": interpreters_info,
    }

  if self.format:
    return json.dumps(graph_info, indent=4, separators=(",", ": ")).splitlines()
  else:
    return [json.dumps(graph_info)]
def generate_targets_map(self, targets, classpath_products=None):
  """Generates a dictionary containing all pertinent information about the target graph.

  The return dictionary is suitable for serialization by json.dumps.

  :param targets: The list of targets to generate the map for.
  :param classpath_products: Optional classpath_products. If not provided when the --libraries
    option is `True`, this task will perform its own jar resolution.
  """
  targets_map = {}
  resource_target_map = {}
  python_interpreter_targets_mapping = defaultdict(list)

  if self.get_options().libraries:
    # NB(gmalmquist): This supports mocking the classpath_products in tests.
    if classpath_products is None:
      classpath_products = self.resolve_jars(targets)
  else:
    classpath_products = None

  def process_target(current_target):
    """
    :type current_target:pants.build_graph.target.Target
    """

    def get_target_type(target):
      if target.is_test:
        return ExportTask.SourceRootTypes.TEST
      else:
        if (isinstance(target, Resources) and
            target in resource_target_map and
            resource_target_map[target].is_test):
          return ExportTask.SourceRootTypes.TEST_RESOURCE
        elif isinstance(target, Resources):
          return ExportTask.SourceRootTypes.RESOURCE
        else:
          return ExportTask.SourceRootTypes.SOURCE

    info = {
      'targets': [],
      'libraries': [],
      'roots': [],
      'target_type': get_target_type(current_target),
      'is_code_gen': current_target.is_codegen,
      'pants_target_type': self._get_pants_target_alias(type(current_target))
    }

    if not current_target.is_synthetic:
      info['globs'] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info['sources'] = list(current_target.sources_relative_to_buildroot())

    if isinstance(current_target, PythonRequirementLibrary):
      reqs = current_target.payload.get_field_value('requirements', set())
      """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
      info['requirements'] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
      interpreter_for_target = self.select_interpreter_for_targets([current_target])
      if interpreter_for_target is None:
        raise TaskError('Unable to find suitable interpreter for {}'
                        .format(current_target.address))
      python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
      info['python_interpreter'] = str(interpreter_for_target.identity)

    def iter_transitive_jars(jar_lib):
      """
      :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
      :rtype: :class:`collections.Iterator` of
              :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
      """
      if classpath_products:
        jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
        for _, jar_entry in jar_products:
          coordinate = jar_entry.coordinate
          # We drop classifier and type_ since those fields are represented in the global
          # libraries dict and here we just want the key into that dict (see `_jar_id`).
          yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = OrderedSet(iter_transitive_jars(current_target))
    for dep in current_target.dependencies:
      info['targets'].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(iter_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info['targets'].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info['platform'] = current_target.platform.name

    info['roots'] = map(lambda (source_root, package_prefix): {
      'source_root': source_root,
      'package_prefix': package_prefix
    }, self._source_roots_for_target(current_target))

    if classpath_products:
      info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  jvm_platforms_map = {
    'default_platform': JvmPlatform.global_instance().default_platform.name,
    'platforms': {
      str(platform_name): {
        'target_level': str(platform.target_level),
        'source_level': str(platform.source_level),
        'args': platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    }
  }

  graph_info = {
    'version': self.DEFAULT_EXPORT_VERSION,
    'targets': targets_map,
    'jvm_platforms': jvm_platforms_map,
  }

  jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
  if jvm_distributions:
    graph_info['jvm_distributions'] = jvm_distributions

  if classpath_products:
    graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

  if python_interpreter_targets_mapping:
    interpreters = self.interpreter_cache.select_interpreter(
      python_interpreter_targets_mapping.keys())
    default_interpreter = interpreters[0]

    interpreters_info = {}
    for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
      chroot = self.cached_chroot(
        interpreter=interpreter,
        pex_info=PexInfo.default(),
        targets=targets
      )
      interpreters_info[str(interpreter.identity)] = {
        'binary': interpreter.binary,
        'chroot': chroot.path()
      }

    graph_info['python_setup'] = {
      'default_interpreter': str(default_interpreter.identity),
      'interpreters': interpreters_info
    }

  return graph_info
def _run_tests(self, tests_to_targets):
  if self._coverage:
    extra_jvm_options = self._coverage.extra_jvm_options
    classpath_prepend = self._coverage.classpath_prepend
    classpath_append = self._coverage.classpath_append
  else:
    extra_jvm_options = []
    classpath_prepend = ()
    classpath_append = ()

  tests_by_properties = self._tests_by_properties(
    tests_to_targets,
    self._infer_workdir,
    lambda target: target.test_platform,
    lambda target: target.payload.extra_jvm_options,
    lambda target: target.payload.extra_env_vars,
  )

  # the below will be None if not set, and we'll default back to runtime_classpath
  classpath_product = self.context.products.get_data("instrument_classpath")

  result = 0
  for (workdir, platform, target_jvm_options, target_env_vars), tests in tests_by_properties.items():
    for batch in self._partition(tests):
      # Batches of test classes will likely exist within the same targets: dedupe them.
      relevant_targets = set(map(tests_to_targets.get, batch))
      complete_classpath = OrderedSet()
      complete_classpath.update(classpath_prepend)
      complete_classpath.update(self.tool_classpath("junit"))
      complete_classpath.update(self.classpath(relevant_targets,
                                               classpath_product=classpath_product))
      complete_classpath.update(classpath_append)
      distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)
      with binary_util.safe_args(batch, self.get_options()) as batch_tests:
        self.context.log.debug("CWD = {}".format(workdir))
        self.context.log.debug("platform = {}".format(platform))
        with environment_as(**dict(target_env_vars)):
          result += abs(
            self._spawn_and_wait(
              executor=SubprocessExecutor(distribution),
              distribution=distribution,
              classpath=complete_classpath,
              main=JUnitRun._MAIN,
              jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
              args=self._args + batch_tests + ["-xmlreport"],
              workunit_factory=self.context.new_workunit,
              workunit_name="run",
              workunit_labels=[WorkUnitLabel.TEST],
              cwd=workdir,
              synthetic_jar_dir=self.workdir,
              create_synthetic_jar=self.synthetic_classpath,
            )
          )

      if result != 0 and self._fail_fast:
        break

  if result != 0:
    failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
    failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
    error_message_lines = []
    if self._failure_summary:
      for target in failed_targets:
        error_message_lines.append("\n{0}{1}".format(" " * 4, target.address.spec))
        for test in sorted(failed_targets_and_tests[target]):
          error_message_lines.append("{0}{1}".format(" " * 8, test))
    error_message_lines.append(
      "\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.".format(
        main=JUnitRun._MAIN,
        code=result,
        failed=len(failed_targets),
        targets=pluralize(len(failed_targets), "target"),
      )
    )
    raise TestFailedTaskError("\n".join(error_message_lines),
                              failed_targets=list(failed_targets))