async def resolve_targets(targets: UnexpandedTargets) -> Targets:
    # TODO: This method duplicates `resolve_targets_with_origins`, because direct expansion of
    # `Addresses` to `Targets` is common in a few places: we can't always assume that we
    # have `AddressesWithOrigins`. One way to dedupe these two methods would be to fake some
    # origins, and then strip them afterward.
    """Expand base targets into their generated subtargets, passing other targets through.

    A base target with no subtargets is kept as-is; ordering is first-seen, with
    non-base targets preceding the expanded base targets.
    """
    # Split out and expand any base targets.
    # TODO: Should recursively expand alias targets here as well.
    other_targets = []
    base_targets = []
    for target in targets:
        if target.address.is_base_target:
            base_targets.append(target)
        else:
            other_targets.append(target)

    # One Subtargets request per base target, resolved concurrently by the engine.
    base_targets_subtargets = await MultiGet(
        Get(Subtargets, Address, bt.address) for bt in base_targets)

    # Zip the subtargets back to the base targets and replace them.
    # NB: If a target had no subtargets, we use the base.
    expanded_targets = OrderedSet(other_targets)
    expanded_targets.update(
        target for subtargets in base_targets_subtargets
        for target in (subtargets.subtargets if subtargets.subtargets else (
            subtargets.base, )))
    return Targets(expanded_targets)
def compute_classpath_entries(cls, targets, classpath_products, extra_classpath_tuples, confs):
    """Return the list of classpath entries for a classpath covering the passed targets.

    Filters and adds paths from extra_classpath_tuples to the end of the resulting list.

    :param targets: The targets to generate a classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param extra_classpath_tuples: Additional classpath entries as tuples of
      (string, ClasspathEntry).
    :param confs: The list of confs for use by this classpath.
    :returns: The classpath entries as a list of path elements.
    :rtype: list of ClasspathEntry
    """
    # Conf-filtered entries for the targets themselves, deduped in first-seen order.
    entries = OrderedSet(
        cls._classpath_iter(
            classpath_products.get_classpath_entries_for_targets(targets),
            confs=confs,
        ))
    # The extra entries (also conf-filtered) are appended at the end.
    entries.update(
        cls._entries_iter(
            cls._filtered_classpath_by_confs_iter(extra_classpath_tuples, confs)))
    return list(entries)
def console_output(self, unused_method_argument):
    """Yield one output line per selected dependency of the target roots.

    Source dependencies are rendered as address specs; third-party dependencies are
    rendered as requirement strings or maven coordinates depending on payload type.
    """
    closure = OrderedSet()
    for root in self.context.target_roots:
        if self.act_transitively:
            root.walk(closure.add)
        else:
            closure.update(root.dependencies)

    selected = self.get_options().type
    include_source = selected in (
        DependencyType.SOURCE,
        DependencyType.SOURCE_AND_THIRD_PARTY,
    )
    include_3rdparty = selected in (
        DependencyType.THIRD_PARTY,
        DependencyType.SOURCE_AND_THIRD_PARTY,
    )

    for tgt in closure:
        if include_source:
            yield tgt.address.spec
        if include_3rdparty:
            # TODO(John Sirois): We need an external payload abstraction at which point knowledge
            # of jar and requirement payloads can go and this hairball will be untangled.
            if isinstance(tgt.payload.get_field("requirements"), PythonRequirementsField):
                for requirement in tgt.payload.requirements:
                    yield str(requirement.requirement)
            elif isinstance(tgt.payload.get_field("jars"), JarsField):
                for jar in tgt.payload.jars:
                    template = "{org}:{name}:{rev}" if jar.rev else "{org}:{name}"
                    yield template.format(org=jar.org, name=jar.name, rev=jar.rev)
def dependents_of_addresses(self, addresses):
    """Given an iterable of addresses, return all of those addresses dependents."""
    # Seed with the inputs themselves, then fold in both explicit and implicit dependents.
    result = OrderedSet(addresses)
    for address in addresses:
        result.update(self._dependent_address_map[address])
        result.update(self._implicit_dependent_address_map[address])
    return result
def compute_pantsd_invalidation_globs(buildroot, bootstrap_options):
    """Computes the merged value of the `--pantsd-invalidation-globs` option.

    Combines --pythonpath and --pants-config-files files that are in {buildroot} dir with those
    invalidation_globs provided by users.
    """
    merged = OrderedSet()
    candidates = (
        *sys.path,
        *bootstrap_options.pythonpath,
        *bootstrap_options.pants_config_files,
        "!*.pyc",
        "!__pycache__/",
        *bootstrap_options.pantsd_invalidation_globs,
    )
    for glob in candidates:
        # Ignore patterns pass through verbatim.
        if glob.startswith("!"):
            merged.add(glob)
            continue

        # Relativize absolute globs against the buildroot where possible.
        if os.path.isabs(glob):
            glob_relpath = fast_relpath_optional(glob, buildroot)
        else:
            glob_relpath = glob

        if glob_relpath:
            merged.update([glob_relpath, glob_relpath + "/**"])
        else:
            logger.debug(
                f"Changes to {glob}, outside of the buildroot, will not be invalidated."
            )
    return list(merged)
def get_dependencies(self):
    """Returns the set of data dependencies as producer infos corresponding to data
    requirements."""
    infos = OrderedSet()
    for product_type in self._dependencies:
        for producer_info in self._get_producer_infos_by_product_type(product_type):
            infos.add(producer_info)
    return infos
def closure_for_targets(
    cls,
    target_roots,
    exclude_scopes=None,
    include_scopes=None,
    bfs=None,
    postorder=None,
    respect_intransitive=False,
):
    """Computes the closure of the given targets respecting the given input scopes.

    :API: public

    :param list target_roots: The list of Targets to start from. These targets will always be
      included in the closure, regardless of scope settings.
    :param Scope exclude_scopes: If present and non-empty, only dependencies which have none of
      the scope names in this Scope will be traversed.
    :param Scope include_scopes: If present and non-empty, only dependencies which have at
      least one of the scope names in this Scope will be traversed.
    :param bool bfs: Whether to traverse in breadth-first or depth-first order. (Defaults to
      True).
    :param bool respect_intransitive: If True, any dependencies which have the 'intransitive'
      scope will not be included unless they are direct dependencies of one of the root
      targets. (Defaults to False).
    """
    # Sometimes generators are passed into this function.
    roots = list(target_roots)
    if not roots:
        return OrderedSet()

    build_graph = roots[0]._build_graph
    root_addresses = [root.address for root in roots]
    dep_predicate = cls._closure_dep_predicate(
        roots,
        include_scopes=include_scopes,
        exclude_scopes=exclude_scopes,
        respect_intransitive=respect_intransitive,
    )

    closure = OrderedSet()
    if bfs:
        closure.update(
            build_graph.transitive_subgraph_of_addresses_bfs(
                addresses=root_addresses,
                dep_predicate=dep_predicate,
            ))
    else:
        build_graph.walk_transitive_dependency_graph(
            addresses=root_addresses,
            work=closure.add,
            postorder=postorder,
            dep_predicate=dep_predicate,
        )

    # Make sure all the roots made it into the closure.
    closure.update(roots)
    return closure
def get_for_targets(self, targets):
    """Gets the union of the products for the given targets, preserving the input order.

    :API: public
    """
    union = OrderedSet()
    for target in targets:
        union.update(self._products_by_target[target])
    return union
def test_update() -> None:
    # Updating with all-new elements appends them after the existing ones, in order.
    disjoint = OrderedSet("abcd")
    disjoint.update("efgh")
    assert len(disjoint) == 8
    assert "".join(disjoint) == "abcdefgh"

    # Updating with overlapping elements keeps each element at its first-seen position.
    overlapping = OrderedSet("abcd")
    overlapping.update("cdef")
    assert len(overlapping) == 6
    assert "".join(overlapping) == "abcdef"
def strict_dependencies(self, dep_context):
    """
    :param dep_context: A DependencyContext with configuration for the request.
    :return: targets that this target "strictly" depends on. This set of dependencies contains
      only directly declared dependencies, with two exceptions:
        1) aliases are expanded transitively
        2) the strict_dependencies of targets exported by strict_dependencies (transitively).
    :rtype: list of Target
    """
    # Results are memoized per DependencyContext.
    strict_deps = self._cached_strict_dependencies_map.get(dep_context, None)
    if strict_deps is None:
        default_predicate = self._closure_dep_predicate(
            {self}, **dep_context.target_closure_kwargs)
        # TODO(#5977): this branch needs testing!
        if not default_predicate:

            def default_predicate(*args, **kwargs):
                return True

        def dep_predicate(source, dependency):
            # Gate on the default predicate first; then only continue traversal
            # through aliases and exported dependencies.
            if not default_predicate(source, dependency):
                return False

            # Always expand aliases.
            if type(source) in dep_context.alias_types:
                return True

            # Traverse other dependencies if they are exported.
            if source._dep_is_exported(dependency):
                return True

            return False

        dep_addresses = [
            d.address for d in self.dependencies if default_predicate(self, d)
        ]
        result = self._build_graph.transitive_subgraph_of_addresses_bfs(
            addresses=dep_addresses, dep_predicate=dep_predicate)

        strict_deps = OrderedSet()
        for declared in result:
            # Aliases themselves are dropped; only what they expand to is kept.
            if type(declared) in dep_context.alias_types:
                continue
            if isinstance(declared, dep_context.types_with_closure):
                strict_deps.update(
                    declared.closure(bfs=True, **dep_context.target_closure_kwargs))
            strict_deps.add(declared)

        strict_deps = list(strict_deps)
        self._cached_strict_dependencies_map[dep_context] = strict_deps
    return strict_deps
def bundle(self, app, results_dir):
    """Create a self-contained application bundle.

    The bundle will contain the target classes, dependencies and resources.

    :param app: The BundleCreate.App to bundle.
    :param results_dir: The directory under which the bundle directory is created.
    :returns: The path of the created bundle directory.
    """
    assert isinstance(app, BundleCreate.App)

    bundle_dir = self.get_bundle_dir(app.id, results_dir)
    self.context.log.debug(
        f"creating {os.path.relpath(bundle_dir, get_buildroot())}")

    safe_mkdir(bundle_dir, clean=True)

    classpath = OrderedSet()

    # Create symlinks for both internal and external dependencies under `lib_dir`. This is
    # only needed when not creating a deployjar
    lib_dir = os.path.join(bundle_dir, self.LIBS_DIR)
    if not app.deployjar:
        os.mkdir(lib_dir)
        consolidated_classpath = self.context.products.get_data(
            "consolidated_classpath")
        classpath.update(
            ClasspathProducts.create_canonical_classpath(
                consolidated_classpath,
                app.target.closure(bfs=True, **self._target_closure_kwargs),
                lib_dir,
                internal_classpath_only=False,
                excludes=app.binary.deploy_excludes,
            ))

    bundle_jar = os.path.join(bundle_dir, f"{app.binary.basename}.jar")
    with self.monolithic_jar(app.binary, bundle_jar,
                             manifest_classpath=classpath) as jar:
        self.add_main_manifest_entry(jar, app.binary)

        # Make classpath complete by adding the monolithic jar.
        classpath.update([jar.path])

    if app.binary.shading_rules:
        for jar_path in classpath:
            # In case `jar_path` is a symlink, this is still safe, shaded jar will overwrite jar_path,
            # original file `jar_path` linked to remains untouched.
            # TODO run in parallel to speed up
            self.shade_jar(shading_rules=app.binary.shading_rules,
                           jar_path=jar_path)

    self.symlink_bundles(app, bundle_dir)

    return bundle_dir
def resolve_deps(self, unresolved_deps):
    """Resolve dependency specs into targets via the context.

    :API: public

    :param unresolved_deps: An iterable of dependency specs to resolve.
    :returns: The resolved targets, deduplicated in first-seen order.
    :raises AddressLookupError: If a spec fails to resolve. The failing spec is named in the
      message and the original error is attached as the explicit cause.
    """
    deps = OrderedSet()
    for dep in unresolved_deps:
        try:
            deps.update(self.context.resolve(dep))
        except AddressLookupError as e:
            # Chain the original lookup error explicitly so the traceback shows it as
            # the cause rather than as an incidental "during handling" context.
            raise AddressLookupError(
                "{message}\n on dependency {dep}".format(message=e, dep=dep)) from e
    return deps
class RootedProducts:
    """File products of a build that have a concept of a 'root' directory.

    E.g., classfiles, under a root package directory.

    :API: public
    """

    def __init__(self, root):
        """
        :API: public

        :param root: The root directory all stored paths are relative to.
        """
        self._root = root
        # Paths relative to `root`, deduplicated in insertion order.
        self._rel_paths = OrderedSet()

    def add_abs_paths(self, abs_paths):
        """Add absolute paths; each must live under this root.

        :API: public
        """
        for abs_path in abs_paths:
            self._rel_paths.add(fast_relpath(abs_path, self._root))

    def add_rel_paths(self, rel_paths):
        """Add paths already relative to this root.

        :API: public
        """
        self._rel_paths.update(rel_paths)

    def root(self):
        """
        :API: public
        """
        return self._root

    def rel_paths(self):
        """
        :API: public
        """
        return self._rel_paths

    def abs_paths(self):
        """Yield each stored path joined back onto the root.

        :API: public
        """
        for relpath in self._rel_paths:
            yield os.path.join(self._root, relpath)

    def __bool__(self):
        # Bug fix: __bool__ must return a bool. Returning the OrderedSet itself raises
        # `TypeError: __bool__ should return bool` whenever an instance is truth-tested.
        return bool(self._rel_paths)
def execute_codegen(self, target, target_workdir):
    """Invoke protoc to generate Java sources for `target` into `target_workdir`.

    :param target: The protobuf target to generate code for.
    :param target_workdir: The directory generated sources are written to.
    :raises TaskError: If protoc exits non-zero.
    """
    sources_by_base = self._calculate_sources(target)
    sources = target.sources_relative_to_buildroot()

    bases = OrderedSet()
    # Note that the root import must come first, otherwise protoc can get confused
    # when trying to resolve imports from the root against the import's source root.
    if self.get_options().import_from_root:
        bases.add(".")
    bases.update(sources_by_base.keys())
    bases.update(self._proto_path_imports([target]))

    gen_flag = "--java_out"

    gen = "{0}={1}".format(gen_flag, target_workdir)

    args = [self.protobuf_binary, gen]

    if self.plugins:
        for plugin in self.plugins:
            args.append("--{0}_out={1}".format(plugin, target_workdir))

    for base in bases:
        args.append("--proto_path={0}".format(base))

    args.extend(sources)

    # Tack on extra path entries. These can be used to find protoc plugins.
    protoc_environ = os.environ.copy()
    if self._extra_paths:
        protoc_environ["PATH"] = os.pathsep.join(
            self._extra_paths + protoc_environ["PATH"].split(os.pathsep))

    # Note: The test_source_ordering integration test scrapes this output, so modify it with care.
    self.context.log.debug("Executing: {0}".format("\\\n ".join(args)))

    with self.context.new_workunit(name="protoc",
                                   labels=[WorkUnitLabel.TOOL],
                                   cmd=" ".join(args)) as workunit:
        result = subprocess.call(
            args,
            env=protoc_environ,
            stdout=workunit.output("stdout"),
            stderr=workunit.output("stderr"),
        )
        if result != 0:
            raise TaskError("{} ... exited non-zero ({})".format(
                self.protobuf_binary, result))
async def resolve_targets(targets: UnexpandedTargets) -> Targets:
    """Expand BUILD targets into their generated file-level subtargets.

    File-level targets pass through unchanged; each BUILD target is replaced by its
    subtargets, or kept as-is when it generated none.
    """
    # Split out and expand any BUILD targets.
    other_targets = []
    build_targets = []
    for target in targets:
        if not target.address.is_file_target:
            build_targets.append(target)
        else:
            other_targets.append(target)

    # One Subtargets request per BUILD target, resolved concurrently by the engine.
    build_targets_subtargets = await MultiGet(
        Get(Subtargets, Address, bt.address) for bt in build_targets)

    # Zip the subtargets back to the BUILD targets and replace them.
    # NB: If a target had no subtargets, we use the original.
    expanded_targets = OrderedSet(other_targets)
    expanded_targets.update(
        target for subtargets in build_targets_subtargets
        for target in (subtargets.subtargets if subtargets.subtargets else (
            subtargets.base, )))
    return Targets(expanded_targets)
def checkstyle(self, targets, sources):
    """Run checkstyle over the given sources, batching with Xargs.

    :param targets: Targets whose runtime classpaths may be appended to the tool classpath.
    :param sources: The source file paths to check.
    :returns: The exit code of the checkstyle invocation(s).
    :raises TaskError: If no checkstyle configuration file is configured.
    """
    classpath = OrderedSet(self.tool_classpath("checkstyle"))
    if self.get_options().include_user_classpath:
        runtime_classpaths = self.context.products.get_data("runtime_classpath")
        for target in targets:
            runtime_classpath = runtime_classpaths.get_for_targets(target.closure(bfs=True))
            classpath.update(
                jar for conf, jar in runtime_classpath
                if conf in self.get_options().confs)

    config = CheckstyleSubsystem.global_instance().options.config
    if not config:
        raise TaskError(
            "No checkstyle configuration file configured. Configure with `--checkstyle-config`."
        )

    args = ["-c", config, "-f", "plain"]

    if self.get_options().properties:
        properties_file = os.path.join(self.workdir, "checkstyle.properties")
        with safe_open(properties_file, "w") as pf:
            for k, v in self.get_options().properties.items():
                pf.write(f"{k}={v}\n")
        args.extend(["-p", properties_file])

    # We've hit known cases of checkstyle command lines being too long for the system so we guard
    # with Xargs since checkstyle does not accept, for example, @argfile style arguments.
    def invoke(batched_sources):
        return self.runjava(
            classpath=classpath,
            main=self._CHECKSTYLE_MAIN,
            jvm_options=self.get_options().jvm_options,
            args=args + batched_sources,
            workunit_name="checkstyle",
        )

    return Xargs(invoke).execute(sources)
def compute_pantsd_invalidation_globs(buildroot, bootstrap_options, absolute_pidfile):
    """Computes the merged value of the `--pantsd-invalidation-globs` option.

    Combines --pythonpath and --pants-config-files files that are in {buildroot} dir with those
    invalidation_globs provided by users.
    """
    merged = OrderedSet()

    # Globs calculated from the sys.path and other file-like configuration need to be sanitized
    # to relative globs (where possible).
    for glob in (
            absolute_pidfile,
            *sys.path,
            *bootstrap_options.pythonpath,
            *bootstrap_options.pants_config_files,
    ):
        if os.path.isabs(glob):
            glob_relpath = fast_relpath_optional(glob, buildroot)
        else:
            glob_relpath = glob

        if not glob_relpath:
            logger.debug(
                f"Changes to {glob}, outside of the buildroot, will not be invalidated."
            )
        else:
            merged.update([glob_relpath, glob_relpath + "/**"])

    # Explicitly specified globs are already relative, and are added verbatim.
    merged.update((
        "!*.pyc",
        "!__pycache__/",
        # TODO: This is a bandaid for https://github.com/pantsbuild/pants/issues/7022:
        # macros should be adapted to allow this dependency to be automatically detected.
        "requirements.txt",
        "3rdparty/**/requirements.txt",
        *bootstrap_options.pantsd_invalidation_globs,
    ))

    return list(merged)
def process_target(current_target):
    """Populate `targets_map` with an export entry for `current_target`, recursing into the
    `java_sources` of Scala libraries.

    :type current_target:pants.build_graph.target.Target
    """

    def get_target_type(tgt):
        # Classify the target for the export format: test, (test-)resource, or source.
        def is_test(t):
            return isinstance(t, JUnitTests) or isinstance(t, PythonTests)

        if is_test(tgt):
            return SourceRootTypes.TEST
        else:
            if (isinstance(tgt, Resources) and tgt in resource_target_map
                    and is_test(resource_target_map[tgt])):
                return SourceRootTypes.TEST_RESOURCE
            elif isinstance(tgt, Resources):
                return SourceRootTypes.RESOURCE
            else:
                return SourceRootTypes.SOURCE

    info = {
        "targets": [],
        "libraries": [],
        "roots": [],
        "id": current_target.id,
        "target_type": get_target_type(current_target),
        # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
        "is_code_gen": current_target.is_synthetic,
        "is_synthetic": current_target.is_synthetic,
        "pants_target_type": self._get_pants_target_alias(type(current_target)),
    }

    # Synthetic targets have no sources on disk to report.
    if not current_target.is_synthetic:
        info["globs"] = current_target.globs_relative_to_buildroot()
        if self.get_options().sources:
            info["sources"] = list(
                current_target.sources_relative_to_buildroot())

    info["transitive"] = current_target.transitive
    info["scope"] = str(current_target.scope)
    info["is_target_root"] = current_target in target_roots_set

    if isinstance(current_target, PythonRequirementLibrary):
        reqs = current_target.payload.get_field_value("requirements", set())
        """:type : set[pants.python.python_requirement.PythonRequirement]"""
        info["requirements"] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
        interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
            [current_target])
        if interpreter_for_target is None:
            raise TaskError(
                "Unable to find suitable interpreter for {}".format(
                    current_target.address))
        python_interpreter_targets_mapping[
            interpreter_for_target].append(current_target)
        info["python_interpreter"] = str(interpreter_for_target.identity)

    def iter_transitive_jars(jar_lib):
        """
        :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
        :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
        """
        if classpath_products:
            jar_products = classpath_products.get_artifact_classpath_entries_for_targets(
                (jar_lib, ))
            for _, jar_entry in jar_products:
                coordinate = jar_entry.coordinate
                # We drop classifier and type_ since those fields are represented in the global
                # libraries dict and here we just want the key into that dict (see `_jar_id`).
                yield M2Coordinate(org=coordinate.org,
                                   name=coordinate.name,
                                   rev=coordinate.rev)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
        target_libraries = OrderedSet(iter_transitive_jars(current_target))

    for dep in current_target.dependencies:
        info["targets"].append(dep.address.spec)
        if isinstance(dep, JarLibrary):
            for jar in dep.jar_dependencies:
                target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
            # Add all the jars pulled in by this jar_library
            target_libraries.update(iter_transitive_jars(dep))
        if isinstance(dep, Resources):
            resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
        for dep in current_target.java_sources:
            info["targets"].append(dep.address.spec)
            process_target(dep)

    if isinstance(current_target, JvmTarget):
        info["excludes"] = [
            self._exclude_id(exclude) for exclude in current_target.excludes
        ]
        info["platform"] = current_target.platform.name
        if hasattr(current_target, "runtime_platform"):
            info["runtime_platform"] = current_target.runtime_platform.name

    info["roots"] = [{
        "source_root": source_root_package_prefix[0],
        "package_prefix": source_root_package_prefix[1],
    } for source_root_package_prefix in self._source_roots_for_target(
        current_target)]

    if classpath_products:
        info["libraries"] = [
            self._jar_id(lib) for lib in target_libraries
        ]

    targets_map[current_target.address.spec] = info
def synthetic_target_extra_dependencies(self, target, target_workdir):
    """Return the extra dependencies for the synthetic target: the java deps."""
    return OrderedSet(self.javadeps)
def all_imported_requirements(self):
    """Return the requirements of every imported target, deduped in import order."""
    # TODO: figure out if this OrderedSet is necessary.
    collected = OrderedSet()
    for requirement_library in self.imported_targets:
        collected.update(requirement_library.requirements)
    return list(collected)
async def resolve_specs_paths(specs: Specs) -> SpecsPaths:
    """Resolve all files matching the given specs.

    All matched targets will use their `sources` field. Certain specs like FileLiteralSpec will
    also match against all their files, regardless of if a target owns them.

    Ignores win out over includes, with these edge cases:

    * Ignored paths: the resolved paths should be excluded.
    * Ignored targets: their `sources` should be excluded.
    * File owned by a target that gets filtered out, e.g. via `--tag`. See
      https://github.com/pantsbuild/pants/issues/15478.
    """
    # Resolve include/ignore targets and literal path globs concurrently. Includes are
    # resolved unfiltered here so that filtered-out targets can be subtracted below.
    unfiltered_include_targets, ignore_targets, include_paths, ignore_paths = await MultiGet(
        Get(
            Targets, RawSpecs,
            dataclasses.replace(specs.includes,
                                filter_by_global_options=False)),
        Get(Targets, RawSpecs, specs.ignores),
        Get(Paths, PathGlobs, specs.includes.to_specs_paths_path_globs()),
        Get(Paths, PathGlobs, specs.ignores.to_specs_paths_path_globs()),
    )

    filtered_include_targets = await Get(FilteredTargets, Targets,
                                         unfiltered_include_targets)

    include_targets_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
        for tgt in filtered_include_targets if tgt.has_field(SourcesField))

    ignore_targets_sources_paths = await MultiGet(
        Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
        for tgt in ignore_targets if tgt.has_field(SourcesField))

    # Includes first (targets' sources plus literal paths), then subtract all ignores.
    result_paths = OrderedSet(
        itertools.chain.from_iterable(
            paths.files for paths in include_targets_sources_paths), )
    result_paths.update(include_paths.files)
    result_paths.difference_update(
        itertools.chain.from_iterable(
            paths.files for paths in ignore_targets_sources_paths))
    result_paths.difference_update(ignore_paths.files)

    # If include paths were given, we need to also remove any paths from filtered out targets
    # (e.g. via `--tag`), per https://github.com/pantsbuild/pants/issues/15478.
    if include_paths.files:
        filtered_out_include_targets = FrozenOrderedSet(
            unfiltered_include_targets).difference(
                FrozenOrderedSet(filtered_include_targets))
        filtered_include_targets_sources_paths = await MultiGet(
            Get(SourcesPaths, SourcesPathsRequest(tgt[SourcesField]))
            for tgt in filtered_out_include_targets
            if tgt.has_field(SourcesField))
        result_paths.difference_update(
            itertools.chain.from_iterable(
                paths.files
                for paths in filtered_include_targets_sources_paths))

    # Every ancestor directory of the matched files, excluding the repo root ("").
    dirs = OrderedSet(
        itertools.chain.from_iterable(
            recursive_dirname(os.path.dirname(f))
            for f in result_paths)) - {""}

    return SpecsPaths(tuple(sorted(result_paths)), tuple(sorted(dirs)))
def all_imported_jar_deps(self):
    """Return the jar dependencies of every imported target, deduped in import order."""
    collected = OrderedSet()
    for jar_library in self.imported_targets:
        collected.update(jar_library.jar_dependencies)
    return list(collected)
def synthetic_target_extra_dependencies(self, target, target_workdir):
    """Extra deps for the synthetic target: the thrift deps for `target`, then `target`'s
    own declared dependencies."""
    combined = OrderedSet(self._thrift_dependencies_for_target(target))
    for dependency in target.dependencies:
        combined.add(dependency)
    return combined
def run_tests(self, fail_fast, test_targets, output_dir, coverage, complete_test_registry):
    """Run the given JUnit test targets in batches and return an aggregate TestResult.

    :param fail_fast: Whether to stop after the first failing batch.
    :param test_targets: The test targets to run.
    :param output_dir: Base directory for per-batch output.
    :param coverage: The coverage engine, used to instrument and modify each run.
    :param complete_test_registry: Registry mapping tests to owning targets.
    """
    test_registry = complete_test_registry.filter(test_targets)
    if test_registry.empty:
        return TestResult.successful

    coverage.instrument(output_dir)

    def parse_error_handler(parse_error):
        # Just log and move on since the result is only used to characterize failures, and raising
        # an error here would just distract from the underlying test failures.
        self.context.log.error(
            "Error parsing test result file {path}: {cause}".format(
                path=parse_error.xml_path, cause=parse_error.cause
            )
        )

    # The 'instrument_classpath' product below will be `None` if not set, and we'll default
    # back to runtime_classpath
    classpath_product = self.context.products.get_data("instrument_classpath")

    result = 0
    for batch_id, (properties, batch) in enumerate(self._iter_batches(test_registry)):
        (
            workdir,
            platform,
            target_jvm_options,
            target_env_vars,
            concurrency,
            threads,
        ) = properties

        batch_output_dir = output_dir
        if self._batched:
            batch_output_dir = os.path.join(batch_output_dir, f"batch-{batch_id}")

        run_modifications = coverage.run_modifications(batch_output_dir)
        self.context.log.debug(f"run_modifications: {run_modifications}")
        extra_jvm_options = run_modifications.extra_jvm_options

        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = {test_registry.get_owning_target(t) for t in batch}

        # Assemble the run classpath: coverage prepends, then the JUnit runner, then the
        # targets' own (possibly instrumented) classpath.
        complete_classpath = OrderedSet()
        complete_classpath.update(run_modifications.classpath_prepend)
        complete_classpath.update(JUnit.global_instance().runner_classpath(self.context))
        complete_classpath.update(
            self.classpath(relevant_targets, classpath_product=classpath_product)
        )

        distribution = self.preferred_jvm_distribution([platform], self._strict_jvm_version)

        # Override cmdline args with values from junit_test() target that specify concurrency:
        args = self._args(fail_fast, batch_output_dir) + ["-xmlreport"]

        if concurrency is not None:
            args = remove_arg(args, "-default-parallel")
            if concurrency == JUnitTests.CONCURRENCY_SERIAL:
                args = ensure_arg(args, "-default-concurrency", param="SERIAL")
            elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
                args = ensure_arg(args, "-default-concurrency", param="PARALLEL_CLASSES")
            elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
                args = ensure_arg(args, "-default-concurrency", param="PARALLEL_METHODS")
            elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
                args = ensure_arg(
                    args, "-default-concurrency", param="PARALLEL_CLASSES_AND_METHODS"
                )

        if threads is not None:
            args = remove_arg(args, "-parallel-threads", has_param=True)
            args += ["-parallel-threads", str(threads)]

        batch_test_specs = [test.render_test_spec() for test in batch]
        with argfile.safe_args(batch_test_specs, self.get_options()) as batch_tests:
            with self.chroot(relevant_targets, workdir) as chroot:
                self.context.log.debug(f"CWD = {chroot}")
                self.context.log.debug(f"platform = {platform}")
                with environment_as(**dict(target_env_vars)):
                    subprocess_result = self.spawn_and_wait(
                        relevant_targets,
                        executor=SubprocessExecutor(distribution),
                        distribution=distribution,
                        classpath=complete_classpath,
                        main=JUnit.RUNNER_MAIN,
                        jvm_options=self.jvm_options + list(platform.jvm_options)
                        + extra_jvm_options + list(target_jvm_options),
                        args=args + batch_tests,
                        workunit_factory=self.context.new_workunit,
                        workunit_name="run",
                        workunit_labels=[WorkUnitLabel.TEST],
                        cwd=chroot,
                        synthetic_jar_dir=batch_output_dir,
                        create_synthetic_jar=self.synthetic_classpath,
                    )
                    self.context.log.debug(
                        "JUnit subprocess exited with result ({})".format(subprocess_result)
                    )
                    result += abs(subprocess_result)

        # Report per-test info parsed from the batch's XML output.
        tests_info = self.parse_test_info(
            batch_output_dir, parse_error_handler, ["classname"]
        )
        for test_name, test_info in tests_info.items():
            test_item = Test(test_info["classname"], test_name)
            test_target = test_registry.get_owning_target(test_item)
            self.report_all_info_for_single_test(
                self.options_scope, test_target, test_name, test_info
            )

        if result != 0 and fail_fast:
            break

    if result == 0:
        return TestResult.successful

    # NB: If the TestRegistry fails to find the owning target of a failed test, the target key in
    # this dictionary will be None: helper methods in this block account for that.
    target_to_failed_test = parse_failed_targets(test_registry, output_dir, parse_error_handler)

    def sort_owning_target(t):
        return t.address.spec if t else ""

    failed_targets = sorted(target_to_failed_test, key=sort_owning_target)
    error_message_lines = []
    if self._failure_summary:

        def render_owning_target(t):
            return t.address.reference() if t else "<Unknown Target>"

        for target in failed_targets:
            error_message_lines.append(f"\n{(' ' * 4)}{render_owning_target(target)}")
            for test in sorted(target_to_failed_test[target]):
                error_message_lines.append(f"{' ' * 8}{test.classname}#{test.methodname}")

    error_message_lines.append(
        "\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.".format(
            main=JUnit.RUNNER_MAIN,
            code=result,
            failed=len(failed_targets),
            targets=pluralize(len(failed_targets), "target"),
        )
    )
    return TestResult(
        msg="\n".join(error_message_lines), rc=result, failed_targets=failed_targets
    )
def _process_target(
    self,
    current_target,
    modulizable_target_set,
    resource_target_map,
    runtime_classpath,
    zinc_args_for_target,
):
    """Build and return the export-dep-as-jar info dict for `current_target`.

    :type current_target:pants.build_graph.target.Target
    """
    info = {
        # this means 'dependencies'
        "targets": [],
        "source_dependencies_in_classpath": [],
        "libraries": [],
        "roots": [],
        "id": current_target.id,
        "target_type": ExportDepAsJar._get_target_type(
            current_target, resource_target_map, runtime_classpath
        ),
        "is_synthetic": current_target.is_synthetic,
        "pants_target_type": self._get_pants_target_alias(type(current_target)),
        "is_target_root": current_target in modulizable_target_set,
        "transitive": current_target.transitive,
        "scope": str(current_target.scope),
        "scalac_args": self._extract_arguments_with_prefix_from_zinc_args(
            zinc_args_for_target, "-S"
        ),
        "javac_args": self._extract_arguments_with_prefix_from_zinc_args(
            zinc_args_for_target, "-C"
        ),
        "extra_jvm_options": current_target.payload.get_field_value("extra_jvm_options", []),
    }

    def iter_transitive_jars(jar_lib):
        """
        :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
        :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
        """
        if runtime_classpath:
            jar_products = runtime_classpath.get_artifact_classpath_entries_for_targets(
                (jar_lib,)
            )
            for _, jar_entry in jar_products:
                coordinate = jar_entry.coordinate
                # We drop classifier and type_ since those fields are represented in the global
                # libraries dict and here we just want the key into that dict (see `_jar_id`).
                yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

    def _full_library_set_for_target(target):
        """Get the full library set for a target, including jar dependencies and jars of the
        library itself."""
        libraries = set([])
        if isinstance(target, JarLibrary):
            jars = set([])
            for jar in target.jar_dependencies:
                jars.add(M2Coordinate(jar.org, jar.name, jar.rev))
            # Add all the jars pulled in by this jar_library
            jars.update(iter_transitive_jars(target))
            libraries = [self._jar_id(jar) for jar in jars]
        else:
            libraries.add(target.id)
        return libraries

    # Synthetic targets have no sources on disk to report.
    if not current_target.is_synthetic:
        info["globs"] = current_target.globs_relative_to_buildroot()

    def _dependencies_needed_in_classpath(target):
        # JVM targets honor strict_deps; everything else contributes its full closure.
        if isinstance(target, JvmTarget):
            return [
                dep
                for dep in DependencyContext.global_instance().dependencies_respecting_strict_deps(
                    target
                )
            ]
        else:
            return [dep for dep in target.closure()]

    dependencies_needed_in_classpath = _dependencies_needed_in_classpath(current_target)

    libraries_for_target = set(
        [self._jar_id(jar) for jar in iter_transitive_jars(current_target)]
    )
    for dep in self._dependencies_to_include_in_libraries(
        current_target, modulizable_target_set, dependencies_needed_in_classpath
    ):
        libraries_for_target.update(_full_library_set_for_target(dep))
    info["libraries"].extend(libraries_for_target)

    info["roots"] = [
        {
            "source_root": os.path.realpath(source_root_package_prefix[0]),
            "package_prefix": source_root_package_prefix[1],
        }
        for source_root_package_prefix in self._source_roots_for_target(current_target)
    ]

    # Only dependencies that are themselves modules appear under "targets".
    for dep in current_target.dependencies:
        if dep in modulizable_target_set:
            info["targets"].append(dep.address.spec)

    if isinstance(current_target, ScalaLibrary):
        for dep in current_target.java_sources:
            info["targets"].append(dep.address.spec)

    if isinstance(current_target, JvmTarget):
        info["excludes"] = [self._exclude_id(exclude) for exclude in current_target.excludes]
        info["platform"] = current_target.platform.name
        if hasattr(current_target, "runtime_platform"):
            info["runtime_platform"] = current_target.runtime_platform.name

    transitive_targets = OrderedSet(
        [
            dep.address.spec
            for dep in dependencies_needed_in_classpath
            if dep in modulizable_target_set
        ]
    )
    transitive_targets.update(info["targets"])
    info["source_dependencies_in_classpath"] = [dep for dep in transitive_targets]

    return info
def synthetic_target_extra_dependencies(self, target, target_workdir):
    """Extra deps for the synthetic target: the resolved thrifty runtime, then `target`'s
    own declared dependencies."""
    runtime_deps = self.resolve_deps([self.get_options().thrifty_runtime])
    combined = OrderedSet(runtime_deps)
    combined.update(target.dependencies)
    return combined