def _calculate_python_sources(self, target_roots: List[Target]):
    """Collect the Python source files reachable from the given target roots.

    When a whitelist tag name is configured, only tagged targets (and their
    closures) are considered, and a warning is issued for any non-whitelisted
    Python targets found in the closure.
    """
    if self.get_options().whitelist_tag_name:
        tagged_roots = [tgt for tgt in target_roots if self._is_tagged_target(tgt)]
        all_targets = self._filter_targets(Target.closure_for_targets(tagged_roots))
        python_eval_targets = [
            tgt for tgt in all_targets
            if self._is_tagged_non_synthetic_python_target(tgt)
        ]
        if not self._all_targets_are_whitelisted(python_eval_targets, all_targets):
            # Surface the targets that slipped into the closure without the tag.
            targets_not_whitelisted = [
                tgt for tgt in all_targets
                if self._not_tagged_non_synthetic_python_target(tgt)
            ]
            self._whitelist_warning(targets_not_whitelisted)
    else:
        python_eval_targets = self._filter_targets([
            tgt for tgt in Target.closure_for_targets(target_roots)
            if self.is_non_synthetic_python_target(tgt)
        ])

    collected = set()
    for tgt in python_eval_targets:
        collected.update(
            src for src in tgt.sources_relative_to_buildroot()
            if os.path.splitext(src)[1] == self._PYTHON_SOURCE_EXTENSION
        )
    return list(collected)
def _calculate_python_sources(self, target_roots: Iterable[Target]) -> List[str]:
    """Return a sorted list of Python source files derived from the given targets.

    If a whitelist tag name is configured, only targets carrying that tag are
    evaluated; in verbose mode, untagged dependencies of the tagged roots are
    reported via `_check_for_untagged_dependencies`.
    """
    candidate_targets = {
        tgt
        for tgt in Target.closure_for_targets(target_roots=target_roots)
        if self.is_non_synthetic_python_target(tgt)
    }
    tag_name = self.get_options().whitelist_tag_name
    if tag_name:
        tagged_targets = {tgt for tgt in candidate_targets if tag_name in tgt.tags}
        eval_targets = tagged_targets
        if self.get_options().verbose:
            self._check_for_untagged_dependencies(
                tagged_target_roots={
                    tgt for tgt in tagged_targets if tgt in target_roots
                },
                tag_name=tag_name,
            )
    else:
        eval_targets = candidate_targets

    sources: Set[str] = set()
    for tgt in eval_targets:
        sources.update(
            src
            for src in tgt.sources_relative_to_buildroot()
            if os.path.splitext(src)[1] == self._PYTHON_SOURCE_EXTENSION
        )
    # sorted() already returns a list, so the original `list(sorted(...))`
    # wrapper is dropped; the value is identical.
    return sorted(sources)
def _library_targets(self, managed_jar_dependencies):
    """Yield the transitive closure of the targets named by the library specs."""
    resolved = []
    for spec in managed_jar_dependencies.library_specs:
        resolved.extend(self.context.resolve(spec))
    yield from Target.closure_for_targets(resolved)
def _collect_internal_deps(self, targets):
    """Collect one level of dependencies from the given targets, then walk transitively.

    This differs from calling `Target.closure_for_targets` on `targets` directly:
    the given roots themselves are excluded from the result unless one root
    happens to depend on another.
    """
    direct_deps = set()
    for tgt in targets:
        direct_deps.update(tgt.dependencies)
    return Target.closure_for_targets(direct_deps)
def assert_closure(self,
                   expected_targets,
                   roots,
                   include_scopes=None,
                   exclude_scopes=None,
                   respect_intransitive=True,
                   ordered=False):
    """Assert that `Target.closure_for_targets(roots, ...)` equals `expected_targets`.

    :param ordered: when True, compare as OrderedSets so iteration order is
      asserted as well; otherwise compare as plain sets.
    """
    set_type = OrderedSet if ordered else set
    result = set_type(Target.closure_for_targets(
        target_roots=roots,
        include_scopes=include_scopes,
        exclude_scopes=exclude_scopes,
        respect_intransitive=respect_intransitive,
    ))
    # `assertEquals` is a deprecated alias (removed in Python 3.12);
    # use the canonical `assertEqual`.
    self.assertEqual(set_type(expected_targets), result)
def assert_closure_dfs(self,
                       expected_targets,
                       roots,
                       include_scopes=None,
                       exclude_scopes=None,
                       respect_intransitive=True,
                       ordered=False,
                       postorder=None):
    """Assert the DFS closure of `roots` equals `expected_targets`.

    :param ordered: when True, compare as OrderedSets so iteration order is
      asserted as well; otherwise compare as plain sets.
    :param postorder: forwarded to `Target.closure_for_targets` to select
      post-order vs pre-order traversal.
    """
    set_type = OrderedSet if ordered else set
    result = set_type(Target.closure_for_targets(
        target_roots=roots,
        include_scopes=include_scopes,
        exclude_scopes=exclude_scopes,
        respect_intransitive=respect_intransitive,
        postorder=postorder
    ))
    # `assertEquals` is a deprecated alias (removed in Python 3.12);
    # use the canonical `assertEqual`.
    self.assertEqual(set_type(expected_targets), result)
def find_all_relevant_resources_targets(self):
    """Return every Resources target reachable (BFS) from the JVM targets in context."""
    # NB: Ordering isn't relevant here, because it is applied during the dep walk to
    # consume from the runtime_classpath.
    jvm_targets = self.context.targets(predicate=lambda t: isinstance(t, JvmTarget))
    return OrderedSet(
        tgt
        for tgt in Target.closure_for_targets(jvm_targets, bfs=True)
        if isinstance(tgt, Resources)
    )
def _check_for_untagged_dependencies(
    self, *, tagged_target_roots: Iterable[Target], tag_name: str
) -> None:
    """Warn when tagged roots transitively depend on untagged Python targets."""

    def lacks_tag(tgt: Target) -> bool:
        # Only non-synthetic Python targets are interesting for this check.
        return tag_name not in tgt.tags and self.is_non_synthetic_python_target(tgt)

    untagged = {
        tgt
        for tgt in Target.closure_for_targets(target_roots=tagged_target_roots)
        if lacks_tag(tgt)
    }
    if not untagged:
        return
    formatted_targets = "\n".join(tgt.address.spec for tgt in sorted(untagged))
    self.context.log.warn(
        f"[WARNING]: The following targets are not marked with the tag name `{tag_name}`, "
        f"but are dependencies of targets that are type checked. MyPy will check these dependencies, "
        f"inferring `Any` where possible. You are encouraged to properly type check "
        f"these dependencies.\n{formatted_targets}"
    )
def closure(*args, **kwargs):
    """Delegates to `Target.closure_for_targets`; see that method for arguments.

    :API: public
    """
    return Target.closure_for_targets(*args, **kwargs)
def _collect_targets(self, root_targets, **kwargs):
    """Compute the transitive closure of ``root_targets``.

    Extra keyword arguments are forwarded to `Target.closure_for_targets`.
    """
    return Target.closure_for_targets(target_roots=root_targets, **kwargs)
def _collect_targets(self, root_targets, **kwargs):
    """Return the dependency closure of the given roots via `Target.closure_for_targets`."""
    return Target.closure_for_targets(
        target_roots=root_targets,
        **kwargs
    )
def generate_targets_map(self, targets, classpath_products=None):
    """Generates a dictionary containing all pertinent information about the target graph.

    The return dictionary is suitable for serialization by json.dumps.
    :param targets: The list of targets to generate the map for.
    :param classpath_products: Optional classpath_products. If not provided when the
      --libraries option is `True`, this task will perform its own jar resolution.
    """
    targets_map = {}
    resource_target_map = {}
    python_interpreter_targets_mapping = defaultdict(list)

    if self.get_options().libraries:
        # NB(gmalmquist): This supports mocking the classpath_products in tests.
        if classpath_products is None:
            classpath_products = self.resolve_jars(targets)
    else:
        classpath_products = None

    target_roots_set = set(self.context.target_roots)

    def process_target(current_target):
        """
        :type current_target:pants.build_graph.target.Target
        """

        def get_target_type(tgt):
            # Classify a target as test / (test-)resource / source.
            def is_test(t):
                return isinstance(t, JUnitTests) or isinstance(t, PythonTests)

            if is_test(tgt):
                return SourceRootTypes.TEST
            if (isinstance(tgt, Resources) and tgt in resource_target_map
                    and is_test(resource_target_map[tgt])):
                return SourceRootTypes.TEST_RESOURCE
            if isinstance(tgt, Resources):
                return SourceRootTypes.RESOURCE
            return SourceRootTypes.SOURCE

        info = {
            'targets': [],
            'libraries': [],
            'roots': [],
            'id': current_target.id,
            'target_type': get_target_type(current_target),
            # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
            'is_code_gen': current_target.is_synthetic,
            'is_synthetic': current_target.is_synthetic,
            'pants_target_type': self._get_pants_target_alias(type(current_target)),
        }

        if not current_target.is_synthetic:
            info['globs'] = current_target.globs_relative_to_buildroot()
            if self.get_options().sources:
                info['sources'] = list(current_target.sources_relative_to_buildroot())

        info['transitive'] = current_target.transitive
        info['scope'] = str(current_target.scope)
        info['is_target_root'] = current_target in target_roots_set

        if isinstance(current_target, PythonRequirementLibrary):
            # reqs: set of pants.backend.python.python_requirement.PythonRequirement
            reqs = current_target.payload.get_field_value('requirements', set())
            info['requirements'] = [req.key for req in reqs]

        if isinstance(current_target, PythonTarget):
            interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
                [current_target])
            if interpreter_for_target is None:
                raise TaskError('Unable to find suitable interpreter for {}'.format(
                    current_target.address))
            python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
            info['python_interpreter'] = str(interpreter_for_target.identity)

        def iter_transitive_jars(jar_lib):
            """
            :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
            :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
            """
            if classpath_products:
                jar_products = classpath_products.get_artifact_classpath_entries_for_targets(
                    (jar_lib,))
                for _, jar_entry in jar_products:
                    coordinate = jar_entry.coordinate
                    # We drop classifier and type_ since those fields are represented in the
                    # global libraries dict and here we just want the key into that dict
                    # (see `_jar_id`).
                    yield M2Coordinate(org=coordinate.org, name=coordinate.name,
                                       rev=coordinate.rev)

        target_libraries = OrderedSet()
        if isinstance(current_target, JarLibrary):
            target_libraries = OrderedSet(iter_transitive_jars(current_target))
        for dep in current_target.dependencies:
            info['targets'].append(dep.address.spec)
            if isinstance(dep, JarLibrary):
                for jar in dep.jar_dependencies:
                    target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
                # Add all the jars pulled in by this jar_library
                target_libraries.update(iter_transitive_jars(dep))
            if isinstance(dep, Resources):
                resource_target_map[dep] = current_target

        if isinstance(current_target, ScalaLibrary):
            for dep in current_target.java_sources:
                info['targets'].append(dep.address.spec)
                process_target(dep)

        if isinstance(current_target, JvmTarget):
            info['excludes'] = [
                self._exclude_id(exclude) for exclude in current_target.excludes
            ]
            info['platform'] = current_target.platform.name
            if hasattr(current_target, 'test_platform'):
                info['test_platform'] = current_target.test_platform.name

        info['roots'] = [{
            'source_root': source_root_package_prefix[0],
            'package_prefix': source_root_package_prefix[1]
        } for source_root_package_prefix in self._source_roots_for_target(current_target)]

        if classpath_products:
            info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
        targets_map[current_target.address.spec] = info

    for target in targets:
        process_target(target)

    scala_platform = ScalaPlatform.global_instance()
    scala_platform_map = {
        'scala_version': scala_platform.version,
        'compiler_classpath': [
            cp_entry.path
            for cp_entry in scala_platform.compiler_classpath_entries(self.context.products)
        ],
    }

    jvm_platforms_map = {
        'default_platform': JvmPlatform.global_instance().default_platform.name,
        'platforms': {
            str(platform_name): {
                'target_level': str(platform.target_level),
                'source_level': str(platform.source_level),
                'args': platform.args,
            } for platform_name, platform in
            JvmPlatform.global_instance().platforms_by_name.items()
        },
    }

    graph_info = {
        'version': DEFAULT_EXPORT_VERSION,
        'targets': targets_map,
        'jvm_platforms': jvm_platforms_map,
        'scala_platform': scala_platform_map,
        # `jvm_distributions` are static distribution settings from config,
        # `preferred_jvm_distributions` are distributions that pants actually uses for the
        # given platform setting.
        'preferred_jvm_distributions': {}
    }

    for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items():
        preferred_distributions = {}
        for strict, strict_key in [(True, 'strict'), (False, 'non_strict')]:
            try:
                dist = JvmPlatform.preferred_jvm_distribution([platform], strict=strict)
                preferred_distributions[strict_key] = dist.home
            except DistributionLocator.Error:
                # No distribution available for this strictness; omit the key.
                pass
        if preferred_distributions:
            graph_info['preferred_jvm_distributions'][platform_name] = preferred_distributions

    if classpath_products:
        graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

    if python_interpreter_targets_mapping:
        # NB: We've selected a python interpreter compatible with each python target
        # individually into the `python_interpreter_targets_mapping`. These python targets
        # may not be compatible, ie: we could have a python target requiring
        # 'CPython>=2.7<3' (ie: CPython-2.7.x) and another requiring 'CPython>=3.6'. To pick
        # a default interpreter then from among these two choices is arbitrary and not to be
        # relied on to work as a default interpreter if ever needed by the export consumer.
        #
        # TODO(John Sirois): consider either eliminating the 'default_interpreter' field and
        # pressing export consumers to make their own choice of a default (if needed) or else
        # use `select.select_interpreter_for_targets` and fail fast if there is no
        # interpreter compatible across all the python targets in-play.
        #
        # For now, make our arbitrary historical choice of a default interpreter explicit and
        # use the lowest version.
        default_interpreter = min(python_interpreter_targets_mapping.keys())

        interpreters_info = {}
        for interpreter, targets in python_interpreter_targets_mapping.items():
            req_libs = [
                target for target in Target.closure_for_targets(targets)
                if has_python_requirements(target)
            ]
            chroot = self.resolve_requirements(interpreter, req_libs)
            interpreters_info[str(interpreter.identity)] = {
                'binary': interpreter.binary,
                'chroot': chroot.path()
            }

        graph_info['python_setup'] = {
            'default_interpreter': str(default_interpreter.identity),
            'interpreters': interpreters_info
        }

    if self.get_options().available_target_types:
        graph_info['available_target_types'] = self._target_types()

    return graph_info
def _library_targets(self, managed_jar_dependencies):
    """Yield the transitive closure of targets resolved from the library specs."""
    roots = [
        tgt
        for spec in managed_jar_dependencies.library_specs
        for tgt in self.context.resolve(spec)
    ]
    yield from Target.closure_for_targets(roots)
def generate_targets_map(self, targets, classpath_products=None):
    """Generates a dictionary containing all pertinent information about the target graph.

    The return dictionary is suitable for serialization by json.dumps.
    :param targets: The list of targets to generate the map for.
    :param classpath_products: Optional classpath_products. If not provided when the
      --libraries option is `True`, this task will perform its own jar resolution.
    """
    targets_map = {}
    resource_target_map = {}
    python_interpreter_targets_mapping = defaultdict(list)

    if self.get_options().libraries:
        # NB(gmalmquist): This supports mocking the classpath_products in tests.
        if classpath_products is None:
            classpath_products = self.resolve_jars(targets)
    else:
        classpath_products = None

    target_roots_set = set(self.context.target_roots)

    def process_target(current_target):
        """
        :type current_target:pants.build_graph.target.Target
        """

        def get_target_type(tgt):
            # Classify a target as test / (test-)resource / source.
            def is_test(t):
                return isinstance(t, JUnitTests) or isinstance(t, PythonTests)

            if is_test(tgt):
                return ExportTask.SourceRootTypes.TEST
            if (isinstance(tgt, Resources) and tgt in resource_target_map
                    and is_test(resource_target_map[tgt])):
                return ExportTask.SourceRootTypes.TEST_RESOURCE
            if isinstance(tgt, Resources):
                return ExportTask.SourceRootTypes.RESOURCE
            return ExportTask.SourceRootTypes.SOURCE

        info = {
            'targets': [],
            'libraries': [],
            'roots': [],
            'id': current_target.id,
            'target_type': get_target_type(current_target),
            # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
            'is_code_gen': current_target.is_synthetic,
            'is_synthetic': current_target.is_synthetic,
            'pants_target_type': self._get_pants_target_alias(type(current_target)),
        }

        if not current_target.is_synthetic:
            info['globs'] = current_target.globs_relative_to_buildroot()
            if self.get_options().sources:
                info['sources'] = list(current_target.sources_relative_to_buildroot())

        info['transitive'] = current_target.transitive
        info['scope'] = str(current_target.scope)
        info['is_target_root'] = current_target in target_roots_set

        if isinstance(current_target, PythonRequirementLibrary):
            # reqs: set of pants.backend.python.python_requirement.PythonRequirement
            reqs = current_target.payload.get_field_value('requirements', set())
            info['requirements'] = [req.key for req in reqs]

        if isinstance(current_target, PythonTarget):
            interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
                [current_target])
            if interpreter_for_target is None:
                raise TaskError('Unable to find suitable interpreter for {}'
                                .format(current_target.address))
            python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
            info['python_interpreter'] = str(interpreter_for_target.identity)

        def iter_transitive_jars(jar_lib):
            """
            :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
            :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
            """
            if classpath_products:
                jar_products = classpath_products.get_artifact_classpath_entries_for_targets(
                    (jar_lib,))
                for _, jar_entry in jar_products:
                    coordinate = jar_entry.coordinate
                    # We drop classifier and type_ since those fields are represented in the
                    # global libraries dict and here we just want the key into that dict
                    # (see `_jar_id`).
                    yield M2Coordinate(org=coordinate.org, name=coordinate.name,
                                       rev=coordinate.rev)

        target_libraries = OrderedSet()
        if isinstance(current_target, JarLibrary):
            target_libraries = OrderedSet(iter_transitive_jars(current_target))
        for dep in current_target.dependencies:
            info['targets'].append(dep.address.spec)
            if isinstance(dep, JarLibrary):
                for jar in dep.jar_dependencies:
                    target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
                # Add all the jars pulled in by this jar_library
                target_libraries.update(iter_transitive_jars(dep))
            if isinstance(dep, Resources):
                resource_target_map[dep] = current_target

        if isinstance(current_target, ScalaLibrary):
            for dep in current_target.java_sources:
                info['targets'].append(dep.address.spec)
                process_target(dep)

        if isinstance(current_target, JvmTarget):
            info['excludes'] = [
                self._exclude_id(exclude) for exclude in current_target.excludes
            ]
            info['platform'] = current_target.platform.name
            if hasattr(current_target, 'test_platform'):
                info['test_platform'] = current_target.test_platform.name

        info['roots'] = [{
            'source_root': source_root_package_prefix[0],
            'package_prefix': source_root_package_prefix[1]
        } for source_root_package_prefix in self._source_roots_for_target(current_target)]

        if classpath_products:
            info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
        targets_map[current_target.address.spec] = info

    for target in targets:
        process_target(target)

    jvm_platforms_map = {
        'default_platform': JvmPlatform.global_instance().default_platform.name,
        'platforms': {
            str(platform_name): {
                'target_level': str(platform.target_level),
                'source_level': str(platform.source_level),
                'args': platform.args,
            } for platform_name, platform in
            JvmPlatform.global_instance().platforms_by_name.items()
        },
    }

    graph_info = {
        'version': self.DEFAULT_EXPORT_VERSION,
        'targets': targets_map,
        'jvm_platforms': jvm_platforms_map,
        # `jvm_distributions` are static distribution settings from config,
        # `preferred_jvm_distributions` are distributions that pants actually uses for the
        # given platform setting.
        'preferred_jvm_distributions': {}
    }

    for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items():
        preferred_distributions = {}
        for strict, strict_key in [(True, 'strict'), (False, 'non_strict')]:
            try:
                dist = JvmPlatform.preferred_jvm_distribution([platform], strict=strict)
                preferred_distributions[strict_key] = dist.home
            except DistributionLocator.Error:
                # No distribution available for this strictness; omit the key.
                pass
        if preferred_distributions:
            graph_info['preferred_jvm_distributions'][platform_name] = preferred_distributions

    if classpath_products:
        graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

    if python_interpreter_targets_mapping:
        # NB: We've selected a python interpreter compatible with each python target
        # individually into the `python_interpreter_targets_mapping`. These python targets
        # may not be compatible, ie: we could have a python target requiring
        # 'CPython>=2.7<3' (ie: CPython-2.7.x) and another requiring 'CPython>=3.6'. To pick
        # a default interpreter then from among these two choices is arbitrary and not to be
        # relied on to work as a default interpreter if ever needed by the export consumer.
        #
        # TODO(John Sirois): consider either eliminating the 'default_interpreter' field and
        # pressing export consumers to make their own choice of a default (if needed) or else
        # use `select.select_interpreter_for_targets` and fail fast if there is no
        # interpreter compatible across all the python targets in-play.
        #
        # For now, make our arbitrary historical choice of a default interpreter explicit and
        # use the lowest version.
        default_interpreter = min(python_interpreter_targets_mapping.keys())

        interpreters_info = {}
        # Fix: iterate with the builtin `dict.items()` rather than the Python-2 compat shim
        # `six.iteritems` (the sibling copy of this task already does so). Also renamed the
        # loop variable so it no longer shadows the `targets` parameter.
        for interpreter, interp_targets in python_interpreter_targets_mapping.items():
            req_libs = [
                tgt for tgt in Target.closure_for_targets(interp_targets)
                if has_python_requirements(tgt)
            ]
            chroot = self.resolve_requirements(interpreter, req_libs)
            interpreters_info[str(interpreter.identity)] = {
                'binary': interpreter.binary,
                'chroot': chroot.path()
            }

        graph_info['python_setup'] = {
            'default_interpreter': str(default_interpreter.identity),
            'interpreters': interpreters_info
        }

    return graph_info