def execute(self):
  if not self.context.targets(lambda t: is_python_target(t) or has_python_requirements(t)):
    return
  interpreter = self.context.products.get_data(PythonInterpreter)
  pex = self.resolve_requirements(interpreter, self.context.targets(has_python_requirements))
  self.context.products.register_data(self.REQUIREMENTS_PEX, pex)

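# A minimal sketch of how a downstream task could consume the product registered above.
# `prepare`/`require_data`/`get_data` are the standard Pants v1 task APIs; the class name
# `UsesRequirementsPex` is hypothetical, and the import path of the resolving task is an
# assumption based on the v1 source tree.
from pants.backend.python.tasks.resolve_requirements import ResolveRequirements
from pants.task.task import Task


class UsesRequirementsPex(Task):
  @classmethod
  def prepare(cls, options, round_manager):
    # Ask the round manager to schedule the resolving task first so its product exists.
    round_manager.require_data(ResolveRequirements.REQUIREMENTS_PEX)

  def execute(self):
    # The already-built requirements PEX, keyed by the same product type used above.
    requirements_pex = self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
    self.context.log.debug('Resolved requirements pex at {}'.format(requirements_pex.path()))
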
def _create_binary(self, binary_tgt, results_dir):
  """Create a .pex file for the specified binary target."""
  # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
  # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
  # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
  # metadata, and so on, which the merging code would have to handle specially).
  interpreter = self.context.products.get_data(PythonInterpreter)
  with temporary_dir() as tmpdir:
    # Create the pex_info for the binary.
    run_info_dict = self.context.run_tracker.run_info.get_as_dict()
    build_properties = PexInfo.make_build_properties()
    build_properties.update(run_info_dict)
    pex_info = binary_tgt.pexinfo.copy()
    pex_info.build_properties = build_properties

    pex_builder = PexBuilderWrapper.Factory.create(
      builder=PEXBuilder(path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True),
      log=self.context.log)

    if binary_tgt.shebang:
      self.context.log.info('Found Python binary target {} with customized shebang, using it: {}'
                            .format(binary_tgt.name, binary_tgt.shebang))
      pex_builder.set_shebang(binary_tgt.shebang)
    else:
      self.context.log.debug('No customized shebang found for {}'.format(binary_tgt.name))

    # Find which targets provide sources and which specify requirements.
    source_tgts = []
    req_tgts = []
    constraint_tgts = []
    for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
      if has_python_sources(tgt) or has_resources(tgt):
        source_tgts.append(tgt)
      elif has_python_requirements(tgt):
        req_tgts.append(tgt)
      if is_python_target(tgt):
        constraint_tgts.append(tgt)

    # Add interpreter compatibility constraints to pex info. This will first check the targets
    # for any constraints, and if they do not have any will resort to the global constraints.
    pex_builder.add_interpreter_constraints_from(constraint_tgts)

    # Dump everything into the builder's chroot.
    for tgt in source_tgts:
      pex_builder.add_sources_from(tgt)

    # We need to ensure that we are resolving for only the current platform if we are
    # including local python dist targets that have native extensions.
    self._python_native_code_settings.check_build_for_current_platform_only(self.context.targets())
    pex_builder.add_requirement_libs_from(req_tgts, platforms=binary_tgt.platforms)

    # Build the .pex file.
    pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name))
    pex_builder.build(pex_path)
    return pex_path

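# For context, a minimal sketch of the underlying pex-library API that PexBuilderWrapper
# wraps. PEXBuilder, set_shebang, add_source, set_entry_point, and build are real pex
# methods; the file paths and entry point below are hypothetical.
from pex.pex_builder import PEXBuilder

builder = PEXBuilder(path='/tmp/example-chroot')  # the chroot the pex is assembled in
builder.set_shebang('/usr/bin/env python3')
builder.add_source('src/python/app/main.py', 'app/main.py')  # (source path, path inside pex)
builder.set_entry_point('app.main')
builder.build('dist/app.pex')  # writes the final executable zipapp-style file
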
def _resolve_requirements_for_versioned_target_closure(self, interpreter, vt):
  reqs_pex_path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity),
                                                vt.cache_key.hash))
  if not os.path.isdir(reqs_pex_path):
    req_libs = [t for t in vt.target.closure() if has_python_requirements(t)]
    with safe_concurrent_creation(reqs_pex_path) as safe_path:
      pex_builder = PexBuilderWrapper.Factory.create(
        builder=PEXBuilder(safe_path, interpreter=interpreter, copy=True),
        log=self.context.log)
      pex_builder.add_requirement_libs_from(req_libs)
      pex_builder.freeze()
  return PEX(reqs_pex_path, interpreter=interpreter)

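# The cache above is keyed by interpreter identity plus the versioned target's fingerprint,
# so a repeat invocation skips resolution entirely and just wraps the existing directory in
# a PEX. A stdlib-only sketch of the `safe_concurrent_creation` pattern (build in a temp
# dir, atomically rename into place); the real Pants helper handles rename races more
# carefully than this illustration.
import os
import shutil
import tempfile
from contextlib import contextmanager


@contextmanager
def safe_concurrent_creation_sketch(final_path):
  parent = os.path.dirname(final_path)
  os.makedirs(parent, exist_ok=True)
  tmp = tempfile.mkdtemp(dir=parent)  # sibling temp dir on the same filesystem
  try:
    yield tmp  # the caller builds its artifact inside `tmp`
    if not os.path.exists(final_path):
      os.rename(tmp, final_path)  # atomic publish: readers never see a partial build
  finally:
    if os.path.exists(tmp):  # we lost the race (or the body raised): discard our copy
      shutil.rmtree(tmp, ignore_errors=True)
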
def _create_binary(self, binary_tgt, results_dir):
  """Create a .pex file for the specified binary target."""
  # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
  # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
  # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
  # metadata, and so on, which the merging code would have to handle specially).
  interpreter = self.context.products.get_data(PythonInterpreter)
  with temporary_dir() as tmpdir:
    # Create the pex_info for the binary.
    build_properties = PexInfo.make_build_properties()
    if self.get_options().include_run_information:
      run_info_dict = self.context.run_tracker.run_info.get_as_dict()
      build_properties.update(run_info_dict)
    pex_info = binary_tgt.pexinfo.copy()
    pex_info.build_properties = build_properties

    pex_builder = PexBuilderWrapper.Factory.create(
      builder=PEXBuilder(path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True),
      log=self.context.log)

    if binary_tgt.shebang:
      self.context.log.info('Found Python binary target {} with customized shebang, using it: {}'
                            .format(binary_tgt.name, binary_tgt.shebang))
      pex_builder.set_shebang(binary_tgt.shebang)
    else:
      self.context.log.debug(f'No customized shebang found for {binary_tgt.name}')

    # Find which targets provide sources and which specify requirements.
    source_tgts = []
    req_tgts = []
    constraint_tgts = []
    for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
      if has_python_sources(tgt) or has_resources(tgt):
        source_tgts.append(tgt)
      elif has_python_requirements(tgt):
        req_tgts.append(tgt)
      if is_python_target(tgt):
        constraint_tgts.append(tgt)

    # Add interpreter compatibility constraints to pex info. Note that we only add the
    # constraints for the final binary target itself, not its dependencies. The upstream
    # interpreter selection tasks will already validate that there are no compatibility
    # conflicts among the dependencies and target. If the binary target does not have
    # `compatibility` in its BUILD entry, the global --python-setup-interpreter-constraints
    # will be used.
    pex_builder.add_interpreter_constraints_from([binary_tgt])

    # Dump everything into the builder's chroot.
    for tgt in source_tgts:
      pex_builder.add_sources_from(tgt)

    # We need to ensure that we are resolving for only the current platform if we are
    # including local python dist targets that have native extensions.
    self._python_native_code_settings.check_build_for_current_platform_only(self.context.targets())
    pex_builder.add_requirement_libs_from(req_tgts, platforms=binary_tgt.platforms)

    # Build the .pex file.
    pex_path = os.path.join(results_dir, f'{binary_tgt.name}.pex')
    pex_builder.build(pex_path)
    return pex_path

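# Gating run-tracker info behind the `include_run_information` option keeps the PEX-INFO
# metadata (and so the built .pex) reproducible by default. A small sketch of inspecting
# that metadata with the real pex-library accessor PexInfo.from_pex(); the path below is
# hypothetical.
from pex.pex_info import PexInfo

pex_info = PexInfo.from_pex('dist/app.pex')
print(pex_info.build_properties)  # pex build properties plus, if enabled, run-tracker details
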
def generate_targets_map(self, targets, classpath_products=None):
  """Generates a dictionary containing all pertinent information about the target graph.

  The return dictionary is suitable for serialization by json.dumps.

  :param targets: The list of targets to generate the map for.
  :param classpath_products: Optional classpath_products. If not provided when the
    --libraries option is `True`, this task will perform its own jar resolution.
  """
  targets_map = {}
  resource_target_map = {}
  python_interpreter_targets_mapping = defaultdict(list)

  if self.get_options().libraries:
    # NB(gmalmquist): This supports mocking the classpath_products in tests.
    if classpath_products is None:
      classpath_products = self.resolve_jars(targets)
  else:
    classpath_products = None

  target_roots_set = set(self.context.target_roots)

  def process_target(current_target):
    """
    :type current_target: pants.build_graph.target.Target
    """

    def get_target_type(tgt):
      def is_test(t):
        return isinstance(t, JUnitTests) or isinstance(t, PythonTests)

      if is_test(tgt):
        return SourceRootTypes.TEST
      else:
        if (isinstance(tgt, Resources) and tgt in resource_target_map and
            is_test(resource_target_map[tgt])):
          return SourceRootTypes.TEST_RESOURCE
        elif isinstance(tgt, Resources):
          return SourceRootTypes.RESOURCE
        else:
          return SourceRootTypes.SOURCE

    info = {
      'targets': [],
      'libraries': [],
      'roots': [],
      'id': current_target.id,
      'target_type': get_target_type(current_target),
      # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
      'is_code_gen': current_target.is_synthetic,
      'is_synthetic': current_target.is_synthetic,
      'pants_target_type': self._get_pants_target_alias(type(current_target)),
    }

    if not current_target.is_synthetic:
      info['globs'] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info['sources'] = list(current_target.sources_relative_to_buildroot())

    info['transitive'] = current_target.transitive
    info['scope'] = str(current_target.scope)
    info['is_target_root'] = current_target in target_roots_set

    if isinstance(current_target, PythonRequirementLibrary):
      reqs = current_target.payload.get_field_value('requirements', set())
      """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
      info['requirements'] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
      interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
        [current_target])
      if interpreter_for_target is None:
        raise TaskError('Unable to find suitable interpreter for {}'
                        .format(current_target.address))
      python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
      info['python_interpreter'] = str(interpreter_for_target.identity)

    def iter_transitive_jars(jar_lib):
      """
      :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
      :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
      """
      if classpath_products:
        jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
        for _, jar_entry in jar_products:
          coordinate = jar_entry.coordinate
          # We drop classifier and type_ since those fields are represented in the global
          # libraries dict and here we just want the key into that dict (see `_jar_id`).
          yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = OrderedSet(iter_transitive_jars(current_target))
    for dep in current_target.dependencies:
      info['targets'].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(iter_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info['targets'].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info['platform'] = current_target.platform.name
      if hasattr(current_target, 'test_platform'):
        info['test_platform'] = current_target.test_platform.name

    info['roots'] = [{
      'source_root': source_root_package_prefix[0],
      'package_prefix': source_root_package_prefix[1]
    } for source_root_package_prefix in self._source_roots_for_target(current_target)]

    if classpath_products:
      info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  scala_platform = ScalaPlatform.global_instance()
  scala_platform_map = {
    'scala_version': scala_platform.version,
    'compiler_classpath': [
      cp_entry.path
      for cp_entry in scala_platform.compiler_classpath_entries(self.context.products)
    ],
  }

  jvm_platforms_map = {
    'default_platform': JvmPlatform.global_instance().default_platform.name,
    'platforms': {
      str(platform_name): {
        'target_level': str(platform.target_level),
        'source_level': str(platform.source_level),
        'args': platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    },
  }

  graph_info = {
    'version': DEFAULT_EXPORT_VERSION,
    'targets': targets_map,
    'jvm_platforms': jvm_platforms_map,
    'scala_platform': scala_platform_map,
    # `jvm_distributions` are static distribution settings from config,
    # `preferred_jvm_distributions` are distributions that pants actually uses for the
    # given platform setting.
    'preferred_jvm_distributions': {}
  }

  for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items():
    preferred_distributions = {}
    for strict, strict_key in [(True, 'strict'), (False, 'non_strict')]:
      try:
        dist = JvmPlatform.preferred_jvm_distribution([platform], strict=strict)
        preferred_distributions[strict_key] = dist.home
      except DistributionLocator.Error:
        pass
    if preferred_distributions:
      graph_info['preferred_jvm_distributions'][platform_name] = preferred_distributions

  if classpath_products:
    graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

  if python_interpreter_targets_mapping:
    # NB: We've selected a python interpreter compatible with each python target individually into
    # the `python_interpreter_targets_mapping`. These python targets may not be compatible, ie: we
    # could have a python target requiring 'CPython>=2.7<3' (ie: CPython-2.7.x) and another
    # requiring 'CPython>=3.6'. To pick a default interpreter then from among these two choices
    # is arbitrary and not to be relied on to work as a default interpreter if ever needed by the
    # export consumer.
    #
    # TODO(John Sirois): consider either eliminating the 'default_interpreter' field and pressing
    # export consumers to make their own choice of a default (if needed) or else use
    # `select.select_interpreter_for_targets` and fail fast if there is no interpreter compatible
    # across all the python targets in-play.
    #
    # For now, make our arbitrary historical choice of a default interpreter explicit and use the
    # lowest version.
    default_interpreter = min(python_interpreter_targets_mapping.keys())

    interpreters_info = {}
    for interpreter, targets in python_interpreter_targets_mapping.items():
      req_libs = [target for target in Target.closure_for_targets(targets)
                  if has_python_requirements(target)]
      chroot = self.resolve_requirements(interpreter, req_libs)
      interpreters_info[str(interpreter.identity)] = {
        'binary': interpreter.binary,
        'chroot': chroot.path()
      }

    graph_info['python_setup'] = {
      'default_interpreter': str(default_interpreter.identity),
      'interpreters': interpreters_info
    }

  if self.get_options().available_target_types:
    graph_info['available_target_types'] = self._target_types()

  return graph_info

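# Illustrative only: an abridged sketch of the dict this method returns, as it would look
# after json.dumps. Key names come from the code above; the version string, addresses,
# coordinates, and paths are all hypothetical values.
#
# {
#   "version": "<DEFAULT_EXPORT_VERSION>",
#   "targets": {
#     "src/scala/com/example:lib": {
#       "targets": ["3rdparty/jvm:guava"],
#       "libraries": ["com.google.guava:guava:28.1-jre"],
#       "roots": [{"source_root": "src/scala", "package_prefix": "com.example"}],
#       "target_type": "SOURCE",
#       "pants_target_type": "scala_library",
#       "id": "src.scala.com.example.lib",
#       "is_synthetic": false,
#       "is_target_root": true
#     }
#   },
#   "jvm_platforms": {
#     "default_platform": "java8",
#     "platforms": {"java8": {"target_level": "1.8", "source_level": "1.8", "args": []}}
#   },
#   "scala_platform": {"scala_version": "2.12", "compiler_classpath": ["/path/to/scala-compiler.jar"]},
#   "preferred_jvm_distributions": {"java8": {"strict": "/usr/lib/jvm/java-8"}},
#   "python_setup": {
#     "default_interpreter": "CPython-2.7.13",
#     "interpreters": {"CPython-2.7.13": {"binary": "/usr/bin/python2.7", "chroot": "/path/to/chroot"}}
#   }
# }
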
def generate_targets_map(self, targets, classpath_products=None):
  """Generates a dictionary containing all pertinent information about the target graph.

  The return dictionary is suitable for serialization by json.dumps.

  :param targets: The list of targets to generate the map for.
  :param classpath_products: Optional classpath_products. If not provided when the
    --libraries option is `True`, this task will perform its own jar resolution.
  """
  targets_map = {}
  resource_target_map = {}
  python_interpreter_targets_mapping = defaultdict(list)

  if self.get_options().libraries:
    # NB(gmalmquist): This supports mocking the classpath_products in tests.
    if classpath_products is None:
      classpath_products = self.resolve_jars(targets)
  else:
    classpath_products = None

  target_roots_set = set(self.context.target_roots)

  def process_target(current_target):
    """
    :type current_target: pants.build_graph.target.Target
    """

    def get_target_type(tgt):
      def is_test(t):
        return isinstance(t, JUnitTests) or isinstance(t, PythonTests)

      if is_test(tgt):
        return ExportTask.SourceRootTypes.TEST
      else:
        if (isinstance(tgt, Resources) and tgt in resource_target_map and
            is_test(resource_target_map[tgt])):
          return ExportTask.SourceRootTypes.TEST_RESOURCE
        elif isinstance(tgt, Resources):
          return ExportTask.SourceRootTypes.RESOURCE
        else:
          return ExportTask.SourceRootTypes.SOURCE

    info = {
      'targets': [],
      'libraries': [],
      'roots': [],
      'id': current_target.id,
      'target_type': get_target_type(current_target),
      # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
      'is_code_gen': current_target.is_synthetic,
      'is_synthetic': current_target.is_synthetic,
      'pants_target_type': self._get_pants_target_alias(type(current_target)),
    }

    if not current_target.is_synthetic:
      info['globs'] = current_target.globs_relative_to_buildroot()
      if self.get_options().sources:
        info['sources'] = list(current_target.sources_relative_to_buildroot())

    info['transitive'] = current_target.transitive
    info['scope'] = str(current_target.scope)
    info['is_target_root'] = current_target in target_roots_set

    if isinstance(current_target, PythonRequirementLibrary):
      reqs = current_target.payload.get_field_value('requirements', set())
      """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
      info['requirements'] = [req.key for req in reqs]

    if isinstance(current_target, PythonTarget):
      interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
        [current_target])
      if interpreter_for_target is None:
        raise TaskError('Unable to find suitable interpreter for {}'
                        .format(current_target.address))
      python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
      info['python_interpreter'] = str(interpreter_for_target.identity)

    def iter_transitive_jars(jar_lib):
      """
      :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
      :rtype: :class:`collections.Iterator` of :class:`pants.java.jar.M2Coordinate`
      """
      if classpath_products:
        jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
        for _, jar_entry in jar_products:
          coordinate = jar_entry.coordinate
          # We drop classifier and type_ since those fields are represented in the global
          # libraries dict and here we just want the key into that dict (see `_jar_id`).
          yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

    target_libraries = OrderedSet()
    if isinstance(current_target, JarLibrary):
      target_libraries = OrderedSet(iter_transitive_jars(current_target))
    for dep in current_target.dependencies:
      info['targets'].append(dep.address.spec)
      if isinstance(dep, JarLibrary):
        for jar in dep.jar_dependencies:
          target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
        # Add all the jars pulled in by this jar_library
        target_libraries.update(iter_transitive_jars(dep))
      if isinstance(dep, Resources):
        resource_target_map[dep] = current_target

    if isinstance(current_target, ScalaLibrary):
      for dep in current_target.java_sources:
        info['targets'].append(dep.address.spec)
        process_target(dep)

    if isinstance(current_target, JvmTarget):
      info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
      info['platform'] = current_target.platform.name
      if hasattr(current_target, 'test_platform'):
        info['test_platform'] = current_target.test_platform.name

    info['roots'] = [{
      'source_root': source_root_package_prefix[0],
      'package_prefix': source_root_package_prefix[1]
    } for source_root_package_prefix in self._source_roots_for_target(current_target)]

    if classpath_products:
      info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
    targets_map[current_target.address.spec] = info

  for target in targets:
    process_target(target)

  jvm_platforms_map = {
    'default_platform': JvmPlatform.global_instance().default_platform.name,
    'platforms': {
      str(platform_name): {
        'target_level': str(platform.target_level),
        'source_level': str(platform.source_level),
        'args': platform.args,
      } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
    },
  }

  graph_info = {
    'version': self.DEFAULT_EXPORT_VERSION,
    'targets': targets_map,
    'jvm_platforms': jvm_platforms_map,
    # `jvm_distributions` are static distribution settings from config,
    # `preferred_jvm_distributions` are distributions that pants actually uses for the
    # given platform setting.
    'preferred_jvm_distributions': {}
  }

  for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items():
    preferred_distributions = {}
    for strict, strict_key in [(True, 'strict'), (False, 'non_strict')]:
      try:
        dist = JvmPlatform.preferred_jvm_distribution([platform], strict=strict)
        preferred_distributions[strict_key] = dist.home
      except DistributionLocator.Error:
        pass
    if preferred_distributions:
      graph_info['preferred_jvm_distributions'][platform_name] = preferred_distributions

  if classpath_products:
    graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

  if python_interpreter_targets_mapping:
    # NB: We've selected a python interpreter compatible with each python target individually into
    # the `python_interpreter_targets_mapping`. These python targets may not be compatible, ie: we
    # could have a python target requiring 'CPython>=2.7<3' (ie: CPython-2.7.x) and another
    # requiring 'CPython>=3.6'. To pick a default interpreter then from among these two choices
    # is arbitrary and not to be relied on to work as a default interpreter if ever needed by the
    # export consumer.
    #
    # TODO(John Sirois): consider either eliminating the 'default_interpreter' field and pressing
    # export consumers to make their own choice of a default (if needed) or else use
    # `select.select_interpreter_for_targets` and fail fast if there is no interpreter compatible
    # across all the python targets in-play.
    #
    # For now, make our arbitrary historical choice of a default interpreter explicit and use the
    # lowest version.
    default_interpreter = min(python_interpreter_targets_mapping.keys())

    interpreters_info = {}
    for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
      req_libs = [target for target in Target.closure_for_targets(targets)
                  if has_python_requirements(target)]
      chroot = self.resolve_requirements(interpreter, req_libs)
      interpreters_info[str(interpreter.identity)] = {
        'binary': interpreter.binary,
        'chroot': chroot.path()
      }

    graph_info['python_setup'] = {
      'default_interpreter': str(default_interpreter.identity),
      'interpreters': interpreters_info
    }

  return graph_info
