def _create_binary(self, binary_tgt, results_dir):
    """Build a .pex file for `binary_tgt` and return the path of the built pex.

    The chroot is rebuilt from scratch instead of reusing the REQUIREMENTS_PEX
    and PYTHON_SOURCES products: those products are already-built pexes (each
    with its own __main__.py, metadata, and so on) and there is no easy way to
    merge them into a single pex file.
    """
    interpreter = self.context.products.get_data(PythonInterpreter)
    with temporary_dir() as chroot:
        # Seed the pex metadata with this run's build properties.
        run_info = self.context.run_tracker.run_info.get_as_dict()
        properties = PexInfo.make_build_properties()
        properties.update(run_info)
        pex_info = binary_tgt.pexinfo.copy()
        pex_info.build_properties = properties

        pex_builder = PEXBuilder(path=chroot, interpreter=interpreter, pex_info=pex_info, copy=True)

        shebang = binary_tgt.shebang
        if shebang:
            self.context.log.info(
                'Found Python binary target {} with customized shebang, using it: {}'.format(
                    binary_tgt.name, shebang))
            pex_builder.set_shebang(shebang)
        else:
            self.context.log.debug('No customized shebang found for {}'.format(binary_tgt.name))

        # Partition the dependency closure into source-providing and
        # requirement-providing targets.
        source_targets = []
        requirement_targets = []
        for dep in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
            if has_python_sources(dep) or has_resources(dep):
                source_targets.append(dep)
                # Source targets also contribute interpreter compatibility
                # constraints to the pex info.
                if has_python_sources(dep):
                    for constraint in dep.compatibility:
                        pex_builder.add_interpreter_constraint(constraint)
            elif has_python_requirements(dep):
                requirement_targets.append(dep)

        # Dump everything into the builder's chroot.
        for dep in source_targets:
            dump_sources(pex_builder, dep, self.context.log)
        dump_requirements(pex_builder, interpreter, requirement_targets, self.context.log,
                          binary_tgt.platforms)

        # Emit the final .pex into the results directory.
        pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name))
        pex_builder.build(pex_path)
        return pex_path
def _build_source_pex(self, interpreter, path, targets):
    """Assemble and freeze a source-only pex chroot at `path` for `targets`."""
    wrapper = PexBuilderWrapper(
        PEXBuilder(path=path, interpreter=interpreter, copy=True),
        PythonRepos.global_instance(),
        PythonSetup.global_instance(),
        self.context.log)
    # Only targets that actually own python sources contribute to the chroot.
    for source_target in (t for t in targets if has_python_sources(t)):
        wrapper.add_sources_from(source_target)
    wrapper.freeze()
def collect_source_targets(target):
    # Group python-source targets by their source root (`target_base`);
    # track resource-bearing targets separately.
    if has_python_sources(target):
        # setdefault replaces the manual get / None-check / store dance with a
        # single idiomatic get-or-create of the per-base set.
        targets_by_base.setdefault(target.target_base, set()).add(target)
    elif has_resources(target):
        resource_targets.add(target)
def collect_source_targets(target):
    # Group python-source targets by their source root (`target_base`);
    # track resource-bearing targets separately.
    if has_python_sources(target):
        # setdefault replaces the manual get / None-check / store dance with a
        # single idiomatic get-or-create of the per-base set.
        targets_by_base.setdefault(target.target_base, set()).add(target)
    elif has_resources(target):
        resource_targets.add(target)
def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH."""
    relevant_targets = self.context.targets(
        lambda tgt: isinstance(tgt, (PythonRequirementLibrary, PythonTarget, Files)))
    with self.invalidated(relevant_targets) as invalidation_check:
        # If there are no relevant targets, we still go through the motions of resolving
        # an empty set of requirements, to prevent downstream tasks from having to check
        # for this special case.
        if invalidation_check.all_vts:
            # Derive a stable id for this exact set of targets from the cache key.
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'
        interpreter = self.context.products.get_data(PythonInterpreter)
        # The chroot path is keyed on both the interpreter identity and the target set.
        path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
        extra_pex_paths = None
        # Note that we check for the existence of the directory, instead of for invalid_vts,
        # to cover the empty case.
        if not os.path.isdir(path):
            source_pexes = self.context.products.get_data(GatherSources.PythonSources).all()
            requirements_pex = self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
            pexes = [requirements_pex] + source_pexes
            if self.extra_requirements():
                extra_reqs = [PythonRequirement(req_str) for req_str in self.extra_requirements()]
                # Inject a synthetic requirements target so the extras resolve
                # through the normal requirement-resolution path.
                addr = Address.parse('{}_extra_reqs'.format(self.__class__.__name__))
                self.context.build_graph.inject_synthetic_target(
                    addr, PythonRequirementLibrary, requirements=extra_reqs)
                # Add the extra requirements first, so they take precedence over any colliding
                # version in the target set's dependency closure.
                pexes = [self.resolve_requirements(
                    [self.context.build_graph.get_target(addr)])] + pexes
            # Skip any missing/falsy products when assembling the PEX_PATH entries.
            extra_pex_paths = [pex.path() for pex in pexes if pex]
            if extra_pex_paths:
                # NOTE(review): `pex_info` defaults to None; this call assumes callers that
                # trigger a (re)build pass a real PexInfo — confirm against call sites.
                pex_info.merge_pex_path(':'.join(extra_pex_paths))
            # safe_concurrent_creation builds into a temp dir and renames into place,
            # so concurrent invocations don't observe a half-built chroot.
            with safe_concurrent_creation(path) as safe_path:
                builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
                # Add target interpreter compatibilities to pex info.
                for rt in relevant_targets:
                    if has_python_sources(rt):
                        for constraint in rt.compatibility:
                            builder.add_interpreter_constraint(constraint)
                builder.freeze()
        return WrappedPEX(PEX(os.path.realpath(path), interpreter), interpreter)
def _create_binary(self, binary_tgt, results_dir):
    """Build a .pex file for `binary_tgt` and return the path of the built pex.

    The chroot is rebuilt from scratch instead of reusing the REQUIREMENTS_PEX
    and PYTHON_SOURCES products: those products are already-built pexes (each
    with its own __main__.py, metadata, and so on) and there is no easy way to
    merge them into a single pex file.
    """
    interpreter = self.context.products.get_data(PythonInterpreter)
    with temporary_dir() as chroot:
        # Seed the pex metadata with this run's build properties.
        run_info = self.context.run_tracker.run_info.get_as_dict()
        properties = PexInfo.make_build_properties()
        properties.update(run_info)
        pex_info = binary_tgt.pexinfo.copy()
        pex_info.build_properties = properties

        pex_builder = PEXBuilder(path=chroot, interpreter=interpreter, pex_info=pex_info, copy=True)

        shebang = binary_tgt.shebang
        if shebang:
            self.context.log.info(
                'Found Python binary target {} with customized shebang, using it: {}'.format(
                    binary_tgt.name, shebang))
            pex_builder.set_shebang(shebang)
        else:
            self.context.log.debug('No customized shebang found for {}'.format(binary_tgt.name))

        # Partition the dependency closure into source-providing and
        # requirement-providing targets.
        source_targets = []
        requirement_targets = []
        for dep in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
            if has_python_sources(dep) or has_resources(dep):
                source_targets.append(dep)
            elif has_python_requirements(dep):
                requirement_targets.append(dep)
            # Every python target in the closure contributes its interpreter
            # compatibility constraints to the pex info.
            if is_python_target(dep):
                for constraint in dep.compatibility:
                    pex_builder.add_interpreter_constraint(constraint)

        # Dump everything into the builder's chroot.
        for dep in source_targets:
            dump_sources(pex_builder, dep, self.context.log)

        # We need to ensure that we are resolving for only the current platform if we are
        # including local python dist targets that have native extensions.
        self._python_native_code_settings.check_build_for_current_platform_only(
            self.context.targets())
        dump_requirement_libs(pex_builder, interpreter, requirement_targets, self.context.log,
                              platforms=binary_tgt.platforms)

        # Emit the final .pex into the results directory.
        pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name))
        pex_builder.build(pex_path)
        return pex_path
def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH."""
    def _is_relevant(tgt):
        return isinstance(tgt, (PythonDistribution, PythonRequirementLibrary, PythonTarget, Files))

    relevant_targets = self.context.targets(_is_relevant)
    with self.invalidated(relevant_targets) as invalidation_check:
        # If there are no relevant targets, we still go through the motions of resolving
        # an empty set of requirements, to prevent downstream tasks from having to check
        # for this special case.
        all_vts = invalidation_check.all_vts
        target_set_id = (VersionedTargetSet.from_versioned_targets(all_vts).cache_key.hash
                         if all_vts else 'no_targets')

        interpreter = self.context.products.get_data(PythonInterpreter)
        path = os.path.realpath(
            os.path.join(self.workdir, str(interpreter.identity), target_set_id))
        # Note that we check for the existence of the directory, instead of for invalid_vts,
        # to cover the empty case.
        if not os.path.isdir(path):
            source_pexes = self.context.products.get_data(GatherSources.PythonSources).all()
            requirements_pex = self.context.products.get_data(
                ResolveRequirements.REQUIREMENTS_PEX)
            pexes = [requirements_pex] + source_pexes
            if self.extra_requirements():
                # Add the extra requirements first, so they take precedence over any
                # colliding version in the target set's dependency closure.
                extra_requirements_pex = self.resolve_requirement_strings(
                    interpreter, self.extra_requirements())
                pexes.insert(0, extra_requirements_pex)

            # Collect interpreter compatibility constraints from all source targets.
            constraints = set()
            for candidate in relevant_targets:
                if has_python_sources(candidate):
                    constraints.update(candidate.compatibility)

            self.merge_pexes(path, pex_info, interpreter, pexes, constraints)
        return WrappedPEX(PEX(path, interpreter), interpreter)
def collect_source_targets(target):
    # Keep any target that contributes python sources or resource files.
    is_content_target = has_python_sources(target) or has_resources(target)
    if is_content_target:
        targets.add(target)
def _build_source_pex(self, interpreter, path, targets):
    """Dump the python sources of `targets` into a frozen PEX chroot at `path`."""
    chroot_builder = PEXBuilder(path=path, interpreter=interpreter, copy=True)
    # Only targets that actually own python sources are dumped.
    for source_target in (t for t in targets if has_python_sources(t)):
        dump_sources(chroot_builder, source_target, self.context.log)
    chroot_builder.freeze()
def _build_source_pex(self, interpreter, path, targets):
    """Populate and freeze a PEX chroot at `path` from the given targets' sources."""
    builder = PEXBuilder(path=path, interpreter=interpreter, copy=True)
    for candidate in targets:
        # Skip anything that does not contribute python sources.
        if not has_python_sources(candidate):
            continue
        dump_sources(builder, candidate, self.context.log)
    builder.freeze()