def create_python_awslambda(self, addr: str) -> Tuple[str, bytes]:
  lambdex_setup = self.request_single_product(
    LambdexSetup,
    Params(
      PythonSetup.global_instance(),
      PythonNativeCode.global_instance(),
      SubprocessEnvironment.global_instance(),
      Lambdex.global_instance(),
    ))
  target = self.request_single_product(HydratedTarget, Address.parse(addr))
  created_awslambda = self.request_single_product(
    CreatedAWSLambda,
    Params(
      target.adaptor,
      lambdex_setup,
      SourceRootConfig.global_instance(),
      PythonSetup.global_instance(),
      PythonNativeCode.global_instance(),
      SubprocessEnvironment.global_instance(),
    ))
  files_content = list(
    self.request_single_product(FilesContent, Params(created_awslambda.digest)))
  assert len(files_content) == 1
  return created_awslambda.name, files_content[0].content

def test_generic_pex_creation(self) -> None:
  input_files_content = InputFilesContent((
    FileContent(path='main.py', content=b'print("from main")'),
    FileContent(path='subdir/sub.py', content=b'print("from sub")'),
  ))
  input_files = self.request_single_product(Digest, input_files_content)
  pex_output = self.create_pex_and_get_all_data(entry_point='main', input_files=input_files)

  pex_files = pex_output['files']
  self.assertTrue('pex' not in pex_files)
  self.assertTrue('main.py' in pex_files)
  self.assertTrue('subdir/sub.py' in pex_files)

  python_setup = PythonSetup.global_instance()
  env = {"PATH": create_path_env_var(python_setup.interpreter_search_paths)}

  pex = pex_output['pex']
  req = ExecuteProcessRequest(
    argv=('python', 'test.pex'),
    env=env,
    input_files=pex.directory_digest,
    description="Run the pex and make sure it works")
  result = self.request_single_product(ExecuteProcessResult, req)
  self.assertEqual(result.stdout, b"from main\n")

def _resolve_requirements(self, target_roots, options=None):
  with temporary_dir() as cache_dir:
    options = options or {}
    options.setdefault(PythonSetup.options_scope, {})['interpreter_cache_dir'] = cache_dir
    context = self.context(target_roots=target_roots, options=options,
                           for_subsystems=[PythonSetup, PythonRepos])

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get()
    # directly, to ensure that the interpreter has setuptools and wheel support.
    interpreter = PythonInterpreter.get()
    interpreter_cache = PythonInterpreterCache(
      PythonSetup.global_instance(), PythonRepos.global_instance(),
      logger=context.log.debug)
    interpreters = interpreter_cache.setup(
      paths=[os.path.dirname(interpreter.binary)],
      filters=[str(interpreter.identity.requirement)])
    context.products.get_data(PythonInterpreter, lambda: interpreters[0])

    task = self.create_task(context)
    task.execute()

    return context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)

def resolve_requirement_strings(self, interpreter, requirement_strings):
  """Resolve a list of pip-style requirement strings."""
  requirement_strings = sorted(requirement_strings)
  if len(requirement_strings) == 0:
    req_strings_id = 'no_requirements'
  elif len(requirement_strings) == 1:
    req_strings_id = requirement_strings[0]
  else:
    req_strings_id = hash_all(requirement_strings)

  path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), req_strings_id))
  if not os.path.isdir(path):
    reqs = [PythonRequirement(req_str) for req_str in requirement_strings]
    with safe_concurrent_creation(path) as safe_path:
      pex_builder = PexBuilderWrapper(
        PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
        PythonRepos.global_instance(),
        PythonSetup.global_instance(),
        self.context.log)
      pex_builder.add_resolved_requirements(reqs)
      pex_builder.freeze()
  return PEX(path, interpreter=interpreter)

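# Hedged usage sketch, not part of the original source: illustrates the cache
# keying in resolve_requirement_strings above. `task` and `interpreter` are
# hypothetical stand-ins for a live task instance and a selected interpreter.
def warm_requirements_cache(task, interpreter):
  # Identical requirement lists sort to the same id, so the second call reuses
  # the chroot built by the first instead of re-resolving.
  pex = task.resolve_requirement_strings(interpreter, ['ansicolors==1.1.8'])
  same_pex = task.resolve_requirement_strings(interpreter, ['ansicolors==1.1.8'])
  assert pex.path() == same_pex.path()
  return pex
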
def run_flake8(
  self,
  source_files: List[FileContent],
  *,
  config: Optional[str] = None,
  passthrough_args: Optional[Sequence[str]] = None,
  interpreter_constraints: Optional[Sequence[str]] = None,
  skip: bool = False,
) -> LintResult:
  if config is not None:
    self.create_file(relpath=".flake8", contents=config)
  input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
  target = Flake8Target(
    PythonTargetAdaptor(
      sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=input_snapshot),
      address=Address.parse("test:target"),
      compatibility=interpreter_constraints,
    ))
  flake8_subsystem = global_subsystem_instance(
    Flake8,
    options={Flake8.options_scope: {
      "config": ".flake8" if config else None,
      "args": passthrough_args or [],
      "skip": skip,
    }})
  return self.request_single_product(
    LintResult,
    Params(
      target,
      flake8_subsystem,
      PythonNativeCode.global_instance(),
      PythonSetup.global_instance(),
      SubprocessEnvironment.global_instance()))

def tgt_closure_platforms(tgts):
  """
  Aggregates a dict that maps a platform string to a list of targets that specify the platform.
  If no targets have platforms arguments, return a dict containing platforms inherited from
  the PythonSetup object.

  :param tgts: a list of :class:`Target` objects.
  :returns: a dict mapping a platform string to a list of targets that specify the platform.
  """
  tgts_by_platforms = {}

  def insert_or_append_tgt_by_platform(tgt):
    if tgt.platforms:
      for platform in tgt.platforms:
        if platform in tgts_by_platforms:
          tgts_by_platforms[platform].append(tgt)
        else:
          tgts_by_platforms[platform] = [tgt]

  # NB: `map` is lazy in Python 3, so iterate eagerly to make sure the
  # insertion side effects actually run.
  for tgt in tgts:
    insert_or_append_tgt_by_platform(tgt)

  # If no targets specify platforms, inherit the default platforms.
  if not tgts_by_platforms:
    for platform in PythonSetup.global_instance().platforms:
      tgts_by_platforms[platform] = ['(No target) Platform inherited from either the '
                                     '--platforms option or a pants.ini file.']
  return tgts_by_platforms

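# Hedged sketch, not part of the original source: demonstrates the shape of the
# dict built by tgt_closure_platforms. `FakeTarget` is a hypothetical stand-in
# for a real Target with a `platforms` attribute.
class FakeTarget:
  def __init__(self, name, platforms):
    self.name = name
    self.platforms = platforms

tgt_a = FakeTarget('a', ['linux-x86_64', 'macosx-10.12-x86_64'])
tgt_b = FakeTarget('b', ['linux-x86_64'])
# tgt_closure_platforms([tgt_a, tgt_b]) ==
#   {'linux-x86_64': [tgt_a, tgt_b], 'macosx-10.12-x86_64': [tgt_a]}
# With no platform-bearing targets at all, each default platform from
# PythonSetup maps to the explanatory placeholder string instead.
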
def execute(self):
  python_tgts_and_reqs = self.context.targets(
    lambda tgt: isinstance(tgt, (PythonTarget, PythonRequirementLibrary)))
  if not python_tgts_and_reqs:
    return
  python_tgts = [tgt for tgt in python_tgts_and_reqs if isinstance(tgt, PythonTarget)]
  fs = PythonInterpreterFingerprintStrategy()
  with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
    if (PythonSetup.global_instance().interpreter_search_paths
        and PythonInterpreterCache.pex_python_paths()):
      self.context.log.warn("Detected both PEX_PYTHON_PATH and "
                            "--python-setup-interpreter-search-paths. Ignoring "
                            "--python-setup-interpreter-search-paths.")
    # If there are no relevant targets, we still go through the motions of selecting
    # an interpreter, to prevent downstream tasks from having to check for this special case.
    if invalidation_check.all_vts:
      target_set_id = VersionedTargetSet.from_versioned_targets(
        invalidation_check.all_vts).cache_key.hash
    else:
      target_set_id = 'no_targets'
    interpreter_path_file = self._interpreter_path_file(target_set_id)
    if not os.path.exists(interpreter_path_file):
      self._create_interpreter_path_file(interpreter_path_file, python_tgts)

  interpreter = self._get_interpreter(interpreter_path_file)
  self.context.products.register_data(PythonInterpreter, interpreter)

def dump_requirements(builder, interpreter, reqs, log, platforms=None):
  """Multi-platform dependency resolution for PEX files.

  :param builder: Dump the requirements into this builder.
  :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
  :param reqs: A list of :class:`PythonRequirement` to resolve.
  :param log: Use this logger.
  :param platforms: A list of :class:`Platform`s to resolve requirements for.
    Defaults to the platforms specified by PythonSetup.
  """
  deduped_reqs = OrderedSet(reqs)
  find_links = OrderedSet()
  blacklist = PythonSetup.global_instance().resolver_blacklist
  for req in deduped_reqs:
    log.debug('  Dumping requirement: {}'.format(req))
    if not (req.key in blacklist and interpreter.identity.matches(blacklist[req.key])):
      builder.add_requirement(req.requirement)
    if req.repository:
      find_links.add(req.repository)

  # Resolve the requirements into distributions.
  distributions = _resolve_multi(interpreter, deduped_reqs, platforms, find_links)
  locations = set()
  for platform, dists in distributions.items():
    for dist in dists:
      if dist.location not in locations:
        log.debug('  Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
        builder.add_distribution(dist)
        locations.add(dist.location)

def _resolve_multi(interpreter, requirements, platforms, find_links):
  """Multi-platform dependency resolution for PEX files.

  Returns a list of distributions that must be included in order to satisfy a set of
  requirements. That may involve distributions for multiple platforms.

  :param interpreter: The :class:`PythonInterpreter` to resolve for.
  :param requirements: A list of :class:`PythonRequirement` objects to resolve.
  :param platforms: A list of :class:`Platform`s to resolve for.
  :param find_links: Additional paths to search for source packages during resolution.
  :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances
    needed to satisfy the requirements on that platform.
  """
  python_setup = PythonSetup.global_instance()
  python_repos = PythonRepos.global_instance()
  platforms = platforms or python_setup.platforms
  find_links = find_links or []
  distributions = {}
  fetchers = python_repos.get_fetchers()
  fetchers.extend(Fetcher([path]) for path in find_links)

  for platform in platforms:
    requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir,
                                          str(interpreter.identity))
    distributions[platform] = resolve(
      requirements=[req.requirement for req in requirements],
      interpreter=interpreter,
      fetchers=fetchers,
      platform=None if platform == 'current' else platform,
      context=python_repos.get_network_context(),
      cache=requirements_cache_dir,
      cache_ttl=python_setup.resolver_cache_ttl,
      allow_prereleases=python_setup.resolver_allow_prereleases)

  return distributions

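# Hedged sketch, not part of the original source: the per-platform map returned
# by _resolve_multi and the dedup-by-location walk that callers such as
# dump_requirements perform. Platform names and requirements are illustrative.
# dists_by_platform = _resolve_multi(interpreter, reqs,
#                                    ['current', 'linux-x86_64'], find_links=[])
# seen = set()
# for platform, dists in dists_by_platform.items():
#   for dist in dists:
#     if dist.location not in seen:  # a dist shared by platforms is added once
#       seen.add(dist.location)
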
def execute(self):
  python_tgts = self.context.targets(lambda tgt: isinstance(tgt, PythonTarget))
  fs = PythonInterpreterFingerprintStrategy()
  with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
    if (PythonSetup.global_instance().interpreter_search_paths
        and PythonInterpreterCache.pex_python_paths()):
      self.context.log.warn("Detected both PEX_PYTHON_PATH and "
                            "--python-setup-interpreter-search-paths. Ignoring "
                            "--python-setup-interpreter-search-paths.")
    # If there are no relevant targets, we still go through the motions of selecting
    # an interpreter, to prevent downstream tasks from having to check for this special case.
    if invalidation_check.all_vts:
      target_set_id = VersionedTargetSet.from_versioned_targets(
        invalidation_check.all_vts).cache_key.hash
    else:
      target_set_id = 'no_targets'
    interpreter_path_file = self._interpreter_path_file(target_set_id)
    if not os.path.exists(interpreter_path_file):
      self._create_interpreter_path_file(interpreter_path_file, python_tgts)

  interpreter = self._get_interpreter(interpreter_path_file)
  self.context.products.register_data(PythonInterpreter, interpreter)

def create_pex_and_get_pex_info(
  self, *, requirements=None, entry_point=None, interpreter_constraints=None
):
  def hashify_optional_collection(iterable):
    return tuple(sorted(iterable)) if iterable is not None else tuple()

  request = RequirementsPexRequest(
    output_filename="test.pex",
    requirements=hashify_optional_collection(requirements),
    interpreter_constraints=hashify_optional_collection(interpreter_constraints),
    entry_point=entry_point,
  )
  requirements_pex = assert_single_element(
    self.scheduler.product_request(RequirementsPex, [Params(
      request,
      PythonSetup.global_instance(),
      PythonNativeCode.global_instance())])
  )
  with temporary_dir() as tmp_dir:
    self.scheduler.materialize_directories((
      DirectoryToMaterialize(path=tmp_dir, directory_digest=requirements_pex.directory_digest),
    ))
    with zipfile.ZipFile(os.path.join(tmp_dir, "test.pex"), "r") as pex:
      with pex.open("PEX-INFO", "r") as pex_info:
        pex_info_content = pex_info.readline().decode()
  return json.loads(pex_info_content)

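# Hedged usage sketch, not part of the original source: callers typically
# assert against fields parsed from PEX-INFO. 'entry_point' and 'requirements'
# are standard pex metadata keys, but exact contents vary by pex version.
# pex_info = self.create_pex_and_get_pex_info(
#   requirements=['ansicolors==1.1.8'], entry_point='main')
# assert pex_info['entry_point'] == 'main'
# assert any(req.startswith('ansicolors') for req in pex_info['requirements'])
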
def subsystem_dependencies(cls):
  return super(ResolveRequirementsTaskBase, cls).subsystem_dependencies() + (
    PexBuilderWrapper.Factory,
    PythonNativeCode.scoped(cls),
    PythonSetup.scoped(cls),
  )

def create(cls, builder, log=None):
  log = log or logging.getLogger(__name__)
  return PexBuilderWrapper(
    builder=builder,
    python_repos_subsystem=PythonRepos.global_instance(),
    python_setup_subsystem=PythonSetup.global_instance(),
    log=log)

def create_pex_and_get_all_data(
  self,
  *,
  requirements=PexRequirements(),
  entry_point=None,
  interpreter_constraints=PexInterpreterConstraints(),
  input_files: Digest = None) -> Dict:
  def hashify_optional_collection(iterable):
    return tuple(sorted(iterable)) if iterable is not None else tuple()

  request = CreatePex(
    output_filename="test.pex",
    requirements=requirements,
    interpreter_constraints=interpreter_constraints,
    entry_point=entry_point,
    input_files_digest=input_files,
  )
  requirements_pex = self.request_single_product(
    Pex,
    Params(
      request,
      PythonSetup.global_instance(),
      SubprocessEnvironment.global_instance(),
      PythonNativeCode.global_instance()))
  self.scheduler.materialize_directory(
    DirectoryToMaterialize(requirements_pex.directory_digest),
  )
  with zipfile.ZipFile(os.path.join(self.build_root, "test.pex"), "r") as pex:
    with pex.open("PEX-INFO", "r") as pex_info:
      pex_info_content = pex_info.readline().decode()
    pex_list = pex.namelist()
  # NB: The function returns a single dict, so the annotation is `Dict`
  # (the original `(Dict, List[str])` tuple was not a valid return type).
  return {
    'pex': requirements_pex,
    'info': json.loads(pex_info_content),
    'files': pex_list,
  }

def test_setuptools_version(self):
  self.create_file('src/python/foo/__init__.py')
  self.create_python_library(
    relpath='src/python/foo/commands',
    name='commands',
    source_contents_map={
      'print_sys_path.py': dedent("""
        import os
        import sys
        from setuptools import Command


        class PrintSysPath(Command):
          user_options = []

          def initialize_options(self):
            pass

          def finalize_options(self):
            pass

          def run(self):
            with open(os.path.join(os.path.dirname(__file__), 'sys_path.txt'), 'w') as fp:
              fp.write(os.linesep.join(sys.path))
        """)
    },
  )
  foo = self.create_python_library(
    relpath='src/python/foo',
    name='foo',
    dependencies=[
      'src/python/foo/commands',
    ],
    provides=dedent("""
      setup_py(
        name='foo',
        version='0.0.0',
      )
      """)
  )
  self.set_options(run='print_sys_path')

  # Make sure setup.py can see our custom distutils Command 'print_sys_path'.
  sdist_srcdir = os.path.join(self.distdir, 'foo-0.0.0', 'src')
  with environment_as(PYTHONPATH=sdist_srcdir):
    with self.run_execute(foo):
      with open(os.path.join(sdist_srcdir, 'foo', 'commands', 'sys_path.txt'), 'r') as fp:
        def assert_extra(name, expected_version):
          package = Package.from_href(fp.readline().strip())
          self.assertEqual(name, package.name)
          self.assertEqual(expected_version, package.raw_version)

        # The 1st two elements of the sys.path should be our custom SetupPyRunner Installer's
        # setuptools and wheel mixins, which should match the setuptools and wheel versions
        # specified by the PythonSetup subsystem.
        init_subsystem(PythonSetup)
        python_setup = PythonSetup.global_instance()
        assert_extra('setuptools', python_setup.setuptools_version)
        assert_extra('wheel', python_setup.wheel_version)

def create_from_adaptors(
  cls, adaptors: Tuple[PythonTargetAdaptor, ...], python_setup: PythonSetup
) -> 'PexInterpreterConstraints':
  interpreter_constraints = frozenset(
    constraint
    for target_adaptor in adaptors
    for constraint in python_setup.compatibility_or_constraints(
      getattr(target_adaptor, 'compatibility', None)))
  return PexInterpreterConstraints(constraint_set=interpreter_constraints)

def execute(self):
  """Run Checkstyle on all found non-synthetic source files."""
  python_tgts = self.context.targets(lambda tgt: isinstance(tgt, PythonTarget))
  if not python_tgts:
    return 0

  interpreter_cache = PythonInterpreterCache(
    PythonSetup.global_instance(), PythonRepos.global_instance(),
    logger=self.context.log.debug)
  with self.invalidated(self.get_targets(self._is_checked)) as invalidation_check:
    failure_count = 0
    tgts_by_compatibility, _ = interpreter_cache.partition_targets_by_compatibility(
      [vt.target for vt in invalidation_check.invalid_vts])
    for filters, targets in tgts_by_compatibility.items():
      if (self.get_options().interpreter_constraints_whitelist is None
          and not self._constraints_are_whitelisted(filters)):
        deprecated_conditional(
          lambda: self.get_options().interpreter_constraints_whitelist is None,
          '1.14.0.dev2',
          "Python linting is currently restricted to targets that match the global "
          "interpreter constraints: {}. Pants detected unacceptable filters: {}. "
          "Use the `--interpreter-constraints-whitelist` lint option to whitelist "
          "compatibility constraints.".format(
            PythonSetup.global_instance().interpreter_constraints, filters))
      else:
        sources = self.calculate_sources([tgt for tgt in targets])
        if sources:
          allowed_interpreters = set(interpreter_cache.setup(filters=filters))
          if not allowed_interpreters:
            raise TaskError('No valid interpreters found for targets: {}\n(filters: {})'
                            .format(targets, filters))
          interpreter = min(allowed_interpreters)
          failure_count += self.checkstyle(interpreter, sources)
    if failure_count > 0 and self.get_options().fail:
      raise TaskError('{} Python Style issues found. You may try `./pants fmt <targets>`'
                      .format(failure_count))
    return failure_count

def create_from_adaptors(cls, adaptors: Iterable[PythonTargetAdaptor],
                         python_setup: PythonSetup) -> 'PexInterpreterConstraints':
  interpreter_constraints = {
    constraint
    for target_adaptor in adaptors
    for constraint in python_setup.compatibility_or_constraints(
      getattr(target_adaptor, 'compatibility', None)
    )
  }
  return PexInterpreterConstraints(constraint_set=tuple(sorted(interpreter_constraints)))

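# Hedged sketch, not part of the original source: constraints from overlapping
# adaptors are deduplicated and sorted, so the resulting tuple is deterministic
# regardless of target ordering. The constraint strings are illustrative.
# Given adaptors with compatibility ['CPython>=3.6'] and
# ['CPython>=3.6', 'CPython<3.8'], create_from_adaptors yields
# constraint_set == ('CPython<3.8', 'CPython>=3.6').
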
def _build_source_pex(self, interpreter, path, targets):
  pex_builder = PexBuilderWrapper(
    PEXBuilder(path=path, interpreter=interpreter, copy=True),
    PythonRepos.global_instance(),
    PythonSetup.global_instance(),
    self.context.log)
  for target in targets:
    if has_python_sources(target):
      pex_builder.add_sources_from(target)
  pex_builder.freeze()

def _acceptable_interpreter_constraints(self):
  default_constraints = PythonSetup.global_instance().interpreter_constraints
  whitelisted_constraints = self.get_options().interpreter_constraints_whitelist
  # The user wants to lint everything.
  if whitelisted_constraints == []:
    return []
  # The user did not pass a whitelist option.
  elif whitelisted_constraints is None:
    whitelisted_constraints = ()
  return [version.parse(v) for v in default_constraints + whitelisted_constraints]

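# Hedged sketch, not part of the original source: the three whitelist states
# handled above, with illustrative constraint strings.
#   whitelist == []   -> return [] (lint everything, skip constraint checks)
#   whitelist is None -> parse only the global defaults, e.g. ['CPython>=3.6']
#   whitelist given   -> parse the defaults plus the whitelisted entries
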
def _interpreter_cache(self):
  interpreter_cache = PythonInterpreterCache(
    PythonSetup.global_instance(), PythonRepos.global_instance(),
    logger=self.context.log.debug)
  # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
  self.context.acquire_lock()
  try:
    interpreter_cache.setup()
  finally:
    self.context.release_lock()
  return interpreter_cache

def create(cls, builder, log=None):
  options = cls.global_instance().get_options()
  setuptools_requirement = f'setuptools=={options.setuptools_version}'
  log = log or logging.getLogger(__name__)
  return PexBuilderWrapper(
    builder=builder,
    python_repos_subsystem=PythonRepos.global_instance(),
    python_setup_subsystem=PythonSetup.global_instance(),
    setuptools_requirement=PythonRequirement(setuptools_requirement),
    log=log)

def _build_tool_pex(self, context, interpreter, pex_path, requirements_lib):
  with safe_concurrent_creation(pex_path) as chroot:
    pex_builder = PexBuilderWrapper(
      PEXBuilder(path=chroot, interpreter=interpreter),
      PythonRepos.global_instance(),
      PythonSetup.global_instance(),
      context.log)
    pex_builder.add_requirement_libs_from(req_libs=[requirements_lib])
    pex_builder.set_entry_point(self._tool_subsystem().get_entry_point())
    pex_builder.freeze()

def create_chroot(self, interpreter, builder, targets, platforms, extra_requirements):
  return PythonChroot(
    python_setup=PythonSetup.global_instance(),
    python_repos=PythonRepos.global_instance(),
    ivy_bootstrapper=self.ivy_bootstrapper,
    thrift_binary_factory=self.thrift_binary_factory,
    interpreter=interpreter,
    builder=builder,
    targets=targets,
    platforms=platforms,
    extra_requirements=extra_requirements,
    log=self.context.log)

def _create_interpreter_path_file(self, interpreter_path_file, targets):
  interpreter_cache = PythonInterpreterCache(
    PythonSetup.global_instance(), PythonRepos.global_instance(),
    logger=self.context.log.debug)
  interpreter = interpreter_cache.select_interpreter_for_targets(targets)
  safe_mkdir_for(interpreter_path_file)
  with open(interpreter_path_file, 'w') as outfile:
    # NB: The file is opened in text mode, so write str, not bytes
    # (bytes literals have no `.format` on Python 3).
    outfile.write('{}\n'.format(interpreter.binary))
    for dist, location in interpreter.extras.items():
      dist_name, dist_version = dist
      outfile.write('{}\t{}\t{}\n'.format(dist_name, dist_version, location))

def checker_pex(self, interpreter):
  # TODO(John Sirois): Formalize in pants.base?
  pants_dev_mode = os.environ.get('PANTS_DEV')

  if pants_dev_mode:
    checker_id = self.checker_target.transitive_invalidation_hash()
  else:
    checker_id = hash_all([self._CHECKER_REQ])

  pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))

  if not os.path.exists(pex_path):
    with self.context.new_workunit(name='build-checker'):
      with safe_concurrent_creation(pex_path) as chroot:
        pex_builder = PexBuilderWrapper(
          PEXBuilder(path=chroot, interpreter=interpreter),
          PythonRepos.global_instance(),
          PythonSetup.global_instance(),
          self.context.log)

        # Constraining is required to guard against the case where the user
        # has a pexrc file set.
        pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

        if pants_dev_mode:
          pex_builder.add_sources_from(self.checker_target)
          req_libs = [tgt for tgt in self.checker_target.closure()
                      if isinstance(tgt, PythonRequirementLibrary)]
          pex_builder.add_requirement_libs_from(req_libs=req_libs)
        else:
          try:
            # The checker is already on sys.path, eg: embedded in pants.pex.
            working_set = WorkingSet(entries=sys.path)
            for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)]):
              pex_builder.add_direct_requirements(dist.requires())
              pex_builder.add_distribution(dist)
            pex_builder.add_direct_requirements([self._CHECKER_REQ])
          except DistributionNotFound:
            # We need to resolve the checker from a local or remote distribution repo.
            pex_builder.add_resolved_requirements([PythonRequirement(self._CHECKER_REQ)])

        pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
        pex_builder.freeze()

  return PEX(pex_path, interpreter=interpreter)

def test_expand_interpreter_search_paths(self):
  with environment_as(PATH='/env/path1:/env/path2'):
    with setup_pexrc_with_pex_python_path(['/pexrc/path1:/pexrc/path2']):
      with fake_pyenv_root(['2.7.14', '3.5.5']) as (pyenv_root, expected_pyenv_paths):
        paths = ['/foo', '<PATH>', '/bar', '<PEXRC>', '/baz', '<PYENV>', '/qux']
        expanded_paths = PythonSetup.expand_interpreter_search_paths(
          paths, pyenv_root_func=lambda: pyenv_root)

  expected = (['/foo', '/env/path1', '/env/path2', '/bar', '/pexrc/path1', '/pexrc/path2',
               '/baz'] + expected_pyenv_paths + ['/qux'])
  self.assertListEqual(expected, expanded_paths)

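# Hedged sketch, not part of the original source: the placeholder tokens the
# test exercises expand in place, preserving surrounding literal entries.
#   '<PATH>'  -> each entry of the PATH environment variable
#   '<PEXRC>' -> the PEX_PYTHON_PATH entries from a pexrc file
#   '<PYENV>' -> interpreter version dirs discovered under the pyenv root
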
def test_empty_target_succeeds(self) -> None:
  # NB: Because this particular edge case should early return, we can avoid providing valid
  # mocked yield gets for most of the rule's body. Future tests added to this file will need
  # to provide valid mocks instead.
  unimplemented_mock = lambda _: NotImplemented
  target = PythonTestsAdaptor(
    address=BuildFileAddress(target_name="target", rel_path="test"))
  result: TestResult = run_rule(
    run_python_test,
    rule_args=[
      target,
      PyTest.global_instance(),
      PythonSetup.global_instance(),
      SubprocessEnvironment.global_instance(),
    ],
    mock_gets=[
      MockGet(product_type=TransitiveHydratedTargets,
              subject_type=BuildFileAddresses,
              mock=lambda _: TransitiveHydratedTargets(roots=(), closure=())),
      MockGet(
        product_type=SourceRootStrippedSources,
        subject_type=Address,
        mock=lambda _: SourceRootStrippedSources(snapshot=EMPTY_SNAPSHOT),
      ),
      MockGet(
        product_type=SourceRootStrippedSources,
        subject_type=HydratedTarget,
        mock=unimplemented_mock,
      ),
      MockGet(
        product_type=Digest,
        subject_type=DirectoriesToMerge,
        mock=unimplemented_mock,
      ),
      MockGet(
        product_type=InjectedInitDigest,
        subject_type=Digest,
        mock=unimplemented_mock,
      ),
      MockGet(
        product_type=Pex,
        subject_type=CreatePex,
        mock=unimplemented_mock,
      ),
      MockGet(
        product_type=FallibleExecuteProcessResult,
        subject_type=ExecuteProcessRequest,
        mock=unimplemented_mock,
      ),
    ],
  )
  self.assertEqual(result.status, Status.SUCCESS)

def build_isort_pex(cls, context, interpreter, pex_path, requirements_lib):
  with safe_concurrent_creation(pex_path) as chroot:
    pex_builder = PexBuilderWrapper(
      PEXBuilder(path=chroot, interpreter=interpreter),
      PythonRepos.global_instance(),
      PythonSetup.global_instance(),
      context.log)
    pex_builder.add_requirement_libs_from(req_libs=[requirements_lib])
    pex_builder.set_script('isort')
    pex_builder.freeze()

def run_black(
  self,
  source_files: List[FileContent],
  *,
  config: Optional[str] = None,
  passthrough_args: Optional[Sequence[str]] = None,
) -> Tuple[LintResult, FmtResult]:
  if config is not None:
    self.create_file(relpath="pyproject.toml", contents=config)
  input_snapshot = self.request_single_product(Snapshot, InputFilesContent(source_files))
  target = FormattablePythonTarget(
    TargetAdaptor(
      sources=EagerFilesetWithSpec('test', {'globs': []}, snapshot=input_snapshot),
      address=Address.parse("test:target"),
    ))
  black_subsystem = global_subsystem_instance(
    Black,
    options={Black.options_scope: {
      "config": "pyproject.toml" if config else None,
      "args": passthrough_args or [],
    }})
  black_setup = self.request_single_product(
    BlackSetup,
    Params(
      black_subsystem,
      PythonNativeCode.global_instance(),
      PythonSetup.global_instance(),
      SubprocessEnvironment.global_instance(),
    ))
  fmt_and_lint_params = Params(
    target, black_setup, PythonSetup.global_instance(),
    SubprocessEnvironment.global_instance())
  lint_result = self.request_single_product(LintResult, fmt_and_lint_params)
  fmt_result = self.request_single_product(FmtResult, fmt_and_lint_params)
  return lint_result, fmt_result

def test_setup_using_eggs(self):
  def link_egg(repo_root, requirement):
    existing_dist_location = self._interpreter.get_location(requirement)
    if existing_dist_location is not None:
      existing_dist = Package.from_href(existing_dist_location)
      requirement = '{}=={}'.format(existing_dist.name, existing_dist.raw_version)
    distributions = resolve([requirement],
                            interpreter=self._interpreter,
                            precedence=(EggPackage, SourcePackage))
    self.assertEqual(1, len(distributions))
    dist_location = distributions[0].location

    self.assertRegexpMatches(dist_location, r'\.egg$')
    os.symlink(dist_location, os.path.join(repo_root, os.path.basename(dist_location)))

    return Package.from_href(dist_location).raw_version

  with temporary_dir() as root:
    egg_dir = os.path.join(root, 'eggs')
    os.makedirs(egg_dir)
    setuptools_version = link_egg(egg_dir, 'setuptools')
    wheel_version = link_egg(egg_dir, 'wheel')

    interpreter_requirement = self._interpreter.identity.requirement

    self.context(for_subsystems=[PythonSetup, PythonRepos], options={
      PythonSetup.options_scope: {
        'interpreter_cache_dir': None,
        'pants_workdir': os.path.join(root, 'workdir'),
        'constraints': [interpreter_requirement],
        'setuptools_version': setuptools_version,
        'wheel_version': wheel_version,
      },
      PythonRepos.options_scope: {
        'indexes': [],
        'repos': [egg_dir],
      }
    })
    cache = PythonInterpreterCache(PythonSetup.global_instance(), PythonRepos.global_instance())

    interpreters = cache.setup(paths=[os.path.dirname(self._interpreter.binary)],
                               filters=[str(interpreter_requirement)])
    self.assertGreater(len(interpreters), 0)

    def assert_egg_extra(interpreter, name, version):
      location = interpreter.get_location('{}=={}'.format(name, version))
      self.assertIsNotNone(location)
      self.assertIsInstance(Package.from_href(location), EggPackage)

    for interpreter in interpreters:
      assert_egg_extra(interpreter, 'setuptools', setuptools_version)
      assert_egg_extra(interpreter, 'wheel', wheel_version)

def create_pex(self, pex_info=None):
  """Returns a wrapped pex that "merges" the other pexes via PEX_PATH."""
  relevant_targets = self.context.targets(
    lambda tgt: isinstance(tgt, (PythonDistribution, PythonRequirementLibrary,
                                 PythonTarget, Files)))
  with self.invalidated(relevant_targets) as invalidation_check:

    # If there are no relevant targets, we still go through the motions of resolving
    # an empty set of requirements, to prevent downstream tasks from having to check
    # for this special case.
    if invalidation_check.all_vts:
      target_set_id = VersionedTargetSet.from_versioned_targets(
        invalidation_check.all_vts).cache_key.hash
    else:
      target_set_id = 'no_targets'

    interpreter = self.context.products.get_data(PythonInterpreter)
    path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity),
                                         target_set_id))

    # Note that we check for the existence of the directory, instead of for invalid_vts,
    # to cover the empty case.
    if not os.path.isdir(path):
      pexes = [
        self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
        self.context.products.get_data(GatherSources.PYTHON_SOURCES)
      ]

      if self.extra_requirements():
        extra_requirements_pex = self.resolve_requirement_strings(
          interpreter, self.extra_requirements())
        # Add the extra requirements first, so they take precedence over any colliding
        # version in the target set's dependency closure.
        pexes = [extra_requirements_pex] + pexes

      constraints = {constraint for rt in relevant_targets if is_python_target(rt)
                     for constraint in
                     PythonSetup.global_instance().compatibility_or_constraints(rt)}

      with self.merged_pex(path, pex_info, interpreter, pexes, constraints) as builder:
        for extra_file in self.extra_files():
          extra_file.add_to(builder)
        builder.freeze()

  return PEX(path, interpreter)

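# Hedged sketch, not part of the original source: per the docstring above, the
# "merge" works by pointing the wrapper pex's PEX_PATH at the requirements and
# sources pexes, so their contents join sys.path at runtime without being
# copied into the wrapper chroot. Illustrative layout only:
#   <workdir>/<interpreter-id>/<target-set-id>/  <- wrapper pex (merged view)
#   requirements pex + sources pex               <- referenced via PEX_PATH
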
def dump_requirement_libs(builder, interpreter, req_libs, log, platforms=None):
  """Multi-platform dependency resolution for PEX files.

  :param builder: Dump the requirements into this builder.
  :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
  :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
  :param log: Use this logger.
  :param platforms: A list of :class:`Platform`s to resolve requirements for.
    Defaults to the platforms specified by PythonSetup.
  """
  deprecated('1.11.0.dev0', 'This function has been moved onto the PexBuilderWrapper class.')
  PexBuilderWrapper(builder,
                    PythonRepos.global_instance(),
                    PythonSetup.global_instance(),
                    log).add_requirement_libs_from(req_libs, platforms)

def _gather_sources(self, target_roots):
  context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

  # We must get an interpreter via the cache, instead of using PythonInterpreter.get()
  # directly, to ensure that the interpreter has setuptools and wheel support.
  interpreter = PythonInterpreter.get()
  interpreter_cache = PythonInterpreterCache(
    PythonSetup.global_instance(), PythonRepos.global_instance(),
    logger=context.log.debug)
  interpreters = interpreter_cache.setup(paths=[os.path.dirname(interpreter.binary)],
                                         filters=[str(interpreter.identity.requirement)])
  context.products.get_data(PythonInterpreter, lambda: interpreters[0])

  task = self.create_task(context)
  task.execute()

  return context.products.get_data(GatherSources.PYTHON_SOURCES)

def test_get_pex_python_paths(self):
  with setup_pexrc_with_pex_python_path(['foo/bar', 'baz', '/qux/quux']):
    paths = PythonSetup.get_pex_python_paths()
  self.assertListEqual(['foo/bar', 'baz', '/qux/quux'], paths)

def test_get_pyenv_paths(self):
  with fake_pyenv_root(['2.7.14', '3.5.5']) as (pyenv_root, expected_paths):
    paths = PythonSetup.get_pyenv_paths(pyenv_root_func=lambda: pyenv_root)
  self.assertListEqual(expected_paths, paths)

def _interpreter_cache(self):
  return PythonInterpreterCache(
    PythonSetup.global_instance(),
    PythonRepos.global_instance(),
    logger=self.context.log.debug
  )

def test_get_environment_paths(self):
  with environment_as(PATH='foo/bar:baz:/qux/quux'):
    paths = PythonSetup.get_environment_paths()
  self.assertListEqual(['foo/bar', 'baz', '/qux/quux'], paths)

def _python_setup(self):
  return PythonSetup.scoped_instance(self)

def subsystem_dependencies(cls):
  return super(PythonNativeCode, cls).subsystem_dependencies() + (
    NativeToolchain.scoped(cls),
    PythonSetup.scoped(cls),
  )

def python_setup(self):
  return PythonSetup.global_instance()

def chroot_cache_dir(self):
  return PythonSetup.global_instance().chroot_cache_dir

def _compatible_interpreter(self, unpacked_whls):
  constraints = PythonSetup.global_instance().compatibility_or_constraints(
    unpacked_whls.compatibility)
  allowable_interpreters = PythonInterpreterCache.global_instance().setup(filters=constraints)
  return min(allowable_interpreters)