def base_requirements(self):
    """Return the setuptools and wheel requirements pinned to the configured option versions."""
    opts = self.get_options()
    return [
        PythonRequirement('setuptools=={}'.format(opts.setuptools_version)),
        PythonRequirement('wheel=={}'.format(opts.wheel_version)),
    ]
def base_requirements(self):
    """Return setuptools and wheel requirements pinned to the global PythonSetup versions."""
    # TODO: would we ever want to configure these requirement versions separately from the global
    # PythonSetup values?
    setup = self.python_setup
    return [
        PythonRequirement('setuptools=={}'.format(setup.setuptools_version)),
        PythonRequirement('wheel=={}'.format(setup.wheel_version)),
    ]
def test_python_requirements_field(self):
    """Fields holding different requirements must produce different fingerprints."""
    foo_fp, bar_fp = (
        PythonRequirementsField([PythonRequirement(spec)]).fingerprint()
        for spec in ('foo==1.0', 'bar==1.0')
    )
    self.assertNotEqual(foo_fp, bar_fp)
def test_python_requirements_field_version_filter(self):
    """version_filter is a lambda and can't be hashed properly.

    Since in practice this is only ever used to differentiate between py3k and py2, it should
    use a tuple of strings or even just a flag instead.
    """
    with_filter = PythonRequirement('foo==1.0', version_filter=lambda py, pl: False)
    without_filter = PythonRequirement('foo==1.0')
    # The filter is invisible to fingerprinting, so the two fields hash identically.
    self.assertEqual(
        PythonRequirementsField([with_filter]).fingerprint(),
        PythonRequirementsField([without_filter]).fingerprint(),
    )
def __call__(self, name=None, dist=None):
    """
    :param string name: The name to use for the target, defaults to the dist name if specified
                        and otherwise the parent dir name.
    :param string dist: The pants dist to create a requirement for. This must be a
                        'pantsbuild.pants*' distribution; eg:
                        'pantsbuild.pants.contrib.python.checks'.
    """
    name = name or dist or os.path.basename(self._parse_context.rel_path)
    dist = dist or 'pantsbuild.pants'
    is_pants_dist = dist == 'pantsbuild.pants' or dist.startswith('pantsbuild.pants.')
    if not is_pants_dist:
        target = Address(spec_path=self._parse_context.rel_path, target_name=name)
        raise TargetDefinitionException(
            target=target,
            msg='The {} target only works for pantsbuild.pants '
                'distributions, given {}'.format(self.alias, dist))

    # TODO(John Sirois): Modify to constraint to >=3.5,<4 as part of
    # https://github.com/pantsbuild/pants/issues/6062
    env_marker = "python_version>='2.7' and python_version<'3'"
    requirement_string = "{key}=={version} ; {env_marker}".format(
        key=dist, version=pants_version(), env_marker=env_marker)
    requirement = PythonRequirement(requirement=requirement_string)
    self._parse_context.create_object(
        'python_requirement_library', name=name, requirements=[requirement])
def __call__(self, name=None, dist=None):
    """
    :param string name: The name to use for the target, defaults to the dist name if specified
                        and otherwise the parent dir name.
    :param string dist: The pants dist to create a requirement for. This must be a
                        'pantsbuild.pants*' distribution; eg:
                        'pantsbuild.pants.contrib.python.checks'.
    """
    name = name or dist or os.path.basename(self._parse_context.rel_path)
    dist = dist or 'pantsbuild.pants'
    is_pants_dist = dist == 'pantsbuild.pants' or dist.startswith('pantsbuild.pants.')
    if not is_pants_dist:
        target = Address(spec_path=self._parse_context.rel_path, target_name=name)
        raise TargetDefinitionException(
            target=target,
            msg='The {} target only works for pantsbuild.pants '
                'distributions, given {}'.format(self.alias, dist))

    requirement_string = "{key}=={version}".format(key=dist, version=pants_version())
    requirement = PythonRequirement(requirement=requirement_string)
    self._parse_context.create_object(
        'python_requirement_library', name=name, requirements=[requirement])
def _test_runner(self, targets, workunit):
    """Yield a pytest pex plus the CLI args for sharding, junit-xml and coverage."""
    interpreter = self.select_interpreter_for_targets(targets)
    pex_info = PexInfo.default()
    pex_info.entry_point = 'pytest'

    # We hard-code the requirements here because they can't be upgraded without
    # major changes to this code, and the PyTest subsystem now contains the versions
    # for the new PytestRun task. This one is about to be deprecated anyway.
    testing_req_strings = [
        'pytest>=2.6,<2.7',
        'pytest-timeout<1.0.0',
        'pytest-cov>=1.8,<1.9',
        'unittest2>=0.6.0,<=1.9.0',
    ]
    testing_reqs = [PythonRequirement(spec) for spec in testing_req_strings]

    chroot = self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=('current',),
                                extra_requirements=testing_reqs)
    pex = chroot.pex()
    with self._maybe_shard() as shard_args, \
         self._maybe_emit_junit_xml(targets) as junit_args, \
         self._maybe_emit_coverage_data(targets, chroot.path(), pex, workunit) as coverage_args:
        yield pex, shard_args + junit_args + coverage_args
def assert_pants_requirement(self, python_requirement_library, expected_dist='pantsbuild.pants'):
    """Assert the library holds one pants requirement pinned with a python-2.7-only marker."""
    self.assertIsInstance(python_requirement_library, PythonRequirementLibrary)
    expected = PythonRequirement('{key}=={version}'.format(
        key=expected_dist, version=pants_version()))

    def key(python_requirement):
        underlying = python_requirement.requirement
        return (underlying.key, underlying.specs, underlying.extras)

    actual_keys = [key(pr) for pr in python_requirement_library.payload.requirements]
    self.assertEqual([key(expected)], actual_keys)

    req = list(python_requirement_library.payload.requirements)[0]
    self.assertIsNotNone(req.requirement.marker)
    self.assertTrue(
        req.requirement.marker.evaluate(),
        'pants_requirement() should always work in the current test environment')
    # The marker should admit exactly python 2.7 and reject neighbors on both sides.
    self.assertFalse(req.requirement.marker.evaluate({'python_version': '3.5'}))
    self.assertFalse(req.requirement.marker.evaluate({'python_version': '2.6'}))
    self.assertTrue(req.requirement.marker.evaluate({'python_version': '2.7'}))
def setUp(self):
    """Build a small target graph: root -> {inner -> leaf, inner_with_external -> antlr req}."""
    super(PythonDependenciesTests, self).setUp()

    python_leaf = self.make_target('dependencies:python_leaf',
                                   target_type=PythonLibrary,
                                   sources=[])
    python_inner = self.make_target('dependencies:python_inner',
                                    target_type=PythonLibrary,
                                    sources=[],
                                    dependencies=[python_leaf])
    python_inner_with_external = self.make_target(
        'dependencies:python_inner_with_external',
        target_type=PythonRequirementLibrary,
        requirements=[PythonRequirement("antlr_python_runtime==3.1.3")])
    self.make_target('dependencies:python_root',
                     target_type=PythonLibrary,
                     sources=[],
                     dependencies=[python_inner, python_inner_with_external])
def _assert_unpacking(self, module_name):
    """Unpack a real pex wheel and assert the expected files were extracted.

    :param module_name: The module name to give the unpacked_whls target under test.
    """
    # TODO: figure out how to generate a nice fake wheel that the pex resolve will accept instead of
    # depending on a real wheel!
    pex_requirement = PythonRequirement('pex==1.5.3')
    unpacked_wheel_tgt = self._make_unpacked_wheel(
        pex_requirement,
        include_patterns=['pex/pex.py', 'pex/__init__.py'],
        module_name=module_name,
        # TODO: `within_data_subdir` is only tested implicitly by the tensorflow_custom_op target
        # in examples/! Make a fake wheel, resolve it, and test that `within_data_subdir`
        # descends into the correct directory!
        within_data_subdir=None)
    context = self.context(target_roots=[unpacked_wheel_tgt])
    unpack_task = self.create_task(context)
    unpack_task.execute()
    expected_files = {'pex/__init__.py', 'pex/pex.py'}
    with unpack_task.invalidated([unpacked_wheel_tgt]) as invalidation_check:
        # Exactly one versioned target is expected for the single target root.
        vt = assert_single_element(invalidation_check.all_vts)
        self.assertEqual(vt.target, unpacked_wheel_tgt)
        # The task publishes the unpacked file list as an UnpackedArchives product.
        archives = context.products.get_data(UnpackedArchives, dict)[vt.target]
        self.assertEqual(expected_files, set(archives.found_files))
def assert_pants_requirement(self, python_requirement_library, expected_dist='pantsbuild.pants'):
    """Assert the library holds one pants requirement whose marker matches only python 2.7.

    :param python_requirement_library: The PythonRequirementLibrary target to inspect.
    :param string expected_dist: The expected distribution key.
    """
    self.assertIsInstance(python_requirement_library, PythonRequirementLibrary)
    expected = PythonRequirement('{key}=={version}'.format(
        key=expected_dist, version=pants_version()))

    def key(python_requirement):
        return (python_requirement.requirement.key,
                python_requirement.requirement.specs,
                python_requirement.requirement.extras)

    self.assertEqual([key(expected)],
                     [key(pr) for pr in python_requirement_library.payload.requirements])

    req = list(python_requirement_library.payload.requirements)[0]
    self.assertIsNotNone(req.requirement.marker)

    def evaluate_version(version):
        return req.requirement.marker.evaluate({'python_version': version})

    self.assertTrue(evaluate_version('2.7'))
    # Bug fix: the original asserted `not all(...)`, which passes as long as ANY single
    # version fails the marker. Every non-2.7 version must fail, so assert `not any(...)`.
    self.assertFalse(
        any(evaluate_version(v) for v in ('2.6', '3.4', '3.5', '3.6', '3.7')))
def resolve_requirement_strings(self, interpreter, requirement_strings):
    """Resolve a list of pip-style requirement strings.

    Builds — and caches on disk, keyed by interpreter identity plus the sorted
    requirement set — a pex containing the resolved distributions.

    :param interpreter: The python interpreter to resolve for.
    :param requirement_strings: An iterable of pip-style requirement strings.
    :returns: A PEX holding the resolved requirements.
    """
    requirement_strings = sorted(requirement_strings)
    # Idiomatic emptiness check (was `len(requirement_strings) == 0`).
    if not requirement_strings:
        req_strings_id = 'no_requirements'
    elif len(requirement_strings) == 1:
        req_strings_id = requirement_strings[0]
    else:
        req_strings_id = hash_all(requirement_strings)

    path = os.path.realpath(
        os.path.join(self.workdir, str(interpreter.identity), req_strings_id))
    # Existence of the directory marks a previously-built, reusable resolve.
    if not os.path.isdir(path):
        reqs = [PythonRequirement(req_str) for req_str in requirement_strings]
        with safe_concurrent_creation(path) as safe_path:
            pex_builder = PexBuilderWrapper(
                PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
                PythonRepos.global_instance(),
                PythonSetup.global_instance(),
                self.context.log)
            pex_builder.add_resolved_requirements(reqs)
            pex_builder.freeze()
    return PEX(path, interpreter=interpreter)
def execute(self, **pex_run_kwargs):
    """Run a Python REPL (optionally IPython) over the in-context python targets.

    No-ops when the in-context targets are not python so another repl task can claim them.

    :param pex_run_kwargs: Extra keyword args forwarded to `pex.run`.
    :returns: The repl process's exit code, or None if interrupted.
    """
    (accept_predicate, reject_predicate) = Target.lang_discriminator('python')
    targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
    if targets:
        # We can't throw if the target isn't a python target, because perhaps we were called on a
        # JVM target, in which case we have to no-op and let scala repl do its thing.
        # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
        interpreter = self.select_interpreter_for_targets(targets)

        extra_requirements = []
        if self.get_options().ipython:
            # IPython repl: pull its entry point and requirements from task options.
            entry_point = self.get_options().ipython_entry_point
            for req in self.get_options().ipython_requirements:
                extra_requirements.append(PythonRequirement(req))
        else:
            # Plain stdlib repl.
            entry_point = 'code:interact'

        pex_info = PexInfo.default()
        pex_info.entry_point = entry_point
        with self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=None,
                                extra_requirements=extra_requirements) as chroot:
            pex = chroot.pex()
            # Release the global lock before the (arbitrarily long) interactive session.
            self.context.release_lock()
            with stty_utils.preserve_stty_settings():
                with self.context.new_workunit(name='run', labels=[WorkUnitLabel.RUN]):
                    po = pex.run(blocking=False, **pex_run_kwargs)
                    try:
                        return po.wait()
                    except KeyboardInterrupt:
                        # Ctrl-C ends the repl session without failing the task.
                        pass
def PythonEgg(glob, name=None):
    """Refers to pre-built Python eggs in the file system.

    (To instead fetch eggs in a ``pip``/``easy_install`` way, use ``python_requirement``)

    E.g., ``egg(name='foo', glob='foo-0.1-py2.6.egg')`` would pick up the file
    ``foo-0.1-py2.6.egg`` from the ``BUILD`` file's directory; targets could depend on it by name
    ``foo``.

    :param string glob: File glob pattern.
    :param string name: Target name; by default uses the egg's project name.
    :raises ValueError: If the glob matches no eggs, or eggs with multiple versions.
    """
    # TODO(John Sirois): Rationalize with globs handling in ParseContext
    eggs = fsglob(ParseContext.path(glob))

    requirements = set()
    for egg in eggs:
        if os.path.isdir(egg):
            metadata = PathMetadata(egg, os.path.join(egg, 'EGG-INFO'))
        else:
            metadata = EggMetadata(zipimporter(egg))
        dist = Distribution.from_filename(egg, metadata=metadata)
        requirements.add(dist.as_requirement())

    # Robustness fix: an empty match previously fell through to `requirements.pop()`,
    # raising a bare KeyError; fail fast with a clear message instead.
    if not requirements:
        raise ValueError('No eggs matched the glob {!r}!'.format(glob))
    if len(requirements) > 1:
        raise ValueError('Got multiple egg versions! => {}'.format(requirements))

    return PythonRequirement(str(requirements.pop()), name=name)
def test_antlr(self):
    """End-to-end: generate a python parser from an antlr grammar, pex it with a binary that
    drives it, run the pex, and assert the parsed output."""
    # NOTE(review): registers the antlr source root with PythonThriftLibrary — presumably
    # any python codegen library type suffices for root registration here; confirm.
    SourceRoot.register('src/antlr', PythonThriftLibrary)
    self.create_file(relpath='src/antlr/word/word.g', contents=dedent("""
      grammar word;

      options {
        language=Python;
        output=AST;
      }

      WORD: ('a'..'z'|'A'..'Z'|'!')+;

      word_up: WORD (' ' WORD)*;
    """))
    antlr_target = self.make_target(spec='src/antlr/word',
                                    target_type=PythonAntlrLibrary,
                                    antlr_version='3.1.3',
                                    sources=['word.g'],
                                    module='word')

    SourceRoot.register('src/python', PythonBinary)
    # The generated parser needs the matching antlr runtime at run time.
    antlr3 = self.make_target(
        spec='3rdparty/python:antlr3',
        target_type=PythonRequirementLibrary,
        requirements=[PythonRequirement('antlr_python_runtime==3.1.3')])
    self.create_file(relpath='src/python/test/main.py', contents=dedent("""
      import antlr3

      from word import wordLexer, wordParser


      def word_up():
        input = 'Hello World!'
        char_stream = antlr3.ANTLRStringStream(input)
        lexer = wordLexer.wordLexer(char_stream)
        tokens = antlr3.CommonTokenStream(lexer)
        parser = wordParser.wordParser(tokens)

        def print_node(node):
          print(node.text)
        visitor = antlr3.tree.TreeVisitor()
        visitor.visit(parser.word_up().tree, pre_action=print_node)
    """))
    binary = self.make_target(spec='src/python/test',
                              target_type=PythonBinary,
                              source='main.py',
                              dependencies=[antlr_target, antlr3])

    with self.dumped_chroot([binary]) as (pex_builder, python_chroot):
        pex_builder.set_entry_point('test.main:word_up')
        pex_builder.freeze()
        pex = python_chroot.pex()

        process = pex.run(blocking=False, stdout=subprocess.PIPE)
        stdout, _ = process.communicate()

        self.assertEqual(0, process.returncode)
        # The tree visitor prints one node per line: each WORD token and the space tokens.
        self.assertEqual(['Hello', ' ', 'World!'], stdout.splitlines())
def __call__(self, name=None):
    """
    :param string name: The name to use for the target, defaults to the parent dir name.
    """
    target_name = name or os.path.basename(self._parse_context.rel_path)
    pants_req = PythonRequirement(
        requirement='pantsbuild.pants=={}'.format(pants_version()))
    self._parse_context.create_object(
        PythonRequirementLibrary, name=target_name, requirements=[pants_req])
def test_bad_libraries_ref(self):
    """Pointing `libraries` at a non-requirement-library target raises WrongTargetTypeError."""
    self.make_target(':right-type',
                     PythonRequirementLibrary,
                     requirements=[PythonRequirement('foo==123')])
    # Making a target which is not a requirement library, which causes an error.
    self.make_target(':wrong-type',
                     UnpackedWheels,
                     libraries=[':right-type'],
                     module_name='foo')
    target = self.make_target(':foo',
                              UnpackedWheels,
                              libraries=[':wrong-type'],
                              module_name='foo')
    with self.assertRaises(ImportWheelsMixin.WrongTargetTypeError):
        target.imported_targets
def assert_pants_requirement(self, python_requirement_library):
    """Assert the library's sole requirement is pantsbuild.pants at the current version."""
    self.assertIsInstance(python_requirement_library, PythonRequirementLibrary)
    pants_requirement = PythonRequirement(
        'pantsbuild.pants=={}'.format(pants_version()))
    actual = [pr.requirement for pr in python_requirement_library.payload.requirements]
    self.assertEqual([pants_requirement.requirement], actual)
def _build_tool_pex(self, tool_subsystem, interpreter, pex_path):
    """Build a pex for the given tool subsystem at `pex_path`."""
    with safe_concurrent_creation(pex_path) as chroot:
        builder_wrapper = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=chroot, interpreter=interpreter),
            log=self.context.log)
        requirements = [PythonRequirement(spec)
                        for spec in tool_subsystem.get_requirement_specs()]
        builder_wrapper.add_resolved_requirements(reqs=requirements, platforms=['current'])
        builder_wrapper.set_entry_point(tool_subsystem.get_entry_point())
        builder_wrapper.freeze()
def dump(self):
    """Populate the chroot: dump library/binary sources, generate thrift/antlr requirements,
    resolve all requirements and dump the resolved distributions.

    :returns: The populated pex builder.
    """
    self.debug('Building chroot for %s:' % self._targets)
    targets = self.resolve(self._targets)

    for lib in targets['libraries'] | targets['binaries']:
        self._dump_library(lib)

    generated_reqs = OrderedSet()
    if targets['thrifts']:
        # Thrift requirement generation is memoized class-wide (MEMOIZED_THRIFTS) so repeated
        # dumps don't regenerate the same requirement.
        for thr in set(targets['thrifts']):
            if thr not in self.MEMOIZED_THRIFTS:
                self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
            generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
        # Generated thrift code also needs the thrift runtime itself.
        generated_reqs.add(PythonRequirement('thrift', use_2to3=True))

    for antlr in targets['antlrs']:
        generated_reqs.add(self._generate_antlr_requirement(antlr))

    reqs_from_libraries = OrderedSet()
    for req_lib in targets['reqs']:
        for req in req_lib.payload.requirements:
            reqs_from_libraries.add(req)

    reqs_to_build = OrderedSet()
    find_links = []
    for req in reqs_from_libraries | generated_reqs | self._extra_requirements:
        # Requirements may carry a version_filter; skip those that don't apply here.
        if not req.should_build(self._interpreter.python, Platform.current()):
            self.debug('Skipping %s based upon version filter' % req)
            continue
        reqs_to_build.add(req)
        self._dump_requirement(req.requirement)
        if req.repository:
            find_links.append(req.repository)

    distributions = resolve_multi(self._config,
                                  reqs_to_build,
                                  interpreter=self._interpreter,
                                  platforms=self._platforms,
                                  find_links=find_links)

    locations = set()
    for platform, dist_set in distributions.items():
        for dist in dist_set:
            # De-dupe dists that resolve identically across multiple platforms.
            if dist.location not in locations:
                self._dump_distribution(dist)
                locations.add(dist.location)

    if len(targets['binaries']) > 1:
        print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    return self._builder
def test_simple(self):
    """An UnpackedWheels target depends on its requirement library and imports its requirement."""
    self.make_target(':import_whls',
                     PythonRequirementLibrary,
                     requirements=[PythonRequirement('foo==123')])
    target = self.make_target(':foo',
                              UnpackedWheels,
                              libraries=[':import_whls'],
                              module_name='foo')

    self.assertIsInstance(target, UnpackedWheels)
    dependency_specs = list(target.compute_dependency_specs(payload=target.payload))
    self.assertSequenceEqual([':import_whls'], dependency_specs)
    import_whl_dep = assert_single_element(target.all_imported_requirements)
    self.assertIsInstance(import_whl_dep, PythonRequirement)
def create(cls, builder, log=None):
    """Wrap `builder` in a PexBuilderWrapper, pinning setuptools from global options.

    :param builder: The PEXBuilder to wrap.
    :param log: An optional logger; defaults to this module's logger.
    """
    options = cls.global_instance().get_options()
    setuptools_requirement = f'setuptools=={options.setuptools_version}'
    return PexBuilderWrapper(
        builder=builder,
        python_repos_subsystem=PythonRepos.global_instance(),
        python_setup_subsystem=PythonSetup.global_instance(),
        setuptools_requirement=PythonRequirement(setuptools_requirement),
        log=log or logging.getLogger(__name__))
def checker_pex(self, interpreter):
    """Return a PEX containing the style checker, building and caching it if needed.

    In PANTS_DEV mode the checker is built from the in-repo checker target's sources;
    otherwise the pinned self._CHECKER_REQ distribution is used — taken from sys.path
    when already present, else resolved from a distribution repo.
    """
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    # Cache key: the checker target's transitive hash in dev mode, else the pinned req.
    if pants_dev_mode:
        checker_id = self.checker_target.transitive_invalidation_hash()
    else:
        checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))
    if not os.path.exists(pex_path):
        with self.context.new_workunit(name='build-checker'):
            with safe_concurrent_creation(pex_path) as chroot:
                pex_builder = PexBuilderWrapper(
                    PEXBuilder(path=chroot, interpreter=interpreter),
                    PythonRepos.global_instance(),
                    PythonSetup.global_instance(),
                    self.context.log)

                # Constraining is required to guard against the case where the user
                # has a pexrc file set.
                pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

                if pants_dev_mode:
                    pex_builder.add_sources_from(self.checker_target)
                    req_libs = [tgt for tgt in self.checker_target.closure()
                                if isinstance(tgt, PythonRequirementLibrary)]
                    pex_builder.add_requirement_libs_from(req_libs=req_libs)
                else:
                    try:
                        # The checker is already on sys.path, eg: embedded in pants.pex.
                        working_set = WorkingSet(entries=sys.path)
                        for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)]):
                            pex_builder.add_direct_requirements(dist.requires())
                            pex_builder.add_distribution(dist)
                        pex_builder.add_direct_requirements([self._CHECKER_REQ])
                    except DistributionNotFound:
                        # We need to resolve the checker from a local or remote distribution repo.
                        pex_builder.add_resolved_requirements(
                            [PythonRequirement(self._CHECKER_REQ)])

                pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                pex_builder.freeze()
    return PEX(pex_path, interpreter=interpreter)
def test_unpack_wheels_fingerprint_strategy(self):
    """Fingerprints change when the requirement version changes, and are otherwise stable."""
    fingerprint_strategy = UnpackWheelsFingerprintStrategy()
    make_unpacked_wheel = functools.partial(self._make_unpacked_wheel, include_patterns=['bar'])

    original_req = PythonRequirement('com.example.bar==0.0.1')
    fingerprint1 = fingerprint_strategy.compute_fingerprint(make_unpacked_wheel(original_req))

    # Now, replace the build file with a different version.
    self.reset_build_graph()
    bumped_target = make_unpacked_wheel(PythonRequirement('com.example.bar==0.0.2'))
    fingerprint2 = fingerprint_strategy.compute_fingerprint(bumped_target)
    self.assertNotEqual(fingerprint1, fingerprint2)

    # Go back to the original library.
    self.reset_build_graph()
    fingerprint3 = fingerprint_strategy.compute_fingerprint(make_unpacked_wheel(original_req))
    self.assertEqual(fingerprint1, fingerprint3)
def _generate_requirement(self, library, builder_cls):
    """Build (or reuse a cached) sdist for `library` and return a requirement pointing at it."""
    key = self._key_generator.key_for_target(library)
    sdist_builder = builder_cls(library, get_buildroot(), self._config, '-' + key.hash[:8])

    cache_dir = os.path.join(self._egg_cache_root, key.id)
    if self._build_invalidator.needs_update(key):
        # Stale or missing: build the sdist, copy it into the cache, and mark it fresh.
        sdist = sdist_builder.build(interpreter=self._interpreter)
        safe_mkdir(cache_dir)
        shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
        self._build_invalidator.update(key)

    return PythonRequirement(sdist_builder.requirement_string(),
                             repository=cache_dir,
                             use_2to3=True)
def create_requirements(cls, context, workdir):
    """Inject and return a synthetic requirement library target for isort."""
    options = cls.global_instance().get_options()
    address = Address(spec_path=fast_relpath(workdir, get_buildroot()),
                      target_name='isort')
    requirement_strings = ['isort=={}'.format(options.version)]
    requirement_strings.extend(options.additional_requirements)
    context.build_graph.inject_synthetic_target(
        address=address,
        target_type=PythonRequirementLibrary,
        requirements=[PythonRequirement(r) for r in requirement_strings])
    return context.build_graph.get_target(address=address)
def _create_requirements(self, context, workdir):
    """Inject and return a synthetic requirement library for this task's tool subsystem."""
    tool_subsystem = self._tool_subsystem()
    address = Address(spec_path=fast_relpath(workdir, get_buildroot()),
                      target_name=tool_subsystem.options_scope)
    tool_requirements = [PythonRequirement(spec)
                         for spec in tool_subsystem.get_requirement_specs()]
    context.build_graph.inject_synthetic_target(
        address=address,
        target_type=PythonRequirementLibrary,
        requirements=tool_requirements)
    return context.build_graph.get_target(address=address)
def _run_mypy(self, py3_interpreter, mypy_args, **kwargs):
    """Run mypy (pinned to the configured version) from a cached chroot pex."""
    pex_info = PexInfo.default()
    pex_info.entry_point = 'mypy'
    mypy_requirement = PythonRequirement(
        'mypy=={}'.format(self.get_options().mypy_version))
    chroot = self.cached_chroot(interpreter=py3_interpreter,
                                pex_info=pex_info,
                                targets=[],
                                extra_requirements=[mypy_requirement])
    return chroot.pex().run(mypy_args, **kwargs)
def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH.

    :param pex_info: An optional PexInfo for the wrapper pex.
    :returns: A WrappedPEX combining the requirements pex, the sources pex, and any
              extra-requirements pex via PEX_PATH.
    """
    relevant_targets = self.context.targets(
        lambda tgt: isinstance(tgt, (PythonRequirementLibrary, PythonTarget, Resources)))
    with self.invalidated(relevant_targets) as invalidation_check:
        # If there are no relevant targets, we still go through the motions of resolving
        # an empty set of requirements, to prevent downstream tasks from having to check
        # for this special case.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'
        interpreter = self.context.products.get_data(PythonInterpreter)
        path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
        extra_pex_paths_file_path = path + '.extra_pex_paths'
        extra_pex_paths = None

        # Note that we check for the existence of the directory, instead of for invalid_vts,
        # to cover the empty case.
        if not os.path.isdir(path):
            pexes = [
                self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
                self.context.products.get_data(GatherSources.PYTHON_SOURCES)
            ]
            if self.extra_requirements():
                extra_reqs = [PythonRequirement(req_str)
                              for req_str in self.extra_requirements()]
                addr = Address.parse('{}_extra_reqs'.format(self.__class__.__name__))
                self.context.build_graph.inject_synthetic_target(
                    addr, PythonRequirementLibrary, requirements=extra_reqs)
                # Add the extra requirements first, so they take precedence over any colliding version
                # in the target set's dependency closure.
                pexes = [self.resolve_requirements(
                    [self.context.build_graph.get_target(addr)])] + pexes
            extra_pex_paths = [pex.path() for pex in pexes if pex]

            with safe_concurrent_creation(path) as safe_path:
                builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
                builder.freeze()
            with open(extra_pex_paths_file_path, 'w') as outfile:
                for epp in extra_pex_paths:
                    outfile.write(epp)
                    # Bug fix: the file is opened in text mode, so write a str newline —
                    # the original wrote b'\n', which raises TypeError on Python 3.
                    outfile.write('\n')

        if extra_pex_paths is None:
            # Cache hit: the pex paths were persisted by a previous run; reload them.
            with open(extra_pex_paths_file_path, 'r') as infile:
                extra_pex_paths = [p.strip() for p in infile.readlines()]

    return WrappedPEX(PEX(os.path.realpath(path), interpreter), extra_pex_paths, interpreter)
def checker_pex(self, interpreter):
    """Return a PEX containing the style checker, building and caching it if needed.

    In PANTS_DEV mode the checker is built from the in-repo checker target's sources;
    otherwise the pinned self._CHECKER_REQ distribution is used — taken from sys.path
    when already present, else resolved from a distribution repo.
    """
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    # Cache key: the checker target's transitive hash in dev mode, else the pinned req.
    if pants_dev_mode:
        checker_id = self.checker_target.transitive_invalidation_hash()
    else:
        checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))
    if not os.path.exists(pex_path):
        with self.context.new_workunit(name='build-checker'):
            with safe_concurrent_creation(pex_path) as chroot:
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(path=chroot, interpreter=interpreter),
                    log=self.context.log)

                # Constraining is required to guard against the case where the user
                # has a pexrc file set.
                pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

                if pants_dev_mode:
                    pex_builder.add_sources_from(self.checker_target)
                    req_libs = [tgt for tgt in self.checker_target.closure()
                                if isinstance(tgt, PythonRequirementLibrary)]
                    pex_builder.add_requirement_libs_from(req_libs=req_libs)
                else:
                    try:
                        # The checker is already on sys.path, eg: embedded in pants.pex.
                        platform = Platform.current()
                        platform_name = platform.platform
                        env = Environment(search_path=sys.path,
                                          platform=platform_name,
                                          python=interpreter.version_string)

                        working_set = WorkingSet(entries=sys.path)

                        for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)], env=env):
                            pex_builder.add_direct_requirements(dist.requires())
                            # NB: We add the dist location instead of the dist itself to make sure its a
                            # distribution style pex knows how to package.
                            pex_builder.add_dist_location(dist.location)
                        pex_builder.add_direct_requirements([self._CHECKER_REQ])
                    except (DistributionNotFound, PEXBuilder.InvalidDistribution):
                        # We need to resolve the checker from a local or remote distribution repo.
                        pex_builder.add_resolved_requirements(
                            [PythonRequirement(self._CHECKER_REQ)])

                pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                pex_builder.freeze()
    return PEX(pex_path, interpreter=interpreter)