def test_find_compatible_interpreters():
    """Exercise find_compatible_interpreters over a PEX_PYTHON_PATH of 2.7/3.5/3.6."""
    py27 = ensure_python_interpreter(PY27)
    py35 = ensure_python_interpreter(PY35)
    py36 = ensure_python_interpreter(PY36)
    pex_python_path = ':'.join([py27, py35, py36])

    def find_interpreters(*constraints):
        # Compare just the binary paths against the fixture paths above.
        return [interp.binary
                for interp in find_compatible_interpreters(
                    pex_python_path=pex_python_path,
                    compatibility_constraints=constraints)]

    # Constraints that match a subset of the three interpreters.
    assert [py35, py36] == find_interpreters('>3')
    assert [py27] == find_interpreters('<3')
    assert [py36] == find_interpreters('>{}'.format(PY35))
    assert [py35] == find_interpreters('>{}, <{}'.format(PY27, PY36))
    assert [py36] == find_interpreters('>=3.6')

    # Constraints satisfied by none of them.
    assert [] == find_interpreters('<2')
    assert [] == find_interpreters('>4')
    assert [] == find_interpreters('>{}, <{}'.format(PY27, PY35))

    # All interpreters on PATH including whatever interpreter is currently running.
    all_known_interpreters = set(PythonInterpreter.all())
    all_known_interpreters.add(PythonInterpreter.get())

    interpreters = find_compatible_interpreters(compatibility_constraints=['<3'])
    assert set(interpreters).issubset(all_known_interpreters)
def find_compatible_interpreters(pex_python_path=None, compatibility_constraints=None):
    """Find all compatible interpreters on the system within the supplied constraints.

    Candidates come from ``pex_python_path`` (PEX_PYTHON_PATH) when set; otherwise
    from $PATH plus the currently-running interpreter.
    """
    if pex_python_path:
        interpreters = []
        for binary in pex_python_path.split(os.pathsep):
            try:
                interpreters.append(PythonInterpreter.from_binary(binary))
            except Executor.ExecutionError:
                # A broken entry is skipped with a warning rather than aborting the scan.
                print("Python interpreter %s in PEX_PYTHON_PATH failed to load properly." % binary,
                      file=sys.stderr)
        if not interpreters:
            die('PEX_PYTHON_PATH was defined, but no valid interpreters could be identified. Exiting.')
    else:
        # We may have been invoked with a specific interpreter not on the $PATH, make sure our
        # sys.executable is included as a candidate in this case.
        interpreters = OrderedSet([PythonInterpreter.get()])
        # Add all qualifying interpreters found in $PATH.
        interpreters.update(PythonInterpreter.all())
    if compatibility_constraints:
        return list(matched_interpreters(interpreters, compatibility_constraints))
    return list(interpreters)
def to_python_interpreter(full_path_or_basename):
    """Resolve a path or basename to a PythonInterpreter, dying if none can be found."""
    if os.path.exists(full_path_or_basename):
        return PythonInterpreter.from_binary(full_path_or_basename)
    interpreter = PythonInterpreter.from_env(full_path_or_basename)
    if interpreter is None:
        die('Failed to find interpreter: %s' % full_path_or_basename)
    return interpreter
def interpreter_from_options(options):
    """Select the interpreter named by ``options.python``, or the current one if unset."""
    if not options.python:
        return PythonInterpreter.get()
    if os.path.exists(options.python):
        # An explicit path on disk wins.
        return PythonInterpreter.from_binary(options.python)
    interpreter = PythonInterpreter.from_env(options.python)
    if interpreter is None:
        die('Failed to find interpreter: %s' % options.python)
    return interpreter
def fake_interpreter(id_str):
    """Create a fake executable 'interpreter' that just prints ``id_str`` when run."""
    interpreter_dir = safe_mkdtemp()
    binary = os.path.join(interpreter_dir, 'binary')
    # A tiny script driven by the real current interpreter via its shebang line.
    script = dedent("""
        #!{}
        from __future__ import print_function
        print({!r})
        """.format(PythonInterpreter.get().binary, id_str)).strip()
    with open(binary, 'w') as fp:
        fp.write(script)
    chmod_plus_x(binary)
    return PythonInterpreter.from_binary(binary)
def __init__(self, context, python_setup, python_repos, targets, extra_requirements=None,
             builder=None, platforms=None, interpreter=None):
    """Set up a chroot build over ``targets``, defaulting the interpreter and builder."""
    self.context = context
    self._python_setup = python_setup
    self._python_repos = python_repos
    self._targets = targets
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._builder = builder or PEXBuilder(os.path.realpath(tempfile.mkdtemp()),
                                          interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(
        self._python_setup.scratch_dir, 'artifacts', str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def __init__(self, pex, pex_info, interpreter=None, **kw):
    """Initialize the environment for ``pex`` described by ``pex_info``."""
    self._internal_cache = os.path.join(pex, pex_info.internal_cache)
    self._pex = pex
    self._pex_info = pex_info
    self._activated = False
    self._working_set = None
    self._interpreter = interpreter or PythonInterpreter.get()
    self._inherit_path = pex_info.inherit_path
    self._supported_tags = []

    # For the bug this works around, see: https://bitbucket.org/pypy/pypy/issues/1686
    # NB: This must be installed early before the underlying pex is loaded in any way.
    if self._interpreter.identity.abbr_impl == 'pp' and zipfile.is_zipfile(self._pex):
        self._install_pypy_zipimporter_workaround(self._pex)

    platform = Platform.current()
    platform_name = platform.platform
    super(PEXEnvironment, self).__init__(
        search_path=[] if pex_info.inherit_path == 'false' else sys.path,
        # NB: Our pkg_resources.Environment base-class wants the platform name string and not the
        # pex.platform.Platform object.
        platform=platform_name,
        **kw
    )
    self._target_interpreter_env = self._interpreter.identity.pkg_resources_env(platform_name)
    self._supported_tags.extend(platform.supported_tags(self._interpreter))
    TRACER.log(
        'E: tags for %r x %r -> %s' % (self.platform, self._interpreter, self._supported_tags),
        V=9
    )
def __init__(self, path=None, interpreter=None, chroot=None, pex_info=None, preamble=None,
             copy=False):
    """Initialize a pex builder.

    :keyword path: The path to write the PEX as it is built.  If ``None`` is specified,
      a temporary directory will be created.
    :keyword interpreter: The interpreter to use to build this PEX environment.  If ``None``
      is specified, the current interpreter is used.
    :keyword chroot: If specified, preexisting :class:`Chroot` to use for building the PEX.
    :keyword pex_info: A preexisting PexInfo to use to build the PEX.
    :keyword preamble: If supplied, execute this code prior to bootstrapping this PEX
      environment.
    :type preamble: str
    :keyword copy: If False, attempt to create the pex environment via hard-linking, falling
      back to copying across devices. If True, always copy.

    .. versionchanged:: 0.8
        The temporary directory created when ``path`` is not specified is now garbage collected on
        interpreter exit.
    """
    self._interpreter = interpreter or PythonInterpreter.get()
    self._chroot = chroot or Chroot(path or safe_mkdtemp())
    self._pex_info = pex_info or PexInfo.default(self._interpreter)
    self._preamble = preamble or ''
    self._copy = copy
    self._shebang = self._interpreter.identity.hashbang()
    self._logger = logging.getLogger(__name__)
    self._frozen = False
    self._distributions = set()
def test_resolve_multiplatform_requirements(self):
    """Resolve cffi for several platforms and verify each expected wheel is fetched."""
    cffi_tgt = self._fake_target('cffi', ['cffi==1.9.1'])
    pex = self._resolve_requirements([cffi_tgt], {
        'python-setup': {
            # We have 'current' so we can import the module in order to get the path to it.
            # The other platforms (one of which may happen to be the same as current) are what we
            # actually test the presence of.
            'platforms': ['current', 'macosx-10.10-x86_64', 'manylinux1_i686', 'win_amd64']
        }
    })
    stdout_data, stderr_data = self._exercise_module(pex, 'cffi')
    self.assertEquals('', stderr_data.strip())

    path = stdout_data.strip()
    wheel_dir = os.path.join(path[0:path.find('{sep}.deps{sep}'.format(sep=os.sep))], '.deps')
    wheels = set(os.listdir(wheel_dir))

    def name_and_platform(whl):
        # The wheel filename is of the format
        # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
        # See https://www.python.org/dev/peps/pep-0425/.
        # We don't care about the python or abi versions (they depend on what we're currently
        # running on), we just want to make sure we have all the platforms we expect.
        parts = os.path.splitext(whl)[0].split('-')
        return '{}-{}'.format(parts[0], parts[1]), parts[-1]

    names_and_platforms = set(name_and_platform(w) for w in wheels)
    expected_name_and_platforms = {
        # Note that we don't check for 'current' because if there's no published wheel for the
        # current platform we may end up with a wheel for a compatible platform (e.g., if there's no
        # wheel for macosx_10_11_x86_64, 'current' will be satisfied by macosx_10_10_x86_64).
        # This is technically also true for the hard-coded platforms we list below, but we chose
        # those and we happen to know that cffi wheels exist for them.  Whereas we have no such
        # advance knowledge for the current platform, whatever that might be in the future.
        ('cffi-1.9.1', 'macosx_10_10_x86_64'),
        ('cffi-1.9.1', 'manylinux1_i686'),
        ('cffi-1.9.1', 'win_amd64'),
    }

    # pycparser is a dependency of cffi only on CPython. We might as well check for it,
    # as extra verification that we correctly fetch transitive dependencies.
    if PythonInterpreter.get().identity.interpreter == 'CPython':
        # N.B. Since pycparser is a floating transitive dep of cffi, we do a version-agnostic
        # check here to avoid master breakage as new pycparser versions are released on pypi.
        self.assertTrue(
            any(
                (package.startswith('pycparser-') and platform == 'any')
                for package, platform in names_and_platforms
            ),
            'could not find pycparser in transitive dependencies!'
        )

    self.assertTrue(expected_name_and_platforms.issubset(names_and_platforms),
                    '{} is not a subset of {}'.format(expected_name_and_platforms,
                                                      names_and_platforms))

    # Check that the path is under the test's build root, so we know the pex was created there.
    self.assertTrue(path.startswith(os.path.realpath(get_buildroot())))
def test_conflict_via_config(self):
    """Targets with compatibility conflict with targets with default compatibility.

    Builds a binary under a config whose interpreter constraints exclude the current
    interpreter, and asserts the build fails with a conflict message naming the target.
    """
    # NB: Passes empty `args` to avoid having the default CLI args override the config.
    config = {
        'python-setup': {
            'interpreter_constraints': ['CPython<2.7'],
        }
    }
    binary_target = '{}:echo_interpreter_version'.format(self.testproject)
    pants_run = self._build_pex(binary_target, config=config, args=[])
    self.assert_failure(
        pants_run,
        'Unexpected successful build of {binary}.'.format(binary=binary_target)
    )
    self.assertIn(
        "Unable to detect a suitable interpreter for compatibilities",
        pants_run.stdout_data
    )
    # Fixed typo in the assertion message ("compatibiility" -> "compatibility").
    self.assertIn(
        "CPython<2.7",
        pants_run.stdout_data,
        "Did not output requested compatibility."
    )
    self.assertIn("Conflicting targets: {}".format(binary_target), pants_run.stdout_data)
    # NB: we expect the error message to print *all* interpreters resolved by Pants. However,
    # to simplify the tests and for hermeticity, here we only test that the current interpreter
    # gets printed as a proxy for the overall behavior.
    # Fixed typo in the assertion message ("discoved" -> "discovered").
    self.assertIn(
        PythonInterpreter.get().version_string,
        pants_run.stdout_data,
        "Did not output interpreters discovered by Pants."
    )
def build(self, targets, args, interpreter=None, conn_timeout=None, fast_tests=False):
    """Build the given python targets: run all tests first, then build each binary.

    Returns 0 on success, or the first non-zero exit code from a sub-builder.
    """
    interpreter = interpreter or PythonInterpreter.get()

    for target in targets:
        assert target.is_python, "PythonBuilder can only build PythonTargets, given %s" % str(target)

    # PythonBuilder supports PythonTests and PythonBinaries
    test_targets = [target for target in targets if isinstance(target, PythonTests)]
    binary_targets = [target for target in targets if isinstance(target, PythonBinary)]

    rv = PythonTestBuilder(
        test_targets,
        args,
        interpreter=interpreter,
        conn_timeout=conn_timeout,
        fast=fast_tests).run()
    if rv != 0:
        return rv

    for binary_target in binary_targets:
        rv = PythonBinaryBuilder(
            binary_target,
            self._run_tracker,
            interpreter=interpreter,
            conn_timeout=conn_timeout).run()
        if rv != 0:
            return rv

    return 0
def __init__(self, target, run_tracker, interpreter=None):
    """Prepare a chroot for building the PythonBinary ``target``."""
    self.target = target
    self.interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(target, PythonBinary):
        raise PythonBinaryBuilder.NotABinaryTargetException(
            "Target %s is not a PythonBinary!" % target)

    config = Config.from_cache()
    self.distdir = config.getdefault('pants_distdir')
    # NOTE(review): tempfile.mktemp is race-prone and deprecated; kept for behavior
    # parity — presumably PEXBuilder wants a not-yet-existing path. TODO confirm.
    distpath = tempfile.mktemp(dir=self.distdir, prefix=target.name)

    run_info = run_tracker.run_info
    build_properties = {}
    build_properties.update(run_info.add_basic_info(run_id=None, timestamp=time.time()))
    build_properties.update(run_info.add_scm_info())

    pexinfo = target.pexinfo.copy()
    pexinfo.build_properties = build_properties

    builder = PEXBuilder(distpath, pex_info=pexinfo, interpreter=self.interpreter)
    self.chroot = PythonChroot(
        targets=[target],
        builder=builder,
        platforms=target.platforms,
        interpreter=self.interpreter)
def __init__(self, context, targets, extra_requirements=None, builder=None, platforms=None,
             interpreter=None):
    """Set up a chroot build over ``targets`` using config-derived setup/repos objects."""
    self.context = context
    # TODO: These should come from the caller, and we should not know about config.
    self._python_setup = PythonSetup(self.context.config)
    self._python_repos = PythonRepos(self.context.config)

    self._targets = targets
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._builder = builder or PEXBuilder(os.path.realpath(tempfile.mkdtemp()),
                                          interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(
        self._python_setup.scratch_dir, 'artifacts', str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def compilation(valid_paths=None, invalid_paths=None, compile_paths=None):
    """Yield (root, compile-result) for a temp tree seeded with valid/invalid sources."""
    with temporary_dir() as root:
        for path in valid_paths:
            write_source(os.path.join(root, path))
        for path in invalid_paths:
            write_source(os.path.join(root, path), valid=False)
        compiler = Compiler(PythonInterpreter.get())
        yield root, compiler.compile(root, compile_paths)
def _interpreter_from_path(self, path, filters):
    """Load a cached interpreter from ``path``, or None if absent or filtered out."""
    try:
        executable = os.readlink(os.path.join(path, 'python'))
    except OSError:
        # No cached symlink at this path.
        return None
    interpreter = PythonInterpreter.from_binary(executable, include_site_extras=False)
    if not self._matches(interpreter, filters):
        return None
    return self._resolve(interpreter)
def _get_interpreter(interpreter_path_file):
    """Reconstruct an interpreter from a path file.

    Line 1 is the binary path; each subsequent line is a tab-separated
    (dist_name, dist_version, location) extra to attach.
    """
    with open(interpreter_path_file, 'r') as infile:
        lines = infile.readlines()

    binary = lines[0].strip()
    interpreter = PythonInterpreter.from_binary(binary, include_site_extras=False)
    for line in lines[1:]:
        dist_name, dist_version, location = line.strip().split('\t')
        interpreter = interpreter.with_extra(dist_name, dist_version, location)
    return interpreter
def _validate_good_interpreter_path_file(path):
    """Return True iff the first line of the file at ``path`` names a loadable interpreter.

    :param path: Path of an interpreter path file whose first line is a binary path.
    :returns: True when the binary loads as an interpreter, False when it cannot be found.
    """
    with open(path, 'r') as fp:
        lines = fp.readlines()
    binary = lines[0].strip()
    try:
        # Idiom fix: `True if x else False` -> bool(x); same truthiness, clearer intent.
        return bool(PythonInterpreter.from_binary(binary))
    except Executor.ExecutableNotFound:
        return False
def _setup_paths(self, paths, filters=()):
    """Find interpreters under paths, cache them, and yield the cached entries."""
    for interpreter in self._matching(PythonInterpreter.all(paths), filters=filters):
        identity_str = str(interpreter.identity)
        cached = self._interpreter_from_relpath(identity_str, filters=filters)
        if cached is None:
            # Not cached yet: install it, then re-read from the cache.
            self._setup_interpreter(interpreter, identity_str)
            cached = self._interpreter_from_relpath(identity_str, filters=filters)
        if cached:
            yield cached
def __init__(self, targets, args, interpreter=None, conn_timeout=None, fast=False):
    """Configure a test run over ``targets`` with the given runner args."""
    self.targets = targets
    self.args = args
    self.interpreter = interpreter or PythonInterpreter.get()
    self._conn_timeout = conn_timeout
    # If fast is true, we run all the tests in a single chroot. This is MUCH faster than
    # creating a chroot for each test target. However running each test separately is more
    # correct, as the isolation verifies that its dependencies are correctly declared.
    self._fast = fast
def __init__(self, pex=sys.argv[0], interpreter=None, env=ENV, verify_entry_point=False):
    """Bind a PEX file to an interpreter and environment, optionally verifying its entry point."""
    self._pex = pex
    self._interpreter = interpreter or PythonInterpreter.get()
    self._pex_info = PexInfo.from_pex(self._pex)
    self._pex_info_overrides = PexInfo.from_env(env=env)
    self._vars = env
    self._envs = []
    self._working_set = None
    if verify_entry_point:
        self._do_entry_point_verification()
def test_resolve_multiplatform_requirements(self):
    """Resolve cffi for several platforms and verify the expected wheels are present."""
    cffi_tgt = self._fake_target("cffi", ["cffi==1.9.1"])
    pex = self._resolve_requirements(
        [cffi_tgt],
        {
            "python-setup": {
                # We have 'current' so we can import the module in order to get the path to it.
                # The other platforms (one of which may happen to be the same as current) are what we
                # actually test the presence of.
                "platforms": ["current", "macosx-10.10-x86_64", "manylinux1_i686", "win_amd64"]
            }
        },
    )
    stdout_data, stderr_data = self._exercise_module(pex, "cffi")
    self.assertEquals("", stderr_data.strip())

    path = stdout_data.strip()
    wheel_dir = os.path.join(path[0 : path.find("{sep}.deps{sep}".format(sep=os.sep))], ".deps")
    wheels = set(os.listdir(wheel_dir))

    def name_and_platform(whl):
        # The wheel filename is of the format
        # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
        # See https://www.python.org/dev/peps/pep-0425/.
        # We don't care about the python or abi versions (they depend on what we're currently
        # running on), we just want to make sure we have all the platforms we expect.
        parts = os.path.splitext(whl)[0].split("-")
        return "{}-{}".format(parts[0], parts[1]), parts[-1]

    names_and_platforms = set(name_and_platform(w) for w in wheels)
    expected_name_and_platforms = {
        # Note that we don't check for 'current' because if there's no published wheel for the
        # current platform we may end up with a wheel for a compatible platform (e.g., if there's no
        # wheel for macosx_10_11_x86_64, 'current' will be satisfied by macosx_10_10_x86_64).
        # This is technically also true for the hard-coded platforms we list below, but we chose
        # those and we happen to know that cffi wheels exist for them.  Whereas we have no such
        # advance knowledge for the current platform, whatever that might be in the future.
        ("cffi-1.9.1", "macosx_10_10_x86_64"),
        ("cffi-1.9.1", "manylinux1_i686"),
        ("cffi-1.9.1", "win_amd64"),
    }

    # pycparser is a dependency of cffi only on CPython. We might as well check for it,
    # as extra verification that we correctly fetch transitive dependencies.
    if PythonInterpreter.get().identity.interpreter == "CPython":
        expected_name_and_platforms.add(("pycparser-2.17", "any"))

    self.assertTrue(
        expected_name_and_platforms.issubset(names_and_platforms),
        "{} is not a subset of {}".format(expected_name_and_platforms, names_and_platforms),
    )

    # Check that the path is under the test's build root, so we know the pex was created there.
    self.assertTrue(path.startswith(os.path.realpath(get_buildroot())))
def _setup_paths(self, paths, filters):
    """Find interpreters under paths, cache them, and yield the cached entries."""
    for interpreter in self._matching(PythonInterpreter.all(paths), filters):
        identity_str = str(interpreter.identity)
        cache_path = os.path.join(self._cache_dir, identity_str)
        cached = self._interpreter_from_path(cache_path, filters)
        if cached is None:
            # Not cached yet: install it, then re-read from the cache location.
            self._setup_interpreter(interpreter, cache_path)
            cached = self._interpreter_from_path(cache_path, filters)
        if cached:
            yield cached
def test_iter_ordering():
    """Obtainer iteration prefers wheels, then eggs, then source packages."""
    pi = PythonInterpreter.get()
    tgz = SourcePackage('psutil-0.6.1.tar.gz')
    egg = EggPackage('psutil-0.6.1-py%s-%s.egg' % (pi.python, get_build_platform()))
    whl = WheelPackage('psutil-0.6.1-cp%s-none-%s.whl' % (
        pi.python.replace('.', ''),
        get_build_platform().replace('-', '_').replace('.', '_').lower()))
    req = Requirement.parse('psutil')

    # Ordering is by package kind regardless of input order.
    assert list(FakeObtainer([tgz, egg, whl]).iter(req)) == [whl, egg, tgz]
    assert list(FakeObtainer([egg, tgz, whl]).iter(req)) == [whl, egg, tgz]
def _setup_paths(self, paths, filters):
    """Find interpreters under paths and record the cached ones in self._interpreters."""
    for interpreter in self._matching(PythonInterpreter.all(paths), filters):
        identity_str = str(interpreter.identity)
        path = os.path.join(self._path, identity_str)
        cached = self._interpreter_from_path(path, filters)
        if cached is None:
            # Not cached yet: install it, then re-read from the cache location.
            self._setup_interpreter(interpreter)
            cached = self._interpreter_from_path(path, filters)
            if cached is None:
                continue
        self._interpreters.add(cached)
def _get_interpreter(self, interpreter_path_file, targets):
    """Return the cached interpreter from the path file, or select (and cache) a new one."""
    if os.path.exists(interpreter_path_file):
        with open(interpreter_path_file, "r") as infile:
            binary = infile.read().strip()
        try:
            return PythonInterpreter.from_binary(binary)
        except PythonInterpreter.Error:
            # The cached binary no longer loads; drop the stale reference and re-select.
            self.context.log.info(
                "Stale interpreter reference detected: {}, removing reference and "
                "selecting a new interpreter.".format(binary))
            os.remove(interpreter_path_file)
    return self._select_interpreter(interpreter_path_file, targets)
def test_resolvable_directory():
    """ResolvableDirectory parses a source dir path with and without an extras suffix."""
    builder = ResolverOptionsBuilder()
    interpreter = PythonInterpreter.get()

    with make_source_dir(name='my_project') as td:
        rdir = ResolvableDirectory.from_string(td, builder, interpreter)
        assert rdir.name == pkg_resources.safe_name('my_project')
        assert rdir.extras() == []

        rdir = ResolvableDirectory.from_string(td + '[extra1,extra2]', builder, interpreter)
        assert rdir.name == pkg_resources.safe_name('my_project')
        assert rdir.extras() == ['extra1', 'extra2']
def __init__(self, targets, args, interpreter=None, conn_timeout=None, fast=False, debug=False):
    """Configure a test run over ``targets`` with the given runner args."""
    self._targets = targets
    self._args = args
    self._interpreter = interpreter or PythonInterpreter.get()
    self._conn_timeout = conn_timeout
    # If fast is true, we run all the tests in a single chroot. This is MUCH faster than
    # creating a chroot for each test target. However running each test separately is more
    # correct, as the isolation verifies that its dependencies are correctly declared.
    self._fast = fast
    self._debug = debug
def resolve_pytest(python_version, pytest_version):
    """Resolve the given pytest version under the given python version.

    Returns a mapping of resolved project key -> version, asserting pytest itself
    resolved at exactly ``pytest_version``.
    """
    interpreter = PythonInterpreter.from_binary(ensure_python_interpreter(python_version))
    resolved_dists = resolve_multi(
        interpreters=[interpreter],
        requirements=["pytest=={}".format(pytest_version)])
    project_to_version = {rd.distribution.key: rd.distribution.version for rd in resolved_dists}
    assert project_to_version["pytest"] == pytest_version
    return project_to_version
def resolve_multi(config, requirements, interpreter=None, platforms=None, conn_timeout=None,
                  ttl=3600):
    """Multi-platform dependency resolution for PEX files.

    Given a pants configuration and a set of requirements, return a map of platform name ->
    list of :class:`pkg_resources.Distribution` instances needed to satisfy them on that
    platform. That may involve distributions for multiple platforms.

    :param config: Pants :class:`Config` object.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param interpreter: :class:`PythonInterpreter` for which requirements should be resolved.
                        If None specified, defaults to current interpreter.
    :param platforms: Optional list of platforms against requirements will be resolved. If
                      None specified, the defaults from `config` will be used.
    :param conn_timeout: Optional connection timeout for any remote fetching.
    :param ttl: Time in seconds before we consider re-resolving an open-ended requirement, e.g.
                "flask>=0.2" if a matching distribution is available on disk.  Defaults to 3600.
    """
    interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(interpreter, PythonInterpreter):
        raise TypeError(
            'Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))

    install_cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
    platforms = get_platforms(
        platforms or config.getlist('python-setup', 'platforms', ['current']))

    distributions = dict()
    for platform in platforms:
        translator = Translator.default(
            install_cache=install_cache,
            interpreter=interpreter,
            platform=platform,
            conn_timeout=conn_timeout)
        obtainer = PantsObtainer(
            install_cache=install_cache,
            crawler=crawler_from_config(config, conn_timeout=conn_timeout),
            fetchers=fetchers_from_config(config) or [PyPIFetcher()],
            translators=translator)
        distributions[platform] = resolve(
            requirements=requirements,
            obtainer=obtainer,
            interpreter=interpreter,
            platform=platform)
    return distributions
def create_bare_interpreter(binary_path):
    """Creates an interpreter for python binary at the given path.

    The interpreter is bare in that it has no extras associated with it.

    :returns: A bare python interpreter with no extras.
    :rtype: :class:`pex.interpreter.PythonInterpreter`
    """
    # TODO(John Sirois): Replace with a more direct PythonInterpreter construction API call when
    # https://github.com/pantsbuild/pex/issues/510 is fixed.
    interpreter_with_extras = PythonInterpreter.from_binary(binary_path)
    return PythonInterpreter(binary_path, interpreter_with_extras.identity, extras=None)
def __init__(self, source_dir, interpreter=None, install_dir=None):
    """Create an installer from an unpacked source distribution in source_dir."""
    self._source_dir = source_dir
    self._install_tmp = install_dir or safe_mkdtemp()
    self._installed = None

    from pex import vendor
    self._interpreter = vendor.setup_interpreter(
        distributions=self.mixins,
        interpreter=interpreter or PythonInterpreter.get())
    if not self._interpreter.satisfies(self.mixins):
        raise self.IncapableInterpreter('Interpreter %s not capable of running %s' % (
            self._interpreter.binary, self.__class__.__name__))
def _interpreter_from_relpath(self, path, filters=()):
    """Load a cached interpreter from a cache-relative path, purging dangling entries."""
    path = os.path.join(self._cache_dir, path)
    try:
        executable = os.readlink(os.path.join(path, 'python'))
        if not os.path.exists(executable):
            # The symlink target vanished; drop the stale cache entry.
            self._purge_interpreter(path)
            return None
    except OSError:
        return None
    interpreter = PythonInterpreter.from_binary(executable)
    if not self._matches(interpreter, filters=filters):
        return None
    return interpreter
def __init__(
    self,
    source_dir,  # type: str
    interpreter=None,  # type: Optional[PythonInterpreter]
    wheel_dir=None,  # type: Optional[str]
    verify=True,  # type: bool
):
    # type: (...) -> None
    """Create a wheel from an unpacked source distribution in source_dir."""
    self._source_dir = source_dir
    self._wheel_dir = wheel_dir or safe_mkdtemp()
    self._interpreter = interpreter or PythonInterpreter.get()
    self._verify = verify
def test_resolve_current_platform(p537_resolve_cache):
    # type: (str) -> None
    """'current' platform resolves per-interpreter: same interpreter -> same dist."""
    resolve_current = functools.partial(
        resolve_p537_wheel_names, cache=p537_resolve_cache, platforms=["current"]
    )

    other_python_version = PY36 if PY_VER == (3, 5) else PY35
    other_python = PythonInterpreter.from_binary(ensure_python_interpreter(other_python_version))
    current_python = PythonInterpreter.get()

    resolved_other = resolve_current(interpreters=[other_python])
    resolved_current = resolve_current()

    assert 1 == len(resolved_other)
    assert 1 == len(resolved_current)
    assert resolved_other != resolved_current
    assert resolved_current == resolve_current(interpreters=[current_python])
    assert resolved_current == resolve_current(interpreters=[current_python, current_python])

    # Here we have 2 local interpreters satisfying current but with different platforms and thus
    # different dists for 2 total dists.
    assert 2 == len(resolve_current(interpreters=[current_python, other_python]))
def _select_pex_python_interpreter(target_python, compatibility_constraints=None):
    """Resolve the PEX_PYTHON interpreter name to a concrete binary path.

    :param target_python: Interpreter name/path requested via PEX_PYTHON.
    :param compatibility_constraints: Optional interpreter constraint strings the
      selected interpreter must satisfy.
    :returns: The resolved binary path.

    Dies (process exit) when the interpreter cannot be found, does not exist, or
    fails the compatibility constraints.
    """
    target = find_in_path(target_python)

    if not target:
        # Bug fix: report the requested name; `target` is falsy here, so interpolating
        # it produced a useless message like "... PEX_PYTHON: None".
        die('Failed to find interpreter specified by PEX_PYTHON: %s' % target_python)
    # Bug fix: check existence before loading the binary; previously from_binary() ran
    # first and could fail with a less clear error on a dangling path.
    if not os.path.exists(target):
        die('Target interpreter specified by PEX_PYTHON %s does not exist. Exiting.' % target)
    if compatibility_constraints:
        pi = PythonInterpreter.from_binary(target)
        if not list(matched_interpreters([pi], compatibility_constraints)):
            die('Interpreter specified by PEX_PYTHON (%s) is not compatible with specified '
                'interpreter constraints: %s' % (target, str(compatibility_constraints)))
    return target
def test_find_compatible_interpreters():
    # type: () -> None
    """Constraint matching over an explicit interpreter path of 2.7/3.5/3.6."""
    py27 = ensure_python_interpreter(PY27)
    py35 = ensure_python_interpreter(PY35)
    py36 = ensure_python_interpreter(PY36)
    path = [py27, py35, py36]

    # Constraints that select a subset.
    assert [py35, py36] == find_interpreters(path, ">3")
    assert [py27] == find_interpreters(path, "<3")
    assert [py36] == find_interpreters(path, ">{}".format(PY35))
    assert [py35] == find_interpreters(path, ">{}, <{}".format(PY27, PY36))
    assert [py36] == find_interpreters(path, ">=3.6")

    # Unsatisfiable constraints raise instead of returning empty.
    with pytest.raises(UnsatisfiableInterpreterConstraintsError):
        find_interpreters(path, "<2")
    with pytest.raises(UnsatisfiableInterpreterConstraintsError):
        find_interpreters(path, ">4")
    with pytest.raises(UnsatisfiableInterpreterConstraintsError):
        find_interpreters(path, ">{}, <{}".format(PY27, PY35))

    # All interpreters on PATH including whatever interpreter is currently running.
    all_known_interpreters = set(PythonInterpreter.all())
    all_known_interpreters.add(PythonInterpreter.get())

    interpreters = set(iter_compatible_interpreters(compatibility_constraints=["<3"]))
    i_rendered = "\n ".join(sorted(map(repr, interpreters)))
    aki_rendered = "\n ".join(sorted(map(repr, all_known_interpreters)))
    assert interpreters.issubset(all_known_interpreters), dedent(
        """
        interpreters '<3':
        {interpreters}

        all known interpreters:
        {all_known_interpreters}
        """.format(interpreters=i_rendered, all_known_interpreters=aki_rendered)
    )
def _get_interpreter(self, interpreter_path_file, targets):
    """Return the cached interpreter from the path file, or select (and cache) a new one."""
    if os.path.exists(interpreter_path_file):
        with open(interpreter_path_file, 'r') as infile:
            binary = infile.read().strip()
        try:
            return PythonInterpreter.from_binary(binary)
        except Executor.ExecutableNotFound:
            # TODO(John Sirois): Trap a more appropriate exception once available:
            #   https://github.com/pantsbuild/pex/issues/672
            self.context.log.info('Stale interpreter reference detected: {}, removing reference and '
                                  'selecting a new interpreter.'.format(binary))
            os.remove(interpreter_path_file)
    return self._select_interpreter(interpreter_path_file, targets)
def _cache_current_interpreter(self):
    """Cache the currently-running interpreter and return its cached counterpart."""
    cache = PythonInterpreterCache(self.config())
    # We only need to cache the current interpreter, avoid caching for every interpreter on the
    # PATH.
    current_interpreter = PythonInterpreter.get()
    current_id = (current_interpreter.binary, current_interpreter.identity)
    for cached_interpreter in cache.setup(filters=[current_interpreter.identity.requirement]):
        # TODO(John Sirois): Revert to directly comparing interpreters when
        # https://github.com/pantsbuild/pex/pull/31 is in, released and consumed by pants.
        if (cached_interpreter.binary, cached_interpreter.identity) == current_id:
            return cached_interpreter
    raise RuntimeError('Could not find suitable interpreter to run tests.')
def _establish_interpreter(args):
    """Pick the interpreter named by args.python (or the current one) and resolve its tooling."""
    if not args.python:
        interpreter = PythonInterpreter.get()
    elif os.path.exists(args.python):
        interpreter = PythonInterpreter.from_binary(args.python)
    else:
        interpreter = PythonInterpreter.from_env(args.python)
        if interpreter is None:
            die('Failed to find interpreter: %s' % args.python)

    with TRACER.timed('Setting up interpreter %s' % interpreter.binary, V=2):
        resolve = functools.partial(resolve_interpreter, args.interpreter_cache_dir, args.repos)

        # resolve setuptools
        interpreter = resolve(interpreter, SETUPTOOLS_REQUIREMENT)

        # possibly resolve wheel
        if interpreter and args.use_wheel:
            interpreter = resolve(interpreter, WHEEL_REQUIREMENT)

        return interpreter
def test_iter_interpreter_mixed(self, test_interpreter1, test_interpreter2, invalid_interpreter):
    # type: (str, str, str) -> None
    """iter_candidates yields interpreters for valid paths and error entries for invalid ones."""
    results = list(
        PythonInterpreter.iter_candidates(
            paths=[test_interpreter1, invalid_interpreter, test_interpreter2]))

    assert len(results) == 3

    valid = [result for result in results if isinstance(result, PythonInterpreter)]
    errors = [result for result in results if not isinstance(result, PythonInterpreter)]

    # The two real interpreters come through, in path order.
    assert [
        PythonInterpreter.from_binary(path)
        for path in (test_interpreter1, test_interpreter2)
    ] == valid

    assert len(errors) == 1
    self.assert_error(errors[0], invalid_interpreter)
def test_iter_ordering():
    """Iterator ordering prefers wheels, then eggs, then source packages."""
    pi = PythonInterpreter.get()
    tgz = SourcePackage('psutil-0.6.1.tar.gz')
    egg = EggPackage('psutil-0.6.1-py%s-%s.egg' % (pi.python, get_build_platform()))
    whl = WheelPackage(
        'psutil-0.6.1-cp%s-none-%s.whl' % (
            pi.python.replace('.', ''),
            get_build_platform().replace('-', '_').replace('.', '_').lower()))
    req = Requirement.parse('psutil')

    # Ordering is by package kind regardless of input order.
    assert list(FakeIterator([tgz, egg, whl]).iter(req)) == [whl, egg, tgz]
    assert list(FakeIterator([egg, tgz, whl]).iter(req)) == [whl, egg, tgz]
def test_resolve_multiplatform_requirements(self):
    """Resolving for several platforms puts one wheel per platform (plus any
    transitive deps) in the pex's ``.deps`` dir, built under the buildroot.

    Fix: ``assertEquals`` is a deprecated unittest alias (removed in
    Python 3.12); use ``assertEqual``.
    """
    cffi_tgt = self._fake_target('cffi', ['cffi==1.9.1'])
    pex = self._resolve_requirements([cffi_tgt], {
        'python-setup': {
            # We have 'current' so we can import the module in order to get the path to it.
            # The other platforms (one of which may happen to be the same as current) are what we
            # actually test the presence of.
            'platforms': ['current', 'macosx-10.10-x86_64', 'manylinux1_i686', 'win_amd64']
        }
    })
    stdout_data, stderr_data = self._exercise_module(pex, 'cffi')
    self.assertEqual('', stderr_data.strip())
    path = stdout_data.strip()
    wheel_dir = os.path.join(path[0:path.find('{sep}.deps{sep}'.format(sep=os.sep))], '.deps')
    wheels = set(os.listdir(wheel_dir))

    def name_and_platform(whl):
        # The wheel filename is of the format
        # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
        # See https://www.python.org/dev/peps/pep-0425/.
        # We don't care about the python or abi versions (they depend on what we're currently
        # running on), we just want to make sure we have all the platforms we expect.
        parts = os.path.splitext(whl)[0].split('-')
        return '{}-{}'.format(parts[0], parts[1]), parts[-1]

    names_and_platforms = set(name_and_platform(w) for w in wheels)
    expected_name_and_platforms = {
        # Note that Platform.current() may happen to be the same as one of the other platforms.
        ('cffi-1.9.1', Platform.current().replace('-', '_')),
        ('cffi-1.9.1', 'macosx_10_10_x86_64'),
        ('cffi-1.9.1', 'manylinux1_i686'),
        ('cffi-1.9.1', 'win_amd64'),
    }
    # pycparser is a dependency of cffi only on CPython. We might as well check for it,
    # as extra verification that we correctly fetch transitive dependencies.
    if PythonInterpreter.get().identity.interpreter == 'CPython':
        expected_name_and_platforms.add(('pycparser-2.17', 'any'))
    self.assertEqual(expected_name_and_platforms, names_and_platforms)
    # Check that the path is under the test's build root, so we know the pex was created there.
    self.assertTrue(path.startswith(os.path.realpath(get_buildroot())))
def _do_test_exact_requirements_interpreter_change(rule_runner: RuleRunner, sdist: bool) -> None:
    """A cached exact resolve is invalidated by an interpreter change, but a
    compatible interpreter keeps loading straight from the cache."""
    plugins = [("jake", "1.2.3"), ("jane", "3.4.5")]
    python36 = PythonInterpreter.from_binary(python_interpreter_path(PY_36))
    python37 = PythonInterpreter.from_binary(python_interpreter_path(PY_37))
    with plugin_resolution(
        rule_runner, interpreter=python36, plugins=plugins, sdist=sdist
    ) as (working_set, chroot, repo_dir):
        # Remove the dist repo so any forced re-resolution must fail.
        safe_rmtree(repo_dir)
        with pytest.raises(ExecutionError):
            with plugin_resolution(
                rule_runner, interpreter=python37, chroot=chroot, plugins=plugins
            ):
                pytest.fail(
                    "Plugin re-resolution is expected for an incompatible interpreter and it is "
                    "expected to fail since we removed the dist `repo_dir` above."
                )
        # But for a compatible interpreter the exact resolve results should be re-used and load
        # directly from the still in-tact cache.
        with plugin_resolution(
            rule_runner, interpreter=python36, chroot=chroot, plugins=plugins
        ) as (working_set2, _, _):
            assert list(working_set) == list(working_set2)
def _get_interpreter(interpreter_path_file):
    """Reconstruct a PythonInterpreter from a previously written path file.

    The file's first line is the interpreter binary path; each following line
    is a tab-separated ``name\\tversion\\tlocation`` triple describing an extra
    dist to attach.

    :returns: the interpreter with its extras attached, or ``None`` when the
      file is empty or the recorded binary can no longer be found.
    """
    with open(interpreter_path_file, 'r') as infile:
        lines = infile.readlines()
    # Fix: an empty/truncated file used to raise IndexError on lines[0];
    # treat it like a missing executable and return None instead.
    if not lines:
        return None
    binary = lines[0].strip()
    try:
        interpreter = PythonInterpreter.from_binary(binary, include_site_extras=False)
    except Executor.ExecutableNotFound:
        return None
    for line in lines[1:]:
        dist_name, dist_version, location = line.strip().split('\t')
        interpreter = interpreter.with_extra(dist_name, dist_version, location)
    return interpreter
def __init__(self, allow_prereleases=None, interpreter=None, platform=None, use_manylinux=None):
    """Capture the resolution target: interpreter, (expanded) platform, the
    pkg_resources environment for that platform, and the supported tags."""
    self._allow_prereleases = allow_prereleases
    # Default to the interpreter we are currently running under.
    self._interpreter = interpreter or PythonInterpreter.get()
    self._platform = self._maybe_expand_platform(self._interpreter, platform)
    # The pkg_resources environment is keyed on the concrete platform name.
    self._target_interpreter_env = self._interpreter.identity.pkg_resources_env(
        self._platform.platform)
    self._supported_tags = self._platform.supported_tags(self._interpreter, use_manylinux)
    TRACER.log(
        'R: tags for %r x %r -> %s' % (self._platform, self._interpreter, self._supported_tags),
        V=9)
def test_run_pex():
    """Compatible platform selections run without the warning; a mismatched
    explicit platform under a py27 runtime is expected to warn."""
    warning = "WARNING: attempting to run PEX with incompatible platforms!"
    # No explicit platform at all is compatible by construction.
    assert warning not in assert_run_pex()
    # 'current' and the spelled-out current platform are equivalent selections.
    current_platform = PythonInterpreter.get().platform
    for plat in ("current", current_platform):
        assert warning not in assert_run_pex(pex_args=["--platform={}".format(plat)])
    # An explicit macOS cp37 platform under a py27 runtime should trigger the warning.
    py27 = ensure_python_interpreter(PY27)
    stderr_lines = assert_run_pex(python=py27, pex_args=["--platform=macosx-10.13-x86_64-cp-37-m"])
    assert warning in stderr_lines
def test_exact_requirements_interpreter_change(self, unused_test_name, packager_cls):
    """An exact plugin resolve is tied to its interpreter: switching to an
    incompatible interpreter forces a re-resolve (which fails once the dist
    repo is gone), while the original interpreter keeps hitting the cache."""
    python36 = PythonInterpreter.from_binary(
        python_interpreter_path(PY_36))
    python37 = PythonInterpreter.from_binary(
        python_interpreter_path(PY_37))
    with self.plugin_resolution(
        interpreter=python36,
        plugins=[("jake", "1.2.3"), ("jane", "3.4.5")],
        packager_cls=packager_cls,
    ) as results:
        working_set, chroot, repo_dir, cache_dir = results
        # Remove the dist repo so that any attempted re-resolution must fail.
        safe_rmtree(repo_dir)
        with self.assertRaises(Unsatisfiable):
            with self.plugin_resolution(
                interpreter=python37,
                chroot=chroot,
                plugins=[("jake", "1.2.3"), ("jane", "3.4.5")],
            ):
                # Reaching this body would mean the resolve unexpectedly succeeded.
                self.fail(
                    "Plugin re-resolution is expected for an incompatible interpreter and it is "
                    "expected to fail since we removed the dist `repo_dir` above."
                )
        # But for a compatible interpreter the exact resolve results should be re-used and load
        # directly from the still in-tact cache.
        with self.plugin_resolution(interpreter=python36, chroot=chroot,
                                    plugins=[("jake", "1.2.3"), ("jane", "3.4.5")
                                             ]) as results2:
            working_set2, _, _, _ = results2
            self.assertEqual(list(working_set), list(working_set2))
def resolve_multi(config, requirements, interpreter=None, platforms=None, ttl=3600, find_links=None):
    """Resolve requirements for every requested platform.

    :param config: Pants :class:`Config` object.
    :param requirements: :class:`PythonRequirement` objects to resolve.
    :param interpreter: :class:`PythonInterpreter` to resolve for; defaults to
      the current interpreter when None.
    :param platforms: platforms to resolve against; defaults to the `config`
      values when None.
    :param ttl: seconds before an open-ended requirement (e.g. "flask>=0.2")
      with a matching on-disk distribution is considered for re-resolution.
      Defaults to 3600.
    :param find_links: extra paths to search for source packages.
    :returns: dict mapping each platform to its resolved distributions.
    """
    interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(interpreter, PythonInterpreter):
        raise TypeError(
            'Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))
    cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
    platforms = get_platforms(
        platforms or config.getlist('python-setup', 'platforms', ['current']))
    fetchers = fetchers_from_config(config)
    if find_links:
        fetchers.extend(Fetcher([path]) for path in find_links)
    context = context_from_config(config)
    return {
        platform: resolve(requirements=requirements,
                          interpreter=interpreter,
                          fetchers=fetchers,
                          platform=platform,
                          context=context,
                          cache=cache,
                          cache_ttl=ttl)
        for platform in platforms
    }
def pack_in_pex(requirements: List[str],
                output: str,
                ignored_packages: Collection[str] = (),
                pex_inherit_path: str = "prefer",
                editable_requirements: Dict[str, str] = {}
                ) -> str:
    """Pack the current environment into a pex archive.

    Fixes:
    - the per-source debug log passed a lazy argument without a ``%s``
      placeholder, which makes the logging module raise "not all arguments
      converted" whenever DEBUG logging is enabled;
    - ``ignored_packages`` used the mutable default ``[]``; an immutable
      tuple is equivalent for the membership tests done here.

    :param requirements: requirement specifiers to resolve (e.g. ['tensorflow==1.15.0'])
    :param output: location of the pex
    :param ignored_packages: distribution keys to be excluded from the pex
    :param pex_inherit_path: see https://github.com/pantsbuild/pex/blob/master/pex/bin/pex.py#L264,
           possible values ['false', 'fallback', 'prefer']
    :param editable_requirements: mapping of name -> source path whose sources
           are added to the pex
    :return: destination of the archive (``output``)
    :raises Unsatisfiable, Untranslatable: when requirement resolution fails
    """
    # NOTE(review): editable_requirements keeps a mutable `{}` default; it is
    # never mutated here, but a None sentinel would be safer long-term.
    interpreter = PythonInterpreter.get()
    pex_info = PexInfo.default(interpreter)
    pex_info.inherit_path = pex_inherit_path
    pex_builder = PEXBuilder(
        copy=True,
        interpreter=interpreter,
        pex_info=pex_info)

    for current_package in editable_requirements.values():
        # Lazy %-style args so the path is only formatted when DEBUG is on.
        _logger.debug("Add current path %s as source", current_package)
        _walk_and_do(pex_builder.add_source, current_package)

    try:
        resolveds = resolve_multi(
            requirements=requirements,
            indexes=[CRITEO_PYPI_URL] if _is_criteo() else None)
        for resolved in resolveds:
            if resolved.distribution.key in ignored_packages:
                _logger.debug(f"Ignore requirement {resolved.distribution}")
                continue
            _logger.debug(f"Add requirement {resolved.distribution}")
            pex_builder.add_distribution(resolved.distribution)
            pex_builder.add_requirement(resolved.requirement)
    except (Unsatisfiable, Untranslatable):
        _logger.exception('Cannot create pex')
        raise

    pex_builder.build(output)
    return output
def test_sorter_sort():
    """Default precedence sorts wheel > egg > sdist; a custom precedence
    reorders accordingly and `filter=True` drops unranked package types."""
    interp = PythonInterpreter.get()
    platform = get_build_platform()
    tgz = SourcePackage('psutil-0.6.1.tar.gz')
    egg = EggPackage('psutil-0.6.1-py%s-%s.egg' % (interp.python, platform))
    whl = WheelPackage('psutil-0.6.1-cp%s-none-%s.whl' % (
        interp.python.replace('.', ''),
        platform.replace('-', '_').replace('.', '_').lower()))
    default_order = [whl, egg, tgz]
    assert Sorter().sort([tgz, egg, whl]) == default_order
    assert Sorter().sort([egg, tgz, whl]) == default_order
    # A package type absent from the precedence either sorts last or is filtered out.
    custom_sorter = Sorter(precedence=(EggPackage, WheelPackage))
    assert custom_sorter.sort([egg, tgz, whl], filter=False) == [egg, whl, tgz]
    assert custom_sorter.sort([egg, tgz, whl], filter=True) == [egg, whl]
def _select_interpreter(candidate_interpreters_iter):
    """Pick one interpreter from the candidates.

    Returns the currently running interpreter as soon as it appears among the
    candidates; otherwise the minimum candidate, or None when there are none.
    """
    current = PythonInterpreter.get()
    others = []
    for candidate in candidate_interpreters_iter:
        # Always prefer continuing with the current interpreter when possible.
        if current == candidate:
            return current
        others.append(candidate)
    if not others:
        return None
    # TODO: Allow the selection strategy to be parameterized:
    # https://github.com/pantsbuild/pex/issues/430
    return min(others)