Example #1
def test_local_cached_open():
  cache = safe_mkdtemp()
  web = CachedWeb(cache=cache)

  source_dir = safe_mkdtemp()
  source = os.path.join(source_dir, 'filename')
  with open(source, 'wb') as fp:
    fp.write(b'data')

  with contextlib.closing(web.open(source)) as cached_fp1:
    assert b'data' == cached_fp1.read()
  with contextlib.closing(web.open(source)) as cached_fp2:
    assert b'data' == cached_fp2.read()
Example #2
def isolated():
  """Returns a chroot for third_party isolated from the ``sys.path``.

  PEX will typically be installed in site-packages flat alongside many other distributions; as such,
  adding the location of the pex distribution to the ``sys.path`` will typically expose many other
  distributions. An isolated chroot can be used as a ``sys.path`` entry to effect only the exposure
  of pex.

  :return: The path of the chroot.
  :rtype: str
  """
  global _ISOLATED
  if _ISOLATED is None:
    from pex import vendor
    from pex.common import safe_mkdtemp, Chroot

    chroot = Chroot(safe_mkdtemp())
    with _tracer().timed('Isolating pex in {}'.format(chroot)):
      pex_path = os.path.join(vendor.VendorSpec.ROOT, 'pex')
      for root, _, files in os.walk(pex_path):
        for f in files:
          if not f.endswith('.pyc'):
            abs_file_path = os.path.join(root, f)
            relpath = os.path.relpath(abs_file_path, pex_path)
            chroot.copy(abs_file_path, os.path.join('pex', relpath), label='pex')

    _ISOLATED = chroot
  return _ISOLATED.path()
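A hedged usage sketch of the path returned above, used as a ``sys.path`` entry (the import location of ``isolated`` is an assumption, not something this snippet confirms):

import sys

from pex.third_party import isolated  # assumed import location

sys.path.insert(0, isolated())  # expose only the vendored 'pex' package
import pex  # resolves from the isolated chroot rather than site-packages,
            # assuming pex has not already been imported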
Example #3
  def _add_dist_zip(self, path, dist_name):
    # We need to distinguish between wheels and other zips. Most of the time,
    # when we have a zip, it contains its contents in an importable form.
    # But wheels don't have to be importable, so we need to force them
    # into an importable shape. We can do that by installing it into its own
    # wheel dir.
    if dist_name.endswith("whl"):
      from pex.third_party.wheel.install import WheelFile
      tmp = safe_mkdtemp()
      whltmp = os.path.join(tmp, dist_name)
      os.mkdir(whltmp)
      wf = WheelFile(path)
      wf.install(overrides=self._get_installer_paths(whltmp), force=True)
      for root, _, files in os.walk(whltmp):
        pruned_dir = os.path.relpath(root, tmp)
        for f in files:
          fullpath = os.path.join(root, f)
          target = os.path.join(self._pex_info.internal_cache, pruned_dir, f)
          self._copy_or_link(fullpath, target)
      return CacheHelper.dir_hash(whltmp)

    with open_zip(path) as zf:
      for name in zf.namelist():
        if name.endswith('/'):
          continue
        target = os.path.join(self._pex_info.internal_cache, dist_name, name)
        self._chroot.write(zf.read(name), target)
      return CacheHelper.zip_hash(zf)
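The branch above keys entirely off the file name; a minimal sketch of that convention (PEP 427 fixes the ``.whl`` suffix for wheel files):

import os

def is_wheel(dist_name):
  # Wheel filenames end in '.whl' (PEP 427); anything else is treated as a
  # plain zip whose contents are assumed already importable.
  return os.path.splitext(dist_name)[1] == '.whl'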
Example #4
  def __init__(self, path=None, interpreter=None, chroot=None, pex_info=None, preamble=None,
               copy=False):
    """Initialize a pex builder.

    :keyword path: The path to write the PEX as it is built.  If ``None`` is specified,
      a temporary directory will be created.
    :keyword interpreter: The interpreter to use to build this PEX environment.  If ``None``
      is specified, the current interpreter is used.
    :keyword chroot: If specified, preexisting :class:`Chroot` to use for building the PEX.
    :keyword pex_info: A preexisting PexInfo to use to build the PEX.
    :keyword preamble: If supplied, execute this code prior to bootstrapping this PEX
      environment.
    :type preamble: str
    :keyword copy: If False, attempt to create the pex environment via hard-linking, falling
                   back to copying across devices. If True, always copy.

    .. versionchanged:: 0.8
      The temporary directory created when ``path`` is not specified is now garbage collected on
      interpreter exit.
    """
    self._interpreter = interpreter or PythonInterpreter.get()
    self._chroot = chroot or Chroot(path or safe_mkdtemp())
    self._pex_info = pex_info or PexInfo.default(self._interpreter)
    self._preamble = preamble or ''
    self._copy = copy

    self._shebang = self._interpreter.identity.hashbang()
    self._logger = logging.getLogger(__name__)
    self._frozen = False
    self._distributions = set()
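A minimal usage sketch of the builder configured above (the entry point is hypothetical; ``set_entry_point``, ``freeze`` and ``build`` are the same calls used in the ``main`` examples later in this listing):

from pex.pex_builder import PEXBuilder

builder = PEXBuilder()                       # temp dir, GC'd at interpreter exit
builder.set_entry_point('my_app.main:main')  # hypothetical entry point
builder.freeze()                             # finalize the chroot
builder.build('my_app.pex')                  # write out the executable zip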
Example #5
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreter', V=2):
    interpreter = interpreter_from_options(options)

  if interpreter is None:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt, resolver_option_builder):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

  if options.cache_dir:
    resolver = CachingResolver(options.cache_dir, options.cache_ttl, **resolver_kwargs)
  else:
    resolver = Resolver(**resolver_kwargs)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolver.resolve(resolvables)
    except Unsatisfiable as e:
      die(e)

  for dist in resolveds:
    log('  %s' % dist, v=options.verbosity)
    pex_builder.add_distribution(dist)
    pex_builder.add_requirement(dist.as_requirement())

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #6
  def unpack(cls, filename, location=None):
    path = location or safe_mkdtemp()
    ext = cls.get_extension(filename)
    if ext is None:
      raise cls.InvalidArchive('Unknown archive format: %s' % filename)
    archive_class, error_class = cls.EXTENSIONS[ext]
    try:
      with contextlib.closing(archive_class(filename)) as package:
        package.extractall(path=path)
    except error_class:
      raise cls.UnpackError('Could not extract %s' % filename)
    return cls.first_nontrivial_dir(path)
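A plausible shape for the ``cls.EXTENSIONS`` table the method consults, inferred from how its entries are used; the actual contents are an assumption:

import tarfile
import zipfile

EXTENSIONS = {
  # extension -> (archive opener, error raised for a corrupt archive); both
  # openers accept a filename and yield objects supporting extractall().
  '.zip': (zipfile.ZipFile, zipfile.BadZipfile),
  '.tar.gz': (tarfile.open, tarfile.TarError),
  '.tar.bz2': (tarfile.open, tarfile.TarError),
}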
Example #7
File: pex.py Project: twitter/heron
def build_pex(args, options, resolver_option_builder, interpreter=None):
    if interpreter is None:
        with TRACER.timed("Resolving interpreter", V=2):
            interpreter = interpreter_from_options(options)

    if interpreter is None:
        die("Could not find compatible interpreter", CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

    for requirements_txt in options.requirement_files:
        resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

    if options.cache_dir:
        resolver = CachingResolver(options.cache_dir, options.cache_ttl, **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    with TRACER.timed("Resolving distributions"):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as e:
            die(e)

    for dist in resolveds:
        log("  %s" % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #8
def build_pex(args):
    with TRACER.timed('Resolving interpreter', V=2):
        interpreter = _establish_interpreter(args)

    if interpreter is None:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=_PREAMBLE)

    pex_info = pex_builder.info

    pex_info.zip_safe = False
    pex_info.always_write_cache = True
    pex_info.inherit_path = False

    resolver_option_builder = _establish_resolver_options(args)
    reqs = args.reqs
    resolvables = [Resolvable.get(req, resolver_option_builder) for req in reqs]

    for requirements_txt in args.requirement_files:
        resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=args.platform)
    _add_spex_deps(resolvables, pex_builder, resolver_option_builder=resolver_option_builder)

    if not args.disable_cache:
        resolver = CachingResolver(args.cache_dir, args.cache_ttl, **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    resolveds = []
    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as exception:
            die(exception)

    for dist in resolveds:
        log('  %s' % dist, verbose=args.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    pex_builder.set_entry_point('spex:spex')

    if args.python_shebang:
        pex_builder.set_shebang(args.python_shebang)

    return pex_builder
Example #9
File: pex.py Project: kamilchm/pex
def build_pex(args, options):
  interpreter = interpreter_from_options(options)

  pex_builder = PEXBuilder(
      path=safe_mkdtemp(),
      interpreter=interpreter,
  )

  pex_info = pex_builder.info

  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  installer = WheelInstaller if options.use_wheel else EggInstaller

  resolveds = requirement_resolver(
      options.requirements,
      obtainer=build_obtainer(options),
      interpreter=interpreter,
      platform=options.platform)

  if resolveds:
    log('Resolved distributions:', v=options.verbosity)

  for pkg in resolveds:
    log('  %s' % pkg, v=options.verbosity)
    pex_builder.add_distribution(pkg)
    pex_builder.add_requirement(pkg.as_requirement())

  for source_dir in options.source_dirs:
    try:
      bdist = installer(source_dir).bdist()
    except installer.Error:
      die('Failed to run installer for %s' % source_dir, CANNOT_DISTILL)
    pex_builder.add_dist_location(bdist)

  if options.entry_point is not None:
    log('Setting entry point to %s' % options.entry_point, v=options.verbosity)
    pex_builder.info.entry_point = options.entry_point
  else:
    log('Creating environment PEX.', v=options.verbosity)

  return pex_builder
Example #10
File: http.py Project: jsirois/pex
  def fetch(self, link, into=None):
    """Fetch the binary content associated with the link and write to a file.

    :param link: The :class:`Link` to fetch.
    :keyword into: If specified, write into the directory ``into``.  If ``None``, creates a new
      temporary directory that persists for the duration of the interpreter.
    """
    target = os.path.join(into or safe_mkdtemp(), link.filename)

    if os.path.exists(target):
      # Assume that if the local file already exists, it is safe to use.
      return target

    with TRACER.timed('Fetching %s' % link.url, V=2):
      target_tmp = '%s.%s' % (target, uuid.uuid4())
      with contextlib.closing(self.open(link)) as in_fp:
        with safe_open(target_tmp, 'wb') as out_fp:
          shutil.copyfileobj(in_fp, out_fp)

    os.rename(target_tmp, target)
    return target
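``fetch`` publishes the download with a write-to-temp-then-rename pattern; the pattern in isolation (``os.rename`` is atomic within a single POSIX filesystem, so concurrent fetchers never observe a partial file):

import os
import uuid

def atomic_publish(target, data):
  tmp = '%s.%s' % (target, uuid.uuid4())  # unique sibling of the target
  with open(tmp, 'wb') as fp:
    fp.write(data)
  os.rename(tmp, target)  # atomic swap into place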
Example #11
File: util.py Project: jsirois/pex
  def access_zipped_assets(cls, static_module_name, static_path, dir_location=None):
    """
    Create a copy of static resource files as we can't serve them from within the pex file.

    :param static_module_name: Dotted name of the module whose resources should be cached,
      for example 'twitter.common.zookeeper'.
    :type static_module_name: str
    :param static_path: Name of the resource directory within the module, for example 'serverset'.
    :param dir_location: Existing directory to copy the assets into, or None to create a
      temporary one.
    :returns: Path of the directory containing the extracted assets.
    :rtype: str
    """

    # asset_path is initially a module name that's the same as the static_path, but will be
    # changed to walk the directory tree
    def walk_zipped_assets(static_module_name, static_path, asset_path, temp_dir):
      for asset in resource_listdir(static_module_name, asset_path):
        asset_target = os.path.normpath(
            os.path.join(os.path.relpath(asset_path, static_path), asset))
        if resource_isdir(static_module_name, os.path.join(asset_path, asset)):
          safe_mkdir(os.path.join(temp_dir, asset_target))
          walk_zipped_assets(static_module_name, static_path, os.path.join(asset_path, asset),
            temp_dir)
        else:
          with open(os.path.join(temp_dir, asset_target), 'wb') as fp:
            path = os.path.join(static_path, asset_target)
            file_data = resource_string(static_module_name, path)
            fp.write(file_data)

    if dir_location is None:
      temp_dir = safe_mkdtemp()
    else:
      temp_dir = dir_location

    walk_zipped_assets(static_module_name, static_path, static_path, temp_dir)

    return temp_dir
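A hedged usage sketch (in pex this classmethod hangs off ``DistributionHelper`` in ``pex.util``; the module and resource names below are hypothetical):

from pex.util import DistributionHelper

# Copy my_pkg/static out of the zipped pex so callers get real file paths.
assets_dir = DistributionHelper.access_zipped_assets('my_pkg', 'static')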
Example #12
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreters', V=2):

        def to_python_interpreter(full_path_or_basename):
            if os.path.exists(full_path_or_basename):
                return PythonInterpreter.from_binary(full_path_or_basename)
            else:
                interpreter = PythonInterpreter.from_env(full_path_or_basename)
                if interpreter is None:
                    die('Failed to find interpreter: %s' %
                        full_path_or_basename)
                return interpreter

        interpreters = [
            to_python_interpreter(interp)
            for interp in options.python or [sys.executable]
        ]

    if options.interpreter_constraint:
        # NB: options.python and interpreter constraints cannot be used together, so this will not
        # affect usages of the interpreter(s) specified by the "--python" command line flag.
        constraints = options.interpreter_constraint
        validate_constraints(constraints)
        if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
            rc_variables = Variables.from_rc(rc=options.rc_file)
            pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
        else:
            pex_python_path = ""
        interpreters = find_compatible_interpreters(pex_python_path,
                                                    constraints)

    if not interpreters:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters)

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    resolvables = resolvables_from_iterable(args,
                                            resolver_option_builder,
                                            interpreter=interpreter)

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt,
                                   builder=resolver_option_builder,
                                   interpreter=interpreter))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        builder=resolver_option_builder,
                                        interpreter=interpreter):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolve_multi(
                resolvables,
                interpreters=interpreters,
                platforms=options.platforms,
                cache=options.cache_dir,
                cache_ttl=options.cache_ttl,
                allow_prereleases=resolver_option_builder.prereleases_allowed,
                use_manylinux=options.use_manylinux)

            for resolved_dist in resolveds:
                log('  %s -> %s' %
                    (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity)
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #13
  def __init__(self, source_dir, interpreter=None, wheel_dir=None):
    # type: (str, Optional[PythonInterpreter], Optional[str]) -> None
    """Create a wheel from an unpacked source distribution in source_dir."""
    self._source_dir = source_dir
    self._wheel_dir = wheel_dir or safe_mkdtemp()
    self._interpreter = interpreter or PythonInterpreter.get()
Example #14
def main(args=None):
    args = args[:] if args else sys.argv[1:]
    args = [transform_legacy_arg(arg) for arg in args]
    parser = configure_clp()

    try:
        separator = args.index('--')
        args, cmdline = args[:separator], args[separator + 1:]
    except ValueError:
        args, cmdline = args, []

    options, reqs = parser.parse_args(args=args)

    if options.cache_dir:
        pex_warnings.warn(
            'The --cache-dir option is deprecated, use --pex-root instead.')
        if options.pex_root and options.cache_dir != options.pex_root:
            die('Both --cache-dir and --pex-root were passed with conflicting values. '
                'Just set --pex-root.')

    if options.disable_cache:

        def warn_ignore_pex_root(set_via):
            pex_warnings.warn(
                'The pex root has been set via {via} but --disable-cache is also set. '
                'Ignoring {via} and disabling caches.'.format(via=set_via))

        if options.cache_dir:
            warn_ignore_pex_root('--cache-dir')
        elif options.pex_root:
            warn_ignore_pex_root('--pex-root')
        elif os.environ.get('PEX_ROOT'):
            warn_ignore_pex_root('PEX_ROOT')

        pex_root = safe_mkdtemp()
    else:
        pex_root = options.cache_dir or options.pex_root or ENV.PEX_ROOT

    if options.python and options.interpreter_constraint:
        die('The "--python" and "--interpreter-constraint" options cannot be used together.'
            )

    with ENV.patch(PEX_VERBOSE=str(options.verbosity),
                   PEX_ROOT=pex_root) as patched_env:
        with TRACER.timed('Building pex'):
            pex_builder = build_pex(reqs, options, cache=ENV.PEX_ROOT)

        pex_builder.freeze(bytecode_compile=options.compile)
        pex = PEX(pex_builder.path(),
                  interpreter=pex_builder.interpreter,
                  verify_entry_point=options.validate_ep)

        if options.pex_name is not None:
            log('Saving PEX file to %s' % options.pex_name,
                V=options.verbosity)
            tmp_name = options.pex_name + '~'
            safe_delete(tmp_name)
            pex_builder.build(
                tmp_name,
                bytecode_compile=options.compile,
                deterministic_timestamp=not options.use_system_time)
            os.rename(tmp_name, options.pex_name)
        else:
            if not _compatible_with_current_platform(options.platforms):
                log('WARNING: attempting to run PEX with incompatible platforms!',
                    V=1)
                log('Running on platform {} but built for {}'.format(
                    Platform.current(), ', '.join(map(str,
                                                      options.platforms))),
                    V=1)

            log('Running PEX file at %s with args %s' %
                (pex_builder.path(), cmdline),
                V=options.verbosity)
            sys.exit(pex.run(args=list(cmdline), env=patched_env))
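The ``'--'`` split at the top of ``main`` separates pex's own options from the command line handed to the built PEX; a tiny worked example of that parse:

args = ['-v', 'requests', '--', '--help']  # illustrative invocation
separator = args.index('--')
pex_args, cmdline = args[:separator], args[separator + 1:]
assert pex_args == ['-v', 'requests']
assert cmdline == ['--help']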
Example #15
    def _spawn_from_binary_external(cls, binary):
        def create_interpreter(stdout, check_binary=False):
            identity = stdout.decode("utf-8").strip()
            if not identity:
                raise cls.IdentificationError(
                    "Could not establish identity of {}.".format(binary))
            interpreter = cls(PythonIdentity.decode(identity))
            # We should not need to check this since binary == interpreter.binary should always be
            # true, but historically this could be untrue as noted in `PythonIdentity.get`.
            if check_binary and not os.path.exists(interpreter.binary):
                raise cls.InterpreterNotFound(
                    "Cached interpreter for {} reports a binary of {}, which could not be found"
                    .format(binary, interpreter.binary))
            return interpreter

        # Part of the PythonInterpreter data are environment markers that depend on the current OS
        # release. That data can change when the OS is upgraded but (some of) the installed interpreters
        # remain the same. As such, include the OS in the hash structure for cached interpreters.
        os_digest = hashlib.sha1()
        for os_identifier in platform.release(), platform.version():
            os_digest.update(os_identifier.encode("utf-8"))
        os_hash = os_digest.hexdigest()

        interpreter_cache_dir = os.path.join(ENV.PEX_ROOT, "interpreters")
        os_cache_dir = os.path.join(interpreter_cache_dir, os_hash)
        if os.path.isdir(
                interpreter_cache_dir) and not os.path.isdir(os_cache_dir):
            with TRACER.timed("GCing interpreter cache from prior OS version"):
                safe_rmtree(interpreter_cache_dir)

        interpreter_hash = CacheHelper.hash(binary)

        # Some distributions include more than one copy of the same interpreter via a hard link (e.g.:
        # python3.7 is a hardlink to python3.7m). To ensure a deterministic INTERP-INFO file we must
        # emit a separate INTERP-INFO for each link since INTERP-INFO contains the interpreter path and
        # would otherwise be unstable.
        #
        # See cls._REGEXEN for a related affordance.
        #
        # N.B.: The path for --venv mode interpreters can be quite long; so we just used a fixed
        # length hash of the interpreter binary path to ensure uniqueness and not run afoul of file
        # name length limits.
        path_id = hashlib.sha1(binary.encode("utf-8")).hexdigest()

        cache_dir = os.path.join(os_cache_dir, interpreter_hash, path_id)
        cache_file = os.path.join(cache_dir, cls.INTERP_INFO_FILE)
        if os.path.isfile(cache_file):
            try:
                with open(cache_file, "rb") as fp:
                    return SpawnedJob.completed(
                        create_interpreter(fp.read(), check_binary=True))
            except (IOError, OSError, cls.Error, PythonIdentity.Error):
                safe_rmtree(cache_dir)
                return cls._spawn_from_binary_external(binary)
        else:
            pythonpath = third_party.expose(["pex"])
            cmd, env = cls._create_isolated_cmd(
                binary,
                args=[
                    "-c",
                    dedent("""\
                        import os
                        import sys

                        from pex.common import atomic_directory, safe_open
                        from pex.interpreter import PythonIdentity


                        encoded_identity = PythonIdentity.get(binary={binary!r}).encode()
                        sys.stdout.write(encoded_identity)
                        with atomic_directory({cache_dir!r}, exclusive=False) as cache_dir:
                            if cache_dir:
                                with safe_open(os.path.join(cache_dir, {info_file!r}), 'w') as fp:
                                    fp.write(encoded_identity)
                        """.format(binary=binary,
                                   cache_dir=cache_dir,
                                   info_file=cls.INTERP_INFO_FILE)),
                ],
                pythonpath=pythonpath,
            )
            # Ensure the `.` implicit PYTHONPATH entry contains no Pex code (of a different version)
            # that might interfere with the behavior we expect in the script above.
            cwd = safe_mkdtemp()
            process = Executor.open_process(cmd,
                                            env=env,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            cwd=cwd)
            job = Job(command=cmd,
                      process=process,
                      finalizer=lambda: safe_rmtree(cwd))
            return SpawnedJob.stdout(job, result_func=create_interpreter)
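Pulling the cache-key logic together: the INTERP-INFO location combines an OS hash, the interpreter content hash, and a fixed-length hash of the binary path. A condensed sketch of what the method computes:

import hashlib
import os

def interp_info_path(pex_root, os_release, os_version, interpreter_hash, binary, info_file):
  os_digest = hashlib.sha1()
  for os_identifier in (os_release, os_version):
    os_digest.update(os_identifier.encode('utf-8'))
  path_id = hashlib.sha1(binary.encode('utf-8')).hexdigest()  # stable per hard link
  return os.path.join(pex_root, 'interpreters', os_digest.hexdigest(),
                      interpreter_hash, path_id, info_file)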
Example #16
File: pex.py Project: jsirois/pex
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    def to_python_interpreter(full_path_or_basename):
      if os.path.exists(full_path_or_basename):
        return PythonInterpreter.from_binary(full_path_or_basename)
      else:
        interpreter = PythonInterpreter.from_env(full_path_or_basename)
        if interpreter is None:
          die('Failed to find interpreter: %s' % full_path_or_basename)
        return interpreter

    interpreters = [to_python_interpreter(interp) for interp in options.python or [sys.executable]]

  if options.interpreter_constraint:
    # NB: options.python and interpreter constraints cannot be used together, so this will not
    # affect usages of the interpreter(s) specified by the "--python" command line flag.
    constraints = options.interpreter_constraint
    validate_constraints(constraints)
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
      rc_variables = Variables.from_rc(rc=options.rc_file)
      pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
    else:
      pex_python_path = ""
    interpreters = find_compatible_interpreters(pex_python_path, constraints)

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = min(interpreters)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  def walk_and_do(fn, src_dir):
    src_dir = os.path.normpath(src_dir)
    for root, dirs, files in os.walk(src_dir):
      for f in files:
        src_file_path = os.path.join(root, f)
        dst_path = os.path.relpath(src_file_path, src_dir)
        fn(src_file_path, dst_path)

  for directory in options.sources_directory:
    walk_and_do(pex_builder.add_source, directory)

  for directory in options.resources_directory:
    walk_and_do(pex_builder.add_resource, directory)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.emit_warnings = options.emit_warnings
  pex_info.inherit_path = options.inherit_path
  if options.interpreter_constraint:
    for ic in options.interpreter_constraint:
      pex_builder.add_interpreter_constraint(ic)

  resolvables = resolvables_from_iterable(args, resolver_option_builder, interpreter=interpreter)

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt,
                                              builder=resolver_option_builder,
                                              interpreter=interpreter))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt,
                                    builder=resolver_option_builder,
                                    interpreter=interpreter):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platforms,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed,
                                use_manylinux=options.use_manylinux)

      for resolved_dist in resolveds:
        log('  %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),
            V=options.verbosity)
        pex_builder.add_distribution(resolved_dist.distribution)
        pex_builder.add_requirement(resolved_dist.requirement)
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #17
    def _extract_sdist(
        pex,  # type: PEX
        dest_dir,  # type: str
    ):
        # type: (...) -> None
        chroot = safe_mkdtemp()
        src = os.path.join(chroot, "src")
        safe_mkdir(src)
        excludes = ["__main__.py", "PEX-INFO"]
        if zipfile.is_zipfile(pex.path()):
            PEXEnvironment(pex.path()).explode_code(src, exclude=excludes)
        else:
            shutil.copytree(pex.path(), src, ignore=lambda _dir, _names: excludes)

        pex_info = pex.pex_info()

        name, _ = os.path.splitext(os.path.basename(pex.path()))
        version = "0.0.0+{}".format(pex_info.code_hash)
        zip_safe = pex_info.zip_safe
        py_modules = [os.path.splitext(f)[0] for f in os.listdir(src) if f.endswith(".py")]
        packages = [
            os.path.relpath(os.path.join(root, d), src).replace(os.sep, ".")
            for root, dirs, _ in os.walk(src)
            for d in dirs
        ]
        install_requires = [str(req) for req in pex_info.requirements]

        python_requires = None
        if len(pex_info.interpreter_constraints) == 1:
            python_requires = str(
                PythonIdentity.parse_requirement(pex_info.interpreter_constraints[0]).specifier
            )
        elif pex_info.interpreter_constraints:
            pex_warnings.warn(
                "Omitting `python_requires` for {name} sdist since {pex} has multiple "
                "interpreter constraints:\n{interpreter_constraints}".format(
                    name=name,
                    pex=os.path.normpath(pex.path()),
                    interpreter_constraints="\n".join(
                        "{index}.) {constraint}".format(index=index, constraint=constraint)
                        for index, constraint in enumerate(
                            pex_info.interpreter_constraints, start=1
                        )
                    ),
                )
            )

        entry_points = []
        if pex_info.entry_point and ":" in pex_info.entry_point:
            entry_points = [(name, pex_info.entry_point)]

        with open(os.path.join(chroot, "setup.cfg"), "w") as fp:
            fp.write(
                dedent(
                    """\
                    [metadata]
                    name = {name}
                    version = {version}

                    [options]
                    zip_safe = {zip_safe}
                    {py_modules}
                    {packages}
                    package_dir =
                        =src
                    include_package_data = True

                    {python_requires}
                    {install_requires}

                    [options.entry_points]
                    {entry_points}
                    """
                ).format(
                    name=name,
                    version=version,
                    zip_safe=zip_safe,
                    py_modules=(
                        "py_modules =\n  {}".format("\n  ".join(py_modules)) if py_modules else ""
                    ),
                    packages=(
                        "packages = \n  {}".format("\n  ".join(packages)) if packages else ""
                    ),
                    install_requires=(
                        "install_requires =\n  {}".format("\n  ".join(install_requires))
                        if install_requires
                        else ""
                    ),
                    python_requires=(
                        "python_requires = {}".format(python_requires) if python_requires else ""
                    ),
                    entry_points=(
                        "console_scripts =\n  {}".format(
                            "\n  ".join(
                                "{} = {}".format(name, entry_point)
                                for name, entry_point in entry_points
                            )
                        )
                        if entry_points
                        else ""
                    ),
                )
            )

        with open(os.path.join(chroot, "MANIFEST.in"), "w") as fp:
            fp.write("recursive-include src *")

        with open(os.path.join(chroot, "setup.py"), "w") as fp:
            fp.write("import setuptools; setuptools.setup()")

        spawn_python_job(
            args=["setup.py", "sdist", "--dist-dir", dest_dir],
            interpreter=pex.interpreter,
            expose=["setuptools"],
            cwd=chroot,
        ).wait()
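For orientation, an illustrative render of the ``setup.cfg`` the template above produces for a hypothetical ``app.pex`` with one package, one requirement, and one interpreter constraint (all values made up):

[metadata]
name = app
version = 0.0.0+1234abcd

[options]
zip_safe = True

packages =
  app
package_dir =
    =src
include_package_data = True

python_requires = >=3.6
install_requires =
  requests

[options.entry_points]
console_scripts =
  app = app.main:main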
Example #18
File: pex.py Project: Yasumoto/pex
def build_pex(args, options):
  interpreter = interpreter_from_options(options)

  pex_builder = PEXBuilder(
      path=safe_mkdtemp(),
      interpreter=interpreter,
  )

  pex_info = pex_builder.info

  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  installer = WheelInstaller if options.use_wheel else EggInstaller

  interpreter = interpreter_from_options(options)

  fetchers = [Fetcher(options.repos)]

  if options.pypi:
    fetchers.append(PyPIFetcher())

  if options.indices:
    fetchers.extend(PyPIFetcher(index) for index in options.indices)

  translator = translator_from_options(options)

  if options.use_wheel:
    precedence = (WheelPackage, EggPackage, SourcePackage)
  else:
    precedence = (EggPackage, SourcePackage)

  requirements = options.requirements[:]

  if options.source_dirs:
    temporary_package_root = safe_mkdtemp()

    for source_dir in options.source_dirs:
      try:
        sdist = Packager(source_dir).sdist()
      except installer.Error:
        die('Failed to run installer for %s' % source_dir, CANNOT_DISTILL)

      # record the requirement information
      sdist_pkg = Package.from_href(sdist)
      requirements.append('%s==%s' % (sdist_pkg.name, sdist_pkg.raw_version))

      # copy the source distribution
      shutil.copyfile(sdist, os.path.join(temporary_package_root, os.path.basename(sdist)))

    # Tell pex where to find the packages
    fetchers.append(Fetcher([temporary_package_root]))

  with TRACER.timed('Resolving distributions'):
    resolveds = requirement_resolver(
        requirements,
        fetchers=fetchers,
        translator=translator,
        interpreter=interpreter,
        platform=options.platform,
        precedence=precedence,
        cache=options.cache_dir,
        cache_ttl=options.cache_ttl)

  for pkg in resolveds:
    log('  %s' % pkg, v=options.verbosity)
    pex_builder.add_distribution(pkg)
    pex_builder.add_requirement(pkg.as_requirement())

  if options.entry_point is not None:
    log('Setting entry point to %s' % options.entry_point, v=options.verbosity)
    pex_builder.info.entry_point = options.entry_point
  else:
    log('Creating environment PEX.', v=options.verbosity)

  return pex_builder
Example #19
def build_pex(args, options):
    interpreter = interpreter_from_options(options)

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
    )

    pex_info = pex_builder.info

    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    installer = WheelInstaller if options.use_wheel else EggInstaller

    interpreter = interpreter_from_options(options)

    fetchers = [Fetcher(options.repos)]

    if options.pypi:
        fetchers.append(PyPIFetcher())

    if options.indices:
        fetchers.extend(PyPIFetcher(index) for index in options.indices)

    translator = translator_from_options(options)

    if options.use_wheel:
        precedence = (WheelPackage, EggPackage, SourcePackage)
    else:
        precedence = (EggPackage, SourcePackage)

    requirements = options.requirements[:]

    if options.source_dirs:
        temporary_package_root = safe_mkdtemp()

        for source_dir in options.source_dirs:
            try:
                sdist = Packager(source_dir).sdist()
            except installer.Error:
                die('Failed to run installer for %s' % source_dir,
                    CANNOT_DISTILL)

            # record the requirement information
            sdist_pkg = Package.from_href(sdist)
            requirements.append('%s==%s' %
                                (sdist_pkg.name, sdist_pkg.raw_version))

            # copy the source distribution
            shutil.copyfile(
                sdist,
                os.path.join(temporary_package_root, os.path.basename(sdist)))

        # Tell pex where to find the packages
        fetchers.append(Fetcher([temporary_package_root]))

    with TRACER.timed('Resolving distributions'):
        resolveds = requirement_resolver(requirements,
                                         fetchers=fetchers,
                                         translator=translator,
                                         interpreter=interpreter,
                                         platform=options.platform,
                                         precedence=precedence,
                                         cache=options.cache_dir,
                                         cache_ttl=options.cache_ttl)

    for pkg in resolveds:
        log('  %s' % pkg, v=options.verbosity)
        pex_builder.add_distribution(pkg)
        pex_builder.add_requirement(pkg.as_requirement())

    if options.entry_point is not None:
        log('Setting entry point to %s' % options.entry_point,
            v=options.verbosity)
        pex_builder.info.entry_point = options.entry_point
    else:
        log('Creating environment PEX.', v=options.verbosity)

    return pex_builder
Example #20
    def spawn_download_distributions(
            self,
            download_dir,  # type: str
            requirements=None,  # type: Optional[Iterable[str]]
            requirement_files=None,  # type: Optional[Iterable[str]]
            constraint_files=None,  # type: Optional[Iterable[str]]
            allow_prereleases=False,  # type: bool
            transitive=True,  # type: bool
            target=None,  # type: Optional[DistributionTarget]
            package_index_configuration=None,  # type: Optional[PackageIndexConfiguration]
            cache=None,  # type: Optional[str]
            build=True,  # type: bool
            use_wheel=True,  # type: bool
    ):
        # type: (...) -> Job
        target = target or DistributionTarget.current()

        platform, manylinux = target.get_platform()
        if not use_wheel:
            if not build:
                raise ValueError(
                    "Cannot both ignore wheels (use_wheel=False) and refrain from building "
                    "distributions (build=False).")
            elif target.is_foreign:
                raise ValueError(
                    "Cannot ignore wheels (use_wheel=False) when resolving for a foreign "
                    "platform: {}".format(platform))

        download_cmd = ["download", "--dest", download_dir]
        if target.is_foreign:
            # We're either resolving for a different host / platform or a different interpreter for
            # the current platform that we have no access to; so we need to let pip know and not
            # otherwise pick up platform info from the interpreter we execute pip with.
            download_cmd.extend(
                self._iter_platform_args(
                    platform=platform.platform,
                    impl=platform.impl,
                    version=platform.version,
                    abi=platform.abi,
                    manylinux=manylinux,
                ))

        if target.is_foreign or not build:
            download_cmd.extend(["--only-binary", ":all:"])

        if not use_wheel:
            download_cmd.extend(["--no-binary", ":all:"])

        if allow_prereleases:
            download_cmd.append("--pre")

        if not transitive:
            download_cmd.append("--no-deps")

        if requirement_files:
            for requirement_file in requirement_files:
                download_cmd.extend(["--requirement", requirement_file])

        if constraint_files:
            for constraint_file in constraint_files:
                download_cmd.extend(["--constraint", constraint_file])

        if requirements:
            download_cmd.extend(requirements)

        # The Pip 2020 resolver hides useful dependency conflict information in stdout interspersed
        # with other information we want to suppress. We jump through some hoops here to get at that
        # information and surface it on stderr. See: https://github.com/pypa/pip/issues/9420.
        log = None
        if (self._calculate_resolver_version(
                package_index_configuration=package_index_configuration) ==
                ResolverVersion.PIP_2020):
            log = os.path.join(safe_mkdtemp(), "pip.log")
            download_cmd = ["--log", log] + download_cmd

        command, process = self._spawn_pip_isolated(
            download_cmd,
            package_index_configuration=package_index_configuration,
            cache=cache,
            interpreter=target.get_interpreter(),
        )
        return self._Issue9420Job(command, process, log) if log else Job(
            command, process)
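An illustrative command line the method could assemble for a foreign-platform, wheels-only resolve under the 2020 resolver (paths and requirement are made up; the platform flags are pip's documented ``download`` options, which ``_iter_platform_args`` is assumed to emit):

pip --log /tmp/xyz/pip.log download --dest /tmp/downloads \
    --platform manylinux2014_x86_64 --implementation cp \
    --python-version 38 --abi cp38 \
    --only-binary :all: requests==2.25.1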
Example #21
  def resolve_distributions(self, ignore_errors=False):
    # This method has four stages:
    # 1. Resolve sdists and wheels.
    # 2. Build local projects and sdists.
    # 3. Install wheels in individual chroots.
    # 4. Calculate the final resolved requirements.
    #
    # You'd think we might be able to just pip install all the requirements, but pexes can be
    # multi-platform / multi-interpreter, in which case only a subset of distributions resolved into
    # the PEX should be activated for the runtime interpreter. Sometimes there are platform specific
    # wheels and sometimes python version specific dists (backports being the common case). As such,
    # we need to be able to add each resolved distribution to the `sys.path` individually
    # (`PEXEnvironment` handles this selective activation at runtime). Since pip install only
    # accepts a single location to install all resolved dists, that won't work.
    #
    # This means we need to separately resolve all distributions, then install each in their own
    # chroot. To do this we use `pip download` for the resolve and download of all needed
    # distributions and then `pip install` to install each distribution in its own chroot.
    #
    # As a complicating factor, the runtime activation scheme relies on PEP 425 tags; i.e.: wheel
    # names. Some requirements are only available or applicable in source form - either via sdist,
    # VCS URL or local projects. As such we need to insert a `pip wheel` step to generate wheels for
    # all requirements resolved in source form via `pip download` / inspection of requirements to
    # discover those that are local directories (local setup.py or pyproject.toml python projects).
    #
    # Finally, we must calculate the pinned requirement corresponding to each distribution we
    # resolved along with any environment markers that control which runtime environments the
    # requirement should be activated in.

    if not self._requirements and not self._requirement_files:
      # Nothing to resolve.
      return []

    workspace = safe_mkdtemp()
    cache = self._cache or workspace

    resolved_dists_dir = os.path.join(workspace, 'resolved_dists')
    spawn_resolve = functools.partial(self._spawn_resolve, resolved_dists_dir)
    to_resolve = self._targets

    built_wheels_dir = os.path.join(cache, 'built_wheels')
    spawn_wheel_build = functools.partial(self._spawn_wheel_build, built_wheels_dir)
    to_build = list(self._iter_local_projects())

    installed_wheels_dir = os.path.join(cache, PexInfo.INSTALL_CACHE)
    spawn_install = functools.partial(self._spawn_install, installed_wheels_dir)
    to_install = []

    to_calculate_requirements_for = []

    # 1. Resolve sdists and wheels.
    with TRACER.timed('Resolving for:\n  {}'.format('\n  '.join(map(str, to_resolve)))):
      for resolve_result in self._run_parallel(inputs=to_resolve,
                                               spawn_func=spawn_resolve,
                                               raise_type=Unsatisfiable):
        to_build.extend(resolve_result.build_requests())
        to_install.extend(resolve_result.install_requests())

    if not any((to_build, to_install)):
      # Nothing to build or install.
      return []

    # 2. Build local projects and sdists.
    if to_build:
      with TRACER.timed('Building distributions for:\n  {}'
                        .format('\n  '.join(map(str, to_build)))):

        build_requests, install_requests = self._categorize_build_requests(
          build_requests=to_build,
          dist_root=built_wheels_dir
        )
        to_install.extend(install_requests)

        for build_result in self._run_parallel(inputs=build_requests,
                                               spawn_func=spawn_wheel_build,
                                               raise_type=Untranslateable):
          to_install.extend(build_result.finalize_build())

    # 3. Install wheels in individual chroots.

    # Dedup by wheel name; e.g.: only install universal wheels once even though they'll get
    # downloaded / built for each interpreter or platform.
    install_requests_by_wheel_file = OrderedDict()
    for install_request in to_install:
      install_requests = install_requests_by_wheel_file.setdefault(install_request.wheel_file, [])
      install_requests.append(install_request)

    representative_install_requests = [
      requests[0] for requests in install_requests_by_wheel_file.values()
    ]

    def add_requirements_requests(install_result):
      install_requests = install_requests_by_wheel_file[install_result.request.wheel_file]
      to_calculate_requirements_for.extend(install_result.finalize_install(install_requests))

    with TRACER.timed('Installing:\n  {}'
                      .format('\n  '.join(map(str, representative_install_requests)))):

      install_requests, install_results = self._categorize_install_requests(
        install_requests=representative_install_requests,
        installed_wheels_dir=installed_wheels_dir
      )
      for install_result in install_results:
        add_requirements_requests(install_result)

      for install_result in self._run_parallel(inputs=install_requests,
                                               spawn_func=spawn_install,
                                               raise_type=Untranslateable):
        add_requirements_requests(install_result)

    # 4. Calculate the final resolved requirements.
    with TRACER.timed('Calculating resolved requirements for:\n  {}'
                      .format('\n  '.join(map(str, to_calculate_requirements_for)))):
      distribution_requirements = DistributionRequirements.merged(
        self._run_parallel(
          inputs=to_calculate_requirements_for,
          spawn_func=DistributionRequirements.Request.spawn_calculation,
          raise_type=Untranslateable
        )
      )

    resolved_distributions = OrderedSet()
    for requirements_request in to_calculate_requirements_for:
      for distribution in requirements_request.distributions:
        resolved_distributions.add(
          ResolvedDistribution(
            requirement=distribution_requirements.to_requirement(distribution),
            distribution=distribution
          )
        )

    if not ignore_errors and self._transitive:
      self._check_resolve(resolved_distributions)
    return resolved_distributions
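Stage 3's dedup is the subtle step; a standalone sketch mirroring the ``OrderedDict`` logic above:

from collections import OrderedDict

def dedup_by_wheel_file(install_requests):
  # Install each distinct wheel exactly once, even when several
  # interpreters or platforms resolved the same (e.g. universal) wheel.
  by_wheel = OrderedDict()
  for request in install_requests:
    by_wheel.setdefault(request.wheel_file, []).append(request)
  return [requests[0] for requests in by_wheel.values()]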
Example #22
File: pex.py Project: jneuff/pex
def main(args=None):
    args = args[:] if args else sys.argv[1:]
    args = [transform_legacy_arg(arg) for arg in args]
    parser = configure_clp()

    try:
        separator = args.index("--")
        args, cmdline = args[:separator], args[separator + 1 :]
    except ValueError:
        args, cmdline = args, []

    options = parser.parse_args(args=args)

    # Ensure the TMPDIR is an absolute path (So subprocesses that change CWD can find it) and
    # that it exists.
    tmpdir = os.path.realpath(options.tmpdir)
    if not os.path.exists(tmpdir):
        die("The specified --tmpdir does not exist: {}".format(tmpdir))
    if not os.path.isdir(tmpdir):
        die("The specified --tmpdir is not a directory: {}".format(tmpdir))
    tempfile.tempdir = os.environ["TMPDIR"] = tmpdir

    if options.cache_dir:
        pex_warnings.warn("The --cache-dir option is deprecated, use --pex-root instead.")
        if options.pex_root and options.cache_dir != options.pex_root:
            die(
                "Both --cache-dir and --pex-root were passed with conflicting values. "
                "Just set --pex-root."
            )

    if options.disable_cache:

        def warn_ignore_pex_root(set_via):
            pex_warnings.warn(
                "The pex root has been set via {via} but --disable-cache is also set. "
                "Ignoring {via} and disabling caches.".format(via=set_via)
            )

        if options.cache_dir:
            warn_ignore_pex_root("--cache-dir")
        elif options.pex_root:
            warn_ignore_pex_root("--pex-root")
        elif os.environ.get("PEX_ROOT"):
            warn_ignore_pex_root("PEX_ROOT")

        pex_root = safe_mkdtemp()
    else:
        pex_root = options.cache_dir or options.pex_root or ENV.PEX_ROOT

    if options.python and options.interpreter_constraint:
        die('The "--python" and "--interpreter-constraint" options cannot be used together.')

    with ENV.patch(
        PEX_VERBOSE=str(options.verbosity), PEX_ROOT=pex_root, TMPDIR=tmpdir
    ) as patched_env:
        with TRACER.timed("Building pex"):
            pex_builder = build_pex(options.requirements, options, cache=ENV.PEX_ROOT)

        pex_builder.freeze(bytecode_compile=options.compile)
        interpreter = pex_builder.interpreter
        pex = PEX(
            pex_builder.path(), interpreter=interpreter, verify_entry_point=options.validate_ep
        )

        if options.pex_name is not None:
            log("Saving PEX file to %s" % options.pex_name, V=options.verbosity)
            tmp_name = options.pex_name + "~"
            safe_delete(tmp_name)
            pex_builder.build(
                tmp_name,
                bytecode_compile=options.compile,
                deterministic_timestamp=not options.use_system_time,
            )
            os.rename(tmp_name, options.pex_name)
        else:
            if not _compatible_with_current_platform(interpreter, options.platforms):
                log("WARNING: attempting to run PEX with incompatible platforms!", V=1)
                log(
                    "Running on platform {} but built for {}".format(
                        interpreter.platform, ", ".join(map(str, options.platforms))
                    ),
                    V=1,
                )

            log(
                "Running PEX file at %s with args %s" % (pex_builder.path(), cmdline),
                V=options.verbosity,
            )
            sys.exit(pex.run(args=list(cmdline), env=patched_env))
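The TMPDIR normalization near the top of ``main``, in isolation (``realpath`` keeps the temp root valid for subprocesses that chdir elsewhere; setting both ``tempfile.tempdir`` and the ``TMPDIR`` env var keeps this process and its children in agreement):

import os
import tempfile

def set_tmpdir(path):
  tmpdir = os.path.realpath(path)
  if not os.path.isdir(tmpdir):
    raise ValueError('not a directory: {}'.format(tmpdir))
  tempfile.tempdir = os.environ['TMPDIR'] = tmpdir
  return tmpdir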
Example #23
File: pex.py Project: jneuff/pex
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interp

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path, interpreter_constraints=constraints
                    )
                )
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    interpreter = (
        PythonInterpreter.latest_release_of_min_compatible_version(interpreters)
        if interpreters
        else None
    )

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead."
        )

    for directory in OrderedSet(options.sources_directory + options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    "  %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
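
One detail worth calling out in build_pex above: the preamble is read via open(options.preamble_file) inside a try/except TypeError, exploiting the fact that open(None) raises TypeError. An explicit None check expresses the same thing more directly; a minimal equivalent sketch (the function name is illustrative):

def read_preamble(preamble_file):
    # Mirrors the try/except TypeError idiom: a missing option means "no preamble".
    if preamble_file is None:
        return None
    with open(preamble_file) as fp:
        return fp.read()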
Example #24
def p537_resolve_cache():
    return safe_mkdtemp()
Example #25
  def __init__(self, source_dir, interpreter=None, install_dir=None):
    """Create an installer from an unpacked source distribution in source_dir."""
    self._source_dir = source_dir
    self._install_tmp = install_dir or safe_mkdtemp()
    self._interpreter = interpreter or PythonInterpreter.get()
    self._installed = None
Example #26
def resolve_multi(requirements=None,
                  requirement_files=None,
                  constraint_files=None,
                  allow_prereleases=False,
                  transitive=True,
                  interpreters=None,
                  platforms=None,
                  indexes=None,
                  find_links=None,
                  cache=None,
                  build=True,
                  use_wheel=True,
                  compile=False,
                  manylinux=None,
                  max_parallel_jobs=None,
                  ignore_errors=False):
    """Resolves all distributions needed to meet requirements for multiple distribution targets.

  The resulting distributions are installed in individual chroots that can be independently added
  to `sys.path`

  :keyword requirements: A sequence of requirement strings.
  :type requirements: list of str
  :keyword requirement_files: A sequence of requirement file paths.
  :type requirement_files: list of str
  :keyword constraint_files: A sequence of constraint file paths.
  :type constraint_files: list of str
  :keyword bool allow_prereleases: Whether to include pre-release and development versions when
    resolving requirements. Defaults to ``False``, but any requirements that explicitly request
    prerelease or development versions will override this setting.
  :keyword bool transitive: Whether to resolve transitive dependencies of requirements.
    Defaults to ``True``.
  :keyword interpreters: The interpreters to use for building distributions and for testing
    distribution compatibility. Defaults to the current interpreter.
  :type interpreters: list of :class:`pex.interpreter.PythonInterpreter`
  :keyword platforms: An iterable of PEP425-compatible platform strings to resolve distributions
    for. If ``None`` (the default) or an empty iterable, use the platforms of the given
    interpreters.
  :type platforms: list of str
  :keyword indexes: A list of urls or paths pointing to PEP 503 compliant repositories to search for
    distributions. Defaults to ``None`` which indicates to use the default pypi index. To turn off
    use of all indexes, pass an empty list.
  :type indexes: list of str
  :keyword find_links: A list or URLs, paths to local html files or directory paths. If URLs or
    local html file paths, these are parsed for links to distributions. If a local directory path,
    its listing is used to discover distributons.
  :type find_links: list of str
  :keyword str cache: A directory path to use to cache distributions locally.
  :keyword bool build: Whether to allow building source distributions when no wheel is found.
    Defaults to ``True``.
  :keyword bool use_wheel: Whether to allow resolution of pre-built wheel distributions.
    Defaults to ``True``.
  :keyword bool compile: Whether to pre-compile resolved distribution python sources.
    Defaults to ``False``.
  :keyword str manylinux: The upper bound manylinux standard to support when targeting foreign linux
    platforms. Defaults to ``None``.
  :keyword int max_parallel_jobs: The maximum number of parallel jobs to use when resolving,
    building and installing distributions in a resolve. Defaults to the number of CPUs available.
  :keyword bool ignore_errors: Whether to ignore resolution solver errors. Defaults to ``False``.
  :returns: List of :class:`ResolvedDistribution` instances meeting ``requirements``.
  :raises Unsatisfiable: If ``requirements`` is not transitively satisfiable.
  :raises Untranslateable: If no compatible distributions could be acquired for
    a particular requirement.
  """

    # A resolve happens in four stages broken into two phases:
    # 1. Download phase: resolves sdists and wheels in a single operation per distribution target.
    # 2. Install phase:
    #   1. Build local projects and sdists.
    #   2. Install wheels in individual chroots.
    #   3. Calculate the final resolved requirements.
    #
    # You'd think we might be able to just pip install all the requirements, but pexes can be
    # multi-platform / multi-interpreter, in which case only a subset of distributions resolved into
    # the PEX should be activated for the runtime interpreter. Sometimes there are platform specific
    # wheels and sometimes python version specific dists (backports being the common case). As such,
    # we need to be able to add each resolved distribution to the `sys.path` individually
    # (`PEXEnvironment` handles this selective activation at runtime). Since pip install only
    # accepts a single location to install all resolved dists, that won't work.
    #
    # This means we need to separately resolve all distributions, then install each in their own
    # chroot. To do this we use `pip download` for the resolve and download of all needed
    # distributions and then `pip install` to install each distribution in its own chroot.
    #
    # As a complicating factor, the runtime activation scheme relies on PEP 425 tags; i.e.: wheel
    # names. Some requirements are only available or applicable in source form - either via sdist,
    # VCS URL or local projects. As such we need to insert a `pip wheel` step to generate wheels for
    # all requirements resolved in source form via `pip download` / inspection of requirements to
    # discover those that are local directories (local setup.py or pyproject.toml python projects).
    #
    # Finally, we must calculate the pinned requirement corresponding to each distribution we
    # resolved along with any environment markers that control which runtime environments the
    # requirement should be activated in.

    workspace = safe_mkdtemp()

    build_requests, download_results = _download_internal(
        interpreters=interpreters,
        platforms=platforms,
        requirements=requirements,
        requirement_files=requirement_files,
        constraint_files=constraint_files,
        allow_prereleases=allow_prereleases,
        transitive=transitive,
        indexes=indexes,
        find_links=find_links,
        cache=cache,
        build=build,
        use_wheel=use_wheel,
        manylinux=manylinux,
        dest=workspace,
        max_parallel_jobs=max_parallel_jobs)

    install_requests = []
    if download_results is not None:
        for download_result in download_results:
            build_requests.extend(download_result.build_requests())
            install_requests.extend(download_result.install_requests())

    build_and_install_request = BuildAndInstallRequest(
        build_requests=build_requests,
        install_requests=install_requests,
        indexes=indexes,
        find_links=find_links,
        cache=cache,
        compile=compile)

    ignore_errors = ignore_errors or not transitive
    return list(
        build_and_install_request.install_distributions(
            ignore_errors=ignore_errors,
            workspace=workspace,
            max_parallel_jobs=max_parallel_jobs))
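
The comment block in resolve_multi boils down to "resolve once, then install every distribution into its own chroot". Outside of pex, the same two-phase shape can be approximated with plain pip; a rough sketch assuming pip and network access are available (the requirement and directory layout are illustrative):

import os
import subprocess
import sys
import tempfile

workspace = tempfile.mkdtemp()
download_dir = os.path.join(workspace, "downloads")

# Phase 1: resolve and download all needed sdists and wheels in one operation.
subprocess.check_call(
    [sys.executable, "-m", "pip", "download", "--dest", download_dir, "ansicolors"]
)

# Phase 2: install each downloaded artifact into its own chroot so individual
# distributions can later be placed on sys.path selectively.
for artifact in os.listdir(download_dir):
    chroot = os.path.join(workspace, "installed", artifact)
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "--target", chroot, "--no-deps",
         os.path.join(download_dir, artifact)]
    )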
Example #27
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    interpreters = [
      get_interpreter(interpreter,
                      options.interpreter_cache_dir,
                      options.repos,
                      options.use_wheel)
      for interpreter in options.python or [None]
    ]

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = _lowest_version_interpreter(interpreters)
  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt, resolver_option_builder):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platform,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed)

      for dist in resolveds:
        log('  %s' % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
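
As the comment in the loop notes, pip's constraints files share the requirements file syntax; the only difference in this code path is the is_constraint flag set while parsing. A self-contained sketch of that marking step (parse_requirements here is an illustrative stand-in, not pex's parser):

def parse_requirements(path):
    # Illustrative stand-in: one requirement per line, skipping blanks and comments.
    with open(path) as fp:
        return [line.strip() for line in fp if line.strip() and not line.startswith("#")]

def load_constraints(path):
    # Same format as requirements; each entry is simply tagged as a constraint.
    return [{"requirement": req, "is_constraint": True} for req in parse_requirements(path)]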
Example #28
File: pex.py Project: ttreptow/pex
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreters', V=2):
        interpreters = [
            get_interpreter(interpreter, options.interpreter_cache_dir,
                            options.repos, options.use_wheel)
            for interpreter in options.python or [None]
        ]

    if options.interpreter_constraint:
        # NB: options.python and interpreter constraints cannot be used together, so this will not
        # affect usages of the interpreter(s) specified by the "--python" command line flag.
        constraints = options.interpreter_constraint
        validate_constraints(constraints)
        rc_variables = Variables.from_rc(rc=options.rc_file)
        pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
        interpreters = find_compatible_interpreters(pex_python_path,
                                                    constraints)

    if not interpreters:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters)

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        resolver_option_builder):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolve_multi(
                resolvables,
                interpreters=interpreters,
                platforms=options.platform,
                cache=options.cache_dir,
                cache_ttl=options.cache_ttl,
                allow_prereleases=resolver_option_builder.prereleases_allowed)

            for dist in resolveds:
                log('  %s' % dist, v=options.verbosity)
                pex_builder.add_distribution(dist)
                pex_builder.add_requirement(dist.as_requirement())
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #29
    def _extract(
        self,
        pex,  # type: PEX
        options,  # type: Namespace
    ):
        # type: (...) -> Result
        if not options.serve and not options.dest_dir:
            return Error("Specify a --find-links directory to extract wheels to.")

        dest_dir = (
            os.path.abspath(os.path.expanduser(options.dest_dir))
            if options.dest_dir
            else safe_mkdtemp()
        )
        safe_mkdir(dest_dir)

        if options.sources:
            self._extract_sdist(pex, dest_dir)

        def spawn_extract(distribution):
            # type: (Distribution) -> SpawnedJob[Text]
            job = spawn_python_job(
                args=["-m", "wheel", "pack", "--dest-dir", dest_dir, distribution.location],
                interpreter=pex.interpreter,
                expose=["wheel"],
                stdout=subprocess.PIPE,
            )
            return SpawnedJob.stdout(
                job, result_func=lambda out: "{}: {}".format(distribution, out.decode())
            )

        with self._distributions_output(pex, options) as (distributions, output):
            errors = []
            for result in execute_parallel(distributions, spawn_extract, error_handler=Retain()):
                if isinstance(result, tuple):
                    distribution, error = result
                    errors.append(distribution)
                    output.write(
                        "Failed to build a wheel for {distribution}: {error}\n".format(
                            distribution=distribution, error=error
                        )
                    )
                else:
                    output.write(result)
            if errors:
                return Error(
                    "Failed to build wheels for {count} {distributions}.".format(
                        count=len(errors), distributions=pluralize(errors, "distribution")
                    )
                )

        if not options.serve:
            return Ok()

        repo = FindLinksRepo.serve(
            interpreter=pex.interpreter, port=options.port, directory=dest_dir
        )
        output.write(
            "Serving find-links repo of {pex} via {find_links} at http://localhost:{port}\n".format(
                pex=os.path.normpath(pex.path()), find_links=dest_dir, port=repo.port
            )
        )
        if options.pid_file:
            with safe_open(options.pid_file, "w") as fp:
                fp.write("{}:{}".format(repo.pid, repo.port))
        try:
            return Result(exit_code=repo.join(), message=" ".join(repo.cmd))
        except KeyboardInterrupt:
            repo.kill()
            return Ok("Shut down server for find links repo at {}.".format(dest_dir))
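
The spawn_extract helper above ultimately shells out to the wheel tool to re-pack an installed distribution directory into a .whl. Stripped of pex's job-spawning machinery, the underlying command is roughly the following (assuming the wheel package is installed and the directory is an unpacked wheel tree with its .dist-info intact):

import subprocess
import sys

def pack_wheel(unpacked_wheel_dir, dest_dir):
    # Rebuild a .whl file from an unpacked wheel directory.
    subprocess.check_call(
        [sys.executable, "-m", "wheel", "pack", "--dest-dir", dest_dir, unpacked_wheel_dir]
    )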
Example #30
def test_issues_789_demo(pex_project_dir):
  tmpdir = safe_mkdtemp()

  # 1. Imagine we've pre-resolved the requirements needed in our wheel house.
  requirements = [
    'ansicolors',
    'isort',
    'setuptools'  # N.B.: isort doesn't declare its setuptools dependency.
  ]

  wheelhouse = os.path.join(tmpdir, 'wheelhouse')
  get_pip().spawn_download_distributions(
    download_dir=wheelhouse,
    requirements=requirements
  ).wait()

  # 2. Also imagine this configuration is passed to a tool (PEX or a wrapper as in this test
  # example) via the CLI or other configuration data sources. For example, Pants has a `PythonSetup`
  # that combines with BUILD target data to get you this sort of configuration info outside pex.
  resolver_settings = dict(
    indexes=[],  # Turn off pypi.
    find_links=[wheelhouse],  # Use our wheel house.
    build=False,  # Use only pre-built wheels.
  )

  # 3. That same configuration was used to build a standard pex:
  resolver_args = []
  if len(resolver_settings['indexes']) == 0:
    resolver_args.append('--no-index')
  else:
    for index in resolver_settings['indexes']:
      resolver_args.extend(['--index', index])

  for repo in resolver_settings['find_links']:
    resolver_args.extend(['--find-links', repo])

  resolver_args.append('--build' if resolver_settings['build'] else '--no-build')

  project_code_dir = os.path.join(tmpdir, 'project_code_dir')
  with safe_open(os.path.join(project_code_dir, 'colorized_isort.py'), 'w') as fp:
    fp.write(dedent("""\
      import colors
      import os
      import subprocess
      import sys


      def run():
        env = os.environ.copy()
        env.update(PEX_MODULE='isort')
        isort_process = subprocess.Popen(
          sys.argv,
          env=env,
          stdout=subprocess.PIPE,
          stderr=subprocess.PIPE
        )
        stdout, stderr = isort_process.communicate()
        print(colors.green(stdout.decode('utf-8')))
        print(colors.red(stderr.decode('utf-8')))
        sys.exit(isort_process.returncode)
    """))

  colorized_isort_pex = os.path.join(tmpdir, 'colorized_isort.pex')
  args = [
    '--sources-directory', project_code_dir,
    '--entry-point', 'colorized_isort:run',
    '--output-file', colorized_isort_pex
  ]
  result = run_pex_command(args + resolver_args + requirements)
  result.assert_success()

  # 4. Now the tool builds a "dehydrated" PEX using the standard pex + resolve settings as the
  # template.
  ptex_cache = os.path.join(tmpdir, '.ptex')

  colorized_isort_pex_info = PexInfo.from_pex(colorized_isort_pex)
  colorized_isort_pex_info.pex_root = ptex_cache

  # Force the standard pex to extract its code. An external tool like Pants would already know the
  # original source code file paths, but we need to discover them here.
  colorized_isort_pex_code_dir = os.path.join(
    colorized_isort_pex_info.zip_unsafe_cache,
    colorized_isort_pex_info.code_hash
  )
  env = os.environ.copy()
  env.update(PEX_ROOT=ptex_cache, PEX_INTERPRETER='1', PEX_FORCE_LOCAL='1')
  subprocess.check_call([colorized_isort_pex, '-c', ''], env=env)

  colorized_isort_ptex_code_dir = os.path.join(tmpdir, 'colorized_isort_ptex_code_dir')
  safe_mkdir(colorized_isort_ptex_code_dir)

  code = []
  for root, dirs, files in os.walk(colorized_isort_pex_code_dir):
    rel_root = os.path.relpath(root, colorized_isort_pex_code_dir)
    for f in files:
      # Don't ship compiled python from the code extract above; the target interpreter will not
      # match ours in general.
      if f.endswith('.pyc'):
        continue
      rel_path = os.path.normpath(os.path.join(rel_root, f))
      # The root __main__.py is special for any zipapp, including pex; let it write its own
      # __main__.py bootstrap. Similarly, PEX-INFO is special to pex and we want the PEX-INFO for
      # the ptex pex, not the pex being ptexed.
      if rel_path in ('__main__.py', PexInfo.PATH):
        continue
      os.symlink(os.path.join(root, f), os.path.join(colorized_isort_ptex_code_dir, rel_path))
      code.append(rel_path)

  ptex_code_dir = os.path.join(tmpdir, 'ptex_code_dir')

  ptex_info = dict(code=code, resolver_settings=resolver_settings)
  with safe_open(os.path.join(ptex_code_dir, 'PTEX-INFO'), 'w') as fp:
    json.dump(ptex_info, fp)

  with safe_open(os.path.join(ptex_code_dir, 'IPEX-INFO'), 'w') as fp:
    fp.write(colorized_isort_pex_info.dump())

  with safe_open(os.path.join(ptex_code_dir, 'ptex.py'), 'w') as fp:
    fp.write(dedent("""\
      import json
      import os
      import sys

      from pex import resolver
      from pex.common import open_zip
      from pex.pex_builder import PEXBuilder
      from pex.pex_info import PexInfo
      from pex.util import CacheHelper
      from pex.variables import ENV

      self = sys.argv[0]
      ipex_file = '{}.ipex'.format(os.path.splitext(self)[0])

      if not os.path.isfile(ipex_file):
        print('Hydrating {} to {}'.format(self, ipex_file))

        ptex_pex_info = PexInfo.from_pex(self)
        code_root = os.path.join(ptex_pex_info.zip_unsafe_cache, ptex_pex_info.code_hash)
        with open_zip(self) as zf:
          # Populate the pex with the pinned requirements and distribution names & hashes.
          ipex_info = PexInfo.from_json(zf.read('IPEX-INFO'))
          ipex_builder = PEXBuilder(pex_info=ipex_info)

          # Populate the pex with the needed code.
          ptex_info = json.loads(zf.read('PTEX-INFO').decode('utf-8'))
          for path in ptex_info['code']:
            ipex_builder.add_source(os.path.join(code_root, path), path)

        # Perform a fully pinned intransitive resolve to hydrate the install cache (not the
        # pex!).
        resolver_settings = ptex_info['resolver_settings']
        resolved_distributions = resolver.resolve(
          requirements=[str(req) for req in ipex_info.requirements],
          cache=ipex_info.pex_root,
          transitive=False,
          **resolver_settings
        )

        ipex_builder.build(ipex_file)

      os.execv(ipex_file, [ipex_file] + sys.argv[1:])
    """))

  colorized_isort_ptex = os.path.join(tmpdir, 'colorized_isort.ptex')

  result = run_pex_command([
    '--not-zip-safe',
    '--always-write-cache',
    '--pex-root', ptex_cache,
    pex_project_dir,
    '--sources-directory', ptex_code_dir,
    '--sources-directory', colorized_isort_ptex_code_dir,
    '--entry-point', 'ptex',
    '--output-file', colorized_isort_ptex
  ])
  result.assert_success()

  subprocess.check_call([colorized_isort_ptex, '--version'])
  with pytest.raises(CalledProcessError):
    subprocess.check_call([colorized_isort_ptex, '--not-a-flag'])

  safe_rmtree(ptex_cache)

  # The dehydrated pex now fails since it lost its hydration from the cache.
  with pytest.raises(CalledProcessError):
    subprocess.check_call([colorized_isort_ptex, '--version'])
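
Everything in this test keys off PEX-INFO metadata. A PEX file is a zipapp with a JSON document named PEX-INFO at its root, so the same metadata can be read with nothing but the standard library; a minimal sketch (the path and keys shown are illustrative):

import json
import zipfile

def read_pex_info(pex_path):
    # PEX metadata is stored as JSON under the archive member name 'PEX-INFO'.
    with zipfile.ZipFile(pex_path) as zf:
        return json.loads(zf.read("PEX-INFO").decode("utf-8"))

# e.g.: read_pex_info("colorized_isort.pex")["code_hash"]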
Example #31
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreter', V=2):
        interpreter = interpreter_from_options(options)

    if interpreter is None:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        resolver_option_builder):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

    if options.cache_dir:
        resolver = CachingResolver(options.cache_dir, options.cache_ttl,
                                   **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as e:
            die(e)

    for dist in resolveds:
        log('  %s' % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #32
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed('Resolving interpreters', V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(
                        full_path_or_basename)
                    if interpreter is None:
                        die('Failed to find interpreter: %s' %
                            full_path_or_basename)
                    return interpreter

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed('Resolving interpreters', V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
                rc_variables = Variables.from_rc(rc=options.rc_file)
                pex_python_path = rc_variables.get('PEX_PYTHON_PATH', None)
            else:
                pex_python_path = None
            try:
                interpreters = list(
                    iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message(
                        'Could not find a compatible interpreter.'),
                    CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters) if interpreters else None

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed(
            'Resolving distributions ({})'.format(reqs +
                                                  options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert)

        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=options.platforms,
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors)

            for resolved_dist in resolveds:
                log('  %s -> %s' %
                    (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity)
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #33
    def install_distributions(self,
                              ignore_errors=False,
                              workspace=None,
                              max_parallel_jobs=None):
        if not any((self._build_requests, self._install_requests)):
            # Nothing to build or install.
            return []

        cache = self._cache or workspace or safe_mkdtemp()

        built_wheels_dir = os.path.join(cache, 'built_wheels')
        spawn_wheel_build = functools.partial(self._spawn_wheel_build,
                                              built_wheels_dir)

        installed_wheels_dir = os.path.join(cache, PexInfo.INSTALL_CACHE)
        spawn_install = functools.partial(self._spawn_install,
                                          installed_wheels_dir)

        to_install = self._install_requests[:]
        to_calculate_requirements_for = []

        # 1. Build local projects and sdists.
        if self._build_requests:
            with TRACER.timed('Building distributions for:'
                              '\n  {}'.format('\n  '.join(
                                  map(str, self._build_requests)))):

                build_requests, install_requests = self._categorize_build_requests(
                    build_requests=self._build_requests,
                    dist_root=built_wheels_dir)
                to_install.extend(install_requests)

                for build_result in execute_parallel(
                        inputs=build_requests,
                        spawn_func=spawn_wheel_build,
                        raise_type=Untranslateable,
                        max_jobs=max_parallel_jobs):
                    to_install.extend(build_result.finalize_build())

        # 2. Install wheels in individual chroots.

        # Dedup by wheel name; e.g.: only install universal wheels once even though they'll get
        # downloaded / built for each interpreter or platform.
        install_requests_by_wheel_file = OrderedDict()
        for install_request in to_install:
            install_requests = install_requests_by_wheel_file.setdefault(
                install_request.wheel_file, [])
            install_requests.append(install_request)

        representative_install_requests = [
            requests[0]
            for requests in install_requests_by_wheel_file.values()
        ]

        def add_requirements_requests(install_result):
            install_requests = install_requests_by_wheel_file[
                install_result.request.wheel_file]
            to_calculate_requirements_for.extend(
                install_result.finalize_install(install_requests))

        with TRACER.timed('Installing:'
                          '\n  {}'.format('\n  '.join(
                              map(str, representative_install_requests)))):

            install_requests, install_results = self._categorize_install_requests(
                install_requests=representative_install_requests,
                installed_wheels_dir=installed_wheels_dir)
            for install_result in install_results:
                add_requirements_requests(install_result)

            for install_result in execute_parallel(inputs=install_requests,
                                                   spawn_func=spawn_install,
                                                   raise_type=Untranslateable,
                                                   max_jobs=max_parallel_jobs):
                add_requirements_requests(install_result)

        # 3. Calculate the final installed requirements.
        with TRACER.timed('Calculating installed requirements for:'
                          '\n  {}'.format('\n  '.join(
                              map(str, to_calculate_requirements_for)))):
            distribution_requirements = DistributionRequirements.merged(
                execute_parallel(inputs=to_calculate_requirements_for,
                                 spawn_func=DistributionRequirements.Request.
                                 spawn_calculation,
                                 raise_type=Untranslateable,
                                 max_jobs=max_parallel_jobs))

        installed_distributions = OrderedSet()
        for requirements_request in to_calculate_requirements_for:
            for distribution in requirements_request.distributions:
                installed_distributions.add(
                    InstalledDistribution(
                        target=requirements_request.target,
                        requirement=distribution_requirements.to_requirement(
                            distribution),
                        distribution=distribution))

        if not ignore_errors:
            self._check_install(installed_distributions)
        return installed_distributions
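
The wheel-file dedup in step 2 above is a general grouping pattern: bucket work items by a key, run the expensive step once per bucket, then fan results back out to every original request. A freestanding sketch (the wheel names are illustrative):

from collections import OrderedDict

requests = [
    ("colors-1.1.8-py2.py3-none-any.whl", "cp37"),
    ("colors-1.1.8-py2.py3-none-any.whl", "cp38"),
    ("isort-4.3.21-py2.py3-none-any.whl", "cp38"),
]

by_wheel = OrderedDict()
for wheel_file, target in requests:
    # Group every request for the same wheel so it is only installed once.
    by_wheel.setdefault(wheel_file, []).append(target)

for wheel_file, targets in by_wheel.items():
    print("install {} once, then share it across {}".format(wheel_file, targets))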
Example #34
def build_pex(args, options):
  interpreter = interpreter_from_options(options)

  pex_builder = PEXBuilder(
      path=safe_mkdtemp(),
      interpreter=interpreter,
  )

  pex_info = pex_builder.info

  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  installer = WheelInstaller if options.use_wheel else EggInstaller

  fetchers = [Fetcher(options.repos)]

  if options.pypi:
    fetchers.append(PyPIFetcher())

  if options.indices:
    fetchers.extend(PyPIFetcher(index) for index in options.indices)

  translator = translator_from_options(options)

  if options.use_wheel:
    precedence = (WheelPackage, EggPackage, SourcePackage)
  else:
    precedence = (EggPackage, SourcePackage)

  with TRACER.timed('Resolving distributions'):
    resolveds = requirement_resolver(
        options.requirements,
        fetchers=fetchers,
        translator=translator,
        interpreter=interpreter,
        platform=options.platform,
        precedence=precedence,
        cache=options.cache_dir,
        cache_ttl=options.cache_ttl)

  for pkg in resolveds:
    log('  %s' % pkg, v=options.verbosity)
    pex_builder.add_distribution(pkg)
    pex_builder.add_requirement(pkg.as_requirement())

  for source_dir in options.source_dirs:
    try:
      bdist = installer(source_dir).bdist()
    except installer.Error:
      die('Failed to run installer for %s' % source_dir, CANNOT_DISTILL)
    pex_builder.add_dist_location(bdist)

  if options.entry_point is not None:
    log('Setting entry point to %s' % options.entry_point, v=options.verbosity)
    pex_builder.info.entry_point = options.entry_point
  else:
    log('Creating environment PEX.', v=options.verbosity)

  return pex_builder
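
A closing note: every example on this page funnels scratch work through safe_mkdtemp. A minimal sketch of what such a helper can look like, assuming only the standard library (pex's real implementation additionally coordinates cleanup across forked processes; this version just registers a process-exit hook):

import atexit
import shutil
import tempfile

def safe_mkdtemp_sketch(**kw):
    # Create a temp dir and make a best-effort attempt to remove it at process exit.
    path = tempfile.mkdtemp(**kw)
    atexit.register(shutil.rmtree, path, True)  # True -> ignore_errors
    return path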