Example #1
    def _create_binary(self, binary_tgt, results_dir):
        """Create a .pex file for the specified binary target."""
        # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
        # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
        # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
        # metadata, and so on, which the merging code would have to handle specially).
        interpreter = self.context.products.get_data(PythonInterpreter)
        with temporary_dir() as tmpdir:
            # Create the pex_info for the binary.
            run_info_dict = self.context.run_tracker.run_info.get_as_dict()
            build_properties = PexInfo.make_build_properties()
            build_properties.update(run_info_dict)
            pex_info = binary_tgt.pexinfo.copy()
            pex_info.build_properties = build_properties

            builder = PEXBuilder(path=tmpdir,
                                 interpreter=interpreter,
                                 pex_info=pex_info,
                                 copy=True)

            if binary_tgt.shebang:
                self.context.log.info(
                    'Found Python binary target {} with customized shebang, using it: {}'
                    .format(binary_tgt.name, binary_tgt.shebang))
                builder.set_shebang(binary_tgt.shebang)
            else:
                self.context.log.debug(
                    'No customized shebang found for {}'.format(
                        binary_tgt.name))

            # Find which targets provide sources and which specify requirements.
            source_tgts = []
            req_tgts = []
            for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
                if has_python_sources(tgt) or has_resources(tgt):
                    source_tgts.append(tgt)
                elif has_python_requirements(tgt):
                    req_tgts.append(tgt)
                # Add target's interpreter compatibility constraints to pex info.
                if is_python_target(tgt):
                    for constraint in tgt.compatibility:
                        builder.add_interpreter_constraint(constraint)

            # Dump everything into the builder's chroot.
            for tgt in source_tgts:
                dump_sources(builder, tgt, self.context.log)
            # We need to ensure that we are resolving for only the current platform if we are
            # including local python dist targets that have native extensions.
            build_for_current_platform_only_check(self.context.targets())
            dump_requirement_libs(builder,
                                  interpreter,
                                  req_tgts,
                                  self.context.log,
                                  platforms=binary_tgt.platforms)

            # Build the .pex file.
            pex_path = os.path.join(results_dir,
                                    '{}.pex'.format(binary_tgt.name))
            builder.build(pex_path)
            return pex_path
Example #2
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreter', V=2):
    interpreter = interpreter_from_options(options)

  if interpreter is None:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt, resolver_option_builder):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

  if options.cache_dir:
    resolver = CachingResolver(options.cache_dir, options.cache_ttl, **resolver_kwargs)
  else:
    resolver = Resolver(**resolver_kwargs)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolver.resolve(resolvables)
    except Unsatisfiable as e:
      die(e)

  for dist in resolveds:
    log('  %s' % dist, v=options.verbosity)
    pex_builder.add_distribution(dist)
    pex_builder.add_requirement(dist.as_requirement())

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #3
def test_pex_builder_shebang():
  pb = PEXBuilder()
  pb.set_shebang('foobar')

  with temporary_dir() as td:
    target = os.path.join(td, 'foo.pex')
    pb.build(target)
    expected_preamble = b'#!foobar\n'
    with open(target, 'rb') as fp:
      assert fp.read(len(expected_preamble)) == expected_preamble
Example #4
def test_pex_builder_shebang():
    pb = PEXBuilder()
    pb.set_shebang('foobar')

    with temporary_dir() as td:
        target = os.path.join(td, 'foo.pex')
        pb.build(target)
        expected_preamble = b'#!foobar\n'
        with open(target, 'rb') as fp:
            assert fp.read(len(expected_preamble)) == expected_preamble
Example #5
File: pex.py  Project: windie/heron
def build_pex(args, options, resolver_option_builder, interpreter=None):
    if interpreter is None:
        with TRACER.timed('Resolving interpreter', V=2):
            interpreter = interpreter_from_options(options)

    if interpreter is None:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

    if options.cache_dir:
        resolver = CachingResolver(options.cache_dir, options.cache_ttl,
                                   **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as e:
            die(e)

    for dist in resolveds:
        log('  %s' % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #6
    def _configure_shebang(self, targets, path, interpreter, pex_info):
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info,
                             copy=True)

        binary_targets = [
            target for target in targets if isinstance(target, PythonBinary)
        ]
        if len(binary_targets) == 1 and binary_targets[0].shebang:
            builder.set_shebang(binary_targets[0].shebang)
Example #7
File: pex.py  Project: twitter/heron
def build_pex(args, options, resolver_option_builder, interpreter=None):
    if interpreter is None:
        with TRACER.timed("Resolving interpreter", V=2):
            interpreter = interpreter_from_options(options)

    if interpreter is None:
        die("Could not find compatible interpreter", CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

    for requirements_txt in options.requirement_files:
        resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=options.platform)

    if options.cache_dir:
        resolver = CachingResolver(options.cache_dir, options.cache_ttl, **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    with TRACER.timed("Resolving distributions"):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as e:
            die(e)

    for dist in resolveds:
        log("  %s" % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #8
  def _create_binary(self, binary_tgt, results_dir):
    """Create a .pex file for the specified binary target."""
    # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
    # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
    # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
    # metadata, and so on, which the merging code would have to handle specially).
    interpreter = self.context.products.get_data(PythonInterpreter)
    with temporary_dir() as tmpdir:
      # Create the pex_info for the binary.
      run_info_dict = self.context.run_tracker.run_info.get_as_dict()
      build_properties = PexInfo.make_build_properties()
      build_properties.update(run_info_dict)
      pex_info = binary_tgt.pexinfo.copy()
      pex_info.build_properties = build_properties

      builder = PEXBuilder(path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True)

      if binary_tgt.shebang:
        self.context.log.info('Found Python binary target {} with customized shebang, using it: {}'
                                .format(binary_tgt.name, binary_tgt.shebang))
        builder.set_shebang(binary_tgt.shebang)
      else:
        self.context.log.debug('No customized shebang found for {}'.format(binary_tgt.name))

      # Find which targets provide sources and which specify requirements.
      source_tgts = []
      req_tgts = []
      for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
        if has_python_sources(tgt) or has_resources(tgt):
          source_tgts.append(tgt)
        elif has_python_requirements(tgt):
          req_tgts.append(tgt)
        # Add target's interpreter compatibility constraints to pex info.
        if is_python_target(tgt):
          for constraint in tgt.compatibility:
            builder.add_interpreter_constraint(constraint)

      # Dump everything into the builder's chroot.
      for tgt in source_tgts:
        dump_sources(builder, tgt, self.context.log)
      # We need to ensure that we are resolving for only the current platform if we are
      # including local python dist targets that have native extensions.
      self._python_native_code_settings.check_build_for_current_platform_only(self.context.targets())
      dump_requirement_libs(builder, interpreter, req_tgts, self.context.log,
                            platforms=binary_tgt.platforms)

      # Build the .pex file.
      pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name))
      builder.build(pex_path)
      return pex_path
Example #9
def build_pex(args):
    with TRACER.timed('Resolving interpreter', V=2):
        interpreter = _establish_interpreter(args)

    if interpreter is None:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=_PREAMBLE)

    pex_info = pex_builder.info

    pex_info.zip_safe = False
    pex_info.always_write_cache = True
    pex_info.inherit_path = False

    resolver_option_builder = _establish_resolver_options(args)
    reqs = args.reqs
    resolvables = [Resolvable.get(req, resolver_option_builder) for req in reqs]

    for requirements_txt in args.requirement_files:
        resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

    resolver_kwargs = dict(interpreter=interpreter, platform=args.platform)
    _add_spex_deps(resolvables, pex_builder, resolver_option_builder=resolver_option_builder)

    if not args.disable_cache:
        resolver = CachingResolver(args.cache_dir, args.cache_ttl, **resolver_kwargs)
    else:
        resolver = Resolver(**resolver_kwargs)

    resolveds = []
    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolver.resolve(resolvables)
        except Unsatisfiable as exception:
            die(exception)

    for dist in resolveds:
        log('  %s' % dist, verbose=args.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())

    pex_builder.set_entry_point('spex:spex')

    if args.python_shebang:
        pex_builder.set_shebang(args.python_shebang)

    return pex_builder
Example #10
File: pex.py  Project: t-cas/pex
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreters', V=2):
        interpreters = [
            get_interpreter(interpreter, options.interpreter_cache_dir,
                            options.repos, options.use_wheel)
            for interpreter in options.python or [None]
        ]

    if not interpreters:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = _lowest_version_interpreter(interpreters)
    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        resolver_option_builder):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolve_multi(resolvables,
                                      interpreters=interpreters,
                                      platforms=options.platform,
                                      cache=options.cache_dir,
                                      cache_ttl=options.cache_ttl)

            for dist in resolveds:
                log('  %s' % dist, v=options.verbosity)
                pex_builder.add_distribution(dist)
                pex_builder.add_requirement(dist.as_requirement())
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #11
def builder(shebang):
  pb = PEXBuilder()
  pb.set_shebang(shebang)
  return pb
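
A minimal, self-contained sketch (not from any of the projects above) of how a helper like this could be exercised: build a PEX with a custom shebang into a temporary directory and check that the shebang becomes the first line of the output, mirroring the tests in Examples #3 and #4. The import path and the output file name are assumptions; the pex library must be installed.

import os
import tempfile

from pex.pex_builder import PEXBuilder  # assumes the `pex` package is installed


def builder(shebang):
    pb = PEXBuilder()
    pb.set_shebang(shebang)
    return pb


with tempfile.TemporaryDirectory() as td:
    target = os.path.join(td, 'app.pex')  # hypothetical output name
    builder('/usr/bin/env python3').build(target)
    # set_shebang prepends '#!' if it is missing, so the built file should
    # start with this exact preamble.
    with open(target, 'rb') as fp:
        assert fp.read().startswith(b'#!/usr/bin/env python3\n')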
Example #12
def build_pex(reqs, options):
    interpreters = None  # Default to the current interpreter.

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed('Resolving interpreters', V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.exists(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(
                        full_path_or_basename)
                    if interpreter is None:
                        die('Failed to find interpreter: %s' %
                            full_path_or_basename)
                    return interpreter

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed('Resolving interpreters', V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
                rc_variables = Variables.from_rc(rc=options.rc_file)
                pex_python_path = rc_variables.get('PEX_PYTHON_PATH', None)
            else:
                pex_python_path = None
            interpreters = list(
                iter_compatible_interpreters(pex_python_path, constraints))
            if not interpreters:
                die('Could not find compatible interpreter',
                    CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters) if interpreters else None

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    with TRACER.timed(
            'Resolving distributions ({})'.format(reqs +
                                                  options.requirement_files)):
        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=options.platforms,
                indexes=indexes,
                find_links=options.find_links,
                cache=options.cache_dir,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors)

            for resolved_dist in resolveds:
                log('  %s -> %s' %
                    (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity)
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #13
def main():
    parser = optparse.OptionParser(usage="usage: %prog [options] output")
    parser.add_option("--entry-point", default="__main__")
    parser.add_option("--directory", action="store_true", default=False)
    parser.add_option(
        "--no-zip-safe", action="store_false", dest="zip_safe", default=True
    )
    parser.add_option("--python", default="")
    parser.add_option("--python-version", default="")
    parser.add_option("--python-shebang", default=None)
    parser.add_option("--preload", action="append", default=[])
    options, args = parser.parse_args()
    if len(args) == 1:
        output = args[0]
    else:
        parser.error("'output' positional argument is required")
        return 1

    # The manifest is passed via stdin, as it can sometimes get too large
    # to be passed as a command-line argument.
    manifest = json.load(sys.stdin)

    # The version of pkg_resources.py (from setuptools) on some distros is
    # too old for PEX.  So we keep a recent version in the buck repo and
    # force it into the process by constructing a custom PythonInterpreter
    # instance using it.
    if not options.python:
        options.python = sys.executable
        identity = PythonIdentity.get()
    elif not options.python_version:
        # Note: this is expensive (~500ms). prefer passing --python-version when possible.
        identity = PythonInterpreter.from_binary(options.python).identity
    else:
        # Convert "CPython 2.7" to "CPython 2 7 0"
        python_version = options.python_version.replace(".", " ").split()
        if len(python_version) == 3:
            python_version.append("0")
        identity = PythonIdentity.from_id_string(" ".join(python_version))

    interpreter = PythonInterpreter(options.python, identity, extras={})

    pex_builder = PEXBuilder(
        path=output if options.directory else None, interpreter=interpreter
    )

    if options.python_shebang is not None:
        pex_builder.set_shebang(options.python_shebang)

    # Set whether this PEX is zip-safe, meaning everything will stay zipped up
    # and we'll rely on python's zip-import mechanism to load modules from
    # the PEX.  This may not work in some situations (e.g. native
    # libraries, libraries that want to find resources via the FS).
    pex_builder.info.zip_safe = options.zip_safe

    # Set the starting point for this PEX.
    pex_builder.info.entry_point = options.entry_point

    # Copy in our version of `pkg_resources` & `_markerlib`.
    copy_package(pex_builder, "pkg_resources", prefix=pex_builder.BOOTSTRAP_DIR)
    copy_package(pex_builder, "_markerlib", prefix=pex_builder.BOOTSTRAP_DIR)

    # Add the sources listed in the manifest.
    for dst, src in manifest["modules"].items():
        # NOTE(agallagher): the calls to `add_source` and `add_resource` below
        # hard-link the given source into the PEX temp dir.  Since OS X and
        # Linux behave differently when hard-linking a source that is a
        # symbolic link (Linux does *not* follow symlinks), resolve any
        # layers of symlinks here to get consistent behavior.
        try:
            pex_builder.add_source(dereference_symlinks(src), dst)
        except OSError as e:
            raise Exception("Failed to add {}: {}".format(src, e))

    # Add resources listed in the manifest.
    for dst, src in manifest["resources"].items():
        # NOTE(agallagher): see rationale above.
        pex_builder.add_resource(dereference_symlinks(src), dst)

    # Add native libraries listed in the manifest.
    for dst, src in manifest["nativeLibraries"].items():
        # NOTE(agallagher): see rationale above.
        pex_builder.add_resource(dereference_symlinks(src), dst)

    if options.directory:
        pex_builder.freeze(code_hash=False, bytecode_compile=False)
    else:
        pex_builder.build(output)
Example #14
File: pex.py  Project: yuhonghong7035/pex
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    def to_python_interpreter(full_path_or_basename):
      if os.path.exists(full_path_or_basename):
        return PythonInterpreter.from_binary(full_path_or_basename)
      else:
        interpreter = PythonInterpreter.from_env(full_path_or_basename)
        if interpreter is None:
          die('Failed to find interpreter: %s' % full_path_or_basename)
        return interpreter

    interpreters = [to_python_interpreter(interp) for interp in options.python or [sys.executable]]

  if options.interpreter_constraint:
    # NB: options.python and interpreter constraints cannot be used together, so this will not
    # affect usages of the interpreter(s) specified by the "--python" command line flag.
    constraints = options.interpreter_constraint
    validate_constraints(constraints)
    rc_variables = Variables.from_rc(rc=options.rc_file)
    pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
    interpreters = find_compatible_interpreters(pex_python_path, constraints)

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = min(interpreters)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  def walk_and_do(fn, src_dir):
    src_dir = os.path.normpath(src_dir)
    for root, dirs, files in os.walk(src_dir):
      for f in files:
        src_file_path = os.path.join(root, f)
        dst_path = os.path.relpath(src_file_path, src_dir)
        fn(src_file_path, dst_path)

  for directory in options.sources_directory:
    walk_and_do(pex_builder.add_source, directory)

  for directory in options.resources_directory:
    walk_and_do(pex_builder.add_resource, directory)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path
  if options.interpreter_constraint:
    for ic in options.interpreter_constraint:
      pex_builder.add_interpreter_constraint(ic)

  resolvables = resolvables_from_iterable(args, resolver_option_builder, interpreter=interpreter)

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt,
                                              builder=resolver_option_builder,
                                              interpreter=interpreter))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt,
                                    builder=resolver_option_builder,
                                    interpreter=interpreter):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platforms,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed,
                                use_manylinux=options.use_manylinux)

      for resolved_dist in resolveds:
        log('  %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),
            V=options.verbosity)
        pex_builder.add_distribution(resolved_dist.distribution)
        pex_builder.add_requirement(resolved_dist.requirement)
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #15
File: make_pex.py  Project: shs96c/buck
def main():
    parser = optparse.OptionParser(usage="usage: %prog [options] output")
    parser.add_option("--entry-point", default="__main__")
    parser.add_option("--directory", action="store_true", default=False)
    parser.add_option(
        "--no-zip-safe", action="store_false", dest="zip_safe", default=True
    )
    parser.add_option("--python", default="")
    parser.add_option("--python-version", default="")
    parser.add_option("--python-shebang", default=None)
    parser.add_option("--preload", action="append", default=[])
    options, args = parser.parse_args()
    if len(args) == 1:
        output = args[0]
    else:
        parser.error("'output' positional argument is required")
        return 1

    # The manifest is passed via stdin, as it can sometimes get too large
    # to be passed as a command-line argument.
    manifest = json.load(sys.stdin)

    # The version of pkg_resources.py (from setuptools) on some distros is
    # too old for PEX.  So we keep a recent version in the buck repo and
    # force it into the process by constructing a custom PythonInterpreter
    # instance using it.
    if not options.python:
        options.python = sys.executable
        identity = PythonIdentity.get()
    elif not options.python_version:
        # Note: this is expensive (~500ms). prefer passing --python-version when possible.
        identity = PythonInterpreter.from_binary(options.python).identity
    else:
        # Convert "CPython 2.7" to "CPython 2 7 0"
        python_version = options.python_version.replace(".", " ").split()
        if len(python_version) == 3:
            python_version.append("0")
        identity = PythonIdentity.from_id_string(" ".join(python_version))

    interpreter = PythonInterpreter(options.python, identity, extras={})

    pex_builder = PEXBuilder(
        path=output if options.directory else None, interpreter=interpreter
    )

    if options.python_shebang is not None:
        pex_builder.set_shebang(options.python_shebang)

    # Set whether this PEX is zip-safe, meaning everything will stay zipped up
    # and we'll rely on python's zip-import mechanism to load modules from
    # the PEX.  This may not work in some situations (e.g. native
    # libraries, libraries that want to find resources via the FS).
    pex_builder.info.zip_safe = options.zip_safe

    # Set the starting point for this PEX.
    pex_builder.info.entry_point = options.entry_point

    # Copy in our version of `pkg_resources` & `_markerlib`.
    copy_package(pex_builder, "pkg_resources", prefix=pex_builder.BOOTSTRAP_DIR)
    copy_package(pex_builder, "_markerlib", prefix=pex_builder.BOOTSTRAP_DIR)

    # Add the sources listed in the manifest.
    for dst, src in manifest["modules"].iteritems():
        # NOTE(agallagher): the calls to `add_source` and `add_resource` below
        # hard-link the given source into the PEX temp dir.  Since OS X and
        # Linux behave differently when hard-linking a source that is a
        # symbolic link (Linux does *not* follow symlinks), resolve any
        # layers of symlinks here to get consistent behavior.
        try:
            pex_builder.add_source(dereference_symlinks(src), dst)
        except OSError as e:
            raise Exception("Failed to add {}: {}".format(src, e))

    # Add resources listed in the manifest.
    for dst, src in manifest["resources"].iteritems():
        # NOTE(agallagher): see rationale above.
        pex_builder.add_resource(dereference_symlinks(src), dst)

    # Add native libraries listed in the manifest.
    for dst, src in manifest["nativeLibraries"].iteritems():
        # NOTE(agallagher): see rationale above.
        pex_builder.add_resource(dereference_symlinks(src), dst)

    if options.directory:
        pex_builder.freeze(code_hash=False, bytecode_compile=False)
    else:
        pex_builder.build(output)
Example #16
def build_pex(pex_filename: str, local_only: bool) -> str:
    pex_builder = PEXBuilder(include_tools=True)

    pex_builder.info.inherit_path = InheritPath.FALLBACK

    pex_builder.set_entry_point('daml_dit_if.main:main')
    pex_builder.set_shebang('/usr/bin/env python3')

    platforms = [parsed_platform('current')]

    if local_only:
        LOG.warn('Local-only build. THIS DIT WILL NOT RUN IN DAML HUB.')
    else:
        platforms = [
            *platforms,
            parsed_platform('manylinux2014_x86_64-cp-38-cp38')
        ]

    daml_dit_if_bundled = False

    try:
        if os.path.isfile(PYTHON_REQUIREMENT_FILE):
            LOG.info(
                f'Bundling dependencies from {PYTHON_REQUIREMENT_FILE}...')
            requirement_files = [PYTHON_REQUIREMENT_FILE]
        else:
            LOG.info(
                f'No dependency file found ({PYTHON_REQUIREMENT_FILE}), no dependencies will be bundled.'
            )
            requirement_files = []

        resolveds = resolve_multi(requirements=[],
                                  requirement_files=requirement_files,
                                  platforms=platforms)

        for resolved_dist in resolveds:

            if resolved_dist.distribution.project_name == IF_PROJECT_NAME \
               and not daml_dit_if_bundled:

                LOG.warn(
                    f'Bundling {IF_PROJECT_NAME} in output DIT file. This will'
                    f' override the version provided by Daml Hub, potentially'
                    f' compromising compatibility of this integration with'
                    f' future updates to Daml Hub. Use this option with caution.'
                )
                daml_dit_if_bundled = True

            LOG.debug("req: %s", resolved_dist.distribution)
            LOG.debug("     -> target: %s", resolved_dist.target)

            pex_builder.add_distribution(resolved_dist.distribution)
            if resolved_dist.direct_requirement:
                LOG.info("direct_req: %s", resolved_dist.direct_requirement)
                LOG.debug("     -> target: %s", resolved_dist.target)

                pex_builder.add_requirement(resolved_dist.direct_requirement)

    except Unsatisfiable as e:
        die(f'Unsatisfiable dependency error: {e}')

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)

                LOG.debug("Adding source file: %r, %r", src_file_path,
                          dst_path)

                fn(src_file_path, dst_path)

    walk_and_do(pex_builder.add_source, 'src/')

    pex_builder.freeze(bytecode_compile=True)

    # Entry point verification is disabled because ddit does not
    # formally depend on the integration framework, and it is not
    # necessarily available at integration build time. Because entry
    # point verification happens in ddit's environment and the
    # entrypoint is in the framework, this causes entry point
    # verification to fail unless some other agent has installed
    # daml-dit-if into ddit's environment.
    #
    # Virtual environments provide a way to work around this (and are
    # used in 'ddit run') but the PEX API does not allow a virtual
    # environment to be specified at build time. If this ever changes,
    # the build subcommand should be modified to prepare a virtual
    # environment for the build that contains the appropriate version
    # of daml-dit-if and entrypoint verification should be re-enabled.
    pex = PEX(pex_builder.path(),
              interpreter=pex_builder.interpreter,
              verify_entry_point=False)

    LOG.info('Building intermediate PEX file...')

    LOG.debug('PEX info: %r', pex_builder.info)

    pex_builder.build(pex_filename,
                      bytecode_compile=True,
                      deterministic_timestamp=True)

    if daml_dit_if_bundled:
        return 'python-direct'
    else:
        return 'python-direct-hub-if'
Example #17
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file
                                    or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" %
                            full_path_or_basename)
                    return interp

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path,
                        interpreter_constraints=constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message(
                        "Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
                "Searching for local interpreters matching {}".format(
                    ", ".join(map(str, platforms)))):
            candidate_interpreters = OrderedSet(
                iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms)
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log("Resolved {} for platform {}".format(
                            candidate_interpreter, resolved_platform))
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform"
                    if len(platforms) == 1 else "these platforms",
                ))

    interpreter = (PythonInterpreter.latest_release_of_min_compatible_version(
        interpreters) if interpreters else None)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
        preamble=preamble,
        copy_mode=CopyMode.SYMLINK,
        include_tools=options.include_tools or options.venv,
    )

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead.")

    for directory in OrderedSet(options.sources_directory +
                                options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.venv = bool(options.venv)
    pex_info.venv_bin_path = options.venv
    pex_info.venv_copies = options.venv_copies
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed(
            "Resolving distributions ({})".format(reqs +
                                                  options.requirement_files)):
        if options.cache_ttl:
            pex_warnings.warn(
                "The --cache-ttl option is deprecated and no longer has any effect."
            )
        if options.headers:
            pex_warnings.warn(
                "The --header option is deprecated and no longer has any effect."
            )

        network_configuration = NetworkConfiguration(
            retries=options.retries,
            timeout=options.timeout,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            if options.pex_repository:
                with TRACER.timed("Resolving requirements from PEX {}.".format(
                        options.pex_repository)):
                    resolveds = resolve_from_pex(
                        pex=options.pex_repository,
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        network_configuration=network_configuration,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        manylinux=options.manylinux,
                        ignore_errors=options.ignore_errors,
                    )
            else:
                with TRACER.timed("Resolving requirements."):
                    resolveds = resolve_multi(
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        allow_prereleases=options.allow_prereleases,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        indexes=indexes,
                        find_links=options.find_links,
                        resolver_version=ResolverVersion.for_value(
                            options.resolver_version),
                        network_configuration=network_configuration,
                        cache=cache,
                        build=options.build,
                        use_wheel=options.use_wheel,
                        compile=options.compile,
                        manylinux=options.manylinux,
                        max_parallel_jobs=options.max_parallel_jobs,
                        ignore_errors=options.ignore_errors,
                    )

            for resolved_dist in resolveds:
                pex_builder.add_distribution(resolved_dist.distribution)
                if resolved_dist.direct_requirement:
                    pex_builder.add_requirement(
                        resolved_dist.direct_requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #18
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = None  # Defaults to $PATH
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    interpreter = min(interpreters) if interpreters else None
    if options.use_first_matching_interpreter and interpreters:
        if len(interpreters) > 1:
            unused_interpreters = set(interpreters) - {interpreter}
            TRACER.log(
                "Multiple interpreters resolved, but only using {} because "
                "`--use-first-matching-interpreter` was used. These interpreters were matched but "
                "will not be used: {}".format(
                    interpreter.binary,
                    ", ".join(interpreter.binary for interpreter in sorted(unused_interpreters)),
                )
            )
        interpreters = [interpreter]

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    # If we're only building the PEX for the first of many interpreters due to
    # `--use-first-matching-interpreter` selection, we do not want to enable those same interpreter
    # constraints at runtime, where they could lead to a different interpreter being selected
    # leading to a failure to execute the PEX. Instead we rely on the shebang set by that single
    # interpreter to pick out a similar interpreter at runtime (for a CPython interpreter, the
    # shebang will be `#!/usr/bin/env pythonX.Y` which should generally be enough to select a
    # matching interpreter). To be clear though, there are many corner cases this will not work
    # for, including a mismatched ABI (python2.7m vs python2.7mu) when the PEX contains
    # platform-specific wheels, etc.
    if options.interpreter_constraint and not options.use_first_matching_interpreter:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    "  %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #19
class PexBuilderWrapper:
    """Wraps PEXBuilder to provide an API that consumes targets and other BUILD file entities."""
    class Factory(Subsystem):
        options_scope = "pex-builder-wrapper"

        @classmethod
        def register_options(cls, register):
            super(PexBuilderWrapper.Factory, cls).register_options(register)
            # TODO: make an analogy to cls.register_jvm_tool that can be overridden for python subsystems
            # by a python_requirement_library() target, not just via pants.ini!
            register(
                "--setuptools-version",
                advanced=True,
                default="40.6.3",
                fingerprint=True,
                help=
                "The setuptools version to include in the pex if namespace packages need "
                "to be injected.",
            )
            register(
                "--pex-version",
                advanced=True,
                default=pex_version,
                fingerprint=True,
                help="The pex version to include in any generated ipex files. "
                "NOTE: This should ideally be the same as the pex version which pants "
                f"itself depends on, which right now is {pex_version}.",
            )

        @classmethod
        def subsystem_dependencies(cls):
            return super(PexBuilderWrapper.Factory,
                         cls).subsystem_dependencies() + (
                             PythonRepos,
                             PythonSetup,
                         )

        @classmethod
        def create(cls, builder, log=None, generate_ipex=False):
            options = cls.global_instance().get_options()
            setuptools_requirement = f"setuptools=={options.setuptools_version}"
            pex_requirement = f"pex=={options.pex_version}"

            log = log or logging.getLogger(__name__)

            return PexBuilderWrapper(
                builder=builder,
                python_repos_subsystem=PythonRepos.global_instance(),
                python_setup_subsystem=PythonSetup.global_instance(),
                setuptools_requirement=PythonRequirement(
                    setuptools_requirement),
                pex_requirement=PythonRequirement(pex_requirement),
                log=log,
                generate_ipex=generate_ipex,
            )

    def __init__(
        self,
        builder: PEXBuilder,
        python_repos_subsystem: PythonRepos,
        python_setup_subsystem: PythonSetup,
        setuptools_requirement: PythonRequirement,
        pex_requirement: PythonRequirement,
        log,
        generate_ipex: bool = False,
    ):
        assert log is not None

        self._builder = builder
        self._python_repos_subsystem = python_repos_subsystem
        self._python_setup_subsystem = python_setup_subsystem
        self._setuptools_requirement = setuptools_requirement
        self._pex_requirement = pex_requirement
        self._log = log

        self._distributions: Dict[str, Distribution] = {}
        self._frozen = False

        self._generate_ipex = generate_ipex
        # If we generate a .ipex, we need to ensure all the code we copy into the underlying PEXBuilder
        # is also added to the new PEXBuilder created in `._shuffle_original_build_info_into_ipex()`.
        self._all_added_sources_resources: List[Path] = []
        # If we generate a dehydrated "ipex" file, we need to make sure that it is aware of any special
        # find_links repos attached to any single requirement, so it can later resolve those
        # requirements when it is first bootstrapped, using the same resolve options.
        self._all_find_links: OrderedSet[str] = OrderedSet()

    def add_requirement_libs_from(self, req_libs, platforms=None):
        """Multi-platform dependency resolution for PEX files.

        :param builder: Dump the requirements into this builder.
        :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
        :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
        :param log: Use this logger.
        :param platforms: A list of :class:`Platform`s to resolve requirements for.
                                            Defaults to the platforms specified by PythonSetup.
        """
        reqs = [req for req_lib in req_libs for req in req_lib.requirements]
        self.add_resolved_requirements(reqs, platforms=platforms)

    class SingleDistExtractionError(Exception):
        pass

    def extract_single_dist_for_current_platform(self, reqs,
                                                 dist_key) -> Distribution:
        """Resolve a specific distribution from a set of requirements matching the current platform.

        :param list reqs: A list of :class:`PythonRequirement` to resolve.
        :param str dist_key: The value of `distribution.key` to match for a `distribution` from the
                                                 resolved requirements.
        :return: The single :class:`pkg_resources.Distribution` matching `dist_key`.
        :raises: :class:`self.SingleDistExtractionError` if no dists or multiple dists matched the
                 given `dist_key`.
        """
        distributions = self.resolve_distributions(reqs, platforms=["current"])
        try:
            matched_dist = assert_single_element(
                dist for dists in distributions.values() for dist in dists
                if dist.key == dist_key)
        except (StopIteration, ValueError) as e:
            raise self.SingleDistExtractionError(
                f"Exactly one dist was expected to match name {dist_key} in requirements {reqs}: {e!r}"
            )
        return matched_dist

    def resolve_distributions(
        self,
        reqs: List[PythonRequirement],
        platforms: Optional[List[Platform]] = None,
    ) -> Dict[str, List[Distribution]]:
        """Multi-platform dependency resolution.

        :param reqs: A list of :class:`PythonRequirement` to resolve.
        :param platforms: A list of platform strings to resolve requirements for.
                          Defaults to the platforms specified by PythonSetup.
        :returns: A tuple `(map, transitive_reqs)`, where `map` is a dict mapping distribution name
                  to a list of resolved distributions, and `reqs` contains all transitive ==
                  requirements
                  needed to resolve the initial given requirements `reqs` for the given platforms.
        """
        deduped_reqs = OrderedSet(reqs)
        find_links: OrderedSet[str] = OrderedSet()
        for req in deduped_reqs:
            self._log.debug(f"  Dumping requirement: {req}")
            self._builder.add_requirement(str(req.requirement))
            if req.repository:
                find_links.add(req.repository)

        # Resolve the requirements into distributions.
        distributions = self._resolve_multi(
            self._builder.interpreter,
            list(deduped_reqs),
            platforms,
            list(find_links),
        )
        return distributions

    def add_resolved_requirements(
        self,
        reqs: List[PythonRequirement],
        platforms: Optional[List[Platform]] = None,
        override_ipex_build_do_actually_add_distribution: bool = False,
    ) -> None:
        """Multi-platform dependency resolution for PEX files.

        :param builder: Dump the requirements into this builder.
        :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
        :param reqs: A list of :class:`PythonRequirement` to resolve.
        :param log: Use this logger.
        :param platforms: A list of :class:`Platform`s to resolve requirements for.
                                            Defaults to the platforms specified by PythonSetup.
        :param bool override_ipex_build_do_actually_add_distribution: When this PexBuilderWrapper is configured with
                                                                        generate_ipex=True, this method won't add any distributions to
                                                                        the output pex. The internal implementation of this class adds a
                                                                        pex dependency to the output ipex file, and therefore needs to
                                                                        override the default behavior of this method.
        """
        distributions = self.resolve_distributions(reqs, platforms=platforms)
        locations: Set[str] = set()
        for platform, dists in distributions.items():
            for dist in dists:
                if dist.location not in locations:
                    if self._generate_ipex and not override_ipex_build_do_actually_add_distribution:
                        self._log.debug(
                            f"  *AVOIDING* dumping distribution into ipex: .../{os.path.basename(dist.location)}"
                        )
                        self._register_distribution(dist)
                    else:
                        self._log.debug(
                            f"  Dumping distribution: .../{os.path.basename(dist.location)}"
                        )
                        self.add_distribution(dist)
                locations.add(dist.location)

    def _resolve_multi(
        self,
        interpreter: PythonInterpreter,
        requirements: List[PythonRequirement],
        platforms: Optional[List[Platform]],
        find_links: Optional[List[str]],
    ) -> Dict[str, List[Distribution]]:
        """Multi-platform dependency resolution for PEX files.

        Returns a tuple containing a list of distributions that must be included in order to satisfy a
        set of requirements, and the transitive == requirements for those distributions. This may
        involve distributions for multiple platforms.

        :param interpreter: The :class:`PythonInterpreter` to resolve for.
        :param requirements: A list of :class:`PythonRequirement` objects to resolve.
        :param platforms: A list of :class:`Platform`s to resolve for.
        :param find_links: Additional paths to search for source packages during resolution.
        :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
                         to satisfy the requirements on that platform.
        """
        python_setup = self._python_setup_subsystem
        python_repos = self._python_repos_subsystem
        platforms = platforms or python_setup.platforms

        find_links = list(find_links) if find_links else []
        find_links.extend(python_repos.repos)

        # Individual requirements from pants may have a `repository` link attached to them, which is
        # extracted in `self.resolve_distributions()`. When generating a .ipex file with
        # `generate_ipex=True`, we want to ensure these repos are known to the ipex launcher when it
        # tries to resolve all the requirements from BOOTSTRAP-PEX-INFO.
        self._all_find_links.update(OrderedSet(find_links))

        distributions: Dict[str, List[Distribution]] = defaultdict(list)

        for platform in platforms:
            requirements_cache_dir = os.path.join(
                python_setup.resolver_cache_dir, str(interpreter.identity))
            resolved_dists = resolve(
                requirements=[str(req.requirement) for req in requirements],
                interpreter=interpreter,
                platform=platform,
                indexes=python_repos.indexes,
                find_links=find_links,
                cache=requirements_cache_dir,
                allow_prereleases=python_setup.resolver_allow_prereleases,
                manylinux=python_setup.manylinux,
            )
            for resolved_dist in resolved_dists:
                distributions[platform].append(resolved_dist.distribution)

        return distributions

    def _create_source_dumper(self, tgt: Target) -> Callable[[str], None]:
        buildroot = get_buildroot()

        def get_chroot_path(relpath: str) -> str:
            if type(tgt) == Files:
                # Loose `Files`, as opposed to `Resources` or `PythonTarget`s, have no (implied) package
                # structure and so we chroot them relative to the build root so that they can be accessed
                # via the normal Python filesystem APIs just as they would be accessed outside the
                # chrooted environment. NB: This requires we mark the pex as not zip safe so
                # these `Files` can still be accessed in the context of a built pex distribution.
                self._builder.info.zip_safe = False
                return relpath
            return str(Path(relpath).relative_to(tgt.target_base))

        def dump_source(relpath: str) -> None:
            source_path = str(Path(buildroot, relpath))
            dest_path = get_chroot_path(relpath)

            self._all_added_sources_resources.append(Path(dest_path))
            if has_resources(tgt):
                self._builder.add_resource(filename=source_path,
                                           env_filename=dest_path)
            else:
                self._builder.add_source(filename=source_path,
                                         env_filename=dest_path)

        return dump_source

    def add_sources_from(self, tgt: Target) -> None:
        dump_source = self._create_source_dumper(tgt)
        self._log.debug(f"  Dumping sources: {tgt}")
        for relpath in tgt.sources_relative_to_buildroot():
            try:
                dump_source(relpath)
            except OSError:
                self._log.error(
                    f"Failed to copy {relpath} for target {tgt.address.spec}")
                raise

        if getattr(tgt, "_resource_target_specs", None) or getattr(
                tgt, "_synthetic_resources_target", None):
            # No one should be on old-style resources any more.  And if they are,
            # switching to the new python pipeline will be a great opportunity to fix that.
            raise TaskError(
                f"Old-style resources not supported for target {tgt.address.spec}. Depend on resources() "
                "targets instead.")

    def _prepare_inits(self) -> Set[str]:
        chroot = self._builder.chroot()
        sources = chroot.get("source") | chroot.get("resource")
        missing_init_files = identify_missing_init_files(sources)
        if missing_init_files:
            with temporary_file(permissions=0o644) as ns_package:
                ns_package.write(
                    b'__import__("pkg_resources").declare_namespace(__name__)  # type: ignore[attr-defined]'
                )
                ns_package.flush()
                for missing_init_file in missing_init_files:
                    self._all_added_sources_resources.append(
                        Path(missing_init_file))
                    self._builder.add_source(filename=ns_package.name,
                                             env_filename=missing_init_file)
        return missing_init_files

    def set_emit_warnings(self, emit_warnings):
        self._builder.info.emit_warnings = emit_warnings

    def _set_major_minor_interpreter_constraint_for_ipex(
        self,
        info: PexInfo,
        identity: PythonIdentity,
    ) -> PexInfo:
        interpreter_name = identity.requirement.name
        major, minor, _patch = identity.version
        major_minor_only_constraint = f"{interpreter_name}=={major}.{minor}.*"
        return ipex_launcher.modify_pex_info(
            info, interpreter_constraints=[str(major_minor_only_constraint)])

    def _shuffle_underlying_pex_builder(self) -> Tuple[PexInfo, Path]:
        """Replace the original builder with a new one, and just pull files from the old chroot."""
        # Ensure that (the interpreter selected to resolve requirements when the ipex is first run) is
        # (the exact same interpreter we used to resolve those requirements here). This is the only (?)
        # way to ensure that the ipex bootstrap uses the *exact* same interpreter version.
        self._builder.info = self._set_major_minor_interpreter_constraint_for_ipex(
            self._builder.info, self._builder.interpreter.identity)

        # Remove all the original top-level requirements in favor of the transitive == requirements.
        self._builder.info = ipex_launcher.modify_pex_info(self._builder.info,
                                                           requirements=[])
        transitive_reqs = [
            dist.as_requirement() for dist in self._distributions.values()
        ]
        self.add_direct_requirements(transitive_reqs)

        orig_info = self._builder.info.copy()

        orig_chroot = self._builder.chroot()

        # Mutate the PexBuilder object which is manipulated by this subsystem.
        self._builder = PEXBuilder(interpreter=self._builder.interpreter)
        self._builder.info = self._set_major_minor_interpreter_constraint_for_ipex(
            self._builder.info, self._builder.interpreter.identity)

        self._distributions = {}

        return (orig_info, Path(orig_chroot.path()))

    def _shuffle_original_build_info_into_ipex(self):
        """Create a "dehydrated" ipex file without any of its requirements, and specify that in two.

        *-INFO files.

        See ipex_launcher.py for details of how these files are used.
        """
        orig_pex_info, orig_chroot = self._shuffle_underlying_pex_builder()

        # Gather information needed to create IPEX-INFO.
        all_code = [str(src) for src in self._all_added_sources_resources]
        prefixed_code_paths = [
            os.path.join(ipex_launcher.APP_CODE_PREFIX, src)
            for src in all_code
        ]
        for src, prefixed in zip(all_code, prefixed_code_paths):
            # NB: Need to add under 'source' label for `self._prepare_inits()` to pick it up!
            self._builder.chroot().copy(os.path.join(str(orig_chroot), src),
                                        prefixed,
                                        label="source")

        python_repos = self._python_repos_subsystem
        python_setup = self._python_setup_subsystem

        # NB: self._all_find_links is updated on every call to self._resolve_multi(), and therefore
        # includes all of the links from python_repos.repos, as well as any links added within any
        # individual requirements from that resolve.

        resolver_settings = dict(
            indexes=list(python_repos.indexes),
            find_links=list(self._all_find_links),
            allow_prereleases=UnsetBool.coerce_bool(
                python_setup.resolver_allow_prereleases, default=True),
            manylinux=python_setup.manylinux,
        )

        # IPEX-INFO: A json mapping interpreted in ipex_launcher.py:
        # {
        #   "code": [<which source files to add to the "hydrated" pex when bootstrapped>],
        #   "resolver_settings": {<which indices to search for requirements from when bootstrapping>},
        # }
        ipex_info = dict(
            code=prefixed_code_paths,
            resolver_settings=resolver_settings,
        )
        with temporary_file(permissions=0o644) as ipex_info_file:
            ipex_info_file.write(json.dumps(ipex_info).encode())
            ipex_info_file.flush()
            self._builder.add_resource(filename=ipex_info_file.name,
                                       env_filename="IPEX-INFO")

        # BOOTSTRAP-PEX-INFO: The original PEX-INFO, which should be the PEX-INFO in the hydrated .pex
        #                     file that is generated when the .ipex is first executed.
        with temporary_file(permissions=0o644) as bootstrap_pex_info_file:
            bootstrap_pex_info_file.write(orig_pex_info.dump().encode())
            bootstrap_pex_info_file.flush()
            self._builder.add_resource(filename=bootstrap_pex_info_file.name,
                                       env_filename="BOOTSTRAP-PEX-INFO")

        # ipex.py: The special bootstrap script to hydrate the .ipex with the fully resolved
        #          requirements when it is first executed.
        # Extract the file contents of our custom app launcher script from the pants package.
        parent_module = module_dirname(module_dirname(ipex_launcher.__name__))
        ipex_launcher_provider = get_provider(parent_module)
        ipex_launcher_script = ipex_launcher_provider.get_resource_string(
            parent_module, "ipex/ipex_launcher.py")
        with temporary_file(permissions=0o644) as ipex_launcher_file:
            ipex_launcher_file.write(ipex_launcher_script)
            ipex_launcher_file.flush()
            # Our .ipex file will use our custom app launcher!
            self._builder.set_executable(ipex_launcher_file.name,
                                         env_filename="ipex.py")

        # The PEX-INFO we generate shouldn't have any requirements (except pex itself), or the
        # resulting ipex will fail to bootstrap because it can't find distributions for those
        # requirements. Instead, the .pex file produced when the .ipex is first executed will read
        # and resolve all those requirements from the BOOTSTRAP-PEX-INFO.
        self.add_resolved_requirements(
            [self._pex_requirement, self._setuptools_requirement],
            override_ipex_build_do_actually_add_distribution=True,
        )

    def freeze(self) -> None:
        if self._frozen:
            return

        if self._prepare_inits():
            dist = self._distributions.get("setuptools")
            if not dist:
                self.add_resolved_requirements([self._setuptools_requirement])

        if self._generate_ipex:
            self._shuffle_original_build_info_into_ipex()

        self._builder.freeze(bytecode_compile=False)
        self._frozen = True

    def set_entry_point(self, entry_point):
        self._builder.set_entry_point(entry_point)

    def build(self, safe_path):
        self.freeze()
        self._builder.build(safe_path,
                            bytecode_compile=False,
                            deterministic_timestamp=True)

    def set_shebang(self, shebang):
        self._builder.set_shebang(shebang)

    def add_interpreter_constraint(self, constraint):
        self._builder.add_interpreter_constraint(constraint)

    def add_interpreter_constraints_from(self, constraint_tgts):
        # TODO: this would be a great place to validate the constraints and present a good error
        # message if they are incompatible, since all the sources of the constraints are
        # available here.
        # See: https://github.com/pantsbuild/pex/blob/584b6e367939d24bc28aa9fa36eb911c8297dac8/pex/interpreter_constraints.py
        constraint_tuples = {
            self._python_setup_subsystem.compatibility_or_constraints(
                tgt.compatibility)
            for tgt in constraint_tgts
        }
        for constraint_tuple in constraint_tuples:
            for constraint in constraint_tuple:
                self.add_interpreter_constraint(constraint)

    def add_direct_requirements(self, reqs):
        for req in reqs:
            self._builder.add_requirement(str(req))

    def add_distribution(self, dist):
        self._builder.add_distribution(dist)
        self._register_distribution(dist)

    def add_dist_location(self, location):
        self._builder.add_dist_location(location)
        dist = DistributionHelper.distribution_from_path(location)
        self._register_distribution(dist)

    def _register_distribution(self, dist):
        self._distributions[dist.key] = dist

    def set_script(self, script):
        self._builder.set_script(script)
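A hedged sketch of how a pants task might drive this wrapper end to end; the target objects, their entry_point/name attributes, and the results directory are assumptions, and Factory.create only resolves inside an initialized pants subsystem context:

import os

from pex.common import safe_mkdtemp
from pex.pex_builder import PEXBuilder


def build_binary_pex(binary_target, source_targets, req_lib_targets, results_dir, log=None):
    # Assumed to run inside a pants task, with targets taken from the build graph.
    builder = PEXBuilder(path=safe_mkdtemp(), copy=True)
    wrapper = PexBuilderWrapper.Factory.create(builder, log=log)

    # Record interpreter constraints first so the PEX declares which interpreters it supports.
    wrapper.add_interpreter_constraints_from(source_targets)
    for tgt in source_targets:
        wrapper.add_sources_from(tgt)
    wrapper.add_requirement_libs_from(req_lib_targets)

    wrapper.set_entry_point(binary_target.entry_point)  # assumes a python_binary-style target
    pex_path = os.path.join(results_dir, f"{binary_target.name}.pex")
    wrapper.build(pex_path)  # freeze() runs internally before the file is written
    return pex_path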
Example #20
0
File: pex.py Project: yunstanford/pex
def build_pex(args, options, resolver_option_builder):
    with TRACER.timed('Resolving interpreters', V=2):
        interpreters = [
            get_interpreter(interpreter, options.interpreter_cache_dir,
                            options.repos, options.use_wheel)
            for interpreter in options.python or [None]
        ]

    if options.interpreter_constraint:
        # NB: options.python and interpreter constraints cannot be used together, so this will not
        # affect usages of the interpreter(s) specified by the "--python" command line flag.
        constraints = options.interpreter_constraint
        validate_constraints(constraints)
        rc_variables = Variables.from_rc(rc=options.rc_file)
        pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
        interpreters = find_compatible_interpreters(pex_python_path,
                                                    constraints)

    if not interpreters:
        die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    interpreter = min(interpreters)

    pex_builder = PEXBuilder(path=safe_mkdtemp(),
                             interpreter=interpreter,
                             preamble=preamble)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path
    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    resolvables = [
        Resolvable.get(arg, resolver_option_builder) for arg in args
    ]

    for requirements_txt in options.requirement_files:
        resolvables.extend(
            requirements_from_file(requirements_txt, resolver_option_builder))

    # pip states the constraints format is identical to requirements
    # https://pip.pypa.io/en/stable/user_guide/#constraints-files
    for constraints_txt in options.constraint_files:
        constraints = []
        for r in requirements_from_file(constraints_txt,
                                        resolver_option_builder):
            r.is_constraint = True
            constraints.append(r)
        resolvables.extend(constraints)

    with TRACER.timed('Resolving distributions'):
        try:
            resolveds = resolve_multi(
                resolvables,
                interpreters=interpreters,
                platforms=options.platform,
                cache=options.cache_dir,
                cache_ttl=options.cache_ttl,
                allow_prereleases=resolver_option_builder.prereleases_allowed)

            for dist in resolveds:
                log('  %s' % dist, v=options.verbosity)
                pex_builder.add_distribution(dist)
                pex_builder.add_requirement(dist.as_requirement())
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
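Both min(interpreters) here and the _lowest_version_interpreter helper in the next example pick the lowest-version interpreter among the candidates. A tiny stand-alone illustration of that selection rule, using plain version tuples as stand-ins for PythonInterpreter objects:

# Stand-ins only: the real code compares PythonInterpreter objects, which order by version.
candidate_versions = [(3, 8, 10), (2, 7, 18), (3, 6, 15)]
lowest = min(candidate_versions)
assert lowest == (2, 7, 18)  # the oldest interpreter wins, maximizing compatibility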
Example #21
0
File: pex.py Project: sixninetynine/pex
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    interpreters = [
      get_interpreter(interpreter,
                      options.interpreter_cache_dir,
                      options.repos,
                      options.use_wheel)
      for interpreter in options.python or [None]
    ]

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = _lowest_version_interpreter(interpreters)
  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.inherit_path = options.inherit_path

  resolvables = [Resolvable.get(arg, resolver_option_builder) for arg in args]

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt, resolver_option_builder))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt, resolver_option_builder):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platform,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed)

      for dist in resolveds:
        log('  %s' % dist, v=options.verbosity)
        pex_builder.add_distribution(dist)
        pex_builder.add_requirement(dist.as_requirement())
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
Example #22
0
def builder(shebang):
    # type: (str) -> PEXBuilder
    pb = PEXBuilder()
    pb.set_shebang(shebang)
    return pb
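A short usage sketch of the helper above; the shebang line and output name are invented:

pb = builder("#!/usr/bin/env python3.8")  # placeholder shebang
pb.build("custom_shebang.pex")            # the emitted file starts with that shebang line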
Example #23
0
File: pex.py Project: jsirois/pex
def build_pex(args, options, resolver_option_builder):
  with TRACER.timed('Resolving interpreters', V=2):
    def to_python_interpreter(full_path_or_basename):
      if os.path.exists(full_path_or_basename):
        return PythonInterpreter.from_binary(full_path_or_basename)
      else:
        interpreter = PythonInterpreter.from_env(full_path_or_basename)
        if interpreter is None:
          die('Failed to find interpreter: %s' % full_path_or_basename)
        return interpreter

    interpreters = [to_python_interpreter(interp) for interp in options.python or [sys.executable]]

  if options.interpreter_constraint:
    # NB: options.python and interpreter constraints cannot be used together, so this will not
    # affect usages of the interpreter(s) specified by the "--python" command line flag.
    constraints = options.interpreter_constraint
    validate_constraints(constraints)
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
      rc_variables = Variables.from_rc(rc=options.rc_file)
      pex_python_path = rc_variables.get('PEX_PYTHON_PATH', '')
    else:
      pex_python_path = ""
    interpreters = find_compatible_interpreters(pex_python_path, constraints)

  if not interpreters:
    die('Could not find compatible interpreter', CANNOT_SETUP_INTERPRETER)

  try:
    with open(options.preamble_file) as preamble_fd:
      preamble = preamble_fd.read()
  except TypeError:
    # options.preamble_file is None
    preamble = None

  interpreter = min(interpreters)

  pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

  def walk_and_do(fn, src_dir):
    src_dir = os.path.normpath(src_dir)
    for root, dirs, files in os.walk(src_dir):
      for f in files:
        src_file_path = os.path.join(root, f)
        dst_path = os.path.relpath(src_file_path, src_dir)
        fn(src_file_path, dst_path)

  for directory in options.sources_directory:
    walk_and_do(pex_builder.add_source, directory)

  for directory in options.resources_directory:
    walk_and_do(pex_builder.add_resource, directory)

  pex_info = pex_builder.info
  pex_info.zip_safe = options.zip_safe
  pex_info.pex_path = options.pex_path
  pex_info.always_write_cache = options.always_write_cache
  pex_info.ignore_errors = options.ignore_errors
  pex_info.emit_warnings = options.emit_warnings
  pex_info.inherit_path = options.inherit_path
  if options.interpreter_constraint:
    for ic in options.interpreter_constraint:
      pex_builder.add_interpreter_constraint(ic)

  resolvables = resolvables_from_iterable(args, resolver_option_builder, interpreter=interpreter)

  for requirements_txt in options.requirement_files:
    resolvables.extend(requirements_from_file(requirements_txt,
                                              builder=resolver_option_builder,
                                              interpreter=interpreter))

  # pip states the constraints format is identical to requirements
  # https://pip.pypa.io/en/stable/user_guide/#constraints-files
  for constraints_txt in options.constraint_files:
    constraints = []
    for r in requirements_from_file(constraints_txt,
                                    builder=resolver_option_builder,
                                    interpreter=interpreter):
      r.is_constraint = True
      constraints.append(r)
    resolvables.extend(constraints)

  with TRACER.timed('Resolving distributions'):
    try:
      resolveds = resolve_multi(resolvables,
                                interpreters=interpreters,
                                platforms=options.platforms,
                                cache=options.cache_dir,
                                cache_ttl=options.cache_ttl,
                                allow_prereleases=resolver_option_builder.prereleases_allowed,
                                use_manylinux=options.use_manylinux)

      for resolved_dist in resolveds:
        log('  %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),
            V=options.verbosity)
        pex_builder.add_distribution(resolved_dist.distribution)
        pex_builder.add_requirement(resolved_dist.requirement)
    except Unsatisfiable as e:
      die(e)

  if options.entry_point and options.script:
    die('Must specify at most one entry point or script.', INVALID_OPTIONS)

  if options.entry_point:
    pex_builder.set_entry_point(options.entry_point)
  elif options.script:
    pex_builder.set_script(options.script)

  if options.python_shebang:
    pex_builder.set_shebang(options.python_shebang)

  return pex_builder
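The build_pex variants above all read an optional preamble file and hand its contents straight to PEXBuilder. A minimal stand-alone sketch of that mechanism in isolation; the preamble string, entry point, and output name are invented:

import tempfile

from pex.pex_builder import PEXBuilder

# The preamble is arbitrary Python source that the generated pex executes before
# bootstrapping the entry point; this string is a placeholder for illustration.
preamble = 'print("running placeholder preamble before the entry point")'
pb = PEXBuilder(path=tempfile.mkdtemp(), preamble=preamble)
pb.set_entry_point("demo.app:main")  # hypothetical entry point
pb.build("with_preamble.pex")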