def test_headers_as_dict():
    # type: () -> None
    """NetworkConfiguration turns `NAME:VALUE` header strings into a dict."""
    # No headers and an explicitly empty header list both produce an empty dict.
    assert {} == NetworkConfiguration.create().headers_as_dict()
    assert {} == NetworkConfiguration.create(headers=[]).headers_as_dict()

    # Each `NAME:VALUE` entry becomes one dict item.
    expected = {"A_NAME": "B_VALUE", "C_NAME": "D_VALUE"}
    config = NetworkConfiguration.create(headers=["A_NAME:B_VALUE", "C_NAME:D_VALUE"])
    assert expected == config.headers_as_dict()
def test_headers_bad():
    # type: () -> None
    """Malformed header entries are rejected with one error line per bad entry."""
    bad_headers = ["A_NAME:B_VALUE", "C_BAD", "D_NAME:E_VALUE", "F_BAD"]
    with pytest.raises(AssertionError) as exc_info:
        NetworkConfiguration.create(headers=bad_headers)

    message_lines = frozenset(str(exc_info.value).splitlines())
    # Both malformed entries are reported verbatim.
    for bad_entry in ("C_BAD", "F_BAD"):
        assert bad_entry in message_lines
    # The full message is exactly four lines (preamble plus the offending entries).
    assert 4 == len(message_lines)
def create(
    cls,
    resolver_version=None,  # type: Optional[ResolverVersion.Value]
    indexes=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    network_configuration=None,  # type: Optional[NetworkConfiguration]
):
    # type: (...) -> PackageIndexConfiguration
    """Create a PackageIndexConfiguration, defaulting any unset options.

    :param resolver_version: The Pip resolver version to use; defaults to the legacy resolver.
    :param indexes: Package indexes to use; `None` accepts pip defaults, `[]` disables indexes.
    :param find_links: Additional find-links repos (paths or URLs) to search.
    :param network_configuration: Network settings; defaults to a fresh default configuration.
    """
    resolver_version = resolver_version or ResolverVersion.PIP_LEGACY
    network_configuration = network_configuration or NetworkConfiguration.create()

    # Pip's client cert must be smuggled in via the PIP_CLIENT_CERT env var to work around
    # https://github.com/pypa/pip/issues/5502, and env vars are only honored when Pip's
    # `--isolated` mode is off.
    isolated = not network_configuration.client_cert

    args = cls._calculate_args(
        indexes=indexes, find_links=find_links, network_configuration=network_configuration
    )
    env = cls._calculate_env(network_configuration=network_configuration, isolated=isolated)
    return cls(
        resolver_version=resolver_version,
        network_configuration=network_configuration,
        args=args,
        env=env,
        isolated=isolated,
    )
def _calculate_args(
    indexes=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    network_configuration=None,  # type: Optional[NetworkConfiguration]
):
    # type: (...) -> Iterator[str]
    """Yield the Pip command line args expressing the given index and network options.

    :param indexes: Package indexes; `None` accepts pip defaults, `[]` turns off all index use.
    :param find_links: Additional find-links repos (paths or URLs) to search.
    :param network_configuration: Network settings; defaults to a fresh default configuration.
    """
    # N.B.: `--cert` and `--client-cert` are passed via env var to work around:
    # https://github.com/pypa/pip/issues/5502
    # See `_calculate_env`.

    trusted_hosts = []

    def maybe_trust_insecure_host(url):
        url_info = urlparse.urlparse(url)
        if "http" == url_info.scheme:
            # Implicitly trust explicitly asked for http indexes and find_links repos instead of
            # requiring separate trust configuration.
            trusted_hosts.append(url_info.netloc)
        return url

    # N.B.: We interpret None to mean accept pip index defaults, [] to mean turn off all index
    # use.
    if indexes is not None:
        if len(indexes) == 0:
            yield "--no-index"
        else:
            # The first index is the primary `--index-url`; the rest become extra indexes.
            all_indexes = deque(indexes)
            yield "--index-url"
            yield maybe_trust_insecure_host(all_indexes.popleft())
            if all_indexes:
                for extra_index in all_indexes:
                    yield "--extra-index-url"
                    yield maybe_trust_insecure_host(extra_index)

    if find_links:
        for find_link_url in find_links:
            yield "--find-links"
            yield maybe_trust_insecure_host(find_link_url)

    # N.B.: `trusted_hosts` is only fully populated once the index/find-links args above have
    # been consumed from this generator, so this loop must come after them.
    for trusted_host in trusted_hosts:
        yield "--trusted-host"
        yield trusted_host

    network_configuration = network_configuration or NetworkConfiguration.create()

    yield "--retries"
    yield str(network_configuration.retries)
    yield "--timeout"
    yield str(network_configuration.timeout)

    if network_configuration.proxy:
        yield "--proxy"
        yield network_configuration.proxy
def _resolve_plugins(self) -> Iterable[str]:
    """Resolve the configured plugin requirements and install each resolved distribution."""
    requirements = self._plugin_requirements
    logger.info("Resolving new plugins...:\n {}".format("\n ".join(requirements)))

    network_configuration = NetworkConfiguration.create(
        cert=self._global_options.options.ca_certs_path
    )
    resolved_dists = resolver.resolve(
        requirements,
        indexes=self._python_repos.indexes,
        find_links=self._python_repos.repos,
        interpreter=self._interpreter,
        cache=self.plugin_cache_dir,
        allow_prereleases=PANTS_SEMVER.is_prerelease,
        network_configuration=network_configuration,
    )
    # Install every resolved distribution and hand back the install results.
    return [self._install_plugin(dist.distribution) for dist in resolved_dists]
def __init__(self, network_configuration=None):
    # type: (Optional[NetworkConfiguration]) -> None
    """Set up urllib handlers honoring the given (or default) network configuration."""
    config = network_configuration or NetworkConfiguration.create()
    self._timeout = config.timeout
    self._max_retries = config.retries

    # Trust the configured CA bundle (or the system default when unset).
    ssl_context = ssl.create_default_context(cafile=config.cert)
    if config.client_cert:
        # A single PEM file containing both the private key and the certificate.
        ssl_context.load_cert_chain(config.client_cert)

    proxies = None  # type: Optional[Dict[str, str]]
    if config.proxy:
        # Route all supported protocols through the single configured proxy.
        proxies = dict.fromkeys(("ftp", "http", "https"), config.proxy)

    self._handlers = (ProxyHandler(proxies), HTTPSHandler(context=ssl_context))
def build_pex(reqs, options, cache=None):
    """Build a PEX from parsed command line options.

    :param reqs: Requirement strings given directly on the command line.
    :param options: The parsed command line options namespace.
    :param cache: Optional resolver cache directory.
    :return: A configured (but not yet frozen) `PEXBuilder`.
    """
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                # Accept either a path to an interpreter binary or a basename to look up.
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path, interpreter_constraints=constraints
                    )
                )
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        # Trade any requested platforms we can satisfy locally for concrete local interpreters.
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                        interpreters.append(candidate_interpreter)
        if platforms:
            # Any platforms left over could not be matched locally and will be resolved
            # from pre-built binary distributions only.
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    interpreter = (
        PythonInterpreter.latest_release_of_min_compatible_version(interpreters)
        if interpreters
        else None
    )

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead."
        )

    # Add every file under each source (and deprecated resource) directory, keyed by its path
    # relative to that directory.
    for directory in OrderedSet(options.sources_directory + options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    # Transcribe runtime options into the PEX-INFO metadata.
    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )
        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    " %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
def configure_clp_pex_resolution(parser):
    # type: (OptionParser) -> None
    """Register the "Resolver options" option group on `parser`.

    :param parser: The optparse parser to extend.
    """
    group = OptionGroup(
        parser,
        "Resolver options",
        "Tailor how to find, resolve and translate the packages that get put into the PEX "
        "environment.",
    )

    group.add_option(
        "--pypi",
        "--no-pypi",
        "--no-index",
        action="callback",
        dest="indexes",
        default=[_PYPI],
        callback=process_pypi_option,
        help="Whether to use pypi to resolve dependencies; Default: use pypi",
    )
    group.add_option(
        "--pex-path",
        dest="pex_path",
        type=str,
        default=None,
        help="A colon separated list of other pex files to merge into the runtime environment.",
    )
    group.add_option(
        "-f",
        "--find-links",
        "--repo",
        metavar="PATH/URL",
        action="callback",
        default=[],
        dest="find_links",
        callback=process_find_links,
        type=str,
        help="Additional repository path (directory or URL) to look for requirements.",
    )
    group.add_option(
        "-i",
        "--index",
        "--index-url",
        metavar="URL",
        action="callback",
        dest="indexes",
        callback=process_index_url,
        type=str,
        help="Additional cheeseshop indices to use to satisfy requirements.",
    )

    # Network option defaults are sourced from a default NetworkConfiguration so that help text
    # and actual behavior stay in agreement.
    default_net_config = NetworkConfiguration.create()
    group.add_option(
        "--cache-ttl",
        metavar="SECS",
        default=default_net_config.cache_ttl,
        type=int,
        help="Set the maximum age of items in the HTTP cache in seconds. [Default: %default]",
    )
    group.add_option(
        "--retries",
        default=default_net_config.retries,
        type=int,
        help="Maximum number of retries each connection should attempt. [Default: %default]",
    )
    group.add_option(
        "--timeout",
        metavar="SECS",
        default=default_net_config.timeout,
        type=int,
        help="Set the socket timeout in seconds. [Default: %default]",
    )
    group.add_option(
        "-H",
        "--header",
        dest="headers",
        metavar="NAME:VALUE",
        default=[],
        type=str,
        action="append",
        help="Additional HTTP headers to include in all requests.",
    )
    group.add_option(
        "--proxy",
        type=str,
        default=None,
        help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
    )
    group.add_option(
        "--cert", metavar="PATH", type=str, default=None, help="Path to alternate CA bundle."
    )
    group.add_option(
        "--client-cert",
        metavar="PATH",
        type=str,
        default=None,
        help="Path to an SSL client certificate which should be a single file containing the private "
        "key and the certificate in PEM format.",
    )

    group.add_option(
        "--pre",
        "--no-pre",
        dest="allow_prereleases",
        default=False,
        action="callback",
        callback=parse_bool,
        help="Whether to include pre-release and development versions of requirements; "
        "Default: only stable versions are used, unless explicitly requested",
    )
    group.add_option(
        "--disable-cache",
        dest="disable_cache",
        default=False,
        action="store_true",
        help="Disable caching in the pex tool entirely.",
    )
    group.add_option(
        "--cache-dir",
        dest="cache_dir",
        default=None,
        help="DEPRECATED: Use --pex-root instead. "
        "The local cache directory to use for speeding up requirement "
        "lookups. [Default: {}]".format(ENV.PEX_ROOT),
    )
    group.add_option(
        "--wheel",
        "--no-wheel",
        "--no-use-wheel",
        dest="use_wheel",
        default=True,
        action="callback",
        callback=parse_bool,
        help="Whether to allow wheel distributions; Default: allow wheels",
    )
    group.add_option(
        "--build",
        "--no-build",
        dest="build",
        default=True,
        action="callback",
        callback=parse_bool,
        help="Whether to allow building of distributions from source; Default: allow builds",
    )
    group.add_option(
        "--manylinux",
        "--no-manylinux",
        "--no-use-manylinux",
        dest="manylinux",
        type=str,
        default=_DEFAULT_MANYLINUX_STANDARD,
        action="callback",
        callback=process_manylinux,
        help=(
            "Whether to allow resolution of manylinux wheels for linux target "
            "platforms; Default: allow manylinux wheels compatible with {}".format(
                _DEFAULT_MANYLINUX_STANDARD
            )
        ),
    )
    group.add_option(
        "--transitive",
        "--no-transitive",
        "--intransitive",
        dest="transitive",
        default=True,
        action="callback",
        callback=process_transitive,
        help="Whether to transitively resolve requirements. Default: True",
    )
    group.add_option(
        "-j",
        "--jobs",
        metavar="JOBS",
        dest="max_parallel_jobs",
        type=int,
        default=DEFAULT_MAX_JOBS,
        # Fix: the adjacent string literals previously read "...expense of other" "processes..."
        # which rendered as "otherprocesses" in --help output; a separating space was missing.
        help="The maximum number of parallel jobs to use when resolving, building and installing "
        "distributions. You might want to increase the maximum number of parallel jobs to "
        "potentially improve the latency of the pex creation process at the expense of other "
        "processes on your system. [Default: %default]",
    )

    parser.add_option_group(group)
def build_pex(reqs, options, cache=None):
    """Build a PEX from parsed command line options.

    :param reqs: Requirement strings given directly on the command line.
    :param options: The parsed command line options namespace.
    :param cache: Optional resolver cache directory.
    :return: A configured (but not yet frozen) `PEXBuilder`.
    """
    interpreters = None  # Default to the current interpreter.

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed('Resolving interpreters', V=2):

            def to_python_interpreter(full_path_or_basename):
                # Accept either a path to an interpreter binary or a basename to look up.
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die('Failed to find interpreter: %s' % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed('Resolving interpreters', V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            # Honor PEX_PYTHON_PATH from rc files unless rc files are explicitly ignored.
            if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
                rc_variables = Variables.from_rc(rc=options.rc_file)
                pex_python_path = rc_variables.get('PEX_PYTHON_PATH', None)
            else:
                pex_python_path = None
            try:
                interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(e.create_message('Could not find a compatible interpreter.'),
                    CANNOT_SETUP_INTERPRETER)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    # When several interpreters match, pick the lowest (min) as the build interpreter.
    interpreter = min(interpreters) if interpreters else None
    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    def walk_and_do(fn, src_dir):
        # Apply `fn` to every file under `src_dir`, keyed relative to `src_dir`.
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    # Transcribe runtime options into the PEX-INFO metadata.
    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed('Resolving distributions ({})'.format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(cache_ttl=options.cache_ttl,
                                                            retries=options.retries,
                                                            timeout=options.timeout,
                                                            headers=options.headers,
                                                            proxy=options.proxy,
                                                            cert=options.cert,
                                                            client_cert=options.client_cert)
        try:
            resolveds = resolve_multi(requirements=reqs,
                                      requirement_files=options.requirement_files,
                                      constraint_files=options.constraint_files,
                                      allow_prereleases=options.allow_prereleases,
                                      transitive=options.transitive,
                                      interpreters=interpreters,
                                      platforms=options.platforms,
                                      indexes=indexes,
                                      find_links=options.find_links,
                                      network_configuration=network_configuration,
                                      cache=cache,
                                      build=options.build,
                                      use_wheel=options.use_wheel,
                                      compile=options.compile,
                                      manylinux=options.manylinux,
                                      max_parallel_jobs=options.max_parallel_jobs,
                                      ignore_errors=options.ignore_errors)

            for resolved_dist in resolveds:
                log(' %s -> %s' % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity)
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die('Must specify at most one entry point or script.', INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
def configure_clp_pex_resolution(parser):
    """Register the "Resolver options" option group on `parser`.

    :param parser: The optparse parser to extend.
    """
    group = OptionGroup(
        parser,
        'Resolver options',
        'Tailor how to find, resolve and translate the packages that get put into the PEX '
        'environment.')

    group.add_option(
        '--pypi', '--no-pypi', '--no-index',
        action='callback',
        dest='indexes',
        default=[_PYPI],
        callback=process_pypi_option,
        help='Whether to use pypi to resolve dependencies; Default: use pypi')

    group.add_option(
        '--pex-path',
        dest='pex_path',
        type=str,
        default=None,
        help='A colon separated list of other pex files to merge into the runtime environment.')

    group.add_option(
        '-f', '--find-links', '--repo',
        metavar='PATH/URL',
        action='callback',
        default=[],
        dest='find_links',
        callback=process_find_links,
        type=str,
        help='Additional repository path (directory or URL) to look for requirements.')

    group.add_option(
        '-i', '--index', '--index-url',
        metavar='URL',
        action='callback',
        dest='indexes',
        callback=process_index_url,
        type=str,
        help='Additional cheeseshop indices to use to satisfy requirements.')

    # Network option defaults are sourced from a default NetworkConfiguration so that help text
    # and actual behavior stay in agreement.
    default_net_config = NetworkConfiguration.create()

    group.add_option(
        '--cache-ttl',
        metavar='SECS',
        default=default_net_config.cache_ttl,
        type=int,
        help='Set the maximum age of items in the HTTP cache in seconds. [Default: %default]')

    group.add_option(
        '--retries',
        default=default_net_config.retries,
        type=int,
        help='Maximum number of retries each connection should attempt. [Default: %default]')

    group.add_option(
        '--timeout',
        metavar='SECS',
        default=default_net_config.timeout,
        type=int,
        help='Set the socket timeout in seconds. [Default: %default]')

    group.add_option(
        '-H', '--header',
        dest='headers',
        metavar='NAME:VALUE',
        default=[],
        type=str,
        action='append',
        help='Additional HTTP headers to include in all requests.')

    group.add_option(
        '--proxy',
        type=str,
        default=None,
        help='Specify a proxy in the form [user:passwd@]proxy.server:port.')

    group.add_option(
        '--cert',
        metavar='PATH',
        type=str,
        default=None,
        help='Path to alternate CA bundle.')

    group.add_option(
        '--client-cert',
        metavar='PATH',
        type=str,
        default=None,
        help='Path to an SSL client certificate which should be a single file containing the private '
             'key and the certificate in PEM format.')

    group.add_option(
        '--pre', '--no-pre',
        dest='allow_prereleases',
        default=False,
        action='callback',
        callback=parse_bool,
        help='Whether to include pre-release and development versions of requirements; '
             'Default: only stable versions are used, unless explicitly requested')

    group.add_option(
        '--disable-cache',
        dest='disable_cache',
        default=False,
        action='store_true',
        help='Disable caching in the pex tool entirely.')

    group.add_option(
        '--cache-dir',
        dest='cache_dir',
        default=None,
        help='DEPRECATED: Use --pex-root instead. '
             'The local cache directory to use for speeding up requirement '
             'lookups. [Default: {}]'.format(ENV.PEX_ROOT))

    group.add_option(
        '--wheel', '--no-wheel', '--no-use-wheel',
        dest='use_wheel',
        default=True,
        action='callback',
        callback=parse_bool,
        help='Whether to allow wheel distributions; Default: allow wheels')

    group.add_option(
        '--build', '--no-build',
        dest='build',
        default=True,
        action='callback',
        callback=parse_bool,
        help='Whether to allow building of distributions from source; Default: allow builds')

    group.add_option(
        '--manylinux', '--no-manylinux', '--no-use-manylinux',
        dest='manylinux',
        type=str,
        default=_DEFAULT_MANYLINUX_STANDARD,
        action='callback',
        callback=process_manylinux,
        help=('Whether to allow resolution of manylinux wheels for linux target '
              'platforms; Default: allow manylinux wheels compatible with {}'
              .format(_DEFAULT_MANYLINUX_STANDARD)))

    group.add_option(
        '--transitive', '--no-transitive', '--intransitive',
        dest='transitive',
        default=True,
        action='callback',
        callback=process_transitive,
        help='Whether to transitively resolve requirements. Default: True')

    group.add_option(
        '-j', '--jobs',
        metavar='JOBS',
        dest='max_parallel_jobs',
        type=int,
        default=DEFAULT_MAX_JOBS,
        # Fix: the adjacent string literals previously read "...expense of other" "processes..."
        # which rendered as "otherprocesses" in --help output; a separating space was missing.
        help='The maximum number of parallel jobs to use when resolving, building and installing '
             'distributions. You might want to increase the maximum number of parallel jobs to '
             'potentially improve the latency of the pex creation process at the expense of other '
             'processes on your system. [Default: %default]')

    parser.add_option_group(group)
def build_pex(reqs, options, cache=None):
    """Build a PEX from parsed command line options.

    :param reqs: Requirement strings given directly on the command line.
    :param options: The parsed command line options namespace.
    :param cache: Optional resolver cache directory.
    :return: A configured (but not yet frozen) `PEXBuilder`.
    """
    interpreters = None  # Default to the current interpreter.

    pex_python_path = None  # Defaults to $PATH
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                # Accept either a path to an interpreter binary or a basename to look up.
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        # Trade any requested platforms we can satisfy locally for concrete local interpreters.
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                        interpreters.append(candidate_interpreter)
        if platforms:
            # Any platforms left over could not be matched locally and will be resolved
            # from pre-built binary distributions only.
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    # When several interpreters match, pick the lowest (min) as the build interpreter.
    interpreter = min(interpreters) if interpreters else None
    if options.use_first_matching_interpreter and interpreters:
        if len(interpreters) > 1:
            unused_interpreters = set(interpreters) - {interpreter}
            TRACER.log(
                "Multiple interpreters resolved, but only using {} because "
                "`--use-first-matching-interpreter` was used. These interpreters were matched but "
                "will not be used: {}".format(
                    interpreter.binary,
                    ", ".join(interpreter.binary for interpreter in sorted(unused_interpreters)),
                )
            )
        interpreters = [interpreter]

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    def walk_and_do(fn, src_dir):
        # Apply `fn` to every file under `src_dir`, keyed relative to `src_dir`.
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    # Transcribe runtime options into the PEX-INFO metadata.
    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    # If we're only building the PEX for the first of many interpreters due to
    # `--use-first-matching-interpreter` selection, we do not want to enable those same interpreter
    # constraints at runtime, where they could lead to a different interpreter being selected
    # leading to a failure to execute the PEX. Instead we rely on the shebang set by that single
    # interpreter to pick out a similar interpreter at runtime (for a CPython interpreter, the
    # shebang will be `#!/usr/bin/env pythonX.Y` which should generally be enough to select a
    # matching interpreter. To be clear though, there are many corners this will not work for
    # including mismatching abi (python2.7m vs python2.7mu) when the PEX contains platform specific
    # wheels, etc.
    if options.interpreter_constraint and not options.use_first_matching_interpreter:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )
        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    " %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
def configure_clp_pex_resolution(parser):
    # type: (ArgumentParser) -> None
    """Register the "Resolver options" argument group on `parser`.

    :param parser: The argparse parser to extend.
    """
    group = parser.add_argument_group(
        "Resolver options",
        "Tailor how to find, resolve and translate the packages that get put into the PEX "
        "environment.",
    )

    group.add_argument(
        "--resolver-version",
        dest="resolver_version",
        default=ResolverVersion.PIP_LEGACY.value,
        choices=[choice.value for choice in ResolverVersion.values],
        help="The dependency resolver version to use. Read more at "
        "https://pip.pypa.io/en/stable/user_guide/#resolver-changes-2020",
    )
    group.add_argument(
        "--pypi",
        "--no-pypi",
        "--no-index",
        dest="pypi",
        action=HandleBoolAction,
        default=True,
        help="Whether to use PyPI to resolve dependencies.",
    )
    group.add_argument(
        "--pex-path",
        dest="pex_path",
        type=str,
        default=None,
        help="A colon separated list of other pex files to merge into the runtime environment.",
    )
    group.add_argument(
        "-f",
        "--find-links",
        "--repo",
        metavar="PATH/URL",
        action="append",
        dest="find_links",
        type=str,
        default=[],
        help="Additional repository path (directory or URL) to look for requirements.",
    )
    group.add_argument(
        "-i",
        "--index",
        "--index-url",
        metavar="URL",
        action="append",
        dest="indexes",
        type=str,
        help="Additional cheeseshop indices to use to satisfy requirements.",
    )
    # Fix: previously registered via `parser.add_argument`, unlike every sibling option, which
    # left `--pex-repository` out of the "Resolver options" section of --help output.
    group.add_argument(
        "--pex-repository",
        dest="pex_repository",
        metavar="FILE",
        default=None,
        type=str,
        help=(
            "Resolve requirements from the given PEX file instead of from --index servers or "
            "--find-links repos."
        ),
    )

    # Network option defaults are sourced from a default NetworkConfiguration so that help text
    # and actual behavior stay in agreement.
    default_net_config = NetworkConfiguration.create()
    group.add_argument(
        "--cache-ttl",
        metavar="DEPRECATED",
        default=None,
        type=int,
        help="Deprecated: No longer used.",
    )
    group.add_argument(
        "--retries",
        default=default_net_config.retries,
        type=int,
        help="Maximum number of retries each connection should attempt.",
    )
    group.add_argument(
        "--timeout",
        metavar="SECS",
        default=default_net_config.timeout,
        type=int,
        help="Set the socket timeout in seconds.",
    )
    group.add_argument(
        "-H",
        "--header",
        dest="headers",
        metavar="DEPRECATED",
        default=None,
        type=str,
        action="append",
        help="Deprecated: No longer used.",
    )
    group.add_argument(
        "--proxy",
        type=str,
        default=None,
        help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
    )
    group.add_argument(
        "--cert", metavar="PATH", type=str, default=None, help="Path to alternate CA bundle."
    )
    group.add_argument(
        "--client-cert",
        metavar="PATH",
        type=str,
        default=None,
        help="Path to an SSL client certificate which should be a single file containing the private "
        "key and the certificate in PEM format.",
    )

    group.add_argument(
        "--pre",
        "--no-pre",
        dest="allow_prereleases",
        default=False,
        action=HandleBoolAction,
        help="Whether to include pre-release and development versions of requirements.",
    )
    group.add_argument(
        "--disable-cache",
        dest="disable_cache",
        default=False,
        action="store_true",
        help="Disable caching in the pex tool entirely.",
    )
    group.add_argument(
        "--cache-dir",
        dest="cache_dir",
        default=None,
        help="DEPRECATED: Use --pex-root instead. "
        "The local cache directory to use for speeding up requirement lookups.",
    )
    group.add_argument(
        "--wheel",
        "--no-wheel",
        "--no-use-wheel",
        dest="use_wheel",
        default=True,
        action=HandleBoolAction,
        help="Whether to allow wheel distributions.",
    )
    group.add_argument(
        "--build",
        "--no-build",
        dest="build",
        default=True,
        action=HandleBoolAction,
        help="Whether to allow building of distributions from source.",
    )
    group.add_argument(
        "--manylinux",
        "--no-manylinux",
        "--no-use-manylinux",
        dest="manylinux",
        type=str,
        default=_DEFAULT_MANYLINUX_STANDARD,
        action=ManylinuxAction,
        help="Whether to allow resolution of manylinux wheels for linux target platforms.",
    )
    group.add_argument(
        "--transitive",
        "--no-transitive",
        "--intransitive",
        dest="transitive",
        default=True,
        action=HandleTransitiveAction,
        help="Whether to transitively resolve requirements.",
    )
    group.add_argument(
        "-j",
        "--jobs",
        metavar="JOBS",
        dest="max_parallel_jobs",
        type=int,
        default=DEFAULT_MAX_JOBS,
        # Fix: the adjacent string literals previously read "...expense of other" "processes..."
        # which rendered as "otherprocesses" in --help output; a separating space was missing.
        help="The maximum number of parallel jobs to use when resolving, building and installing "
        "distributions. You might want to increase the maximum number of parallel jobs to "
        "potentially improve the latency of the pex creation process at the expense of other "
        "processes on your system.",
    )
def _calculate_package_index_options(self, indexes=None, find_links=None, network_configuration=None):
    """Yield the Pip command line options for the given index, find-links and network settings.

    :param indexes: Package indexes; `None` accepts pip defaults, `[]` turns off all index use.
    :param find_links: Additional find-links repos (paths or URLs) to search.
    :param network_configuration: Network settings; defaults to a fresh default configuration.
    """
    trusted_hosts = []

    def maybe_trust_insecure_host(url):
        url_info = urlparse.urlparse(url)
        if 'http' == url_info.scheme:
            # Implicitly trust explicitly asked for http indexes and find_links repos instead of
            # requiring separate trust configuration.
            trusted_hosts.append(url_info.netloc)
        return url

    # N.B.: We interpret None to mean accept pip index defaults, [] to mean turn off all index use.
    if indexes is not None:
        if len(indexes) == 0:
            yield '--no-index'
        else:
            # The first index is the primary `--index-url`; the rest become extra indexes.
            all_indexes = deque(indexes)
            yield '--index-url'
            yield maybe_trust_insecure_host(all_indexes.popleft())
            if all_indexes:
                for extra_index in all_indexes:
                    yield '--extra-index-url'
                    yield maybe_trust_insecure_host(extra_index)

    if find_links:
        for find_link_url in find_links:
            yield '--find-links'
            yield maybe_trust_insecure_host(find_link_url)

    # N.B.: `trusted_hosts` is only fully populated once the index/find-links options above have
    # been consumed from this generator, so this loop must come after them.
    for trusted_host in trusted_hosts:
        yield '--trusted-host'
        yield trusted_host

    network_configuration = network_configuration or NetworkConfiguration.create()

    # N.B.: Pip sends `Cache-Control: max-age=0` by default which turns off HTTP caching as per
    # the spec, so we override it with the configured cache TTL.
    yield '--header'
    yield 'Cache-Control:max-age={}'.format(network_configuration.cache_ttl)
    for header in network_configuration.headers:
        yield '--header'
        yield header

    yield '--retries'
    yield str(network_configuration.retries)
    yield '--timeout'
    yield str(network_configuration.timeout)

    if network_configuration.proxy:
        yield '--proxy'
        yield network_configuration.proxy

    if network_configuration.cert:
        yield '--cert'
        yield network_configuration.cert

    if network_configuration.client_cert:
        yield '--client-cert'
        yield network_configuration.client_cert