def convert(self, value, param, ctx):
    """
    Fetches the parameter value as a URL using ``requests.get``.

    A custom User-Agent header is used and a ten-second timeout is set,
    but otherwise no alterations are made to the defaults (i.e. no
    authentication, no cookies). Any error causes the command to fail.

    :param value: the URL string supplied on the command line
    :param param: the click parameter being converted (passed to ``fail``)
    :param ctx: the click context, or ``None`` when converting outside a
        command invocation
    :returns: the open ``requests.Response`` for the URL
    """
    try:
        response = requests.get(value, timeout=10, headers={"User-Agent": self.USER_AGENT})
        # Tie the response lifetime to the command: close it when the click
        # context is torn down (safecall swallows errors from close()).
        if ctx is not None:
            ctx.call_on_close(safecall(response.close))
        response.raise_for_status()
    # Each requests failure mode maps to a distinct user-facing message;
    # self.fail() raises, so control never falls through these handlers.
    except requests.exceptions.ConnectionError:
        self.fail("Connection error ({})".format(value), param, ctx)
    except requests.exceptions.Timeout:
        self.fail("Time out ({})".format(value), param, ctx)
    except requests.exceptions.TooManyRedirects:
        self.fail("Too many redirects ({})".format(value), param, ctx)
    except requests.exceptions.HTTPError:
        # `response` is always bound here: HTTPError is only raised by
        # raise_for_status(), which runs after the assignment succeeds.
        self.fail("HTTP {} {} ({})".format(response.status_code, response.reason, value), param, ctx)
    except requests.exceptions.RequestException:
        self.fail("Request error ({})".format(value), param, ctx)
    return response
def cli(
    ctx,
    verbose,
    quiet,
    dry_run,
    pre,
    rebuild,
    find_links,
    index_url,
    extra_index_url,
    cert,
    client_cert,
    trusted_host,
    header,
    index,
    emit_trusted_host,
    annotate,
    upgrade,
    upgrade_packages,
    output_file,
    allow_unsafe,
    generate_hashes,
    src_files,
    max_rounds,
    build_isolation,
):
    """Compiles requirements.txt from requirements.in specs."""
    # NOTE(review): this file defines `cli` more than once; at import time
    # only the last definition is bound to the name.

    # Verbosity is the net of -v and -q flag counts.
    log.verbosity = verbose - quiet

    # No input files given: fall back to requirements.in, then setup.py.
    if len(src_files) == 0:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        elif os.path.exists("setup.py"):
            src_files = ("setup.py",)
        else:
            raise click.BadParameter(("If you do not specify an input file, "
                                      "the default is {} or setup.py"
                                      ).format(DEFAULT_REQUIREMENTS_FILE))

    if not output_file:
        # An output file must be provided for stdin
        if src_files == ("-",):
            raise click.BadParameter(
                "--output-file is required if input is from stdin")
        # Use default requirements output file if there is a setup.py the source file
        elif src_files == ("setup.py",):
            file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
        # An output file must be provided if there are multiple source files
        elif len(src_files) > 1:
            raise click.BadParameter(
                "--output-file is required if two or more input files are given."
            )
        # Otherwise derive the output file from the source file
        else:
            base_name = src_files[0].rsplit(".", 1)[0]
            file_name = base_name + ".txt"

        # Atomic + lazy: the file is only created/replaced on successful close.
        output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)

        # Close the file at the end of the context execution
        ctx.call_on_close(safecall(output_file.close_intelligently))

    # --upgrade re-resolves everything while --upgrade-package pins a subset,
    # so the two options are mutually exclusive.
    if upgrade and upgrade_packages:
        raise click.BadParameter(
            "Only one of --upgrade or --upgrade-package can be provided as an argument."
        )

    ###
    # Setup
    ###

    # Re-encode our CLI options as pip command-line arguments so pip's own
    # parser yields the options/session objects PyPIRepository expects.
    pip_command = get_pip_command()

    pip_args = []
    if find_links:
        for link in find_links:
            pip_args.extend(["-f", link])
    if index_url:
        pip_args.extend(["-i", index_url])
    if extra_index_url:
        for extra_index in extra_index_url:
            pip_args.extend(["--extra-index-url", extra_index])
    if cert:
        pip_args.extend(["--cert", cert])
    if client_cert:
        pip_args.extend(["--client-cert", client_cert])
    if pre:
        pip_args.extend(["--pre"])
    if trusted_host:
        for host in trusted_host:
            pip_args.extend(["--trusted-host", host])

    pip_options, _ = pip_command.parse_args(pip_args)

    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session, build_isolation)

    upgrade_install_reqs = {}
    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(output_file.name):
        # Read the previously-compiled output so existing pins can be reused.
        ireqs = parse_requirements(
            output_file.name,
            finder=repository.finder,
            session=repository.session,
            options=pip_options,
        )
        # Exclude packages from --upgrade-package/-P from
        # the existing pins: We want to upgrade.
        upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
        upgrade_install_reqs = {
            key_from_req(install_req.req): install_req
            for install_req in upgrade_reqs_gen
        }

        existing_pins = {
            key_from_req(ireq.req): ireq
            for ireq in ireqs
            if is_pinned_requirement(ireq)
            and key_from_req(ireq.req) not in upgrade_install_reqs
        }
        repository = LocalRequirementsRepository(existing_pins, repository)

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) == "setup.py"
        if is_setup_file or src_file == "-":
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. also used for
            # reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            if is_setup_file:
                # Local import: distutils is only needed on this path.
                from distutils.core import run_setup
                dist = run_setup(src_file)
                tmpfile.write("\n".join(dist.install_requires))
            else:
                tmpfile.write(sys.stdin.read())
            tmpfile.flush()

            constraints.extend(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=pip_options,
                ))
        else:
            constraints.extend(
                parse_requirements(
                    src_file,
                    finder=repository.finder,
                    session=repository.session,
                    options=pip_options,
                ))

    # --upgrade-package requests are added as additional constraints.
    constraints.extend(upgrade_install_reqs.values())

    # Filter out pip environment markers which do not match (PEP496)
    constraints = [
        req for req in constraints
        if req.markers is None or req.markers.evaluate()
    ]

    log.debug("Using indexes:")
    # remove duplicate index urls before processing
    repository.finder.index_urls = list(dedup(repository.finder.index_urls))
    for index_url in repository.finder.index_urls:
        log.debug(" {}".format(index_url))

    if repository.finder.find_links:
        log.debug("")
        log.debug("Configuration:")
        for find_link in repository.finder.find_links:
            log.debug(" -f {}".format(find_link))

    try:
        resolver = Resolver(
            constraints,
            repository,
            prereleases=pre,
            clear_caches=rebuild,
            allow_unsafe=allow_unsafe,
        )
        results = resolver.resolve(max_rounds=max_rounds)
        if generate_hashes:
            hashes = resolver.resolve_hashes(results)
        else:
            hashes = None
    except PipToolsError as e:
        # Resolution failures exit with status 2 after logging the reason.
        log.error(str(e))
        sys.exit(2)

    log.debug("")

    ##
    # Output
    ##

    # Compute reverse dependency annotations statically, from the
    # dependency cache that the resolver has populated by now.
    #
    # TODO (1a): reverse deps for any editable package are lost
    #            what SHOULD happen is that they are cached in memory, just
    #            not persisted to disk!
    #
    # TODO (1b): perhaps it's easiest if the dependency cache has an API
    #            that could take InstallRequirements directly, like:
    #
    #                cache.set(ireq, ...)
    #
    #            then, when ireq is editable, it would store in
    #
    #                editables[egg_name][link_without_fragment] = deps
    #                editables['pip-tools']['git+...ols.git@future'] = {
    #                    'click>=3.0', 'six'
    #                }
    #
    #            otherwise:
    #
    #                self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}
    #
    reverse_dependencies = None
    if annotate:
        reverse_dependencies = resolver.reverse_dependencies(results)

    writer = OutputWriter(
        src_files,
        output_file,
        click_ctx=ctx,
        dry_run=dry_run,
        emit_header=header,
        emit_index=index,
        emit_trusted_host=emit_trusted_host,
        annotate=annotate,
        generate_hashes=generate_hashes,
        default_index_url=repository.DEFAULT_INDEX_URL,
        index_urls=repository.finder.index_urls,
        trusted_hosts=pip_options.trusted_hosts,
        format_control=repository.finder.format_control,
        allow_unsafe=allow_unsafe,
        find_links=repository.finder.find_links,
    )
    writer.write(
        results=results,
        unsafe_requirements=resolver.unsafe_constraints,
        reverse_dependencies=reverse_dependencies,
        # Primary packages are the user's direct (non-constraint) requirements.
        primary_packages={
            key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
        },
        markers={
            key_from_ireq(ireq): ireq.markers
            for ireq in constraints if ireq.markers
        },
        hashes=hashes,
    )

    if dry_run:
        log.warning("Dry-run, so nothing updated.")
def cli(
    ctx: click.Context,
    verbose: int,
    quiet: int,
    dry_run: bool,
    pre: bool,
    rebuild: bool,
    find_links: Tuple[str],
    index_url: str,
    extra_index_url: Tuple[str],
    cert: Optional[str],
    client_cert: Optional[str],
    trusted_host: Tuple[str],
    header: bool,
    emit_trusted_host: bool,
    annotate: bool,
    upgrade: bool,
    upgrade_packages: Tuple[str],
    output_file: Optional[LazyFile],
    allow_unsafe: bool,
    generate_hashes: bool,
    reuse_hashes: bool,
    src_files: Tuple[str],
    max_rounds: int,
    build_isolation: bool,
    emit_find_links: bool,
    cache_dir: str,
    pip_args_str: Optional[str],
    emit_index_url: bool,
) -> None:
    """Compiles requirements.txt from requirements.in specs."""
    # NOTE(review): this file defines `cli` more than once; at import time
    # only the last definition is bound to the name.

    # Verbosity is the net of -v and -q flag counts.
    log.verbosity = verbose - quiet

    # No input files given: fall back to requirements.in, then setup.py.
    if len(src_files) == 0:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        elif os.path.exists("setup.py"):
            src_files = ("setup.py",)
        else:
            raise click.BadParameter(("If you do not specify an input file, "
                                      "the default is {} or setup.py"
                                      ).format(DEFAULT_REQUIREMENTS_FILE))

    if not output_file:
        # An output file must be provided for stdin
        if src_files == ("-",):
            raise click.BadParameter(
                "--output-file is required if input is from stdin")
        # Use default requirements output file if there is a setup.py the source file
        elif os.path.basename(src_files[0]) in METADATA_FILENAMES:
            # Keep the output next to the metadata file it was derived from.
            file_name = os.path.join(os.path.dirname(src_files[0]),
                                     DEFAULT_REQUIREMENTS_OUTPUT_FILE)
        # An output file must be provided if there are multiple source files
        elif len(src_files) > 1:
            raise click.BadParameter(
                "--output-file is required if two or more input files are given."
            )
        # Otherwise derive the output file from the source file
        else:
            base_name = src_files[0].rsplit(".", 1)[0]
            file_name = base_name + ".txt"

        # Atomic + lazy: the file is only created/replaced on successful close.
        output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)

        # Close the file at the end of the context execution
        assert output_file is not None  # narrows Optional for type checkers
        ctx.call_on_close(safecall(output_file.close_intelligently))

    ###
    # Setup
    ###

    # Raw --pip-args string is split shell-style and appended last so it can
    # override the arguments generated from our own options below.
    right_args = shlex.split(pip_args_str or "")
    pip_args = []
    for link in find_links:
        pip_args.extend(["-f", link])
    if index_url:
        pip_args.extend(["-i", index_url])
    for extra_index in extra_index_url:
        pip_args.extend(["--extra-index-url", extra_index])
    if cert:
        pip_args.extend(["--cert", cert])
    if client_cert:
        pip_args.extend(["--client-cert", client_cert])
    if pre:
        pip_args.extend(["--pre"])
    for host in trusted_host:
        pip_args.extend(["--trusted-host", host])
    if not build_isolation:
        pip_args.append("--no-build-isolation")
    pip_args.extend(right_args)

    repository: BaseRepository
    repository = PyPIRepository(pip_args, cache_dir=cache_dir)

    # Parse all constraints coming from --upgrade-package/-P
    upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
    upgrade_install_reqs = {
        key_from_ireq(install_req): install_req
        for install_req in upgrade_reqs_gen
    }

    existing_pins_to_upgrade = set()

    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(output_file.name):
        # Use a temporary repository to ensure outdated(removed) options from
        # existing requirements.txt wouldn't get into the current repository.
        tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
        ireqs = parse_requirements(
            output_file.name,
            finder=tmp_repository.finder,
            session=tmp_repository.session,
            options=tmp_repository.options,
        )

        # Exclude packages from --upgrade-package/-P from the existing
        # constraints, and separately gather pins to be upgraded
        existing_pins = {}
        for ireq in filter(is_pinned_requirement, ireqs):
            key = key_from_ireq(ireq)
            if key in upgrade_install_reqs:
                existing_pins_to_upgrade.add(key)
            else:
                existing_pins[key] = ireq
        repository = LocalRequirementsRepository(existing_pins, repository,
                                                 reuse_hashes=reuse_hashes)

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) in METADATA_FILENAMES
        if src_file == "-":
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. also used for
            # reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            tmpfile.write(sys.stdin.read())
            comes_from = "-r -"
            tmpfile.flush()
            reqs = list(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                ))
            # Rewrite provenance so output annotations show "-r -" instead of
            # the throwaway temp file path.
            for req in reqs:
                req.comes_from = comes_from
            constraints.extend(reqs)
        elif is_setup_file:
            # Build metadata via PEP 517 (pep517.meta) instead of executing
            # setup.py directly.
            dist = meta.load(os.path.dirname(os.path.abspath(src_file)))
            comes_from = f"{dist.metadata.get_all('Name')[0]} ({src_file})"
            constraints.extend([
                install_req_from_line(req, comes_from=comes_from)
                for req in dist.requires or []
            ])
        else:
            constraints.extend(
                parse_requirements(
                    src_file,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                ))

    # Primary packages are the user's direct (non-constraint) requirements.
    primary_packages = {
        key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
    }

    # Only honor -P entries that are direct requirements or existing pins;
    # transitive-only packages cannot be force-upgraded.
    allowed_upgrades = primary_packages | existing_pins_to_upgrade
    constraints.extend(ireq for key, ireq in upgrade_install_reqs.items()
                       if key in allowed_upgrades)

    # Filter out pip environment markers which do not match (PEP496)
    constraints = [
        req for req in constraints if req.markers is None
        # We explicitly set extra=None to filter out optional requirements
        # since evaluating an extra marker with no environment raises UndefinedEnvironmentName
        # (see https://packaging.pypa.io/en/latest/markers.html#usage)
        or req.markers.evaluate({"extra": None})
    ]

    log.debug("Using indexes:")
    with log.indentation():
        for index_url in dedup(repository.finder.index_urls):
            # Credentials embedded in index URLs are masked before logging.
            log.debug(redact_auth_from_url(index_url))

    if repository.finder.find_links:
        log.debug("")
        log.debug("Using links:")
        with log.indentation():
            for find_link in dedup(repository.finder.find_links):
                log.debug(redact_auth_from_url(find_link))

    try:
        resolver = Resolver(
            constraints,
            repository,
            prereleases=repository.finder.allow_all_prereleases or pre,
            cache=DependencyCache(cache_dir),
            clear_caches=rebuild,
            allow_unsafe=allow_unsafe,
        )
        results = resolver.resolve(max_rounds=max_rounds)
        hashes = resolver.resolve_hashes(results) if generate_hashes else None
    except PipToolsError as e:
        # Resolution failures exit with status 2 after logging the reason.
        log.error(str(e))
        sys.exit(2)

    log.debug("")

    ##
    # Output
    ##

    writer = OutputWriter(
        cast(BinaryIO, output_file),
        click_ctx=ctx,
        dry_run=dry_run,
        emit_header=header,
        emit_index_url=emit_index_url,
        emit_trusted_host=emit_trusted_host,
        annotate=annotate,
        generate_hashes=generate_hashes,
        default_index_url=repository.DEFAULT_INDEX_URL,
        index_urls=repository.finder.index_urls,
        trusted_hosts=repository.finder.trusted_hosts,
        format_control=repository.finder.format_control,
        allow_unsafe=allow_unsafe,
        find_links=repository.finder.find_links,
        emit_find_links=emit_find_links,
    )

    writer.write(
        results=results,
        unsafe_requirements=resolver.unsafe_constraints,
        markers={
            key_from_ireq(ireq): ireq.markers
            for ireq in constraints if ireq.markers
        },
        hashes=hashes,
    )

    if dry_run:
        log.info("Dry-run, so nothing updated.")
def cli(
    ctx,
    verbose,
    quiet,
    dry_run,
    pre,
    rebuild,
    find_links,
    index_url,
    extra_index_url,
    cert,
    client_cert,
    trusted_host,
    header,
    index,
    emit_trusted_host,
    annotate,
    upgrade,
    upgrade_packages,
    output_file,
    allow_unsafe,
    generate_hashes,
    src_files,
    max_rounds,
    build_isolation,
    emit_find_links,
    cache_dir,
):
    """Compiles requirements.txt from requirements.in specs."""
    # NOTE(review): this file defines `cli` more than once; at import time
    # only the last definition is bound to the name.

    # Verbosity is the net of -v and -q flag counts.
    log.verbosity = verbose - quiet

    # No input files given: fall back to requirements.in, then setup.py.
    if len(src_files) == 0:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        elif os.path.exists("setup.py"):
            src_files = ("setup.py",)
        else:
            raise click.BadParameter(("If you do not specify an input file, "
                                      "the default is {} or setup.py"
                                      ).format(DEFAULT_REQUIREMENTS_FILE))

    if not output_file:
        # An output file must be provided for stdin
        if src_files == ("-",):
            raise click.BadParameter(
                "--output-file is required if input is from stdin")
        # Use default requirements output file if there is a setup.py the source file
        elif src_files == ("setup.py",):
            file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
        # An output file must be provided if there are multiple source files
        elif len(src_files) > 1:
            raise click.BadParameter(
                "--output-file is required if two or more input files are given."
            )
        # Otherwise derive the output file from the source file
        else:
            base_name = src_files[0].rsplit(".", 1)[0]
            file_name = base_name + ".txt"

        # Atomic + lazy: the file is only created/replaced on successful close.
        output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)

        # Close the file at the end of the context execution
        ctx.call_on_close(safecall(output_file.close_intelligently))

    ###
    # Setup
    ###

    # Re-encode our CLI options as pip command-line arguments for the
    # repository's internal pip invocation.
    pip_args = []
    if find_links:
        for link in find_links:
            pip_args.extend(["-f", link])
    if index_url:
        pip_args.extend(["-i", index_url])
    if extra_index_url:
        for extra_index in extra_index_url:
            pip_args.extend(["--extra-index-url", extra_index])
    if cert:
        pip_args.extend(["--cert", cert])
    if client_cert:
        pip_args.extend(["--client-cert", client_cert])
    if pre:
        pip_args.extend(["--pre"])
    if trusted_host:
        for host in trusted_host:
            pip_args.extend(["--trusted-host", host])

    repository = PyPIRepository(pip_args, build_isolation=build_isolation,
                                cache_dir=cache_dir)

    # Parse all constraints coming from --upgrade-package/-P
    upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
    upgrade_install_reqs = {
        key_from_ireq(install_req): install_req
        for install_req in upgrade_reqs_gen
    }

    existing_pins_to_upgrade = set()

    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(output_file.name):
        # Use a temporary repository to ensure outdated(removed) options from
        # existing requirements.txt wouldn't get into the current repository.
        tmp_repository = PyPIRepository(pip_args, build_isolation=build_isolation,
                                        cache_dir=cache_dir)
        ireqs = parse_requirements(
            output_file.name,
            finder=tmp_repository.finder,
            session=tmp_repository.session,
            options=tmp_repository.options,
        )

        # Exclude packages from --upgrade-package/-P from the existing
        # constraints, and separately gather pins to be upgraded
        existing_pins = {}
        for ireq in filter(is_pinned_requirement, ireqs):
            key = key_from_ireq(ireq)
            if key in upgrade_install_reqs:
                existing_pins_to_upgrade.add(key)
            else:
                existing_pins[key] = ireq
        repository = LocalRequirementsRepository(existing_pins, repository)

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) == "setup.py"
        if is_setup_file or src_file == "-":
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. also used for
            # reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            if is_setup_file:
                # Local import: distutils is only needed on this path.
                from distutils.core import run_setup
                dist = run_setup(src_file)
                tmpfile.write("\n".join(dist.install_requires))
                # NOTE(review): the `filename` kwarg below is never referenced
                # by the format string — this likely should read
                # "{name} ({filename})"; cannot be confirmed/changed here.
                comes_from = "{name} ((unknown))".format(name=dist.get_name(),
                                                         filename=src_file)
            else:
                tmpfile.write(sys.stdin.read())
                comes_from = "-r -"
            tmpfile.flush()
            reqs = list(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                ))
            # Rewrite provenance so output annotations don't show the
            # throwaway temp file path.
            for req in reqs:
                req.comes_from = comes_from
            constraints.extend(reqs)
        else:
            constraints.extend(
                parse_requirements(
                    src_file,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                ))

    # Primary packages are the user's direct (non-constraint) requirements.
    primary_packages = {
        key_from_ireq(ireq) for ireq in constraints if not ireq.constraint
    }

    # Only honor -P entries that are direct requirements or existing pins.
    allowed_upgrades = primary_packages | existing_pins_to_upgrade
    constraints.extend(ireq for key, ireq in upgrade_install_reqs.items()
                       if key in allowed_upgrades)

    # Filter out pip environment markers which do not match (PEP496)
    constraints = [
        req for req in constraints
        if req.markers is None or req.markers.evaluate()
    ]

    log.debug("Using indexes:")
    for index_url in dedup(repository.finder.index_urls):
        log.debug(" {}".format(index_url))

    if repository.finder.find_links:
        log.debug("")
        log.debug("Configuration:")
        for find_link in dedup(repository.finder.find_links):
            log.debug(" -f {}".format(find_link))

    try:
        resolver = Resolver(
            constraints,
            repository,
            prereleases=repository.finder.allow_all_prereleases or pre,
            cache=DependencyCache(cache_dir),
            clear_caches=rebuild,
            allow_unsafe=allow_unsafe,
        )
        results = resolver.resolve(max_rounds=max_rounds)
        if generate_hashes:
            hashes = resolver.resolve_hashes(results)
        else:
            hashes = None
    except PipToolsError as e:
        # Resolution failures exit with status 2 after logging the reason.
        log.error(str(e))
        sys.exit(2)

    log.debug("")

    ##
    # Output
    ##

    writer = OutputWriter(
        src_files,
        output_file,
        click_ctx=ctx,
        dry_run=dry_run,
        emit_header=header,
        emit_index=index,
        emit_trusted_host=emit_trusted_host,
        annotate=annotate,
        generate_hashes=generate_hashes,
        default_index_url=repository.DEFAULT_INDEX_URL,
        index_urls=repository.finder.index_urls,
        trusted_hosts=repository.finder.trusted_hosts,
        format_control=repository.finder.format_control,
        allow_unsafe=allow_unsafe,
        find_links=repository.finder.find_links,
        emit_find_links=emit_find_links,
    )

    writer.write(
        results=results,
        unsafe_requirements=resolver.unsafe_constraints,
        markers={
            key_from_ireq(ireq): ireq.markers
            for ireq in constraints if ireq.markers
        },
        hashes=hashes,
    )

    if dry_run:
        log.info("Dry-run, so nothing updated.")
def cli(
    ctx,
    verbose,
    quiet,
    dry_run,
    pre,
    rebuild,
    find_links,
    index_url,
    extra_index_url,
    cert,
    client_cert,
    trusted_host,
    header,
    index,
    emit_trusted_host,
    annotate,
    upgrade,
    upgrade_packages,
    output_file,
    allow_unsafe,
    generate_hashes,
    src_files,
    max_rounds,
    build_isolation,
):
    """Compiles requirements.txt from requirements.in specs."""
    # NOTE(review): this file defines `cli` more than once; at import time
    # only the last definition is bound to the name.

    # Verbosity is the net of -v and -q flag counts.
    log.verbosity = verbose - quiet

    # No input files given: fall back to requirements.in, then setup.py.
    if len(src_files) == 0:
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE,)
        elif os.path.exists("setup.py"):
            src_files = ("setup.py",)
        else:
            raise click.BadParameter(
                (
                    "If you do not specify an input file, "
                    "the default is {} or setup.py"
                ).format(DEFAULT_REQUIREMENTS_FILE)
            )

    if not output_file:
        # An output file must be provided for stdin
        if src_files == ("-",):
            raise click.BadParameter("--output-file is required if input is from stdin")
        # Use default requirements output file if there is a setup.py the source file
        elif src_files == ("setup.py",):
            file_name = DEFAULT_REQUIREMENTS_OUTPUT_FILE
        # An output file must be provided if there are multiple source files
        elif len(src_files) > 1:
            raise click.BadParameter(
                "--output-file is required if two or more input files are given."
            )
        # Otherwise derive the output file from the source file
        else:
            base_name = src_files[0].rsplit(".", 1)[0]
            file_name = base_name + ".txt"

        # Atomic + lazy: the file is only created/replaced on successful close.
        output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)

        # Close the file at the end of the context execution
        ctx.call_on_close(safecall(output_file.close_intelligently))

    # --upgrade re-resolves everything while --upgrade-package pins a subset,
    # so the two options are mutually exclusive.
    if upgrade and upgrade_packages:
        raise click.BadParameter(
            "Only one of --upgrade or --upgrade-package can be provided as an argument."
        )

    ###
    # Setup
    ###

    # Re-encode our CLI options as pip command-line arguments so pip's own
    # parser yields the options/session objects PyPIRepository expects.
    pip_command = get_pip_command()

    pip_args = []
    if find_links:
        for link in find_links:
            pip_args.extend(["-f", link])
    if index_url:
        pip_args.extend(["-i", index_url])
    if extra_index_url:
        for extra_index in extra_index_url:
            pip_args.extend(["--extra-index-url", extra_index])
    if cert:
        pip_args.extend(["--cert", cert])
    if client_cert:
        pip_args.extend(["--client-cert", client_cert])
    if pre:
        pip_args.extend(["--pre"])
    if trusted_host:
        for host in trusted_host:
            pip_args.extend(["--trusted-host", host])

    pip_options, _ = pip_command.parse_args(pip_args)

    session = pip_command._build_session(pip_options)
    repository = PyPIRepository(pip_options, session, build_isolation)

    upgrade_install_reqs = {}
    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(output_file.name):
        # Read the previously-compiled output so existing pins can be reused.
        ireqs = parse_requirements(
            output_file.name,
            finder=repository.finder,
            session=repository.session,
            options=pip_options,
        )
        # Exclude packages from --upgrade-package/-P from
        # the existing pins: We want to upgrade.
        upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
        upgrade_install_reqs = {
            key_from_req(install_req.req): install_req
            for install_req in upgrade_reqs_gen
        }

        existing_pins = {
            key_from_req(ireq.req): ireq
            for ireq in ireqs
            if is_pinned_requirement(ireq)
            and key_from_req(ireq.req) not in upgrade_install_reqs
        }
        repository = LocalRequirementsRepository(existing_pins, repository)

    log.debug("Using indexes:")
    # remove duplicate index urls before processing
    repository.finder.index_urls = list(dedup(repository.finder.index_urls))
    for index_url in repository.finder.index_urls:
        log.debug(" {}".format(index_url))

    if repository.finder.find_links:
        log.debug("")
        log.debug("Configuration:")
        for find_link in repository.finder.find_links:
            log.debug(" -f {}".format(find_link))

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) == "setup.py"
        if is_setup_file or src_file == "-":
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. also used for
            # reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            if is_setup_file:
                # Local import: distutils is only needed on this path.
                from distutils.core import run_setup

                dist = run_setup(src_file)
                tmpfile.write("\n".join(dist.install_requires))
            else:
                tmpfile.write(sys.stdin.read())
            tmpfile.flush()

            constraints.extend(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=pip_options,
                )
            )
        else:
            constraints.extend(
                parse_requirements(
                    src_file,
                    finder=repository.finder,
                    session=repository.session,
                    options=pip_options,
                )
            )

    # --upgrade-package requests are added as additional constraints.
    constraints.extend(upgrade_install_reqs.values())

    # Filter out pip environment markers which do not match (PEP496)
    constraints = [
        req for req in constraints if req.markers is None or req.markers.evaluate()
    ]

    # Check the given base set of constraints first
    Resolver.check_constraints(constraints)

    try:
        resolver = Resolver(
            constraints,
            repository,
            prereleases=pre,
            clear_caches=rebuild,
            allow_unsafe=allow_unsafe,
        )
        results = resolver.resolve(max_rounds=max_rounds)
        if generate_hashes:
            hashes = resolver.resolve_hashes(results)
        else:
            hashes = None
    except PipToolsError as e:
        # Resolution failures exit with status 2 after logging the reason.
        log.error(str(e))
        sys.exit(2)

    log.debug("")

    ##
    # Output
    ##

    # Compute reverse dependency annotations statically, from the
    # dependency cache that the resolver has populated by now.
    #
    # TODO (1a): reverse deps for any editable package are lost
    #            what SHOULD happen is that they are cached in memory, just
    #            not persisted to disk!
    #
    # TODO (1b): perhaps it's easiest if the dependency cache has an API
    #            that could take InstallRequirements directly, like:
    #
    #                cache.set(ireq, ...)
    #
    #            then, when ireq is editable, it would store in
    #
    #                editables[egg_name][link_without_fragment] = deps
    #                editables['pip-tools']['git+...ols.git@future'] = {
    #                    'click>=3.0', 'six'
    #                }
    #
    #            otherwise:
    #
    #                self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}
    #
    reverse_dependencies = None
    if annotate:
        reverse_dependencies = resolver.reverse_dependencies(results)

    writer = OutputWriter(
        src_files,
        output_file,
        dry_run=dry_run,
        emit_header=header,
        emit_index=index,
        emit_trusted_host=emit_trusted_host,
        annotate=annotate,
        generate_hashes=generate_hashes,
        default_index_url=repository.DEFAULT_INDEX_URL,
        index_urls=repository.finder.index_urls,
        trusted_hosts=pip_options.trusted_hosts,
        format_control=repository.finder.format_control,
        allow_unsafe=allow_unsafe,
    )
    writer.write(
        results=results,
        unsafe_requirements=resolver.unsafe_constraints,
        reverse_dependencies=reverse_dependencies,
        # Primary packages are the user's direct (non-constraint) requirements.
        primary_packages={
            key_from_req(ireq.req) for ireq in constraints if not ireq.constraint
        },
        markers={
            key_from_req(ireq.req): ireq.markers
            for ireq in constraints
            if ireq.markers
        },
        hashes=hashes,
    )

    if dry_run:
        log.warning("Dry-run, so nothing updated.")