def test_classic_package(tmpdir):
    """A distutils-style setup.py package exposes its name and version via meta.load()."""
    setup_py = tmpdir / 'setup.py'
    setup_py.write_text(
        'from distutils.core import setup; setup(name="foo", version="1.0")',
        encoding='utf-8',
    )
    dist = meta.load(str(tmpdir))
    assert dist.metadata['Name'] == 'foo'
    assert dist.version == '1.0'
def cli(
    ctx: click.Context,
    verbose: int,
    quiet: int,
    dry_run: bool,
    pre: bool,
    rebuild: bool,
    find_links: Tuple[str],
    index_url: str,
    extra_index_url: Tuple[str],
    cert: Optional[str],
    client_cert: Optional[str],
    trusted_host: Tuple[str],
    header: bool,
    emit_trusted_host: bool,
    annotate: bool,
    upgrade: bool,
    upgrade_packages: Tuple[str],
    output_file: Optional[LazyFile],
    allow_unsafe: bool,
    generate_hashes: bool,
    reuse_hashes: bool,
    src_files: Tuple[str],
    max_rounds: int,
    build_isolation: bool,
    emit_find_links: bool,
    cache_dir: str,
    pip_args_str: Optional[str],
    emit_index_url: bool,
) -> None:
    """Compiles requirements.txt from requirements.in specs.

    High-level flow, as implemented below:

    1. Pick default input files (``src_files``) and derive the output file
       when the user did not specify them.
    2. Translate the CLI options into pip command-line arguments and build a
       PyPI-backed repository.
    3. If ``--upgrade`` was NOT given, wrap the repository in a
       LocalRequirementsRepository seeded with the pins already present in the
       existing output file, so unrelated packages stay pinned.
    4. Collect constraints from each input source (stdin, setup.py-style
       metadata files, or requirements files) plus any ``--upgrade-package``
       requirements, then drop constraints whose environment markers do not
       match (PEP 496).
    5. Resolve, optionally compute hashes, and write the output file.

    Exits the process with status 2 on any PipToolsError.
    """
    # verbosity is the net of repeated -v and -q flags
    log.verbosity = verbose - quiet

    if len(src_files) == 0:
        # No inputs given: fall back to the conventional filenames, in order
        # of preference.
        if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
            src_files = (DEFAULT_REQUIREMENTS_FILE, )
        elif os.path.exists("setup.py"):
            src_files = ("setup.py", )
        else:
            raise click.BadParameter(("If you do not specify an input file, "
                                      "the default is {} or setup.py"
                                      ).format(DEFAULT_REQUIREMENTS_FILE))

    if not output_file:
        # An output file must be provided for stdin
        if src_files == ("-", ):
            raise click.BadParameter(
                "--output-file is required if input is from stdin")
        # Use the default requirements output file if the source file is a
        # setup.py-style metadata file
        elif os.path.basename(src_files[0]) in METADATA_FILENAMES:
            file_name = os.path.join(os.path.dirname(src_files[0]),
                                     DEFAULT_REQUIREMENTS_OUTPUT_FILE)
        # An output file must be provided if there are multiple source files
        elif len(src_files) > 1:
            raise click.BadParameter(
                "--output-file is required if two or more input files are given."
            )
        # Otherwise derive the output file from the source file
        # (e.g. requirements.in -> requirements.txt)
        else:
            base_name = src_files[0].rsplit(".", 1)[0]
            file_name = base_name + ".txt"

        # atomic+lazy: nothing is written (or even created) until the writer
        # actually emits output, and the final rename is atomic.
        output_file = click.open_file(file_name, "w+b", atomic=True, lazy=True)

        # Close the file at the end of the context execution
        assert output_file is not None
        ctx.call_on_close(safecall(output_file.close_intelligently))

    ###
    # Setup
    ###

    # Extra pip arguments passed verbatim by the user via --pip-args
    right_args = shlex.split(pip_args_str or "")

    # Re-encode the relevant CLI options as pip command-line arguments
    pip_args = []
    for link in find_links:
        pip_args.extend(["-f", link])
    if index_url:
        pip_args.extend(["-i", index_url])
    for extra_index in extra_index_url:
        pip_args.extend(["--extra-index-url", extra_index])
    if cert:
        pip_args.extend(["--cert", cert])
    if client_cert:
        pip_args.extend(["--client-cert", client_cert])
    if pre:
        pip_args.extend(["--pre"])
    for host in trusted_host:
        pip_args.extend(["--trusted-host", host])
    if not build_isolation:
        pip_args.append("--no-build-isolation")
    # User-supplied --pip-args go last so they can override the generated ones
    pip_args.extend(right_args)

    repository: BaseRepository
    repository = PyPIRepository(pip_args, cache_dir=cache_dir)

    # Parse all constraints coming from --upgrade-package/-P
    upgrade_reqs_gen = (install_req_from_line(pkg) for pkg in upgrade_packages)
    upgrade_install_reqs = {
        key_from_ireq(install_req): install_req
        for install_req in upgrade_reqs_gen
    }

    # Keys of already-pinned packages that -P explicitly asks to upgrade
    existing_pins_to_upgrade = set()

    # Proxy with a LocalRequirementsRepository if --upgrade is not specified
    # (= default invocation)
    if not upgrade and os.path.exists(output_file.name):
        # Use a temporary repository to ensure outdated(removed) options from
        # existing requirements.txt wouldn't get into the current repository.
        tmp_repository = PyPIRepository(pip_args, cache_dir=cache_dir)
        ireqs = parse_requirements(
            output_file.name,
            finder=tmp_repository.finder,
            session=tmp_repository.session,
            options=tmp_repository.options,
        )

        # Exclude packages from --upgrade-package/-P from the existing
        # constraints, and separately gather pins to be upgraded
        existing_pins = {}
        for ireq in filter(is_pinned_requirement, ireqs):
            key = key_from_ireq(ireq)
            if key in upgrade_install_reqs:
                existing_pins_to_upgrade.add(key)
            else:
                existing_pins[key] = ireq
        repository = LocalRequirementsRepository(existing_pins, repository,
                                                 reuse_hashes=reuse_hashes)

    ###
    # Parsing/collecting initial requirements
    ###

    constraints = []
    for src_file in src_files:
        is_setup_file = os.path.basename(src_file) in METADATA_FILENAMES
        if src_file == "-":
            # pip requires filenames and not files. Since we want to support
            # piping from stdin, we need to briefly save the input from stdin
            # to a temporary file and have pip read that. also used for
            # reading requirements from install_requires in setup.py.
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            tmpfile.write(sys.stdin.read())
            comes_from = "-r -"
            tmpfile.flush()
            reqs = list(
                parse_requirements(
                    tmpfile.name,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                ))
            # Re-attribute each requirement to stdin rather than the temp file
            for req in reqs:
                req.comes_from = comes_from
            constraints.extend(reqs)
        elif is_setup_file:
            # Build the project metadata (PEP 517) and take its declared
            # requirements as constraints.
            dist = meta.load(os.path.dirname(os.path.abspath(src_file)))
            comes_from = f"{dist.metadata.get_all('Name')[0]} ({src_file})"
            constraints.extend([
                install_req_from_line(req, comes_from=comes_from)
                for req in dist.requires or []
            ])
        else:
            # Plain requirements file
            constraints.extend(
                parse_requirements(
                    src_file,
                    finder=repository.finder,
                    session=repository.session,
                    options=repository.options,
                ))

    # Requirements requested directly (not via -c constraint files)
    primary_packages = {
        key_from_ireq(ireq)
        for ireq in constraints if not ireq.constraint
    }

    # -P/--upgrade-package only takes effect for packages that are either
    # requested directly or already pinned in the existing output file.
    allowed_upgrades = primary_packages | existing_pins_to_upgrade
    constraints.extend(ireq for key, ireq in upgrade_install_reqs.items()
                       if key in allowed_upgrades)

    # Filter out pip environment markers which do not match (PEP496)
    constraints = [
        req for req in constraints if req.markers is None
        # We explicitly set extra=None to filter out optional requirements
        # since evaluating an extra marker with no environment raises UndefinedEnvironmentName
        # (see https://packaging.pypa.io/en/latest/markers.html#usage)
        or req.markers.evaluate({"extra": None})
    ]

    log.debug("Using indexes:")
    with log.indentation():
        for index_url in dedup(repository.finder.index_urls):
            # Strip credentials before logging URLs
            log.debug(redact_auth_from_url(index_url))

    if repository.finder.find_links:
        log.debug("")
        log.debug("Using links:")
        with log.indentation():
            for find_link in dedup(repository.finder.find_links):
                log.debug(redact_auth_from_url(find_link))

    try:
        resolver = Resolver(
            constraints,
            repository,
            prereleases=repository.finder.allow_all_prereleases or pre,
            cache=DependencyCache(cache_dir),
            clear_caches=rebuild,
            allow_unsafe=allow_unsafe,
        )
        results = resolver.resolve(max_rounds=max_rounds)
        hashes = resolver.resolve_hashes(results) if generate_hashes else None
    except PipToolsError as e:
        # Resolution failures exit with a distinct status code
        log.error(str(e))
        sys.exit(2)

    log.debug("")

    ##
    # Output
    ##

    writer = OutputWriter(
        cast(BinaryIO, output_file),
        click_ctx=ctx,
        dry_run=dry_run,
        emit_header=header,
        emit_index_url=emit_index_url,
        emit_trusted_host=emit_trusted_host,
        annotate=annotate,
        generate_hashes=generate_hashes,
        default_index_url=repository.DEFAULT_INDEX_URL,
        index_urls=repository.finder.index_urls,
        trusted_hosts=repository.finder.trusted_hosts,
        format_control=repository.finder.format_control,
        allow_unsafe=allow_unsafe,
        find_links=repository.finder.find_links,
        emit_find_links=emit_find_links,
    )

    writer.write(
        results=results,
        unsafe_requirements=resolver.unsafe_constraints,
        markers={
            key_from_ireq(ireq): ireq.markers
            for ireq in constraints if ireq.markers
        },
        hashes=hashes,
    )

    if dry_run:
        log.info("Dry-run, so nothing updated.")
def test_meta_for_this_package():
    """Loading this project's own metadata yields its name and a dotted version."""
    dist = meta.load('.')
    assert dist.metadata['Name'] == 'pep517'
    assert re.match(r'[\d.]+', dist.version)
def test_meta_output(capfd):
    """load shouldn't emit any output"""
    meta.load('.')
    out, err = capfd.readouterr()
    assert out == ''
    assert err == ''