def get_dependencies(self, section: Optional[str] = None) -> Dict[str, Requirement]:
    """Return the parsed requirements declared for ``section``.

    ``None`` and ``"default"`` both map to the main dependency list; any other
    name is looked up first in [optional-dependencies], then in
    [dev-dependencies], warning when it appears in both.
    """
    metadata = self.meta
    optional_groups = metadata.get("optional-dependencies", {})
    dev_groups = self.tool_settings.get("dev-dependencies", {})
    if section is None or section == "default":
        lines = metadata.get("dependencies", [])
    else:
        in_optional = section in optional_groups
        if in_optional and section in dev_groups:
            # Duplicate declaration: prefer the optional group, but warn.
            self.core.ui.echo(
                f"The {section} section exists in both [optional-dependencies] "
                "and [dev-dependencies], the former is taken.",
                err=True,
                fg="yellow",
            )
        if in_optional:
            lines = optional_groups[section]
        elif section in dev_groups:
            lines = dev_groups[section]
        else:
            raise PdmUsageError(f"Non-exist section {section}")
    result: Dict[str, Requirement] = {}
    with cd(self.root):
        for line in lines:
            # A "-e " prefix marks an editable requirement.
            if line.startswith("-e "):
                req = parse_requirement(line[3:].strip(), True)
            else:
                req = parse_requirement(line)
            # make editable packages behind normal ones to override correctly.
            result[req.identify()] = req
    return result
def resolve_func(
    lines,
    requires_python="",
    allow_prereleases=None,
    strategy="all",
    tracked_names=None,
):
    """Resolve requirement lines and return the pinned candidate mapping.

    NOTE(review): ``repository`` and ``project`` are free variables,
    presumably bound by an enclosing fixture scope — confirm against the
    caller.

    :param lines: requirement strings; a "-e " prefix marks an editable one.
    :param requires_python: PEP 440 requires-python specifier string.
    :param allow_prereleases: when not ``None``, overrides the project setting.
    :param strategy: resolution strategy handed to the provider.
    :param tracked_names: names whose updates are tracked by the provider.
    :return: mapping produced by the resolver.
    """
    repository.environment.python_requires = PySpecSet(requires_python)
    if allow_prereleases is not None:
        project.tool_settings["allow_prereleases"] = allow_prereleases
    requirements = []
    for line in lines:
        # "-e " prefix marks an editable requirement.
        if line.startswith("-e "):
            requirements.append(parse_requirement(line[3:], True))
        else:
            requirements.append(parse_requirement(line))
    provider = project.get_provider(strategy, tracked_names)
    ui = project.core.ui
    with ui.open_spinner("Resolving dependencies") as spin, ui.logging(
            "lock"):
        reporter = SpinnerReporter(spin, requirements)
        resolver = Resolver(provider, reporter)
        mapping, *_ = _resolve(resolver, requirements,
                               repository.environment.python_requires)
    return mapping
def resolve_requirements(
    repository,
    lines,
    requires_python="",
    allow_prereleases=None,
    strategy="all",
    preferred_pins=None,
    tracked_names=None,
):
    """Parse requirement ``lines`` and resolve them against ``repository``.

    Without ``preferred_pins`` a fresh resolution is performed; with pins,
    a reuse or eager-update provider is chosen based on ``strategy``.
    """
    requirements = [
        parse_requirement(line[3:], True)  # "-e " prefix = editable
        if line.startswith("-e ") else parse_requirement(line)
        for line in lines
    ]
    requires_python = PySpecSet(requires_python)
    if preferred_pins:
        provider_class = (ReusePinProvider
                          if strategy == "reuse" else EagerUpdateProvider)
        provider = provider_class(
            preferred_pins,
            tracked_names or (),
            repository,
            requires_python,
            allow_prereleases,
        )
    else:
        provider = BaseProvider(repository, requires_python, allow_prereleases)
    ui = termui.UI()
    with ui.open_spinner("Resolving dependencies") as spin, ui.logging("lock"):
        resolver = Resolver(provider, SpinnerReporter(spin, requirements))
        mapping, *_ = resolve(resolver, requirements, requires_python)
    return mapping
def test_sync_packages_with_sections(project, working_set):
    """Syncing an extra section installs it alongside the default deps."""
    project.add_dependencies({"requests": parse_requirement("requests")})
    project.add_dependencies({"pytz": parse_requirement("pytz")}, "date")
    actions.do_lock(project)
    actions.do_sync(project, sections=["date"])
    # "idna" comes in transitively through requests.
    for pkg in ("pytz", "requests", "idna"):
        assert pkg in working_set
def do_add(
    project: Project,
    dev: bool = False,
    section: Optional[str] = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to dev dependencies section
    :param section: specify section to be add to
    :param sync: whether to install added packages
    :param save: save strategy
    :param strategy: update strategy
    :param editables: editable requirements
    :param packages: normal requirements
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    # --dev wins over an explicit section; otherwise fall back to "default".
    section = "dev" if dev else section or "default"
    tracked_names = set()
    requirements = {}
    # Editables are parsed with the editable flag set.
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        r.from_section = section
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(f"Adding packages to {section} dependencies: " +
                         ", ".join(
                             termui.green(key or "", bold=True)
                             for key in requirements))
    # Merge the new requirements into the full dependency map, then re-lock
    # the whole project so the resolution stays globally consistent.
    all_dependencies = project.all_dependencies
    all_dependencies.setdefault(section, {}).update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    save_version_specifiers(requirements, resolved, save)
    project.add_dependencies(requirements)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        # Install only the section that was just modified.
        do_sync(
            project,
            sections=(section, ),
            dev=False,
            default=False,
            dry_run=False,
            clean=False,
        )
def test_sync_production_packages(project, working_set, is_dev):
    """Dev dependencies install only when ``dev`` is requested at sync time."""
    project.add_dependencies({"requests": parse_requirement("requests")})
    project.add_dependencies({"pytz": parse_requirement("pytz")}, "dev", dev=True)
    actions.do_lock(project)
    actions.do_sync(project, dev=is_dev)
    assert "requests" in working_set
    if is_dev:
        assert "pytz" in working_set
    else:
        assert "pytz" not in working_set
def test_sync_packages_with_all_dev(project, working_set):
    """dev=True, default=False installs all dev sections and nothing else."""
    project.add_dependencies({"requests": parse_requirement("requests")})
    project.add_dependencies({"pytz": parse_requirement("pytz")}, "date", True)
    project.add_dependencies({"pyopenssl": parse_requirement("pyopenssl")},
                             "ssl", True)
    actions.do_lock(project)
    actions.do_sync(project, dev=True, default=False)
    # Default deps (and their transitive idna) must be excluded.
    for absent in ("requests", "idna"):
        assert absent not in working_set
    for present in ("pytz", "pyopenssl"):
        assert present in working_set
def do_add(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
    unconstrained: bool = False,
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Add packages and install

    :param project: the project instance
    :param dev: add to the dev dependency group
    :param group: target group name; defaults to "dev" or "default" per ``dev``
    :param sync: whether to install the added packages afterwards
    :param save: version-specifier save strategy
    :param strategy: lock strategy
    :param editables: editable requirement lines
    :param packages: normal requirement lines
    :param unconstrained: loosen existing specifiers in the group before saving
    :param no_editable: forwarded to do_sync
    :param no_self: forwarded to do_sync
    """
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    if not group:
        group = "dev" if dev else "default"
    tracked_names: set[str] = set()
    requirements: dict[str, Requirement] = {}
    # Editables are parsed with the editable flag set.
    for r in [parse_requirement(line, True) for line in editables
              ] + [parse_requirement(line) for line in packages]:
        key = r.identify()
        tracked_names.add(key)
        requirements[key] = r
    project.core.ui.echo(
        f"Adding packages to {group} {'dev-' if dev else ''}dependencies: " +
        ", ".join(termui.green(key or "", bold=True) for key in requirements))
    all_dependencies = project.all_dependencies
    group_deps = all_dependencies.setdefault(group, {})
    if unconstrained:
        # Drop every existing pin so the resolver can pick versions freely.
        for req in group_deps.values():
            req.specifier = get_specifier("")
    group_deps.update(requirements)
    # Re-lock the whole project with the merged dependency map.
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)

    # Update dependency specifiers and lockfile hash.
    # With --unconstrained the whole group gets fresh specifiers, not just
    # the newly added requirements.
    deps_to_update = group_deps if unconstrained else requirements
    save_version_specifiers({group: deps_to_update}, resolved, save)
    project.add_dependencies(deps_to_update, group, dev)
    lockfile = project.lockfile
    project.write_lockfile(lockfile, False)

    if sync:
        # Install only the group that was just modified.
        do_sync(
            project,
            groups=(group, ),
            default=False,
            no_editable=no_editable,
            no_self=no_self,
        )
def test_sync_packages_with_group_all(project, working_set):
    """Syncing with the ":all" pseudo-group installs every group."""
    project.add_dependencies({"requests": parse_requirement("requests")})
    project.add_dependencies({"pytz": parse_requirement("pytz")}, "date")
    project.add_dependencies({"pyopenssl": parse_requirement("pyopenssl")},
                             "ssl")
    actions.do_lock(project)
    actions.do_sync(project, groups=[":all"])
    # "idna" arrives transitively via requests.
    for pkg in ("pytz", "requests", "idna", "pyopenssl"):
        assert pkg in working_set
def test_vcs_candidate_in_subdirectory(project, is_editable):
    """Each &subdirectory= of a VCS monorepo resolves to its own package."""
    cases = [
        (
            "git+https://github.com/test-root/demo-parent-package.git"
            "@master#egg=package-a&subdirectory=package-a",
            ["flask"],
        ),
        (
            "git+https://github.com/test-root/demo-parent-package.git"
            "@master#egg=package-b&subdirectory=package-b",
            ["django"],
        ),
    ]
    for line, expected_deps in cases:
        req = parse_requirement(line, is_editable)
        candidate = Candidate(req, project.environment)
        assert candidate.get_dependencies_from_metadata() == expected_deps
        assert candidate.version == "0.1.0"
def dev_dependencies(self) -> Dict[str, Requirement]:
    """All development dependencies"""
    dev_group = self.tool_settings.get("dev-dependencies", {})
    if not dev_group:
        return {}
    result: Dict[str, Requirement] = {}
    with cd(self.root):
        # Flatten every dev group into a single identifier -> requirement map.
        for lines in dev_group.values():
            for line in lines:
                if line.startswith("-e "):
                    # "-e " prefix marks an editable requirement.
                    req = parse_requirement(line[3:].strip(), True)
                else:
                    req = parse_requirement(line)
                result[req.identify()] = req
    return result
def test_install_wheel_with_cache(project, invoke):
    """Cached wheel installs via .pth redirectors and cleans up on uninstall."""
    req = parse_requirement("future-fstrings")
    candidate = Candidate(
        req,
        project.environment,
        link=Link(
            "http://fixtures.test/artifacts/future_fstrings-1.2.0-py2.py3-none-any.whl"
        ),
    )
    installer = InstallManager(project.environment, use_install_cache=True)
    installer.install(candidate)

    # A cached install writes .pth files into purelib instead of copying
    # the package; the real files live under the package cache.
    lib_path = project.environment.get_paths()["purelib"]
    assert os.path.isfile(os.path.join(lib_path, "future_fstrings.pth"))
    assert os.path.isfile(os.path.join(lib_path, "aaaaa_future_fstrings.pth"))
    cache_path = project.cache(
        "packages") / "future_fstrings-1.2.0-py2.py3-none-any"
    assert cache_path.is_dir()

    # The package must still be importable through the .pth indirection.
    r = invoke(["run", "python", "-c", "import future_fstrings"], obj=project)
    assert r.exit_code == 0

    # Uninstalling must remove the .pth files, the direct_url metadata and
    # the cached payload.
    dist = project.environment.get_working_set()["future-fstrings"]
    installer.uninstall(dist)
    assert not os.path.isfile(os.path.join(lib_path, "future_fstrings.pth"))
    assert not os.path.isfile(
        os.path.join(lib_path, "aaaaa_future_fstrings.pth"))
    assert not dist.read_text("direct_url.json")
    assert not cache_path.exists()
def get_dependencies(
        self, candidate: Candidate) -> Tuple[List[Requirement], PySpecSet, str]:
    """Get (dependencies, python_specifier, summary) of the candidate."""
    requirements, requires_python, summary = [], "", ""
    last_ext_info = None
    # Try each dependency source in order; the first one that succeeds wins.
    for getter in self.dependency_generators():
        try:
            requirements, requires_python, summary = getter(candidate)
        except CandidateInfoNotFound:
            last_ext_info = sys.exc_info()
            continue
        break
    else:
        # Every source failed: re-raise the last CandidateInfoNotFound with
        # its original traceback.
        if last_ext_info is not None:
            raise last_ext_info[1].with_traceback(last_ext_info[2])
    requirements = [parse_requirement(line) for line in requirements]
    if candidate.req.extras:
        # HACK: If this candidate has extras, add the original candidate
        # (same pinned version, no extras) as its dependency. This ensures
        # the same package with different extras (treated as distinct by
        # the resolver) have the same version.
        self_req = candidate.req.copy()
        self_req.extras = None
        requirements.append(self_req)
    return requirements, PySpecSet(requires_python), summary
def test_rollback_after_commit(project, caplog):
    """rollback() after commit() is a no-op that logs an error."""
    caplog.set_level(logging.ERROR, logger="pdm.termui")
    req = parse_requirement("demo")
    candidate = Candidate(
        req,
        project.environment,
        link=Link(
            "http://fixtures.test/artifacts/demo-0.0.1-py2.py3-none-any.whl"),
    )
    installer = InstallManager(project.environment)
    lib_path = project.environment.get_paths()["purelib"]
    installer.install(candidate)
    lib_file = os.path.join(lib_path, "demo.py")
    assert os.path.exists(lib_file)

    # Remove and commit: the file is gone for good.
    remove_paths = installer.get_paths_to_remove(
        project.environment.get_working_set()["demo"])
    remove_paths.remove()
    remove_paths.commit()
    assert not os.path.exists(lib_file)

    caplog.clear()
    # After commit there is nothing staged to restore; rollback must not
    # resurrect the file and must log the error instead.
    remove_paths.rollback()
    assert not os.path.exists(lib_file)
    assert any(record.message == "Can't rollback, not uninstalled yet"
               for record in caplog.records)
def requirement_from_ireq(ireq):
    """Convert an `InstallRequirement` into a
    `pdm.models.requirement.Requirement`.

    The requirement is first rendered as a PEP 508 line (merging pip's
    computed markers when they differ) and then re-parsed.

    :param :class:`InstallRequirement` ireq: A pip **InstallRequirement**
        instance.
    :return: the equivalent parsed requirement
    :rtype: Requirement
    """
    if ireq.editable:
        # Editable requirements are identified by their link, not a name.
        line = "{}".format(ireq.link)
    else:
        line = _requirement_to_str_lowercase_name(ireq.req)
    # Fold pip-computed markers into the line when they differ from the
    # requirement's own marker.
    if str(ireq.req.marker) != str(ireq.markers):
        if not ireq.req.marker:
            line = "{}; {}".format(line, ireq.markers)
        else:
            # Combine the existing marker with pip's markers via AND.
            name, markers = line.split(";", 1)
            markers = Marker(markers) & ireq.markers
            line = "{}; {}".format(name, markers)
    return parse_requirement(line, ireq.editable)
def test_convert_req_dict_to_req_line(req, req_dict, result):
    """Round-trip: requirement line -> req dict -> requirement line."""
    parsed = parse_requirement(req)
    assert parsed.as_req_dict() == req_dict
    assert parsed.as_ireq()
    rebuilt = Requirement.from_req_dict(*req_dict)
    # When no explicit expectation is given, the line must round-trip as-is.
    expected = result if result else req
    assert rebuilt.as_line() == expected
def test_parse_poetry_project_metadata(project, is_editable):
    """A local poetry project exposes name/version/deps via its metadata."""
    project_path = (FIXTURES / 'projects/poetry-demo').as_posix()
    req = parse_requirement(project_path, is_editable)
    candidate = Candidate(req, project.environment)
    assert candidate.get_dependencies_from_metadata() == ["requests<3.0,>=2.6"]
    assert candidate.name == "poetry-demo"
    assert candidate.version == "0.1.0"
def test_export_replace_project_root(project):
    """Exported local file requirements must not leak ${PROJECT_ROOT}."""
    artifact = FIXTURES / "artifacts/first-2.0.2-py2.py3-none-any.whl"
    shutil.copy2(artifact, project.root)
    with cd(project.root):
        local_req = parse_requirement(f"./{artifact.name}")
        result = requirements.export(project, [local_req],
                                     Namespace(hashes=False))
    assert "${PROJECT_ROOT}" not in result
def ireq_as_line(ireq: InstallRequirement, environment: Environment) -> str:
    """Formats an `InstallRequirement` instance as a PEP 508 dependency string.

    Generic formatter for pretty printing InstallRequirements to the terminal
    in a less verbose way than using its `__str__` method.

    :param :class:`InstallRequirement` ireq: A pip **InstallRequirement**
        instance.
    :param environment: used to build the wheel when the requirement has no
        parsed name yet.
    :return: A formatted string for prettyprinting
    :rtype: str
    """
    if ireq.editable:
        line = "-e {}".format(ireq.link)
    else:
        if not ireq.req:
            # No parsed requirement yet: start from a placeholder URL
            # requirement, then build the wheel to learn the real name.
            ireq.req = parse_requirement("dummy @" + ireq.link.url)  # type: ignore
            wheel = Wheel(environment.build(ireq))
            ireq.req.name = wheel.name  # type: ignore
        line = _requirement_to_str_lowercase_name(cast(PRequirement, ireq.req))

    assert ireq.req
    # Append pip's environment markers when the requirement has none itself.
    if not ireq.req.marker and ireq.markers:
        line = f"{line}; {ireq.markers}"

    return line
def ireq_as_line(ireq: InstallRequirement, environment: Environment) -> str:
    """Formats an `InstallRequirement` instance as a PEP 508 dependency string.

    Generic formatter for pretty printing InstallRequirements to the terminal
    in a less verbose way than using its `__str__` method.

    :param :class:`InstallRequirement` ireq: A pip **InstallRequirement**
        instance.
    :param environment: used to build the wheel when the requirement has no
        parsed name yet.
    :return: A formatted string for prettyprinting
    :rtype: str
    """
    if ireq.editable:
        line = "-e {}".format(ireq.link)
    else:
        if not ireq.req:
            # No parsed requirement yet: start from a placeholder URL
            # requirement, then build the wheel to learn the real name.
            ireq.req = parse_requirement("dummy @" + ireq.link.url)
            wheel = Wheel(environment.build(ireq))
            ireq.req.name = wheel.name
        line = _requirement_to_str_lowercase_name(ireq.req)

    # Fold pip-computed markers into the line when they differ from the
    # requirement's own marker.
    if str(ireq.req.marker) != str(ireq.markers):
        if not ireq.req.marker:
            line = "{}; {}".format(line, ireq.markers)
        else:
            # Combine the existing marker with pip's markers via AND.
            name, markers = line.split(";", 1)
            markers = Marker(markers) & ireq.markers
            line = "{}; {}".format(name, markers)
    return line
def test_expand_project_root_in_url(req_str, core):
    """${PROJECT_ROOT} placeholders in requirement URLs expand and round-trip."""
    project = core.create_project(FIXTURES.parent.parent)
    # "-e " prefix marks an editable requirement.
    if req_str.startswith("-e "):
        req = parse_requirement(req_str[3:], True)
    else:
        req = parse_requirement(req_str)
    candidate = Candidate(req, project.environment)
    expected_deps = ["idna", 'chardet; os_name == "nt"']
    assert candidate.get_dependencies_from_metadata() == expected_deps
    entry = candidate.as_lockfile_entry()
    if "path" in entry:
        assert entry["path"].startswith("./")
    else:
        assert "${PROJECT_ROOT}" in entry["url"]
def test_parse_project_file_on_build_error_no_dep(project):
    """A project whose build fails but declares no deps yields an empty list."""
    project_path = (FIXTURES / 'projects/demo-failure-no-dep').as_posix()
    candidate = Candidate(parse_requirement(project_path), project.environment)
    assert candidate.get_dependencies_from_metadata() == []
    assert candidate.name == "demo"
    assert candidate.version == "0.0.1"
def handle(self, project: Project, options: argparse.Namespace) -> None:
    """Look up ``options.package`` in the repository and display its info."""
    package = options.package
    req = parse_requirement(package)
    repository = project.get_repository()
    # reverse the result so that latest is at first.
    matches = repository.find_candidates(
        req, project.environment.python_requires, True)
    latest = next(iter(matches), None)
    if not latest:
        stream.echo(
            stream.yellow(f"No match found for the package {package!r}"),
            err=True)
        return
    # Latest non-prerelease, and the locally installed version (if any).
    latest_stable = next(filter(filter_stable, matches), None)
    installed = project.environment.get_working_set().get(package)

    metadata = latest.get_metadata()
    # NOTE(review): relies on private metadata attributes; legacy
    # (email-header style) metadata exposes items(), modern exposes _data.
    if metadata._legacy:
        result = ProjectInfo(dict(metadata._legacy.items()), True)
    else:
        result = ProjectInfo(dict(metadata._data), False)
    if latest_stable:
        result.latest_stable_version = str(latest_stable.version)
    if installed:
        result.installed_version = str(installed.version)

    stream.display_columns(list(result.generate_rows()))
def add_package(key: str, dist: Distribution | None) -> Package:
    """Add ``key`` (and, recursively, its dependencies) to the graph.

    NOTE(review): ``graph``, ``working_set``, ``marker_env`` and
    ``node_with_extras`` are free variables from the enclosing scope —
    this is a graph-building closure.

    :param key: package identifier, possibly with extras (e.g. "foo[bar]").
    :param dist: the installed distribution, or ``None`` if not installed.
    :return: the graph node for ``key``.
    """
    name, extras = strip_extras(key)
    extras = extras or ()
    reqs: dict[str, Requirement] = {}
    if dist:
        # Keep only requirements whose markers match the current environment.
        requirements = (parse_requirement(r)
                        for r in filter_requirements_with_extras(
                            dist.requires or [], extras))
        for req in requirements:
            if not req.marker or req.marker.evaluate(marker_env):
                reqs[req.identify()] = req
        version: str | None = dist.version
    else:
        # Not installed: record the node with no version and no children.
        version = None
    node = Package(key, version, reqs)
    if node not in graph:
        # Adding the node before recursing guards against dependency cycles.
        if extras:
            node_with_extras.add(name)
        graph.add(node)

        for k in reqs:
            child = add_package(k, working_set.get(strip_extras(k)[0]))
            graph.connect(node, child)

    return node
def resolve_requirements(
    repository,
    lines,
    requires_python="",
    allow_prereleases=None,
    strategy="reuse",
    preferred_pins=None,
    tracked_names=None,
):
    """Resolve grouped requirement lines against ``repository``.

    ``lines`` may be a plain list (treated as the "default" group) or a
    mapping of group name to requirement lines.
    """
    if isinstance(lines, list):
        lines = {"default": lines}
    requirements = {}
    for group, group_lines in lines.items():
        group_reqs = requirements.setdefault(group, {})
        for line in group_lines:
            req = parse_requirement(line)
            group_reqs[identify(req)] = req
    requires_python = PySpecSet(requires_python)
    if preferred_pins:
        provider_class = (ReusePinProvider
                          if strategy == "reuse" else EagerUpdateProvider)
        provider = provider_class(
            preferred_pins,
            tracked_names or (),
            repository,
            requires_python,
            allow_prereleases,
        )
    else:
        provider = BaseProvider(repository, requires_python, allow_prereleases)
    # The reporter sees a flat list of all requirements across groups.
    flat_reqs = [req for deps in requirements.values() for req in deps.values()]
    resolver = Resolver(provider, SimpleReporter(flat_reqs))
    mapping, *_ = resolve(resolver, requirements, requires_python)
    return mapping
def make_self_candidate(self, editable: bool = True) -> Candidate:
    """Build a candidate representing this project itself."""
    self_req = parse_requirement(self.root.as_posix(), editable)
    self_req.name = self.meta.name
    return Candidate(
        self_req,
        self.environment,
        name=self.meta.name,
        version=self.meta.version,
    )
def test_lock_dependencies(project):
    """Locking writes the lockfile with the full transitive closure."""
    project.add_dependencies({"requests": parse_requirement("requests")})
    actions.do_lock(project)
    assert project.lockfile_file.exists()
    locked = project.get_locked_candidates()
    # requests plus its transitive dependencies must all be pinned.
    expected = ("requests", "idna", "chardet", "certifi")
    assert all(pkg in locked for pkg in expected)
def get_dependencies(
    self, candidate: Candidate
) -> tuple[list[Requirement], PySpecSet, str]:
    """Get (dependencies, python_specifier, summary) of the candidate."""
    requires_python, summary = "", ""
    requirements: list[str] = []
    last_ext_info = None
    # Try each dependency source in order; the first one that succeeds wins.
    for getter in self.dependency_generators():
        try:
            requirements, requires_python, summary = getter(candidate)
        except CandidateInfoNotFound:
            last_ext_info = sys.exc_info()
            continue
        break
    else:
        # Every source failed: re-raise the last CandidateInfoNotFound with
        # its original traceback.
        if last_ext_info is not None:
            raise last_ext_info[1].with_traceback(last_ext_info[2])  # type: ignore
    reqs = [parse_requirement(line) for line in requirements]
    if candidate.req.extras:
        # XXX: If the requirement has extras, add the original candidate
        # (without extras) as its dependency. This ensures the same package with
        # different extras resolve to the same version.
        self_req = dataclasses.replace(candidate.req, extras=None, marker=None)
        reqs.append(self_req)
    # Store the metadata on the candidate for caching
    candidate.requires_python = requires_python
    candidate.summary = summary
    return reqs, PySpecSet(requires_python), summary
def test_legacy_pep345_tag_link(project, index):
    """Legacy PEP 345 metadata still yields a requires-python constraint."""
    candidate = Candidate(parse_requirement("pep345-legacy"),
                          project.environment)
    try:
        candidate.prepare()
    except Exception:
        # Preparation is allowed to fail for this legacy package; only the
        # parsed requires-python value matters here.
        pass
    assert candidate.requires_python == ">=3,<4"
def uninstall(self, dist: Distribution) -> None:
    """Uninstall ``dist`` through pip's uninstall machinery."""
    ireq = pip_shims.install_req_from_line(dist.project_name)
    # Swap in pdm's parsed requirement so identification stays consistent.
    ireq.req = parse_requirement(dist.project_name)
    pathset = ireq.uninstall(auto_confirm=self.auto_confirm)
    if pathset:
        pathset.commit()