def do_init(
    project: Project,
    name: str = "",
    version: str = "",
    license: str = "MIT",
    author: str = "",
    email: str = "",
    python_requires: str = "",
) -> None:
    """Bootstrap the project and create a pyproject.toml"""
    project_table = {
        "name": name,
        "version": version,
        "description": "",
        "authors": array_of_inline_tables([{"name": author, "email": email}]),
        "license": make_inline_table({"text": license}),
        "urls": {"homepage": ""},
        "dependencies": make_array([], True),
        "dev-dependencies": make_array([], True),
        "requires-python": python_requires,
        "dynamic": ["classifiers"],
    }
    build_table = {
        "requires": ["pdm-pep517"],
        "build-backend": "pdm.pep517.api",
    }
    data = {"project": project_table, "build-system": build_table}
    if python_requires and python_requires != "*":
        # Validation only: get_specifier() raises on a malformed specifier.
        get_specifier(python_requires)
    if project.pyproject:
        # Merge into an existing pyproject, replacing only our tables.
        project._pyproject["project"] = data["project"]
        project._pyproject["build-system"] = data["build-system"]
    else:
        project._pyproject = data
    project.write_pyproject()
def save_version_specifiers(
    requirements: dict[str, dict[str, Requirement]],
    resolved: dict[str, Candidate],
    save_strategy: str,
) -> None:
    """Rewrite the version specifiers according to the resolved result
    and save strategy

    :param requirements: the requirements to be updated
    :param resolved: the resolved mapping
    :param save_strategy: compatible/wildcard/exact
    """
    for group_reqs in requirements.values():
        for key, req in group_reqs.items():
            # Only pin named requirements that carry no specifier yet.
            if not req.is_named or req.specifier:
                continue
            pinned = resolved[key].version
            if save_strategy == "exact":
                req.specifier = get_specifier(f"=={pinned}")
            elif save_strategy == "compatible":
                version = str(pinned)
                parsed = parse_version(version)
                if parsed.is_prerelease or parsed.is_devrelease:
                    # "~=" is not meaningful for pre/dev releases; use an
                    # explicit lower bound with a next-major upper bound.
                    req.specifier = get_specifier(
                        f">={version},<{parsed.major + 1}")
                else:
                    req.specifier = get_specifier(
                        f"~={parsed.major}.{parsed.minor}")
            elif save_strategy == "minimum":
                req.specifier = get_specifier(f">={pinned}")
def get_hashes(self, candidate: Candidate) -> dict[str, str] | None:
    """Get hashes of all possible installable candidates
    of a given package version.
    """
    # VCS requirements and local directories have no stable artifact to hash.
    if candidate.req.is_vcs or (
        candidate.req.is_file_or_url
        and candidate.req.is_local_dir  # type: ignore
    ):
        return None
    if candidate.hashes:
        return candidate.hashes
    # Pin the requirement to this exact version to enumerate its artifacts.
    pinned_req = dataclasses.replace(
        candidate.req, specifier=get_specifier(f"=={candidate.version}")
    )
    if candidate.req.is_file_or_url:
        # A direct URL points at exactly one artifact.
        candidates: Iterable[Candidate] = [candidate]
    else:
        candidates = self.find_candidates(pinned_req, True)
    hashes: dict[str, str] = {}
    with self.environment.get_finder(self.sources) as finder:
        self._hash_cache.session = finder.session  # type: ignore
        for can in candidates:
            link = can.prepare(self.environment).ireq.link
            # Skip candidates without a concrete, hashable download link.
            if link is None or link.is_vcs:
                continue
            hashes[link.filename] = self._hash_cache.get_hash(link)
    return hashes or None
def get_hashes(self, candidate: Candidate) -> Optional[Dict[str, str]]:
    """Get hashes of all possible installable candidates
    of a given package version.

    :param candidate: the candidate whose installable artifacts are hashed
    :return: a mapping of artifact filename to hash string, or ``None``
        when nothing hashable exists (VCS or local-directory requirements).
    """
    if (
        candidate.req.is_vcs
        or candidate.req.is_file_or_url
        and candidate.req.is_local_dir  # type: ignore
    ):
        return None
    if candidate.hashes:
        return candidate.hashes
    # Pin to the exact resolved version to enumerate matching artifacts.
    req = dataclasses.replace(
        candidate.req, specifier=get_specifier(f"=={candidate.version}")
    )
    if candidate.req.is_file_or_url:
        matching_candidates: Iterable[Candidate] = [candidate]
    else:
        matching_candidates = self.find_candidates(req, allow_all=True)
    result: Dict[str, str] = {}
    with self.environment.get_finder(self.sources) as finder:
        self._hash_cache.session = finder.session  # type: ignore
        for c in matching_candidates:
            link = c.link
            # Fix: a candidate may have no link, and VCS links cannot be
            # hashed -- skip them instead of crashing on ``None.filename``.
            if link is None or link.is_vcs:
                continue
            result[link.filename] = self._hash_cache.get_hash(link)
    return result or None
def is_lockfile_compatible(self) -> bool:
    """Whether the existing lockfile's version is compatible with this pdm."""
    if not self.lockfile_file.exists():
        return False
    written = str(
        self.lockfile.get("metadata", {}).get("lock_version", ""))
    # Pad a bare major version (e.g. "3") to "3.0" so "~=" is valid.
    if "." not in written:
        written += ".0"
    return get_specifier(f"~={written}").contains(self.LOCKFILE_VERSION)
def save_version_specifiers(
    requirements: Dict[str, Requirement],
    resolved: Dict[str, Candidate],
    save_strategy: str,
) -> None:
    """Rewrite the version specifiers according to the resolved result
    and save strategy

    :param requirements: the requirements to be updated
    :param resolved: the resolved mapping
    :param save_strategy: compatible/wildcard/exact
    """
    for key, req in requirements.items():
        # Leave non-named requirements and already-pinned ones untouched.
        if not req.is_named or req.specifier:
            continue
        if save_strategy == "exact":
            req.specifier = get_specifier(f"=={resolved[key].version}")
        elif save_strategy == "compatible":
            # Keep only major.minor, padding a bare major with ".0".
            version = str(resolved[key].version)
            parts = (version.split(".") + ["0"])[:2]
            req.specifier = get_specifier(f"~={'.'.join(parts)}")
def do_add(
    project: Project,
    dev: bool = False,
    group: str | None = None,
    sync: bool = True,
    save: str = "compatible",
    strategy: str = "reuse",
    editables: Iterable[str] = (),
    packages: Iterable[str] = (),
    unconstrained: bool = False,
    no_editable: bool = False,
    no_self: bool = False,
) -> None:
    """Add packages and install"""
    check_project_file(project)
    if not editables and not packages:
        raise PdmUsageError(
            "Must specify at least one package or editable package.")
    group = group or ("dev" if dev else "default")
    parsed = [parse_requirement(line, True) for line in editables]
    parsed.extend(parse_requirement(line) for line in packages)
    tracked_names: set[str] = set()
    requirements: dict[str, Requirement] = {}
    for req in parsed:
        identifier = req.identify()
        tracked_names.add(identifier)
        requirements[identifier] = req
    project.core.ui.echo(
        f"Adding packages to {group} {'dev-' if dev else ''}dependencies: "
        + ", ".join(termui.green(key or "", bold=True) for key in requirements))
    all_dependencies = project.all_dependencies
    group_deps = all_dependencies.setdefault(group, {})
    if unconstrained:
        # Drop existing pins so the resolver is free to pick new versions.
        for existing in group_deps.values():
            existing.specifier = get_specifier("")
    group_deps.update(requirements)
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(project, strategy, tracked_names, reqs)
    # Update dependency specifiers and lockfile hash.
    deps_to_update = group_deps if unconstrained else requirements
    save_version_specifiers({group: deps_to_update}, resolved, save)
    project.add_dependencies(deps_to_update, group, dev)
    project.write_lockfile(project.lockfile, False)
    if sync:
        do_sync(
            project,
            groups=(group,),
            default=False,
            no_editable=no_editable,
            no_self=no_self,
        )
def from_req_dict(cls, name: str, req_dict: RequirementDict) -> "Requirement":
    """Build a concrete requirement from its TOML representation."""
    if isinstance(req_dict, str):
        # A bare string is just a version specifier.
        return NamedRequirement(name=name, specifier=get_specifier(req_dict))
    for vcs in VCS_SCHEMA:
        if vcs not in req_dict:
            continue
        # Pop the VCS key so it isn't forwarded twice via **req_dict.
        repo = cast(str, req_dict.pop(vcs, None))
        return VcsRequirement.create(
            name=name, vcs=vcs, url=vcs + "+" + repo, **req_dict)
    if "path" in req_dict or "url" in req_dict:
        return FileRequirement.create(name=name, **req_dict)
    return NamedRequirement.create(name=name, **req_dict)
def do_init(
    project: Project,
    name: str = "",
    version: str = "",
    description: str = "",
    license: str = "MIT",
    author: str = "",
    email: str = "",
    python_requires: str = "",
) -> None:
    """Bootstrap the project and create a pyproject.toml

    :param project: the project to initialize
    :param name: the project name; combined with ``version`` it decides
        whether a README scaffold is created
    :param python_requires: a PEP 440 specifier for supported Pythons
    """
    data = {
        "project": {
            "name": name,
            "version": version,
            "description": description,
            "authors": array_of_inline_tables([{"name": author, "email": email}]),
            "license": make_inline_table({"text": license}),
            "dependencies": make_array([], True),
        },
        "build-system": {
            "requires": ["pdm-pep517>=0.12.0"],
            "build-backend": "pdm.pep517.api",
        },
    }
    if python_requires and python_requires != "*":
        # Fix: validate the specifier *before* persisting it (and only when
        # one was actually given) -- get_specifier() raises on bad input.
        get_specifier(python_requires)
        data["project"]["requires-python"] = python_requires  # type: ignore
    if name and version:
        readme = next(project.root.glob("README*"), None)
        if readme is None:
            # Scaffold a minimal README when none exists yet.
            readme = project.root.joinpath("README.md")
            readme.write_text(f"# {name}\n\n{description}\n")
        data["project"]["readme"] = readme.name  # type: ignore
    if not project.pyproject:
        project._pyproject = data
    else:
        project._pyproject["project"] = data["project"]  # type: ignore
        project._pyproject["build-system"] = data["build-system"]  # type: ignore
    project.write_pyproject()
    signals.post_init.send(project)
def __init__(self, **kwargs):
    """Populate the requirement from arbitrary keyword attributes."""
    self._marker = None
    self.from_section = "default"
    self.marker_no_python = None  # type: Optional[Marker]
    self.requires_python = PySpecSet()  # type: PySpecSet
    for attr, value in kwargs.items():
        # The specifier is always stored parsed, never as a raw string.
        if attr == "specifier":
            value = get_specifier(value)
        setattr(self, attr, value)
    if self.name and not self.project_name:
        # Derive the canonical project name and lookup key from the name.
        self.project_name = safe_name(self.name)
        self.key = self.project_name.lower()
def create(cls: Type[T], **kwargs: Any) -> T:
    """Construct a requirement, normalizing marker, extras and version."""
    if "marker" in kwargs:
        try:
            kwargs["marker"] = get_marker(kwargs["marker"])
        except InvalidMarker as e:
            raise RequirementError("Invalid marker: %s" % str(e)) from None
    if "extras" in kwargs and isinstance(kwargs["extras"], str):
        # Turn the bracketed "[a, b]" form into a tuple of names.
        raw_extras = kwargs["extras"][1:-1]
        kwargs["extras"] = tuple(part.strip() for part in raw_extras.split(","))
    version = kwargs.pop("version", None)
    if version:
        kwargs["specifier"] = get_specifier(version)
    return cls(**kwargs)
def do_init(
    project: Project,
    name: str = "",
    version: str = "",
    license: str = "MIT",
    author: str = "",
    email: str = "",
    python_requires: str = "",
) -> None:
    """Bootstrap the project and create a pyproject.toml"""
    pdm_table = {
        "name": name,
        "version": version,
        "description": "",
        "author": f"{author} <{email}>",
        "license": license,
        "homepage": "",
        "dependencies": tomlkit.table(),
        "dev-dependencies": tomlkit.table(),
    }
    data = {
        "tool": {"pdm": pdm_table},
        "build-system": {
            "requires": ["pdm"],
            "build-backend": "pdm.builders.api"
        },
    }
    if python_requires and python_requires != "*":
        # get_specifier() raises if the specifier is malformed.
        get_specifier(python_requires)
        data["tool"]["pdm"]["python_requires"] = python_requires
    if project.pyproject:
        # Merge into an existing pyproject, replacing only our tables.
        project._pyproject.setdefault("tool", {})["pdm"] = data["tool"]["pdm"]
        project._pyproject["build-system"] = data["build-system"]
    else:
        project._pyproject = data
    project.write_pyproject()
    project.environment.write_site_py()
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update speicified sections
    :param default: update default
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraint
    :param packages: specified packages to update
    :return: None
    """
    check_project_file(project)
    if packages and (len(sections) > 1 or not default):
        raise click.BadParameter(
            "packages argument can't be used together with multple -s or --no-default."
        )
    if not packages:
        if unconstrained:
            raise click.BadArgumentUsage(
                "--unconstrained must be used with package names given.")
        # No names given: behave like 'lock' followed by 'sync'.
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        normalized = safe_name(name).lower()
        matched_name = next(
            (
                k for k in dependencies
                if safe_name(strip_extras(k)[0]).lower() == normalized
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                context.io.green(name, bold=True), section))
        if unconstrained:
            # Drop the pin so the resolver may choose any version.
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    context.io.echo("Updating packages: {}.".format(", ".join(
        context.io.green(v, bold=True) for v in tracked_names)))
    resolved = do_lock(project, strategy, tracked_names, all_dependencies)
    do_sync(project, sections=(section,), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        lockfile["root"]["content_hash"] = "md5:" + project.get_content_hash(
            "md5")
        project.write_lockfile(lockfile, False)
def do_update(
    project: Project,
    *,
    dev: bool | None = None,
    groups: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Collection[str] = (),
    sync: bool = True,
    no_editable: bool = False,
    no_self: bool = False,
    prerelease: bool = False,
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if packages and (top or len(groups) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -G or "
            "--no-default and --top."
        )
    all_dependencies = project.all_dependencies
    updated_deps: dict[str, dict[str, Requirement]] = defaultdict(dict)
    install_dev = True if dev is None else dev
    if not packages:
        if prerelease:
            raise PdmUsageError("--prerelease must be used with packages given")
        groups = translate_groups(project, default, install_dev, groups or ())
        for group in groups:
            updated_deps[group] = all_dependencies[group]
    else:
        group = groups[0] if groups else ("dev" if dev else "default")
        dependencies = all_dependencies[group]
        for name in packages:
            target = normalize_name(name)
            matched_name = next(
                (
                    k for k in dependencies
                    if normalize_name(strip_extras(k)[0]) == target
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), group, "dev-" if dev else ""
                    )
                )
            dependencies[matched_name].prerelease = prerelease
            updated_deps[group][matched_name] = dependencies[matched_name]
        project.core.ui.echo(
            "Updating packages: {}.".format(
                ", ".join(
                    termui.green(v, bold=True)
                    for v in chain.from_iterable(updated_deps.values())
                )
            )
        )
    if unconstrained:
        # Strip pins so the resolver may choose any version.
        for deps in updated_deps.values():
            for dep in deps.values():
                dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy,
        chain.from_iterable(updated_deps.values()),
        reqs,
        dry_run=dry_run,
    )
    if sync or dry_run:
        do_sync(
            project,
            groups=groups,
            dev=install_dev,
            default=default,
            clean=False,
            dry_run=dry_run,
            requirements=[
                r for deps in updated_deps.values() for r in deps.values()
            ],
            tracked_names=list(chain.from_iterable(updated_deps.values()))
            if top
            else None,
            no_editable=no_editable,
            no_self=no_self,
        )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        for group, deps in updated_deps.items():
            project.add_dependencies(deps, group, dev or False)
        project.write_lockfile(project.lockfile, False)
def do_update(
    project: Project,
    dev: bool = False,
    sections: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if packages and (top or len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or "
            "--no-default and --top.")
    all_dependencies = project.all_dependencies
    updated_deps = {}
    if not packages:
        sections = translate_sections(project, default, dev, sections or ())
        for section in sections:
            updated_deps.update(all_dependencies[section])
    else:
        section = sections[0] if sections else ("dev" if dev else "default")
        dependencies = all_dependencies[section]
        for name in packages:
            target = safe_name(name).lower()
            matched_name = next(
                (
                    k for k in dependencies
                    if safe_name(strip_extras(k)[0]).lower() == target
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), section,
                        "dev-" if dev else ""))
            updated_deps[matched_name] = dependencies[matched_name]
        project.core.ui.echo("Updating packages: {}.".format(", ".join(
            termui.green(v, bold=True) for v in updated_deps)))
    if unconstrained:
        # Strip pins so the resolver may choose any version.
        for dep in updated_deps.values():
            dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy if top or packages else "all",
        updated_deps.keys(),
        reqs,
        dry_run=dry_run,
    )
    do_sync(
        project,
        sections=sections,
        dev=dev,
        default=default,
        clean=False,
        dry_run=dry_run,
        tracked_names=updated_deps.keys() if top else None,
    )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps, section, dev)
        project.write_lockfile(project.lockfile, False)
def version(self, v: str) -> None:
    """Set the pinned version; empty or "*" clears the specifier."""
    if v and v != "*":
        self.specifier = get_specifier(f"=={v}")
    else:
        self.specifier = SpecifierSet()