def test_cwd():
	"""
	Check that :meth:`PathPlus.cwd` matches :func:`os.getcwd` and returns an absolute :class:`PathPlus`.
	"""

	cwd_path = PathPlus.cwd()
	expected = PathPlus(os.getcwd())

	assert cwd_path == expected
	assertEqualNormCase(str(cwd_path), str(expected))
	assert type(cwd_path) is type(expected)
	assert cwd_path.is_absolute()
def log(
		entries: Optional[int] = None,
		reverse: bool = False,
		from_date: Optional[datetime] = None,
		from_tag: Optional[str] = None,
		colour: Optional[bool] = None,
		no_pager: bool = False,
		) -> int:
	"""
	Show git commit log.
	"""

	# 3rd party
	from consolekit.terminal_colours import resolve_color_default
	from consolekit.utils import abort
	from domdf_python_tools.paths import PathPlus
	from southwark.log import Log
	from southwark.repo import Repo

	repo = Repo(PathPlus.cwd())

	try:
		commit_log = Log(repo).log(
				max_entries=entries,
				reverse=reverse,
				from_date=from_date,
				from_tag=from_tag,
				)
	except ValueError as e:
		raise abort(f"ERROR: {e}")

	# Honour the user's pager preference; colour resolution is shared by both paths.
	echo = click.echo if no_pager else click.echo_via_pager
	echo(commit_log, color=resolve_color_default(colour))

	return 0
def make_recipe(out_dir: str = "./conda/"):
	"""
	Make a Conda ``meta.yaml`` recipe.
	"""

	# stdlib
	import warnings

	# 3rd party
	from consolekit.terminal_colours import Fore, resolve_color_default
	from domdf_python_tools.paths import PathPlus, traverse_to_file

	# this package
	from repo_helper import conda

	warnings.warn(
			"'repo-helper make-recipe' is deprecated. "
			"Please use 'mkrecipe' instead: https://mkrecipe.readthedocs.io/",
			DeprecationWarning,
			)

	# Find the project root by walking up until repo_helper.yml is found.
	project_root = traverse_to_file(PathPlus.cwd(), "repo_helper.yml")

	output_file = PathPlus(out_dir).resolve() / "meta.yaml"
	output_file.parent.maybe_make()

	conda.make_recipe(project_root, output_file)

	click.echo(Fore.GREEN(f"Wrote recipe to {output_file!s}"), color=resolve_color_default())
def main(
		dest: PathLike = "venv",
		verbose: int = 0,
		colour: ColourTrilean = None,
		show_traceback: bool = False,
		upgrade: bool = False,
		):
	"""
	Create virtual environments using pyproject.toml metadata.
	"""

	# this package
	from pyproject_devenv import mkdevenv
	from pyproject_devenv.config import ConfigTracebackHandler

	with handle_tracebacks(show_traceback, ConfigTracebackHandler):
		exit_code = mkdevenv(PathPlus.cwd(), dest, verbosity=verbose, upgrade=upgrade)

		# A non-zero code aborts before the success message is shown.
		if exit_code:
			sys.exit(exit_code)  # pragma: no cover

		click.echo(
				Fore.GREEN("Successfully created development virtualenv."),
				color=resolve_color_default(colour),
				)
def version(quiet: bool = False) -> None:
	"""
	Show the repository version.

	:param quiet: If true, print only the bare version number.
	"""

	# 3rd party
	from domdf_python_tools.paths import PathPlus
	from southwark import get_tags
	from southwark.repo import Repo

	# this package
	from repo_helper.core import RepoHelper

	rh = RepoHelper(PathPlus.cwd())
	rh.load_settings(allow_unknown_keys=True)
	version = rh.templates.globals["version"]

	if quiet:
		click.echo(f"v{version}")
	else:
		click.echo(f"Current version: v{version}")

		repo = Repo(rh.target_repo)

		for sha, tag in get_tags(repo).items():
			if tag == f"v{version}":
				# Walk the history from HEAD; the enumerate index of the tagged
				# commit equals the number of commits made since that release.
				walker = repo.get_walker()

				for idx, entry in enumerate(walker):
					commit_id = entry.commit.id.decode("UTF-8")

					if commit_id == sha:
						# Fixed pluralisation: was ``'s' if idx > 1``, which printed
						# "0 commit" when the tag is at HEAD. English pluralises
						# everything except exactly 1.
						click.echo(f"{idx} commit{'s' if idx != 1 else ''} since that release.")
						break

				break
def devenv(
		dest: PathLike = "venv",
		verbose: int = 0,
		colour: ColourTrilean = None,
		upgrade: bool = False,
		):
	"""
	Create a virtualenv.
	"""

	# 3rd party
	from consolekit.terminal_colours import Fore, resolve_color_default
	from domdf_python_tools.paths import PathPlus

	# this package
	from repo_helper_devenv import mkdevenv

	exit_code = mkdevenv(PathPlus.cwd(), dest, verbose, upgrade=upgrade)

	# A non-zero code aborts before the success message is shown.
	if exit_code:
		sys.exit(exit_code)  # pragma: no cover

	click.echo(
			Fore.GREEN("Successfully created development virtualenv."),
			color=resolve_color_default(colour),
			)
def protect_branch(
		branch: str,
		token: str,
		verbose: bool = False,
		colour: ColourTrilean = None,
		org: bool = False,
		):
	"""
	Set or update the branch protection for the given branch on GitHub.
	"""

	# 3rd party
	from domdf_python_tools.paths import PathPlus

	# this package
	from repo_helper_github import GitHubManager
	from repo_helper_github.exceptions import TracebackHandler

	with TracebackHandler()():
		github = GitHubManager(token, PathPlus.cwd(), verbose=verbose, colour=colour)

		try:
			sys.exit(github.protect_branch(branch, org=org))
		except NotFoundError:
			# Translate the API error into a user-facing CLI error.
			raise click.UsageError(f"No such branch {branch}")
def new(token: str, colour: Optional[bool] = None):
	"""
	Create a new ReadTheDocs project.
	"""

	# 3rd party
	from domdf_python_tools.paths import PathPlus

	# this package
	from repo_helper_rtd import ReadTheDocsManager

	rtd_manager = ReadTheDocsManager(token, PathPlus.cwd(), colour=colour)
	rtd_manager.load_settings()

	response = rtd_manager.new()

	# Any 2xx status code counts as success.
	if response.status_code // 100 == 2:
		project_slug = rtd_manager.templates.globals["repo_name"].lower().replace('_', '-')
		click.echo(f"Success! View the project page at https://readthedocs.org/projects/{project_slug}")
		sys.exit(0)

	print(response)
	sys.exit(1)
def requirement(requirement: str, file: Optional[str] = None) -> int:
	"""
	Add a requirement.

	:param requirement: The requirement specifier to add.
	:param file: The requirements file to add it to.
		Defaults to ``requirements.txt`` in the repository root.

	:returns: ``0`` on success.
	"""

	# 3rd party
	from consolekit.utils import abort
	from domdf_python_tools.paths import PathPlus, traverse_to_file
	from domdf_python_tools.stringlist import StringList
	from packaging.requirements import InvalidRequirement
	from packaging.specifiers import SpecifierSet
	from shippinglabel import normalize_keep_dot
	from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements

	repo_dir: PathPlus = traverse_to_file(PathPlus.cwd(), "repo_helper.yml", "git_helper.yml")

	if file is None:
		requirements_file = repo_dir / "requirements.txt"

		if not requirements_file.is_file():
			# Bug fix: the two error messages were swapped — this branch previously
			# reported f"'{file}' not found." with file=None, printing "'None' not found.".
			raise abort("'requirements.txt' not found.")
	else:
		requirements_file = PathPlus(file)

		if not requirements_file.is_file():
			# Bug fix (other half of the swap): report the file the user actually named.
			raise abort(f"'{file}' not found.")

	try:
		req = ComparableRequirement(requirement)
	except InvalidRequirement as e:
		raise BadRequirement(requirement, e)

	# Check the project exists on PyPI, and use PyPI's canonical spelling of the name.
	response = (PYPI_API / req.name / "json/").get()
	if response.status_code != 200:
		raise click.BadParameter(f"No such project {req.name}")
	else:
		req.name = normalize(response.json()["info"]["name"])

	if not req.specifier:
		# No version constraint given: default to ">=<latest release>".
		req.specifier = SpecifierSet(f">={response.json()['info']['version']}")

	click.echo(f"Adding requirement '{req}'")

	requirements, comments, invalid_lines = read_requirements(
			req_file=requirements_file,
			include_invalid=True,
			normalize_func=normalize_keep_dot,
			)

	requirements.add(req)

	# Preserve comments and unparseable lines at the top, then the sorted, merged requirements.
	buf = StringList([*comments, *invalid_lines])
	buf.extend(str(req) for req in sorted(combine_requirements(requirements)))
	requirements_file.write_lines(buf)

	return 0
def patch(commit: Optional[bool], message: str, force: bool):
	"""
	Bump to the next patch version.
	"""

	# this package
	from repo_helper.release import Bumper

	Bumper(PathPlus.cwd(), force).patch(commit, message)
def today(commit: Optional[bool], message: str, force: bool):
	"""
	Bump to the calver version for today's date, such as 2020.12.25.
	"""

	# this package
	from repo_helper.release import Bumper

	Bumper(PathPlus.cwd(), force).today(commit, message)
def version(version: str, commit: Optional[bool], message: str, force: bool):
	# Bump to the given version; a leading "v" on the version string is tolerated.

	# 3rd party
	from packaging.version import Version

	# this package
	from repo_helper.release import Bumper

	target = Version(version.lstrip('v'))
	Bumper(PathPlus.cwd(), force).bump(target, commit, message)
def interactive_prompt(
		token: str,
		*,
		verbose: bool = False,
		colour: ColourTrilean = True,
		org: bool = True,
		) -> None:
	"""
	Start an interactive session.

	:param token: The token to authenticate with the GitHub API.
	:param verbose: Whether to show information on the GitHub API rate limit.
	:param colour: Whether to use coloured output.
	:param org: Indicates the repository belongs to the organisation configured as
		'username' in repo_helper.yml.
	"""

	click.echo("repo_helper_github interactive prompt.")
	click.echo(f"Version {repo_helper_github.__version__}")
	# Fixed: this was a needless f-string with no placeholders.
	click.echo("Type 'help' for help or 'quit' to exit.")

	readline.set_history_length(-1)
	readline.set_auto_history(True)

	parser = InteractiveParser()
	manager = GitHubManager(token, PathPlus.cwd(), verbose=verbose, colour=colour)

	# This will catch a missing --org option error earlier
	manager.get_org_or_user(org)

	readline.parse_and_bind("tab: complete")
	readline.set_completer(parser.complete)

	HISTORY_FILE.read()
	HISTORY_FILE.get_history_items()

	try:
		while True:
			# "&&" chains several commands on a single input line.
			for command in prompt('>', prompt_suffix=' ').split("&&"):
				command = command.lower().strip()
				command, args = parse_command(command)

				if command is not None:
					try:
						getattr(manager, command)(*args, org=org)
					except Exception:
						# Show the traceback but keep the session alive.
						click.echo(traceback.format_exc())

	except (KeyboardInterrupt, EOFError, click.Abort):
		click.echo("\nExiting...")
		sys.exit(0)
	finally:
		# Persist command history regardless of how the session ends.
		HISTORY_FILE.write()
def run(self) -> Sequence[nodes.Node]:  # type: ignore
	"""
	Process the content of the directive.

	Renders a pre-commit configuration entry for this repository as a
	reStructuredText YAML code block.

	:returns: A one-element list containing the generated node, or an empty
		list if no hooks could be determined or no YAML output was produced.
	"""

	if "hooks" in self.options:
		hooks = self.options["hooks"]
	else:
		# No explicit hooks option: search the current directory and its
		# parents for a .pre-commit-hooks.yaml file and take the hook ids from it.
		cwd = PathPlus.cwd()

		for directory in (cwd, *cwd.parents):
			hook_file = directory / ".pre-commit-hooks.yaml"
			if hook_file.is_file():
				hooks_dict = YAML(typ="safe", pure=True).load(hook_file.read_text())
				hooks = [h["id"] for h in hooks_dict]
				break
		else:
			warnings.warn("No hooks specified and no .pre-commit-hooks.yaml file found.")
			return []

	repo = make_github_url(self.env.config.github_username, self.env.config.github_repository)
	config: _Config = {"repo": str(repo)}

	if "rev" in self.options:
		config["rev"] = self.options["rev"]

	config["hooks"] = [{"id": hook_name} for hook_name in hooks]

	if "args" in self.options:
		# NOTE(review): "args" is applied only to the first hook — presumably
		# intended for single-hook configs; confirm for multi-hook use.
		config["hooks"][0]["args"] = self.options["args"]

	targetid = f'pre-commit-{self.env.new_serialno("pre-commit"):d}'
	targetnode = nodes.section(ids=[targetid])

	# Serialise the config as block-style YAML.
	yaml_dumper = YAML()
	yaml_dumper.default_flow_style = False

	yaml_output_stream = StringIO()
	yaml_dumper.dump([config], stream=yaml_output_stream)
	yaml_output = yaml_output_stream.getvalue()

	if not yaml_output:
		return []

	content = f".. code-block:: yaml\n\n{indent(yaml_output, ' ')}\n\n"
	view = StringList(content.split('\n'))

	# Parse the generated reST into a paragraph node.
	pre_commit_node = nodes.paragraph(rawsource=content)
	self.state.nested_parse(view, self.content_offset, pre_commit_node)

	# Register the node so it can be purged on environment rebuilds.
	pre_commit_node_purger.add_node(self.env, pre_commit_node, targetnode, self.lineno)

	return [pre_commit_node]
def init(ctx, force: bool, commit: bool, message: str):
	"""
	Initialise the repository with some boilerplate files.
	"""

	# Options given before the subcommand (stored on the context) take precedence.
	if ctx.obj["force"]:
		force = ctx.obj["force"]
	if ctx.obj["commit"] is not None:
		commit = ctx.obj["commit"]

	status = run_repo_helper(
			path=PathPlus.cwd(),
			force=force,
			initialise=True,
			commit=commit,
			message=message,
			)
	sys.exit(status)
def update(token: str, colour: Optional[bool] = None):
	"""
	Update the ReadTheDocs project.
	"""

	# 3rd party
	from domdf_python_tools.paths import PathPlus

	# this package
	from repo_helper_rtd import ReadTheDocsManager

	rtd_manager = ReadTheDocsManager(token, PathPlus.cwd(), colour=colour)
	rtd_manager.load_settings()

	response = rtd_manager.update()

	# Any 2xx status code counts as success.
	if response.status_code // 100 == 2:
		click.echo("Up to date!")
		sys.exit(0)

	print(response)
	sys.exit(1)
def new(token: str, verbose: bool = False, colour: ColourTrilean = None, org: bool = False):
	"""
	Create a new GitHub repository for this project.
	"""

	# 3rd party
	from domdf_python_tools.paths import PathPlus

	# this package
	from repo_helper_github import GitHubManager
	from repo_helper_github.exceptions import TracebackHandler

	with TracebackHandler()():
		github = GitHubManager(token, PathPlus.cwd(), verbose=verbose, colour=colour)
		sys.exit(github.new(org=org))
def version(version: str): """ Add a new Python version to test on. """ # 3rd party from domdf_python_tools.paths import PathPlus # this package from repo_helper.configuration import YamlEditor from repo_helper.core import RepoHelper rh = RepoHelper(PathPlus.cwd()) rh.load_settings() yaml = YamlEditor() data = yaml.load_file(rh.target_repo / "repo_helper.yml") if not isinstance(data, dict): return 1 def sort_key(value: str): if value.endswith("-dev"): return value[:-4] else: return value if "python_versions" in data: data["python_versions"] = natsorted(map( str, {*data["python_versions"], *version}), key=sort_key) yaml.dump_to_file(data, rh.target_repo / "repo_helper.yml", mode='w') else: yaml.dump_to_file( {"python_versions": natsorted(version, key=sort_key)}, rh.target_repo / "repo_helper.yml", mode='a', )
def changelog(
		entries: Optional[int] = None,
		reverse: bool = False,
		colour: Optional[bool] = None,
		no_pager: bool = False,
		):
	"""
	Show commits since the last version tag.
	"""

	# 3rd party
	from consolekit.terminal_colours import resolve_color_default
	from consolekit.utils import abort
	from domdf_python_tools.paths import PathPlus
	from southwark.log import Log
	from southwark.repo import Repo

	# this package
	from repo_helper.core import RepoHelper

	rh = RepoHelper(PathPlus.cwd())
	rh.load_settings(allow_unknown_keys=True)

	repo = Repo(rh.target_repo)
	current_tag = f"v{rh.templates.globals['version']}"

	try:
		commit_log = Log(repo).log(max_entries=entries, reverse=reverse, from_tag=current_tag)
	except ValueError as e:
		raise abort(f"ERROR: {e}")

	# Honour the user's pager preference; colour resolution is shared by both paths.
	echo = click.echo if no_pager else click.echo_via_pager
	echo(commit_log, color=resolve_color_default(colour))
def cli(ctx: Context, force: bool, commit: Optional[bool], message: str):
	"""
	Update files in the given repositories, based on settings in 'repo_helper.yml'.
	"""

	path = PathPlus.cwd()

	# Stash the global options on the context so subcommands can pick them up.
	ctx.obj["PATH"] = path
	ctx.obj["commit"] = commit
	ctx.obj["force"] = force

	if ctx.invoked_subcommand is None:
		# No subcommand given: run the full update directly.
		status = run_repo_helper(path=path, force=force, initialise=False, commit=commit, message=message)
		sys.exit(status)

	# A subcommand was given: --message before the command is almost certainly a mistake.
	if message != "Updated files with 'repo_helper'.":
		raise click.UsageError(
				f"--message cannot be used before a command. "
				f"Perhaps you meant 'repo_helper {ctx.invoked_subcommand} --message'?"
				)
def requirements(
		no_pager: bool = False,
		depth: int = -1,
		concise: bool = False,
		no_venv: bool = False,
		):
	"""
	Lists the requirements of this library, and their dependencies.
	"""

	# stdlib
	import re
	import shutil

	# 3rd party
	from domdf_python_tools.compat import importlib_metadata
	from domdf_python_tools.iterative import make_tree
	from domdf_python_tools.paths import PathPlus, in_directory
	from domdf_python_tools.stringlist import StringList
	from packaging.requirements import Requirement
	from shippinglabel.requirements import (ComparableRequirement, combine_requirements, list_requirements, read_requirements)

	# this package
	from repo_helper.core import RepoHelper

	rh = RepoHelper(PathPlus.cwd())
	rh.load_settings(allow_unknown_keys=True)

	with in_directory(rh.target_repo):
		# The output tree is rooted at this project's own pinned requirement.
		buf = StringList([f"{rh.templates.globals['pypi_name']}=={rh.templates.globals['version']}"])
		raw_requirements = sorted(read_requirements("requirements.txt")[0])
		tree: List[Union[str, List[str], List[Union[str, List]]]] = []
		venv_dir = (rh.target_repo / "venv")

		if venv_dir.is_dir() and not no_venv:
			# Use virtualenv as it exists
			search_path = []

			for directory in (venv_dir / "lib").glob("python3.*"):
				search_path.append(str(directory / "site-packages"))

			# Point metadata lookups at the venv's site-packages.
			importlib_metadata.DistributionFinder.Context.path = search_path  # type: ignore

		if concise:
			# Concise mode: flatten the dependency tree into a single sorted list.
			concise_requirements = []

			def flatten(iterable: Iterable[Union[Requirement, Iterable]]):
				# Recursively yield leaf requirement strings from a nested structure.
				for item in iterable:
					if isinstance(item, str):
						yield item
					else:
						yield from flatten(item)  # type: ignore

			for requirement in raw_requirements:
				concise_requirements.append(requirement)
				# TODO: remove "extra == " marker
				for req in flatten(list_requirements(str(requirement), depth=depth - 1)):
					concise_requirements.append(ComparableRequirement(re.sub('; extra == ".*"', '', req)))

			concise_requirements = sorted(set(combine_requirements(concise_requirements)))
			tree = list(map(str, concise_requirements))
		else:
			# Tree mode: each top-level requirement followed by its nested dependencies.
			for requirement in raw_requirements:
				tree.append(str(requirement))

				deps = list(list_requirements(str(requirement), depth=depth - 1))
				if deps:
					tree.append(deps)

		buf.extend(make_tree(tree))

		if shutil.get_terminal_size().lines >= len(buf):
			# Don't use pager if fewer lines that terminal height
			no_pager = True

		if no_pager:
			click.echo(str(buf))
		else:
			click.echo_via_pager(str(buf))
def typed(): """ Add a 'py.typed' file and the associated trove classifier. """ # 3rd party from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList from natsort import natsorted # this package from repo_helper.configupdater2 import ConfigUpdater from repo_helper.core import RepoHelper from repo_helper.utils import indent_join, stage_changes rh = RepoHelper(PathPlus.cwd()) rh.load_settings() py_typed = rh.target_repo / rh.templates.globals["import_name"] / "py.typed" if not py_typed.is_file(): py_typed.touch() stage_changes(rh.target_repo, [py_typed]) setup_cfg = rh.target_repo / "setup.cfg" pyproject_file = rh.target_repo / "pyproject.toml" if setup_cfg.is_file() and not rh.templates.globals["use_whey"]: content = setup_cfg.read_text() config = ConfigUpdater() config.read_string(content) existing_classifiers = config["metadata"]["classifiers"] existing_classifiers_string = str(existing_classifiers) classifiers = set( map(str.strip, existing_classifiers.value.split('\n'))) classifiers.add("Typing :: Typed") new_classifiers_lines = StringList( indent_join(natsorted(classifiers)).expandtabs(4)) new_classifiers_lines[0] = "classifiers =" new_classifiers_lines.blankline(ensure_single=True) setup_cfg.write_clean( content.replace(existing_classifiers_string, str(new_classifiers_lines))) if pyproject_file.is_file() and rh.templates.globals["use_whey"]: pyproject_config = dom_toml.load(pyproject_file) if "whey" in pyproject_config.get("tool", {}): classifiers = set( pyproject_config["tool"]["whey"]["base-classifiers"]) classifiers.add("Typing :: Typed") pyproject_config["tool"]["whey"]["base-classifiers"] = natsorted( classifiers) dom_toml.dump(pyproject_config, pyproject_file, encoder=dom_toml.TomlEncoder)
def wizard() -> None:
	"""
	Run the wizard 🧙 to create a 'repo_helper.yml' file.
	"""

	# stdlib
	import datetime
	import getpass
	import os
	import socket

	# 3rd party
	from apeye.email_validator import EmailSyntaxError, validate_email
	from consolekit.terminal_colours import Fore
	from domdf_python_tools.paths import PathPlus
	from dulwich.errors import NotGitRepository
	from ruamel.yaml import scalarstring
	from southwark.repo import Repo

	# this package
	from repo_helper.utils import _round_trip_dump, license_lookup

	path = PathPlus.cwd()
	config_file = path / "repo_helper.yml"

	# The wizard only makes sense inside an existing git repository.
	try:
		r = Repo(path)
	except NotGitRepository:

		with Fore.RED:
			click.echo(f"The directory {path} is not a git repository.")
			click.echo("You may need to run 'git init' in that directory first.")

		raise click.Abort

	# ---------- intro ----------
	click.echo("This wizard 🧙will guide you through creating a 'repo_helper.yml' configuration file.")
	click.echo(f"This will be created in '{config_file}'.")
	if not confirm("Do you want to continue?"):
		raise click.Abort()

	# ---------- file exists warning ----------
	if config_file.is_file():
		click.echo(f"\nWoah! That file already exists. It will be overwritten if you continue!")
		if not confirm("Are you sure you want to continue?"):
			raise click.Abort()

	click.echo("\nDefault options are indicated in [square brackets].")

	# ---------- modname ----------
	click.echo("\nThe name of the library/project.")
	modname = prompt("Name")

	# ---------- name ----------
	click.echo("\nThe name of the author.")
	click.echo("The author is usually the person who wrote the library.")

	git_config = r.get_config_stack()

	# Prefer git's user.name, then environment variables, then the OS username.
	try:
		default_author = git_config.get(("user", ), "name").decode("UTF-8")
	except KeyError:
		try:
			getpass_user = getpass.getuser()
			default_author = os.getenv(
					"GIT_AUTHOR_NAME",
					default=os.getenv("GIT_COMMITTER_NAME", default=getpass_user),
					)
		except ImportError:
			# Usually USERNAME is not set when trying getpass.getuser()
			default_author = ''

	author = prompt("Name", default=default_author)

	# ---------- email ----------
	try:
		default_email = git_config.get(("user", ), "email").decode("UTF-8")
	except KeyError:
		default_email = os.getenv(
				"GIT_AUTHOR_EMAIL",
				default=os.getenv("GIT_COMMITTER_EMAIL", default=f"{author}@{socket.gethostname()}")
				)

	click.echo("\nThe email address of the author. This will be shown on PyPI, amongst other places.")

	# Keep prompting until a syntactically valid email address is given.
	while True:
		try:
			email = validate_email(prompt("Email", default=default_email)).email
			break
		except EmailSyntaxError:
			click.echo("That is not a valid email address.")

	# ---------- username ----------
	click.echo("\nThe username of the author.")
	click.echo("(repo_helper naïvely assumes that you use the same username on GitHub as on other sites.)")
	username = prompt("Username", default=author)
	# TODO: validate username

	# ---------- version ----------
	click.echo("\nThe version number of the library, in semver format.")
	version = prompt("Version number", default="0.0.0")

	# ---------- copyright_years ----------
	click.echo("\nThe copyright years for the library.")
	copyright_years = prompt("Copyright years", default=str(datetime.datetime.today().year), type=str)

	# ---------- license_ ----------
	click.echo("""
The SPDX identifier for the license this library is distributed under.
Not all SPDX identifiers are allowed as not all map to PyPI Trove classifiers.""")

	# Keep prompting until a recognised SPDX identifier is given.
	while True:
		license_ = prompt("License")
		if license_ in license_lookup:
			break
		else:
			click.echo("That is not a valid identifier.")

	# ---------- short_desc ----------
	click.echo("\nEnter a short, one-line description for the project.")
	short_desc = prompt("Description")

	# ---------- writeout ----------
	data = {
			"modname": modname,
			"copyright_years": copyright_years,
			"author": author,
			"email": email,
			"username": username,
			"version": str(version),
			"license": license_,
			"short_desc": short_desc,
			}

	# Single-quote all values so the YAML round-trips predictably.
	data = {k: scalarstring.SingleQuotedScalarString(v) for k, v in data.items()}

	config_file.write_lines([
			"# Configuration for 'repo_helper' (https://github.com/repo-helper/repo_helper)",
			"---",
			_round_trip_dump(data),
			"enable_conda: false",
			])

	click.echo(f"""
The options you provided have been written to the file {config_file}.
You can configure additional options in that file.

The schema for the Yaml file can be found at:
	https://github.com/repo-helper/repo_helper/blob/master/repo_helper/repo_helper_schema.json
You may be able to configure your code editor to validate your configuration file against that schema.

repo_helper can now be run with the 'repo_helper' command in the repository root.

Be seeing you!
""")
def classifiers( add: bool, status: Optional[int] = None, library: Optional[bool] = None, ): """ Suggest trove classifiers based on repository metadata. """ # stdlib import sys # 3rd party from consolekit.input import choice, confirm from domdf_python_tools.paths import PathPlus from natsort import natsorted from shippinglabel.classifiers import classifiers_from_requirements from shippinglabel.requirements import combine_requirements, read_requirements # this package from repo_helper.core import RepoHelper rh = RepoHelper(PathPlus.cwd()) rh.load_settings() config = rh.templates.globals suggested_classifiers = set() pkg_dir = rh.target_repo / config["import_name"] for language in detect_languages(pkg_dir): suggested_classifiers.add(f"Programming Language :: {language}") # If not a tty, assume default options are False if not sys.stdout.isatty(): if add is None: add = False if library is None: library = False if status is None and sys.stdout.isatty(): click.echo("What is the Development Status of this project?") status = choice(text="Status", options=development_status_options, start_index=1) + 1 if status is not None: status_string = f"Development Status :: {status} - {development_status_options[status - 1]}" suggested_classifiers.add(status_string) if library is None: library = click.confirm("Is this a library for developers?") if library: suggested_classifiers.add( "Topic :: Software Development :: Libraries :: Python Modules") suggested_classifiers.add("Intended Audience :: Developers") lib_requirements = combine_requirements( read_requirements(rh.target_repo / "requirements.txt")[0]) suggested_classifiers.update( classifiers_from_requirements(lib_requirements)) # file_content = dedent( # f"""\ # # Remove any classifiers you don't think are relevant. # # Lines starting with a # will be discarded. 
# """ # ) # file_content += "\n".join(natsorted(suggested_classifiers)) # # def remove_invalid_entries(line): # line = line.strip() # if not line: # return False # elif line.startswith("#"): # return False # else: # return True # # suggested_classifiers = set( # filter(remove_invalid_entries, (click.edit(file_content) or file_content).splitlines()) # ) if not suggested_classifiers: if sys.stdout.isatty(): click.echo("Sorry, I've nothing to suggest 😢") sys.exit(1) if sys.stdout.isatty(): click.echo( "Based on what you've told me I think the following classifiers are appropriate:" ) for classifier in natsorted(suggested_classifiers): click.echo(f" - {classifier}") else: for classifier in natsorted(suggested_classifiers): click.echo(classifier) if add is None: add = confirm( "Do you want to add these to the 'repo_helper.yml' file?") if add: # this package from repo_helper.configuration import YamlEditor yaml = YamlEditor() yaml.update_key(rh.target_repo / "repo_helper.yml", "classifiers", suggested_classifiers, sort=True)
def stubs(add: Optional[bool] = None, force_tty: bool = False, no_pager: bool = False): """ Suggest :pep:`561` type stubs. """ # stdlib import shutil import sys from itertools import chain # 3rd party import tabulate from apeye import URL from apeye.requests_url import TrailingRequestsURL from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList from shippinglabel import normalize from shippinglabel.pypi import PYPI_API from shippinglabel.requirements import combine_requirements, read_requirements # this package from repo_helper.core import RepoHelper rh = RepoHelper(PathPlus.cwd()) rh.load_settings() config = rh.templates.globals requirements_files = [rh.target_repo / "requirements.txt"] if config["enable_tests"]: requirements_files.append(rh.target_repo / config["tests_dir"] / "requirements.txt") requirements_files.extend( (rh.target_repo / config["import_name"]).iterchildren("**/requirements.txt")) all_requirements = set( chain.from_iterable( read_requirements(file, include_invalid=True)[0] for file in requirements_files)) stubs_file = rh.target_repo / "stubs.txt" if stubs_file.is_file(): existing_stubs, stub_comments, invalid_stubs = read_requirements( stubs_file, include_invalid=True) else: existing_stubs = set() stub_comments, invalid_stubs = [], [] suggestions = {} for requirement in all_requirements: if normalize(requirement.name) in {"typing-extensions"}: continue types_url = TrailingRequestsURL( PYPI_API / f"types-{requirement.name.lower()}" / "json/") stubs_url = TrailingRequestsURL( PYPI_API / f"{requirement.name.lower()}-stubs" / "json/") response = stubs_url.head() if response.status_code == 404: # No stubs found for -stubs response = types_url.head() if response.status_code == 404: # No stubs found for types- continue else: response_url = URL(response.url) suggestions[str(requirement)] = response_url.parent.name # print(requirement, response.url) else: response_url = URL(response.url) 
suggestions[str(requirement)] = response_url.parent.name # print(requirement, response.url) if not suggestions: if sys.stdout.isatty() or force_tty: click.echo("No stubs to suggest.") sys.exit(1) if sys.stdout.isatty() or force_tty: table = StringList([ "Suggestions", "-----------", tabulate.tabulate(suggestions.items(), headers=["Requirement", "Stubs"]), ]) table.blankline(ensure_single=True) if no_pager or len(table) <= shutil.get_terminal_size().lines: click.echo('\n'.join(table)) else: click.echo_via_pager('\n'.join(table)) if add is None: add = confirm("Do you want to add these to the 'stubs.txt' file?") if add: new_stubs = sorted( combine_requirements(*existing_stubs, *suggestions.values())) stubs_file.write_lines([ *stub_comments, *invalid_stubs, *map(str, new_stubs), ]) else: for stub in suggestions.values(): click.echo(stub) sys.exit(0)
def builder_smoke_test(
		target_dir: PathLike,
		username: str,
		repository: str,
		*,
		actions: bool = False,
		conda: bool = False
		) -> Tuple[int, float]:
	"""
	Tests that the given repository can be successfully built with repo-helper's experimental :pep:`517` backend.

	:param target_dir: The directory to clone the repository into.
	:param username: The GitHub username owning the repository.
	:param repository: The name of the GitHub repository.
	:param actions: Whether to create GitHub Actions groups.
	:param conda: Whether to test building a conda package.

	:returns: A tuple comprising:

		* A return code for the build process.
		* The build duration.
	"""

	ret = 0
	target_dir = PathPlus(target_dir)
	url = GITHUB_COM / username / repository

	if actions:
		# "::group::" starts a collapsible group in GitHub Actions logs.
		print(f"::group::{username}_{repository}")
	else:
		print("==============================================")

	print(f"Cloning {url!s} -> {target_dir!s}")

	if actions:
		# Suppress dulwich's clone progress output on CI.
		errstream = BytesIO()
	else:
		errstream = default_bytes_err_stream

	# Shallow clone (depth=1) — only the latest commit is needed to build.
	clone(str(url), str(target_dir), depth=1, errstream=errstream)

	with in_directory(target_dir):

		# Run their tests
		# make_pyproject(target_dir, templates)
		# print((target_dir / "pyproject.toml").read_text())
		# test_process = Popen(["python3", "-m", "tox", "-n", "test"])
		# (output, err) = test_process.communicate()
		# exit_code = test_process.wait()
		# ret |= exit_code

		# Test pyp517
		# make_pyproject(target_dir, templates)
		# print((target_dir / "pyproject.toml").read_text())
		# tox_process = Popen(["python3", "-m", "tox", "-e", "build"])
		# (output, err) = tox_process.communicate()
		# exit_code = tox_process.wait()
		# ret |= exit_code

		# Test repo_helper.build
		start_time = time.time()

		build_wheel(target_dir / "dist")
		build_sdist(target_dir / "dist")

		if conda:
			# Build the conda package in a throwaway directory.
			with tempfile.TemporaryDirectory() as tmpdir:
				builder = Builder(
						repo_dir=PathPlus.cwd(),
						build_dir=tmpdir,
						out_dir=target_dir / "conda_dist",
						verbose=True,
						)
				builder.build_conda()

		build_time = time.time() - start_time

		sys.stdout.flush()

		# Twine check
		print("twine check")
		ret |= twine.cli.dispatch(["check", os.path.join("dist", '*')])
		sys.stdout.flush()

		# check_wheel_contents
		print("check_wheel_contents")

		runner = CliRunner()
		result: Result = runner.invoke(
				check_wheel_contents.__main__.main,
				catch_exceptions=False,
				args=["dist"],
				)
		ret |= result.exit_code
		print(result.stdout, flush=True)

	if actions:
		# Close the GitHub Actions log group opened above.
		print("::endgroup::")

	# TODO: create virtualenv and install package in it

	return ret, build_time