def test_confirm_abort(capsys, monkeypatch, data_regression: DataRegressionFixture, exception):
    """
    ``confirm`` should raise :class:`click.Abort` (with no message) when the
    prompt function raises, and the prompt text should still have been echoed.
    """

    def raising_prompt(prompt):
        # Echo the prompt exactly as a terminal would, then simulate the user
        # aborting (e.g. Ctrl+C / EOF) by raising the parametrised exception.
        print(prompt, end='')
        raise exception

    monkeypatch.setattr(click.termui, "visible_prompt_func", raising_prompt)

    with pytest.raises(click.Abort, match="^$"):
        confirm(text="Do you wish to delete all files in '/' ?", default=False)

    captured_lines = capsys.readouterr().out.splitlines()
    assert list(StringList(captured_lines)) == ["Do you wish to delete all files in '/' ? [y/N]:"]
def test_confirm(capsys, monkeypatch, data_regression: DataRegressionFixture):
    """
    ``confirm`` should map 'Y'/'N' answers to True/False and fall back to the
    default on an empty answer; the echoed transcript is checked by regression.
    """

    responses = iter(['Y', 'N', '', ''])

    def scripted_prompt(prompt):
        # Pop the next canned answer and echo "prompt + answer" like a terminal.
        answer = next(responses)
        print(f"{prompt}{answer}".rstrip())
        return answer

    monkeypatch.setattr(click.termui, "visible_prompt_func", scripted_prompt)

    question = "Do you wish to delete all files in '/' ?"
    assert confirm(text=question, default=False) is True    # 'Y'
    assert confirm(text=question, default=False) is False   # 'N'
    assert confirm(text=question, default=False) is False   # '' -> default False
    assert confirm(text=question, default=True) is True     # '' -> default True

    data_regression.check(list(StringList(capsys.readouterr().out.splitlines())))
def wizard() -> None:
    """
    Run the wizard 🧙 to create a 'repo_helper.yml' file.

    Interactively prompts for project metadata (name, author, email, username,
    version, copyright years, license, description) and writes the collected
    values to ``repo_helper.yml`` in the current directory.

    :raises click.Abort: If the current directory is not a git repository,
        or if the user declines to continue at any confirmation prompt.
    """

    # stdlib
    import datetime
    import getpass
    import os
    import socket

    # 3rd party
    from apeye.email_validator import EmailSyntaxError, validate_email
    from consolekit.terminal_colours import Fore
    from domdf_python_tools.paths import PathPlus
    from dulwich.errors import NotGitRepository
    from ruamel.yaml import scalarstring
    from southwark.repo import Repo

    # this package
    from repo_helper.utils import _round_trip_dump, license_lookup

    path = PathPlus.cwd()
    config_file = path / "repo_helper.yml"

    try:
        r = Repo(path)
    except NotGitRepository:
        with Fore.RED:
            click.echo(f"The directory {path} is not a git repository.")
            click.echo("You may need to run 'git init' in that directory first.")
        # Fixed: instantiate the exception for consistency with the other
        # ``raise click.Abort()`` calls in this function.
        raise click.Abort()

    # ---------- intro ----------
    click.echo("This wizard 🧙will guide you through creating a 'repo_helper.yml' configuration file.")
    click.echo(f"This will be created in '{config_file}'.")
    if not confirm("Do you want to continue?"):
        raise click.Abort()

    # ---------- file exists warning ----------
    if config_file.is_file():
        # Fixed F541: this was an f-string with no placeholders.
        click.echo("\nWoah! That file already exists. It will be overwritten if you continue!")
        if not confirm("Are you sure you want to continue?"):
            raise click.Abort()

    click.echo("\nDefault options are indicated in [square brackets].")

    # ---------- modname ----------
    click.echo("\nThe name of the library/project.")
    modname = prompt("Name")

    # ---------- name ----------
    click.echo("\nThe name of the author.")
    click.echo("The author is usually the person who wrote the library.")

    git_config = r.get_config_stack()

    try:
        default_author = git_config.get(("user", ), "name").decode("UTF-8")
    except KeyError:
        try:
            getpass_user = getpass.getuser()
            default_author = os.getenv(
                "GIT_AUTHOR_NAME",
                default=os.getenv("GIT_COMMITTER_NAME", default=getpass_user),
            )
        except ImportError:
            # Usually USERNAME is not set when trying getpass.getuser()
            default_author = ''

    author = prompt("Name", default=default_author)

    # ---------- email ----------
    try:
        default_email = git_config.get(("user", ), "email").decode("UTF-8")
    except KeyError:
        default_email = os.getenv(
            "GIT_AUTHOR_EMAIL",
            default=os.getenv("GIT_COMMITTER_EMAIL", default=f"{author}@{socket.gethostname()}"))

    click.echo("\nThe email address of the author. This will be shown on PyPI, amongst other places.")

    # Loop until a syntactically valid email address is entered.
    while True:
        try:
            email = validate_email(prompt("Email", default=default_email)).email
            break
        except EmailSyntaxError:
            click.echo("That is not a valid email address.")

    # ---------- username ----------
    click.echo("\nThe username of the author.")
    click.echo("(repo_helper naïvely assumes that you use the same username on GitHub as on other sites.)")
    username = prompt("Username", default=author)  # TODO: validate username

    # ---------- version ----------
    click.echo("\nThe version number of the library, in semver format.")
    version = prompt("Version number", default="0.0.0")

    # ---------- copyright_years ----------
    click.echo("\nThe copyright years for the library.")
    copyright_years = prompt("Copyright years", default=str(datetime.datetime.today().year), type=str)

    # ---------- license_ ----------
    click.echo("""
The SPDX identifier for the license this library is distributed under.
Not all SPDX identifiers are allowed as not all map to PyPI Trove classifiers.""")

    # Loop until a recognised SPDX identifier is entered.
    while True:
        license_ = prompt("License")
        if license_ in license_lookup:
            break
        else:
            click.echo("That is not a valid identifier.")

    # ---------- short_desc ----------
    click.echo("\nEnter a short, one-line description for the project.")
    short_desc = prompt("Description")

    # ---------- writeout ----------
    data = {
        "modname": modname,
        "copyright_years": copyright_years,
        "author": author,
        "email": email,
        "username": username,
        "version": str(version),
        "license": license_,
        "short_desc": short_desc,
    }

    # Single-quote every value so the generated YAML is unambiguous.
    data = {k: scalarstring.SingleQuotedScalarString(v) for k, v in data.items()}

    config_file.write_lines([
        "# Configuration for 'repo_helper' (https://github.com/repo-helper/repo_helper)",
        "---",
        _round_trip_dump(data),
        "enable_conda: false",
    ])

    click.echo(f"""
The options you provided have been written to the file {config_file}.
You can configure additional options in that file.

The schema for the Yaml file can be found at:
    https://github.com/repo-helper/repo_helper/blob/master/repo_helper/repo_helper_schema.json
You may be able to configure your code editor to validate your configuration file against that schema.

repo_helper can now be run with the 'repo_helper' command in the repository root.

Be seeing you!
""")
def classifiers(
    add: Optional[bool] = None,
    status: Optional[int] = None,
    library: Optional[bool] = None,
):
    """
    Suggest trove classifiers based on repository metadata.

    :param add: Whether to add the suggested classifiers to the 'repo_helper.yml' file.
        :py:obj:`None` means ask the user (falls back to :py:obj:`False` when
        stdout is not a tty).
    :param status: The Development Status classifier number, or :py:obj:`None`
        to prompt the user when stdout is a tty.
    :param library: Whether this project is a library for developers, or
        :py:obj:`None` to ask.
    """

    # NOTE(review): the original annotated ``add`` as plain ``bool`` and gave it
    # no default, but the body compares it to ``None`` — corrected to
    # ``Optional[bool] = None`` to match actual usage.

    # stdlib
    import sys

    # 3rd party
    from consolekit.input import choice, confirm
    from domdf_python_tools.paths import PathPlus
    from natsort import natsorted
    from shippinglabel.classifiers import classifiers_from_requirements
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    suggested_classifiers = set()
    pkg_dir = rh.target_repo / config["import_name"]

    # One "Programming Language :: ..." classifier per detected language.
    for language in detect_languages(pkg_dir):
        suggested_classifiers.add(f"Programming Language :: {language}")

    # If not a tty, assume default options are False
    if not sys.stdout.isatty():
        if add is None:
            add = False
        if library is None:
            library = False

    if status is None and sys.stdout.isatty():
        click.echo("What is the Development Status of this project?")
        # ``choice`` returns a 0-based index; Development Status numbers are 1-based.
        status = choice(text="Status", options=development_status_options, start_index=1) + 1

    if status is not None:
        status_string = f"Development Status :: {status} - {development_status_options[status - 1]}"
        suggested_classifiers.add(status_string)

    if library is None:
        # Fixed for consistency: use the consolekit ``confirm`` imported above
        # (matching the prompt used for ``add`` below) instead of ``click.confirm``.
        library = confirm("Is this a library for developers?")

    if library:
        suggested_classifiers.add("Topic :: Software Development :: Libraries :: Python Modules")
        suggested_classifiers.add("Intended Audience :: Developers")

    # Suggest classifiers implied by the project's requirements (e.g. frameworks).
    lib_requirements = combine_requirements(read_requirements(rh.target_repo / "requirements.txt")[0])
    suggested_classifiers.update(classifiers_from_requirements(lib_requirements))

    # NOTE: disabled interactive-edit feature, kept for future reference.
    # file_content = dedent(
    #     f"""\
    #     # Remove any classifiers you don't think are relevant.
    #     # Lines starting with a # will be discarded.
    #     """
    # )
    # file_content += "\n".join(natsorted(suggested_classifiers))
    #
    # def remove_invalid_entries(line):
    #     line = line.strip()
    #     if not line:
    #         return False
    #     elif line.startswith("#"):
    #         return False
    #     else:
    #         return True
    #
    # suggested_classifiers = set(
    #     filter(remove_invalid_entries, (click.edit(file_content) or file_content).splitlines())
    # )

    if not suggested_classifiers:
        if sys.stdout.isatty():
            click.echo("Sorry, I've nothing to suggest 😢")
        sys.exit(1)

    if sys.stdout.isatty():
        click.echo("Based on what you've told me I think the following classifiers are appropriate:")
        for classifier in natsorted(suggested_classifiers):
            click.echo(f" - {classifier}")
    else:
        # Plain, machine-readable output when piped.
        for classifier in natsorted(suggested_classifiers):
            click.echo(classifier)

    if add is None:
        add = confirm("Do you want to add these to the 'repo_helper.yml' file?")

    if add:
        # this package
        from repo_helper.configuration import YamlEditor

        yaml = YamlEditor()
        yaml.update_key(rh.target_repo / "repo_helper.yml", "classifiers", suggested_classifiers, sort=True)
def stubs(add: Optional[bool] = None, force_tty: bool = False, no_pager: bool = False):
    """
    Suggest :pep:`561` type stubs.

    :param add: Whether to add the suggested stub packages to the 'stubs.txt'
        file. :py:obj:`None` means ask the user.
    :param force_tty: Treat stdout as a terminal even when it isn't one.
    :param no_pager: Print the suggestions table directly rather than via a pager.
    """

    # stdlib
    import shutil
    import sys
    from itertools import chain

    # 3rd party
    import tabulate
    from apeye import URL
    from apeye.requests_url import TrailingRequestsURL
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from shippinglabel import normalize
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    # Gather every requirements.txt the project uses (top level, tests, nested).
    requirements_files = [rh.target_repo / "requirements.txt"]

    if config["enable_tests"]:
        requirements_files.append(rh.target_repo / config["tests_dir"] / "requirements.txt")

    requirements_files.extend((rh.target_repo / config["import_name"]).iterchildren("**/requirements.txt"))

    all_requirements = set(
        chain.from_iterable(read_requirements(file, include_invalid=True)[0] for file in requirements_files))

    stubs_file = rh.target_repo / "stubs.txt"

    if stubs_file.is_file():
        existing_stubs, stub_comments, invalid_stubs = read_requirements(stubs_file, include_invalid=True)
    else:
        existing_stubs = set()
        stub_comments, invalid_stubs = [], []

    suggestions = {}

    for requirement in all_requirements:
        # typing-extensions ships its own types; skip it.
        if normalize(requirement.name) in {"typing-extensions"}:
            continue

        # Stub packages on PyPI follow one of two naming conventions.
        types_url = TrailingRequestsURL(PYPI_API / f"types-{requirement.name.lower()}" / "json/")
        stubs_url = TrailingRequestsURL(PYPI_API / f"{requirement.name.lower()}-stubs" / "json/")

        response = stubs_url.head()
        if response.status_code == 404:
            # No stubs found for <name>-stubs; fall back to types-<name>.
            response = types_url.head()
            if response.status_code == 404:
                # No stubs found under either naming convention.
                continue

        # Deduplicated from the original (both branches recorded the suggestion
        # identically): take the canonical name from the (possibly redirected) URL.
        suggestions[str(requirement)] = URL(response.url).parent.name
        # print(requirement, response.url)

    if not suggestions:
        if sys.stdout.isatty() or force_tty:
            click.echo("No stubs to suggest.")
        sys.exit(1)

    if sys.stdout.isatty() or force_tty:

        table = StringList([
            "Suggestions",
            "-----------",
            tabulate.tabulate(suggestions.items(), headers=["Requirement", "Stubs"]),
        ])
        table.blankline(ensure_single=True)

        # Page the output only when it won't fit on the screen.
        if no_pager or len(table) <= shutil.get_terminal_size().lines:
            click.echo('\n'.join(table))
        else:
            click.echo_via_pager('\n'.join(table))

        if add is None:
            add = confirm("Do you want to add these to the 'stubs.txt' file?")

        if add:
            new_stubs = sorted(combine_requirements(*existing_stubs, *suggestions.values()))

            stubs_file.write_lines([
                *stub_comments,
                *invalid_stubs,
                *map(str, new_stubs),
            ])

    else:
        # Plain, machine-readable output when piped.
        for stub in suggestions.values():
            click.echo(stub)

    sys.exit(0)
def secrets(
    self,
    org: bool = False,
    overwrite: Optional[bool] = None,
    PYPI_TOKEN: Optional[str] = None,
    ANACONDA_TOKEN: Optional[str] = None
) -> int:
    """
    Set or update the secrets of the GitHub repository for this project.

    .. versionadded:: 0.3.0

    :param org: Whether the repository should be created for the organization set as ``username``,
        or for the authenticated user (default).
    :param overwrite: Overwrite existing values.
    :default overwrite: ask first.

    ``PYPI_TOKEN`` and ``ANACONDA_TOKEN`` can either be passed as keyword arguments
    to this function or provided at the interactive prompt.

    :rtype:

    .. versionchanged:: 0.4.0

        Added ``overwrite``, ``PYPI_TOKEN``, ``ANACONDA_TOKEN`` options.
    """

    # Explicit mapping of secret name -> keyword-argument value, replacing the
    # original's fragile ``locals().get(secret_name)`` lookup.
    provided_tokens: Dict[str, Optional[str]] = {
        "PYPI_TOKEN": PYPI_TOKEN,
        "ANACONDA_TOKEN": ANACONDA_TOKEN,
    }

    with self.echo_rate_limit():
        user = self.get_org_or_user(org)
        repo_name = self.templates.globals["repo_name"]
        repo: repos.Repository = self._get_repository(user, repo_name, org)

        # List of existing secrets.
        existing_secrets = secrets.get_secrets(repo)

        # Public key to encrypt secrets with.
        public_key = secrets.get_public_key(repo)

        ret = 0

        # Each secret maps to a validator returning (valid, reason-if-invalid).
        target_secrets: Dict[str, Callable[[str], Tuple[bool, str]]] = {"PYPI_TOKEN": validate_pypi_token}

        if self.templates.globals["enable_conda"]:
            target_secrets["ANACONDA_TOKEN"] = no_op_validator

        for secret_name in sorted(target_secrets):
            if secret_name not in existing_secrets:
                # The secret doesn't exist yet, so it is always created.
                update = True
            elif overwrite is not None:
                # Bug fix: honour the caller's choice. Previously any non-None
                # value — including ``overwrite=False`` — forced an update,
                # contradicting the documented behaviour.
                update = overwrite
            else:
                click.echo(f"A value for the secret {secret_name!r} already exists.")
                update = confirm("Do you want to update the secret?")

            if update:
                operation = "update" if secret_name in existing_secrets else "create"

                # Use the explicitly-passed token when given, otherwise prompt.
                secret_value = provided_tokens.get(secret_name) or getpass(f"{secret_name}: ")

                valid, invalid_reason = target_secrets[secret_name](secret_value)
                if not valid:
                    # Bug fix: click.Abort does not display its arguments
                    # (click only prints "Aborted!"), so the reason was silently
                    # lost — echo it before aborting.
                    click.echo(
                        f"The value for {secret_name} does not appear to be valid: {invalid_reason}",
                        err=True,
                    )
                    raise click.Abort()

                response = secrets.set_secret(
                    repo,
                    secret_name=secret_name,
                    value=secret_value,
                    public_key=public_key,
                )

                if response.status_code not in {200, 201, 204}:
                    message = f"Could not {operation} the secret {secret_name!r}: Status {response.status_code}"
                    click.echo(Fore.YELLOW(message), color=self.colour)
                    ret |= 1
                else:
                    message = f"Successfully {operation}d the secret {secret_name!r}."
                    click.echo(Fore.GREEN(message), color=self.colour)

    return ret
def commit_changed_files(
    repo_path: PathLike,
    managed_files: Iterable[PathLike],
    commit: Optional[bool] = None,
    message: bytes = b"Updated files with 'repo_helper'.",
    enable_pre_commit: bool = True,
) -> bool:
    """
    Stage and commit any files that have been updated, added or removed.

    :param repo_path: The path to the repository root.
    :param managed_files: List of files managed by ``repo_helper``.
    :param commit: Whether to commit the changes automatically.
        :py:obj:`None` (default) indicates the user should be asked.
    :param message: The commit message to use. Default ``"Updated files with 'repo_helper'."``
    :param enable_pre_commit: Whether to install and configure pre-commit.
        Default :py:obj:`True`.

    :returns: :py:obj:`True` if the changes were committed. :py:obj:`False` otherwise.
    """

    # Fixed docstring markup above: ``:py:obj`True``` was missing a colon.

    # this package
    from repo_helper.utils import commit_changes, sort_paths, stage_changes

    repo_path = PathPlus(repo_path).absolute()
    r = Repo(str(repo_path))

    staged_files = stage_changes(r.path, managed_files)

    # Ensure pre-commit hooks are installed
    if enable_pre_commit and platform.system() == "Linux":
        with in_directory(repo_path), suppress(ImportError):
            # 3rd party
            import pre_commit.main  # type: ignore

            pre_commit.main.main(["install"])

    if staged_files:
        click.echo("\nThe following files will be committed:")

        # Sort staged_files and put directories first
        for staged_filename in sort_paths(*staged_files):
            # Fixed: dropped the redundant ``!s`` conversion (``as_posix()``
            # already returns a str); output is unchanged.
            click.echo(f" {staged_filename.as_posix()}")

        click.echo()

        if commit is None:
            commit = confirm("Commit?", default=True)

        if commit:
            if enable_pre_commit or "pre-commit" in r.hooks:
                # Ensure the working directory for pre-commit is correct
                r.hooks["pre-commit"].cwd = str(repo_path.absolute())  # type: ignore

            try:
                commit_id = commit_changes(r, message.decode("UTF-8"))
                click.echo(f"Committed as {commit_id}")
                return True
            except CommitError as e:
                # Commit failed; report and fall through to return False.
                click.echo(f"Unable to commit: {e}", err=True)
        else:
            click.echo("Changed files were staged but not committed.")
    else:
        click.echo("Nothing to commit")

    return False