def test_managed_message(temp_repo):
    """Check the default managed message, the property setter, and the constructor argument."""

    def check(helper, expected):
        # The message must be reflected both on the attribute and in the template globals.
        assert helper.managed_message == expected
        assert helper.templates.globals["managed_message"] == expected

    rh = RepoHelper(temp_repo.path)
    check(rh, "This file is managed by 'repo_helper'. Don't edit it directly.")

    rh.managed_message = "Different managed message"
    check(rh, "Different managed message")

    rh = RepoHelper(temp_repo.path, managed_message="Managed message 3")
    check(rh, "Managed message 3")
def version(quiet: bool = False) -> None:
    """
    Show the repository version.

    :param quiet: If :py:obj:`True`, print only ``v<version>``; otherwise also report
        how many commits have been made since the release tag.
    """

    # 3rd party
    from domdf_python_tools.paths import PathPlus
    from southwark import get_tags
    from southwark.repo import Repo

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)

    version = rh.templates.globals["version"]

    if quiet:
        click.echo(f"v{version}")
    else:
        click.echo(f"Current version: v{version}")

        repo = Repo(rh.target_repo)

        # Find the commit the current version tag points at, then count how many
        # commits the walker yields before reaching it (= commits since the release).
        for sha, tag in get_tags(repo).items():
            if tag == f"v{version}":
                walker = repo.get_walker()
                for idx, entry in enumerate(walker):
                    commit_id = entry.commit.id.decode("UTF-8")
                    if commit_id == sha:
                        # Fix: use ``idx != 1`` so "0 commits" is pluralised correctly
                        # (the old ``idx > 1`` printed "0 commit since that release.").
                        click.echo(f"{idx} commit{'s' if idx != 1 else ''} since that release.")
                        break
                break
def test_via_Repo_class(
        temp_repo,
        capsys,
        file_regression: FileRegressionFixture,
        data_regression: DataRegressionFixture,
        monkeypatch,
        example_config,
        ):
    """Run the full updater via the :class:`RepoHelper` API and regression-check the managed files."""

    with in_directory(temp_repo.path):
        # Lay out the minimal project structure the updater expects.
        (temp_repo.path / "repo_helper.yml").write_text(example_config)
        (temp_repo.path / "requirements.txt").touch()
        (temp_repo.path / "tests").maybe_make()
        (temp_repo.path / "tests" / "requirements.txt").touch()
        (temp_repo.path / "README.rst").touch()
        (temp_repo.path / "doc-source").mkdir()
        (temp_repo.path / "doc-source" / "index.rst").touch()
        (temp_repo.path / ".pre-commit-config.yaml").touch()

        rh = RepoHelper(temp_repo.path)
        rh.load_settings()
        managed_files = rh.run()

        data_regression.check(sorted(managed_files))

        # Fix: ``capsys.readouterr()`` drains the capture buffers, so calling it twice
        # made the second (stderr) assertion vacuous. Capture once and check both streams.
        captured = capsys.readouterr()
        assert captured.out == ''
        assert captured.err == ''
def __init__(
        self,
        project_dir: PathLike,
        venv_dir: PathLike = "venv",
        *,
        verbosity: int = 1,
        upgrade: bool = False,
        ):
    """
    Initialise from the repo_helper configuration found in *project_dir*.

    :param project_dir: The project's root directory.
    :param venv_dir: The virtualenv directory, relative to *project_dir*.
    :param verbosity: Output verbosity level.
    :param upgrade: Whether to upgrade already-installed packages.
    """

    helper = RepoHelper(project_dir)
    helper.load_settings()

    self.project_dir = helper.target_repo

    template_globals = helper.templates.globals
    self.config: ConfigDict = {
            "name": template_globals["modname"],
            "dependencies": [],
            "optional_dependencies": template_globals["extras_require"],
            "build_dependencies": None,
            }

    self.venv_dir = self.project_dir / venv_dir
    self.verbosity: int = int(verbosity)
    self.upgrade: bool = upgrade

    # TODO: config option
    self.extras_to_install = sorted(self.config["optional_dependencies"])
def __init__(self, repo_path: PathPlus, force: bool = False):
    """
    Initialise the version bumper for the repository containing *repo_path*.

    :param repo_path: A path inside the repository; the nearest ``repo_helper.yml`` is located by traversal.
    :param force: Proceed even if the working tree has uncommitted changes.
    """

    #:
    self.repo = RepoHelper(traverse_to_file(PathPlus(repo_path), "repo_helper.yml"))
    self.repo.load_settings()

    if not assert_clean(self.repo.target_repo):
        if force:
            # Dirty working tree: warn (in red, on stderr) but carry on.
            click.echo(Fore.RED("Proceeding anyway"), err=True)
        else:
            raise click.Abort

    # pypi_secure_key = "travis_pypi_secure"
    # if self.repo.templates.globals["on_pypi"] and not self.repo.templates.globals[pypi_secure_key]:
    # 	raise abort(f"Cowardly refusing to bump the version when {pypi_secure_key!r} is unset.")
    # TODO: Handle this wrt github actions

    #:
    self.current_version = self.get_current_version()

    #: The path to the bumpversion configuration file.
    self.bumpversion_file = self.repo.target_repo / ".bumpversion.cfg"
def version(version: str): """ Add a new Python version to test on. """ # 3rd party from domdf_python_tools.paths import PathPlus # this package from repo_helper.configuration import YamlEditor from repo_helper.core import RepoHelper rh = RepoHelper(PathPlus.cwd()) rh.load_settings() yaml = YamlEditor() data = yaml.load_file(rh.target_repo / "repo_helper.yml") if not isinstance(data, dict): return 1 def sort_key(value: str): if value.endswith("-dev"): return value[:-4] else: return value if "python_versions" in data: data["python_versions"] = natsorted(map( str, {*data["python_versions"], *version}), key=sort_key) yaml.dump_to_file(data, rh.target_repo / "repo_helper.yml", mode='w') else: yaml.dump_to_file( {"python_versions": natsorted(version, key=sort_key)}, rh.target_repo / "repo_helper.yml", mode='a', )
def changelog(
        entries: Optional[int] = None,
        reverse: bool = False,
        colour: Optional[bool] = None,
        no_pager: bool = False,
        ):
    """
    Show commits since the last version tag.
    """

    # 3rd party
    from consolekit.terminal_colours import resolve_color_default
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus
    from southwark.log import Log
    from southwark.repo import Repo

    # this package
    from repo_helper.core import RepoHelper

    project = RepoHelper(PathPlus.cwd())
    project.load_settings(allow_unknown_keys=True)

    repo = Repo(project.target_repo)
    release_tag = f"v{project.templates.globals['version']}"

    try:
        commit_log = Log(repo).log(
                max_entries=entries,
                reverse=reverse,
                from_tag=release_tag,
                )
    except ValueError as e:
        raise abort(f"ERROR: {e}")

    echo_colour = resolve_color_default(colour)

    if no_pager:
        click.echo(commit_log, color=echo_colour)
    else:
        click.echo_via_pager(commit_log, color=echo_colour)
def classifiers( add: bool, status: Optional[int] = None, library: Optional[bool] = None, ): """ Suggest trove classifiers based on repository metadata. """ # stdlib import sys # 3rd party from consolekit.input import choice, confirm from domdf_python_tools.paths import PathPlus from natsort import natsorted from shippinglabel.classifiers import classifiers_from_requirements from shippinglabel.requirements import combine_requirements, read_requirements # this package from repo_helper.core import RepoHelper rh = RepoHelper(PathPlus.cwd()) rh.load_settings() config = rh.templates.globals suggested_classifiers = set() pkg_dir = rh.target_repo / config["import_name"] for language in detect_languages(pkg_dir): suggested_classifiers.add(f"Programming Language :: {language}") # If not a tty, assume default options are False if not sys.stdout.isatty(): if add is None: add = False if library is None: library = False if status is None and sys.stdout.isatty(): click.echo("What is the Development Status of this project?") status = choice(text="Status", options=development_status_options, start_index=1) + 1 if status is not None: status_string = f"Development Status :: {status} - {development_status_options[status - 1]}" suggested_classifiers.add(status_string) if library is None: library = click.confirm("Is this a library for developers?") if library: suggested_classifiers.add( "Topic :: Software Development :: Libraries :: Python Modules") suggested_classifiers.add("Intended Audience :: Developers") lib_requirements = combine_requirements( read_requirements(rh.target_repo / "requirements.txt")[0]) suggested_classifiers.update( classifiers_from_requirements(lib_requirements)) # file_content = dedent( # f"""\ # # Remove any classifiers you don't think are relevant. # # Lines starting with a # will be discarded. 
# """ # ) # file_content += "\n".join(natsorted(suggested_classifiers)) # # def remove_invalid_entries(line): # line = line.strip() # if not line: # return False # elif line.startswith("#"): # return False # else: # return True # # suggested_classifiers = set( # filter(remove_invalid_entries, (click.edit(file_content) or file_content).splitlines()) # ) if not suggested_classifiers: if sys.stdout.isatty(): click.echo("Sorry, I've nothing to suggest 😢") sys.exit(1) if sys.stdout.isatty(): click.echo( "Based on what you've told me I think the following classifiers are appropriate:" ) for classifier in natsorted(suggested_classifiers): click.echo(f" - {classifier}") else: for classifier in natsorted(suggested_classifiers): click.echo(classifier) if add is None: add = confirm( "Do you want to add these to the 'repo_helper.yml' file?") if add: # this package from repo_helper.configuration import YamlEditor yaml = YamlEditor() yaml.update_key(rh.target_repo / "repo_helper.yml", "classifiers", suggested_classifiers, sort=True)
def stubs(add: Optional[bool] = None, force_tty: bool = False, no_pager: bool = False): """ Suggest :pep:`561` type stubs. """ # stdlib import shutil import sys from itertools import chain # 3rd party import tabulate from apeye import URL from apeye.requests_url import TrailingRequestsURL from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList from shippinglabel import normalize from shippinglabel.pypi import PYPI_API from shippinglabel.requirements import combine_requirements, read_requirements # this package from repo_helper.core import RepoHelper rh = RepoHelper(PathPlus.cwd()) rh.load_settings() config = rh.templates.globals requirements_files = [rh.target_repo / "requirements.txt"] if config["enable_tests"]: requirements_files.append(rh.target_repo / config["tests_dir"] / "requirements.txt") requirements_files.extend( (rh.target_repo / config["import_name"]).iterchildren("**/requirements.txt")) all_requirements = set( chain.from_iterable( read_requirements(file, include_invalid=True)[0] for file in requirements_files)) stubs_file = rh.target_repo / "stubs.txt" if stubs_file.is_file(): existing_stubs, stub_comments, invalid_stubs = read_requirements( stubs_file, include_invalid=True) else: existing_stubs = set() stub_comments, invalid_stubs = [], [] suggestions = {} for requirement in all_requirements: if normalize(requirement.name) in {"typing-extensions"}: continue types_url = TrailingRequestsURL( PYPI_API / f"types-{requirement.name.lower()}" / "json/") stubs_url = TrailingRequestsURL( PYPI_API / f"{requirement.name.lower()}-stubs" / "json/") response = stubs_url.head() if response.status_code == 404: # No stubs found for -stubs response = types_url.head() if response.status_code == 404: # No stubs found for types- continue else: response_url = URL(response.url) suggestions[str(requirement)] = response_url.parent.name # print(requirement, response.url) else: response_url = URL(response.url) 
suggestions[str(requirement)] = response_url.parent.name # print(requirement, response.url) if not suggestions: if sys.stdout.isatty() or force_tty: click.echo("No stubs to suggest.") sys.exit(1) if sys.stdout.isatty() or force_tty: table = StringList([ "Suggestions", "-----------", tabulate.tabulate(suggestions.items(), headers=["Requirement", "Stubs"]), ]) table.blankline(ensure_single=True) if no_pager or len(table) <= shutil.get_terminal_size().lines: click.echo('\n'.join(table)) else: click.echo_via_pager('\n'.join(table)) if add is None: add = confirm("Do you want to add these to the 'stubs.txt' file?") if add: new_stubs = sorted( combine_requirements(*existing_stubs, *suggestions.values())) stubs_file.write_lines([ *stub_comments, *invalid_stubs, *map(str, new_stubs), ]) else: for stub in suggestions.values(): click.echo(stub) sys.exit(0)
def run_repo_helper(
        path,
        force: bool,
        initialise: bool,
        commit: Optional[bool],
        message: str,
        enable_pre_commit: bool = True,
        ) -> int:
    """
    Run repo_helper.

    :param path: The repository path.
    :param force: Whether to force the operation if the repository is not clean.
    :param initialise: Whether to initialise the repository.
    :param commit: Whether to commit unchanged files.
    :param message: The commit message.
    :param enable_pre_commit: Whether to install and configure pre-commit. Default :py:obj:`True`.

    :returns: ``0`` on success, ``1`` on failure.
    """

    # this package
    from repo_helper.cli.commands.init import init_repo
    from repo_helper.core import RepoHelper
    from repo_helper.utils import easter_egg

    try:
        rh = RepoHelper(path)
        rh.load_settings()
    except FileNotFoundError as e:
        # No repo_helper.yml (or similar) could be found: report and abort.
        error_block = textwrap.indent(str(e), '\t')
        raise abort(f"Unable to run 'repo_helper'.\nThe error was:\n{error_block}")

    if not assert_clean(rh.target_repo, allow_config=("repo_helper.yml", "git_helper.yml")):
        if force:
            # Dirty working tree: warn (in red, on stderr) but carry on.
            click.echo(Fore.RED("Proceeding anyway"), err=True)
        else:
            return 1

    if initialise:
        r = Repo(rh.target_repo)

        # Stage every file created by the init step.
        for filename in init_repo(rh.target_repo, rh.templates):
            r.stage(os.path.normpath(filename))

    managed_files = rh.run()

    try:
        commit_changed_files(
                repo_path=rh.target_repo,
                managed_files=managed_files,
                commit=commit,
                message=message.encode("UTF-8"),
                enable_pre_commit=enable_pre_commit,
                )
    except CommitError as e:
        indented_error = '\n'.join(f"\t{line}" for line in textwrap.wrap(str(e)))
        click.echo(f"Unable to commit changes. The error was:\n\n{indented_error}", err=True)
        return 1

    easter_egg()

    return 0
def test_repo_name(temp_repo):
    """The repository name should be read from the demo configuration."""
    helper = RepoHelper(temp_repo.path)
    helper.load_settings()

    assert helper.repo_name == "repo_helper_demo"
def typed(): """ Add a 'py.typed' file and the associated trove classifier. """ # 3rd party from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList from natsort import natsorted # this package from repo_helper.configupdater2 import ConfigUpdater from repo_helper.core import RepoHelper from repo_helper.utils import indent_join, stage_changes rh = RepoHelper(PathPlus.cwd()) rh.load_settings() py_typed = rh.target_repo / rh.templates.globals["import_name"] / "py.typed" if not py_typed.is_file(): py_typed.touch() stage_changes(rh.target_repo, [py_typed]) setup_cfg = rh.target_repo / "setup.cfg" pyproject_file = rh.target_repo / "pyproject.toml" if setup_cfg.is_file() and not rh.templates.globals["use_whey"]: content = setup_cfg.read_text() config = ConfigUpdater() config.read_string(content) existing_classifiers = config["metadata"]["classifiers"] existing_classifiers_string = str(existing_classifiers) classifiers = set( map(str.strip, existing_classifiers.value.split('\n'))) classifiers.add("Typing :: Typed") new_classifiers_lines = StringList( indent_join(natsorted(classifiers)).expandtabs(4)) new_classifiers_lines[0] = "classifiers =" new_classifiers_lines.blankline(ensure_single=True) setup_cfg.write_clean( content.replace(existing_classifiers_string, str(new_classifiers_lines))) if pyproject_file.is_file() and rh.templates.globals["use_whey"]: pyproject_config = dom_toml.load(pyproject_file) if "whey" in pyproject_config.get("tool", {}): classifiers = set( pyproject_config["tool"]["whey"]["base-classifiers"]) classifiers.add("Typing :: Typed") pyproject_config["tool"]["whey"]["base-classifiers"] = natsorted( classifiers) dom_toml.dump(pyproject_config, pyproject_file, encoder=dom_toml.TomlEncoder)
def update_repository(repository: Dict, recreate: bool = False) -> int:
    """
    Run the updater for the given repository.

    :param repository: The JSON payload describing the GitHub repository
        (``id``, ``name``, ``owner.login`` and ``html_url`` keys are read).
    :param recreate: Delete any existing update branch and recreate it from the default branch.

    :returns: ``0`` on success, ``1`` on failure or when skipped.
    """

    # TODO: rebase
    # TODO: if branch already exists and PR has been merged, abort

    db_repository: Repository = get_db_repository(
            repo_id=repository["id"],
            owner=repository["owner"]["login"],
            name=repository["name"],
            )

    # Skip if a PR was already created today (compared by day and month), unless recreating.
    last_pr_date = datetime.fromtimestamp(db_repository.last_pr or 200)
    now = datetime.now()
    if not recreate and last_pr_date.day == now.day and last_pr_date.month == now.month:
        print(f"A PR for {db_repository.fullname} has already been created today. Skipping.")
        return 1

    owner = repository["owner"]["login"]
    repository_name = repository["name"]

    # Log in as the app
    context_switcher.login_as_app()

    # Log in as installation
    installation_id = context_switcher.login_as_repo_installation(owner=owner, repository=repository_name)

    github_repo: GitHubRepository = client.repository(owner, repository_name)

    # Ensure 'repo_helper.yml' exists
    try:
        github_repo.file_contents("repo_helper.yml")
    except NotFoundError:
        print(f"repo_helper.yml not found in the repository {repository['owner']['login']}/{repository['name']}")
        return 1

    with TemporaryDirectory() as tmpdir:

        # Clone to tmpdir
        repo = clone(repository["html_url"], tmpdir)

        if recreate:
            # Delete any existing branch and create again from master
            recreate_branch(repo)
        elif f"refs/remotes/origin/{BRANCH_NAME}".encode("UTF-8") in dict(repo.refs):
            checkout_branch(repo)
        else:
            # Switch to new branch
            create_branch(repo)

        # Update files
        try:
            rh = RepoHelper(tmpdir)
            rh.load_settings()
        except FileNotFoundError as e:
            error_block = indent(str(e), '\t')
            print(f"Unable to run 'repo_helper'.\nThe error was:\n{error_block}")
            # Fix: previously execution fell through to ``rh.run()`` with ``rh``
            # unbound, raising a NameError. Bail out explicitly instead.
            return 1

        managed_files = rh.run()

        staged_files = stage_changes(repo.path, managed_files)

        if not staged_files and recreate:
            # Everything is up to date, close PR.
            close_pr(owner, repository_name)
            return 0

        try:
            if not commit_changed_files(
                    repo_path=rh.target_repo,
                    managed_files=managed_files,
                    commit=True,
                    message=b"Updated files with 'repo_helper'.",
                    enable_pre_commit=False,
                    ):
                sys.stdout.flush()
                sys.stderr.flush()
                print("Failure!")
                return 1

            sys.stdout.flush()
            sys.stderr.flush()

        except CommitError as e:
            indented_error = '\n'.join(f"\t{line}" for line in wrap(str(e)))
            print(f"Unable to commit changes. The error was:\n\n{indented_error}")
            print("Failure!")
            return 1

        # Push
        dulwich.porcelain.push(
                repo,
                repository["html_url"],
                BRANCH_NAME.encode("UTF-8"),
                username="******",
                password=get_installation_access_token(github_repo, installation_id),
                force=recreate,
                )

        sys.stdout.flush()
        sys.stderr.flush()

        # Create PR
        base = github_repo.default_branch
        head = f"{owner}:{BRANCH_NAME}"

        # Only open a new PR when no open PR for this branch already exists.
        if not list(github_repo.pull_requests(state="open", base=base, head=head)):
            created_pr = github_repo.create_pull(
                    title="[repo-helper] Configuration Update",
                    base=base,
                    head=head,
                    body=make_pr_details(),
                    )

            if created_pr is not None:
                db_repository.add_pr(int(created_pr.number))

        db_repository.last_pr = datetime.now().timestamp()
        db.session.commit()

        print("Success!")
        return 0
def requirements(
        no_pager: bool = False,
        depth: int = -1,
        concise: bool = False,
        no_venv: bool = False,
        ):
    """
    Lists the requirements of this library, and their dependencies.

    :param no_pager: Print directly rather than via a pager.
    :param depth: How many levels of transitive dependencies to resolve (``-1`` for unlimited).
    :param concise: Show a flat, combined list instead of a dependency tree.
    :param no_venv: Don't resolve installed distributions from the project's ``venv`` directory.
    """

    # stdlib
    import re
    import shutil

    # 3rd party
    from domdf_python_tools.compat import importlib_metadata
    from domdf_python_tools.iterative import make_tree
    from domdf_python_tools.paths import PathPlus, in_directory
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import Requirement
    from shippinglabel.requirements import (
            ComparableRequirement,
            combine_requirements,
            list_requirements,
            read_requirements
            )

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)

    with in_directory(rh.target_repo):
        # The output always starts with the project itself pinned at its current version.
        buf = StringList([f"{rh.templates.globals['pypi_name']}=={rh.templates.globals['version']}"])
        raw_requirements = sorted(read_requirements("requirements.txt")[0])
        tree: List[Union[str, List[str], List[Union[str, List]]]] = []

        venv_dir = (rh.target_repo / "venv")

        if venv_dir.is_dir() and not no_venv:
            # Use virtualenv as it exists
            search_path = []

            for directory in (venv_dir / "lib").glob("python3.*"):
                search_path.append(str(directory / "site-packages"))

            importlib_metadata.DistributionFinder.Context.path = search_path  # type: ignore

        if concise:
            concise_requirements = []

            def flatten(iterable: Iterable[Union[Requirement, Iterable]]):
                # Recursively yield leaf requirements from the nested dependency lists.
                for item in iterable:
                    if isinstance(item, str):
                        yield item
                    else:
                        yield from flatten(item)  # type: ignore

            for requirement in raw_requirements:
                concise_requirements.append(requirement)
                # TODO: remove "extra == " marker
                for req in flatten(list_requirements(str(requirement), depth=depth - 1)):
                    concise_requirements.append(ComparableRequirement(re.sub('; extra == ".*"', '', req)))

            # De-duplicate and combine version specifiers before sorting.
            concise_requirements = sorted(set(combine_requirements(concise_requirements)))
            tree = list(map(str, concise_requirements))

        else:
            for requirement in raw_requirements:
                tree.append(str(requirement))

                deps = list(list_requirements(str(requirement), depth=depth - 1))
                if deps:
                    tree.append(deps)

        buf.extend(make_tree(tree))

        if shutil.get_terminal_size().lines >= len(buf):
            # Don't use pager if fewer lines than terminal height
            no_pager = True

        if no_pager:
            click.echo(str(buf))
        else:
            click.echo_via_pager(str(buf))