def __init__(
			self,
			token: str,
			username: str,
			repo_name: str,
			*,
			managed_message="This file is managed by 'repo_helper'. Don't edit it directly.",
			verbose: bool = False,
			colour: ColourTrilean = True,
			):

		self._tmpdir = tempfile.TemporaryDirectory()

		self.github = Github(token=token)
		self.verbose = verbose
		self.colour = resolve_color_default(colour)

		target_repo = PathPlus(self._tmpdir.name)
		config_file_name = "repo_helper.yml"

		github_repo: repos.Repository = self.github.repository(username, repo_name)
		contents_from_github: contents.Contents = github_repo.file_contents(config_file_name)
		(target_repo / config_file_name).write_bytes(contents_from_github.decoded)

		RepoHelper.__init__(self, target_repo, managed_message)

		self.load_settings()
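
The enclosing class is not shown above, but since it calls RepoHelper.__init__ directly it behaves like a RepoHelper backed by a fresh checkout of the configuration file from GitHub. A minimal usage sketch, with GitHubRepoHelper as a hypothetical stand-in for the real class name:

# Hypothetical sketch: 'GitHubRepoHelper' stands in for the class whose
# __init__ is shown above; the token is a GitHub personal access token.
gh_rh = GitHubRepoHelper(
    token="<github-token>",
    username="domdfcoding",
    repo_name="repo_helper",
    verbose=True,
)

# 'repo_helper.yml' has been downloaded into a temporary directory and
# load_settings() already called, so the template globals are populated.
print(gh_rh.templates.globals["version"])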
Example #2
def version(quiet: bool = False) -> None:
    """
	Show the repository version.
	"""

    # 3rd party
    from domdf_python_tools.paths import PathPlus
    from southwark import get_tags
    from southwark.repo import Repo

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)
    version = rh.templates.globals["version"]

    if quiet:
        click.echo(f"v{version}")

    else:
        click.echo(f"Current version: v{version}")

        repo = Repo(rh.target_repo)
        for sha, tag in get_tags(repo).items():
            if tag == f"v{version}":
                walker = repo.get_walker()
                for idx, entry in enumerate(walker):
                    commit_id = entry.commit.id.decode("UTF-8")
                    if commit_id == sha:
                        click.echo(
                            f"{idx} commit{'s' if idx > 1 else ''} since that release."
                        )
                        break
                break
Example #3
def test_via_Repo_class(
    temp_repo,
    capsys,
    file_regression: FileRegressionFixture,
    data_regression: DataRegressionFixture,
    monkeypatch,
    example_config,
):

    with in_directory(temp_repo.path):
        (temp_repo.path / "repo_helper.yml").write_text(example_config)
        (temp_repo.path / "requirements.txt").touch()
        (temp_repo.path / "tests").maybe_make()
        (temp_repo.path / "tests" / "requirements.txt").touch()
        (temp_repo.path / "README.rst").touch()
        (temp_repo.path / "doc-source").mkdir()
        (temp_repo.path / "doc-source" / "index.rst").touch()
        (temp_repo.path / ".pre-commit-config.yaml").touch()

        rh = RepoHelper(temp_repo.path)
        rh.load_settings()
        managed_files = rh.run()

    data_regression.check(sorted(managed_files))

    captured = capsys.readouterr()
    assert captured.out == ''
    assert captured.err == ''
Example #4
    def __init__(
        self,
        project_dir: PathLike,
        venv_dir: PathLike = "venv",
        *,
        verbosity: int = 1,
        upgrade: bool = False,
    ):

        rh = RepoHelper(project_dir)
        rh.load_settings()

        self.project_dir = rh.target_repo

        self.config: ConfigDict = {
            "name": rh.templates.globals["modname"],
            "dependencies": [],
            "optional_dependencies": rh.templates.globals["extras_require"],
            "build_dependencies": None,
        }

        self.venv_dir = self.project_dir / venv_dir
        self.verbosity: int = int(verbosity)
        self.upgrade: bool = upgrade

        # TODO: config option
        self.extras_to_install = sorted(self.config["optional_dependencies"])
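
As with the first example, the enclosing class is not shown; a hedged instantiation sketch (Builder is a hypothetical placeholder name) illustrating how the attributes set above would be used:

# Hypothetical sketch: 'Builder' is a placeholder for the unshown class above.
builder = Builder("/path/to/project", venv_dir=".venv", verbosity=2, upgrade=True)

# venv_dir is resolved relative to the project root found by RepoHelper, and the
# extras to install come straight from 'extras_require' in repo_helper.yml.
print(builder.venv_dir)
print(builder.extras_to_install)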
Example #5
def test_managed_message(temp_repo):
    rh = RepoHelper(temp_repo.path)
    managed_message = "This file is managed by 'repo_helper'. Don't edit it directly."
    assert rh.managed_message == managed_message
    assert rh.templates.globals["managed_message"] == managed_message

    rh.managed_message = "Different managed message"
    assert rh.managed_message == "Different managed message"
    assert rh.templates.globals["managed_message"] == "Different managed message"

    rh = RepoHelper(temp_repo.path, managed_message="Managed message 3")
    assert rh.managed_message == "Managed message 3"
    assert rh.templates.globals["managed_message"] == "Managed message 3"
Example #6
	def __init__(self, repo_path: PathPlus, force: bool = False):
		#:
		self.repo = RepoHelper(traverse_to_file(PathPlus(repo_path), "repo_helper.yml"))

		self.repo.load_settings()

		if not assert_clean(self.repo.target_repo):
			if force:
				click.echo(Fore.RED("Proceeding anyway"), err=True)
			else:
				raise click.Abort

		# pypi_secure_key = "travis_pypi_secure"
		# if self.repo.templates.globals["on_pypi"] and not self.repo.templates.globals[pypi_secure_key]:
		# 	raise abort(f"Cowardly refusing to bump the version when {pypi_secure_key!r} is unset.")
		# TODO: Handle this wrt github actions

		#:
		self.current_version = self.get_current_version()

		#: The path to the bumpversion configuration file.
		self.bumpversion_file = self.repo.target_repo / ".bumpversion.cfg"
Example #7
def version(version: str):
    """
	Add a new Python version to test on.
	"""

    # 3rd party
    from domdf_python_tools.paths import PathPlus
    from natsort import natsorted

    # this package
    from repo_helper.configuration import YamlEditor
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()

    yaml = YamlEditor()

    data = yaml.load_file(rh.target_repo / "repo_helper.yml")
    if not isinstance(data, dict):
        return 1

    def sort_key(value: str):
        if value.endswith("-dev"):
            return value[:-4]
        else:
            return value

    if "python_versions" in data:
        data["python_versions"] = natsorted(map(
            str, {*data["python_versions"], *version}),
                                            key=sort_key)
        yaml.dump_to_file(data, rh.target_repo / "repo_helper.yml", mode='w')
    else:
        yaml.dump_to_file(
            {"python_versions": natsorted(version, key=sort_key)},
            rh.target_repo / "repo_helper.yml",
            mode='a',
        )
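
The sort_key above strips a trailing "-dev" so that development versions sort alongside their final releases under natural sorting; a small standalone illustration of the same natsorted call:

from natsort import natsorted

def sort_key(value: str) -> str:
    # "-dev" versions sort as if they were the final release
    return value[:-4] if value.endswith("-dev") else value

# Natural sort keeps 3.10 after 3.9, and 3.11-dev sorts with 3.11
print(natsorted(["3.10", "3.9", "3.11-dev"], key=sort_key))  # ['3.9', '3.10', '3.11-dev']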
Example #8
def changelog(
    entries: Optional[int] = None,
    reverse: bool = False,
    colour: Optional[bool] = None,
    no_pager: bool = False,
):
    """
	Show commits since the last version tag.
	"""

    # 3rd party
    from consolekit.terminal_colours import resolve_color_default
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus
    from southwark.log import Log
    from southwark.repo import Repo

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)
    repo = Repo(rh.target_repo)

    try:
        commit_log = Log(repo).log(
            max_entries=entries,
            reverse=reverse,
            from_tag=f"v{rh.templates.globals['version']}",
        )
    except ValueError as e:
        raise abort(f"ERROR: {e}")

    if no_pager:
        click.echo(commit_log, color=resolve_color_default(colour))
    else:
        click.echo_via_pager(commit_log, color=resolve_color_default(colour))
Example #9
def classifiers(
    add: Optional[bool],
    status: Optional[int] = None,
    library: Optional[bool] = None,
):
    """
	Suggest trove classifiers based on repository metadata.
	"""

    # stdlib
    import sys

    # 3rd party
    from consolekit.input import choice, confirm
    from domdf_python_tools.paths import PathPlus
    from natsort import natsorted
    from shippinglabel.classifiers import classifiers_from_requirements
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals
    suggested_classifiers = set()
    pkg_dir = rh.target_repo / config["import_name"]

    for language in detect_languages(pkg_dir):
        suggested_classifiers.add(f"Programming Language :: {language}")

    # If not a tty, assume default options are False
    if not sys.stdout.isatty():
        if add is None:
            add = False
        if library is None:
            library = False

    if status is None and sys.stdout.isatty():
        click.echo("What is the Development Status of this project?")
        status = choice(text="Status",
                        options=development_status_options,
                        start_index=1) + 1

    if status is not None:
        status_string = f"Development Status :: {status} - {development_status_options[status - 1]}"
        suggested_classifiers.add(status_string)

    if library is None:
        library = click.confirm("Is this a library for developers?")

    if library:
        suggested_classifiers.add(
            "Topic :: Software Development :: Libraries :: Python Modules")
        suggested_classifiers.add("Intended Audience :: Developers")

    lib_requirements = combine_requirements(
        read_requirements(rh.target_repo / "requirements.txt")[0])

    suggested_classifiers.update(
        classifiers_from_requirements(lib_requirements))

    # file_content = dedent(
    # 		f"""\
    # # Remove any classifiers you don't think are relevant.
    # # Lines starting with a # will be discarded.
    # """
    # 		)
    # file_content += "\n".join(natsorted(suggested_classifiers))
    #
    # def remove_invalid_entries(line):
    # 	line = line.strip()
    # 	if not line:
    # 		return False
    # 	elif line.startswith("#"):
    # 		return False
    # 	else:
    # 		return True
    #
    # suggested_classifiers = set(
    # 		filter(remove_invalid_entries, (click.edit(file_content) or file_content).splitlines())
    # 		)

    if not suggested_classifiers:
        if sys.stdout.isatty():
            click.echo("Sorry, I've nothing to suggest 😢")

        sys.exit(1)

    if sys.stdout.isatty():
        click.echo(
            "Based on what you've told me I think the following classifiers are appropriate:"
        )
        for classifier in natsorted(suggested_classifiers):
            click.echo(f" - {classifier}")
    else:
        for classifier in natsorted(suggested_classifiers):
            click.echo(classifier)

    if add is None:
        add = confirm(
            "Do you want to add these to the 'repo_helper.yml' file?")

    if add:

        # this package
        from repo_helper.configuration import YamlEditor

        yaml = YamlEditor()
        yaml.update_key(rh.target_repo / "repo_helper.yml",
                        "classifiers",
                        suggested_classifiers,
                        sort=True)
Example #10
def stubs(add: Optional[bool] = None,
          force_tty: bool = False,
          no_pager: bool = False):
    """
	Suggest :pep:`561` type stubs.
	"""

    # stdlib
    import shutil
    import sys
    from itertools import chain

    # 3rd party
    import tabulate
    from apeye import URL
    from apeye.requests_url import TrailingRequestsURL
    from consolekit.input import confirm
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from shippinglabel import normalize
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    requirements_files = [rh.target_repo / "requirements.txt"]

    if config["enable_tests"]:
        requirements_files.append(rh.target_repo / config["tests_dir"] /
                                  "requirements.txt")

    requirements_files.extend(
        (rh.target_repo /
         config["import_name"]).iterchildren("**/requirements.txt"))

    all_requirements = set(
        chain.from_iterable(
            read_requirements(file, include_invalid=True)[0]
            for file in requirements_files))

    stubs_file = rh.target_repo / "stubs.txt"

    if stubs_file.is_file():
        existing_stubs, stub_comments, invalid_stubs = read_requirements(
            stubs_file, include_invalid=True)
    else:
        existing_stubs = set()
        stub_comments, invalid_stubs = [], []

    suggestions = {}

    for requirement in all_requirements:
        if normalize(requirement.name) in {"typing-extensions"}:
            continue

        types_url = TrailingRequestsURL(
            PYPI_API / f"types-{requirement.name.lower()}" / "json/")
        stubs_url = TrailingRequestsURL(
            PYPI_API / f"{requirement.name.lower()}-stubs" / "json/")

        response = stubs_url.head()
        if response.status_code == 404:
            # No stubs found for -stubs
            response = types_url.head()
            if response.status_code == 404:
                # No stubs found for types-
                continue
            else:
                response_url = URL(response.url)
                suggestions[str(requirement)] = response_url.parent.name
                # print(requirement, response.url)
        else:
            response_url = URL(response.url)
            suggestions[str(requirement)] = response_url.parent.name
            # print(requirement, response.url)

    if not suggestions:
        if sys.stdout.isatty() or force_tty:
            click.echo("No stubs to suggest.")
        sys.exit(1)

    if sys.stdout.isatty() or force_tty:

        table = StringList([
            "Suggestions",
            "-----------",
            tabulate.tabulate(suggestions.items(),
                              headers=["Requirement", "Stubs"]),
        ])
        table.blankline(ensure_single=True)

        if no_pager or len(table) <= shutil.get_terminal_size().lines:
            click.echo('\n'.join(table))
        else:
            click.echo_via_pager('\n'.join(table))

        if add is None:
            add = confirm("Do you want to add these to the 'stubs.txt' file?")

        if add:
            new_stubs = sorted(
                combine_requirements(*existing_stubs, *suggestions.values()))

            stubs_file.write_lines([
                *stub_comments,
                *invalid_stubs,
                *map(str, new_stubs),
            ])

    else:
        for stub in suggestions.values():
            click.echo(stub)

    sys.exit(0)
Example #11
class Bumper:
	"""
	Class to bump the repository version.

	:param repo_path:
	:param force: Whether to force bumping the version when the repository is unclean.
	"""

	def __init__(self, repo_path: PathPlus, force: bool = False):
		#:
		self.repo = RepoHelper(traverse_to_file(PathPlus(repo_path), "repo_helper.yml"))

		self.repo.load_settings()

		if not assert_clean(self.repo.target_repo):
			if force:
				click.echo(Fore.RED("Proceeding anyway"), err=True)
			else:
				raise click.Abort

		# pypi_secure_key = "travis_pypi_secure"
		# if self.repo.templates.globals["on_pypi"] and not self.repo.templates.globals[pypi_secure_key]:
		# 	raise abort(f"Cowardly refusing to bump the version when {pypi_secure_key!r} is unset.")
		# TODO: Handle this wrt github actions

		#:
		self.current_version = self.get_current_version()

		#: The path to the bumpversion configuration file.
		self.bumpversion_file = self.repo.target_repo / ".bumpversion.cfg"

	def major(self, commit: Optional[bool], message: str):
		"""
		Bump to the next major version.

		:param commit: Whether to commit automatically (:py:obj:`True`) or ask first (:py:obj:`None`).
		:param message: The commit message.
		"""

		new_version = _Version.from_parts((self.current_version.major + 1, 0, 0))
		self.bump(new_version, commit, message)

	def minor(self, commit: Optional[bool], message: str):
		"""
		Bump to the next minor version.

		:param commit: Whether to commit automatically (:py:obj:`True`) or ask first (:py:obj:`None`).
		:param message: The commit message.
		"""

		new_version = _Version.from_parts((self.current_version.major, self.current_version.minor + 1, 0))
		self.bump(new_version, commit, message)

	def patch(self, commit: Optional[bool], message: str):
		"""
		Bump to the next patch version.

		:param commit: Whether to commit automatically (:py:obj:`True`) or ask first (:py:obj:`None`).
		:param message: The commit message.
		"""

		new_version = _Version.from_parts((
				self.current_version.major,
				self.current_version.minor,
				self.current_version.micro + 1,
				))
		self.bump(new_version, commit, message)

	def today(self, commit: Optional[bool], message: str):
		"""
		Bump to the calver version for today's date.

		E.g. 2020.12.25 for 25th December 2020

		:param commit: Whether to commit automatically (:py:obj:`True`) or ask first (:py:obj:`None`).
		:param message: The commit message.
		"""

		today = date.today()
		new_version = _Version.from_parts((today.year, today.month, today.day))
		self.bump(new_version, commit, message)

	def bump(self, new_version: Version, commit: Optional[bool], message: str):
		"""
		Bump to the given version.

		:param new_version:
		:param commit: Whether to commit automatically (:py:obj:`True`) or ask first (:py:obj:`None`).
		:param message: The commit message.

		.. versionchanged:: 2021.8.11

			Now takes a :class:`packaging.version.Version` rather than a
			:class:`domdf_python_tools.versions.Version`.
		"""

		new_version_str = str(new_version)

		dulwich_repo = Repo(self.repo.target_repo)

		if f"v{new_version_str}".encode("UTF-8") in dulwich_repo.refs.as_dict(b"refs/tags"):
			raise abort(f"The tag 'v{new_version_str}' already exists!")

		bumpversion_config = self.get_bumpversion_config(str(self.current_version), new_version_str)

		changed_files = [self.bumpversion_file.relative_to(self.repo.target_repo).as_posix()]

		for filename in bumpversion_config.keys():
			if not os.path.isfile(filename):
				raise FileNotFoundError(filename)

		for filename, config in bumpversion_config.items():
			self.bump_version_for_file(filename, config)
			changed_files.append(filename)

		# Update number in .bumpversion.cfg
		bv = ConfigUpdater()
		bv.read(self.bumpversion_file)
		bv["bumpversion"]["current_version"] = new_version_str
		self.bumpversion_file.write_clean(str(bv))

		commit_message = message.format(current_version=self.current_version, new_version=new_version)
		click.echo(commit_message)

		if commit_changed_files(
				self.repo.target_repo,
				managed_files=changed_files,
				commit=commit,
				message=commit_message.encode("UTF-8"),
				enable_pre_commit=False,
				):

			tag_create(dulwich_repo, f"v{new_version_str}")

	def get_current_version(self) -> Version:
		"""
		Returns the current version from the ``repo_helper.yml`` configuration file.
		"""

		return Version(self.repo.templates.globals["version"])

	def get_bumpversion_config(
			self,
			current_version: str,
			new_version: str,
			) -> Dict[str, BumpversionFileConfig]:
		"""
		Returns the bumpversion config.

		:param current_version:
		:param new_version:
		"""

		bv = ConfigUpdater()
		bv.read(self.bumpversion_file)

		def default():
			return {"search": current_version, "replace": new_version}

		# populate with the sections which are managed by repo_helper
		config: Dict[str, BumpversionFileConfig] = {
				filename: default()
				for filename in get_bumpversion_filenames(self.repo.templates)
				}

		if self.repo.templates.globals["enable_docs"]:
			config[f"{self.repo.templates.globals['docs_dir']}/index.rst"] = default()

		for section in bv.sections():
			if not section.startswith("bumpversion:file:"):
				continue

			section_dict: Dict[str, str] = bv[section].to_dict()
			config[section[17:]] = dict(
					search=section_dict.get("search", "{current_version}").format(current_version=current_version),
					replace=section_dict.get("replace", "{new_version}").format(new_version=new_version),
					)

		return config

	def bump_version_for_file(self, filename: PathLike, config: BumpversionFileConfig):
		"""
		Bumps the version for the given file.

		:param filename:
		:param config:
		"""

		filename = self.repo.target_repo / filename
		filename.write_text(filename.read_text().replace(config["search"], config["replace"]))
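
A brief usage sketch based only on the methods shown above; the placeholders in the message are the names bump() passes to str.format():

# Sketch: bump the patch version of the repository in the current directory
# and tag the resulting commit.
from domdf_python_tools.paths import PathPlus

bumper = Bumper(PathPlus.cwd(), force=False)
print(bumper.current_version)

bumper.patch(commit=True, message="Bump version {current_version} -> {new_version}")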
Example #12
def run_repo_helper(
    path,
    force: bool,
    initialise: bool,
    commit: Optional[bool],
    message: str,
    enable_pre_commit: bool = True,
) -> int:
    """
	Run repo_helper.

	:param path: The repository path.
	:param force: Whether to force the operation if the repository is not clean.
	:param initialise: Whether to initialise the repository.
	:param commit: Whether to commit the changed files.
	:param message: The commit message.
	:param enable_pre_commit: Whether to install and configure pre-commit. Default :py:obj:`True`.
	"""

    # this package
    from repo_helper.cli.commands.init import init_repo
    from repo_helper.core import RepoHelper
    from repo_helper.utils import easter_egg

    try:
        rh = RepoHelper(path)
        rh.load_settings()
    except FileNotFoundError as e:
        error_block = textwrap.indent(str(e), '\t')
        raise abort(
            f"Unable to run 'repo_helper'.\nThe error was:\n{error_block}")

    if not assert_clean(rh.target_repo,
                        allow_config=("repo_helper.yml", "git_helper.yml")):
        if force:
            click.echo(Fore.RED("Proceeding anyway"), err=True)
        else:
            return 1

    if initialise:
        r = Repo(rh.target_repo)
        for filename in init_repo(rh.target_repo, rh.templates):
            r.stage(os.path.normpath(filename))

    managed_files = rh.run()

    try:
        commit_changed_files(
            repo_path=rh.target_repo,
            managed_files=managed_files,
            commit=commit,
            message=message.encode("UTF-8"),
            enable_pre_commit=enable_pre_commit,
        )
    except CommitError as e:
        indented_error = '\n'.join(f"\t{line}"
                                   for line in textwrap.wrap(str(e)))
        click.echo(
            f"Unable to commit changes. The error was:\n\n{indented_error}",
            err=True)
        return 1

    easter_egg()

    return 0
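
A hedged call sketch using only the parameters documented above; the commit message mirrors the one used elsewhere in these examples, and commit=None leaves the commit decision to the user:

from domdf_python_tools.paths import PathPlus

# Returns 0 on success, 1 if the repository is unclean (and force is False)
# or if committing the changes fails.
exit_code = run_repo_helper(
    PathPlus.cwd(),
    force=False,
    initialise=False,
    commit=None,
    message="Updated files with 'repo_helper'.",
)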
Example #13
def test_repo_name(temp_repo):
    rh = RepoHelper(temp_repo.path)
    rh.load_settings()
    assert rh.repo_name == "repo_helper_demo"
Example #14
def typed():
    """
	Add a 'py.typed' file and the associated trove classifier.
	"""

    # 3rd party
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from natsort import natsorted

    # this package
    from repo_helper.configupdater2 import ConfigUpdater
    from repo_helper.core import RepoHelper
    from repo_helper.utils import indent_join, stage_changes

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()

    py_typed = rh.target_repo / rh.templates.globals["import_name"] / "py.typed"
    if not py_typed.is_file():
        py_typed.touch()

    stage_changes(rh.target_repo, [py_typed])

    setup_cfg = rh.target_repo / "setup.cfg"
    pyproject_file = rh.target_repo / "pyproject.toml"

    if setup_cfg.is_file() and not rh.templates.globals["use_whey"]:
        content = setup_cfg.read_text()

        config = ConfigUpdater()
        config.read_string(content)

        existing_classifiers = config["metadata"]["classifiers"]
        existing_classifiers_string = str(existing_classifiers)

        classifiers = set(
            map(str.strip, existing_classifiers.value.split('\n')))
        classifiers.add("Typing :: Typed")

        new_classifiers_lines = StringList(
            indent_join(natsorted(classifiers)).expandtabs(4))
        new_classifiers_lines[0] = "classifiers ="
        new_classifiers_lines.blankline(ensure_single=True)

        setup_cfg.write_clean(
            content.replace(existing_classifiers_string,
                            str(new_classifiers_lines)))

    if pyproject_file.is_file() and rh.templates.globals["use_whey"]:
        pyproject_config = dom_toml.load(pyproject_file)
        if "whey" in pyproject_config.get("tool", {}):
            classifiers = set(
                pyproject_config["tool"]["whey"]["base-classifiers"])
            classifiers.add("Typing :: Typed")
            pyproject_config["tool"]["whey"]["base-classifiers"] = natsorted(
                classifiers)

        dom_toml.dump(pyproject_config,
                      pyproject_file,
                      encoder=dom_toml.TomlEncoder)
Example #15
def update_repository(repository: Dict, recreate: bool = False) -> int:
    """
	Run the updater for the given repository.

	:param repository:
	:param recreate:
	"""

    # TODO: rebase
    # TODO: if branch already exists and PR has been merged, abort

    db_repository: Repository = get_db_repository(
        repo_id=repository["id"],
        owner=repository["owner"]["login"],
        name=repository["name"],
    )

    last_pr_date = datetime.fromtimestamp(db_repository.last_pr or 200)
    now = datetime.now()
    if not recreate and last_pr_date.day == now.day and last_pr_date.month == now.month:
        print(
            f"A PR for {db_repository.fullname} has already been created today. Skipping."
        )
        return 1

    owner = repository["owner"]["login"]
    repository_name = repository["name"]

    # Log in as the app
    context_switcher.login_as_app()

    # Log in as installation
    installation_id = context_switcher.login_as_repo_installation(
        owner=owner, repository=repository_name)

    github_repo: GitHubRepository = client.repository(owner, repository_name)

    # Ensure 'repo_helper.yml' exists
    try:
        github_repo.file_contents("repo_helper.yml")
    except NotFoundError:
        print(
            f"repo_helper.yml not found in the repository {repository['owner']['login']}/{repository['name']}"
        )
        return 1

    with TemporaryDirectory() as tmpdir:

        # Clone to tmpdir
        repo = clone(repository["html_url"], tmpdir)

        if recreate:
            # Delete any existing branch and create again from master
            recreate_branch(repo)
        elif f"refs/remotes/origin/{BRANCH_NAME}".encode("UTF-8") in dict(
                repo.refs):
            checkout_branch(repo)
        else:
            # Switch to new branch
            create_branch(repo)

        # Update files
        try:
            rh = RepoHelper(tmpdir)
            rh.load_settings()
        except FileNotFoundError as e:
            error_block = indent(str(e), '\t')
            print(
                f"Unable to run 'repo_helper'.\nThe error was:\n{error_block}")
            return 1

        managed_files = rh.run()
        staged_files = stage_changes(repo.path, managed_files)

        if not staged_files and recreate:
            # Everything is up to date, close PR.
            close_pr(owner, repository_name)
            return 0

        try:
            if not commit_changed_files(
                    repo_path=rh.target_repo,
                    managed_files=managed_files,
                    commit=True,
                    message=b"Updated files with 'repo_helper'.",
                    enable_pre_commit=False,
            ):
                sys.stdout.flush()
                sys.stderr.flush()
                print("Failure!")
                return 1

            sys.stdout.flush()
            sys.stderr.flush()

        except CommitError as e:
            indented_error = '\n'.join(f"\t{line}" for line in wrap(str(e)))
            print(
                f"Unable to commit changes. The error was:\n\n{indented_error}"
            )
            print("Failure!")
            return 1

        # Push
        dulwich.porcelain.push(
            repo,
            repository["html_url"],
            BRANCH_NAME.encode("UTF-8"),
            username="******",
            password=get_installation_access_token(github_repo,
                                                   installation_id),
            force=recreate,
        )

        sys.stdout.flush()
        sys.stderr.flush()

        # Create PR
        base = github_repo.default_branch
        head = f"{owner}:{BRANCH_NAME}"

        if not list(
                github_repo.pull_requests(state="open", base=base, head=head)):
            created_pr = github_repo.create_pull(
                title="[repo-helper] Configuration Update",
                base=base,
                head=head,
                body=make_pr_details(),
            )

            if created_pr is not None:
                db_repository.add_pr(int(created_pr.number))

        db_repository.last_pr = datetime.now().timestamp()
        db.session.commit()

        print("Success!")
        return 0
Example #16
def requirements(
    no_pager: bool = False,
    depth: int = -1,
    concise: bool = False,
    no_venv: bool = False,
):
    """
	Lists the requirements of this library, and their dependencies.
	"""

    # stdlib
    import re
    import shutil

    # 3rd party
    from domdf_python_tools.compat import importlib_metadata
    from domdf_python_tools.iterative import make_tree
    from domdf_python_tools.paths import PathPlus, in_directory
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import Requirement
    from shippinglabel.requirements import (ComparableRequirement,
                                            combine_requirements,
                                            list_requirements,
                                            read_requirements)

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)

    with in_directory(rh.target_repo):

        buf = StringList([
            f"{rh.templates.globals['pypi_name']}=={rh.templates.globals['version']}"
        ])
        raw_requirements = sorted(read_requirements("requirements.txt")[0])
        tree: List[Union[str, List[str], List[Union[str, List]]]] = []
        venv_dir = (rh.target_repo / "venv")

        if venv_dir.is_dir() and not no_venv:
            # Use virtualenv as it exists
            search_path = []

            for directory in (venv_dir / "lib").glob("python3.*"):
                search_path.append(str(directory / "site-packages"))

            importlib_metadata.DistributionFinder.Context.path = search_path  # type: ignore

        if concise:
            concise_requirements = []

            def flatten(iterable: Iterable[Union[Requirement, Iterable]]):
                for item in iterable:
                    if isinstance(item, str):
                        yield item
                    else:
                        yield from flatten(item)  # type: ignore

            for requirement in raw_requirements:
                concise_requirements.append(requirement)
                # TODO: remove "extra == " marker
                for req in flatten(
                        list_requirements(str(requirement), depth=depth - 1)):
                    concise_requirements.append(
                        ComparableRequirement(
                            re.sub('; extra == ".*"', '', req)))

            concise_requirements = sorted(
                set(combine_requirements(concise_requirements)))
            tree = list(map(str, concise_requirements))

        else:
            for requirement in raw_requirements:
                tree.append(str(requirement))
                deps = list(
                    list_requirements(str(requirement), depth=depth - 1))
                if deps:
                    tree.append(deps)

        buf.extend(make_tree(tree))

        if shutil.get_terminal_size().lines >= len(buf):
            # Don't use pager if fewer lines than terminal height
            no_pager = True

        if no_pager:
            click.echo(str(buf))
        else:
            click.echo_via_pager(str(buf))
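
The tree rendering relies on make_tree from domdf_python_tools, which accepts the same nested-list structure built above (a string entry followed by a list becomes that entry's children); a minimal sketch with illustrative requirement names:

from domdf_python_tools.iterative import make_tree

# Each top-level string is a requirement; a following list holds its dependencies.
nested = ["apeye>=0.3.0", ["requests>=2.24.0", "domdf-python-tools>=1.1.0"]]
print('\n'.join(make_tree(nested)))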