Example no. 1
    def create_patches(self,
                       upstream: Optional[str] = None,
                       destination: Optional[str] = None) -> List[PatchMetadata]:
        """
        Create patches from downstream commits.

        :param upstream: git branch or tag to create the patches from;
            defaults to the version from the specfile
        :param destination: directory where the patch files are written;
            defaults to the project's working directory
        :return: [PatchMetadata, ...] list of patches
        """
        upstream = upstream or self.get_specfile_version()
        destination = destination or self.local_project.working_dir

        sync_files_to_ignore = [
            str(sf.src.relative_to(self.local_project.working_dir)) for sf in
            self.package_config.get_all_files_to_sync().get_raw_files_to_sync(
                self.local_project.working_dir,
                Path(
                    # dest (downstream) is not important, we only care about src (upstream)
                    destination),
            )
        ]
        files_to_ignore = (self.package_config.patch_generation_ignore_paths +
                           sync_files_to_ignore)

        pg = PatchGenerator(self.local_project)
        return pg.create_patches(upstream,
                                 destination,
                                 files_to_ignore=files_to_ignore)
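The interesting part above is how the ignore list is assembled: the paths of files that get synced are re-expressed relative to the upstream working directory and appended to the configured ignore paths. A minimal standalone sketch of that step (helper name and example paths are made up, POSIX paths assumed):

from pathlib import Path
from typing import List


def build_files_to_ignore(
    configured_ignore_paths: List[str],
    synced_sources: List[Path],
    working_dir: Path,
) -> List[str]:
    # Express each synced source relative to the upstream working dir,
    # then append them to the configured patch-generation ignore paths.
    relative = [str(src.relative_to(working_dir)) for src in synced_sources]
    return configured_ignore_paths + relative


# Example values, for illustration only.
assert build_files_to_ignore(
    ["fedora/"],
    [Path("/src/project/packit.yaml"), Path("/src/project/fedora/package.spec")],
    Path("/src/project"),
) == ["fedora/", "packit.yaml", "fedora/package.spec"]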
Example no. 2
def test_squash_patches_by_name(source_git_repo: git.Repo,
                                dist_git_repo: git.Repo):
    """Patch files corresponding to commits which have identical 'patch_name'
    metadata defined are squashed.
    """
    local_project = flexmock(git_repo=source_git_repo,
                             ref="HEAD",
                             working_dir=source_git_repo.working_dir)

    create_commits_to_squash(source_git_repo)

    patch_generator = PatchGenerator(local_project)
    patch_list = patch_generator.create_patches(
        git_ref="0.1.0", destination=dist_git_repo.working_dir)
    assert len(patch_list) == 3
    assert patch_list[1].path == Path(dist_git_repo.working_dir,
                                      "distro.patch")
    assert patch_list[1].name == "distro.patch"
    assert sorted(dist_git_repo.untracked_files) == [
        "0001-Add-a-standalone-patch.patch",
        "0004-Add-another-standalone-patch.patch",
        "distro.patch",
    ]
    patch = Path(dist_git_repo.working_dir, "distro.patch").read_text()
    assert "+The first commit of the second patch." in patch
    assert "+The second commit of the second patch." in patch
Example no. 3
    def create_patches(
        self,
        upstream: Optional[str] = None,
        destination: Optional[Union[str, Path]] = None,
    ) -> List[PatchMetadata]:
        """
        Create patches from downstream commits.

        :param upstream: git branch or tag to create the patches from;
            defaults to the version from the specfile
        :param destination: directory where the patch files are written;
            defaults to the project's working directory
        :return: [PatchMetadata, ...] list of patches
        """
        upstream = upstream or self.get_specfile_version()
        destination = Path(destination or self.local_project.working_dir)

        sync_files_to_ignore = self.package_config.get_all_files_to_sync()
        for file in sync_files_to_ignore:
            file.resolve(
                src_base=self.local_project.working_dir,
                # dest (downstream) is not important, we only care about src (upstream)
                dest_base=destination,
            )
        sync_files_to_ignore = [
            str(Path(file).relative_to(self.local_project.working_dir))
            for file in iter_srcs(sync_files_to_ignore)
        ]
        files_to_ignore = (
            self.package_config.patch_generation_ignore_paths + sync_files_to_ignore
        )

        pg = PatchGenerator(self.local_project)
        return pg.create_patches(
            upstream, str(destination), files_to_ignore=files_to_ignore
        )
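One detail worth noting is the destination fallback: the `or` has to short-circuit inside the `Path()` call so that a missing argument falls back to the working directory before the path object is constructed. A tiny standalone illustration (hypothetical helper name):

from pathlib import Path


def pick_destination(destination, default):
    # `destination or default` is evaluated first, so passing None
    # never reaches Path(None), which would raise a TypeError.
    return Path(destination or default)


assert pick_destination(None, "/src/project") == Path("/src/project")
assert pick_destination("/tmp/dist-git", "/src/project") == Path("/tmp/dist-git")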
Example no. 4
def test_fail_if_not_adjacent(source_git_repo: git.Repo,
                              dist_git_repo: git.Repo):
    """Commits that share a 'patch_name' but are not adjacent in the history
    cannot be squashed and raise a PackitException.
    """
    local_project = flexmock(git_repo=source_git_repo,
                             ref="HEAD",
                             working_dir=source_git_repo.working_dir)

    create_non_adjacent_commits_to_squash(source_git_repo)

    patch_generator = PatchGenerator(local_project)
    with pytest.raises(PackitException) as ex:
        patch_generator.create_patches(git_ref="0.1.0",
                                       destination=dist_git_repo.working_dir)
    assert "Non-adjacent patches" in str(ex)
Example no. 5
def test_linearization(api_instance_source_git):
    ref = "0.1.0"
    sg_path = Path(api_instance_source_git.upstream_local_project.working_dir)
    mock_spec_download_remote_s(sg_path, sg_path / DISTRO_DIR, ref)
    create_merge_commit_in_source_git(sg_path, go_nuts=True)
    with cwd("/"):  # let's mimic p-s by having different cwd than the project
        pg = PatchGenerator(api_instance_source_git.upstream_local_project)
        pg.create_patches(ref, sg_path / DISTRO_DIR)
    assert {x.name for x in sg_path.joinpath(DISTRO_DIR).glob("*.patch")} == {
        "0001-sourcegit-content.patch",
        "0002-MERGE-COMMIT.patch",
        "0003-ugly-merge-commit.patch",
    }
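For context, the merge history this test builds can be inspected with plain GitPython; a short sketch (hypothetical helper) that lists the merge commits between a tag and HEAD, which is the situation the linearization has to handle before patches can be generated:

import git


def merge_commits_since(repo_path: str, ref: str):
    # A commit with more than one parent is a merge commit.
    repo = git.Repo(repo_path)
    return [
        commit for commit in repo.iter_commits(f"{ref}..HEAD")
        if len(commit.parents) > 1
    ]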
Example no. 6
def test_undo_identical(git_repo):
    """
    Check that identical patches are correctly detected and changes
    undone in the target git repo.
    """
    input_patch_list = [
        PatchMetadata(name=path.name, path=path)
        for path in Path(git_repo.working_tree_dir).iterdir()
        if path.suffix == ".patch"
    ]
    output_patch_list = [
        x for x in input_patch_list if x.name == "weird-identical.patch"
    ]
    assert (PatchGenerator.undo_identical(input_patch_list,
                                          git_repo) == output_patch_list)
    # 'weird-identical.patch' is identical, except the original patch file
    # is missing a "function" name at one of the hunks, which causes the
    # patch-ids to be different.
    # Is there any safe way to handle this?
    assert [
        item.a_path for item in git_repo.index.diff(None)
    ] == ["weird-identical.patch"]
Example no. 7
    def sync_release(
        self,
        dist_git_branch: Optional[str] = None,
        version: Optional[str] = None,
        tag: Optional[str] = None,
        use_local_content: bool = False,
        force_new_sources: bool = False,
        upstream_ref: Optional[str] = None,
        create_pr: bool = True,
        force: bool = False,
    ) -> Optional[PullRequest]:
        """
        Update the given package in Fedora.

        :param dist_git_branch: branch in dist-git, defaults to repo's default branch
        :param use_local_content: don't check out anything
        :param version: upstream version to update in Fedora
        :param tag: upstream git tag
        :param force_new_sources: don't check the lookaside cache and perform new-sources
        :param upstream_ref: for a source-git repo, use this ref as the latest upstream commit
        :param create_pr: create a pull request if set to True
        :param force: ignore changes in the git index

        :return: the created PullRequest if create_pr is True, else None
        """
        dist_git_branch = (dist_git_branch
                           or self.dg.local_project.git_project.default_branch)
        # process version and tag parameters
        if version and tag:
            raise PackitException(
                "Function parameters version and tag are mutually exclusive.")
        elif not tag:
            version = version or self.up.get_version()
            if not version:
                raise PackitException(
                    "Could not figure out version of latest upstream release.")
            upstream_tag = self.up.convert_version_to_tag(version)
        else:
            upstream_tag = tag
            version = self.up.get_version_from_tag(tag)

        assert_existence(self.up.local_project, "Upstream local project")
        assert_existence(self.dg.local_project, "Dist-git local project")
        if self.dg.is_dirty():
            raise PackitException(
                f"The distgit repository {self.dg.local_project.working_dir} is dirty."
                f"This is not supported.")
        if not force and self.up.is_dirty() and not use_local_content:
            raise PackitException(
                "The repository is dirty, will not discard the changes. Use --force to bypass."
            )
        # do not add anything between distgit clone and saving gpg keys!
        self.up.allowed_gpg_keys = (
            self.dg.get_allowed_gpg_keys_from_downstream_config())

        upstream_ref = self.up._expand_git_ref(
            upstream_ref or self.package_config.upstream_ref)
        create_pr = create_pr and self.package_config.create_pr
        self.up.run_action(actions=ActionName.post_upstream_clone)

        current_up_branch = self.up.active_branch
        try:
            # we want to check out the tag only when local_content is not set
            # and it's an actual upstream repo and not source-git
            if upstream_ref:
                logger.info("We will not check out the upstream tag "
                            "because this is a source-git repo.")
            elif not use_local_content:
                self.up.local_project.checkout_release(upstream_tag)

            self.dg.check_last_commit()

            self.up.run_action(actions=ActionName.pre_sync)
            self.dg.create_branch(
                dist_git_branch,
                base=f"remotes/origin/{dist_git_branch}",
                setup_tracking=True,
            )

            # fetch and reset --hard upstream/$branch?
            logger.info(f"Using {dist_git_branch!r} dist-git branch.")
            self.dg.update_branch(dist_git_branch)
            self.dg.checkout_branch(dist_git_branch)

            if create_pr:
                local_pr_branch = f"{version}-{dist_git_branch}-update"
                self.dg.create_branch(local_pr_branch)
                self.dg.checkout_branch(local_pr_branch)

            description = (
                f"Upstream tag: {upstream_tag}\n"
                f"Upstream commit: {self.up.local_project.commit_hexsha}\n")

            readme_path = self.dg.local_project.working_dir / "README.packit"
            logger.debug(f"README: {readme_path}")
            readme_path.write_text(
                SYNCING_NOTE.format(packit_version=get_packit_version()))

            raw_sync_files = (
                self.package_config.get_all_files_to_sync().get_raw_files_to_sync(
                    self.up.local_project.working_dir,
                    self.dg.local_project.working_dir,
                )
            )

            if self.up.with_action(action=ActionName.prepare_files):
                raw_files_to_sync = self._prepare_files_to_sync(
                    raw_sync_files=raw_sync_files,
                    full_version=version,
                    upstream_tag=upstream_tag,
                )
                sync_files(raw_files_to_sync)
                if upstream_ref and self.up.with_action(
                        action=ActionName.create_patches):
                    patches = self.up.create_patches(
                        upstream=upstream_ref,
                        destination=str(self.dg.absolute_specfile_dir),
                    )
                    patches = PatchGenerator.undo_identical(
                        patches, self.dg.local_project.git_repo)
                    self.dg.specfile_add_patches(patches)
                self._handle_sources(add_new_sources=True,
                                     force_new_sources=force_new_sources)

            # when the action is defined, we still need to copy the files
            if self.up.has_action(action=ActionName.prepare_files):
                sync_files(raw_sync_files)

            self.dg.commit(title=f"{version} upstream release",
                           msg=description)

            new_pr = None
            if create_pr:
                title = f"Update to upstream release {version}"

                if not self.dg.pr_exists(title, description.rstrip(),
                                         dist_git_branch):
                    new_pr = self.push_and_create_pr(
                        pr_title=title,
                        pr_description=description,
                        dist_git_branch=dist_git_branch,
                    )
            else:
                self.dg.push(refspec=f"HEAD:{dist_git_branch}")
        finally:
            if not use_local_content and not upstream_ref:
                self.up.local_project.git_repo.git.checkout(current_up_branch)
            self.dg.refresh_specfile()
            self.dg.local_project.git_repo.git.reset("--hard", "HEAD")
        return new_pr
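The version/tag handling at the top of the method is the piece that is easiest to get wrong; reduced to a standalone sketch (names and callables below are hypothetical, the logic follows the snippet):

from typing import Callable, Optional, Tuple


def resolve_version_and_tag(
    version: Optional[str],
    tag: Optional[str],
    detect_latest_version: Callable[[], Optional[str]],
    version_to_tag: Callable[[str], str],
    tag_to_version: Callable[[str], str],
) -> Tuple[str, str]:
    """Return (version, upstream_tag); version and tag are mutually exclusive."""
    if version and tag:
        raise ValueError("version and tag are mutually exclusive")
    if not tag:
        version = version or detect_latest_version()
        if not version:
            raise ValueError("could not figure out the latest upstream version")
        return version, version_to_tag(version)
    return tag_to_version(tag), tag


# Example with trivial callables: tags are versions prefixed with "v".
assert resolve_version_and_tag(
    "1.2.3", None, lambda: None, "v{}".format, lambda t: t.lstrip("v")
) == ("1.2.3", "v1.2.3")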
Example no. 8
    def update_dist_git(
        self,
        version: Optional[str],
        upstream_ref: Optional[str],
        add_new_sources: bool,
        force_new_sources: bool,
        upstream_tag: Optional[str],
        commit_title: str,
        commit_msg: str,
        sync_default_files: bool = True,
        pkg_tool: str = "",
    ):
        """Update a dist-git repo from an upstream (aka source-git) repo

        - copy files to be synced to dist-git
        - generate and update patch files and the spec-file
        - upload source archives to the lookaside cache
        - commit the changes to dist-git, if a commit title is defined

        Args:
            version: Upstream version to update in Fedora.
            upstream_ref: For a source-git repo, use this ref as the latest upstream commit.
            add_new_sources: Download and upload source archives.
            force_new_sources: Download/upload the archive even if its name
                is already in the cache or in the 'sources' file.
            upstream_tag: Use the message of the commit referenced by this tag to update the
                changelog in the spec-file, if requested.
            commit_title: Commit message title (aka subject-line) in dist-git.
                Do not commit if this is false-ish.
            commit_msg: Use this commit message in dist-git.
            sync_default_files: Whether to sync the default files, that is: packit.yaml and
                the spec-file.
            pkg_tool: Which tool (fedpkg/centpkg) to use to upload to the lookaside cache.
        """
        if sync_default_files:
            synced_files = self.package_config.get_all_files_to_sync()
        else:
            synced_files = self.package_config.files_to_sync
        # Make all paths absolute and check that they are within
        # the working directories of the repositories.
        for item in synced_files:
            item.resolve(
                src_base=self.up.local_project.working_dir,
                dest_base=self.dg.local_project.working_dir,
            )

        if self.up.with_action(action=ActionName.prepare_files):
            synced_files = self._prepare_files_to_sync(
                synced_files=synced_files,
                full_version=version,
                upstream_tag=upstream_tag,
            )

        sync_files(synced_files)

        if upstream_ref and self.up.with_action(
                action=ActionName.create_patches):
            patches = self.up.create_patches(
                upstream=upstream_ref,
                destination=str(self.dg.absolute_specfile_dir),
            )
            # Undo identical patches, but don't remove them
            # from the list, so that they are added to the spec-file.
            PatchGenerator.undo_identical(patches,
                                          self.dg.local_project.git_repo)
            self.dg.specfile_add_patches(
                patches, self.package_config.patch_generation_patch_id_digits)

        if add_new_sources or force_new_sources:
            self._handle_sources(
                force_new_sources=force_new_sources,
                pkg_tool=pkg_tool,
            )

        if commit_title:
            self.dg.commit(title=commit_title, msg=commit_msg, prefix="")
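The resolution step described by the in-code comment ("Make all paths absolute and check that they are within the working directories") boils down to anchoring each relative path on a base directory and rejecting anything that escapes it. A minimal sketch of that check (hypothetical helper, POSIX paths assumed):

from pathlib import Path


def resolve_within(path: str, base: str) -> Path:
    # Anchor the relative path on the base directory and refuse anything
    # that resolves outside of it. Path.is_relative_to() needs Python 3.9+.
    base_dir = Path(base).resolve()
    resolved = (base_dir / path).resolve()
    if not resolved.is_relative_to(base_dir):
        raise ValueError(f"{path!r} points outside of {base!r}")
    return resolved


assert resolve_within("fedora/package.spec", "/repo") == Path("/repo/fedora/package.spec")
try:
    resolve_within("../etc/passwd", "/repo")
except ValueError:
    pass  # escaping the working directory is rejected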