Example #1
0
def init(
    config,
    path_or_url,
    force,
):
    """
    Create the initial Packit configuration in a repository

    See 'packit source-git init', if you want to initialize a repository
    as a source-git repo.
    """
    working_dir = path_or_url.working_dir
    existing_config = get_existing_config(working_dir)

    if existing_config and not force:
        raise PackitException(
            f"Packit config {existing_config} already exists."
            " If you want to regenerate it use `packit init --force`")

    # Reuse the existing path when regenerating; otherwise use the default name.
    config_path = existing_config if existing_config else working_dir / ".packit.yaml"

    # Prefer a spec file found in the repo; fall back to a name derived
    # from the repository name.
    detected_spec = get_local_specfile_path(working_dir)
    fallback_spec = f"{path_or_url.repo_name}.spec"
    template_data = {
        "upstream_package_name": path_or_url.repo_name,
        "downstream_package_name": path_or_url.repo_name,
        "specfile_path": detected_spec if detected_spec is not None else fallback_spec,
    }

    generate_config(
        config_file=config_path,
        write_to_file=True,
        template_data=template_data,
    )
Example #2
0
    def validate(self) -> str:
        """Create output for PackageConfig validation.

        Returns:
            A human-readable result: a success message when the config is
            valid, otherwise a description of the schema/config errors.
        """
        # None until validation fails; ValidationError.messages may be a
        # plain list of messages or a mapping of field name -> errors.
        # (The original annotation lacked None, yet None is assigned here.)
        schema_errors: Union[List[Any], Dict[Any, Any], None] = None
        try:
            PackageConfig.get_from_dict(
                self.content,
                config_file_path=str(self.config_file_path),
                spec_file_path=str(
                    get_local_specfile_path(self.config_file_path.parent)),
            )
        except ValidationError as e:
            schema_errors = e.messages
        except PackitConfigException as e:
            # Config-level problems already carry a user-facing message.
            return str(e)

        if not schema_errors:
            return f"{self.config_file_path.name} is valid and ready to be used"

        output = f"{self.config_file_path.name} does not pass validation:\n"
        if isinstance(schema_errors, list):
            output += "\n".join(map(str, schema_errors))
            return output

        # Mapping form: render each field's errors separately.
        for field_name, errors in schema_errors.items():
            output += self.validate_get_field_output(errors, field_name)
        return output
    def validate(self) -> str:
        """Create output for PackageConfig validation.

        Validates the config against the schema, warns when the configured
        spec file is not present in the repository, and reports configured
        files-to-sync that do not exist.

        Returns:
            A human-readable result: a success message when everything
            checks out, otherwise a description of the problems found.
        """
        # None until schema validation fails; ValidationError.messages may
        # be a plain list of messages or a mapping of field name -> errors.
        # (The original annotation lacked None, yet None is assigned here.)
        schema_errors: Union[List[Any], Dict[Any, Any], None] = None
        config = None
        try:
            config = PackageConfig.get_from_dict(
                self.content,
                config_file_path=str(self.config_file_path),
                spec_file_path=str(
                    get_local_specfile_path(self.config_file_path.parent)),
            )
        except ValidationError as e:
            schema_errors = e.messages
        except PackitConfigException as e:
            # Config-level problems already carry a user-facing message.
            return str(e)

        # A missing spec file is only a warning — it may be generated
        # dynamically before it is actually needed.
        specfile_path = self.content.get("specfile_path", None)
        if specfile_path and not Path(specfile_path).is_file():
            logger.warning(
                f"The spec file you defined ({specfile_path}) is not "
                f"present in the repository. If it's being generated "
                f"dynamically, you can verify the functionality by "
                f"running `packit srpm` to create an SRPM "
                f"from the current checkout. If it's not being generated, "
                f"please make sure the path is correct and "
                f"the file is present.")

        # Collect configured sync sources that do not exist on disk.
        synced_files_errors = []
        if config:
            synced_files_errors = [
                f for f in iter_srcs(config.files_to_sync)
                if not Path(f).exists()
            ]

        output = f"{self.config_file_path.name} does not pass validation:\n"

        if schema_errors:
            if isinstance(schema_errors, list):
                output += "\n".join(map(str, schema_errors))
            else:
                # Mapping form: render each field's errors separately.
                for field_name, errors in schema_errors.items():
                    output += self.validate_get_field_output(
                        errors, field_name)

        if synced_files_errors:
            # Pick singular/plural wording based on the number of misses.
            output += "The following {} configured to be synced but {} not exist: {}\n".format(
                *((
                    "paths are",
                    "do",
                ) if (len(synced_files_errors) > 1) else ("path is", "does")),
                ", ".join(synced_files_errors),
            )

        if schema_errors or synced_files_errors:
            return output
        else:
            return f"{self.config_file_path.name} is valid and ready to be used"
Example #4
0
def test_get_local_specfile_path():
    """A spec file is found in UP_OSBUILD; none exists in SYNC_FILES."""
    found = get_local_specfile_path(UP_OSBUILD)
    assert str(found) == "osbuild.spec"
    missing = get_local_specfile_path(SYNC_FILES)
    assert not missing
Example #5
0
def apply_patches(ctx, gitdir):
    """Apply the patches used in the SPEC-file found in GITDIR.

    Apply all the patches used in the SPEC-file, then update the
    SPEC-file by commenting the patches that were applied and deleting
    those patches from the disk.

    Stage and commit changes after each patch, except the ones in the
    'centos-packaging' directory.
    """
    class Specfile(SpecFile):
        """SpecFile subclass able to comment out 'PatchN:' preamble lines."""

        def comment_patches(self, patch_indexes):
            # Matches preamble lines like "Patch0001: foo.patch";
            # "index" captures the numeric patch index.
            pattern = re.compile(r"^Patch(?P<index>\d+)\s*:.+$")
            package = self.spec_content.section("%package")
            for i, line in enumerate(package):
                match = pattern.match(line)
                if match:
                    index = int(match.group("index"))
                    if index in patch_indexes:
                        logger.debug(f"Commenting patch {index}")
                        # Prefix with "# " to keep the line visible but inert.
                        package[i] = f"# {line}"
            self.spec_content.replace_section("%package", package)

    downstream_files_dir = Path(gitdir, DOWNSTREAM_FILES_DIR)
    specdir = downstream_files_dir / "SPECS"
    # Locate the spec file inside the downstream SPECS directory.
    specpath = specdir / get_local_specfile_path(specdir)
    logger.info(f"specpath = {specpath}")
    specfile = Specfile(
        specpath,
        sources_location=str(downstream_files_dir / "SOURCES"),
    )
    repo = git.Repo(gitdir)
    applied_patches = specfile.get_applied_patches()

    # TODO(csomh):
    # the below is not complete, as there are many more ways to specify
    # patches in spec files. Cover this in the future.
    patch_indices = [p.index for p in applied_patches]
    # comment out all Patch in %package
    specfile.comment_patches(patch_indices)
    # comment out all %patch in %prep
    specfile._process_patches(patch_indices)
    specfile.save()
    # Commit the spec-file edits before any patches are applied.
    repo.git.add(specpath.relative_to(gitdir))
    repo.git.commit(m="Downstream spec with commented patches")

    # Create a tag marking last commit before downstream patches
    logger.info(f"Creating tag {START_TAG}")
    repo.create_tag(START_TAG)

    # Transfer all patches that were in spec into git commits ('git am' or 'git apply')
    for patch in applied_patches:
        message = f"Apply Patch{patch.index}: {patch.get_patch_name()}"
        logger.info(message)
        rel_path = os.path.relpath(patch.path, gitdir)
        try:
            # 'git am' applies and commits in one step when the patch is a
            # mailbox-formatted patch.
            repo.git.am(rel_path)
        except git.exc.CommandError as e:
            # Fall back to 'git apply' (honoring the patch's strip level)
            # and create the commit explicitly via the click subcommands.
            logger.debug(str(e))
            repo.git.apply(rel_path, p=patch.strip)
            ctx.invoke(stage, gitdir=gitdir, exclude=DOWNSTREAM_FILES_DIR)
            ctx.invoke(commit, gitdir=gitdir, m=message)
        # The patch is a commit now, so clean it up.
        os.unlink(patch.path)
Example #6
0
def test_get_local_specfile_path():
    """Spec lookup over explicit path lists: hit for UP_OSBUILD, miss for SYNC_FILES."""
    spec_name = get_local_specfile_path([UP_OSBUILD])
    assert spec_name == "osbuild.spec"
    assert not get_local_specfile_path([SYNC_FILES])