def __init__(
    self,
    config: ServiceConfig,
    job_config: JobConfig,
    event: Union[PullRequestEvent, ReleaseEvent, PushGitHubEvent],
):
    """Validate the incoming event type and prepare lazily-filled attributes."""
    super().__init__(config=config, job_config=job_config, event=event)
    accepted_events = (PullRequestEvent, PushGitHubEvent, ReleaseEvent)
    if not isinstance(event, accepted_events):
        raise PackitException(
            "Unknown event, only "
            "PullRequestEvent, ReleaseEvent, and PushGitHubEvent "
            "are accepted."
        )
    # lazy properties, computed on first access
    self._koji_build_helper: Optional[KojiBuildJobHelper] = None
    self._package_config: Optional[PackageConfig] = None
    self._project: Optional[GitProject] = None
def get_version_from_tag(self, tag: str) -> str:
    """Extract the version from a git tag using upstream_tag_template.

    :param tag: git tag containing the version
    :return: version string
    :raises PackitException: when the tag does not match the template
    """
    field = "version"
    template = self.package_config.upstream_tag_template
    pattern = re.compile(self._template2regex(template))
    matched = pattern.match(tag)
    if not matched or field not in matched.groupdict():
        msg = f'Unable to extract "{field}" from {tag} using {template}'
        logger.error(msg)
        raise PackitException(msg)
    return matched.group(field)
def convert_version_to_tag(self, version_: str) -> str:
    """Convert a version to a git tag using upstream_tag_template.

    :param version_: version to fill into upstream_tag_template
    :return: rendered tag
    :raises PackitException: when the template lacks a "version" placeholder
    """
    template = self.package_config.upstream_tag_template
    try:
        return template.format(version=version_)
    except KeyError:
        msg = (
            f"Invalid upstream_tag_template: {template} - "
            f'"version" placeholder is missing'
        )
        logger.error(msg)
        raise PackitException(msg)
def git_checkout_block(self, ref: str = None):
    """Temporarily check out another git ref (generator-based block).

    Checks out ``ref`` (resolving it to a commit hash when it is not a
    named reference), yields, then checks the original HEAD back out.

    :param ref: git ref to check out; when None, nothing is changed
    :raises PackitException: when the ref cannot be resolved
    """
    current_head = self._get_ref_from_git_repo()
    if ref:
        logger.debug(
            f"Leaving old ref: '{current_head}' and checkout new ref: '{ref}'"
        )
        if ref not in self.git_repo.refs:
            if not is_a_git_ref(self.git_repo, ref):
                raise PackitException(
                    f"Git ref '{ref}' not found, cannot checkout.")
            # not a named ref: resolve it to the commit hash
            ref = self.git_repo.commit(ref).hexsha
        self.git_repo.git.checkout(ref)
    try:
        yield
    finally:
        # BUGFIX: restore the original HEAD even when the body raises;
        # previously an exception left the repo on the temporary ref
        if ref:
            logger.debug(
                f"Leaving new ref: '{ref}' and checkout old ref: '{current_head}'"
            )
            self.git_repo.git.checkout(current_head)
def absolute_specfile_path(self) -> Path:
    """Absolute path to the spec file, cached after the first lookup.

    :raises PackitException: when the configured spec file does not exist
    """
    # TODO: introduce a "discovery" phase before creating the
    # Upstream/DistGit objects so unknown paths (e.g.
    # <working_dir>/<downstream_package_name>.spec) could be probed
    if not self._specfile_path:
        candidate = Path(self.local_project.working_dir) / self.package_config.specfile_path
        self._specfile_path = candidate
        if not candidate.exists():
            raise PackitException(f"Specfile {candidate} not found.")
    return self._specfile_path
def set_spec_version(self, version: str, changelog_entry: str):
    """
    Set version in spec and add a changelog_entry.

    :param version: new version
    :param changelog_entry: accompanying changelog entry
    :raises PackitException: when rebase-helper fails to update the spec
    """
    try:
        # also this code adds 3 rpmbuild dirs into the upstream repo,
        # we should ask rebase-helper not to do that
        self.specfile.set_version(version=version)
        self.specfile.changelog_entry = changelog_entry
        # prepend the freshly generated entry to the existing changelog:
        # https://github.com/rebase-helper/rebase-helper/blob/643dab4a864288327289f34e023124d5a499e04b/rebasehelper/application.py#L446-L448
        new_log = self.specfile.get_new_log()
        new_log.extend(self.specfile.spec_content.sections["%changelog"])
        self.specfile.spec_content.sections["%changelog"] = new_log
        self.specfile.save()
    except RebaseHelperError as ex:
        logger.error(f"rebase-helper failed to change the spec file: {ex!r}")
        # chain the original error so the root cause is not lost
        raise PackitException("rebase-helper didn't do the job") from ex
def specfile_path(self) -> str:
    """Path to the spec file (as str), discovered lazily.

    Tries the configured specfile_path first, then falls back to
    <downstream_package_name>.spec in the working directory.

    :raises PackitException: when no spec file is found
    """
    if not self._specfile_path:
        possible_paths = [
            Path(self.local_project.working_dir)
            / self.package_config.specfile_path,
            Path(self.local_project.working_dir)
            / f"{self.package_config.downstream_package_name}.spec",
        ]
        for path in possible_paths:
            if path.exists():
                self._specfile_path = str(path)
                break
        else:
            # BUGFIX: the original message glued "found." and "Tried:"
            # together ("...not found.Tried: ...") — add the separators
            raise PackitException(
                f"Specfile not found. "
                f"Tried: {', '.join(str(p) for p in possible_paths)} .")
    return self._specfile_path
def get_user_config(cls) -> "Config":
    """Load user configuration from $XDG_CONFIG_HOME (or ~/.config).

    The first existing file from CONFIG_FILE_NAMES wins; when none is
    found, an empty configuration is used.

    :return: Config built from the loaded (possibly empty) dict
    :raises PackitException: when an existing config file cannot be parsed
    """
    xdg_config_home = os.getenv("XDG_CONFIG_HOME")
    if xdg_config_home:
        directory = Path(xdg_config_home)
    else:
        directory = Path.home() / ".config"
    logger.debug(f"Loading user config from directory: {directory}")
    loaded_config: dict = {}
    for config_file_name in CONFIG_FILE_NAMES:
        config_file_name_full = directory / config_file_name
        logger.debug(f"Trying to load user config from: {config_file_name_full}")
        if config_file_name_full.is_file():
            try:
                # BUGFIX: close the file handle — the original passed
                # open() directly to safe_load and leaked it
                with open(config_file_name_full) as config_file:
                    loaded_config = safe_load(config_file)
            except Exception as ex:
                logger.error(f"Cannot load user config '{config_file_name_full}'.")
                raise PackitException(f"Cannot load user config: {ex}.") from ex
            break
    return Config.get_from_dict(raw_dict=loaded_config)
def sync_files(files_to_sync: List[RawSyncFilesItem],
               src_working_dir: str,
               dest_working_dir: str,
               fail_on_missing: bool = True) -> None:
    """
    Sync required files from upstream to downstream.

    :param files_to_sync: items describing src/dest of each file
    :param src_working_dir: directory to copy from
    :param dest_working_dir: directory to copy to
    :param fail_on_missing: when True (default, original behavior), raise
        if a source file is missing; when False, log and skip it
        (matches the `sync_files(..., fail_on_missing=False)` caller)
    :raises PackitException: when a source file is missing and
        fail_on_missing is True
    """
    logger.debug(f"Copy synced files {files_to_sync}")
    for fi in files_to_sync:
        # Check if destination dir exists
        # If not create the destination dir
        dest_dir = os.path.join(dest_working_dir, fi.dest)
        logger.debug(f"Destination {dest_dir}")
        # Sync all source file
        src_file = os.path.join(src_working_dir, fi.src)
        logger.debug(f"Source file {src_file}")
        if os.path.exists(src_file):
            logger.info(f"Syncing {src_file}")
            shutil.copy2(src_file, dest_dir)
        elif fail_on_missing:
            raise PackitException(
                f"File {src_file} is not present in the upstream repository. ")
        else:
            logger.warning(f"File {src_file} is not present in the upstream repository.")
def squash_by_patch_name(patch_list: List[PatchMetadata]) -> List[PatchMetadata]:
    """Squash adjacent patches that share the same name.

    A patch whose name equals the name of the previous patch has its file
    content appended to the previous patch's file; its own file is removed
    and its PatchMetadata object is dropped from the result. (Renaming the
    patch files to match the metadata happens in 'rename_patches()'.)

    Args:
        patch_list: PatchMetadata objects ordered from oldest to newest commit.

    Returns:
        List of PatchMetadata objects after squashing.

    Raises:
        PackitException: when non-adjacent patches share a name.
    """
    logger.debug("Squashing commits by 'patch_name'.")
    squashed: List[PatchMetadata] = []
    names_seen = set()
    for current in patch_list:
        previous = squashed[-1] if squashed else None
        if previous is not None and previous.name == current.name:
            # same name as the previous patch: merge file contents
            logger.debug(
                f"Appending patch {current!r} to {previous!r}."
            )
            previous.path.write_text(
                previous.path.read_text() + current.path.read_text()
            )
            current.path.unlink()
            continue
        if current.name in names_seen:
            raise PackitException(
                f"Non-adjacent patches cannot have the same name: {current.name}."
            )
        names_seen.add(current.name)
        squashed.append(current)
    return squashed
def _fix_spec_prep(self, version):
    # Rewrite the %setup/%autosetup line in %prep so that it unpacks into
    # "<upstream_package_name>-<version>" via the -n option.
    prep = self.specfile.spec_content.section("%prep")
    if not prep:
        logger.warning("this package doesn't have a %prep section")
        return

    # stolen from tito, thanks!
    # https://github.com/dgoodwin/tito/blob/master/src/tito/common.py#L695
    # group 1: the macro itself, group 2: its arguments
    regex = re.compile(r"^(\s*%(?:auto)?setup)(.*?)$")
    for idx, line in enumerate(prep):
        m = regex.match(line)
        if m:
            break
    else:
        # no %setup/%autosetup line was found at all
        logger.error(
            "this package is not using %(auto)setup macro in prep, "
            "packit can't work in this environment"
        )
        return

    # NOTE: 'm' and 'idx' intentionally leak out of the loop above —
    # they refer to the matched %setup line and its index
    new_setup_line = m[1]
    # replace -n with our -n because it's better
    args_match = re.search(r"(.*?)\s+-n\s+\S+(.*)", m[2])
    if args_match:
        # keep the arguments around the old -n, drop its value
        new_setup_line += args_match.group(1)
        new_setup_line += args_match.group(2)
    else:
        new_setup_line += m[2]
    if not self.package_config.upstream_package_name:
        raise PackitException(
            f'"upstream_package_name" is not set: unable to fix the spec file; please set it.'
        )
    new_setup_line += f" -n {self.package_config.upstream_package_name}-{version}"
    logger.debug(
        f"new {'%autosetup' if 'autosetup' in new_setup_line else '%setup'}"
        f" line:\n{new_setup_line}"
    )
    prep[idx] = new_setup_line
    self.specfile.spec_content.replace_section("%prep", prep)
    self.specfile.write_spec_content()
def get_commit_messages(
    self, after: Optional[str] = None, before: str = "HEAD"
) -> str:
    """Return formatted commit messages from a revision range.

    :param after: get commit messages after this revision; when None,
        every commit message up to 'before' is returned
    :param before: get commit messages before this revision
    :return: one "- <subject> (<author>)" line per commit
    :raises PackitException: when the upper bound is empty
    """
    # let's print changes b/w the last 2 revisions;
    # ambiguous argument '0.1.0..HEAD': unknown revision or path not in the working tree.
    # Use '--' to separate paths from revisions, like this
    rev_range = f"{after}..{before}" if after else before
    if not before:
        raise PackitException(
            "Unable to get a list of commit messages in range "
            f"{rev_range} because the upper bound is not "
            f"defined ({before!r})."
        )
    pretty = "--pretty=format:- %s (%an)"
    git_log = ["git", "log", "--no-merges", pretty, rev_range, "--"]
    try:
        return run_command(
            git_log, output=True, cwd=self.local_project.working_dir
        ).stdout.strip()
    except PackitCommandFailedError as ex:
        logger.error(f"We couldn't get commit messages for %changelog\n{ex}")
        logger.info(f"Does the git ref {after} exist in the git repo?")
        logger.info(
            "If the ref is a git tag, "
            'you should consider setting "upstream_tag_template":\n '
            "https://packit.dev/docs/configuration/#upstream_tag_template"
        )
        raise
def create_archive(self, version: str = None) -> str:
    """
    Create archive, using `git archive` by default, from the content of
    the upstream repository; only committed changes are present in the
    archive. A user-defined 'create-archive' action or
    'create_tarball_command' takes precedence when configured.

    :param version: version to embed in the archive name; when None,
        the current upstream version is detected
    :return: name of the created archive
    :raises PackitException: when the archive extension is not one
        `git archive` can produce
    """
    # a user-defined action replaces the whole archive creation
    if self.has_action(action=ActionName.create_archive):
        return self.get_output_from_action(
            action=ActionName.create_archive)
    version = version or self.get_current_version()
    # directory prefix inside the archive: <name>-<version>
    if self.package_config.upstream_project_name:
        dir_name = f"{self.package_config.upstream_project_name}" f"-{version}"
    else:
        dir_name = f"{self.package_config.downstream_package_name}-{version}"
    logger.debug("name + version = %s", dir_name)
    archive_extension = self.get_archive_extension(dir_name, version)
    if archive_extension not in COMMON_ARCHIVE_EXTENSIONS:
        raise PackitException(
            "The target archive doesn't use a common extension ({}), "
            "git archive can't be used. Please provide your own script "
            "for archive creation.".format(
                ", ".join(COMMON_ARCHIVE_EXTENSIONS)))
    archive_name = f"{dir_name}{archive_extension}"
    if self.package_config.create_tarball_command:
        # the user supplied a command that creates the tarball
        archive_cmd = self.package_config.create_tarball_command
    else:
        archive_cmd = [
            "git",
            "archive",
            "-o",
            archive_name,
            "--prefix",
            f"{dir_name}/",
            "HEAD",
        ]
    self.command_handler.run_command(archive_cmd, return_output=True)
    return archive_name
def upstream_ref(self) -> str:
    """The upstream base git ref, resolved lazily.

    Discovered from the tarball comment when possible, otherwise
    falls back to the HEAD commit of the local project.

    :raises PackitException: when HEAD cannot be resolved either
    """
    if self._upstream_ref is None:
        self._upstream_ref = get_tarball_comment(str(self.primary_archive))
        if self._upstream_ref:
            logger.info(
                "upstream base ref was not set, "
                f"discovered it from the archive: {self._upstream_ref}")
        else:
            # fallback to HEAD
            try:
                self._upstream_ref = self.local_project.commit_hexsha
            except ValueError as ex:
                # chain the original error so the root cause is preserved
                raise PackitException(
                    "Current branch seems to be empty - we cannot get the hash of "
                    "the top commit. We need to set upstream_ref in packit.yaml to "
                    "distinct between upstream and downstream changes. "
                    "Please set --upstream-ref or pull the upstream git history yourself. "
                    f"Error: {ex}") from ex
            logger.info(
                "upstream base ref was not set, "
                f"falling back to the HEAD commit: {self._upstream_ref}")
    return self._upstream_ref
def koji_build(
    self,
    scratch: bool = False,
    nowait: bool = False,
    koji_target: Optional[str] = None,
    srpm_path: Optional[Path] = None,
):
    """Run `koji build` in the repository.

    :param scratch: should the build be a scratch build?
    :param nowait: don't wait on build?
    :param koji_target: koji target to pick (see `koji list-targets`)
    :param srpm_path: use selected SRPM for build, not dist-git repo & ref
    :return: output of the koji command
    :raises PackitException: when no koji target is given
    """
    if not koji_target:
        raise PackitException(
            "koji target needs to be set when building directly from upstream"
        )
    # we can't use fedpkg b/c upstream repo is not dist-git
    cmd = ["koji", "build"]
    if scratch:
        cmd.append("--scratch")
    if nowait:
        cmd.append("--nowait")
    cmd.extend([koji_target, str(srpm_path)])
    logger.info("Starting a koji build.")
    if not nowait:
        logger.info(
            "We will be actively waiting for the build to finish, it may take some time."
        )
    return utils.run_command_remote(
        cmd,
        cwd=self.local_project.working_dir,
        output=True,
        decode=True,
        print_live=True,
    )
def __init__(
    self,
    config: ServiceConfig,
    job: JobConfig,
    event: Union[PullRequestEvent, ReleaseEvent],
):
    """Derive the base ref / PR id from the event and load the package config."""
    super().__init__(config=config, job=job, event=event)
    if isinstance(event, PullRequestEvent):
        base_ref, pr_id = event.base_ref, event.pr_id
    elif isinstance(event, ReleaseEvent):
        base_ref, pr_id = event.tag_name, None
    else:
        raise PackitException(
            "Unknown event, only PREvent and ReleaseEvent are accepted.")
    self.project: GitProject = event.get_project()
    self.package_config: PackageConfig = self.get_package_config_from_repo(
        self.project, base_ref, pr_id)
    self.package_config.upstream_project_url = event.project_url
def run_command(cmd, error_message=None, cwd=None, fail=True, output=False):
    """Run a command and optionally return its stdout.

    :param cmd: command as a list of arguments or a single string
        (a string is split with shlex)
    :param error_message: message logged/raised on failure
        (defaults to the command name)
    :param cwd: working directory (defaults to the current one)
    :param fail: when True, raise PackitException on non-zero exit
    :param output: when True, return stdout instead of a success bool
    :return: stdout string when output=True, otherwise True/False
    :raises PackitException: on non-zero exit when fail=True
    """
    logger.debug("cmd = %s", cmd)
    if not isinstance(cmd, list):
        cmd = shlex.split(cmd)
        # BUGFIX: log the joined *list* — the original applied " ".join()
        # to the raw string, interleaving every character with spaces
        logger.debug("cmd = '%s'", " ".join(cmd))
    cwd = cwd or str(Path.cwd())
    error_message = error_message or cmd[0]
    shell = subprocess.run(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=False,
        cwd=cwd,
        universal_newlines=True,
    )
    if not output:
        # output is returned, let the caller process it
        logger.debug("%s", shell.stdout)
    stderr = shell.stderr.strip()
    if stderr:
        logger.error("%s", shell.stderr)
    if shell.returncode != 0:
        logger.error("Command %s failed", shell.args)
        logger.error("%s", error_message)
        if fail:
            raise PackitException(
                f"Command {shell.args!r} failed: {error_message}")
        success = False
    else:
        success = True
    if not output:
        return success
    return shell.stdout
def get_latest_build_in_tag(self, downstream_package_name, dist_git_branch):
    """Query Koji for the latest build of a package in a tag.

    Args:
        downstream_package_name (str): package name used for the Koji build
        dist_git_branch (str): dist-git branch where to look for the build

    Returns:
        The latest known build (NVR string).

    Raises:
        PackitException: when no build exists for the derived Koji tag.
    """
    logger.debug("Querying Koji for the latest build "
                 f"of package {downstream_package_name!r} "
                 f"in dist-git-branch {dist_git_branch!r}")
    # EPEL uses "testing-candidate" instead of "updates-candidate"
    prefix = "testing" if dist_git_branch.startswith("epel") else "updates"
    koji_tag = f"{dist_git_branch}-{prefix}-candidate"
    session = ClientSession(baseurl=KOJI_BASEURL)
    # inherit=True follows tag inheritance; strict=False yields an empty
    # result instead of raising when the tag is unknown
    koji_build = session.listTagged(
        tag=koji_tag,
        package=downstream_package_name,
        inherit=True,
        latest=True,
        strict=False,
    )
    if not koji_build:
        raise PackitException(
            f"There is no build for {downstream_package_name!r} "
            f"and koji tag {koji_tag}")
    else:
        koji_build_str = koji_build[0]["nvr"]
        logger.info("Koji build for package "
                    f"{downstream_package_name!r} and koji tag {koji_tag}:"
                    f"\n{koji_build_str}")
        return koji_build_str
def set_spec_version(self, version: str = None, release: str = None,
                     changelog_entry: str = None):
    """
    Set version in spec, release and add a changelog_entry (if they are presented).

    :param version: new version
    :param release: new release
    :param changelog_entry: accompanying changelog entry
    :raises PackitException: when rebase-helper fails to update the spec
    """
    try:
        if version:
            # also this code adds 3 rpmbuild dirs into the upstream repo,
            # we should ask rebase-helper not to do that
            # using set_tag instead of set_version to turn off preserving macros
            self.set_tag("Version", version, preserve_macros=False)
        if release:
            # using set_tag instead of set_release to turn off preserving macros
            self.set_tag("Release", "{}%{{?dist}}".format(release),
                         preserve_macros=False)
        if not changelog_entry:
            return
        if not self.spec_content.section("%changelog"):
            logger.debug(
                "The specfile doesn't have any %changelog, will not set it."
            )
            return
        self.update_changelog_in_spec(changelog_entry)
    except RebaseHelperError as ex:
        logger.error(f"Rebase-helper failed to change the spec file: {ex}")
        # chain the original error so the root cause is not lost
        raise PackitException("Rebase-helper didn't do the job.") from ex
def commit(self, title: str, msg: str, prefix: str = "[packit] ") -> None:
    """Stage everything (`git add -A`) and create a signed-off commit.

    :param title: commit title, prepended with `prefix`
    :param msg: extra commit message body, added when non-empty
    :param prefix: string prepended to the title
    :raises PackitException: when there is nothing to commit
    """
    logger.debug("About to add all & commit")
    main_msg = f"{prefix}{title}"
    if not self.local_project.git_repo.is_dirty():
        raise PackitException(
            "No changes are present in the dist-git repo: nothing to commit."
        )
    repo = self.local_project.git_repo
    repo.git.add("-A")
    repo.index.write()
    commit_args = ["-s", "-m", main_msg]
    if msg:
        commit_args.extend(["-m", msg])
    # TODO: attach git note to every commit created
    # TODO: implement cleaning policy: once the PR is closed (merged/refused),
    #   remove the branch; make this configurable so that people know this
    #   would happen; don't clean by default — likely clean only merged PRs
    # TODO: implement signing properly: we need to create a cert for the bot,
    #   distribute it to the container, prepare git config, then start signing
    # TODO: make -s configurable
    repo.git.commit(*commit_args)
def generate(path_or_url, force):
    """
    Generate new packit config.
    """
    working_dir = Path(path_or_url.working_dir)
    config_path = get_existing_config(working_dir)
    if config_path and not force:
        raise PackitException(
            f"Packit config {config_path} already exists."
            " If you want to regenerate it use `packit generate --force`")
    if not config_path:
        # Use default name
        config_path = working_dir / ".packit.yaml"
    template_data = {
        "upstream_project_name": path_or_url.repo_name,
        "downstream_package_name": path_or_url.repo_name,
    }
    generate_config(
        config_file=config_path,
        write_to_file=True,
        template_data=template_data,
    )
def __init__(
    self,
    config: ServiceConfig,
    job: JobConfig,
    event: Union[PullRequestEvent, ReleaseEvent],
):
    """Resolve the base ref from the event and load the package config."""
    super().__init__(config=config, job=job, event=event)
    if not isinstance(event, (PullRequestEvent, ReleaseEvent)):
        raise PackitException(
            "Unknown event, only PREvent and ReleaseEvent are accepted.")
    base_ref = (
        event.base_ref if isinstance(event, PullRequestEvent) else event.tag_name
    )
    self.project: GitProject = event.get_project()
    self.package_config: PackageConfig = get_package_config_from_repo(
        self.project, base_ref)
    if not self.package_config:
        raise ValueError(
            f"No config file found in {self.project.full_repo_name}")
    self.package_config.upstream_project_url = event.project_url
def _run_prep(self):
    """
    run `rpmbuild -bp` in the dist-git repo to get a git-repo in the %prep
    phase so we can pick the commits in the source-git repo
    """
    # NOTE(review): _packitpatch appears to be required by the macros in
    # RPM_MACROS_FOR_PREP (shipped with the packit RPM) — confirm
    _packitpatch_path = shutil.which("_packitpatch")
    if not _packitpatch_path:
        raise PackitException(
            "We are trying to unpack a dist-git archive and lay patches on top "
            'by running `rpmbuild -bp` but we cannot find "_packitpatch" command on PATH: '
            "please install packit as an RPM."
        )
    logger.info(
        f"expanding %prep section in {self.dist_git.local_project.working_dir}"
    )
    # point rpmbuild's directories at the dist-git checkout so %prep
    # unpacks in place
    rpmbuild_args = [
        "rpmbuild",
        "--nodeps",
        "--define",
        f"_topdir {str(self.dist_git.local_project.working_dir)}",
        "-bp",
        "--define",
        f"_specdir {str(self.dist_git.absolute_specfile_dir)}",
        "--define",
        f"_sourcedir {str(self.dist_git.absolute_source_dir)}",
    ]
    rpmbuild_args += RPM_MACROS_FOR_PREP
    if logger.level <= logging.DEBUG:
        # -vv can be super-duper verbose
        rpmbuild_args.append("-v")
    rpmbuild_args.append(str(self.dist_git.absolute_specfile_path))
    run_command(
        rpmbuild_args,
        cwd=self.dist_git.local_project.working_dir,
        print_live=True,
    )
def add_patches(self, patch_list: List[Tuple[Path, str]]) -> None:
    """
    Add given patches to the specfile.

    :param patch_list: [(patch_name, msg)]
    :raises PackitException: when the specfile already carries Patch tags
    """
    if not patch_list:
        return
    logger.debug(f"About to add patches {patch_list} to specfile.")
    # refuse to touch a spec that already has Patch tags
    if [t.name for t in self.tags.filter(name="Patch*")]:
        raise PackitException(
            "This specfile already contains patches, please remove them.")
    new_content = "\n# PATCHES FROM SOURCE GIT:\n"
    # every commit message becomes a comment block above its Patch tag
    for i, (patch, msg) in enumerate(patch_list):
        new_content += "\n# " + "\n# ".join(msg.split("\n"))
        new_content += f"\nPatch{(i + 1):04d}: {patch.name}\n"
    # valid=None: take any SourceX even if it's disabled
    last_source_tag_line = [
        t.line for t in self.tags.filter(name="Source*", valid=None)
    ][-1]
    # find the first empty line after last_source_tag
    for i, line in enumerate(
            self.spec_content.section("%package")[last_source_tag_line:]):
        if line.strip() == "":
            break
    else:
        # no empty line found after the Source tags: bail out quietly
        logger.error("Can't find where to add patches.")
        return
    where = last_source_tag_line + i
    # insert new content below last Source
    self.spec_content.section("%package")[where:where] = new_content.split(
        "\n")
    logger.info(f"{len(patch_list)} patches added to {self.path!r}.")
def __init__(
    self,
    config: ServiceConfig,
    job_config: JobConfig,
    event: Union[CoprBuildEvent, PullRequestCommentEvent],
    chroot: str,
):
    """Set up the testing-farm helper for a copr-build or PR-comment event."""
    super().__init__(config=config, job_config=job_config, event=event)
    self.chroot = chroot
    self.project: GitProject = event.get_project()
    if isinstance(event, CoprBuildEvent):
        self.base_ref, pr_id = event.git_ref, None
    elif isinstance(event, PullRequestCommentEvent):
        self.base_ref, pr_id = event.commit_sha, event.pr_id
    else:
        raise PackitException(
            "Unknown event, only PREvent and CoprBuildEvent are accepted.")
    self.package_config: PackageConfig = self.get_package_config_from_repo(
        self.project, self.base_ref, pr_id)
    self.package_config.upstream_project_url = event.project_url
    self.testing_farm_helper = TestingFarmJobHelper(
        self.config, self.package_config, self.project, self.event)
def push(self, refspec: str, remote_name: str = "origin", force: bool = False):
    """Push the selected refspec to a git remote.

    :param refspec: refspec to push
    :param remote_name: name of the remote to push to
    :param force: force-push when True
    :raises PackitException: when the push reports a failure flag
    """
    logger.info(f"pushing changes to remote {remote_name} using refspec {refspec}")
    push_infos_list: Iterable[PushInfo] = self.local_project.push(
        refspec, remote_name=remote_name, force=force
    )
    for pi in push_infos_list:
        logger.info(f"push summary: {pi.summary}")
        # BUGFIX: PushInfo.UP_TO_DATE is not a failure — it means there
        # was nothing to push; treating it as an error made no-op pushes
        # raise, so it is no longer checked here.
        push_failed = [
            bool(x & pi.flags)
            for x in (
                PushInfo.ERROR,
                PushInfo.REMOTE_FAILURE,
                PushInfo.REMOTE_REJECTED,
                PushInfo.NO_MATCH,  # this looks like it's not used in gitpython
                PushInfo.REJECTED,
            )
        ]
        if any(push_failed):
            logger.debug(f"push_info flags: {pi.flags}")
            raise PackitException(
                f"We were unable to push to dist-git: {pi.summary}."
            )
def _fix_spec_source(self, archive):
    # Point the right Source tag of the spec file at the new archive.
    prefix = "Source"
    # matches a bare "Source:" tag (no numeric index)
    regex = re.compile(r"^Source\s*:.+$")
    for line in self.specfile.spec_content.section("%package"):
        # we are looking for Source lines
        if line.startswith(prefix):
            # it's a Source line!
            if line.startswith(self.package_config.spec_source_id):
                # it even matches the specific Source\d+
                full_name = self.package_config.spec_source_id
            elif regex.match(line):
                # okay, let's try the other very common default
                # https://github.com/packit-service/packit/issues/536#issuecomment-534074925
                full_name = prefix
            else:
                # nope, let's continue the search
                continue
            # we found it!
            break
    else:
        # for/else: no break happened, i.e. no usable Source tag found
        raise PackitException(
            "The spec file doesn't have sources set "
            f"via {self.package_config.spec_source_id} nor {prefix}.")
    self.specfile.set_tag(full_name, archive)
def sync_release(
    self,
    dist_git_branch: str,
    use_local_content=False,
    version: str = None,
    force_new_sources=False,
    upstream_ref: str = None,
    create_pr: bool = True,
    force: bool = False,
) -> Optional[PullRequest]:
    """
    Update given package in Fedora

    :param dist_git_branch: branch in dist-git
    :param use_local_content: don't check out anything
    :param version: upstream version to update in Fedora
    :param force_new_sources: don't check the lookaside cache and perform new-sources
    :param upstream_ref: for a source-git repo, use this ref as the latest upstream commit
    :param create_pr: create a pull request if set to True
    :param force: ignore changes in the git index
    :return created PullRequest if create_pr is True, else None
    """
    assert_existence(self.up.local_project)
    assert_existence(self.dg.local_project)
    if self.dg.is_dirty():
        # BUGFIX: the two f-strings used to be glued without a space
        # ("...is dirty.This is not supported.")
        raise PackitException(
            f"The distgit repository {self.dg.local_project.working_dir} is dirty. "
            f"This is not supported.")
    if not force and self.up.is_dirty() and not use_local_content:
        raise PackitException(
            "The repository is dirty, will not discard the changes. Use --force to bypass."
        )
    # do not add anything between distgit clone and saving gpg keys!
    self.up.allowed_gpg_keys = self.dg.get_allowed_gpg_keys_from_downstream_config()

    upstream_ref = upstream_ref or self.package_config.upstream_ref
    create_pr = create_pr and self.package_config.create_pr
    self.up.run_action(actions=ActionName.post_upstream_clone)

    full_version = version or self.up.get_version()
    if not full_version:
        raise PackitException(
            "Could not figure out version of latest upstream release.")

    # remember the current branch so it can be restored in `finally`
    current_up_branch = self.up.active_branch
    try:
        upstream_tag = self.up.package_config.upstream_tag_template.format(
            version=full_version)
        if not use_local_content:
            self.up.local_project.checkout_release(upstream_tag)

        self.dg.check_last_commit()

        self.up.run_action(actions=ActionName.pre_sync)
        self.dg.create_branch(
            dist_git_branch,
            base=f"remotes/origin/{dist_git_branch}",
            setup_tracking=True,
        )

        # fetch and reset --hard upstream/$branch?
        logger.info(f"Using {dist_git_branch!r} dist-git branch.")
        self.dg.update_branch(dist_git_branch)
        self.dg.checkout_branch(dist_git_branch)

        if create_pr:
            # commit on a dedicated branch so a PR can be opened from it
            local_pr_branch = f"{full_version}-{dist_git_branch}-update"
            self.dg.create_branch(local_pr_branch)
            self.dg.checkout_branch(local_pr_branch)

        description = (
            f"Upstream tag: {upstream_tag}\n"
            f"Upstream commit: {self.up.local_project.commit_hexsha}\n")

        path = os.path.join(self.dg.local_project.working_dir, "README.packit")
        logger.debug(f"Path of README: {path}")
        with open(path, "w") as f:
            f.write(
                SYNCING_NOTE.format(packit_version=get_packit_version()))

        files_to_sync = self.package_config.get_all_files_to_sync()

        if self.up.with_action(action=ActionName.prepare_files):
            comment = f"- new upstream release: {full_version}"
            try:
                self.dg.set_specfile_content(self.up.specfile, full_version, comment)
            except FileNotFoundError as ex:
                # no downstream spec file: this is either a mistake or
                # there is no spec file in dist-git yet, hence warning
                logger.warning(
                    f"There is not spec file downstream: {ex}, copying the one from upstream."
                )
                shutil.copy2(
                    self.up.absolute_specfile_path,
                    self.dg.get_absolute_specfile_path(),
                )

            raw_sync_files = files_to_sync.get_raw_files_to_sync(
                Path(self.up.local_project.working_dir),
                Path(self.dg.local_project.working_dir),
            )
            # exclude spec, we have special plans for it
            raw_sync_files = [
                x for x in raw_sync_files if x.src != self.up.absolute_specfile_path
            ]
            sync_files(raw_sync_files)
            if upstream_ref:
                if self.up.with_action(action=ActionName.create_patches):
                    patches = self.up.create_patches(
                        upstream=upstream_ref,
                        destination=str(self.dg.absolute_specfile_dir),
                    )
                    self.dg.specfile_add_patches(patches)
            self._handle_sources(add_new_sources=True,
                                 force_new_sources=force_new_sources)

        # when the action is defined, we still need to copy the files
        if self.up.has_action(action=ActionName.prepare_files):
            raw_sync_files = files_to_sync.get_raw_files_to_sync(
                Path(self.up.local_project.working_dir),
                Path(self.dg.local_project.working_dir),
            )
            sync_files(raw_sync_files)

        self.dg.commit(title=f"{full_version} upstream release", msg=description)

        new_pr = None
        if create_pr:
            new_pr = self.push_and_create_pr(
                pr_title=f"Update to upstream release {full_version}",
                pr_description=description,
                dist_git_branch=dist_git_branch,
            )
        else:
            self.dg.push(refspec=f"HEAD:{dist_git_branch}")
    finally:
        if not use_local_content:
            self.up.local_project.git_repo.git.checkout(current_up_branch)
        self.dg.refresh_specfile()
    return new_pr
def create_bodhi_update(
    self,
    dist_git_branch: str,
    update_type: str,
    update_notes: str,
    koji_builds: Optional[Sequence[str]] = None,
    bugzilla_ids: Optional[List[int]] = None,
):
    """Create a Bodhi update from Koji builds.

    :param dist_git_branch: dist-git branch the update is created from
    :param update_type: Bodhi update type
    :param update_notes: notes template; "{version}" is replaced with
        the expanded spec version
    :param koji_builds: builds to attach; when None, the latest build
        in the branch's candidate tag is used
    :param bugzilla_ids: bug numbers to attach to the update
    :return: alias of the created Bodhi update
    :raises PackitException: on authentication or submission problems
    """
    logger.debug(
        f"About to create a Bodhi update of type {update_type!r} from {dist_git_branch!r}"
    )
    bodhi_client = get_bodhi_client(
        fas_username=self.config.fas_user,
        fas_password=self.config.fas_password,
        kerberos_realm=self.config.kerberos_realm,
    )
    # make sure we have the credentials
    bodhi_client.ensure_auth()
    if not koji_builds:
        koji_builds = [
            self.get_latest_build_in_tag(
                self.package_config.downstream_package_name,
                dist_git_branch=dist_git_branch,
            )
        ]
    # I was thinking of verifying that the build is valid for a new bodhi update
    # but in the end it's likely a waste of resources since bodhi will tell us
    rendered_note = update_notes.format(
        version=self.specfile.expanded_version)
    try:
        save_kwargs = {
            "builds": koji_builds,
            "notes": rendered_note,
            "type": update_type,
        }
        if bugzilla_ids:
            save_kwargs["bugs"] = list(map(str, bugzilla_ids))
        try:
            result = bodhi_client.save(**save_kwargs)
        except BodhiClientException as ex:
            if "Unauthorized: new_update__POST failed permission check" in str(
                    ex):
                raise PackitException(
                    "You are using Bodhi 6 client. There is an issue with creating "
                    "updates using this version: "
                    "https://github.com/fedora-infra/bodhi/issues/4660")
            # a login error may just mean a stale session: re-auth and
            # retry the save exactly once
            logger.debug(f"Bodhi client raised a login error: {ex}. "
                         f"Let's clear the session, csrf token and retry.")
            bodhi_client.ensure_auth()
            result = bodhi_client.save(**save_kwargs)
        logger.debug(f"Bodhi response:\n{result}")
        logger.info(f"Bodhi update {result['alias']}:\n"
                    f"- {result['url']}\n"
                    f"- stable_karma: {result['stable_karma']}\n"
                    f"- unstable_karma: {result['unstable_karma']}\n"
                    f"- notes:\n{result['notes']}\n")
        if "caveats" in result:
            for cav in result["caveats"]:
                logger.info(f"- {cav['name']}: {cav['description']}\n")
    except AuthError as ex:
        logger.error(ex)
        raise PackitException(
            f"There is an authentication problem with Bodhi:\n{ex}"
        ) from ex
    except BodhiClientException as ex:
        logger.error(ex)
        raise PackitException(
            f"There is a problem with creating the bodhi update:\n{ex}"
        ) from ex
    return result["alias"]
def sync_from_downstream(
    self,
    dist_git_branch: str,
    upstream_branch: str,
    no_pr: bool = False,
    fork: bool = True,
    remote_name: str = None,
    exclude_files: Iterable[str] = None,
    force: bool = False,
):
    """
    Sync content of Fedora dist-git repo back to upstream

    :param exclude_files: files that will be excluded from the sync
    :param dist_git_branch: branch in dist-git
    :param upstream_branch: upstream branch
    :param no_pr: won't create a pull request if set to True
    :param fork: forks the project if set to True
    :param remote_name: name of remote where we should push; if None, try to find a ssh_url
    :param force: ignore changes in the git index
    :raises PackitException: when a branch is not set or the repo is dirty
    """
    exclude_files = exclude_files or []
    if not dist_git_branch:
        raise PackitException("Dist-git branch is not set.")
    if not upstream_branch:
        raise PackitException("Upstream branch is not set.")
    logger.info(f"Upstream active branch: {self.up.active_branch}")

    if not force and self.up.is_dirty():
        raise PackitException(
            "The repository is dirty, will not discard the changes. Use --force to bypass."
        )
    self.dg.update_branch(dist_git_branch)
    self.dg.checkout_branch(dist_git_branch)
    logger.info(f"Using {dist_git_branch!r} dist-git branch.")

    if no_pr:
        self.up.checkout_branch(upstream_branch)
    else:
        # sync onto a dedicated branch so a PR can be opened from it
        local_pr_branch = f"{dist_git_branch}-downstream-sync"
        self.up.create_branch(local_pr_branch)
        self.up.checkout_branch(local_pr_branch)

    raw_sync_files = self.package_config.synced_files.get_raw_files_to_sync(
        dest_dir=Path(self.dg.local_project.working_dir),
        src_dir=Path(self.up.local_project.working_dir),
    )
    # the sync mapping is defined upstream->downstream, so reverse it
    # (and drop the excluded files)
    reverse_raw_sync_files = [
        raw_file.reversed()
        for raw_file in raw_sync_files
        if Path(raw_file.dest).name not in exclude_files
    ]
    sync_files(reverse_raw_sync_files, fail_on_missing=False)

    if not no_pr:
        description = f"Downstream commit: {self.dg.local_project.commit_hexsha}\n"
        commit_msg = f"Sync from downstream branch {dist_git_branch!r}"
        pr_title = f"Update from downstream branch {dist_git_branch!r}"
        self.up.commit(title=commit_msg, msg=description)
        # the branch may already be up, let's push forcefully
        source_branch, fork_username = self.up.push_to_fork(
            self.up.local_project.ref,
            fork=fork,
            force=True,
            remote_name=remote_name,
        )
        self.up.create_pull(
            pr_title,
            description,
            source_branch=source_branch,
            target_branch=upstream_branch,
            fork_username=fork_username,
        )