def get_job_input_from_dist_git_commit(
    self, event: dict
) -> Optional[Tuple[JobTriggerType, PackageConfig, GitProject]]:
    """
    Process a dist-git fedmsg event announcing newly pushed commits.

    :param event: raw fedmsg payload as a dict
    :return: (JobTriggerType.commit, package config, dist-git project)
             or None when the topic does not match or required fields
             are missing
    """
    topic = nested_get(event, "topic")
    logger.debug(f"topic = {topic}")
    if topic == NewDistGitCommit.topic:
        repo_namespace = nested_get(event, "msg", "commit", "namespace")
        repo_name = nested_get(event, "msg", "commit", "repo")
        ref = nested_get(event, "msg", "commit", "branch")
        if not (repo_namespace and repo_name):
            logger.warning(
                "We could not figure out the full name of the repository.")
            return None
        if not ref:
            logger.warning("Target branch for the new commits is not set.")
            return None
        logger.info(
            f"New commits added to dist-git repo {repo_namespace}/{repo_name}, branch {ref}."
        )
        msg_id = nested_get(event, "msg_id")
        logger.info(f"msg_id = {msg_id}")
        dg_proj = self.pagure_service.get_project(repo=repo_name,
                                                  namespace=repo_namespace)
        # packit config is read from the branch the commits landed on
        package_config = get_package_config_from_repo(dg_proj, ref)
        return JobTriggerType.commit, package_config, dg_proj
    return None
def parse_koji_task_event(event) -> Optional[KojiTaskEvent]:
    """
    Parse a koji "task state change" message into a KojiTaskEvent.

    :param event: raw message payload
    :return: KojiTaskEvent or None for other topics / missing state info
    """
    if event.get("topic"
                 ) != "org.fedoraproject.prod.buildsys.task.state.change":
        return None

    build_id = event.get("id")
    logger.info(f"Koji event: build_id={build_id}")

    # "info.state" is only used as a presence check; the actual new/old
    # states are read from the top-level "new"/"old" keys below.
    state = nested_get(event, "info", "state")
    if not state:
        logger.debug("Cannot find build state.")
        return None

    state_enum = KojiTaskState(
        event.get("new")) if "new" in event else None
    old_state = KojiTaskState(event.get("old")) if "old" in event else None

    start_time = nested_get(event, "info", "start_time")
    completion_time = nested_get(event, "info", "completion_time")

    # find the child task that actually built the RPMs
    rpm_build_task_id = None
    for children in nested_get(event, "info", "children", default=[]):
        if children.get("method") == "buildArch":
            rpm_build_task_id = children.get("id")
            break

    return KojiTaskEvent(
        build_id=build_id,
        state=state_enum,
        old_state=old_state,
        start_time=start_time,
        completion_time=completion_time,
        rpm_build_task_id=rpm_build_task_id,
    )
def get_job_input_from_github_release(
    self, event: dict
) -> Optional[Tuple[JobTriggerType, PackageConfig, GitProject]]:
    """
    look into the provided event and see if it's one for a published github
    release; if it is, process it and return input for the job handler
    """
    action = nested_get(event, "action")
    logger.debug(f"action = {action}")
    release = nested_get(event, "release")
    # only act on "published" releases that carry a release object
    if action == "published" and release:
        repo_namespace = nested_get(event, "repository", "owner", "login")
        repo_name = nested_get(event, "repository", "name")
        if not (repo_namespace and repo_name):
            logger.warning(
                "We could not figure out the full name of the repository.")
            return None
        release_ref = nested_get(event, "release", "tag_name")
        if not release_ref:
            logger.warning("Release tag name is not set.")
            return None
        logger.info(
            f"New release event {release_ref} for repo {repo_namespace}/{repo_name}."
        )
        gh_proj = get_github_project(self.config,
                                     repo=repo_name,
                                     namespace=repo_namespace)
        # packit config is read from the tagged commit
        package_config = get_package_config_from_repo(gh_proj, release_ref)
        https_url = event["repository"]["html_url"]
        # record where the upstream project lives so jobs can clone it
        package_config.upstream_project_url = https_url
        return JobTriggerType.release, package_config, gh_proj
    return None
def get_package_config_from_github_release(
    self, event: dict
) -> Optional[Tuple[JobTriggerType, PackageConfig, GitProject]]:
    """
    look into the provided event and see if it's one for a published github
    release
    """
    action = nested_get(event, "action")
    logger.debug(f"action = {action}")
    release = nested_get(event, "release")
    # only act on "published" releases that carry a release object
    if action == "published" and release:
        repo_namespace = nested_get(event, "repository", "owner", "login")
        repo_name = nested_get(event, "repository", "name")
        if not (repo_namespace and repo_name):
            logger.warning(
                "We could not figure out the full name of the repository.")
            return None
        release_ref = nested_get(event, "release", "tag_name")
        if not release_ref:
            logger.warning("Release tag name is not set.")
            return None
        logger.info(
            f"New release event {release_ref} for repo {repo_namespace}/{repo_name}."
        )
        gh_proj = GithubProject(repo=repo_name,
                                namespace=repo_namespace,
                                service=self.github_service)
        # packit config is read from the tagged commit
        package_config = get_packit_config_from_repo(gh_proj, release_ref)
        return JobTriggerType.release, package_config, gh_proj
    return None
def parse_release_event(event) -> Optional[ReleaseEvent]:
    """
    Check whether the webhook payload announces a published GitHub
    release; if so, build the ReleaseEvent consumed by the job handler.

    :param event: raw webhook payload
    :return: ReleaseEvent or None when the payload is not usable
    """
    release = event.get("release")
    action = event.get("action")
    # anything other than a published release is ignored
    if action != "published" or not release:
        return None

    logger.info(f"GitHub release {release} event, action = {action}.")

    namespace = nested_get(event, "repository", "owner", "login")
    name = nested_get(event, "repository", "name")
    if not namespace or not name:
        logger.warning("No full name of the repository.")
        return None

    tag = nested_get(event, "release", "tag_name")
    if not tag:
        logger.warning("Release tag name is not set.")
        return None

    logger.info(
        f"New release event {tag} for repo {namespace}/{name}."
    )
    url = event["repository"]["html_url"]
    return ReleaseEvent(namespace, name, tag, url)
def parse_distgit_event(event) -> Optional[DistGitEvent]:
    """
    Parse a dist-git fedmsg event announcing newly pushed commits.

    :param event: raw fedmsg payload
    :return: DistGitEvent or None when the topic does not match or
             required fields are missing
    """
    topic = event.get("topic")
    if topic == NewDistGitCommit.topic:
        logger.info(f"Dist-git commit event, topic: {topic}")
        repo_namespace = nested_get(event, "msg", "commit", "namespace")
        repo_name = nested_get(event, "msg", "commit", "repo")
        ref = nested_get(event, "msg", "commit", "branch")
        if not (repo_namespace and repo_name):
            logger.warning("No full name of the repository.")
            return None
        if not ref:
            logger.warning("Target branch for the new commits is not set.")
            return None
        logger.info(
            f"New commits added to dist-git repo {repo_namespace}/{repo_name}, branch {ref}."
        )
        msg_id = event.get("msg_id")
        logger.info(f"msg_id = {msg_id}")
        # "ref" and "branch" both come from the same payload field
        # ("msg.commit.branch"); the redundant second lookup was removed,
        # so "ref" is passed for both positional arguments.
        return DistGitEvent(topic, repo_namespace, repo_name, ref, ref, msg_id)
    return None
def parse_release_event(event) -> Optional[ReleaseEvent]:
    """
    https://developer.github.com/v3/activity/events/types/#releaseevent
    https://developer.github.com/v3/repos/releases/#get-a-single-release

    Inspect the payload; when it announces a published GitHub release,
    build the ReleaseEvent the job handler consumes.
    """
    release = event.get("release")
    action = event.get("action")
    if not release or action != "published":
        return None

    logger.info(f"GitHub release {release} {action!r} event.")

    namespace = nested_get(event, "repository", "owner", "login")
    project = nested_get(event, "repository", "name")
    if not namespace or not project:
        logger.warning("No full name of the repository.")
        return None

    tag_name = nested_get(event, "release", "tag_name")
    if not tag_name:
        logger.warning("Release tag name is not set.")
        return None

    logger.info(
        f"New release event {tag_name!r} for repo {namespace}/{project}."
    )
    return ReleaseEvent(
        namespace, project, tag_name, event["repository"]["html_url"]
    )
def parse_gitlab_push_event(event) -> Optional[PushGitlabEvent]:
    """
    Look into the provided event and see if it's one for a new push to
    the gitlab branch.
    https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#push-events

    :param event: raw webhook payload
    :return: PushGitlabEvent or None when the payload is not a usable
             push event
    """
    if event.get("object_kind") != "push":
        return None

    raw_ref = event.get("ref")
    before = event.get("before")
    pusher = event.get("user_username")
    commits = event.get("commits")

    if not (raw_ref and commits and before and pusher):
        return None

    number_of_commits = event.get("total_commits_count")
    if not number_of_commits:
        logger.warning("No number of commits info from event.")

    # "refs/heads/<branch>" -> "<branch>"; keep raw_ref intact so the log
    # below shows the original ref string, not a split list. (The former
    # post-split "not raw_ref" checks could never trigger and referenced
    # the wrong variable in their warnings — removed.)
    ref = raw_ref.split("/", maxsplit=2)[-1]
    head_commit = commits[-1]["id"]

    logger.info(
        f"Gitlab push event on '{raw_ref}': {before[:8]} -> {head_commit[:8]} "
        f"by {pusher} "
        f"({number_of_commits} {'commit' if number_of_commits == 1 else 'commits'})"
    )

    repo_path_with_namespace = nested_get(event, "project", "path_with_namespace")
    if not repo_path_with_namespace:
        logger.warning("No full name of the repository.")
        return None
    # rsplit keeps nested namespaces ("group/subgroup/project") working:
    # everything before the last "/" is the namespace
    repo_namespace, repo_name = repo_path_with_namespace.rsplit("/", 1)

    repo_url = nested_get(event, "project", "web_url")
    if not repo_url:
        logger.warning("No repo url info from event.")

    return PushGitlabEvent(
        repo_namespace=repo_namespace,
        repo_name=repo_name,
        git_ref=ref,
        project_url=repo_url,
        commit_sha=head_commit,
    )
def parse_push_pagure_event(event) -> Optional[PushPagureEvent]:
    """this corresponds to dist-git event when someone pushes new commits"""
    topic = event.get("topic")
    if topic != "org.fedoraproject.prod.git.receive":
        return None
    logger.info(f"Dist-git commit event, topic: {topic}")

    dg_repo_namespace = nested_get(event, "commit", "namespace")
    dg_repo_name = nested_get(event, "commit", "repo")
    if not (dg_repo_namespace and dg_repo_name):
        logger.warning("No full name of the repository.")
        return None

    dg_branch = nested_get(event, "commit", "branch")
    dg_commit = nested_get(event, "commit", "rev")
    if not (dg_branch and dg_commit):
        logger.warning("Target branch/rev for the new commits is not set.")
        return None

    logger.info(
        f"New commits added to dist-git repo {dg_repo_namespace}/{dg_repo_name},"
        f"rev: {dg_commit}, branch: {dg_branch}")

    # allow overriding the dist-git instance (e.g. staging) via env var;
    # the base URL is expected to end with "/" — TODO confirm
    dg_base_url = getenv("DISTGIT_URL", PROD_DISTGIT_URL)
    dg_project_url = f"{dg_base_url}{dg_repo_namespace}/{dg_repo_name}"

    return PushPagureEvent(
        repo_namespace=dg_repo_namespace,
        repo_name=dg_repo_name,
        git_ref=dg_branch,
        project_url=dg_project_url,
        commit_sha=dg_commit,
    )
def parse_github_push_event(event) -> Optional[PushGitHubEvent]:
    """
    Look into the provided event and see if it's one for a new push to the github branch.

    :param event: raw webhook payload
    :return: PushGitHubEvent or None when the payload is not a usable
             push event
    """
    raw_ref = event.get("ref")
    before = event.get("before")
    pusher = nested_get(event, "pusher", "name")

    # https://developer.github.com/v3/activity/events/types/#pushevent
    # > Note: The webhook payload example following the table differs
    # > significantly from the Events API payload described in the table.
    head_commit = (
        event.get("head") or event.get("after") or event.get("head_commit")
    )

    if not (raw_ref and head_commit and before and pusher):
        return None
    elif event.get("deleted"):
        # branch deletions also arrive as push events; nothing to build
        logger.info(
            f"GitHub push event on '{raw_ref}' by {pusher} to delete branch"
        )
        return None

    number_of_commits = event.get("size")
    if number_of_commits is None and "commits" in event:
        number_of_commits = len(event.get("commits"))

    # "refs/heads/<branch>" -> "<branch>"
    ref = raw_ref.split("/", maxsplit=2)[-1]

    logger.info(
        f"GitHub push event on '{raw_ref}': {before[:8]} -> {head_commit[:8]} "
        f"by {pusher} "
        f"({number_of_commits} {'commit' if number_of_commits == 1 else 'commits'})"
    )

    repo_namespace = nested_get(event, "repository", "owner", "login")
    repo_name = nested_get(event, "repository", "name")
    if not (repo_namespace and repo_name):
        logger.warning("No full name of the repository.")
        return None

    repo_url = nested_get(event, "repository", "html_url")

    return PushGitHubEvent(
        repo_namespace=repo_namespace,
        repo_name=repo_name,
        git_ref=ref,
        project_url=repo_url,
        commit_sha=head_commit,
    )
def parse_pull_request_comment_event(
    event, ) -> Optional[PullRequestCommentGithubEvent]:
    """Look into the provided event and see if it is Github PR comment event."""
    # PR comments arrive as issue_comment events; the "pull_request" key
    # distinguishes them from plain issue comments
    if not nested_get(event, "issue", "pull_request"):
        return None

    pr_id = nested_get(event, "issue", "number")
    action = event.get("action")
    if action not in {"created", "edited"} or not pr_id:
        return None

    comment = nested_get(event, "comment", "body")
    comment_id = nested_get(event, "comment", "id")
    logger.info(
        f"Github PR#{pr_id} comment: {comment!r} id#{comment_id} {action!r} event."
    )

    # NOTE(review): this is the issue author's login, not a repo owner —
    # the issue_comment payload does not carry the PR's head repo info
    base_repo_namespace = nested_get(event, "issue", "user", "login")
    base_repo_name = nested_get(event, "repository", "name")
    if not (base_repo_name and base_repo_namespace):
        logger.warning("No full name of the repository.")
        return None

    user_login = nested_get(event, "comment", "user", "login")
    if not user_login:
        logger.warning("No GitHub login name from event.")
        return None
    # don't react to our own bot comments to avoid loops
    if user_login in {
            "packit-as-a-service[bot]", "packit-as-a-service-stg[bot]"
    }:
        logger.debug("Our own comment.")
        return None

    target_repo_namespace = nested_get(event, "repository", "owner", "login")
    target_repo_name = nested_get(event, "repository", "name")

    logger.info(
        f"Target repo: {target_repo_namespace}/{target_repo_name}.")
    https_url = event["repository"]["html_url"]
    return PullRequestCommentGithubEvent(
        action=PullRequestCommentAction[action],
        pr_id=pr_id,
        base_repo_namespace=base_repo_namespace,
        base_repo_name=None,
        base_ref=None,  # the payload does not include this info
        target_repo_namespace=target_repo_namespace,
        target_repo_name=target_repo_name,
        project_url=https_url,
        actor=user_login,
        comment=comment,
        comment_id=comment_id,
    )
def run(self):
    """
    Sync new dist-git commits back into the upstream repository.

    :raises PackitException: when the specfile has no upstream URL
    """
    # rev is a commit
    # we use branch on purpose so we get the latest thing
    # TODO: check if rev is HEAD on {branch}, warn then?
    branch = nested_get(self.event, "msg", "commit", "branch")

    # self.project is dist-git, we need to get upstream
    dg = DistGit(self.config, self.package_config)
    self.package_config.upstream_project_url = (
        dg.get_project_url_from_distgit_spec())
    if not self.package_config.upstream_project_url:
        raise PackitException(
            "URL in specfile is not set. We don't know where the upstream project lives."
        )

    n, r = get_namespace_and_repo_name(
        self.package_config.upstream_project_url)
    up = self.upstream_service.get_project(repo=r, namespace=n)
    lp = LocalProject(git_project=up)

    api = PackitAPI(self.config, self.package_config, lp)
    api.sync_from_downstream(
        dist_git_branch=branch,
        upstream_branch="master",  # TODO: this should be configurable
    )
def parse_installation_event(event) -> Optional[InstallationEvent]:
    """
    Look into the provided event and see Github App installation details.
    """
    # Check if installation key in JSON isn't enough, we have to check the account as well
    if not nested_get(event, "installation", "account"):
        return None

    installation = event["installation"]
    account = installation["account"]
    sender = event["sender"]

    action = event.get("action")  # created or deleted
    installation_id = installation["id"]
    logger.info(
        f"Github App installation event. Action: {action}, "
        f"id: {installation_id}, account: {account}, "
        f"sender: {sender}")

    return InstallationEvent(
        installation_id,
        account["login"],
        account["id"],
        account["url"],
        account["type"],  # User or Organization
        installation["created_at"],
        sender["id"],
        sender["login"],
    )
def run(self) -> HandlerResults:
    """
    Sync new dist-git commits back into the upstream repository.

    :return: HandlerResults carrying a success flag and details
    """
    # rev is a commit
    # we use branch on purpose so we get the latest thing
    # TODO: check if rev is HEAD on {branch}, warn then?
    branch = nested_get(self.event, "msg", "commit", "branch")

    # self.project is dist-git, we need to get upstream
    dg = DistGit(self.config, self.package_config)
    self.package_config.upstream_project_url = (
        dg.get_project_url_from_distgit_spec()
    )
    if not self.package_config.upstream_project_url:
        return HandlerResults(
            success=False,
            details={
                "msg": "URL in specfile is not set. "
                "We don't know where the upstream project lives."
            },
        )

    n, r = get_namespace_and_repo_name(self.package_config.upstream_project_url)
    up = self.upstream_service.get_project(repo=r, namespace=n)
    self.local_project = LocalProject(
        git_project=up, working_dir=self.config.command_handler_work_dir
    )

    self.api = PackitAPI(self.config, self.package_config, self.local_project)
    self.api.sync_from_downstream(
        dist_git_branch=branch,
        upstream_branch="master",  # TODO: this should be configurable
    )
    return HandlerResults(success=True, details={})
def handle_pull_request(self):
    """
    Run a copr build for a pull request, report status back to the PR,
    and comment with install instructions when the build succeeds.
    """
    if not self.job.metadata.get("targets"):
        logger.error(
            "'targets' value is required in packit config for copr_build job"
        )

    pr_id_int = nested_get(self.event, "number")
    pr_id = str(pr_id_int)
    local_project = LocalProject(git_project=self.project,
                                 pr_id=pr_id,
                                 git_service=self.project.service)
    api = PackitAPI(self.config, self.package_config, local_project)

    # one copr project per namespace/repo/PR unless overridden in config
    default_project_name = f"{self.project.namespace}-{self.project.repo}-{pr_id}"
    owner = self.job.metadata.get("owner") or "packit"
    project = self.job.metadata.get("project") or default_project_name
    commit_sha = nested_get(self.event, "pull_request", "head", "sha")
    r = BuildStatusReporter(self.project, commit_sha)
    try:
        build_id, repo_url = api.run_copr_build(
            owner=owner,
            project=project,
            chroots=self.job.metadata.get("targets"))
    except FailedCreateSRPM:
        r.report("failure", "Failed to create SRPM.")
        return

    timeout = 60 * 60 * 2
    # TODO: document this and enforce int in config
    timeout_config = self.job.metadata.get("timeout")
    if timeout_config:
        timeout = int(timeout_config)
    # block until the build finishes (or times out), reporting status
    build_state = api.watch_copr_build(build_id, timeout, report_func=r.report)
    if build_state == "succeeded":
        msg = (
            f"Congratulations! The build [has finished]({repo_url})"
            " successfully. :champagne:\n\n"
            "You can install the built RPMs by following these steps:\n\n"
            "* `sudo yum install -y dnf-plugins-core` on RHEL 8\n"
            "* `sudo dnf install -y dnf-plugins-core` on Fedora\n"
            f"* `dnf copr enable {owner}/{project}`\n"
            "* And now you can install the packages.\n"
            "\nPlease note that the RPMs should be used only in a testing environment."
        )
        self.project.pr_comment(pr_id_int, msg)
def get_from_dict(cls, raw_dict: dict, validate=True) -> "PackageConfig":
    """
    Build a PackageConfig from its raw dict representation.

    :param raw_dict: parsed content of the packit config file
    :param validate: run schema validation before processing
    :return: PackageConfig instance
    """
    if validate:
        cls.validate(raw_dict)

    synced_files = raw_dict.get("synced_files", None)
    actions = raw_dict.get("actions", {})
    raw_jobs = raw_dict.get("jobs", [])
    create_tarball_command = raw_dict.get("create_tarball_command", None)
    current_version_command = raw_dict.get("current_version_command", None)

    # accept the old key name but warn about it
    upstream_project_name = cls.get_deprecated_key(
        raw_dict, "upstream_project_name", "upstream_name")
    upstream_project_url = raw_dict.get("upstream_project_url", None)

    if raw_dict.get("dist_git_url", None):
        logger.warning(
            "dist_git_url is no longer being processed, "
            "it is generated from dist_git_base_url and downstream_package_name"
        )
    downstream_package_name = cls.get_deprecated_key(
        raw_dict, "downstream_package_name", "package_name")

    specfile_path = raw_dict.get("specfile_path", None)
    if not specfile_path:
        if downstream_package_name:
            # fall back to "<package>.spec" when the path is not given
            specfile_path = f"{downstream_package_name}.spec"
            logger.info(f"We guess that spec file is at {specfile_path}")
        else:
            # guess it?
            logger.warning("Path to spec file is not set.")

    dist_git_base_url = raw_dict.get("dist_git_base_url", None)
    dist_git_namespace = raw_dict.get("dist_git_namespace", None)
    upstream_ref = nested_get(raw_dict, "upstream_ref")
    allowed_gpg_keys = raw_dict.get("allowed_gpg_keys", None)
    create_pr = raw_dict.get("create_pr", False)

    pc = PackageConfig(
        specfile_path=specfile_path,
        synced_files=SyncFilesConfig.get_from_dict(synced_files, validate=False),
        actions={ActionName(a): cmd for a, cmd in actions.items()},
        jobs=[
            JobConfig.get_from_dict(raw_job, validate=False)
            for raw_job in raw_jobs
        ],
        upstream_project_name=upstream_project_name,
        downstream_package_name=downstream_package_name,
        upstream_project_url=upstream_project_url,
        dist_git_base_url=dist_git_base_url,
        dist_git_namespace=dist_git_namespace,
        create_tarball_command=create_tarball_command,
        current_version_command=current_version_command,
        upstream_ref=upstream_ref,
        allowed_gpg_keys=allowed_gpg_keys,
        create_pr=create_pr,
    )
    return pc
def parse_gitlab_issue_comment_event(event) -> Optional[IssueCommentGitlabEvent]:
    """Look into the provided event and see if it is Gitlab Issue comment event."""
    if event.get("object_kind") != "note":
        return None

    # the "issue" key distinguishes issue notes from MR/commit notes
    issue = event.get("issue")
    if not issue:
        return None

    issue_id = nested_get(event, "issue", "iid")
    if not issue_id:
        logger.warning("No issue id from the event.")
        return None
    comment = nested_get(event, "object_attributes", "note")
    comment_id = nested_get(event, "object_attributes", "id")
    if not (comment and comment_id):
        logger.warning("No note or note id from the event.")
        return None

    state = nested_get(event, "issue", "state")
    if not state:
        logger.warning("No state from the event.")
        return None
    # comments on closed issues are ignored
    if state != "opened":
        return None
    # GitLab only sets "action" for reopen/update notes; fall back to the
    # issue state so GitlabEventAction lookup below always has a value
    action = nested_get(event, "object_attributes", "action")
    if action not in {"reopen", "update"}:
        action = state

    logger.info(
        f"Gitlab issue ID: {issue_id} comment: {comment!r} {action!r} event."
    )

    project_url = nested_get(event, "project", "web_url")
    if not project_url:
        logger.warning("Target project url not found in the event.")
        return None
    parsed_url = parse_git_repo(potential_url=project_url)
    logger.info(
        f"Project: "
        f"repo={parsed_url.repo} "
        f"namespace={parsed_url.namespace} "
        f"url={project_url}."
    )

    username = nested_get(event, "user", "username")
    if not username:
        logger.warning("No Gitlab username from event.")
        return None

    return IssueCommentGitlabEvent(
        action=GitlabEventAction[action],
        issue_id=issue_id,
        repo_namespace=parsed_url.namespace,
        repo_name=parsed_url.repo,
        project_url=project_url,
        username=username,
        comment=comment,
        comment_id=comment_id,
    )
def _parse_tf_result_xunit(xunit: Optional[str]) -> List[TestResult]:
    """
    Parse event["result"]["xunit"] to get tests results
    """
    if not xunit:
        return []

    xunit_dict = xmltodict.parse(xunit)
    testcases = nested_get(
        xunit_dict, "testsuites", "testsuite", "testcase", default=[])

    results = []
    for case in testcases:
        # the second <log> entry holds the per-case log link
        log_url = nested_get(case, "logs", "log", 1, "@href", default="")
        results.append(
            TestResult(
                name=case["@name"],
                result=TestingFarmResult(case["@result"]),
                log_url=log_url,
            ))
    return results
def process_jobs(
    self,
    trigger: JobTriggerType,
    package_config: PackageConfig,
    event: dict,
    project: GitProject,
) -> Dict[str, HandlerResults]:
    """
    Run a job handler (if trigger matches) for every job defined in config.

    :param trigger: what kind of event fired
    :param package_config: packit config of the repo
    :param event: raw event payload
    :param project: repo the event belongs to
    :return: mapping of job name -> HandlerResults
    """
    handlers_results = {}
    for job in package_config.jobs:
        if trigger == job.trigger:
            handler_kls = JOB_NAME_HANDLER_MAPPING.get(job.job, None)
            if not handler_kls:
                logger.warning(f"There is no handler for job {job}")
                continue
            handler = handler_kls(
                self.config,
                package_config,
                event,
                project,
                self.pagure_service,
                project.service,
                job,
                trigger,
            )
            try:
                # check whitelist approval for every job to be able to track down which jobs
                # failed because of missing whitelist approval
                whitelist = Whitelist()
                if not whitelist.is_approved(project.namespace):
                    logger.error(
                        f"User {project.namespace} is not approved on whitelist!"
                    )
                    # TODO also check blacklist,
                    # but for that we need to know who triggered the action
                    commit_sha = nested_get(event, "pull_request", "head", "sha")
                    r = BuildStatusReporter(project, commit_sha)
                    msg = "Account is not whitelisted!"
                    r.report("failure", msg, url=FAQ_URL)
                    handlers_results[job.job.value] = HandlerResults(
                        success=False, details={"msg": msg}
                    )
                    # returning from inside try: finally still runs clean()
                    return handlers_results

                handlers_results[job.job.value] = handler.run()
                # don't break here, other handlers may react to the same event
            finally:
                # no except clause on purpose: exceptions propagate to the
                # caller, but the handler is always cleaned up
                handler.clean()
    return handlers_results
def parse_testing_farm_results_event(
        event) -> Optional[TestingFarmResultsEvent]:
    """
    this corresponds to testing farm results event

    :param event: raw results payload
    :return: TestingFarmResultsEvent or None when there is no pipeline id
    """
    pipeline_id: str = nested_get(event, "pipeline", "id")
    if not pipeline_id:
        return None

    result: TestingFarmResult = TestingFarmResult(event.get("result"))
    environment: str = nested_get(event, "environment", "image")
    message: str = event.get("message")
    log_url: str = event.get("url")
    copr_repo_name: str = nested_get(event, "artifact", "copr-repo-name")
    copr_chroot: str = nested_get(event, "artifact", "copr-chroot")
    repo_name: str = nested_get(event, "artifact", "repo-name")
    repo_namespace: str = nested_get(event, "artifact", "repo-namespace")
    ref: str = nested_get(event, "artifact", "git-ref")
    https_url: str = nested_get(event, "artifact", "git-url")
    commit_sha: str = nested_get(event, "artifact", "commit-sha")

    # per-test results; "log" is optional per test case
    tests: List[TestResult] = [
        TestResult(
            name=raw_test["name"],
            result=TestingFarmResult(raw_test["result"]),
            log_url=raw_test.get("log"),
        ) for raw_test in event.get("tests", [])
    ]

    logger.info(
        f"Results from Testing farm event. Pipeline ID: {pipeline_id}")
    logger.debug(f"environment: {environment}, message: {message}, "
                 f"log_url: {log_url}, artifact: {event.get('artifact')}")

    return TestingFarmResultsEvent(
        pipeline_id=pipeline_id,
        result=result,
        environment=environment,
        message=message,
        log_url=log_url,
        copr_repo_name=copr_repo_name,
        copr_chroot=copr_chroot,
        tests=tests,
        repo_namespace=repo_namespace,
        repo_name=repo_name,
        git_ref=ref,
        project_url=https_url,
        commit_sha=commit_sha,
    )
def parse_pr_event(event) -> Optional[PullRequestGithubEvent]:
    """
    Look into the provided event and see if it's one for a new github PR.
    """
    if not event.get("pull_request"):
        return None

    pr_id = event.get("number")
    action = event.get("action")
    if action not in {"opened", "reopened", "synchronize"} or not pr_id:
        return None

    logger.info(f"GitHub PR#{pr_id} {action!r} event.")

    # we can't use head repo here b/c the app is set up against the upstream repo
    # and not the fork, on the other hand, we don't process packit.yaml from
    # the PR but what's in the upstream
    base_repo_namespace = nested_get(event, "pull_request", "head", "repo",
                                     "owner", "login")
    base_repo_name = nested_get(event, "pull_request", "head", "repo", "name")

    if not (base_repo_name and base_repo_namespace):
        logger.warning("No full name of the repository.")
        return None

    # NOTE(review): despite the name, this is the head commit sha — same
    # lookup as commit_sha below
    base_ref = nested_get(event, "pull_request", "head", "sha")
    if not base_ref:
        logger.warning("Ref where the PR is coming from is not set.")
        return None

    user_login = nested_get(event, "pull_request", "user", "login")
    if not user_login:
        logger.warning("No GitHub login name from event.")
        return None

    target_repo_namespace = nested_get(event, "pull_request", "base", "repo",
                                       "owner", "login")
    target_repo_name = nested_get(event, "pull_request", "base", "repo", "name")
    logger.info(
        f"Target repo: {target_repo_namespace}/{target_repo_name}.")

    commit_sha = nested_get(event, "pull_request", "head", "sha")
    https_url = event["repository"]["html_url"]
    return PullRequestGithubEvent(
        action=PullRequestAction[action],
        pr_id=pr_id,
        base_repo_namespace=base_repo_namespace,
        base_repo_name=base_repo_name,
        base_ref=base_ref,
        target_repo_namespace=target_repo_namespace,
        target_repo_name=target_repo_name,
        project_url=https_url,
        commit_sha=commit_sha,
        user_login=user_login,
    )
def parse_issue_comment_event(event) -> Optional[IssueCommentEvent]:
    """Look into the provided event and see if it is Github issue comment event.

    :param event: raw webhook payload
    :return: IssueCommentEvent or None when the payload is not a usable
             issue comment event
    """
    # comments on PRs also arrive as issue_comment events; skip those here
    if nested_get(event, "issue", "pull_request"):
        return None

    issue_id = nested_get(event, "issue", "number")
    action = event.get("action")
    if action != "created" or not issue_id:
        return None

    comment = nested_get(event, "comment", "body")
    comment_id = nested_get(event, "comment", "id")
    if not (comment and comment_id):
        logger.warning("No comment or comment id from the event.")
        return None

    logger.info(
        f"Github issue#{issue_id} comment: {comment!r} {action!r} event.")

    base_repo_namespace = nested_get(event, "repository", "owner", "login")
    base_repo_name = nested_get(event, "repository", "name")
    if not (base_repo_namespace and base_repo_name):
        logger.warning("No full name of the repository.")
        # bail out like every other parser does; previously this fell
        # through and built an event with None namespace/name
        return None

    user_login = nested_get(event, "comment", "user", "login")
    if not user_login:
        logger.warning("No Github login name from event.")
        return None

    target_repo = nested_get(event, "repository", "full_name")
    logger.info(f"Target repo: {target_repo}.")
    https_url = nested_get(event, "repository", "html_url")
    return IssueCommentEvent(
        IssueCommentAction[action],
        issue_id,
        base_repo_namespace,
        base_repo_name,
        target_repo,
        https_url,
        user_login,
        comment,
        comment_id,
    )
def parse_distgit_event(event) -> Optional[DistGitEvent]:
    """
    this corresponds to dist-git event when someone pushes new commits
    """
    topic = event.get("topic")
    if topic != NewDistGitCommit.topic:
        return None

    logger.info(f"Dist-git commit event, topic: {topic}")

    namespace = nested_get(event, "msg", "commit", "namespace")
    repo = nested_get(event, "msg", "commit", "repo")
    if not namespace or not repo:
        logger.warning("No full name of the repository.")
        return None

    branch = nested_get(event, "msg", "commit", "branch")
    rev = nested_get(event, "msg", "commit", "rev")
    if not (branch and rev):
        logger.warning("Target branch/rev for the new commits is not set.")
        return None

    msg_id = event.get("msg_id")
    logger.info(
        f"New commits added to dist-git repo {namespace}/{repo},"
        f"rev: {rev}, branch: {branch}, msg_id: {msg_id}"
    )

    # TODO: get the right hostname without hardcoding
    return DistGitEvent(
        topic=topic,
        repo_namespace=namespace,
        repo_name=repo,
        ref=rev,
        branch=branch,
        msg_id=msg_id,
        project_url=f"https://src.fedoraproject.org/{namespace}/{repo}",
    )
def parse_pipeline_event(event) -> Optional[PipelineGitlabEvent]:
    """
    Look into the provided event and see if it is Gitlab Pipeline event.
    https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#pipeline-events

    :param event: raw webhook payload
    :return: PipelineGitlabEvent or None for other object kinds
    """
    if event.get("object_kind") != "pipeline":
        return None

    # Project where the pipeline runs. In case of MR pipeline this can be
    # either source project or target project depending on pipeline type.
    project_url = nested_get(event, "project", "web_url")
    project_name = nested_get(event, "project", "name")

    pipeline_id = nested_get(event, "object_attributes", "id")

    # source branch name
    git_ref = nested_get(event, "object_attributes", "ref")
    # source commit sha
    commit_sha = nested_get(event, "object_attributes", "sha")
    status = nested_get(event, "object_attributes", "status")
    detailed_status = nested_get(event, "object_attributes", "detailed_status")
    # merge_request_event or push
    source = nested_get(event, "object_attributes", "source")
    # merge_request is null if source == "push"
    merge_request_url = nested_get(event, "merge_request", "url")

    return PipelineGitlabEvent(
        project_url=project_url,
        project_name=project_name,
        pipeline_id=pipeline_id,
        git_ref=git_ref,
        status=status,
        detailed_status=detailed_status,
        commit_sha=commit_sha,
        source=source,
        merge_request_url=merge_request_url,
    )
def parse_installation_event(event) -> Optional[InstallationEvent]: """ Look into the provided event and see Github App installation details. """ # Check if installation key in JSON isn't enough, we have to check the account as well if not nested_get(event, "installation", "account"): return None action = event["action"] if action not in {"created", "added"}: # We're currently not interested in removed/deleted/updated event. return None installation_id = event["installation"]["id"] # if action == 'created' then repos are in repositories # if action == 'added' then repos are in repositories_added repositories = event.get("repositories") or event.get( "repositories_added", []) repo_names = [repo["full_name"] for repo in repositories] logger.info( f"Github App installation {action!r} event. id: {installation_id}") logger.debug(f"account: {event['installation']['account']}, " f"repositories: {repo_names}, sender: {event['sender']}") # namespace (user/organization) into which the app has been installed account_login = event["installation"]["account"]["login"] account_id = event["installation"]["account"]["id"] account_url = event["installation"]["account"]["url"] account_type = event["installation"]["account"][ "type"] # User or Organization created_at = event["installation"]["created_at"] # user who installed the app into 'account' sender_id = event["sender"]["id"] sender_login = event["sender"]["login"] return InstallationEvent( installation_id, account_login, account_id, account_url, account_type, created_at, repo_names, sender_id, sender_login, )
def get(self):
    """
    List all Celery tasks / jobs

    Returns a paginated JSON response (206 Partial Content) with a
    Content-Range header describing the slice served.
    """
    first, last = indices()
    tasks = []
    # The db.keys() always returns all matched keys, but there's no better way with redis.
    # Use islice (instead of [first:last]) to at least create an iterator instead of new list.
    keys = db.keys("celery-task-meta-*")
    for key in islice(keys, first, last):
        data = db.get(key)
        if data:
            data = loads(data)
            event = nested_get(data, "result", "event")
            if event:
                # timestamp to datetime string
                data["result"]["event"] = Event.ts2str(event)
            tasks.append(data)

    resp = make_response(dumps(tasks), HTTPStatus.PARTIAL_CONTENT)
    # 1-based inclusive range, "tasks <first>-<last>/<total>"
    resp.headers["Content-Range"] = f"tasks {first+1}-{last}/{len(keys)}"
    resp.headers["Content-Type"] = "application/json"
    return resp
def parse_pull_request_comment_event(
    event,
) -> Optional[PullRequestCommentEvent]:
    """ Look into the provided event and see if it is Github PR comment event. """
    if not nested_get(event, "issue", "pull_request"):
        return None

    pr_id = nested_get(event, "issue", "number")
    action = event.get("action")
    # Only newly created or edited comments on an identifiable PR are relevant.
    if action not in ["created", "edited"] or not pr_id:
        return None

    logger.info(f"GitHub PR#{pr_id} comment event. Action: {action}.")

    base_repo_namespace = nested_get(event, "repository", "owner", "login")
    base_repo_name = nested_get(event, "repository", "name")
    if not (base_repo_name and base_repo_namespace):
        logger.warning("No full name of the repository.")
        return None

    github_login = nested_get(event, "comment", "user", "login")
    if not github_login:
        logger.warning("No GitHub login name from event.")
        return None

    target_repo = nested_get(event, "repository", "full_name")
    logger.info(f"Target repo: {target_repo}.")

    comment = nested_get(event, "comment", "body")
    https_url = event["repository"]["html_url"]
    return PullRequestCommentEvent(
        PullRequestCommentAction[action],
        pr_id,
        base_repo_namespace,
        base_repo_name,
        None,  # the payload does not include this info
        target_repo,
        https_url,
        github_login,
        comment,
    )
def get_job_input_from_github_pr(
    self, event: dict
) -> Optional[Tuple[JobTriggerType, PackageConfig, GitProject]]:
    """ look into the provided event and see if it's one for a new github pr """
    action = nested_get(event, "action")
    logger.debug(f"action = {action}")
    pr_id = nested_get(event, "number")

    if not nested_get(event, "pull_request"):
        logger.info("Not a pull request event.")
        return None
    # Only opened/reopened/synchronize actions with a PR number are processed.
    if action not in ["opened", "reopened", "synchronize"] or not pr_id:
        return None

    # we can't use head repo here b/c the app is set up against the upstream repo
    # and not the fork, on the other hand, we don't process packit.yaml from
    # the PR but what's in the upstream
    base_repo_namespace = nested_get(
        event, "pull_request", "base", "repo", "owner", "login"
    )
    base_repo_name = nested_get(event, "pull_request", "base", "repo", "name")
    if not (base_repo_name and base_repo_namespace):
        logger.warning("We could not figure out the full name of the repository.")
        return None

    base_ref = nested_get(event, "pull_request", "head", "sha")
    if not base_ref:
        logger.warning("Ref where the PR is coming from is not set.")
        return None

    target_repo = nested_get(event, "repository", "full_name")
    logger.info(f"GitHub pull request {pr_id} event for repo {target_repo}.")

    gh_proj = get_github_project(
        self.config, repo=base_repo_name, namespace=base_repo_namespace
    )
    package_config = get_package_config_from_repo(gh_proj, base_ref)
    package_config.upstream_project_url = event["repository"]["html_url"]
    return JobTriggerType.pull_request, package_config, gh_proj
def get_job_input_from_github_app_installation(
    self, event: dict
) -> Optional[Tuple[JobTriggerType, GithubAppData]]:
    """ look into the provided event and see github app installation details """
    action = nested_get(event, "action")  # created or deleted
    logger.debug(f"action = {action}")

    installation = event.get("installation", None)
    if not installation:
        return None
    # it is not enough to check if installation key in JSON, we have to check the account
    account = installation.get("account", None)
    if not account:
        return None

    github_app_data = GithubAppData(
        installation["id"],
        account["login"],
        account["id"],
        account["url"],
        account["type"],  # User or Organization
        installation["created_at"],
        # user who installed the app into the account above
        event["sender"]["id"],
        event["sender"]["login"],
    )
    return JobTriggerType.installation, github_app_data
def parse_merge_request_comment_event(
    event,
) -> Optional[MergeRequestCommentGitlabEvent]:
    """ Look into the provided event and see if it is Gitlab MR comment event. """
    if event.get("object_kind") != "note":
        return None
    if not event.get("merge_request"):
        return None

    state = nested_get(event, "merge_request", "state")
    if state != "opened":
        return None

    action = nested_get(event, "merge_request", "action")
    if action not in {"reopen", "update"}:
        # fall back to the MR state when the action is not one we track
        action = state

    # NOTE(review): a missing iid/id only logs a warning here and processing
    # continues, unlike the missing fields below which abort — confirm intended.
    object_iid = nested_get(event, "merge_request", "iid")
    if not object_iid:
        logger.warning("No object iid from the event.")
    object_id = nested_get(event, "merge_request", "id")
    if not object_id:
        logger.warning("No object id from the event.")

    comment = nested_get(event, "object_attributes", "note")
    logger.info(
        f"Gitlab MR id#{object_id} iid#{object_iid} comment: {comment!r} {action!r} event."
    )

    source_project_url = nested_get(event, "merge_request", "source", "web_url")
    if not source_project_url:
        logger.warning("Source project url not found in the event.")
        return None
    parsed_source_url = parse_git_repo(potential_url=source_project_url)
    logger.info(
        f"Source: repo={parsed_source_url.repo} "
        f"namespace={parsed_source_url.namespace} "
        f"url={source_project_url}."
    )

    target_project_url = nested_get(event, "project", "web_url")
    if not target_project_url:
        logger.warning("Target project url not found in the event.")
        return None
    parsed_target_url = parse_git_repo(potential_url=target_project_url)
    logger.info(
        f"Target: repo={parsed_target_url.repo} "
        f"namespace={parsed_target_url.namespace} "
        f"url={target_project_url}."
    )

    username = nested_get(event, "user", "username")
    if not username:
        logger.warning("No Gitlab username from event.")
        return None

    commit_sha = nested_get(event, "merge_request", "last_commit", "id")
    if not commit_sha:
        logger.warning("No commit_sha from the event.")
        return None

    return MergeRequestCommentGitlabEvent(
        action=GitlabEventAction[action],
        object_id=object_id,
        object_iid=object_iid,
        source_repo_namespace=parsed_source_url.namespace,
        source_repo_name=parsed_source_url.repo,
        target_repo_namespace=parsed_target_url.namespace,
        target_repo_name=parsed_target_url.repo,
        project_url=target_project_url,
        username=username,
        comment=comment,
        commit_sha=commit_sha,
    )