def run(self) -> TaskResults:
    # self.project is dist-git, we need to get upstream
    dg = DistGit(self.service_config, self.job_config)
    self.job_config.upstream_project_url = dg.get_project_url_from_distgit_spec()
    if not self.job_config.upstream_project_url:
        return TaskResults(
            success=False,
            details={
                "msg": "URL in specfile is not set. "
                "We don't know where the upstream project lives."
            },
        )

    n, r = get_namespace_and_repo_name(self.job_config.upstream_project_url)
    up = self.project.service.get_project(repo=r, namespace=n)
    self.local_project = LocalProject(
        git_project=up, working_dir=self.service_config.command_handler_work_dir
    )

    self.api = PackitAPI(self.service_config, self.job_config, self.local_project)
    self.api.sync_from_downstream(
        # rev is a commit
        # we use branch on purpose so we get the latest thing
        # TODO: check if rev is HEAD on {branch}, warn then?
        dist_git_branch=self.branch,
        upstream_branch="master",  # TODO: this should be configurable
    )
    return TaskResults(success=True, details={})
def handle_srpm_end(self):
    url = get_srpm_build_info_url(self.build.id)

    if self.copr_event.status != COPR_API_SUCC_STATE:
        failed_msg = "SRPM build failed, check the logs for details."
        self.copr_build_helper.report_status_to_all(
            state=BaseCommitStatus.failure,
            description=failed_msg,
            url=url,
        )
        self.build.set_status(PG_BUILD_STATUS_FAILURE)
        self.copr_build_helper.monitor_not_submitted_copr_builds(
            len(self.copr_build_helper.build_targets), "srpm_failure"
        )
        return TaskResults(success=False, details={"msg": failed_msg})

    for build in CoprBuildModel.get_all_by_build_id(str(self.copr_event.build_id)):
        # from waiting_for_srpm to pending
        build.set_status("pending")

    self.build.set_status(PG_BUILD_STATUS_SUCCESS)
    self.copr_build_helper.report_status_to_all(
        state=BaseCommitStatus.running,
        description="SRPM build succeeded. Waiting for RPM build to start...",
        url=url,
    )
    msg = "SRPM build in Copr has finished."
    logger.debug(msg)
    return TaskResults(success=True, details={"msg": msg})
def run(self):
    if not self.build:
        model = (
            "SRPMBuildDB"
            if self.copr_event.chroot == COPR_SRPM_CHROOT
            else "CoprBuildDB"
        )
        msg = f"Copr build {self.copr_event.build_id} not in {model}."
        logger.warning(msg)
        return TaskResults(success=False, details={"msg": msg})

    self.set_start_time()
    self.set_logs_url()

    if self.copr_event.chroot == COPR_SRPM_CHROOT:
        url = get_srpm_build_info_url(self.build.id)
        self.copr_build_helper.report_status_to_all(
            description="SRPM build is in progress...",
            state=BaseCommitStatus.running,
            url=url,
        )
        msg = "SRPM build in Copr has started..."
        return TaskResults(success=True, details={"msg": msg})

    self.pushgateway.copr_builds_started.inc()
    url = get_copr_build_info_url(self.build.id)
    self.build.set_status("pending")

    self.copr_build_helper.report_status_to_all_for_chroot(
        description="RPM build is in progress...",
        state=BaseCommitStatus.running,
        url=url,
        chroot=self.copr_event.chroot,
    )
    msg = f"Build on {self.copr_event.chroot} in copr has started..."
    return TaskResults(success=True, details={"msg": msg})
def run_testing_farm_on_all(self):
    latest_copr_build = self.latest_copr_build
    if not latest_copr_build:
        return TaskResults(
            success=False,
            details={
                "msg": f"No copr builds for {self.job_owner}/{self.job_project}"
            },
        )

    failed = {}
    for chroot in self.tests_targets:
        result = self.run_testing_farm(
            build_id=int(latest_copr_build.build_id), chroot=chroot
        )
        if not result["success"]:
            failed[chroot] = result.get("details")

    if not failed:
        return TaskResults(success=True, details={})

    # dict.update() returns None, so build the details dict first
    # and then merge in the per-chroot failures
    details = {"msg": f"Failed testing farm targets: {list(failed.keys())}."}
    details.update(failed)
    return TaskResults(success=False, details=details)
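# A minimal standalone sketch (plain Python, no packit imports) of the pitfall
# fixed above: dict.update() mutates in place and returns None, so passing its
# return value as `details` would silently hand TaskResults a None.
failed = {"fedora-rawhide-x86_64": "boom"}  # hypothetical failed target

broken = {"msg": "Failed targets."}.update(failed)
assert broken is None  # update() returns None, not the merged dict

fixed = {"msg": "Failed targets.", **failed}  # build the merged dict directly
assert fixed["fedora-rawhide-x86_64"] == "boom"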
def run(self) -> TaskResults:
    if self.action != GitlabEventAction.opened:
        logger.debug("Won't run BugzillaHandler for already opened MR.")
        return TaskResults(success=True)

    logger.debug(
        f"About to create a bugzilla based on MR "
        f"{self.target_repo_namespace}/{self.target_repo_name}/{self.data.identifier} "
        f"branch {self.target_repo_branch}"
    )
    if not any(
        re.match(n, self.target_repo_namespace)
        for n in self.service_config.bugz_namespaces
    ):
        logger.debug(
            f"We accept only {self.service_config.bugz_namespaces} namespaces"
        )
        return TaskResults(success=True)
    if not any(
        re.match(b, self.target_repo_branch)
        for b in self.service_config.bugz_branches
    ):
        logger.debug(f"We accept only {self.service_config.bugz_branches} branches")
        return TaskResults(success=True)

    if not self.bz_model:
        self._create_bug()
    self._set_status()
    return TaskResults(success=True)
def run(self) -> TaskResults:
    testing_farm_helper = TestingFarmJobHelper(
        service_config=self.service_config,
        package_config=self.package_config,
        project=self.project,
        metadata=self.data,
        db_trigger=self.db_trigger,
        job_config=self.job_config,
    )
    user_can_merge_pr = self.project.can_merge_pr(self.data.user_login)
    if not (user_can_merge_pr or self.data.user_login in self.service_config.admins):
        self.project.pr_comment(
            self.db_trigger.pr_id, PERMISSIONS_ERROR_WRITE_OR_ADMIN
        )
        return TaskResults(
            success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
        )

    handler_results = TaskResults(success=True, details={})

    logger.debug(f"Test job config: {testing_farm_helper.job_tests}")
    if testing_farm_helper.job_tests:
        testing_farm_helper.run_testing_farm_on_all()
    else:
        logger.debug("Testing farm not in the job config.")

    return handler_results
def run(self) -> TaskResults:
    local_project = LocalProject(
        git_project=self.project,
        working_dir=self.service_config.command_handler_work_dir,
    )
    api = PackitAPI(
        config=self.service_config,
        # job_config and package_config are the same for PackitAPI
        # and we want to use job_config since people can override things in there
        package_config=self.job_config,
        upstream_local_project=local_project,
    )

    user_can_merge_pr = self.project.can_merge_pr(self.data.user_login)
    if not (user_can_merge_pr or self.data.user_login in self.service_config.admins):
        self.project.issue_comment(
            self.db_trigger.issue_id, PERMISSIONS_ERROR_WRITE_OR_ADMIN
        )
        return TaskResults(
            success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
        )

    if not self.data.tag_name:
        msg = (
            "There was an error while proposing a new update for the Fedora package: "
            "no upstream release found."
        )
        self.project.issue_comment(self.db_trigger.issue_id, msg)
        return TaskResults(success=False, details={"msg": "Propose update failed"})

    sync_failed = False
    for branch in self.dist_git_branches_to_sync:
        msg = (
            f"for the Fedora package `{self.job_config.downstream_package_name}` "
            f"with the tag `{self.data.tag_name}` in the `{branch}` branch.\n"
        )
        try:
            new_pr = api.sync_release(
                dist_git_branch=branch, version=self.data.tag_name, create_pr=True
            )
            msg = f"Packit-as-a-Service proposed [a new update]({new_pr.url}) {msg}"
            self.project.issue_comment(self.db_trigger.issue_id, msg)
        except PackitException as ex:
            msg = f"There was an error while proposing a new update {msg} Traceback is: `{ex}`"
            self.project.issue_comment(self.db_trigger.issue_id, msg)
            logger.error(f"Error while running a build: {ex}")
            sync_failed = True
    if sync_failed:
        return TaskResults(success=False, details={"msg": "Propose update failed"})

    # Close issue if propose-update was successful in all branches
    self.project.issue_close(self.db_trigger.issue_id)
    return TaskResults(success=True, details={})
def process_jobs(self, event: Event) -> Dict[str, TaskResults]:
    """
    Create a Celery task for a job handler (if trigger matches)
    for every job defined in config.
    """
    processing_results = {}

    if not event.package_config:
        # this happens when the service receives events for repos which don't have
        # a packit config; success=True - it's not an error that people don't have
        # packit.yaml in their repo
        processing_results[event.trigger.value] = TaskResults(
            success=True, details={"msg": "No packit config in repo"}
        )
        return processing_results

    handler_classes = get_handlers_for_event(event, event.package_config)
    if not handler_classes:
        logger.warning(f"There is no handler for {event.trigger} event.")
        return processing_results

    job_configs = []
    for handler_kls in handler_classes:
        job_configs = get_config_for_handler_kls(
            handler_kls=handler_kls,
            event=event,
            package_config=event.package_config,
        )

    # check whitelist approval for every job to be able to track down which jobs
    # failed because of missing whitelist approval
    whitelist = Whitelist()
    user_login = getattr(event, "user_login", None)
    if user_login and user_login in self.service_config.admins:
        logger.info(f"{user_login} is admin, you shall pass.")
    elif not whitelist.check_and_report(
        event,
        event.project,
        service_config=self.service_config,
        job_configs=job_configs,
    ):
        for job_config in job_configs:
            processing_results[job_config.type.value] = TaskResults(
                success=False, details={"msg": "Account is not whitelisted!"}
            )
        return processing_results

    # we want to run handlers for all possible jobs, not just the first one
    signatures = [
        handler_kls.get_signature(event=event, job=job_config)
        for job_config in job_configs
    ]
    # https://docs.celeryproject.org/en/stable/userguide/canvas.html#groups
    group(signatures).apply_async()
    return get_processing_results(event=event, jobs=job_configs)
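# A minimal standalone sketch of the Celery fan-out used above, with a
# hypothetical task `process_job` (the real signatures come from
# handler_kls.get_signature()): group() bundles the signatures so a single
# apply_async() dispatches them all in parallel.
from celery import Celery, group

app = Celery("sketch", broker="memory://")

@app.task
def process_job(job_id):
    return job_id

# one signature per job config, sent to the broker as one group
group(process_job.s(job_id) for job_id in range(3)).apply_async()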
def run(self):
    if not self.build:
        # TODO: how could this happen?
        model = (
            "SRPMBuildDB"
            if self.copr_event.chroot == COPR_SRPM_CHROOT
            else "CoprBuildDB"
        )
        msg = f"Copr build {self.copr_event.build_id} not in {model}."
        logger.warning(msg)
        return TaskResults(success=False, details={"msg": msg})

    if self.build.status in [
        PG_BUILD_STATUS_FAILURE,
        PG_BUILD_STATUS_SUCCESS,
    ]:
        msg = (
            f"Copr build {self.copr_event.build_id} is already"
            f" processed (status={self.build.status})."
        )
        logger.info(msg)
        return TaskResults(success=True, details={"msg": msg})

    self.set_end_time()
    self.set_srpm_url()

    if self.copr_event.chroot == COPR_SRPM_CHROOT:
        return self.handle_srpm_end()

    self.pushgateway.copr_builds_finished.inc()

    # if the build is needed only for tests, it doesn't have the task_accepted_time
    if self.build.task_accepted_time:
        copr_build_time = measure_time(
            end=datetime.now(timezone.utc), begin=self.build.task_accepted_time
        )
        self.pushgateway.copr_build_finished_time.observe(copr_build_time)

    # https://pagure.io/copr/copr/blob/master/f/common/copr_common/enums.py#_42
    if self.copr_event.status != COPR_API_SUCC_STATE:
        failed_msg = "RPMs failed to be built."
        self.copr_build_helper.report_status_to_all_for_chroot(
            state=BaseCommitStatus.failure,
            description=failed_msg,
            url=get_copr_build_info_url(self.build.id),
            chroot=self.copr_event.chroot,
        )
        self.build.set_status(PG_BUILD_STATUS_FAILURE)
        return TaskResults(success=False, details={"msg": failed_msg})

    self.report_successful_build()
    self.build.set_status(PG_BUILD_STATUS_SUCCESS)

    built_packages = self.copr_build_helper.get_built_packages(
        int(self.build.build_id), self.build.target
    )
    self.build.set_built_packages(built_packages)

    self.handle_testing_farm()

    return TaskResults(success=True, details={})
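# A minimal sketch of the timing pattern above, assuming measure_time()
# returns the elapsed seconds between two datetimes (a hypothetical helper
# mirroring how it is called here, not the packit-service implementation):
from datetime import datetime, timezone, timedelta

def measure_time(end, begin):
    # elapsed seconds between two timezone-aware datetimes
    return (end - begin).total_seconds()

begin = datetime.now(timezone.utc) - timedelta(minutes=5)
print(measure_time(end=datetime.now(timezone.utc), begin=begin))  # ~300.0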
def run(self) -> TaskResults:
    """
    Sync the upstream release to dist-git as a pull request.
    """
    self.local_project = LocalProject(
        git_project=self.project,
        working_dir=self.service_config.command_handler_work_dir,
    )

    self.api = PackitAPI(self.service_config, self.job_config, self.local_project)

    errors = {}
    for branch in get_branches(
        *self.job_config.metadata.dist_git_branches, default="master"
    ):
        try:
            self.api.sync_release(dist_git_branch=branch, version=self.data.tag_name)
        except Exception as ex:
            sentry_integration.send_to_sentry(ex)
            errors[branch] = str(ex)

    if errors:
        branch_errors = ""
        for branch, err in sorted(
            errors.items(), key=lambda branch_error: branch_error[0]
        ):
            err_without_new_lines = err.replace("\n", " ")
            branch_errors += f"| `{branch}` | `{err_without_new_lines}` |\n"

        body_msg = (
            f"Packit failed on creating pull-requests in dist-git:\n\n"
            f"| dist-git branch | error |\n"
            f"| --------------- | ----- |\n"
            f"{branch_errors}\n\n"
            "You can re-trigger the update by adding `/packit propose-update`"
            " as a comment on this issue.\n"
        )

        self.project.create_issue(
            title=f"[packit] Propose update failed for release {self.data.tag_name}",
            body=body_msg,
        )

        return TaskResults(
            success=False,
            details={"msg": "Propose update failed.", "errors": errors},
        )

    return TaskResults(success=True, details={})
def run(self) -> TaskResults:
    """
    Discover information about the organization/user which wants to install
    packit on their repository.
    Try to allowlist them automatically if a mapping from the GitHub username
    to a FAS account can prove that the user is a packager.

    :return: TaskResults
    """
    InstallationModel.create(event=self.installation_event)
    # try to add user to allowlist
    allowlist = Allowlist(
        fas_user=self.service_config.fas_user,
        fas_password=self.service_config.fas_password,
    )
    if not allowlist.add_account(self.account_login, self.sender_login):
        # Create an issue in our repository, so we are notified when someone installs the app
        self.project.create_issue(
            title=f"{self.account_type} {self.account_login} needs to be approved.",
            body=(
                f"Hi @{self.sender_login}, we need to approve you in "
                "order to start using Packit-as-a-Service. Someone from our team will "
                "get back to you shortly.\n\n"
                "For more info, please check out the documentation: "
                "http://packit.dev/packit-as-a-service/"
            ),
        )
        msg = f"{self.account_type} {self.account_login} needs to be approved manually!"
    else:
        msg = f"{self.account_type} {self.account_login} allowlisted!"

    logger.info(msg)
    return TaskResults(success=True, details={"msg": msg})
def test_trigger_build(copr_build, run_new_build):
    valid_commit_sha = "1111111111111111111111111111111111111111"

    package_config = PackageConfig()
    package_config.jobs = []
    package_config.spec_source_id = 1

    job_config = flexmock()
    job_config.type = JobType.build
    job_config.spec_source_id = 1
    job_config.metadata = JobMetadataConfig()

    event = {
        "event_type": "CoprBuildEndEvent",
        "commit_sha": valid_commit_sha,
    }

    flexmock(TFJobHelper).should_receive("get_latest_copr_build").and_return(copr_build)

    if run_new_build:
        flexmock(TFJobHelper, job_owner="owner", job_project="project")
        flexmock(Signature).should_receive("apply_async").once()
    else:
        flexmock(TFJobHelper).should_receive("run_testing_farm").and_return(
            TaskResults(success=True, details={})
        )

    flexmock(cb).should_receive("get_valid_build_targets").and_return(
        {"target", "another-target"}
    )

    tf_handler = TestingFarmHandler(package_config, job_config, event, "target")
    tf_handler.run()
def run_testing_farm_on_all(self):
    failed = {}
    for chroot in self.tests_targets:
        result = self.run_testing_farm(chroot)
        if not result["success"]:
            failed[chroot] = result.get("details")

    if not failed:
        return TaskResults(success=True, details={})

    # dict.update() returns None, so build the details dict first
    # and then merge in the per-chroot failures
    details = {"msg": f"Failed testing farm targets: {list(failed.keys())}."}
    details.update(failed)
    return TaskResults(success=False, details=details)
def test_pr_embedded_command_handler(
    mock_pr_comment_functionality, pr_embedded_command_comment_event, comments_list
):
    flexmock(PullRequestModel).should_receive("get_or_create").with_args(
        pr_id=9,
        namespace="packit-service",
        repo_name="hello-world",
        project_url="https://github.com/packit-service/hello-world",
    ).and_return(
        flexmock(id=9, job_config_trigger_type=JobConfigTriggerType.pull_request)
    )
    pr_embedded_command_comment_event["comment"]["body"] = comments_list
    flexmock(CoprBuildJobHelper).should_receive("run_copr_build").and_return(
        TaskResults(success=True, details={})
    )
    flexmock(GithubProject, get_files="foo.spec")
    flexmock(GithubProject).should_receive("is_private").and_return(False)
    flexmock(Signature).should_receive("apply_async").once()

    processing_results = SteveJobs().process_message(pr_embedded_command_comment_event)
    event_dict, job, job_config, package_config = get_parameters_from_results(
        processing_results
    )

    results = run_copr_build_handler(
        package_config=package_config,
        event=event_dict,
        job_config=job_config,
    )
    assert first_dict_value(results["job"])["success"]
def run(self): if not self.build: msg = f"Koji build {self.koji_build_event.build_id} not found in the database." logger.debug(msg) return TaskResults(success=True, details={"msg": msg}) msg = ( f"Build on {self.build.target} in koji changed state " f"from {self.koji_build_event.old_state} to {self.koji_build_event.state}." ) logger.debug(msg) new_commit_status = { KojiBuildState.building: BaseCommitStatus.running, KojiBuildState.complete: BaseCommitStatus.success, KojiBuildState.deleted: BaseCommitStatus.error, KojiBuildState.failed: BaseCommitStatus.failure, KojiBuildState.canceled: BaseCommitStatus.error, }.get(self.koji_build_event.state) if ( new_commit_status and self.build.status and self.build.status != KojiBuildState.building ): logger.warning( f"We should not overwrite the final state {self.build.status} " f"to {self.koji_build_event.state}. " f"Not updating the status." ) elif new_commit_status: self.build.set_status(new_commit_status.value) else: logger.debug( f"We don't react to this koji build state change: {self.koji_task_event.state}" ) if not self.build.web_url: self.build.set_web_url( KojiBuildEvent.get_koji_rpm_build_web_url( rpm_build_task_id=self.koji_build_event.rpm_build_task_id, koji_web_url=self.service_config.koji_web_url, ) ) # TODO: update logs URL (the access via task number dos not work for non-scratch builds) return TaskResults(success=True, details={"msg": msg})
def test_pr_test_command_handler(pr_embedded_command_comment_event):
    jobs = [
        {
            "trigger": "pull_request",
            "job": "tests",
            "metadata": {"targets": "fedora-rawhide-x86_64"},
        }
    ]
    packit_yaml = (
        "{'specfile_path': 'the-specfile.spec', 'synced_files': [], 'jobs': "
        + str(jobs)
        + "}"
    )
    flexmock(
        GithubProject,
        full_repo_name="packit-service/hello-world",
        get_file_content=lambda path, ref: packit_yaml,
        get_files=lambda ref, filter_regex: ["the-specfile.spec"],
        get_web_url=lambda: "https://github.com/the-namespace/the-repo",
        get_pr=lambda pr_id: flexmock(head_commit="12345"),
    )
    flexmock(Github, get_repo=lambda full_name_or_id: None)

    config = ServiceConfig()
    config.command_handler_work_dir = SANDCASTLE_WORK_DIR
    flexmock(ServiceConfig).should_receive("get_service_config").and_return(config)
    trigger = flexmock(
        job_config_trigger_type=JobConfigTriggerType.pull_request, id=123
    )
    flexmock(AddPullRequestDbTrigger).should_receive("db_trigger").and_return(trigger)
    flexmock(PullRequestModel).should_receive("get_by_id").with_args(123).and_return(
        trigger
    )
    flexmock(LocalProject, refresh_the_arguments=lambda: None)
    flexmock(Allowlist, check_and_report=True)
    flexmock(PullRequestModel).should_receive("get_or_create").with_args(
        pr_id=9,
        namespace="packit-service",
        repo_name="hello-world",
        project_url="https://github.com/packit-service/hello-world",
    ).and_return(
        flexmock(id=9, job_config_trigger_type=JobConfigTriggerType.pull_request)
    )

    pr_embedded_command_comment_event["comment"]["body"] = "/packit test"
    flexmock(GithubProject, get_files="foo.spec")
    flexmock(GithubProject).should_receive("is_private").and_return(False)
    flexmock(Signature).should_receive("apply_async").once()
    flexmock(TestingFarmJobHelper).should_receive("run_testing_farm_on_all").and_return(
        TaskResults(success=True, details={})
    )

    processing_results = SteveJobs().process_message(pr_embedded_command_comment_event)
    event_dict, job, job_config, package_config = get_parameters_from_results(
        processing_results
    )

    run_testing_farm_handler(
        package_config=package_config,
        event=event_dict,
        job_config=job_config,
    )
def run(self) -> TaskResults: logger.debug(f"Testing farm {self.pipeline_id} result:\n{self.result}") test_run_model = TFTTestRunModel.get_by_pipeline_id( pipeline_id=self.pipeline_id) if not test_run_model: logger.warning( f"Unknown pipeline_id received from the testing-farm: " f"{self.pipeline_id}") if test_run_model: test_run_model.set_status(self.result, created=self.created) if self.result == TestingFarmResult.running: status = BaseCommitStatus.running summary = self.summary or "Tests are running ..." elif self.result == TestingFarmResult.passed: status = BaseCommitStatus.success summary = self.summary or "Tests passed ..." elif self.result == TestingFarmResult.error: status = BaseCommitStatus.error summary = self.summary or "Error ..." else: status = BaseCommitStatus.failure summary = self.summary or "Tests failed ..." if self.result == TestingFarmResult.running: self.pushgateway.test_runs_started.inc() else: self.pushgateway.test_runs_finished.inc() test_run_time = measure_time(end=datetime.now(timezone.utc), begin=test_run_model.submitted_time) self.pushgateway.test_run_finished_time.observe(test_run_time) if test_run_model: test_run_model.set_web_url(self.log_url) trigger = JobTriggerModel.get_or_create( type=self.db_trigger.job_trigger_model_type, trigger_id=self.db_trigger.id, ) status_reporter = StatusReporter.get_instance( project=self.project, commit_sha=self.data.commit_sha, trigger_id=trigger.id if trigger else None, pr_id=self.data.pr_id, ) status_reporter.report( state=status, description=summary, url=get_testing_farm_info_url(test_run_model.id) if test_run_model else self.log_url, links_to_external_services={"Testing Farm": self.log_url}, check_names=TestingFarmJobHelper.get_test_check(self.copr_chroot), ) return TaskResults(success=True, details={})
def run(self) -> TaskResults:
    # TODO: once we turn handlers into respective celery tasks, we should iterate
    # here over *all* matching jobs and do them all, not just the first one
    logger.debug(f"Test job config: {self.testing_farm_job_helper.job_tests}")
    targets = list(self.testing_farm_job_helper.tests_targets)
    logger.debug(f"Targets to run the tests: {targets}")

    if self.build_required():
        if self.testing_farm_job_helper.job_build:
            msg = "Build required, already handled by build job."
        else:
            msg = "Build required, CoprBuildHandler task sent."
            self.run_copr_build_handler(
                self.data.get_dict(),
                len(self.testing_farm_job_helper.build_targets),
            )
        logger.info(msg)
        return TaskResults(
            success=True,
            details={"msg": msg},
        )

    failed: Dict[str, str] = {}

    if self.testing_farm_job_helper.skip_build:
        for target in targets:
            self.run_for_target(target=target, failed=failed)
    else:
        self.run_with_copr_builds(targets=targets, failed=failed)

    if not failed:
        return TaskResults(success=True, details={})

    result_details = {"msg": f"Failed testing farm targets: {list(failed.keys())}."}
    result_details.update(failed)
    return TaskResults(success=False, details=result_details)
def get_processing_results(
    event: Event, jobs: List[JobConfig], success: bool = True
) -> TaskResults:
    return TaskResults(
        success=success,
        details={
            "event": event.get_dict(),
            "package_config": dump_package_config(event.package_config),
            "matching_jobs": [dump_job_config(job) for job in jobs],
        },
    )
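# A minimal sketch of how callers consume the returned TaskResults: it is
# read with dict-style access elsewhere in this code base (e.g.
# `result["success"]` in run_testing_farm_on_all), so with hypothetical
# `event` and `job_configs` objects a caller would do:
#
#     results = get_processing_results(event=event, jobs=job_configs)
#     if results["success"]:
#         matching_jobs = results["details"]["matching_jobs"]  # dumped job configs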
def run(self):
    build_job_helper = CoprBuildJobHelper(
        service_config=self.service_config,
        package_config=self.package_config,
        project=self.project,
        metadata=self.data,
        db_trigger=self.db_trigger,
        job_config=self.job_config,
    )

    if self.copr_event.chroot == "srpm-builds":
        # we don't want to set the check status for this
        msg = "SRPM build in copr has started."
        logger.debug(msg)
        return TaskResults(success=True, details={"msg": msg})

    if not self.build:
        msg = f"Copr build {self.copr_event.build_id} not in CoprBuildDB."
        logger.warning(msg)
        return TaskResults(success=False, details={"msg": msg})

    start_time = (
        datetime.utcfromtimestamp(self.copr_event.timestamp)
        if self.copr_event.timestamp
        else None
    )
    self.build.set_start_time(start_time)
    url = get_copr_build_info_url_from_flask(self.build.id)
    self.build.set_status("pending")
    copr_build_logs = self.copr_event.get_copr_build_logs_url()
    self.build.set_build_logs_url(copr_build_logs)

    build_job_helper.report_status_to_all_for_chroot(
        description="RPM build is in progress...",
        state=CommitStatus.pending,
        url=url,
        chroot=self.copr_event.chroot,
    )
    msg = f"Build on {self.copr_event.chroot} in copr has started..."
    return TaskResults(success=True, details={"msg": msg})
def run_copr_build_from_source_script(self) -> TaskResults:
    """
    Run copr build using custom source method.
    """
    try:
        pr_id = self.metadata.pr_id
        script = create_source_script(
            url=self.metadata.project_url,
            ref=self.metadata.git_ref,
            pr_id=str(pr_id) if pr_id else None,
            merge_pr=self.package_config.merge_pr_in_ci,
            target_branch=self.project.get_pr(pr_id).target_branch if pr_id else None,
            job_config=self.job_config,
        )
        build_id, web_url = self.submit_copr_build(script=script)
    except Exception as ex:
        self.handle_build_submit_error(ex)
        return TaskResults(
            success=False,
            details={"msg": "Submit of the Copr build failed.", "error": str(ex)},
        )

    self._srpm_model, self.run_model = SRPMBuildModel.create_with_new_run(
        copr_build_id=str(build_id),
        commit_sha=self.metadata.commit_sha,
        trigger_model=self.db_trigger,
        copr_web_url=web_url,
    )

    self.report_status_to_all(
        description="SRPM build in Copr was submitted...",
        state=BaseCommitStatus.pending,
        url=get_srpm_build_info_url(self.srpm_model.id),
    )

    self.handle_rpm_build_start(build_id, web_url, waiting_for_srpm=True)
    return TaskResults(success=True, details={})
def run(self) -> TaskResults: logger.debug(f"Testing farm {self.pipeline_id} result:\n{self.result}") logger.debug(f"Testing farm test results:\n{self.tests}") test_run_model = TFTTestRunModel.get_by_pipeline_id( pipeline_id=self.pipeline_id) if not test_run_model: logger.warning( f"Unknown pipeline_id received from the testing-farm: " f"{self.pipeline_id}") if test_run_model: test_run_model.set_status(self.result) if self.result == TestingFarmResult.running: status = CommitStatus.pending if isinstance(self.project.service, GitlabService): # only Gitlab has 'running' state status = CommitStatus.running summary = self.summary or "Tests are running ..." elif self.result == TestingFarmResult.passed: status = CommitStatus.success summary = self.summary or "Tests passed ..." elif self.result == TestingFarmResult.error: status = CommitStatus.error if isinstance(self.project.service, GitlabService): # Gitlab has no 'error' state status = CommitStatus.failure summary = self.summary or "Error ..." else: status = CommitStatus.failure summary = self.summary or "Tests failed ..." if len(self.tests ) > 0 and self.tests[0].name == "/packit/install-and-verify": logger.debug("No-fmf scenario discovered.") summary = ("Installation passed" if status == CommitStatus.success else "Installation failed") if test_run_model: test_run_model.set_web_url(self.log_url) status_reporter = StatusReporter(project=self.project, commit_sha=self.data.commit_sha, pr_id=self.data.pr_id) status_reporter.report( state=status, description=summary, url=self.log_url, check_names=TestingFarmJobHelper.get_test_check(self.copr_chroot), ) return TaskResults(success=True, details={})
def run(self) -> TaskResults: logger.debug(f"Received testing-farm result:\n{self.result}") logger.debug(f"Received testing-farm test results:\n{self.tests}") test_run_model = TFTTestRunModel.get_by_pipeline_id( pipeline_id=self.pipeline_id ) if not test_run_model: logger.warning( f"Unknown pipeline_id received from the testing-farm: " f"{self.pipeline_id}" ) if test_run_model: test_run_model.set_status(self.result) if self.result == TestingFarmResult.passed: status = CommitStatus.success passed = True elif self.result == TestingFarmResult.error: status = CommitStatus.error passed = False else: status = CommitStatus.failure passed = False github_status_url = self.log_url if len(self.tests) == 1 and self.tests[0].name == "/install/copr-build": logger.debug("No-fmf scenario discovered.") short_msg = "Installation passed" if passed else "Installation failed" elif self.message.startswith( "Command '['git', 'clone'" ) and self.message.endswith("failed with exit code 128"): short_msg = "Problem with Testing-Farm cluster" github_status_url = "https://pagure.io/centos-infra/issue/85" else: short_msg = self.message if test_run_model: test_run_model.set_web_url(self.log_url) status_reporter = StatusReporter( project=self.project, commit_sha=self.data.commit_sha, pr_id=self.data.pr_id ) status_reporter.report( state=status, description=short_msg, url=github_status_url, check_names=TestingFarmJobHelper.get_test_check(self.copr_chroot), ) return TaskResults(success=True, details={})
def test_rebuild_failed(
    mock_pr_comment_functionality, pr_embedded_command_comment_event
):
    flexmock(PullRequestModel).should_receive("get_or_create").with_args(
        pr_id=9,
        namespace="packit-service",
        repo_name="hello-world",
        project_url="https://github.com/packit-service/hello-world",
    ).and_return(
        flexmock(id=9, job_config_trigger_type=JobConfigTriggerType.pull_request)
    )
    pr_embedded_command_comment_event["comment"]["body"] = "/packit rebuild-failed"
    flexmock(CoprBuildJobHelper).should_receive("run_copr_build").and_return(
        TaskResults(success=True, details={})
    )
    flexmock(GithubProject, get_files="foo.spec")
    flexmock(GithubProject).should_receive("is_private").and_return(False)
    pr = flexmock(head_commit="12345")
    flexmock(GithubProject).should_receive("get_pr").and_return(pr)
    comment = flexmock()
    flexmock(pr).should_receive("get_comment").and_return(comment)
    flexmock(comment).should_receive("add_reaction").with_args("+1").once()
    flexmock(copr_build).should_receive("get_valid_build_targets").and_return(set())
    model = flexmock(CoprBuildModel, status="failed", target="target")
    flexmock(model).should_receive("get_all_by").and_return(flexmock())
    flexmock(AbstractForgeIndependentEvent).should_receive(
        "get_all_build_failed_targets"
    ).and_return({"target"})
    flexmock(CoprBuildJobHelper).should_receive("report_status_to_build").with_args(
        description=TASK_ACCEPTED,
        state=BaseCommitStatus.pending,
        url="",
    ).once()
    flexmock(Signature).should_receive("apply_async").once()
    flexmock(Pushgateway).should_receive("push").twice().and_return()

    processing_results = SteveJobs().process_message(pr_embedded_command_comment_event)
    event_dict, job, job_config, package_config = get_parameters_from_results(
        processing_results
    )
    assert event_dict["targets_override"] == ["target"]
    assert json.dumps(event_dict)

    results = run_copr_build_handler(
        package_config=package_config,
        event=event_dict,
        job_config=job_config,
    )
    assert first_dict_value(results["job"])["success"]
def run(self) -> TaskResults:
    if self.data.event_type == PullRequestGithubEvent.__name__:
        user_can_merge_pr = self.project.can_merge_pr(self.data.user_login)
        if not (
            user_can_merge_pr or self.data.user_login in self.service_config.admins
        ):
            self.koji_build_helper.report_status_to_all(
                description=PERMISSIONS_ERROR_WRITE_OR_ADMIN,
                state=CommitStatus.failure,
            )
            return TaskResults(
                success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
            )

    return self.koji_build_helper.run_koji_build()
def run(self) -> TaskResults: logger.debug( f"Handling labels/tags {self.labels} {self.action.value} to Pagure PR " f"{self.base_repo_owner}/{self.base_repo_namespace}/" f"{self.base_repo_name}/{self.data.identifier}") if self.labels.intersection(self.service_config.pr_accepted_labels): if not self.bz_model: self._create_bug() self._attach_patch() self._set_status() else: logger.debug( f"We accept only {self.service_config.pr_accepted_labels} labels/tags" ) return TaskResults(success=True)
def run(self) -> TaskResults:
    if self.data.event_type in (
        PullRequestGithubEvent.__name__,
        MergeRequestGitlabEvent.__name__,
    ):
        user_can_merge_pr = self.project.can_merge_pr(self.data.user_login)
        if not (
            user_can_merge_pr or self.data.user_login in self.service_config.admins
        ):
            self.copr_build_helper.report_status_to_all(
                description=PERMISSIONS_ERROR_WRITE_OR_ADMIN,
                state=CommitStatus.failure,
                url=FAQ_URL_HOW_TO_RETRIGGER,
            )
            return TaskResults(
                success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
            )

    return super().run()
def run(self) -> TaskResults: logger.debug(f"Received testing-farm result:\n{self.result}") logger.debug(f"Received testing-farm test results:\n{self.tests}") test_run_model = TFTTestRunModel.get_by_pipeline_id( pipeline_id=self.pipeline_id ) if not test_run_model: logger.warning( f"Unknown pipeline_id received from the testing-farm: " f"{self.pipeline_id}" ) if test_run_model: test_run_model.set_status(self.result) if self.result == TestingFarmResult.passed: status = CommitStatus.success passed = True else: status = CommitStatus.failure passed = False if len(self.tests) == 1 and self.tests[0].name == "/install/copr-build": logger.debug("No-fmf scenario discovered.") short_msg = "Installation passed" if passed else "Installation failed" else: short_msg = self.message if test_run_model: test_run_model.set_web_url(self.log_url) status_reporter = StatusReporter(self.project, self.data.commit_sha) status_reporter.report( state=status, description=short_msg, url=self.log_url, check_names=TestingFarmJobHelper.get_test_check(self.copr_chroot), ) return TaskResults(success=True, details={})
def test_pr_comment_build_handler(
    mock_pr_comment_functionality, pr_build_comment_event
):
    flexmock(CoprBuildJobHelper).should_receive("run_copr_build").and_return(
        TaskResults(success=True, details={})
    )
    (
        flexmock(GithubProject)
        .should_receive("can_merge_pr")
        .with_args("phracek")
        .and_return(True)
        .once()
    )
    flexmock(GithubProject, get_files="foo.spec")
    flexmock(GithubProject).should_receive("is_private").and_return(False)
    flexmock(Signature).should_receive("apply_async").once()

    processing_results = SteveJobs().process_message(pr_build_comment_event)
    event_dict, job, job_config, package_config = get_parameters_from_results(
        processing_results
    )

    results = run_pr_comment_copr_build_handler(
        package_config=package_config,
        event=event_dict,
        job_config=job_config,
    )
    assert first_dict_value(results["job"])["success"]
def test_check_rerun_push_testing_farm_handler(
    mock_push_functionality, check_rerun_event_testing_farm
):
    flexmock(TestingFarmJobHelper).should_receive("run_testing_farm").once().and_return(
        TaskResults(success=True, details={})
    )
    flexmock(GithubProject).should_receive("get_files").and_return(["foo.spec"])
    flexmock(GithubProject).should_receive("get_web_url").and_return(
        "https://github.com/the-namespace/the-repo"
    )
    flexmock(GithubProject).should_receive("is_private").and_return(False)
    flexmock(TestingFarmJobHelper).should_receive("get_latest_copr_build").and_return(
        flexmock(status=PG_BUILD_STATUS_SUCCESS)
    )
    flexmock(copr_build).should_receive("get_valid_build_targets").and_return(
        {"fedora-rawhide-x86_64", "fedora-34-x86_64"}
    )
    flexmock(StatusReporterGithubChecks).should_receive("set_status").with_args(
        state=BaseCommitStatus.pending,
        description=TASK_ACCEPTED,
        check_name="testing-farm:fedora-rawhide-x86_64",
        url="",
        links_to_external_services=None,
        markdown_content=None,
    ).once()
    flexmock(Signature).should_receive("apply_async").once()
    flexmock(Pushgateway).should_receive("push").twice().and_return()

    processing_results = SteveJobs().process_message(check_rerun_event_testing_farm)
    event_dict, job, job_config, package_config = get_parameters_from_results(
        processing_results
    )
    assert event_dict["targets_override"] == ["fedora-rawhide-x86_64"]
    assert json.dumps(event_dict)

    results = run_testing_farm_handler(
        package_config=package_config,
        event=event_dict,
        job_config=job_config,
    )
    assert first_dict_value(results["job"])["success"]