Example #1
0
def get_processing_results(event: Event,
                           jobs: List[JobConfig],
                           success: bool = True) -> TaskResults:
    """
    Wrap the outcome of event processing in a TaskResults object.

    :param event: event that was processed; serialized into the details
    :param jobs: job configurations that matched the event
    :param success: overall success flag (defaults to True)
    :return: TaskResults carrying the success flag and a details dict
    """
    details = {
        "event": event.get_dict(),
        "package_config": dump_package_config(event.package_config),
        "matching_jobs": [dump_job_config(job) for job in jobs],
    }
    return TaskResults(success=success, details=details)
Example #2
0
    def run_copr_build_handler(self, event_data: dict, number_of_builds: int):
        """
        Queue a Copr build Celery task for the current package/job config.

        :param event_data: serialized event to pass to the task
        :param number_of_builds: how many builds this task will produce;
            the queued-builds metric is bumped once per build
        """
        # One metric increment per expected build.
        for _ in range(number_of_builds):
            self.pushgateway.copr_builds_queued.inc()

        task_kwargs = {
            "package_config": dump_package_config(self.package_config),
            "job_config": dump_job_config(self.job_config),
            "event": event_data,
        }
        signature(TaskName.copr_build.value, kwargs=task_kwargs).apply_async()
Example #3
0
 def get_signature(cls, event: Event, job: Optional[JobConfig]) -> Signature:
     """
     Get the signature of a Celery task which will run the handler.
     https://docs.celeryproject.org/en/stable/userguide/canvas.html#signatures
     :param event: event which triggered the task
     :param job: job to process
     """
     logger.debug(f"Getting signature of a Celery task {cls.task_name}.")
     task_kwargs = {
         "package_config": dump_package_config(event.package_config),
         "job_config": dump_job_config(job),
         "event": event.get_dict(),
     }
     return signature(cls.task_name.value, kwargs=task_kwargs)
Example #4
0
 def handle_testing_farm(self):
     """
     Schedule a testing-farm Celery task for the finished chroot,
     provided the job config defines tests and the chroot is a test target.
     """
     # Guard clause: nothing to do unless tests are configured for this chroot.
     if not (self.copr_build_helper.job_tests and self.copr_event.chroot
             in self.copr_build_helper.tests_targets):
         logger.debug("Testing farm not in the job config.")
         return

     signature(
         TaskName.testing_farm.value,
         kwargs={
             "package_config": dump_package_config(self.package_config),
             "job_config":
             dump_job_config(self.copr_build_helper.job_tests),
             "event": self.data.get_dict(),
             "chroot": self.copr_event.chroot,
             "build_id": self.build.id,
         },
     ).apply_async()
Example #5
0
    def create_from(cls,
                    success: bool,
                    msg: str,
                    event: Event,
                    job_config: JobConfig = None):
        """
        Build an instance from a processed event.

        :param success: whether the processing succeeded
        :param msg: human-readable message stored in the details
        :param event: the event that was processed
        :param job_config: matched job config, if any (its type and dump
            are recorded; both fields are None-safe)
        :return: new instance with success flag and details dict
        """
        details = {
            "msg": msg,
            "event": event.get_dict(),
            "package_config": dump_package_config(event.package_config),
            "job": job_config.type.value if job_config else None,
            "job_config": dump_job_config(job_config),
        }
        return cls(success=success, details=details)
    def run(self):
        """
        Process a finished Copr build.

        Updates the matching CoprBuildDB record (end time, status), reports
        the result to the git forge for the built chroot, optionally posts a
        congratulations comment on the PR and schedules a testing-farm task.

        :return: TaskResults with a success flag and a details dict
        """
        build_job_helper = CoprBuildJobHelper(
            service_config=self.service_config,
            package_config=self.package_config,
            project=self.project,
            metadata=self.data,
            db_trigger=self.db_trigger,
            job_config=self.job_config,
        )

        if self.copr_event.chroot == "srpm-builds":
            # we don't want to set check for this
            msg = "SRPM build in copr has finished."
            logger.debug(msg)
            return TaskResults(success=True, details={"msg": msg})

        if not self.build:
            # TODO: how could this happen?
            msg = f"Copr build {self.copr_event.build_id} not in CoprBuildDB."
            logger.warning(msg)
            return TaskResults(success=False, details={"msg": msg})
        if self.build.status in [
                PG_COPR_BUILD_STATUS_FAILURE,
                PG_COPR_BUILD_STATUS_SUCCESS,
        ]:
            # Bugfix: report the status we actually checked (self.build.status),
            # not the one hanging off the event object.
            msg = (f"Copr build {self.copr_event.build_id} is already"
                   f" processed (status={self.build.status}).")
            logger.info(msg)
            return TaskResults(success=True, details={"msg": msg})

        # A missing timestamp is recorded as a null end time.
        end_time = (datetime.utcfromtimestamp(self.copr_event.timestamp)
                    if self.copr_event.timestamp else None)
        self.build.set_end_time(end_time)
        url = get_copr_build_info_url_from_flask(self.build.id)

        # https://pagure.io/copr/copr/blob/master/f/common/copr_common/enums.py#_42
        if self.copr_event.status != COPR_API_SUCC_STATE:
            failed_msg = "RPMs failed to be built."
            build_job_helper.report_status_to_all_for_chroot(
                state=CommitStatus.failure,
                description=failed_msg,
                url=url,
                chroot=self.copr_event.chroot,
            )
            self.build.set_status(PG_COPR_BUILD_STATUS_FAILURE)
            return TaskResults(success=False, details={"msg": failed_msg})

        # Comment only on successful PR-triggered builds on GitHub/GitLab,
        # when we haven't congratulated already and notifications are enabled.
        if (build_job_helper.job_build and build_job_helper.job_build.trigger
                == JobConfigTriggerType.pull_request and self.copr_event.pr_id
                and isinstance(self.project, (GithubProject, GitlabProject))
                and not self.was_last_packit_comment_with_congratulation() and
                self.job_config.notifications.pull_request.successful_build):
            msg = (
                f"Congratulations! One of the builds has completed. :champagne:\n\n"
                ":warning: Please note that our current plans include removal of these "
                "comments in the near future (at least 2 weeks after including this "
                "disclaimer), if you have serious concerns regarding their removal "
                "or would like to continue receiving them please reach out to us. "
                ":warning:\n\n"
                "You can install the built RPMs by following these steps:\n\n"
                "* `sudo yum install -y dnf-plugins-core` on RHEL 8\n"
                "* `sudo dnf install -y dnf-plugins-core` on Fedora\n"
                f"* `dnf copr enable {self.copr_event.owner}/{self.copr_event.project_name}`\n"
                "* And now you can install the packages.\n"
                "\nPlease note that the RPMs should be used only in a testing environment."
            )
            self.project.pr_comment(pr_id=self.copr_event.pr_id, body=msg)

        build_job_helper.report_status_to_build_for_chroot(
            state=CommitStatus.success,
            description="RPMs were built successfully.",
            url=url,
            chroot=self.copr_event.chroot,
        )
        build_job_helper.report_status_to_test_for_chroot(
            state=CommitStatus.pending,
            description="RPMs were built successfully.",
            url=url,
            chroot=self.copr_event.chroot,
        )
        self.build.set_status(PG_COPR_BUILD_STATUS_SUCCESS)

        # Hand off to testing farm if tests are configured for this chroot.
        if (build_job_helper.job_tests
                and self.copr_event.chroot in build_job_helper.tests_targets):
            signature(
                TaskName.testing_farm.value,
                kwargs={
                    "package_config": dump_package_config(self.package_config),
                    "job_config": dump_job_config(build_job_helper.job_tests),
                    "event": self.data.get_dict(),
                    "chroot": self.copr_event.chroot,
                    "build_id": self.build.id,
                },
            ).apply_async()
        else:
            logger.debug("Testing farm not in the job config.")

        return TaskResults(success=True, details={})
Example #7
0
    def run(self):
        """
        Process a finished Copr build.

        Updates the matching CoprBuildDB record (metrics, end time, SRPM URL,
        status, built packages), reports the result to the git forge for the
        built chroot, optionally posts a congratulations comment on the PR
        and schedules a testing-farm task.

        :return: TaskResults with a success flag and a details dict
        """
        build_job_helper = CoprBuildJobHelper(
            service_config=self.service_config,
            package_config=self.package_config,
            project=self.project,
            metadata=self.data,
            db_trigger=self.db_trigger,
            job_config=self.job_config,
            pushgateway=self.pushgateway,
        )

        if self.copr_event.chroot == "srpm-builds":
            # we don't want to set check for this
            msg = "SRPM build in copr has finished."
            logger.debug(msg)
            return TaskResults(success=True, details={"msg": msg})

        if not self.build:
            # TODO: how could this happen?
            msg = f"Copr build {self.copr_event.build_id} not in CoprBuildDB."
            logger.warning(msg)
            return TaskResults(success=False, details={"msg": msg})
        if self.build.status in [
                PG_COPR_BUILD_STATUS_FAILURE,
                PG_COPR_BUILD_STATUS_SUCCESS,
        ]:
            # Bugfix: report the status we actually checked (self.build.status),
            # not the one hanging off the event object.
            msg = (f"Copr build {self.copr_event.build_id} is already"
                   f" processed (status={self.build.status}).")
            logger.info(msg)
            return TaskResults(success=True, details={"msg": msg})

        self.pushgateway.copr_builds_finished.inc()

        # if the build is needed only for test, it doesn't have the task_accepted_time
        if self.build.task_accepted_time:
            copr_build_time = measure_time(end=datetime.now(timezone.utc),
                                           begin=self.build.task_accepted_time)
            self.pushgateway.copr_build_finished_time.observe(copr_build_time)

        # A missing timestamp is recorded as a null end time.
        end_time = (datetime.utcfromtimestamp(self.copr_event.timestamp)
                    if self.copr_event.timestamp else None)
        self.build.set_end_time(end_time)

        self.set_srpm_url(build_job_helper)

        url = get_copr_build_info_url(self.build.id)

        # https://pagure.io/copr/copr/blob/master/f/common/copr_common/enums.py#_42
        if self.copr_event.status != COPR_API_SUCC_STATE:
            failed_msg = "RPMs failed to be built."
            build_job_helper.report_status_to_all_for_chroot(
                state=BaseCommitStatus.failure,
                description=failed_msg,
                url=url,
                chroot=self.copr_event.chroot,
            )
            self.build.set_status(PG_COPR_BUILD_STATUS_FAILURE)
            return TaskResults(success=False, details={"msg": failed_msg})

        # Comment only on successful PR-triggered builds on GitHub/GitLab,
        # when we haven't congratulated already and notifications are enabled.
        if (build_job_helper.job_build and build_job_helper.job_build.trigger
                == JobConfigTriggerType.pull_request and self.copr_event.pr_id
                and isinstance(self.project, (GithubProject, GitlabProject))
                and not self.was_last_packit_comment_with_congratulation() and
                self.job_config.notifications.pull_request.successful_build):
            msg = (
                f"Congratulations! One of the builds has completed. :champagne:\n\n"
                "You can install the built RPMs by following these steps:\n\n"
                "* `sudo yum install -y dnf-plugins-core` on RHEL 8\n"
                "* `sudo dnf install -y dnf-plugins-core` on Fedora\n"
                f"* `dnf copr enable {self.copr_event.owner}/{self.copr_event.project_name}`\n"
                "* And now you can install the packages.\n"
                "\nPlease note that the RPMs should be used only in a testing environment."
            )
            self.project.get_pr(self.copr_event.pr_id).comment(msg)

        build_job_helper.report_status_to_build_for_chroot(
            state=BaseCommitStatus.success,
            description="RPMs were built successfully.",
            url=url,
            chroot=self.copr_event.chroot,
        )
        build_job_helper.report_status_to_test_for_chroot(
            state=BaseCommitStatus.pending,
            description="RPMs were built successfully.",
            url=url,
            chroot=self.copr_event.chroot,
        )
        self.build.set_status(PG_COPR_BUILD_STATUS_SUCCESS)

        # Record what was actually produced so later stages can reference it.
        built_packages = build_job_helper.get_built_packages(
            int(self.build.build_id), self.build.target)
        self.build.set_built_packages(built_packages)

        # Hand off to testing farm if tests are configured for this chroot.
        if (build_job_helper.job_tests
                and self.copr_event.chroot in build_job_helper.tests_targets):
            signature(
                TaskName.testing_farm.value,
                kwargs={
                    "package_config": dump_package_config(self.package_config),
                    "job_config": dump_job_config(build_job_helper.job_tests),
                    "event": self.data.get_dict(),
                    "chroot": self.copr_event.chroot,
                    "build_id": self.build.id,
                },
            ).apply_async()
        else:
            logger.debug("Testing farm not in the job config.")

        return TaskResults(success=True, details={})