示例#1
0
    def process_message(self,
                        event: dict,
                        topic: str = None,
                        source: str = None) -> List[TaskResults]:
        """
        Entrypoint for message processing.

        Pre-filters messages by handler topics (when a topic is given),
        parses the payload into an event object, enforces the
        private-namespace policy and dispatches Celery tasks.

        :param event:  dict with webhook/fed-msg payload
        :param topic:  topic provided by the messaging subsystem (fedmsg, mqtt);
                       NOTE(review): should ideally be annotated Optional[str]
        :param source: source of message (e.g. "centosmsg")
        :return: list of task results; empty list when the message is filtered out
        """

        if topic:
            # TODO: Check if we really use it.
            #  Ideally, we don't want to mix implementation and events
            #  (topics are related to events).
            # let's pre-filter messages: we don't need to get debug logs from processing
            # messages when we know beforehand that we are not interested in messages for such topic
            topics = [
                getattr(handler, "topic", None)
                for handler in JobHandler.get_all_subclasses()
            ]

            if topic not in topics:
                logger.debug(f"{topic} not in {topics}")
                return []

        event_object: Any
        if source == "centosmsg":
            event_object = CentosEventParser().parse_event(event)
        else:
            event_object = Parser.parse_event(event)

        # bail out early when parsing failed or the event rejects itself
        if not (event_object and event_object.pre_check()):
            return []

        # CoprBuildEvent.get_project returns None when the build id is not known
        if not event_object.project:
            logger.warning("Cannot obtain project from this event! "
                           "Skipping private repository check!")
        elif event_object.project.is_private():
            service_with_namespace = (
                f"{event_object.project.service.hostname}/"
                f"{event_object.project.namespace}")
            if (service_with_namespace
                    not in self.service_config.enabled_private_namespaces):
                logger.info(
                    f"We do not interact with private repositories by default. "
                    f"Add `{service_with_namespace}` to the `enabled_private_namespaces` "
                    f"in the service configuration.")
                return []
            logger.debug(f"Working in `{service_with_namespace}` namespace "
                         f"which is private but enabled via configuration.")

        processing_results = None

        # Bugzilla handler is run even when the job is not configured in a package.
        # This is not in the condition below because we want to run process_jobs() as well.
        if isinstance(event_object, MergeRequestGitlabEvent):
            BugzillaHandler.get_signature(
                event=event_object,
                job=None,
            ).apply_async()

        # installation is handled differently b/c app is installed to GitHub account
        # not repository, so package config with jobs is missing
        if isinstance(event_object, InstallationEvent):
            GithubAppInstallationHandler.get_signature(event=event_object,
                                                       job=None).apply_async()
        else:
            # Processing the jobs from the config.
            processing_results = self.process_jobs(event_object)

        # the installation branch above produces no results of its own
        if processing_results is None:
            processing_results = [
                TaskResults.create_from(
                    success=True,
                    msg="Job created.",
                    job_config=None,
                    event=event_object,
                )
            ]

        return processing_results
示例#2
0
    def process_jobs(self, event: Event) -> List[TaskResults]:
        """
        Create a Celery task for a job handler (if trigger matches) for every job defined in config.

        :param event: event to process
        :return: list of task results, one per job config of every matched handler
        """

        if isinstance(
                event,
            (PushGitHubEvent, PushGitlabEvent,
             PushPagureEvent)) and event.commit_sha.startswith("0000000"):
            # an all-zero commit sha is what git sends when a ref is deleted
            return [
                TaskResults.create_from(
                    success=True,
                    msg="Triggered by deleting a branch",
                    job_config=None,
                    event=event,
                )
            ]

        if not event.package_config:
            # this happens when service receives events for repos which don't have packit config
            # success=True - it's not an error that people don't have packit.yaml in their repo
            return [
                TaskResults.create_from(
                    success=True,
                    msg="No packit config found in the repository.",
                    job_config=None,
                    event=event,
                )
            ]

        if isinstance(
                event,
            (
                PullRequestCommentGithubEvent,
                PullRequestCommentPagureEvent,
                IssueCommentEvent,
                MergeRequestCommentGitlabEvent,
                IssueCommentGitlabEvent,
            ),
        ):
            # comment-triggered jobs require the commenter to have merge rights
            if not event.project.can_merge_pr(event.user_login):
                logger.debug(
                    f"User {event.user_login} not allowed to trigger packit via comments."
                )
                return [
                    TaskResults.create_from(
                        success=True,
                        msg=
                        f"User {event.user_login} not allowed to trigger packit via comments.",
                        job_config=None,
                        event=event,
                    )
                ]

        handler_classes = get_handlers_for_event(event, event.package_config)

        if not handler_classes:
            logger.debug(
                f"There is no handler for {event} event suitable for the configuration."
            )
            return []

        # hoisted out of the loop: both are invariant across handler classes
        whitelist = Whitelist()
        user_login = getattr(event, "user_login", None)

        processing_results: List[TaskResults] = []
        for handler_kls in handler_classes:
            # TODO: merge into get_handlers_for_event so
            #  we don't need to go through the similar process twice.
            job_configs = get_config_for_handler_kls(
                handler_kls=handler_kls,
                event=event,
                package_config=event.package_config,
            )
            # check whitelist approval for every job to be able to track down which jobs
            # failed because of missing whitelist approval
            if user_login and user_login in self.service_config.admins:
                logger.info(f"{user_login} is admin, you shall pass.")
            elif not whitelist.check_and_report(
                    event,
                    event.project,
                    service_config=self.service_config,
                    job_configs=job_configs,
            ):
                return [
                    TaskResults.create_from(
                        success=False,
                        msg="Account is not whitelisted!",
                        job_config=job_config,
                        event=event,
                    ) for job_config in job_configs
                ]

            # we want to run handlers for all possible jobs, not just the first one
            signatures = [
                handler_kls.get_signature(event=event, job=job_config)
                for job_config in job_configs
            ]
            # https://docs.celeryproject.org/en/stable/userguide/canvas.html#groups
            group(signatures).apply_async()

            # record a result for every job of THIS handler class; previously
            # the trailing loop only reported the last handler's job configs
            # even though tasks were dispatched for all of them
            processing_results.extend(
                TaskResults.create_from(
                    success=True,
                    msg="Job created.",
                    job_config=job_config,
                    event=event,
                ) for job_config in job_configs)

        return processing_results
示例#3
0
    def process_jobs(self, event: Event) -> List[TaskResults]:
        """
        Create a Celery task for a job handler (if trigger matches) for every job defined in config.

        Runs the allowlist check and per-handler pre-checks, reports an
        initial pending status for build/test handlers, then dispatches one
        Celery group of task signatures per handler class.

        :param event: event to process
        :return: list of task results, one per dispatched job
        """
        if isinstance(
                event,
                AbstractCommentEvent) and get_packit_commands_from_comment(
                    event.comment):
            # we require packit config file when event is triggered by /packit command
            event.fail_when_config_file_missing = True

        if not event.package_config:
            # this happens when service receives events for repos which don't have packit config
            # success=True - it's not an error that people don't have packit.yaml in their repo
            return [
                TaskResults.create_from(
                    success=True,
                    msg="No packit config found in the repository.",
                    job_config=None,
                    event=event,
                )
            ]

        handler_classes = get_handlers_for_event(event, event.package_config)

        if not handler_classes:
            logger.debug(
                f"There is no handler for {event} event suitable for the configuration."
            )
            return []

        allowlist = Allowlist()
        processing_results: List[TaskResults] = []

        for handler_kls in handler_classes:
            # TODO: merge into get_handlers_for_event so
            #  we don't need to go through the similar process twice.
            job_configs = get_config_for_handler_kls(
                handler_kls=handler_kls,
                event=event,
                package_config=event.package_config,
            )

            # check allowlist approval for every job to be able to track down which jobs
            # failed because of missing allowlist approval
            if not allowlist.check_and_report(
                    event,
                    event.project,
                    service_config=self.service_config,
                    job_configs=job_configs,
            ):
                # allowlist failure aborts the whole event: only failure
                # results for this handler's job configs are returned
                processing_results = []
                for job_config in job_configs:
                    processing_results.append(
                        TaskResults.create_from(
                            success=False,
                            msg="Account is not allowlisted!",
                            job_config=job_config,
                            event=event,
                        ))
                return processing_results

            signatures = []

            # we want to run handlers for all possible jobs, not just the first one
            for job_config in job_configs:
                # instantiate the handler just to run its pre-checks; the
                # actual work happens later in the Celery task
                handler = handler_kls(
                    package_config=event.package_config,
                    job_config=job_config,
                    event=event.get_dict(),
                )
                if not handler.pre_check():
                    continue

                if event.actor and not handler.check_if_actor_can_run_job_and_report(
                        actor=event.actor):
                    # For external contributors, we need to be more careful when running jobs.
                    # This is a handler-specific permission check
                    # for a user who trigger the action on a PR.
                    # e.g. We don't allow using internal TF for external contributors.
                    continue

                if isinstance(
                        handler,
                    (CoprBuildHandler, KojiBuildHandler, TestingFarmHandler)):
                    # TestingFarmHandler uses the Copr helper because its
                    # builds come from Copr
                    helper = (CoprBuildJobHelper if isinstance(
                        handler, (CoprBuildHandler,
                                  TestingFarmHandler)) else KojiBuildJobHelper)

                    job_helper = helper(
                        service_config=self.service_config,
                        package_config=event.package_config,
                        project=event.project,
                        metadata=EventData.from_event_dict(event.get_dict()),
                        db_trigger=event.db_trigger,
                        job_config=job_config,
                        targets_override=event.targets_override,
                    )

                    # report to the "tests" check for TF, to "build" otherwise
                    reporting_method = (job_helper.report_status_to_tests if
                                        isinstance(handler, TestingFarmHandler)
                                        else job_helper.report_status_to_build)

                    # let the user know the task was accepted before it runs
                    reporting_method(
                        description=TASK_ACCEPTED,
                        state=BaseCommitStatus.pending,
                        url="",
                    )
                    push_initial_metrics(event, handler,
                                         len(job_helper.build_targets))

                signatures.append(
                    handler_kls.get_signature(event=event, job=job_config))
                processing_results.append(
                    TaskResults.create_from(
                        success=True,
                        msg="Job created.",
                        job_config=job_config,
                        event=event,
                    ))
            # https://docs.celeryproject.org/en/stable/userguide/canvas.html#groups
            # jobs skipped by pre-checks above are not in this group
            group(signatures).apply_async()

        return processing_results