def process_jobs(self, event: Event) -> Dict[str, HandlerResults]:
    """
    Run a job handler (if trigger matches) for every job defined in config.

    :param event: event which triggered this processing
    :return: mapping of job-type value (or trigger value) to the handler's result
    """
    handlers_results = {}
    package_config = event.get_package_config()
    if not package_config:
        # this happens when service receives events for repos which
        # don't have packit config, this is not an error
        # success=True - it's not an error that people don't have packit.yaml in their repo
        handlers_results[event.trigger.value] = HandlerResults(
            success=True, details={"msg": "No packit config in repo"}
        )
        return handlers_results

    handler_classes = get_handlers_for_event(event, package_config)
    if not handler_classes:
        logger.warning(f"There is no handler for {event.trigger} event.")
        return handlers_results

    # Loop-invariant work hoisted out of the loop: the whitelist and the
    # sender's login do not change between jobs, so resolve them once
    # (also avoids logging the admin message once per handler).
    whitelist = Whitelist()
    github_login = getattr(event, "github_login", None)
    is_admin = bool(github_login and github_login in self.config.admins)
    if is_admin:
        logger.info(f"{github_login} is admin, you shall pass")

    for handler_kls in handler_classes:
        job = get_config_for_handler_kls(
            handler_kls=handler_kls, event=event, package_config=package_config
        )
        # check whitelist approval for every job to be able to track down which jobs
        # failed because of missing whitelist approval
        if not is_admin and not whitelist.check_and_report(
            event, event.get_project(), config=self.config
        ):
            handlers_results[job.type.value] = HandlerResults(
                success=False, details={"msg": "Account is not whitelisted!"}
            )
            return handlers_results

        logger.debug(f"Running handler: {str(handler_kls)}")
        handler = handler_kls(config=self.config, job_config=job, event=event)
        if handler.pre_check():
            handlers_results[job.type.value] = handler.run_n_clean()
        # don't break here, other handlers may react to the same event
    return handlers_results
def process_jobs(self, event: Event) -> Dict[str, HandlerResults]:
    """
    Run a job handler (if trigger matches) for every job defined in config.

    :param event: event which triggered this processing
    :return: mapping of job name to the handler's result
    """
    handlers_results = {}

    package_config = event.get_package_config()
    if not package_config:
        # this happens when service receives events for repos which
        # don't have packit config, this is not an error
        msg = "Failed to obtain package config!"
        logger.info(msg)
        handlers_results[event.trigger.value] = HandlerResults(
            success=False, details={"msg": msg}
        )
        return handlers_results

    for job in package_config.jobs:
        # only react to jobs whose configured trigger matches this event
        if event.trigger != job.trigger:
            continue

        handler_kls: Type[JobHandler] = JOB_NAME_HANDLER_MAPPING.get(job.job)
        if not handler_kls:
            logger.warning(f"There is no handler for job {job}")
            continue

        handler = handler_kls(self.config, job, event)
        try:
            # check whitelist approval for every job to be able to track down which jobs
            # failed because of missing whitelist approval
            whitelist = Whitelist()
            if not whitelist.check_and_report(event, event.get_project()):
                handlers_results[job.job.value] = HandlerResults(
                    success=False,
                    details={"msg": "Account is not whitelisted!"},
                )
                return handlers_results

            logger.debug(f"Running handler: {str(handler_kls)}")
            handlers_results[job.job.value] = handler.run()
            # don't break here, other handlers may react to the same event
        finally:
            # always release the handler's resources, even on early return
            handler.clean()

    return handlers_results
def get(self, id: str):
    """A specific Celery task details"""
    task = TaskResultModel.get_by_id(id)
    if task:
        result = task.to_dict()
        # stored event timestamp -> human-readable datetime string
        result["event"] = Event.ts2str(result["event"])
        return result
    # unknown task id: empty body, 204
    return "", HTTPStatus.NO_CONTENT
def get_processing_results(
    event: Event, jobs: List[JobConfig], success: bool = True
) -> TaskResults:
    """
    Assemble a TaskResults describing which jobs matched the given event.

    :param event: event which triggered the processing
    :param jobs: job configs that matched the event
    :param success: overall outcome flag (defaults to True)
    """
    details = {
        "event": event.get_dict(),
        "package_config": dump_package_config(event.package_config),
        "matching_jobs": [dump_job_config(job) for job in jobs],
    }
    return TaskResults(success=success, details=details)
def get(self):
    """List all Celery tasks / jobs"""
    first, last = indices()
    # Materialize the results once so we know the TOTAL count for the
    # Content-Range header; the previous len(tasks) only reported the size
    # of the current page, which breaks client-side pagination.
    all_tasks = list(TaskResultModel.get_all())
    tasks = []
    for task in all_tasks[first:last]:
        data = task.to_dict()
        # stored event timestamp -> human-readable datetime string
        data["event"] = Event.ts2str(data["event"])
        tasks.append(data)
    resp = make_response(dumps(tasks), HTTPStatus.PARTIAL_CONTENT)
    resp.headers["Content-Range"] = f"tasks {first+1}-{last}/{len(all_tasks)}"
    resp.headers["Content-Type"] = "application/json"
    return resp
def get_signature(cls, event: Event, job: Optional[JobConfig]) -> Signature:
    """
    Get the signature of a Celery task which will run the handler.

    https://docs.celeryproject.org/en/stable/userguide/canvas.html#signatures

    :param event: event which triggered the task
    :param job: job to process
    """
    logger.debug(f"Getting signature of a Celery task {cls.task_name}.")
    # serialize everything the worker needs into plain kwargs
    task_kwargs = {
        "package_config": dump_package_config(event.package_config),
        "job_config": dump_job_config(job),
        "event": event.get_dict(),
    }
    return signature(cls.task_name.value, kwargs=task_kwargs)
def create_from(cls, success: bool, msg: str, event: Event, job_config: JobConfig = None):
    """
    Build a result instance from an event (and optionally the job config).

    :param success: whether the processing succeeded
    :param msg: human-readable description stored in the details
    :param event: event which triggered the processing
    :param job_config: matched job config, if any (may be None when the
        event could not be matched to a job)
    :return: instance of cls with the assembled details dict
    """
    details = {
        "msg": msg,
        "event": event.get_dict(),
        "package_config": dump_package_config(event.package_config),
    }
    # job info is only available when a job config was matched
    if job_config:
        details.update({
            "job": job_config.type.value,
            "job_config": dump_job_config(job_config),
        })
    return cls(success=success, details=details)
def test_handler_cleanup(tmpdir):
    """_clean_workplace() must remove every entry in the work dir, including
    dirs, hidden files, and symlinks (valid and dangling)."""
    workdir = Path(tmpdir)
    workdir.joinpath("a").mkdir()
    workdir.joinpath("b").write_text("a")
    workdir.joinpath("c").symlink_to("b")
    workdir.joinpath("d").symlink_to("a", target_is_directory=True)
    workdir.joinpath("e").symlink_to("nope", target_is_directory=False)
    workdir.joinpath("f").symlink_to("nopez", target_is_directory=True)
    workdir.joinpath(".g").write_text("g")
    workdir.joinpath(".h").symlink_to(".g", target_is_directory=False)

    config = ServiceConfig()
    config.command_handler_work_dir = workdir
    job_config = JobConfig(JobType.copr_build, JobTriggerType.pull_request, {})
    handler = JobHandler(config, job_config, Event(JobTriggerType.pull_request))

    handler._clean_workplace()

    assert not list(workdir.iterdir())
def get(self):
    """List all Celery tasks / jobs"""
    first, last = indices()
    # The db.keys() always returns all matched keys, but there's no better way with redis.
    # Use islice (instead of [first:last]) to at least create an iterator instead of new list.
    keys = db.keys("celery-task-meta-*")
    tasks = []
    for key in islice(keys, first, last):
        raw = db.get(key)
        if not raw:
            continue
        data = loads(raw)
        event = nested_get(data, "result", "event")
        if event:
            # timestamp to datetime string
            data["result"]["event"] = Event.ts2str(event)
        tasks.append(data)
    resp = make_response(dumps(tasks), HTTPStatus.PARTIAL_CONTENT)
    resp.headers["Content-Range"] = f"tasks {first+1}-{last}/{len(keys)}"
    resp.headers["Content-Type"] = "application/json"
    return resp
def test_handler_cleanup(tmpdir, trick_p_s_with_k8s):
    """_clean_workplace() must remove every entry in the work dir, including
    dirs, hidden files, and symlinks (valid and dangling)."""
    workdir = Path(tmpdir)
    workdir.joinpath("a").mkdir()
    workdir.joinpath("b").write_text("a")
    workdir.joinpath("c").symlink_to("b")
    workdir.joinpath("d").symlink_to("a", target_is_directory=True)
    workdir.joinpath("e").symlink_to("nope", target_is_directory=False)
    workdir.joinpath("f").symlink_to("nopez", target_is_directory=True)
    workdir.joinpath(".g").write_text("g")
    workdir.joinpath(".h").symlink_to(".g", target_is_directory=False)

    config = ServiceConfig()
    config.command_handler_work_dir = workdir
    job_config = JobConfig(
        type=JobType.copr_build,
        trigger=JobConfigTriggerType.pull_request,
        metadata={},
    )
    handler = JobHandler(
        config=config,
        job_config=job_config,
        event=Event(trigger=TheJobTriggerType.pull_request),
    )

    handler._clean_workplace()

    assert not list(workdir.iterdir())
def get(self):
    """List all Whitelisted FAS accounts"""
    # convert each stored account's timestamp to a datetime string
    accounts = db.get_all().values()
    return list(map(Event.ts2str, accounts))
def get(self):
    """List all Github App installations"""
    # convert each installation's event timestamp to a datetime string
    installations = Installation.db().get_all().values()
    return [Event.ts2str(entry["event_data"]) for entry in installations]