def run(self) -> HandlerResults:
    """
    Sync content from the downstream (dist-git) repo into the upstream project.

    Reads the upstream project URL from the dist-git specfile, clones the
    upstream repo and syncs the dist-git branch of the triggering event into
    the upstream "master" branch.

    :return: HandlerResults; success=False when the specfile has no URL set
    """
    # self.project is dist-git, we need to get upstream
    dg = DistGit(self.config, self.package_config)
    self.package_config.upstream_project_url = (
        dg.get_project_url_from_distgit_spec()
    )
    if not self.package_config.upstream_project_url:
        return HandlerResults(
            success=False,
            details={
                "msg": "URL in specfile is not set. "
                "We don't know where the upstream project lives."
            },
        )

    n, r = get_namespace_and_repo_name(self.package_config.upstream_project_url)
    up = self.project.service.get_project(repo=r, namespace=n)
    self.local_project = LocalProject(
        git_project=up, working_dir=self.config.command_handler_work_dir
    )

    self.api = PackitAPI(self.config, self.package_config, self.local_project)
    self.api.sync_from_downstream(
        # rev is a commit
        # we use branch on purpose so we get the latest thing
        # TODO: check if rev is HEAD on {branch}, warn then?
        dist_git_branch=self.distgit_event.branch,
        upstream_branch="master",  # TODO: this should be configurable
    )
    return HandlerResults(success=True, details={})
def run(self):
    """
    Handle a "copr build started" event: mark the build row as pending,
    store the build-log URL and report a pending status for the chroot.
    """
    if self.event.chroot == "srpm-builds":
        # we don't want to set the check status for this
        msg = "SRPM build in copr has started"
        logger.debug(msg)
        return HandlerResults(success=True, details={"msg": msg})

    # TODO: drop the code below once we move to PG completely; the build is present in event
    # pg
    build = CoprBuildModel.get_by_build_id(str(self.event.build_id), self.event.chroot)
    if not build:
        msg = f"Copr build {self.event.build_id} not in CoprBuildDB"
        logger.warning(msg)
        return HandlerResults(success=False, details={"msg": msg})

    url = get_log_url(build.id)
    build.set_status("pending")
    copr_build_logs = get_copr_build_logs_url(self.event)
    build.set_build_logs_url(copr_build_logs)

    self.build_job_helper.report_status_to_all_for_chroot(
        description="RPM build has started...",
        state=CommitStatus.pending,
        url=url,
        chroot=self.event.chroot,
    )
    msg = f"Build on {self.event.chroot} in copr has started..."
    return HandlerResults(success=True, details={"msg": msg})
def run(self) -> HandlerResults:
    """Trigger testing farm for a PR if the commenter is permitted to."""
    tf_helper = TestingFarmJobHelper(
        config=self.config,
        package_config=self.event.package_config,
        project=self.event.project,
        event=self.event,
        job=self.job,
    )

    allowed = self.event.project.can_merge_pr(self.event.user_login)
    if not allowed and self.event.user_login not in self.config.admins:
        self.event.project.pr_comment(
            self.event.pr_id, PERMISSIONS_ERROR_WRITE_OR_ADMIN
        )
        return HandlerResults(
            success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
        )

    logger.debug(f"Test job config: {tf_helper.job_tests}")
    if tf_helper.job_tests:
        tf_helper.run_testing_farm_on_all()
    else:
        logger.debug("Testing farm not in the job config.")
    return HandlerResults(success=True, details={})
def run_copr_build(self) -> HandlerResults:
    """
    Build an SRPM, submit it to copr and record the build in the DB.

    Reports progress to all configured checks and schedules a celery task
    to babysit the copr build.

    :return: HandlerResults; success=False when no build/tests job is
             configured or when the SRPM build fails
    """
    if not (self.job_build or self.job_tests):
        msg = "No copr_build or tests job defined."
        # we can't report it to end-user at this stage
        return HandlerResults(success=False, details={"msg": msg})

    self.report_status_to_all(
        description="Building SRPM ...", state=CommitStatus.pending
    )

    build_metadata = self._run_copr_build_and_save_output()

    srpm_build_model = SRPMBuild.create(build_metadata.srpm_logs)

    if build_metadata.srpm_failed:
        msg = "SRPM build failed, check the logs for details."
        self.report_status_to_all(
            state=CommitStatus.failure,
            description=msg,
            url=get_srpm_log_url(srpm_build_model.id),
        )
        return HandlerResults(success=False, details={"msg": msg})

    # one DB row and one pending "Building RPM ..." status per chroot
    for chroot in self.build_chroots:
        copr_build = CoprBuild.get_or_create(
            pr_id=self.pr_id,
            build_id=str(build_metadata.copr_build_id),
            commit_sha=self.event.commit_sha,
            repo_name=self.project.repo,
            namespace=self.project.namespace,
            project_name=self.job_project,
            owner=self.job_owner,
            web_url=build_metadata.copr_web_url,
            target=chroot,
            status="pending",
            srpm_build=srpm_build_model,
        )
        url = get_log_url(id_=copr_build.id)
        self.report_status_to_all_for_chroot(
            state=CommitStatus.pending,
            description="Building RPM ...",
            url=url,
            chroot=chroot,
        )

    self.copr_build_model.build_id = build_metadata.copr_build_id
    self.copr_build_model.save()

    # release the hounds!
    celery_app.send_task(
        "task.babysit_copr_build",
        args=(build_metadata.copr_build_id,),
        countdown=120,  # do the first check in 120s
    )
    return HandlerResults(success=True, details={})
def process_jobs(self, event: Event) -> Dict[str, HandlerResults]:
    """
    Run a job handler (if trigger matches) for every job defined in config.

    :param event: event the service is reacting to
    :return: dict of job results keyed by "<job-type>-<timestamp>"
    """
    handlers_results = {}

    if not event.package_config:
        # this happens when service receives events for repos which
        # don't have packit config, this is not an error
        # success=True - it's not an error that people don't have packit.yaml in their repo
        handlers_results[event.trigger.value] = HandlerResults(
            success=True, details={"msg": "No packit config in repo"}
        )
        return handlers_results

    handler_classes = get_handlers_for_event(event, event.package_config)

    if not handler_classes:
        logger.warning(f"There is no handler for {event.trigger} event.")
        return handlers_results

    for handler_kls in handler_classes:
        job_configs = get_config_for_handler_kls(
            handler_kls=handler_kls,
            event=event,
            package_config=event.package_config,
        )
        # check whitelist approval for every job to be able to track down which jobs
        # failed because of missing whitelist approval
        whitelist = Whitelist()
        user_login = getattr(event, "user_login", None)
        if user_login and user_login in self.config.admins:
            logger.info(f"{user_login} is admin, you shall pass.")
        elif not whitelist.check_and_report(
            event, event.project, config=self.config
        ):
            for job_config in job_configs:
                handlers_results[job_config.type.value] = HandlerResults(
                    success=False, details={"msg": "Account is not whitelisted!"}
                )
            return handlers_results

        # we want to run handlers for all possible jobs, not just the first one
        for job_config in job_configs:
            logger.debug(f"Running handler: {str(handler_kls)} for {job_config}")
            handler = handler_kls(
                config=self.config, job_config=job_config, event=event
            )
            if handler.pre_check():
                # timestamped keys so repeated jobs of the same type don't clash
                current_time = datetime.datetime.now().strftime(DATETIME_FORMAT)
                result_key = f"{job_config.type.value}-{current_time}"
                handlers_results[result_key] = handler.run_n_clean()

    return handlers_results
def run(self) -> HandlerResults:
    """
    Propose a downstream (dist-git) update for each configured branch,
    triggered by an issue comment; closes the issue when every branch
    succeeded.

    :return: HandlerResults; success=False when no upstream release was
             found or when syncing any branch failed
    """
    local_project = LocalProject(
        git_project=self.event.project,
        working_dir=self.config.command_handler_work_dir,
    )
    api = PackitAPI(
        config=self.config,
        package_config=self.event.package_config,
        upstream_local_project=local_project,
    )

    user_can_merge_pr = self.event.project.can_merge_pr(self.event.user_login)
    if not (user_can_merge_pr or self.event.user_login in self.config.admins):
        self.event.project.issue_comment(
            self.event.issue_id, PERMISSIONS_ERROR_WRITE_OR_ADMIN
        )
        return HandlerResults(
            success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
        )

    if not self.event.tag_name:
        msg = (
            "There was an error while proposing a new update for the Fedora package: "
            "no upstream release found."
        )
        self.event.project.issue_comment(self.event.issue_id, msg)
        return HandlerResults(success=False, details={"msg": "Propose update failed"})

    sync_failed = False
    for branch in self.dist_git_branches_to_sync:
        # FIX: the two f-string fragments used to be concatenated without a
        # space, producing "`<package>`with the tag" in the user-facing comment
        msg = (
            f"for the Fedora package `{self.event.package_config.downstream_package_name}` "
            f"with the tag `{self.event.tag_name}` in the `{branch}` branch.\n"
        )
        try:
            new_pr = api.sync_release(
                dist_git_branch=branch, version=self.event.tag_name, create_pr=True
            )
            msg = f"Packit-as-a-Service proposed [a new update]({new_pr.url}) {msg}"
            self.event.project.issue_comment(self.event.issue_id, msg)
        except PackitException as ex:
            msg = f"There was an error while proposing a new update {msg} Traceback is: `{ex}`"
            self.event.project.issue_comment(self.event.issue_id, msg)
            logger.error(f"Error while running a build: {ex}")
            sync_failed = True

    if sync_failed:
        return HandlerResults(success=False, details={"msg": "Propose update failed"})

    # Close issue if propose-update was successful in all branches
    self.event.project.issue_close(self.event.issue_id)
    return HandlerResults(success=True, details={})
def run(self) -> HandlerResults:
    """
    Sync the upstream release to dist-git as a pull request.

    On failure, opens an issue in the upstream repo with a per-branch
    error table and re-trigger instructions.
    """
    self.local_project = LocalProject(
        git_project=self.event.project,
        working_dir=self.config.command_handler_work_dir,
    )

    self.api = PackitAPI(self.config, self.event.package_config, self.local_project)

    # branch -> error message for every dist-git branch where the sync failed
    errors = {}
    for branch in get_branches(
        *self.job_config.metadata.dist_git_branches, default="master"
    ):
        try:
            self.api.sync_release(dist_git_branch=branch, version=self.event.tag_name)
        except Exception as ex:
            sentry_integration.send_to_sentry(ex)
            errors[branch] = str(ex)

    if errors:
        # build a markdown table of branch -> error for the issue body
        branch_errors = ""
        for branch, err in sorted(
            errors.items(), key=lambda branch_error: branch_error[0]
        ):
            err_without_new_lines = err.replace("\n", " ")
            branch_errors += f"| `{branch}` | `{err_without_new_lines}` |\n"

        body_msg = (
            f"Packit failed on creating pull-requests in dist-git:\n\n"
            f"| dist-git branch | error |\n"
            f"| --------------- | ----- |\n"
            f"{branch_errors}\n\n"
            "You can re-trigger the update by adding `/packit propose-update`"
            " to the issue comment.\n"
        )

        self.event.project.create_issue(
            title=f"[packit] Propose update failed for release {self.event.tag_name}",
            body=body_msg,
        )

        return HandlerResults(
            success=False,
            details={"msg": "Propose update failed.", "errors": errors},
        )

    return HandlerResults(success=True, details={})
def process_jobs(self, event: Event) -> Dict[str, HandlerResults]:
    """
    Run a job handler (if trigger matches) for every job defined in config.

    :param event: event the service is reacting to
    :return: dict of job results keyed by job type value
    """
    handlers_results = {}
    package_config = event.get_package_config()
    if not package_config:
        # this happens when service receives events for repos which
        # don't have packit config, this is not an error
        # success=True - it's not an error that people don't have packit.yaml in their repo
        handlers_results[event.trigger.value] = HandlerResults(
            success=True, details={"msg": "No packit config in repo"}
        )
        return handlers_results

    handler_classes = get_handlers_for_event(event, package_config)

    if not handler_classes:
        logger.warning(f"There is no handler for {event.trigger} event.")
        return handlers_results

    for handler_kls in handler_classes:
        job = get_config_for_handler_kls(
            handler_kls=handler_kls, event=event, package_config=package_config
        )
        # check whitelist approval for every job to be able to track down which jobs
        # failed because of missing whitelist approval
        whitelist = Whitelist()
        github_login = getattr(event, "github_login", None)
        if github_login and github_login in self.config.admins:
            logger.info(f"{github_login} is admin, you shall pass")
        elif not whitelist.check_and_report(
            event, event.get_project(), config=self.config
        ):
            handlers_results[job.type.value] = HandlerResults(
                success=False, details={"msg": "Account is not whitelisted!"}
            )
            return handlers_results

        logger.debug(f"Running handler: {str(handler_kls)}")
        handler = handler_kls(config=self.config, job_config=job, event=event)
        if handler.pre_check():
            handlers_results[job.type.value] = handler.run_n_clean()
        # don't break here, other handlers may react to the same event

    return handlers_results
def run_testing_farm_on_all(self):
    """
    Run testing farm for every configured test chroot.

    :return: HandlerResults; on failure, details carry a summary message
             plus the per-chroot error details
    """
    failed = {}
    for chroot in self.tests_chroots:
        result = self.run_testing_farm(chroot)
        if not result["success"]:
            failed[chroot] = result.get("details")

    if not failed:
        return HandlerResults(success=True, details={})

    # BUG FIX: the original used `{...}.update(failed)` as the `details`
    # argument — dict.update() returns None, so the failure result carried
    # details=None and lost the per-chroot errors. Build the dict explicitly.
    details = {"msg": f"Failed testing farm targets: '{failed.keys()}'."}
    details.update(failed)
    return HandlerResults(success=False, details=details)
def run(self) -> HandlerResults:
    """
    Discover information about organization/user which wants to install packit
    on his repository and try to whitelist it automatically when the GitHub
    username can be mapped to a FAS packager account.

    :return: HandlerResults
    """
    InstallationModel.create(event=self.event)
    # try to add user to whitelist
    whitelist = Whitelist(
        fas_user=self.config.fas_user,
        fas_password=self.config.fas_password,
    )
    login = self.event.account_login
    kind = self.event.account_type

    if whitelist.add_account(self.event):
        msg = f"{kind} {login} whitelisted!"
    else:
        # Create an issue in our repository, so we are notified when someone install the app
        self.project.create_issue(
            title=f"{kind} {login} needs to be approved.",
            body=(
                f"Hi @{self.event.sender_login}, we need to approve you in "
                "order to start using Packit-as-a-Service. Someone from our team will "
                "get back to you shortly.\n\n"
                "For more info, please check out the documentation: "
                "http://packit.dev/packit-as-a-service/"
            ),
        )
        msg = f"{kind} {login} needs to be approved manually!"

    logger.info(msg)
    return HandlerResults(success=True, details={"msg": msg})
def test_issue_comment_propose_update_handler(
    mock_issue_comment_functionality, issue_comment_propose_update_event
):
    """A propose-update issue comment should run sync_release and succeed."""
    flexmock(PackitAPI).should_receive("sync_release").and_return(
        HandlerResults(success=True, details={})
    )
    flexmock(SteveJobs, _is_private=False)

    outcome = SteveJobs().process_message(issue_comment_propose_update_event)
    assert outcome["jobs"]["pull_request_action"]["success"]
def run(self) -> HandlerResults:
    """Report a finished testing-farm run back as a commit status."""
    logger.debug(f"Received testing-farm result:\n{self.event.result}")
    logger.debug(f"Received testing-farm test results:\n{self.event.tests}")

    test_passed = self.event.result == TestingFarmResult.passed
    state = CommitStatus.success if test_passed else CommitStatus.failure

    no_fmf = (
        len(self.event.tests) == 1
        and self.event.tests[0].name == "/install/copr-build"
    )
    if no_fmf:
        logger.debug("No-fmf scenario discovered.")
        short_msg = "Installation passed" if test_passed else "Installation failed"
    else:
        short_msg = self.event.message

    reporter = StatusReporter(self.project, self.event.commit_sha)
    reporter.report(
        state=state,
        description=short_msg,
        url=self.event.log_url,
        check_names=TestingFarmJobHelper.get_test_check(self.event.copr_chroot),
    )
    return HandlerResults(success=True, details={})
def process_comment_jobs(
    self, event: Union[PullRequestCommentEvent, IssueCommentEvent]
) -> HandlerResults:
    """
    Parse a packit command out of a PR/issue comment and run the matching
    comment-action handler after a whitelist check.

    :param event: comment event (PR or issue)
    :return: result of the dispatched handler, or an explanatory result
             when the comment carries no valid command
    """
    msg = f"comment '{event.comment}'"
    packit_command, pr_comment_error_msg = self.find_packit_command(
        str(event.comment)
    )
    if pr_comment_error_msg:
        return HandlerResults(
            success=True,
            details={"msg": pr_comment_error_msg},
        )

    # packit has command `copr-build`. But PullRequestCommentAction has enum `copr_build`.
    try:
        packit_action = CommentAction[packit_command[0].replace("-", "_")]
    except KeyError:
        return HandlerResults(
            success=True,
            details={
                "msg": f"{msg} does not contain a valid packit-service command."
            },
        )

    handler_kls: Type[CommentActionHandler] = MAP_COMMENT_ACTION_TO_HANDLER.get(
        packit_action, None
    )
    if not handler_kls:
        return HandlerResults(
            success=True, details={"msg": f"{msg} is not a packit-service command."}
        )

    # check whitelist approval for every job to be able to track down which jobs
    # failed because of missing whitelist approval
    whitelist = Whitelist()
    github_login = getattr(event, "github_login", None)
    if github_login and github_login in self.config.admins:
        logger.info(f"{github_login} is admin, you shall pass")
    elif not whitelist.check_and_report(
        event, event.get_project(), config=self.config
    ):
        return HandlerResults(
            success=True, details={"msg": "Account is not whitelisted!"}
        )

    handler_instance: Handler = handler_kls(config=self.config, event=event)
    return handler_instance.run_n_clean()
def run(self) -> HandlerResults:
    """
    Propose a downstream (dist-git) update for each configured branch,
    triggered by an issue comment; closes the issue when every branch
    succeeded. Only collaborators (or configured admins) may trigger it.

    :return: HandlerResults; success=False when no upstream release was
             found or when syncing any branch failed
    """
    self.local_project = LocalProject(
        git_project=self.project, working_dir=self.config.command_handler_work_dir
    )
    self.api = PackitAPI(self.config, self.package_config, self.local_project)

    collaborators = self.project.who_can_merge_pr()
    if self.event.github_login not in collaborators | self.config.admins:
        msg = "Only collaborators can trigger Packit-as-a-Service"
        self.project.issue_comment(self.event.issue_id, msg)
        return HandlerResults(success=True, details={"msg": msg})

    if not self.event.tag_name:
        msg = (
            "There was an error while proposing a new update for the Fedora package: "
            "no upstream release found."
        )
        self.project.issue_comment(self.event.issue_id, msg)
        return HandlerResults(success=False, details={"msg": "Propose update failed"})

    sync_failed = False
    for branch in self.dist_git_branches_to_sync:
        # FIX: the f-string fragments used to be concatenated without a space,
        # producing "`<package>`with the tag" in the user-facing comment
        msg = (
            f"a new update for the Fedora package "
            f"`{self.package_config.downstream_package_name}` "
            f"with the tag `{self.event.tag_name}` in the `{branch}` branch.\n"
        )
        try:
            self.api.sync_release(dist_git_branch=branch, version=self.event.tag_name)
            msg = f"Packit-as-a-Service proposed {msg}"
            self.project.issue_comment(self.event.issue_id, msg)
        except PackitException as ex:
            msg = f"There was an error while proposing {msg} Traceback is: `{ex}`"
            self.project.issue_comment(self.event.issue_id, msg)
            logger.error(f"error while running a build: {ex}")
            sync_failed = True

    if sync_failed:
        return HandlerResults(success=False, details={"msg": "Propose update failed"})

    # Close issue if propose-update was successful in all branches
    self.project.issue_close(self.event.issue_id)
    return HandlerResults(success=True, details={})
def _process_timeout(self):
    """Report an SRPM-creation timeout on the PR and via commit statuses."""
    comment = (
        f"You have reached 10-minute timeout while creating SRPM. {self.msg_retrigger}"
    )
    self.project.pr_comment(self.event.pr_id, comment)

    description = "Timeout reached while creating a SRPM."
    self.report_status_to_all(state="error", description=description)
    return HandlerResults(success=False, details={"msg": description})
def run(self) -> HandlerResults:
    """Run testing farm for a PR if the commenter is a collaborator/admin."""
    permitted = self.project.who_can_merge_pr() | self.config.admins
    if self.event.github_login not in permitted:
        msg = "Only collaborators can trigger Packit-as-a-Service"
        self.project.pr_comment(self.event.pr_id, msg)
        return HandlerResults(success=True, details={"msg": msg})

    logger.debug(f"Test job config: {self.testing_farm_helper.job_tests}")
    if self.testing_farm_helper.job_tests:
        self.testing_farm_helper.run_testing_farm_on_all()
    else:
        logger.debug("Testing farm not in the job config.")
    return HandlerResults(success=True, details={})
def run(self):
    """
    Handle a "copr build started" event: record the start time, mark the
    build pending, store the log URL and report progress for the chroot.
    """
    build_job_helper = CoprBuildJobHelper(
        config=self.config,
        package_config=self.event.package_config,
        project=self.event.project,
        event=self.event,
    )

    if self.event.chroot == "srpm-builds":
        # we don't want to set the check status for this
        msg = "SRPM build in copr has started."
        logger.debug(msg)
        return HandlerResults(success=True, details={"msg": msg})

    # TODO: drop the code below once we move to PG completely; the build is present in event
    # pg
    build = CoprBuildModel.get_by_build_id(
        str(self.event.build_id), self.event.chroot
    )
    if not build:
        msg = f"Copr build {self.event.build_id} not in CoprBuildDB."
        logger.warning(msg)
        return HandlerResults(success=False, details={"msg": msg})

    # the event timestamp may be missing; store None in that case
    start_time = (
        datetime.utcfromtimestamp(self.event.timestamp)
        if self.event.timestamp
        else None
    )
    build.set_start_time(start_time)

    url = get_copr_build_log_url_from_flask(build.id)
    build.set_status("pending")
    copr_build_logs = get_copr_build_logs_url(self.event)
    build.set_build_logs_url(copr_build_logs)

    build_job_helper.report_status_to_all_for_chroot(
        description="RPM build is in progress...",
        state=CommitStatus.pending,
        url=url,
        chroot=self.event.chroot,
    )
    msg = f"Build on {self.event.chroot} in copr has started..."
    return HandlerResults(success=True, details={"msg": msg})
def run(self) -> HandlerResults:
    """Run testing farm for a PR if the commenter has write/admin rights."""
    permitted = self.project.who_can_merge_pr() | self.config.admins
    if self.event.github_login not in permitted:
        self.project.pr_comment(self.event.pr_id, PERMISSIONS_ERROR_WRITE_OR_ADMIN)
        return HandlerResults(
            success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
        )

    logger.debug(f"Test job config: {self.testing_farm_helper.job_tests}")
    if self.testing_farm_helper.job_tests:
        self.testing_farm_helper.run_testing_farm_on_all()
    else:
        logger.debug("Testing farm not in the job config.")
    return HandlerResults(success=True, details={})
def test_pr_comment_build_handler(mock_pr_comment_functionality, pr_build_comment_event):
    """A `copr-build` PR comment should trigger run_copr_build for each job."""
    flexmock(CoprBuildJobHelper).should_receive("run_copr_build").and_return(
        HandlerResults(success=True, details={})
    )
    (
        flexmock(GithubProject)
        .should_receive("can_merge_pr")
        .with_args("phracek")
        .and_return(True)
        .once()
    )
    flexmock(GithubProject, get_files="foo.spec")
    flexmock(GithubProject).should_receive("is_private").and_return(False)

    outcome = SteveJobs().process_message(pr_build_comment_event)
    assert all(job["success"] for job in outcome["jobs"].values())
def run(self) -> HandlerResults:
    """
    React to labels/tags added to a Pagure PR: if any accepted label is
    present, a Bugzilla bug is to be created; otherwise the event is ignored.

    :return: HandlerResults, always success=True
    """
    e = self.event
    logger.debug(
        f"Handling labels/tags {e.labels} {e.action.value} to Pagure PR "
        f"{e.base_repo_owner}/{e.base_repo_namespace}/{e.base_repo_name}/{e.identifier}"
    )
    if e.labels.intersection(self.config.pr_accepted_labels):
        logger.debug(f"About to create a bug @ {self.config.bugzilla_url}")
    else:
        logger.debug(f"We accept only {self.config.pr_accepted_labels} labels/tags")
    # CONSISTENCY FIX: every other handler returns HandlerResults with an
    # explicit `details` dict; the original omitted it here.
    return HandlerResults(success=True, details={})
def run(self):
    """
    Handle a "copr build started" event; look the build up in postgres
    first and fall back to the legacy redis CoprBuildDB.
    """
    if self.event.chroot == "srpm-builds":
        # we don't want to set the check status for this
        msg = "SRPM build in copr has started"
        logger.debug(msg)
        return HandlerResults(success=True, details={"msg": msg})

    # TODO: drop the code below once we move to PG completely; the build is present in event
    # pg
    build_pg = CoprBuild.get_by_build_id(
        str(self.event.build_id), self.event.chroot
    )
    if not build_pg:
        logger.info(
            f"build {self.event.build_id} is not in pg, falling back to redis"
        )
        # redis - old school
        build = CoprBuildDB().get_build(self.event.build_id)
        if not build:
            # TODO: how could this happen?
            msg = f"Copr build {self.event.build_id} not in CoprBuildDB"
            logger.warning(msg)
            return HandlerResults(success=False, details={"msg": msg})

    if build_pg:
        url = get_log_url(build_pg.id)
        build_pg.set_status("pending")
        copr_build_logs = get_copr_build_logs_url(self.event)
        build_pg.set_build_logs_url(copr_build_logs)
    else:
        # no DB row to link to; point at the copr web UI instead
        url = copr_url_from_event(self.event)

    self.build_job_helper.report_status_to_all_for_chroot(
        description="RPM build has started...",
        state=CommitStatus.pending,
        url=url,
        chroot=self.event.chroot,
    )
    msg = f"Build on {self.event.chroot} in copr has started..."
    return HandlerResults(success=True, details={"msg": msg})
def run(self) -> HandlerResults:
    """Run a copr build for a PR comment from a collaborator/admin."""
    permitted = self.project.who_can_merge_pr() | self.config.admins
    if self.event.github_login not in permitted:
        msg = "Only collaborators can trigger Packit-as-a-Service"
        self.project.pr_comment(self.event.pr_id, msg)
        return HandlerResults(success=True, details={"msg": msg})

    build_helper = CoprBuildJobHelper(
        self.config, self.package_config, self.project, self.event
    )
    return build_helper.run_copr_build()
def run(self) -> HandlerResults:
    """Run the koji build, enforcing collaborator permissions for PR events."""
    if isinstance(self.event, PullRequestEvent):
        permitted = self.project.who_can_merge_pr() | self.config.admins
        if self.event.github_login not in permitted:
            self.koji_build_helper.report_status_to_all(
                description=PERMISSIONS_ERROR_WRITE_OR_ADMIN,
                state=CommitStatus.failure,
            )
            return HandlerResults(
                success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
            )
    return super().run()
def _process_failed_srpm_build(self, ex):
    """Report a failed SRPM build to Sentry, the PR, and commit statuses."""
    sentry_integration.send_to_sentry(ex)
    comment = (
        f"There was an error while creating SRPM. {self.msg_retrigger}\n"
        "\nOutput:"
        "\n```\n"
        f"{ex}"
        "\n```"
    )
    self.project.pr_comment(self.event.pr_id, comment)

    description = "Failed to create SRPM."
    self.report_status_to_all(description=description, state="error")
    return HandlerResults(success=False, details={"msg": description})
def handle_pull_request(self):
    """Run a copr build for a PR, rejecting non-collaborators via statuses."""
    collaborators = self.project.who_can_merge_pr()
    build_helper = CoprBuildJobHelper(
        self.config, self.package_config, self.project, self.event
    )

    if self.event.github_login not in collaborators | self.config.admins:
        msg = "Only collaborators can trigger Packit-as-a-Service"
        build_helper.report_status_to_all("failure", msg)
        return HandlerResults(success=False, details={"msg": msg})

    return build_helper.run_copr_build()
def run(self) -> HandlerResults:
    """
    Dispatch a copr build based on the trigger type.

    A `tests` job is skipped when an explicit `copr_build` job is present
    in the config — that job will do the building.
    """
    def is_copr_build(job: JobConfig) -> bool:
        return job.job == JobType.copr_build

    if self.job.job == JobType.tests and any(
        is_copr_build(job) for job in self.package_config.jobs
    ):
        return HandlerResults(
            success=False,
            details={
                "msg": "Skipping build for testing. The COPR build is defined in the config."
            },
        )

    if self.event.trigger == JobTriggerType.pull_request:
        return self.handle_pull_request()
    # We do not support this workflow officially
    # elif self.triggered_by == JobTriggerType.release:
    #     self.handle_release()
    return HandlerResults(
        success=False,
        details={"msg": f"No handler for {str(self.event.trigger)}"},
    )
def run(self) -> HandlerResults:
    """Sync a source-git PR into the configured dist-git branch."""
    self.local_project = LocalProject(
        git_project=self.project,
        working_dir=self.config.command_handler_work_dir,
    )
    self.api = PackitAPI(self.config, self.package_config, self.local_project)

    target_branch = self.job.metadata.get("dist-git-branch", "master")
    self.api.sync_pr(
        pr_id=self.pr_event.pr_id,
        dist_git_branch=target_branch,
        # TODO: figure out top upstream commit for source-git here
    )
    return HandlerResults(success=True, details={})
def _process_general_exception(self, ex):
    """Report an unexpected copr-build error to Sentry, the PR, and statuses."""
    sentry_integration.send_to_sentry(ex)

    msg = f"There was an error while running a copr build:\n```\n{ex}\n```\n"
    logger.error(msg)
    self.project.pr_comment(self.event.pr_id, f"{msg}\n{self.msg_retrigger}")

    self.report_status_to_build(
        state="failure",
        description="Build failed, check latest comment for details.",
    )
    self.report_status_to_tests(
        state="error",
        description="Build failed, check latest comment for details.",
    )
    return HandlerResults(success=False, details={"msg": msg})
def run(self) -> HandlerResults:
    """Run a copr build if the commenter has write/admin permissions."""
    permitted = self.project.who_can_merge_pr() | self.config.admins
    if self.event.github_login not in permitted:
        self.project.pr_comment(self.event.pr_id, PERMISSIONS_ERROR_WRITE_OR_ADMIN)
        return HandlerResults(
            success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
        )

    build_helper = CoprBuildJobHelper(
        self.config, self.package_config, self.project, self.event
    )
    return build_helper.run_copr_build()
def run(self) -> HandlerResults:
    """Run the koji build after a permission check for GitHub PR events."""
    if isinstance(self.event, PullRequestGithubEvent):
        can_merge = self.event.project.can_merge_pr(self.event.user_login)
        is_admin = self.event.user_login in self.config.admins
        if not can_merge and not is_admin:
            self.koji_build_helper.report_status_to_all(
                description=PERMISSIONS_ERROR_WRITE_OR_ADMIN,
                state=CommitStatus.failure,
            )
            return HandlerResults(
                success=True, details={"msg": PERMISSIONS_ERROR_WRITE_OR_ADMIN}
            )
    return super().run()