def get_flask_application():
    """Create and configure the packit-service Flask application."""
    # Sentry does not work in the service for now
    # SENTRY_SECRET is not passed to the service pod or container
    # https://github.com/packit-service/deployment/blob/master/openshift/packit-service.yml.j2
    # configure_sentry(
    #     runner_type="packit-service",
    #     celery_integration=True,
    #     sqlalchemy_integration=True,
    #     flask_integration=True,
    # )
    app = Flask(__name__)
    for bp in (blueprint, builds_blueprint):
        app.register_blueprint(bp)

    s = ServiceConfig.get_service_config()
    # https://flask.palletsprojects.com/en/1.1.x/config/#SERVER_NAME
    # also needs to contain port if it's not 443
    app.config["SERVER_NAME"] = s.server_name
    app.config["PREFERRED_URL_SCHEME"] = "https"
    if getenv("DEPLOYMENT") in ("dev", "stg"):
        app.config["DEBUG"] = True

    app.logger.setLevel(logging.DEBUG)
    logger = logging.getLogger("packit_service")
    logger.info(
        f"server name = {s.server_name}, all HTTP requests need to use this URL!"
    )
    log_service_versions()
    # no need to thank me, just buy me a beer
    logger.debug(f"URL map = {app.url_map}")
    return app
def test_copr_build_check_names(pull_request_event):
    """Copr build run sets the expected commit-status check names.

    The whole pipeline around the build helper is mocked and the test
    verifies that:
    * both the SRPM and the RPM pending statuses use the
      "packit-stg/rpm-build-bright-future-x86_64" check name,
    * statuses go through StatusReporter only — GitProject.set_commit_status
      must never be called directly,
    * exactly one Celery task is sent.
    """
    flexmock(AddPullRequestDbTrigger).should_receive("db_trigger").and_return(
        flexmock(job_config_trigger_type=JobConfigTriggerType.release)
    )
    helper = build_helper(
        event=pull_request_event,
        metadata=JobMetadataConfig(targets=["bright-future-x86_64"], owner="nobody"),
    )
    # pending status for the SRPM build step (no logs URL yet)
    flexmock(StatusReporter).should_receive("set_status").with_args(
        state=CommitStatus.pending,
        description="Building SRPM ...",
        check_name="packit-stg/rpm-build-bright-future-x86_64",
        url="",
    ).and_return()
    # pending status for the RPM build step, now with a logs URL
    flexmock(StatusReporter).should_receive("set_status").with_args(
        state=CommitStatus.pending,
        description="Building RPM ...",
        check_name="packit-stg/rpm-build-bright-future-x86_64",
        url="https://localhost:5000/copr-build/1/logs",
    ).and_return()
    # direct status setting on the project is forbidden
    flexmock(GitProject).should_receive("set_commit_status").and_return().never()
    flexmock(SRPMBuildModel).should_receive("create").and_return(SRPMBuildModel())
    flexmock(CoprBuildModel).should_receive("get_or_create").and_return(
        CoprBuildModel(id=1)
    )
    flexmock(PullRequestEvent).should_receive("db_trigger").and_return(flexmock())
    flexmock(PackitAPI).should_receive("run_copr_build").and_return(1, None)
    flexmock(Celery).should_receive("send_task").once()

    config = ServiceConfig.get_service_config()
    config.disable_sentry = True
    assert helper.run_copr_build()["success"]
def get_project(self) -> Optional[GitProject]:
    """Return the GitProject for this event.

    Prefers the explicit project URL; falls back to the URL stored on the
    DB trigger. Returns None when neither source is available.
    """
    if self.project_url:
        url = self.project_url
    elif self.db_trigger:
        url = self.db_trigger.project.project_url
    else:
        return None
    return ServiceConfig.get_service_config().get_project(url=url)
def get_test_check(cls, chroot: str = None) -> str:
    """Return the commit-status check name for a test job.

    The prefix reflects the deployment ("packit" in prod, "packit-stg"
    otherwise); the chroot, when given, is appended as a suffix.
    """
    prod = ServiceConfig.get_service_config().deployment == Deployment.prod
    prefix = "packit" if prod else "packit-stg"
    suffix = f"-{chroot}" if chroot else ""
    return f"{prefix}/{cls.status_name_test}{suffix}"
def get_flask_application():
    """Create the packit-service Flask app with Sentry and blueprints set up."""
    configure_sentry(
        runner_type="packit-service",
        celery_integration=True,
        sqlalchemy_integration=True,
        flask_integration=True,
    )
    app = Flask(__name__)
    for bp in (blueprint, builds_blueprint):
        app.register_blueprint(bp)

    s = ServiceConfig.get_service_config()
    # https://flask.palletsprojects.com/en/1.1.x/config/#SERVER_NAME
    # also needs to contain port if it's not 443
    app.config["SERVER_NAME"] = s.server_name
    app.config["PREFERRED_URL_SCHEME"] = "https"
    if getenv("DEPLOYMENT") in ("dev", "stg"):
        app.config["DEBUG"] = True

    app.logger.setLevel(logging.DEBUG)
    logger = logging.getLogger("packit_service")
    logger.info(
        f"server name = {s.server_name}, all HTTP requests need to use this URL!"
    )
    log_service_versions()
    # no need to thank me, just buy me a beer
    logger.debug(f"URL map = {app.url_map}")
    return app
def get_request_details(cls, request_id: str) -> Dict[str, Any]:
    """Testing Farm sends only request/pipeline id in a notification.
    We need to get more details ourselves.

    :param request_id: TF request/pipeline identifier from the notification
    :return: parsed JSON details of the request, {} on any failure
    """
    # construct a throwaway handler instance just to be able to reuse
    # send_testing_farm_request(); most attributes are irrelevant here
    self = cls(
        service_config=ServiceConfig.get_service_config(),
        package_config=PackageConfig(),
        project=None,
        metadata=None,
        db_trigger=None,
        job_config=JobConfig(
            # dummy values to be able to construct the object
            type=JobType.tests,
            trigger=JobConfigTriggerType.pull_request,
        ),
    )
    response = self.send_testing_farm_request(
        endpoint=f"requests/{request_id}", method="GET"
    )
    if not response or response.status_code != 200:
        # BUGFIX: response can be None here — accessing response.reason
        # unconditionally raised AttributeError instead of logging
        reason = response.reason if response else "no response"
        msg = (
            f"Failed to get request/pipeline {request_id} details from TF. {reason}"
        )
        logger.error(msg)
        return {}

    details = response.json()
    # logger.debug(f"Request/pipeline {request_id} details: {details}")
    return details
def get_testing_farm_check(chroot: str = None) -> str:
    """Return the testing-farm check name for the current deployment.

    When a chroot is given it is appended as a "-<chroot>" suffix.
    """
    deployment = ServiceConfig.get_service_config().deployment
    base = (
        PACKIT_PROD_TESTING_FARM_CHECK
        if deployment == Deployment.prod
        else PACKIT_STG_TESTING_FARM_CHECK
    )
    return f"{base}-{chroot}" if chroot else base
def get_build_check(chroot: str = None) -> str:
    """Return the build check name for the current deployment.

    When a chroot is given it is appended as a "-<chroot>" suffix.
    """
    deployment = ServiceConfig.get_service_config().deployment
    base = PACKIT_PROD_CHECK if deployment == Deployment.prod else PACKIT_STG_CHECK
    return f"{base}-{chroot}" if chroot else base
def parse_koji_build_event(event) -> Optional[KojiBuildEvent]:
    """Parse a koji build state-change fedmsg into a KojiBuildEvent.

    :param event: raw fedmsg payload (dict)
    :return: KojiBuildEvent, or None when the topic does not match or the
        message carries no content
    """
    if event.get("topic") != "org.fedoraproject.prod.buildsys.build.state.change":
        return None

    # Some older messages had a different structure
    content = event.get("body") or event.get("msg")
    if not content:
        return None

    build_id = content.get("build_id")
    task_id = content.get("task_id")
    logger.info(f"Koji event: build_id={build_id} task_id={task_id}")

    new_state = (
        KojiBuildState.from_number(raw_new)
        if (raw_new := content.get("new")) is not None
        else None
    )
    old_state = (
        KojiBuildState.from_number(raw_old)
        if (raw_old := content.get("old")) is not None
        else None
    )

    version = content.get("version")
    epoch = content.get("epoch")

    # "release": "1.fc36" — strip the dist tag.
    # BUGFIX: use rsplit so releases with embedded dots (e.g. "0.2.fc36")
    # don't blow up the 2-tuple unpack with ValueError.
    release, _ = content.get("release").rsplit(".", 1)

    # "request": [
    #   "git+https://src.fedoraproject.org/rpms/packit.git#0eb3e12005cb18f15d3054020f7ac934c01eae08",
    #   "rawhide",
    #   {}
    # ],
    raw_git_ref, fedora_target, _ = content.get("request")
    project_url = raw_git_ref.split("#")[0].removeprefix("git+").removesuffix(".git")
    package_name, commit_hash = raw_git_ref.split("/")[-1].split(".git#")
    branch_name = fedora_target.removesuffix("-candidate")

    return KojiBuildEvent(
        build_id=build_id,
        state=new_state,
        package_name=package_name,
        branch_name=branch_name,
        commit_sha=commit_hash,
        # BUGFIX: was "rmps" — the dist-git namespace is "rpms",
        # as seen in the example URL above
        namespace="rpms",
        repo_name=package_name,
        project_url=project_url,
        epoch=epoch,
        version=version,
        release=release,
        rpm_build_task_id=task_id,
        web_url=KojiBuildEvent.get_koji_rpm_build_web_url(
            rpm_build_task_id=task_id,
            koji_web_url=ServiceConfig.get_service_config().koji_web_url,
        ),
        old_state=old_state,
    )
def db_trigger(self) -> Optional[AbstractTriggerDbType]:
    """Lazily resolve and cache the DB trigger of a merge-request pipeline."""
    if self._db_trigger or self.source != "merge_request_event":
        return self._db_trigger

    # Can't use self.project because that can be either source or target project.
    # We need target project here. Let's derive it from self.merge_request_url
    match = fullmatch(r"(\S+)/-/merge_requests/(\d+)", self.merge_request_url)
    if match:
        target_url, mr_id = match[1], match[2]
        project = ServiceConfig.get_service_config().get_project(url=target_url)
        self._db_trigger = PullRequestModel.get_or_create(
            pr_id=int(mr_id),
            namespace=project.namespace,
            repo_name=project.repo,
            project_url=target_url,
        )
    return self._db_trigger
def babysit_copr_build(self, build_id: int):
    """check status of a copr build and update it in DB

    Looks up all DB rows for the given copr build id; if the build has not
    ended yet the task retries, otherwise the end-of-build handler is run
    for every chroot row that is still "pending".
    """
    logger.debug(f"getting copr build ID {build_id} from DB")
    builds = CoprBuild.get_all_by_build_id(build_id)
    if builds:
        copr_client = CoprClient.create_from_config_file()
        build_copr = copr_client.build_proxy.get(build_id)

        if not build_copr.ended_on:
            logger.info("The copr build is still in progress")
            # NOTE(review): presumably self.retry() raises (celery task
            # retry), so execution does not continue past it — confirm
            self.retry()

        logger.info(f"The status is {build_copr.state}")

        # copr doesn't tell status of how a build in the chroot went:
        # https://bugzilla.redhat.com/show_bug.cgi?id=1813227
        for build in builds:
            if build.status != "pending":
                # another worker already processed this row
                logger.info(
                    f"DB state says {build.status}, "
                    "things were taken care of already, skipping."
                )
                continue
            # synthesize a "build finished" event so the regular
            # end-of-build handler can do the status update
            event = CoprBuildEvent(
                topic=FedmsgTopic.copr_build_finished.value,
                build_id=build_id,
                build={},
                chroot=build.target,
                status=(
                    COPR_API_SUCC_STATE
                    if build_copr.state == COPR_SUCC_STATE
                    else COPR_API_FAIL_STATE
                ),
                owner=build.owner,
                project_name=build.project_name,
                pkg=build_copr.source_package.get(
                    "name", ""
                ),  # this seems to be the SRPM name
                build_pg=build,
            )
            CoprBuildEndHandler(
                ServiceConfig.get_service_config(), job_config=None, event=event
            ).run()
    else:
        logger.warning(f"Copr build {build_id} not in DB.")
def config(self):
    """Service configuration, loaded on first access and cached."""
    cached = self._config
    if cached is None:
        cached = self._config = ServiceConfig.get_service_config()
    return cached
def service_config(self) -> ServiceConfig:
    """Service configuration, lazily loaded and cached on the instance."""
    self._service_config = self._service_config or ServiceConfig.get_service_config()
    return self._service_config
def get_srpm_build_check() -> str:
    """Return the SRPM-build check name for the current deployment."""
    is_prod = ServiceConfig.get_service_config().deployment == Deployment.prod
    return PACKIT_PROD_SRPM_CHECK if is_prod else PACKIT_STG_SRPM_CHECK
def get_account_check() -> str:
    """Return the account check name for the current deployment."""
    is_prod = ServiceConfig.get_service_config().deployment == Deployment.prod
    return PACKIT_PROD_ACCOUNT_CHECK if is_prod else PACKIT_STG_ACCOUNT_CHECK
def get_project(self) -> GitProject:
    """Return the GitProject, preferring the explicit URL over the trigger's."""
    url = self.project_url or self.db_trigger.project.project_url
    return ServiceConfig.get_service_config().get_project(url=url)
def get_project(self) -> GitProject:
    """Construct the GitProject from this event's project URL."""
    service_config = ServiceConfig.get_service_config()
    return service_config.get_project(self.project_url)
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import logging from http import HTTPStatus from flask import request from flask_restplus import Namespace, Resource, fields from packit_service.celerizer import celery_app from packit_service.config import ServiceConfig from packit_service.service.api.errors import ValidationFailed logger = logging.getLogger("packit_service") config = ServiceConfig.get_service_config() ns = Namespace("testing-farm", description="Testing Farm") payload_artifact = ns.model( "Testing Farm results artifact", { "commit-sha": fields.String(required=True, example="08bfc38f15082bdf9ba964c3bbd04878666d1d56"), "copr-chroot": fields.String(required=True, example="fedora-30-x86_64"), "copr-repo-name": fields.String(required=True, example="packit/packit-service-hello-world-14"), "git-ref":
def check_copr_build(build_id: int) -> bool:
    """
    Check the copr_build with given id and refresh the status if needed.

    Used in the babysit task.

    :param build_id: id of the copr_build (CoprBuildModel.build.id)
    :return: True if in case of successful run, False when we need to retry
    """
    logger.debug(f"Getting copr build ID {build_id} from DB.")
    builds = CoprBuildModel.get_all_by_build_id(build_id)
    if not builds:
        # nothing we can do — report success so the task is not retried
        logger.warning(f"Copr build {build_id} not in DB.")
        return True

    copr_client = CoprClient.create_from_config_file()
    build_copr = copr_client.build_proxy.get(build_id)

    if not build_copr.ended_on:
        # still running — signal the caller to retry later
        logger.info("The copr build is still in progress.")
        return False

    logger.info(f"The status is {build_copr.state!r}.")

    for build in builds:
        if build.status != "pending":
            # this chroot row was already processed elsewhere
            logger.info(
                f"DB state says {build.status!r}, "
                "things were taken care of already, skipping."
            )
            continue
        # per-chroot result is fetched separately via build_chroot_proxy
        chroot_build = copr_client.build_chroot_proxy.get(build_id, build.target)
        # synthesize the "build finished" event the regular handler expects
        event = CoprBuildEvent(
            topic=FedmsgTopic.copr_build_finished.value,
            build_id=build_id,
            build=build,
            chroot=build.target,
            status=(
                COPR_API_SUCC_STATE
                if chroot_build.state == COPR_SUCC_STATE
                else COPR_API_FAIL_STATE
            ),
            owner=build.owner,
            project_name=build.project_name,
            pkg=build_copr.source_package.get(
                "name", ""
            ),  # this seems to be the SRPM name
            timestamp=chroot_build.ended_on,
        )
        # run the end-of-build handler for every matching job config
        job_configs = get_config_for_handler_kls(
            handler_kls=CoprBuildEndHandler,
            event=event,
            package_config=event.get_package_config(),
        )
        for job_config in job_configs:
            CoprBuildEndHandler(
                ServiceConfig.get_service_config(),
                job_config=job_config,
                event=event,
            ).run()
    return True
def parse_check_rerun_event(
    event,
) -> Optional[
    Union[CheckRerunCommitEvent, CheckRerunPullRequestEvent, CheckRerunReleaseEvent]
]:
    """Look into the provided event and see if it is Github check rerun event.

    Returns the concrete CheckRerun* event matching the DB trigger the
    original check run was created for, or None when the payload is not a
    rerun of one of our checks or cannot be mapped back to a trigger.
    """
    if not (
        nested_get(event, "check_run")
        and nested_get(event, "action") == "rerequested"
    ):
        return None

    check_name = nested_get(event, "check_run", "name")
    logger.info(f"Github check run {check_name} rerun event.")

    # only handle reruns of check runs created by our own GitHub app
    # (per deployment: prod vs stg app slug)
    deployment = ServiceConfig.get_service_config().deployment
    app = nested_get(event, "check_run", "app", "slug")
    if (deployment == Deployment.prod and app != "packit-as-a-service") or (
        deployment == Deployment.stg and app != "packit-as-a-service-stg"
    ):
        logger.warning(f"Check run created by {app} and not us.")
        return None

    check_name_job, check_name_target = None, None
    if ":" in check_name:
        # e.g. "rpm-build:fedora-34-x86_64"
        check_name_job, _, check_name_target = check_name.partition(":")
        if check_name_job not in MAP_CHECK_PREFIX_TO_HANDLER:
            logger.warning(
                f"{check_name_job} not in {list(MAP_CHECK_PREFIX_TO_HANDLER.keys())}"
            )
            check_name_job = None
    elif "/" in check_name:
        # for backward compatibility, e.g. packit/rpm-build-fedora-34-x86_64
        # TODO: remove this (whole elif) after some time
        _, _, check_name_job_info = check_name.partition("/")
        for job_name in MAP_CHECK_PREFIX_TO_HANDLER.keys():
            if check_name_job_info.startswith(job_name):
                check_name_job = job_name
                # e.g. [rpm-build-]fedora-34-x86_64
                check_name_target = check_name_job_info[(len(job_name) + 1):]  # noqa
                break

    if not (check_name_job and check_name_target):
        logger.warning(
            f"We were not able to parse the job and target "
            f"from the check run name {check_name}."
        )
        return None

    repo_namespace = nested_get(event, "repository", "owner", "login")
    repo_name = nested_get(event, "repository", "name")
    actor = nested_get(event, "sender", "login")
    if not (repo_namespace and repo_name):
        logger.warning("No full name of the repository.")
        return None

    https_url = event["repository"]["html_url"]
    commit_sha = nested_get(event, "check_run", "head_sha")

    # external_id carries the JobTriggerModel id set when the check run
    # was originally created
    external_id = nested_get(event, "check_run", "external_id")
    if not external_id:
        logger.warning("No external_id to identify the original trigger provided.")
        return None

    job_trigger = JobTriggerModel.get_by_id(int(external_id))
    if not job_trigger:
        logger.warning(f"Job trigger with ID {external_id} not found.")
        return None

    db_trigger = job_trigger.get_trigger_object()
    logger.info(f"Original trigger: {db_trigger}")

    # build the concrete event type matching the kind of the original trigger
    event = None
    if isinstance(db_trigger, PullRequestModel):
        event = CheckRerunPullRequestEvent(
            repo_namespace=repo_namespace,
            repo_name=repo_name,
            project_url=https_url,
            commit_sha=commit_sha,
            pr_id=db_trigger.pr_id,
            check_name_job=check_name_job,
            check_name_target=check_name_target,
            db_trigger=db_trigger,
            actor=actor,
        )
    elif isinstance(db_trigger, ProjectReleaseModel):
        event = CheckRerunReleaseEvent(
            repo_namespace=repo_namespace,
            repo_name=repo_name,
            project_url=https_url,
            commit_sha=commit_sha,
            tag_name=db_trigger.tag_name,
            check_name_job=check_name_job,
            check_name_target=check_name_target,
            db_trigger=db_trigger,
            actor=actor,
        )
    elif isinstance(db_trigger, GitBranchModel):
        event = CheckRerunCommitEvent(
            repo_namespace=repo_namespace,
            repo_name=repo_name,
            project_url=https_url,
            commit_sha=commit_sha,
            git_ref=db_trigger.name,
            check_name_job=check_name_job,
            check_name_target=check_name_target,
            db_trigger=db_trigger,
            actor=actor,
        )
    return event