def reschedule_build_chroot():
    """
    Reset a starting/running build chroot back to the "pending" state.

    Expects a JSON body carrying "build_id" and "chroot"; answers with a
    JSON dict holding "result" ("done" / "noop" / "bad request") plus an
    optional explanatory "msg".
    """
    response = {}
    payload = flask.request.json

    # Guard clause: both keys are mandatory.
    if "build_id" not in payload or "chroot" not in payload:
        response["result"] = "bad request"
        response["msg"] = "Request missing `build_id` and/or `chroot`"
        return flask.jsonify(response)

    build = ComplexLogic.get_build_safe(payload["build_id"])
    if not build:
        response["result"] = "noop"
        response["msg"] = "Build {} wasn't found".format(payload["build_id"])
    elif build.canceled:
        response["result"] = "noop"
        response["msg"] = "build was cancelled, ignoring"
    else:
        chroot = payload["chroot"]
        build_chroot = build.chroots_dict_by_name.get(chroot)
        run_statuses = {StatusEnum("starting"), StatusEnum("running")}
        if build_chroot and build_chroot.status in run_statuses:
            log.info("rescheduling build {} chroot: {}".format(build.id, build_chroot.name))
            BuildsLogic.update_state_from_dict(build, {
                "chroot": chroot,
                "status": StatusEnum("pending")
            })
            db.session.commit()
            response["result"] = "done"
        else:
            response["result"] = "noop"
            response["msg"] = "build is not in running states, ignoring"

    return flask.jsonify(response)
def test_delete_build_exceptions(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
    """
    delete_build must refuse:
    - a non-admin user deleting a build from someone else's copr,
    - a build with a chroot still running,
    - a build inside a persistent copr (even for its own user).
    """
    # Finish b4 completely so only the permission check can reject it.
    for bc in self.b4_bc:
        bc.status = StatusEnum("succeeded")
        bc.ended_on = time.time()
    self.u1.admin = False
    self.db.session.add_all(self.b4_bc)
    self.db.session.add(self.b4)
    self.db.session.add(self.u1)
    self.db.session.commit()
    with pytest.raises(InsufficientRightsException):
        BuildsLogic.delete_build(self.u1, self.b4)

    # A still-running chroot makes the build non-deletable.
    self.b1_bc[0].status = StatusEnum("running")
    self.db.session.add(self.b1_bc[0])
    self.db.session.commit()
    with pytest.raises(ActionInProgressException):
        BuildsLogic.delete_build(self.u1, self.b1)

    # Builds in a persistent copr may never be deleted.
    self.copr_persistent = models.Copr(name=u"persistent_copr", user=self.u2, persistent=True)
    self.build_persistent = models.Build(copr=self.copr_persistent, package=self.p2, user=self.u2, submitted_on=100)
    with pytest.raises(InsufficientRightsException):
        BuildsLogic.delete_build(self.u2, self.build_persistent)
def cancelable(self):
    """
    Tell whether this build may still be canceled.

    Only a build that has not actually started yet qualifies, i.e. one
    whose status is still "pending" or "importing".
    """
    return self.status in (StatusEnum("pending"), StatusEnum("importing"))
def repeatable(self):
    """
    Tell whether this build can be resubmitted.

    A build is repeatable only once it is no longer in flight, i.e. its
    status is none of "pending", "starting" or "running".
    """
    in_flight = (
        StatusEnum("pending"),
        StatusEnum("starting"),
        StatusEnum("running"),
    )
    return self.status not in in_flight
def status(self):
    """
    Derive the overall build status from its chroots' statuses.

    A canceled build reports "canceled" outright; otherwise the first
    state (in priority order below) present among the chroot states wins.
    Returns None implicitly when no chroot state matches.
    """
    if self.canceled:
        return StatusEnum("canceled")

    priority = ("running", "starting", "importing", "pending",
                "failed", "succeeded", "skipped")
    for name in priority:
        candidate = StatusEnum(name)
        if candidate in self.chroot_states:
            return candidate
def cancel_build(cls, user, build): if not user.can_build_in(build.copr): raise exceptions.InsufficientRightsException( "You are not allowed to cancel this build.") if not build.cancelable: if build.status == StatusEnum("starting") or build.status == StatusEnum("running"): err_msg = "Cannot cancel build {} which is still running".format(build.id) else: err_msg = "Cannot cancel build {}".format(build.id) raise exceptions.RequestCannotBeExecuted(err_msg) build.canceled = True for chroot in build.build_chroots: chroot.status = 2 # canceled if chroot.ended_on is not None: chroot.ended_on = time.time()
def test_delete_build_as_admin(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
    """
    An admin may delete another user's finished build; doing so must
    enqueue exactly one delete action (carrying the result dir and the
    chroot names) and remove the Build row itself.
    """
    self.b4.pkgs = "http://example.com/copr-keygen-1.58-1.fc20.src.rpm"
    expected_dir = self.b4.result_dir_name
    # Mark every chroot finished so the build becomes deletable.
    for bc in self.b4_bc:
        bc.status = StatusEnum("succeeded")
        bc.ended_on = time.time()
    self.u1.admin = True
    self.db.session.add_all(self.b4_bc)
    self.db.session.add(self.b4)
    self.db.session.add(self.u1)
    self.db.session.commit()

    expected_chroots_to_delete = set()
    for bchroot in self.b4_bc:
        expected_chroots_to_delete.add(bchroot.name)

    assert len(ActionsLogic.get_many().all()) == 0
    BuildsLogic.delete_build(self.u1, self.b4)
    self.db.session.commit()

    # Exactly one backend delete action must have been produced.
    assert len(ActionsLogic.get_many().all()) == 1
    action = ActionsLogic.get_many().one()
    delete_data = json.loads(action.data)
    assert "chroots" in delete_data
    assert delete_data["result_dir_name"] == expected_dir
    assert expected_chroots_to_delete == set(delete_data["chroots"])

    # The build itself is gone from the database.
    with pytest.raises(NoResultFound):
        BuildsLogic.get(self.b4.id).one()
def test_build_put_cancel(
        self, f_users, f_coprs,
        f_mock_chroots, f_builds,
        f_users_api, ):
    """
    PUT /api_2/builds/<id> with {"state": "canceled"} on a pending build
    must succeed (201) and the build must subsequently report "canceled".
    """
    # Reset b1 to a cancelable (pending, not ended) state.
    for bc in self.b1_bc:
        bc.status = StatusEnum("pending")
        bc.ended_on = None
    self.b1.ended_on = None
    self.db.session.add_all(self.b1_bc)
    self.db.session.add(self.b1)
    self.db.session.commit()

    href = "/api_2/builds/{}".format(self.b1.id)
    build_dict = {"state": "canceled"}
    r = self.request_rest_api_with_auth(href, method="put", content=build_dict)
    assert r.status_code == 201

    # Follow the Location header and verify the new state is visible.
    r2 = self.tc.get(r.headers["Location"])
    assert r2.status_code == 200
    obj = json.loads(r2.data.decode("utf-8"))
    assert obj["build"]["state"] == "canceled"
def test_build_put_wrong_user(
        self, f_users, f_coprs,
        f_mock_chroots, f_builds,
        f_users_api, ):
    """
    A user without rights on the copr must receive 403 when trying to
    cancel somebody else's build through the REST API.
    """
    # Authenticate as u2, who is not the owner/submitter of b1.
    login = self.u2.api_login
    token = self.u2.api_token
    for bc in self.b1_bc:
        bc.status = StatusEnum("pending")
        bc.ended_on = None
    self.b1.ended_on = None
    self.db.session.add_all(self.b1_bc)
    self.db.session.add(self.b1)
    self.db.session.commit()

    href = "/api_2/builds/{}".format(self.b1.id)
    build_dict = {"state": "canceled"}
    r = self.request_rest_api_with_auth(href, method="put", login=login, token=token, content=build_dict)
    assert r.status_code == 403
def reschedule_all_running():
    """
    Flip every "starting"/"running" build chroot back to "pending".

    Ad-hoc handler; remove after implementation of persistent task
    handling in copr-backend.
    """
    stuck_chroots = (
        BuildsLogic.get_build_tasks(StatusEnum("starting")).all()
        + BuildsLogic.get_build_tasks(StatusEnum("running")).all()
    )
    if stuck_chroots:
        for chroot_task in stuck_chroots:
            chroot_task.status = StatusEnum("pending")
            db.session.add(chroot_task)
        db.session.commit()
    return "OK", 200
def get_queues_size():
    """Report the number of waiting, running and importing build tasks."""
    # todo: check if count works slowly
    return dict(
        waiting=BuildsLogic.get_build_task_queue().count(),
        running=BuildsLogic.get_build_tasks(StatusEnum("running")).count(),
        importing=BuildsLogic.get_build_tasks(helpers.StatusEnum("importing")).count(),
    )
def state(self):
    """
    Human-readable textual status of this build chroot; "unknown" when
    no status has been recorded yet.
    """
    if self.status is None:
        return "unknown"
    return StatusEnum(self.status)
def test_build_queue_4(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
    """
    Chroots stuck in "running" past the build timeout (and never ended)
    must be picked up again by the task queue; timed-out "failed"
    chroots must stay out of it.
    """
    time_now = int(time.time())
    # b1/b2: running way past MAX_BUILD_TIMEOUT, never ended -> requeue.
    for build_chroots in [self.b1_bc, self.b2_bc]:
        for build_chroot in build_chroots:
            build_chroot.status = StatusEnum("running")
            build_chroot.started_on = time_now - 2 * MAX_BUILD_TIMEOUT
            build_chroot.ended_on = None
    # b3/b4: failed -> excluded even though equally old and unfinished.
    for build_chroots in [self.b3_bc, self.b4_bc]:
        for build_chroot in build_chroots:
            build_chroot.status = StatusEnum("failed")
            build_chroot.started_on = time_now - 2 * MAX_BUILD_TIMEOUT
            build_chroot.ended_on = None
    self.db.session.commit()

    data = BuildsLogic.get_build_task_queue().all()
    assert len(data) == 2
    assert set([data[0], data[1]]) == set([self.b1_bc[0], self.b2_bc[0]])
def test_delete_build_exceptions(
        self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
    """
    delete_build must refuse a non-admin deleting someone else's build,
    and any build that still has a running chroot.
    """
    # Finish b4 completely so only the permission check can reject it.
    for bc in self.b4_bc:
        bc.status = StatusEnum("succeeded")
        bc.ended_on = time.time()
    self.u1.admin = False
    self.db.session.add_all(self.b4_bc)
    self.db.session.add(self.b4)
    self.db.session.add(self.u1)
    self.db.session.commit()
    with pytest.raises(InsufficientRightsException):
        BuildsLogic.delete_build(self.u1, self.b4)

    # A still-running chroot makes the build non-deletable.
    self.b1_bc[0].status = StatusEnum("running")
    self.db.session.add(self.b1_bc[0])
    self.db.session.commit()
    with pytest.raises(ActionInProgressException):
        BuildsLogic.delete_build(self.u1, self.b1)
def f_builds(self):
    """
    Fixture: one package per copr and four builds b1..b4, each with a
    BuildChroot per active chroot of its copr. b1's chroots start as
    "succeeded"; b1 and b2 carry started_on/ended_on timestamps.
    """
    self.p1 = models.Package(copr=self.c1, name="hello-world", source_type=0)
    self.p2 = models.Package(copr=self.c2, name="hello-world", source_type=0)
    self.b1 = models.Build(copr=self.c1, package=self.p1, user=self.u1, submitted_on=50)
    self.b2 = models.Build(copr=self.c1, package=self.p1, user=self.u2, submitted_on=10)
    self.b3 = models.Build(copr=self.c2, package=self.p2, user=self.u2, submitted_on=10)
    self.b4 = models.Build(copr=self.c2, package=self.p2, user=self.u2, submitted_on=100)
    self.basic_builds = [self.b1, self.b2, self.b3, self.b4]
    self.b1_bc = []
    self.b2_bc = []
    self.b3_bc = []
    self.b4_bc = []

    for build, build_chroots in zip(
            [self.b1, self.b2, self.b3, self.b4],
            [self.b1_bc, self.b2_bc, self.b3_bc, self.b4_bc]):
        status = None
        if build is self.b1:
            # this build is going to be deleted
            status = StatusEnum("succeeded")
        for chroot in build.copr.active_chroots:
            buildchroot = models.BuildChroot(
                build=build,
                mock_chroot=chroot,
                status=status,
                git_hash="12345",
            )
            if build is self.b1 or build is self.b2:
                buildchroot.started_on = 139086644000
                buildchroot.ended_on = 149086644000
                # NOTE: build.ended_on is (re)assigned once per chroot here;
                # the final value is the same constant each time.
                build.ended_on = 149086644000
            build_chroots.append(buildchroot)
            self.db.session.add(buildchroot)

    self.db.session.add_all([self.b1, self.b2, self.b3, self.b4])
def test_copr_build_non_submitter_cannot_cancel_build(
        self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
    """
    POSTing to the cancel_build endpoint without the submitter's
    session must leave the build un-canceled.
    """
    # Put b1 into a cancelable (pending) state first.
    for bc in self.b1_bc:
        bc.status = StatusEnum("pending")
        bc.ended_on = None
    self.db.session.add_all(self.b1_bc)
    self.db.session.add_all([self.u1, self.c1, self.b1])
    # self.test_client carries no authenticated session here (unlike
    # self.tc helpers elsewhere) — presumably that is the point; verify.
    self.test_client.post(
        "/coprs/{0}/{1}/cancel_build/{2}/".format(self.u1.name, self.c1.name, self.b1.id),
        data={},
        follow_redirects=True)
    assert self.models.Build.query.first().canceled is False
def update_state_from_dict(cls, build, upd_dict):
    """
    Update `build` (and, when "chroot" is present, the matching
    BuildChroot) from a backend status dict.

    :param build: models.Build to update
    :param upd_dict:
        example:
        {
          "builds":[
           {
             "id": 1,
             "copr_id": 2,
             "started_on": 139086644000
           },
           {
             "id": 2,
             "copr_id": 1,
             "status": 0,
             "chroot": "fedora-18-x86_64",
             "results": "http://server/results/foo/bar/",
             "ended_on": 139086644000
           }]
        }
    """
    log.info("Updating build: {} by: {}".format(build.id, upd_dict))
    if "chroot" in upd_dict:
        # update respective chroot status
        for build_chroot in build.build_chroots:
            if build_chroot.name == upd_dict["chroot"]:
                # Never downgrade a chroot that already reached a terminal state.
                if "status" in upd_dict and build_chroot.status not in BuildsLogic.terminal_states:
                    build_chroot.status = upd_dict["status"]
                # Terminal status: record (or default) the end timestamp.
                if upd_dict.get("status") in BuildsLogic.terminal_states:
                    build_chroot.ended_on = upd_dict.get("ended_on") or time.time()
                # "starting": record (or default) the start timestamp.
                if upd_dict.get("status") == StatusEnum("starting"):
                    build_chroot.started_on = upd_dict.get("started_on") or time.time()
                db.session.add(build_chroot)

    for attr in ["results", "built_packages"]:
        value = upd_dict.get(attr, None)
        if value:
            setattr(build, attr, value)

    # Build is considered ended once every chroot has an end time.
    if build.max_ended_on is not None:
        build.ended_on = build.max_ended_on
    db.session.add(build)
def get_queues_size():
    """Report queue sizes, separating background from regular tasks."""
    # todo: check if count works slowly
    counts = {
        "waiting": BuildsLogic.get_build_task_queue(is_background=False).count(),
        "waiting_bg": BuildsLogic.get_build_task_queue(is_background=True).count(),
        "running": BuildsLogic.get_build_tasks(StatusEnum("running")).count(),
        "importing": BuildsLogic.get_build_tasks(
            helpers.StatusEnum("importing"), background=False).count(),
        "importing_bg": BuildsLogic.get_build_tasks(
            helpers.StatusEnum("importing"), background=True).count(),
    }
    return counts
def fork_build(self, build, fcopr, fpackage):
    """
    Duplicate `build` (and all its chroots) into the forked copr and
    package; forked chroots are marked with the "forked" status.
    """
    fbuild = self.create_object(
        models.Build, build, exclude=["id", "copr_id", "package_id"])
    fbuild.copr = fcopr
    fbuild.package = fpackage

    forked_chroots = []
    for original_chroot in build.build_chroots:
        copied = self.create_object(
            models.BuildChroot, original_chroot, exclude=["id", "build_id"])
        copied.status = StatusEnum("forked")
        forked_chroots.append(copied)
    fbuild.build_chroots = forked_chroots

    db.session.add(fbuild)
    db.session.commit()
    return fbuild
def starting_build():
    """
    Check if the build is not cancelled and set it to running state
    """
    result = {"can_start": False}
    payload = flask.request.json

    if "build_id" in payload and "chroot" in payload:
        build = ComplexLogic.get_build_safe(payload["build_id"])
        chroot = payload.get("chroot")
        # Only a known, non-canceled build with a chroot may start.
        if build and chroot and not build.canceled:
            log.info("mark build {} chroot {} as starting".format(build.id, chroot))
            BuildsLogic.update_state_from_dict(build, {
                "chroot": chroot,
                "status": StatusEnum("starting")
            })
            db.session.commit()
            result["can_start"] = True

    return flask.jsonify(result)
def test_collection_ok_by_state(self, f_users, f_coprs, f_mock_chroots_many, f_build_many_chroots, f_db, f_users_api):
    """
    Filtering /api_2/build_tasks by every known state must return
    exactly the chroots in that state, and the self link must encode
    the same query parameters we sent.
    """
    self.db.session.commit()
    for status in StatusEnum.vals.values():
        expected_chroots = set([
            name
            for name, chroot_status in self.status_by_chroot.items()
            if chroot_status == status
        ])

        href = "/api_2/build_tasks?state={}&limit=50".format(StatusEnum(status))
        r0 = self.tc.get(href)
        assert r0.status_code == 200
        obj = json.loads(r0.data.decode("utf-8"))
        assert len(obj["build_tasks"]) == len(expected_chroots)
        assert set(bt["build_task"]["chroot_name"]
                   for bt in obj["build_tasks"]) == expected_chroots
        # Compare as parsed query dicts, not raw strings, to ignore ordering.
        assert parse_qs(urlparse(obj["_links"]["self"]["href"]).query) \
            == parse_qs(urlparse(href).query)
def filter_by_state(cls, query, state):
    """Narrow `query` to BuildChroots whose status matches the named state."""
    wanted_status = StatusEnum(state)
    return query.filter(models.BuildChroot.status == wanted_status)
def add(cls, user, pkgs, copr, source_type=None, source_json=None,
        repos=None, chroots=None, timeout=None, enable_net=True,
        git_hashes=None, skip_import=False):
    """
    Create a new Build in `copr` plus one BuildChroot per selected
    (or every active) chroot, all added to the current db session.

    :param user: submitting models.User
    :param pkgs: single src.rpm URL; must contain no whitespace
    :param copr: target models.Copr
    :param source_type: build source kind; defaults to "srpm_link"
    :param source_json: JSON source description; defaulted with source_type
    :param repos: extra repos string; defaults to the copr's own repos
    :param chroots: explicit chroot list; defaults to copr.active_chroots
    :param timeout: optional build timeout override
    :param enable_net: whether networking is enabled during the build
    :param git_hashes: mapping chroot name -> git hash (dist-git rebuilds)
    :param skip_import: start chroots as "pending" instead of "importing"
    :raises exceptions.MalformedArgumentException: `pkgs` contains whitespace
    :return: the new models.Build (not yet committed)
    """
    if chroots is None:
        chroots = []

    coprs_logic.CoprsLogic.raise_if_unfinished_blocking_action(
        copr, "Can't build while there is an operation in progress: {action}")
    users_logic.UsersLogic.raise_if_cant_build_in_copr(
        user, copr, "You don't have permissions to build in this copr.")

    if not repos:
        repos = copr.repos

    # todo: eliminate pkgs and this check
    if " " in pkgs or "\n" in pkgs or "\t" in pkgs or pkgs.strip() != pkgs:
        raise exceptions.MalformedArgumentException(
            "Trying to create a build using src_pkg "
            "with bad characters. Forgot to split?")

    # just temporary to keep compatibility
    if not source_type or not source_json:
        source_type = helpers.BuildSourceEnum("srpm_link")
        source_json = json.dumps({"url": pkgs})

    build = models.Build(
        user=user,
        pkgs=pkgs,
        copr=copr,
        repos=repos,
        source_type=source_type,
        source_json=source_json,
        submitted_on=int(time.time()),
        enable_net=bool(enable_net),
    )

    if timeout:
        build.timeout = timeout or DEFAULT_BUILD_TIMEOUT

    db.session.add(build)

    # add BuildChroot object for each active (or selected) chroot
    # this copr is assigned to
    if not chroots:
        chroots = copr.active_chroots

    status = helpers.StatusEnum("importing")
    if skip_import:
        status = StatusEnum("pending")

    for chroot in chroots:
        git_hash = None
        if git_hashes:
            git_hash = git_hashes.get(chroot.name)
        buildchroot = models.BuildChroot(build=build,
                                         status=status,
                                         mock_chroot=chroot,
                                         git_hash=git_hash)
        db.session.add(buildchroot)

    return build
class BuildsLogic(object):
    """
    Query and manipulation helpers for Build and BuildChroot objects.
    All query-returning methods hand back unexecuted SQLAlchemy queries.
    """

    @classmethod
    def get(cls, build_id):
        """Query for the Build with the given id."""
        return models.Build.query.filter(models.Build.id == build_id)

    # todo: move methods operating with BuildChroot to BuildChrootLogic
    @classmethod
    def get_build_tasks(cls, status):
        """Query BuildChroots in `status`, newest build first."""
        return models.BuildChroot.query.filter(models.BuildChroot.status == status) \
            .order_by(models.BuildChroot.build_id.desc())

    @classmethod
    def get_recent_tasks(cls, user=None, limit=None):
        """Query up to `limit` (default 100) ended builds, newest first,
        optionally restricted to those submitted by `user`."""
        if not limit:
            limit = 100

        query = models.Build.query \
            .filter(models.Build.ended_on.isnot(None)) \
            .order_by(models.Build.ended_on.desc())

        if user is not None:
            query = query.filter(models.Build.user_id == user.id)

        query = query \
            .order_by(models.Build.id.desc()) \
            .limit(limit)

        return query

    @classmethod
    def get_build_importing_queue(cls):
        """
        Returns BuildChroots which are waiting to be uploaded to dist git
        """
        query = (models.BuildChroot.query.join(models.Build)
                 .filter(models.Build.canceled == false())
                 .filter(models.BuildChroot.status == helpers.StatusEnum("importing")))
        query = query.order_by(models.BuildChroot.build_id.asc())
        return query

    @classmethod
    def get_build_task_queue(cls):
        """
        Returns BuildChroots which are
        - waiting to be built or
        - older than 2 hours and unfinished
        """
        # todo: filter out build without package
        query = (models.BuildChroot.query.join(models.Build)
                 .filter(models.Build.canceled == false())
                 .filter(or_(
                     models.BuildChroot.status == helpers.StatusEnum("pending"),
                     models.BuildChroot.status == helpers.StatusEnum("starting"),
                     and_(
                         # We are moving ended_on to the BuildChroot, now it should be reliable,
                         # so we don't want to reschedule failed chroots
                         # models.BuildChroot.status.in_([
                         #     # Bug 1206562 - Cannot delete Copr because it incorrectly thinks
                         #     # there are unfinished builds. Solution: `failed` but unfinished
                         #     # (ended_on is null) builds should be rescheduled.
                         #     # todo: we need to be sure that correct `failed` set is set together wtih `ended_on`
                         #     helpers.StatusEnum("running"),
                         #     helpers.StatusEnum("failed")
                         # ]),
                         models.BuildChroot.status == helpers.StatusEnum("running"),
                         models.BuildChroot.started_on < int(time.time() - 1.1 * MAX_BUILD_TIMEOUT),
                         models.BuildChroot.ended_on.is_(None)))))
        query = query.order_by(models.BuildChroot.build_id.asc())
        return query

    @classmethod
    def get_multiple(cls):
        """All builds, newest first."""
        return models.Build.query.order_by(models.Build.id.desc())

    @classmethod
    def get_multiple_by_copr(cls, copr):
        """ Get collection of builds in copr sorted by build_id descending
        """
        return cls.get_multiple().filter(models.Build.copr == copr)

    @classmethod
    def get_multiple_by_owner(cls, user):
        """ Get collection of builds in copr sorted by build_id descending
        form the copr owned by `user`
        """
        return cls.get_multiple().join(models.Build.copr).filter(
            models.Copr.owner == user)

    @classmethod
    def get_copr_builds_list(cls, copr):
        """Raw-SQL listing of one summary row per build in `copr`.

        NOTE(review): the query is built with str.format; this is only
        safe because copr.id is a database-assigned integer — confirm
        before reusing the pattern with any user-controlled value.
        """
        query_select = """
            SELECT build.id, MAX(package.name) AS pkg_name, build.pkg_version,
                build.submitted_on, MIN(statuses.started_on) AS started_on,
                MAX(statuses.ended_on) AS ended_on,
                order_to_status(MIN(statuses.st)) AS status, build.canceled,
                MIN("group".name) AS group_name, MIN(copr.name) as copr_name,
                MIN("user".username) as owner_name
            FROM build
            LEFT OUTER JOIN package ON build.package_id = package.id
            LEFT OUTER JOIN (SELECT build_chroot.build_id, started_on, ended_on,
                status_to_order(status) AS st FROM build_chroot) AS statuses
                ON statuses.build_id=build.id
            LEFT OUTER JOIN copr ON copr.id = build.copr_id
            LEFT OUTER JOIN "user" ON copr.owner_id = "user".id
            LEFT OUTER JOIN "group" ON copr.group_id = "group".id
            WHERE build.copr_id = {copr_id}
            GROUP BY build.id;
        """.format(copr_id=copr.id)

        if db.engine.url.drivername == "sqlite":
            # SQLite has no stored functions, so register Python versions
            # of status_to_order/order_to_status on the connection. They
            # are inverses of each other; 1000 marks an unknown status.
            def sqlite_status_to_order(x):
                if x == 0:
                    return 0
                elif x == 3:
                    return 1
                elif x == 6:
                    return 2
                elif x == 7:
                    return 3
                elif x == 4:
                    return 4
                elif x == 1:
                    return 5
                elif x == 5:
                    return 6
                return 1000

            def sqlite_order_to_status(x):
                if x == 0:
                    return 0
                elif x == 1:
                    return 3
                elif x == 2:
                    return 6
                elif x == 3:
                    return 7
                elif x == 4:
                    return 4
                elif x == 5:
                    return 1
                elif x == 6:
                    return 5
                return 1000

            conn = db.engine.connect()
            conn.connection.create_function("status_to_order", 1, sqlite_status_to_order)
            conn.connection.create_function("order_to_status", 1, sqlite_order_to_status)
            result = conn.execute(text(query_select))
        else:
            result = db.engine.execute(text(query_select))
        return result

    @classmethod
    def join_group(cls, query):
        """Join Copr (and optionally its Group) into `query`."""
        return query.join(models.Copr).outerjoin(models.Group)

    @classmethod
    def get_multiple_by_name(cls, username, coprname):
        """Builds in the copr `coprname` owned by `username`."""
        query = cls.get_multiple()
        return (query.join(models.Build.copr)
                .options(db.contains_eager(models.Build.copr))
                .join(models.Copr.owner)
                .filter(models.Copr.name == coprname)
                .filter(models.User.username == username))

    @classmethod
    def get_importing(cls):
        """
        Return builds that are waiting for dist git to import the sources.
        """
        query = (models.Build.query.join(models.Build.copr).join(models.User)
                 .options(db.contains_eager(models.Build.copr))
                 .options(db.contains_eager("copr.owner"))
                 .filter((models.Build.started_on == None)
                         | (models.Build.started_on < int(time.time() - 7200)))
                 .filter(models.Build.ended_on == None)
                 .filter(models.Build.canceled == False)
                 .order_by(models.Build.submitted_on.asc()))
        return query

    @classmethod
    def get_waiting(cls):
        """
        Return builds that aren't both started and finished
        (if build start submission fails, we still want to mark
        the build as non-waiting, if it ended)
        this has very different goal then get_multiple, so implement it alone
        """
        query = (models.Build.query.join(models.Build.copr).join(models.User)
                 .join(models.BuildChroot)
                 .options(db.contains_eager(models.Build.copr))
                 .options(db.contains_eager("copr.owner"))
                 .filter((models.BuildChroot.started_on.is_(None))
                         | (models.BuildChroot.started_on < int(time.time() - 7200)))
                 .filter(models.BuildChroot.ended_on.is_(None))
                 .filter(models.Build.canceled == false())
                 .order_by(models.Build.submitted_on.asc()))
        return query

    @classmethod
    def get_by_ids(cls, ids):
        """Query Builds whose id is in the iterable `ids`."""
        return models.Build.query.filter(models.Build.id.in_(ids))

    @classmethod
    def get_by_id(cls, build_id):
        """Query the Build with the given id."""
        return models.Build.query.filter(models.Build.id == build_id)

    @classmethod
    def create_new_from_other_build(cls, user, copr, source_build,
                                    chroot_names=None, **build_options):
        """Resubmit `source_build` into `copr`, reusing its dist-git
        hashes when available (otherwise re-importing from its link)."""
        # check which chroots we need
        chroots = []
        for chroot in copr.active_chroots:
            if chroot.name in chroot_names:
                chroots.append(chroot)

        # I don't want to import anything, just rebuild what's in dist git
        skip_import = True
        git_hashes = {}
        for chroot in source_build.build_chroots:
            if not chroot.git_hash:
                # I got an old build from time we didn't use dist git
                # So I'll submit it as a new build using it's link
                skip_import = False
                git_hashes = None
                flask.flash("This build is not in Dist Git. Trying to import the package again.")
                break
            git_hashes[chroot.name] = chroot.git_hash

        # try:
        build = cls.add(user=user,
                        pkgs=source_build.pkgs,
                        copr=copr,
                        chroots=chroots,
                        source_type=source_build.source_type,
                        source_json=source_build.source_json,
                        enable_net=build_options.get("enabled_net", source_build.enable_net),
                        git_hashes=git_hashes,
                        skip_import=skip_import)
        build.package_id = source_build.package_id
        build.pkg_version = source_build.pkg_version
        if user.proven:
            if "timeout" in build_options:
                build.timeout = build_options["timeout"]
        return build

    @classmethod
    def create_new_from_url(cls, user, copr, srpm_url,
                            chroot_names=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :type chroot_names: List[str]
        :rtype: models.Build
        """
        if chroot_names is None:
            chroots = [c for c in copr.active_chroots]
        else:
            chroots = []
            for chroot in copr.active_chroots:
                if chroot.name in chroot_names:
                    chroots.append(chroot)

        source_type = helpers.BuildSourceEnum("srpm_link")
        source_json = json.dumps({"url": srpm_url})

        # try:
        build = cls.add(user=user,
                        pkgs=srpm_url,
                        copr=copr,
                        chroots=chroots,
                        source_type=source_type,
                        source_json=source_json,
                        enable_net=build_options.get("enabled_net", copr.build_enable_net))
        if user.proven:
            if "timeout" in build_options:
                build.timeout = build_options["timeout"]
        return build

    @classmethod
    def create_new_from_tito(cls, user, copr, git_url, git_dir, git_branch,
                             tito_test, chroot_names=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :type chroot_names: List[str]
        :rtype: models.Build
        """
        if chroot_names is None:
            chroots = [c for c in copr.active_chroots]
        else:
            chroots = []
            for chroot in copr.active_chroots:
                if chroot.name in chroot_names:
                    chroots.append(chroot)

        source_type = helpers.BuildSourceEnum("git_and_tito")
        source_json = json.dumps({"git_url": git_url,
                                  "git_dir": git_dir,
                                  "git_branch": git_branch,
                                  "tito_test": tito_test})

        # try:
        build = cls.add(user=user,
                        pkgs="",
                        copr=copr,
                        chroots=chroots,
                        source_type=source_type,
                        source_json=source_json,
                        enable_net=build_options.get("enabled_net", copr.build_enable_net))
        if user.proven:
            if "timeout" in build_options:
                build.timeout = build_options["timeout"]
        return build

    @classmethod
    def create_new_from_mock(cls, user, copr, scm_type, scm_url, scm_branch,
                             spec, chroot_names=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :type chroot_names: List[str]
        :rtype: models.Build
        """
        if chroot_names is None:
            chroots = [c for c in copr.active_chroots]
        else:
            chroots = []
            for chroot in copr.active_chroots:
                if chroot.name in chroot_names:
                    chroots.append(chroot)

        source_type = helpers.BuildSourceEnum("mock_scm")
        source_json = json.dumps({"scm_type": scm_type,
                                  "scm_url": scm_url,
                                  "scm_branch": scm_branch,
                                  "spec": spec})

        # try:
        build = cls.add(user=user,
                        pkgs="",
                        copr=copr,
                        chroots=chroots,
                        source_type=source_type,
                        source_json=source_json,
                        enable_net=build_options.get("enabled_net", copr.build_enable_net))
        if user.proven:
            if "timeout" in build_options:
                build.timeout = build_options["timeout"]
        return build

    @classmethod
    def create_new_from_upload(cls, user, copr, f_uploader, orig_filename,
                               chroot_names=None, **build_options):
        """
        :type user: models.User
        :type copr: models.Copr
        :param f_uploader(file_path): function which stores data at the given `file_path`
        :return:
        """
        tmp = tempfile.mkdtemp(dir=app.config["SRPM_STORAGE_DIR"])
        tmp_name = os.path.basename(tmp)
        filename = secure_filename(orig_filename)
        file_path = os.path.join(tmp, filename)
        f_uploader(file_path)

        # make the pkg public
        pkg_url = "https://{hostname}/tmp/{tmp_dir}/{srpm}".format(
            hostname=app.config["PUBLIC_COPR_HOSTNAME"],
            tmp_dir=tmp_name,
            srpm=filename)

        # check which chroots we need
        chroots = []
        for chroot in copr.active_chroots:
            if chroot.name in chroot_names:
                chroots.append(chroot)

        # create json describing the build source
        source_type = helpers.BuildSourceEnum("srpm_upload")
        source_json = json.dumps({"tmp": tmp_name, "pkg": filename})

        try:
            build = cls.add(user=user,
                            pkgs=pkg_url,
                            copr=copr,
                            chroots=chroots,
                            source_type=source_type,
                            source_json=source_json,
                            enable_net=build_options.get("enabled_net", copr.build_enable_net))
            if user.proven:
                if "timeout" in build_options:
                    build.timeout = build_options["timeout"]
        except Exception:
            # keep the storage dir clean when build creation fails
            shutil.rmtree(tmp)  # todo: maybe we should delete in some cleanup procedure?
            raise
        return build

    @classmethod
    def add(cls, user, pkgs, copr, source_type=None, source_json=None,
            repos=None, chroots=None, timeout=None, enable_net=True,
            git_hashes=None, skip_import=False):
        """Create a new Build in `copr` plus one BuildChroot per selected
        (or every active) chroot; everything is db.session.add()-ed.

        :raises exceptions.MalformedArgumentException: `pkgs` contains whitespace
        :return: the new models.Build
        """
        if chroots is None:
            chroots = []

        coprs_logic.CoprsLogic.raise_if_unfinished_blocking_action(
            copr, "Can't build while there is an operation in progress: {action}")
        users_logic.UsersLogic.raise_if_cant_build_in_copr(
            user, copr, "You don't have permissions to build in this copr.")

        if not repos:
            repos = copr.repos

        # todo: eliminate pkgs and this check
        if " " in pkgs or "\n" in pkgs or "\t" in pkgs or pkgs.strip() != pkgs:
            raise exceptions.MalformedArgumentException(
                "Trying to create a build using src_pkg "
                "with bad characters. Forgot to split?")

        # just temporary to keep compatibility
        if not source_type or not source_json:
            source_type = helpers.BuildSourceEnum("srpm_link")
            source_json = json.dumps({"url": pkgs})

        build = models.Build(
            user=user,
            pkgs=pkgs,
            copr=copr,
            repos=repos,
            source_type=source_type,
            source_json=source_json,
            submitted_on=int(time.time()),
            enable_net=bool(enable_net),
        )

        if timeout:
            build.timeout = timeout or DEFAULT_BUILD_TIMEOUT

        db.session.add(build)

        # add BuildChroot object for each active (or selected) chroot
        # this copr is assigned to
        if not chroots:
            chroots = copr.active_chroots

        status = helpers.StatusEnum("importing")
        if skip_import:
            status = StatusEnum("pending")

        for chroot in chroots:
            git_hash = None
            if git_hashes:
                git_hash = git_hashes.get(chroot.name)
            buildchroot = models.BuildChroot(build=build,
                                             status=status,
                                             mock_chroot=chroot,
                                             git_hash=git_hash)
            db.session.add(buildchroot)

        return build

    @classmethod
    def rebuild_package(cls, package):
        """Create a build of `package` from its stored source definition,
        with one importing BuildChroot per active chroot of its copr."""
        build = models.Build(
            user=None,
            pkgs=None,
            package_id=package.id,
            copr=package.copr,
            repos=package.copr.repos,
            source_type=package.source_type,
            source_json=package.source_json,
            submitted_on=int(time.time()),
            enable_net=package.enable_net,
            timeout=DEFAULT_BUILD_TIMEOUT)

        db.session.add(build)

        chroots = package.copr.active_chroots

        status = helpers.StatusEnum("importing")

        for chroot in chroots:
            buildchroot = models.BuildChroot(build=build,
                                             status=status,
                                             mock_chroot=chroot,
                                             git_hash=None)
            db.session.add(buildchroot)

        return build

    # chroot statuses that must never be overwritten by later updates
    terminal_states = {StatusEnum("failed"), StatusEnum("succeeded"), StatusEnum("canceled")}

    @classmethod
    def get_chroots_from_dist_git_task_id(cls, task_id):
        """
        Returns a list of BuildChroots identified with task_id
        task_id consists of a name of git branch + build id
        Example: 42-f22 -> build id 42, chroots fedora-22-*
        """
        # NOTE(review): split("-") assumes the branch name itself contains
        # no dash — confirm against the task_id producer.
        build_id, branch = task_id.split("-")
        build = cls.get_by_id(build_id).one()
        build_chroots = build.build_chroots
        os, version = helpers.branch_to_os_version(branch)
        chroot_halfname = "{}-{}".format(os, version)
        matching = [ch for ch in build_chroots if chroot_halfname in ch.name]
        return matching

    @classmethod
    def delete_local_srpm(cls, build):
        """
        Deletes the source rpm locally stored for upload (if exists)
        """
        # is it hosted on the copr frontend?
        if build.source_type == helpers.BuildSourceEnum("srpm_upload"):
            data = json.loads(build.source_json)
            tmp = data["tmp"]
            storage_path = app.config["SRPM_STORAGE_DIR"]
            try:
                shutil.rmtree(os.path.join(storage_path, tmp))
            except:
                # best-effort cleanup; NOTE(review): bare except also hides
                # unexpected errors — consider narrowing to OSError.
                pass

    @classmethod
    def update_state_from_dict(cls, build, upd_dict):
        """
        Update `build` (and, when "chroot" is given, the matching
        BuildChroot) from a backend status dict.

        :param build:
        :param upd_dict:
            example:
            {
              "builds":[
               {
                 "id": 1,
                 "copr_id": 2,
                 "started_on": 139086644000
               },
               {
                 "id": 2,
                 "copr_id": 1,
                 "status": 0,
                 "chroot": "fedora-18-x86_64",
                 "results": "http://server/results/foo/bar/",
                 "ended_on": 139086644000
               }]
            }
        """
        log.info("Updating build: {} by: {}".format(build.id, upd_dict))
        if "chroot" in upd_dict:
            # update respective chroot status
            for build_chroot in build.build_chroots:
                if build_chroot.name == upd_dict["chroot"]:
                    # never downgrade a chroot already in a terminal state
                    if "status" in upd_dict and build_chroot.status not in BuildsLogic.terminal_states:
                        build_chroot.status = upd_dict["status"]
                    if upd_dict.get("status") in BuildsLogic.terminal_states:
                        build_chroot.ended_on = upd_dict.get("ended_on") or time.time()
                    if upd_dict.get("status") == StatusEnum("starting"):
                        build_chroot.started_on = upd_dict.get("started_on") or time.time()
                    db.session.add(build_chroot)

        for attr in ["results", "built_packages"]:
            value = upd_dict.get(attr, None)
            if value:
                setattr(build, attr, value)

        # build is ended once every chroot carries an end time
        if build.max_ended_on is not None:
            build.ended_on = build.max_ended_on
        db.session.add(build)

    @classmethod
    def cancel_build(cls, user, build):
        """Cancel `build` for `user`; marks every chroot canceled.

        :raises exceptions.InsufficientRightsException: user may not build here
        :raises exceptions.RequestCannotBeExecuted: build is not cancelable
        """
        if not user.can_build_in(build.copr):
            raise exceptions.InsufficientRightsException(
                "You are not allowed to cancel this build.")
        if not build.cancelable:
            raise exceptions.RequestCannotBeExecuted(
                "Cannot cancel build {}".format(build.id))
        build.canceled = True
        for chroot in build.build_chroots:
            chroot.status = 2  # canceled
            # NOTE(review): this looks inverted — it overwrites end times
            # that are already set and leaves unfinished chroots without
            # one; `is None` was probably intended. Confirm before fixing.
            if chroot.ended_on is not None:
                chroot.ended_on = time.time()

    @classmethod
    def delete_build(cls, user, build):
        """
        :type user: models.User
        :type build: models.Build
        """
        if not user.can_edit(build.copr):
            raise exceptions.InsufficientRightsException(
                "You are not allowed to delete build `{}`.".format(build.id))

        if not build.deletable:
            # from celery.contrib import rdb; rdb.set_trace()
            raise exceptions.ActionInProgressException(
                "You can not delete build `{}` which is not finished.".format(build.id),
                "Unfinished build")

        # Only failed, finished, succeeded get here.
        # NOTE(review): "cancelled" (double l) never matches the
        # "canceled" spelling used by the status enum elsewhere in this
        # file — verify which spelling build.state actually produces.
        if build.state not in ["cancelled"]:  # has nothing in backend to delete
            ActionsLogic.send_delete_build(build)

        for build_chroot in build.build_chroots:
            db.session.delete(build_chroot)
        db.session.delete(build)

    @classmethod
    def last_modified(cls, copr):
        """ Get build datetime (as epoch) of last successful build

        :arg copr: object of copr
        """
        builds = cls.get_multiple_by_copr(copr)

        last_build = (
            builds.join(models.BuildChroot)
            .filter((models.BuildChroot.status == helpers.StatusEnum("succeeded"))
                    | (models.BuildChroot.status == helpers.StatusEnum("skipped")))
            .filter(models.Build.ended_on.isnot(None))
            .order_by(models.Build.ended_on.desc())
        ).first()
        if last_build:
            return last_build.ended_on
        else:
            return None

    @classmethod
    def filter_is_finished(cls, query, is_finished):
        """Restrict `query` by whether builds have an end timestamp."""
        # todo: check that ended_on is set correctly for all cases
        # e.g.: failed dist-git import, cancellation
        if is_finished:
            return query.filter(models.Build.ended_on.isnot(None))
        else:
            return query.filter(models.Build.ended_on.is_(None))

    @classmethod
    def filter_by_group_name(cls, query, group_name):
        """Restrict `query` to coprs owned by the named group."""
        return query.filter(models.Group.name == group_name)
def render_packages(self):
    """
    Fold self.monitor_data rows into a list of per-package dicts:
    {"pkg_name", "pkg_version" (always None here), "results"}, where
    "results" maps active chroot names to build id/status/version.

    NOTE: individual records for the same package must be "grouped"
    together in self.monitor_data
    """
    packages = []
    results = {}
    current_package_id = None
    current_package_name = None
    # hoisted out of the loop: the names of currently-active chroots
    active_chroot_names = set(chroot.name for chroot in self.copr.active_chroots)

    for row in self.monitor_data:
        if row["package_id"] != current_package_id:
            if current_package_id:
                # Bugfix: label the flushed entry with the name of the
                # package whose results were accumulated; the original
                # used the *next* package's row name here.
                packages.append({
                    "pkg_name": current_package_name,
                    "pkg_version": None,
                    "results": results
                })
            current_package_id = row["package_id"]
            current_package_name = row["package_name"]
            results = {}

        build_chroot_name = "{}-{}-{}".format(row["mock_chroot_os_release"],
                                              row["mock_chroot_os_version"],
                                              row["mock_chroot_arch"])
        if build_chroot_name in active_chroot_names:
            results[build_chroot_name] = {
                "build_id": row["build_id"],
                "status": StatusEnum(row["build_chroot_status"]),
                "pkg_version": row["build_pkg_version"]
            }

    if current_package_id:
        # Bugfix: guard the trailing flush so empty monitor_data returns
        # [] instead of raising NameError on `row`.
        packages.append({
            "pkg_name": current_package_name,
            "pkg_version": None,
            "results": results
        })
    return packages
class BuildChroot(db.Model, helpers.Serializer):
    """
    Representation of Build<->MockChroot relation
    """
    # composite primary key: (mock_chroot_id, build_id)
    mock_chroot_id = db.Column(db.Integer, db.ForeignKey("mock_chroot.id"), primary_key=True)
    mock_chroot = db.relationship("MockChroot", backref=db.backref("builds"))
    build_id = db.Column(db.Integer, db.ForeignKey("build.id"), primary_key=True)
    build = db.relationship("Build", backref=db.backref("build_chroots"))
    # git commit imported into dist-git for this chroot (may be NULL for
    # builds predating the dist-git migration — see result_dir below)
    git_hash = db.Column(db.String(40))
    # numeric StatusEnum value; fresh chroots start as "importing"
    status = db.Column(db.Integer, default=StatusEnum("importing"))
    started_on = db.Column(db.Integer)
    ended_on = db.Column(db.Integer)
    last_deferred = db.Column(db.Integer)

    @property
    def name(self):
        """
        Textual representation of name of this chroot
        """
        return self.mock_chroot.name

    @property
    def state(self):
        """
        Return text representation of status of this build chroot
        """
        if self.status is not None:
            return StatusEnum(self.status)
        return "unknown"

    @property
    def task_id(self):
        # backend task identifier: "<build_id>-<chroot name>"
        return "{}-{}".format(self.build_id, self.name)

    @property
    def import_task_id(self):
        # dist-git import identifier: "<build_id>-<git branch>"
        return "{}-{}".format(self.build_id, helpers.chroot_to_branch(self.name))

    @property
    def dist_git_url(self):
        # cgit-style commit URL, or None when dist-git is not configured
        if app.config["DIST_GIT_URL"]:
            return "{}/{}.git/commit/?id={}".format(
                app.config["DIST_GIT_URL"],
                self.build.package.dist_git_repo,
                self.git_hash)
        return None

    @property
    def import_log_url(self):
        # URL of the dist-git import log, or None when not configured
        if app.config["COPR_DIST_GIT_LOGS_URL"]:
            return "{}/{}.log".format(app.config["COPR_DIST_GIT_LOGS_URL"],
                                      self.import_task_id)
        return None

    @property
    def result_dir_url(self):
        return "/".join([app.config["BACKEND_BASE_URL"], u"results", self.result_dir])

    @property
    def result_dir(self):
        # hide changes occurred after migration to dist-git
        # if build has defined dist-git, it means that new schema should be used
        # otherwise use older structure
        # old: results/valtri/ruby/fedora-rawhide-x86_64/rubygem-aws-sdk-resources-2.1.11-1.fc24/
        # new: results/asamalik/rh-perl520/epel-7-x86_64/00000187-rh-perl520/
        parts = [self.build.copr.owner_name]
        parts.extend([
            self.build.copr.name,
            self.name,
        ])
        if self.git_hash is not None and self.build.package:
            parts.append(self.build.result_dir_name)
        else:
            parts.append(self.build.src_pkg_name)
        return os.path.join(*parts)

    def __str__(self):
        return "<BuildChroot: {}>".format(self.to_dict())
def has_importing_chroot(self):
    """True when at least one chroot of this build is still importing."""
    importing = StatusEnum("importing")
    return any(state == importing for state in self.chroot_states)
def has_unfinished_chroot(self):
    """True when any chroot is still pending, starting, or running."""
    unfinished = (
        StatusEnum("pending"),
        StatusEnum("starting"),
        StatusEnum("running"),
    )
    return any(state in self.chroot_states for state in unfinished)
def has_pending_chroot(self):
    """True when any chroot is still pending or starting.

    FIXME bad name
    used when checking if the repo is initialized and results can be set
    i think this is the only purpose - check
    """
    not_started = (StatusEnum("pending"), StatusEnum("starting"))
    return any(state in self.chroot_states for state in not_started)