async def run(self):
    """
    Run the worker task.

    Consumes tasks from self.task_queue until a None sentinel is
    received, dispatching each task dict to the matching handler.
    """
    while True:
        try:
            task = await self.task_queue.get()
            if task is None:
                # None is the shutdown sentinel
                logger.info("worker: got empty task, aborting...")
                break
            with Session() as session:
                handled = False
                args = task.get("clone")
                if args:
                    handled = True
                    await self._clone(args, session)
                if not handled:
                    args = task.get("build")
                    if args:
                        handled = True
                        await self._build(args, session)
                if not handled:
                    args = task.get("buildlatest")
                    if args:
                        handled = True
                        await self._buildlatest(args, session)
                if not handled:
                    args = task.get("rebuild")
                    if args:
                        handled = True
                        await self._rebuild(args, session)
                if not handled:
                    # "schedule" tasks carry an empty args list, so test for
                    # key presence instead of truthiness.
                    # bugfix: previously handled was set unconditionally here,
                    # making the "unknown task" error below unreachable and
                    # running _schedule for any unrecognized task.
                    args = task.get("schedule")
                    if args is not None:
                        handled = True
                        await self._schedule(session)
                if not handled:
                    logger.error("worker got unknown task %s", str(task))
                self.task_queue.task_done()
        except Exception as exc:
            logger.exception(exc)
    logger.info("terminating worker task")
async def GitClone(build_id, repo_id, task_queue):
    """
    Clone a source repository and enqueue a "buildlatest" task on success.

    Args:
        build_id (int): Id of the build whose log receives the output.
        repo_id (int): Id of the SourceRepository to clone.
        task_queue: Worker task queue for follow-up tasks.
    """
    with Session() as session:
        repo = (session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first())
        # bugfix: repo was dereferenced (set_busy) without a None check
        if not repo:
            logger.error("GitClone: repo %d not found", repo_id)
            return
        build = session.query(Build).filter(Build.id == build_id).first()
        if not build:
            # bugfix: log message wrongly said "BuildProcess"
            logger.error("GitClone: build %d not found", build_id)
            return

        repo.set_busy()
        session.commit()

        # bugfix: "Respository" typo in the log title
        write_log_title(build_id, "Clone Repository")
        logger.info("cloning repository '%s' into '%s'",
                    repo.url, str(repo.src_path))
        write_log(build_id, "I: cloning repository '{}'\n".format(repo.url))

        if not repo.path.exists():
            repo.path.mkdir()
        if repo.src_path.exists():
            # remove a stale checkout so the clone starts from scratch
            logger.info("clone task: removing git repo %s",
                        str(repo.src_path))
            shutil.rmtree(str(repo.src_path))

        ret = await run_git(
            "git clone --config http.sslVerify=false {}".format(repo.url),
            str(repo.path), build_id)
        if ret != 0:
            logger.error("error running git clone")
            repo.set_error()
            await build.set_failed()
            session.commit()
            return

        git_commands = ["git config http.sslverify false", "git lfs install"]
        for git_command in git_commands:
            ret = await run_git(git_command, str(repo.src_path), build_id)
            if ret != 0:
                logger.error("error running git command: %s", git_command)
                # bugfix: previously returned while leaving the repo busy
                # and the build unfinished
                repo.set_error()
                await build.set_failed()
                session.commit()
                return

        write_log(build_id, "\n")
        repo.set_ready()
        session.commit()

        args = {"buildlatest": [repo.id, build_id]}
        await task_queue.put(args)
async def ws_logs_connected(ws_client):
    """
    Websocket connect callback for live build logs.

    Looks up the build for the connection token and attaches an open
    log file handle to the client for subsequent writes.

    Returns:
        The (possibly augmented) ws_client.
    """
    token = ws_client.cirrina.request.match_info["token"]
    with Session() as session:
        build = (session.query(Build).join(BuildTask).filter(
            BuildTask.task_id == token).first())
        if not build:
            # bugfix: message wrongly referenced "file_upload" (copy/paste)
            logger.error("ws_logs_connected: no build found for token '%s'",
                         token)
            # FIXME: disconnect
            return ws_client
        filename = get_log_file_path(build.id)
        ws_client.cirrina.logfile = open(filename, "w", encoding="utf-8")
    return ws_client
async def file_upload(request, tempfile, filename, size):
    """
    Handle a buildout file upload.

    Moves the uploaded temporary file into the buildout directory of the
    build identified by the upload token.

    Returns:
        web.Response: 200 on success, 400 for an unknown token.
    """
    # local import: shutil.move handles cross-device moves (see below)
    import shutil

    token = request.match_info["token"]
    logger.debug("file uploaded: %s (%s) %dbytes, token %s", tempfile,
                 filename, size, token)
    with Session() as session:
        build = (session.query(Build).join(BuildTask).filter(
            BuildTask.task_id == token).first())
        if not build:
            logger.error("file_upload: no build found for token '%s'", token)
            return web.Response(status=400, text="Invalid file upload.")

        buildout_path = Path(Configuration().working_dir) / "buildout" / str(
            build.id)
        # robustness: make sure the target directory exists
        buildout_path.mkdir(parents=True, exist_ok=True)
        # FIXME: do not overwrite
        # bugfix: os.rename fails with EXDEV when the temp file lives on a
        # different filesystem than the buildout dir; shutil.move copies in
        # that case
        shutil.move(tempfile, str(buildout_path / filename))
        return web.Response(
            text="file uploaded: {} ({} bytes)".format(filename, size))
async def ScheduleBuilds():
    """
    Schedule all pending deb builds whose build-order dependencies are met.

    For each build in state "needs_build" with a ready chroot, checks the
    repo's build-order dependencies (explicit build_after list on the parent
    build, or the recursive buildorder table): the build is scheduled only
    if no dependency is currently building anywhere in the relevant
    projectversions and every dependency has a successful deb build.
    """
    logger.info("scheduler: checking for pending builds")
    with Session() as session:
        needed_builds = (session.query(Build).filter(
            Build.buildstate == "needs_build",
            Build.buildtype == "deb").all())
        for build in needed_builds:
            # skip builds whose chroot environment is not ready yet
            if not chroot_ready(build, session):
                continue

            repo_id = build.sourcerepository_id
            for projectversion in build.buildconfiguration.projectversions:
                projectversion_id = projectversion.id
                # build.log_state("scheduler: projectversion %d, repo %d needs build" % (projectversion_id, repo_id))

                # collect build-order dependencies: either the explicit
                # build_after list set on the parent (source) build, or the
                # transitive closure of the buildorder table
                repo_deps = []
                if build.parent.build_after:
                    for build_after_repo in build.parent.build_after:
                        repo_deps.append(build_after_repo.id)
                else:
                    # recursive query walking the buildorder dependency graph
                    repo_deps_query = """
                        WITH RECURSIVE getparents(sourcerepository, dependency) AS (
                            SELECT s1.sourcerepository, s1.dependency
                            FROM buildorder s1
                            WHERE s1.sourcerepository = :repo_id
                            UNION ALL
                            SELECT s2.sourcerepository, s2.dependency
                            FROM buildorder s2, getparents s1
                            WHERE s2.sourcerepository = s1.dependency
                        )
                        SELECT * from getparents;
                    """
                    results = session.execute(repo_deps_query,
                                              {"repo_id": repo_id})
                    for row in results:
                        repo_deps.append(row[1])

                if not repo_deps:
                    # no build order dependencies: schedule immediately
                    # build.log_state("scheduler: no build order dependencies, scheduling...")
                    await schedule_build(build, session)
                    break

                # dependency builds are searched in this projectversion and
                # all projectversions it depends on
                pv_deps = get_projectversion_deps(projectversion.id, session)
                projectversion_ids = [projectversion_id]
                # FIXME: no mirrors
                projectversion_ids.extend(pv_deps)

                ready = True
                for dep_repo_id in repo_deps:
                    dep_repo = (session.query(SourceRepository).filter(
                        SourceRepository.id == dep_repo_id).first())
                    if not dep_repo:
                        logger.warning("scheduler: repo %d not found",
                                       dep_repo_id)
                        continue

                    # FIXME: buildconfig arch dependent!
                    # find running builds in the same and dependent projectversions
                    found_running = False
                    for pv_id in projectversion_ids:
                        # build.log_state("scheduler: trying to find running build order dependency in projectversion %d" % pv_id)
                        # check no build order dep is needs_build, building, publishing, ...
                        # FIXME: this needs maybe checking of source packages as well?
                        running_builds = (session.query(Build).join(
                            Build.buildconfiguration).filter(
                                or_(
                                    Build.buildstate == "new",
                                    Build.buildstate == "needs_build",
                                    Build.buildstate == "scheduled",
                                    Build.buildstate == "building",
                                    Build.buildstate == "needs_publish",
                                    Build.buildstate == "publishing",
                                ),
                                Build.buildtype == "deb",
                                Build.sourcerepository_id == dep_repo_id,
                                BuildConfiguration.projectversions.any(
                                    ProjectVersion.id == pv_id),
                            ).all())
                        if running_builds:
                            found_running = True
                            # resolve the projectversion name only for logging
                            projectversion = (
                                session.query(ProjectVersion).filter(
                                    ProjectVersion.id == pv_id).first())
                            if not projectversion:
                                pvname = "unknown"
                                logger.warning(
                                    "scheduler: projectversion %d not found",
                                    pv_id)
                            else:
                                pvname = projectversion.fullname
                            builds = [str(b.id) for b in running_builds]
                            write_log(
                                build.id,
                                "W: waiting for repo {} to finish builds ({}) in projectversion {}\n"
                                .format(dep_repo.name, ", ".join(builds),
                                        pvname))
                            break
                    if found_running:
                        # a dependency is still in flight: not ready
                        ready = False
                        break

                    # find successful builds in the same and dependent projectversions
                    found = False
                    for pv_id in projectversion_ids:
                        successful_builds = (session.query(Build).join(
                            Build.buildconfiguration).filter(
                                Build.buildstate == "successful",
                                Build.buildtype == "deb",
                                Build.sourcerepository_id == dep_repo_id,
                                BuildConfiguration.projectversions.any(
                                    ProjectVersion.id == pv_id),
                            ).all())
                        if successful_builds:
                            found = True
                            break
                    if not found:
                        # dependency never built successfully: not ready
                        ready = False
                        # NOTE(review): pv_id here is the last value from the
                        # loop above, not necessarily the one that failed
                        projectversion = (session.query(ProjectVersion).filter(
                            ProjectVersion.id == pv_id).first())
                        if not projectversion:
                            pvname = "unknown"
                            logger.warning(
                                "scheduler: projectversion %d not found",
                                pv_id)
                        else:
                            pvname = projectversion.fullname
                        write_log(
                            build.id,
                            "W: waiting for repo {} to be built in projectversion {}\n"
                            .format(dep_repo.name, pvname))
                        break

                if ready:
                    # build.log_state("scheduler: found all required build order dependencies, scheduling...")
                    await schedule_build(build, session)
async def BuildProcess(task_queue, aptly_queue, parent_build_id, repo_id,
                       git_ref, ci_branch):
    """
    Run the full build pipeline for one repository checkout.

    Checks out git_ref, gathers build information, detects CI builds,
    creates the source build and per-buildconfiguration deb builds,
    builds the source package and enqueues its publishing.

    Args:
        task_queue: Worker task queue (used for rescheduling).
        aptly_queue: Aptly task queue (receives "src_publish").
        parent_build_id (int): Id of the top-level build entry.
        repo_id (int): Id of the SourceRepository to build.
        git_ref (str): Git ref (tag/hash) to check out.
        ci_branch (str): Branch name for CI builds.
    """
    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return
        write_log_title(parent_build_id, "Molior Build")

        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            write_log(parent_build_id,
                      "E: source repository {} not found\n".format(repo_id))
            write_log_title(parent_build_id, "Done", no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            session.commit()
            return

        write_log(parent_build_id, "I: git checkout {}\n".format(git_ref))

        # Checkout
        ret = await asyncio.ensure_future(
            GitCheckout(repo.src_path, git_ref, parent_build_id))

        if not ret:
            write_log(parent_build_id, "E: git checkout failed\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            # release the repo even on failure so other builds may proceed
            repo.set_ready()
            session.commit()
            return

        write_log(parent_build_id, "\nI: get build information\n")

        # parse debian/changelog, molior.yml, ... from the checkout
        info = None
        try:
            info = await GetBuildInfo(repo.src_path, git_ref)
        except Exception as exc:
            logger.exception(exc)

        if not info:
            write_log(parent_build_id, "E: Error getting build information\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        targets = get_targets(info.plain_targets, repo, session)
        if not targets:
            repo.log_state(
                "unknown target projectversions in debian/molior.yml")
            write_log(
                parent_build_id,
                "E: the repository is not added to any projectversions referenced in debian/molior.yml\n"
            )
            write_log_title(parent_build_id, "Done", no_footer_newline=True,
                            no_header_newline=False)
            repo.set_ready()
            await parent.set_failed()
            session.commit()
            return

        # check if it is a CI build
        # i.e. if gittag does not match version in debian/changelog
        is_ci = False
        gittag = ""

        async def outh(line):
            # accumulate "git describe" output
            nonlocal gittag
            gittag += line

        process = Launchy(shlex.split("git describe --tags --abbrev=40"),
                          outh, outh, cwd=str(repo.src_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("error running git describe")
        else:
            # compare the git tag against the changelog version (epoch and
            # "~" vs "-" differences normalized)
            v = strip_epoch_version(info.version)
            if not re.match("^v?{}$".format(v.replace("~", "-")), gittag):
                is_ci = True

        ci_cfg = Configuration().ci_builds
        ci_enabled = ci_cfg.get("enabled") if ci_cfg else False

        if is_ci and not ci_enabled:
            repo.log_state("CI builds are not enabled in configuration")
            write_log(parent_build_id,
                      "E: CI builds are not enabled in configuration\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True,
                            no_header_newline=False)
            # NOTE(review): disabled CI builds count as successful, not failed
            await parent.set_successful()
            repo.set_ready()
            session.commit()
            return

        parent.is_ci = is_ci
        session.commit()

        if is_ci:
            # create CI version with git hash suffix
            info.origversion = info.version
            # NOTE(review): redundant nested is_ci check (already inside one)
            if is_ci:
                info.version += "+git{}.{}".format(
                    info.tag_dt.strftime("%Y%m%d%H%M%S"), git_ref[:6])

            # check if CI builds enabled in any project version
            found = False
            for target in targets:
                projectversion = session.query(ProjectVersion).filter(
                    ProjectVersion.ci_builds_enabled == True,  # noqa: E712
                    ProjectVersion.id == target.projectversion_id).first()
                if projectversion:
                    found = True
                    break
            if not found:
                repo.log_state(
                    "CI builds not enabled in specified projectversions, not building..."
                )
                write_log(
                    parent_build_id,
                    "E: CI builds not enabled in specified projectversions, not building...\n"
                )
                write_log_title(parent_build_id, "Done",
                                no_footer_newline=True,
                                no_header_newline=False)
                await parent.set_successful()
                repo.set_ready()
                session.commit()
                return

        # Check if source build already exists
        build = session.query(Build).filter(
            Build.buildtype == "source",
            Build.sourcerepository == repo,
            Build.version == info.version).first()
        if build:
            repo.log_state(
                "source package already built for version {}".format(
                    info.version))
            write_log(
                parent_build_id,
                "E: source package already built for version {}\n".format(
                    info.version))
            write_log_title(parent_build_id, "Done", no_footer_newline=True,
                            no_header_newline=False)
            repo.set_ready()
            await parent.set_successful()
            session.commit()
            # still trigger the scheduler: deb builds may be pending
            args = {"schedule": []}
            await task_queue.put(args)
            return

        # Use commiter name as maintainer for CI builds
        if is_ci:
            t = info.author_name.split(" ", 2)
            if len(t) == 2:
                firstname = t[0]
                lastname = t[1]
            else:
                firstname = t[0]
                lastname = ""
            email = info.author_email
        else:
            firstname = info.firstname
            lastname = info.lastname
            email = info.email

        maintainer = session.query(Maintainer).filter(
            Maintainer.email == email).first()
        if not maintainer:
            repo.log_state("creating new maintainer: %s %s <%s>" %
                           (firstname, lastname, email))
            write_log(
                parent_build_id, "I: creating new maintainer: %s %s <%s>\n" %
                (firstname, lastname, email))
            maintainer = Maintainer(firstname=firstname, surname=lastname,
                                    email=email)
            session.add(maintainer)
            session.commit()

        # FIXME: assert version == git tag
        # create the source build entry
        build = Build(
            version=info.version,
            git_ref=info.commit_hash,
            ci_branch=ci_branch,
            is_ci=is_ci,
            versiontimestamp=info.tag_stamp,
            sourcename=info.sourcename,
            buildstate="new",
            buildtype="source",
            buildconfiguration=None,
            parent_id=parent_build_id,
            sourcerepository=repo,
            maintainer=maintainer,
        )
        session.add(build)
        session.commit()
        build.log_state("created")
        await build_added(build)

        # add build order dependencies
        build_after = get_buildorder(repo.src_path)
        build_after_deps = []
        found = False
        for dep_git in build_after:
            dep_repo = session.query(SourceRepository).filter(
                SourceRepository.url == dep_git).first()
            if not dep_repo:
                build.log_state("Error: build after repo '%s' not found" %
                                dep_git)
                write_log(parent_build_id,
                          "E: build after repo '%s' not found\n" % dep_git)
                # FIXME: write to build log
                continue
            found = True
            build.log_state("adding build after dependency to: %s" % dep_git)
            write_log(parent_build_id,
                      "I: adding build after dependency to: %s\n" % dep_git)
            build_after_deps.append(dep_repo)
        if found:
            build.build_after = build_after_deps
            session.commit()

        # create one deb build per build configuration
        projectversion_ids = []
        build_configs = get_buildconfigs(targets, session)
        found = False
        for build_config in build_configs:
            projectversion_ids.extend([
                projectversion.id
                for projectversion in build_config.projectversions
            ])
            # FIXME: filter for buildtype?
            deb_build = (session.query(Build).filter(
                Build.buildconfiguration == build_config,
                Build.versiontimestamp == info.tag_stamp,
                Build.version == info.version,
            ).first())
            if deb_build:
                logger.warning("already built %s", repo.name)
                write_log(parent_build_id,
                          "E: already built {}\n".format(repo.name))
                continue

            # FIXME: why projectversion[0] ??
            if build_config.projectversions[0].is_locked:
                repo.log_state(
                    "build to locked projectversion '%s-%s' not permitted" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: build to locked projectversion '%s-%s' not permitted\n"
                    % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            if is_ci and not build_config.projectversions[0].ci_builds_enabled:
                repo.log_state(
                    "CI builds not enabled in projectversion '%s-%s'" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: CI builds not enabled in projectversion '%s-%s'\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            found = True
            write_log(
                parent_build_id,
                "I: creating build for projectversion '%s/%s'\n" % (
                    build_config.projectversions[0].project.name,
                    build_config.projectversions[0].name,
                ))
            deb_build = Build(
                version=info.version,
                git_ref=info.commit_hash,
                ci_branch=ci_branch,
                is_ci=is_ci,
                versiontimestamp=info.tag_stamp,
                sourcename=info.sourcename,
                buildstate="new",
                buildtype="deb",
                buildconfiguration=build_config,
                parent_id=build.id,
                sourcerepository=repo,
                maintainer=maintainer,
            )
            session.add(deb_build)
            session.commit()
            deb_build.log_state("created")
            await build_added(deb_build)
        # FIXME: if not found, abort?

        session.commit()

        # make list unique, filter duplicates (multiple archs)
        projectversion_ids = list(set(projectversion_ids))

        await build.set_building()
        session.commit()

        write_log(parent_build_id, "I: building source package\n")

        async def fail():
            # common failure path for the source build
            write_log(parent_build_id, "E: building source package failed\n")
            write_log_title(build.id, "Done", no_footer_newline=True,
                            no_header_newline=True)
            repo.set_ready()
            await build.set_failed()
            session.commit()
            # FIXME: cancel deb builds, or only create deb builds after source build ok

        # Build Source Package
        write_log_title(build.id, "Source Build")
        try:
            ret = await BuildDebSrc(repo_id, repo.src_path, build.id,
                                    info.version, is_ci,
                                    "{} {}".format(firstname, lastname),
                                    email)
        except Exception as exc:
            logger.exception(exc)
            await fail()
            return

        if not ret:
            await fail()
            return

        await build.set_needs_publish()
        session.commit()

        repo.set_ready()
        session.commit()

        write_log(parent_build_id, "I: publishing source package\n")
        await aptly_queue.put({"src_publish": [build.id, projectversion_ids]})
async def run(self):
    """
    Run the aptly worker task.

    Recovers in-flight mirror tasks at startup, then consumes tasks from
    self.aptly_queue until a None sentinel is received, dispatching each
    task dict to the matching handler.
    """
    await startup_mirror(self.task_queue)

    while True:
        try:
            task = await self.aptly_queue.get()
            if task is None:
                # None is the shutdown sentinel
                # bugfix: "emtpy" typo in the log message
                logger.info("aptly worker: got empty task, aborting...")
                break
            with Session() as session:
                # dispatch to the first matching handler
                handled = False
                args = task.get("src_publish")
                if args:
                    handled = True
                    await self._src_publish(args, session)
                if not handled:
                    args = task.get("publish")
                    if args:
                        handled = True
                        await self._publish(args, session)
                if not handled:
                    args = task.get("create_mirror")
                    if args:
                        handled = True
                        await self._create_mirror(args, session)
                if not handled:
                    args = task.get("update_mirror")
                    if args:
                        handled = True
                        await self._update_mirror(args, session)
                if not handled:
                    args = task.get("drop_publish")
                    if args:
                        handled = True
                        await self._drop_publish(args, session)
                if not handled:
                    args = task.get("init_repository")
                    if args:
                        handled = True
                        await self._init_repository(args, session)
                if not handled:
                    logger.error("aptly worker got unknown task %s",
                                 str(task))
                self.aptly_queue.task_done()
        except Exception as exc:
            logger.exception(exc)
    logger.info("terminating aptly worker task")
async def create_schroot(task_queue, chroot_id, build_id, dist, name, version,
                         arch):
    """
    Creates a sbuild chroot and other build environments.

    Runs the /etc/molior/mirror-hooks.d scripts first with the "build"
    action, then with "publish", tracking progress on the given build.

    Args:
        task_queue: Worker task queue, used to trigger a reschedule.
        chroot_id (int): Id of the Chroot entry to mark ready on success.
        build_id (int): Id of the chroot build receiving the log output.
        dist (str): The distrelease
        name (str): The distrelease name
        version (str): The version
        arch (str): The architecture

    Returns:
        bool: True on success
    """
    with Session() as session:
        build = session.query(Build).filter(Build.id == build_id).first()
        if not build:
            logger.error("aptly worker: mirror build with id %d not found",
                         build_id)
            return False

        write_log_title(build_id, "Chroot Environment")
        await build.set_building()
        session.commit()

        logger.info("creating build environments for %s-%s-%s", dist, version,
                    arch)
        write_log(
            build_id, "Creating build environments for %s-%s-%s\n\n" %
            (dist, version, arch))

        async def outh(line):
            # stream hook output line by line into the build log
            write_log(build_id, "%s\n" % line)

        process = Launchy([
            "sudo", "run-parts", "-a", "build", "-a", dist, "-a", name, "-a",
            version, "-a", arch, "/etc/molior/mirror-hooks.d"
        ], outh, outh)
        await process.launch()
        ret = await process.wait()

        if ret != 0:  # idiom: was "if not ret == 0"
            logger.error("error creating build env")
            write_log(build_id, "Error creating build environment\n")
            write_log_title(build_id, "Done", no_footer_newline=True)
            await build.set_failed()
            session.commit()
            return False

        await build.set_needs_publish()
        session.commit()

        await build.set_publishing()
        session.commit()

        process = Launchy([
            "sudo", "run-parts", "-a", "publish", "-a", dist, "-a", name,
            "-a", version, "-a", arch, "/etc/molior/mirror-hooks.d"
        ], outh, outh)
        await process.launch()
        ret = await process.wait()

        if ret != 0:  # idiom: was "if not ret == 0"
            logger.error("error publishing build env")
            write_log(build_id, "Error publishing build environment\n")
            write_log_title(build_id, "Done", no_footer_newline=True)
            await build.set_publish_failed()
            session.commit()
            return False

        write_log(build_id, "\n")
        write_log_title(build_id, "Done", no_footer_newline=True)
        await build.set_successful()
        session.commit()

        # NOTE(review): assumes chroot_id always exists; no None check here
        chroot = session.query(Chroot).filter(Chroot.id == chroot_id).first()
        chroot.ready = True
        session.commit()

        # Schedule builds
        args = {"schedule": []}
        await task_queue.put(args)

        return True
async def startup_mirror(task_queue):
    """
    Starts a finalize_mirror task in the asyncio event loop for all
    mirrors which have the state 'updating' or 'publishing', matching
    them against their still-running aptly tasks. Mirrors with no
    corresponding aptly task or build are marked as 'error'.

    Args:
        task_queue: Worker task queue, forwarded to finalize_mirror.
    """
    loop = asyncio.get_event_loop()
    apt = get_aptly_connection()

    with Session() as session:
        # get mirrors in updating state
        query = session.query(ProjectVersion)  # pylint: disable=no-member
        query = query.join(Project, Project.id == ProjectVersion.project_id)
        query = query.filter(Project.is_mirror.is_(True))
        query = query.filter(
            or_(ProjectVersion.mirror_state == "updating",
                ProjectVersion.mirror_state == "publishing"))

        if not query.count():
            return

        mirrors = query.all()
        tasks = await apt.get_tasks()

        for mirror in mirrors:
            # FIXME: only one buildvariant supported
            base_mirror = ""
            base_mirror_version = ""
            # the aptly task name encodes the state we are recovering from
            taskname = "Update mirror"
            buildstate = "building"
            if mirror.mirror_state == "publishing":
                taskname = "Publish snapshot:"
                buildstate = "publishing"
            if not mirror.project.is_basemirror:
                base_mirror = mirror.buildvariants[0].base_mirror.project.name
                base_mirror_version = mirror.buildvariants[0].base_mirror.name
                task_name = "{} {}-{}-{}-{}".format(taskname, base_mirror,
                                                    base_mirror_version,
                                                    mirror.project.name,
                                                    mirror.name)
            else:
                task_name = "{} {}-{}".format(taskname, mirror.project.name,
                                              mirror.name)

            m_tasks = None
            if tasks:
                m_tasks = [
                    task for task in tasks if task["Name"] == task_name
                ]
            if not m_tasks:
                # No task on aptly found
                mirror.mirror_state = "error"
                session.commit()  # pylint: disable=no-member
                continue

            # use the most recent matching aptly task
            m_task = max(m_tasks, key=operator.itemgetter("ID"))

            build = (session.query(Build).filter(
                Build.buildtype == "mirror",
                Build.buildstate == buildstate,
                Build.projectversion_id == mirror.id,
            ).first())
            if not build:
                # No task on aptly found
                mirror.mirror_state = "error"
                session.commit()  # pylint: disable=no-member
                continue

            components = mirror.mirror_components.split(",")
            # resume finalization asynchronously
            loop.create_task(
                finalize_mirror(
                    task_queue,
                    build.id,
                    base_mirror,
                    base_mirror_version,
                    mirror.project.name,
                    mirror.name,
                    components,
                    m_task.get("ID"),
                ))
async def finalize_mirror(task_queue, build_id, base_mirror,
                          base_mirror_version, mirror, version, components,
                          task_id):
    """
    Finalize an aptly mirror: wait for the running update task, create
    and publish a snapshot, and (for base mirrors) create chroot build
    environments for every mirror architecture.

    Args:
        task_queue: Worker task queue, forwarded to create_schroot.
        build_id (int): Id of the mirror build receiving log output.
        base_mirror (str): Base mirror project name ("" for basemirrors).
        base_mirror_version (str): Base mirror version name.
        mirror (str): Mirror project name.
        version (str): Mirror version name.
        components (list): Mirror components.
        task_id (int): Id of the running aptly task to wait for.
    """
    try:
        mirrorname = "{}-{}".format(mirror, version)
        logger.info("finalizing mirror %s task %d, build_%d", mirrorname,
                    task_id, build_id)

        with Session() as session:
            # FIXME: get entry from build.projectversion_id
            query = session.query(ProjectVersion)  # pylint: disable=no-member
            query = query.join(Project,
                               Project.id == ProjectVersion.project_id)
            query = query.filter(Project.is_mirror.is_(True))
            query = query.filter(ProjectVersion.name == version)
            entry = query.filter(Project.name == mirror).first()

            if not entry:
                logger.error("finalize mirror: mirror '%s' not found",
                             mirrorname)
                return

            build = session.query(Build).filter(Build.id == build_id).first()
            if not build:
                logger.error("aptly worker: mirror build with id %d not found",
                             build_id)
                return

            apt = get_aptly_connection()

            if entry.mirror_state == "updating":
                # poll the aptly update task until it finishes
                while True:
                    try:
                        upd_progress = await apt.mirror_get_progress(task_id)
                    except Exception as exc:
                        logger.error(
                            "update mirror %s get progress exception: %s",
                            mirrorname, exc)
                        entry.mirror_state = "error"
                        await build.set_failed()
                        session.commit()  # pylint: disable=no-member
                        return

                    # 0: init, 1: running, 2: success, 3: failed
                    if upd_progress["State"] == 2:
                        break

                    if upd_progress["State"] == 3:
                        logger.error("update mirror %s progress error",
                                     mirrorname)
                        entry.mirror_state = "error"
                        await build.set_failed()
                        session.commit()  # pylint: disable=no-member
                        return

                    # log download progress (packages and gigabytes)
                    logger.info(
                        "mirrored %d/%d files (%.02f%%), %.02f/%.02fGB (%.02f%%)",
                        upd_progress["TotalNumberOfPackages"] -
                        upd_progress["RemainingNumberOfPackages"],
                        upd_progress["TotalNumberOfPackages"],
                        upd_progress["PercentPackages"],
                        (upd_progress["TotalDownloadSize"] -
                         upd_progress["RemainingDownloadSize"]) / 1024.0 /
                        1024.0 / 1024.0,
                        upd_progress["TotalDownloadSize"] / 1024.0 / 1024.0 /
                        1024.0,
                        upd_progress["PercentSize"],
                    )

                    await asyncio.sleep(2)

                await apt.delete_task(task_id)

                write_log(build.id, "I: creating snapshot\n")

                await build.set_publishing()
                session.commit()

                # snapshot after initial download
                logger.info("creating snapshot for: %s", mirrorname)
                try:
                    task_id = await apt.mirror_snapshot(
                        base_mirror, base_mirror_version, mirror, version)
                except AptlyError as exc:
                    logger.error("error creating mirror %s snapshot: %s",
                                 mirrorname, exc)
                    entry.mirror_state = "error"
                    await build.set_publish_failed()
                    session.commit()  # pylint: disable=no-member
                    return

                # poll the snapshot task until it finishes
                while True:
                    try:
                        task_state = await apt.get_task_state(task_id)
                    except Exception:
                        logger.exception("error getting mirror %s state",
                                         mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        return
                    # States:
                    # 0: init, 1: running, 2: success, 3: failed
                    if task_state["State"] == 2:
                        break
                    if task_state["State"] == 3:
                        logger.error("creating mirror %s snapshot failed",
                                     mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        return
                    # FIMXE: why sleep ?
                    await asyncio.sleep(2)

                await apt.delete_task(task_id)

                entry.mirror_state = "publishing"
                session.commit()  # pylint: disable=no-member

                # publish new snapshot
                write_log(build.id, "I: publishing mirror\n")
                logger.info("publishing snapshot: %s", mirrorname)
                try:
                    task_id = await apt.mirror_publish(
                        base_mirror, base_mirror_version, mirror, version,
                        entry.mirror_distribution, components)
                except Exception as exc:
                    logger.error("error publishing mirror %s snapshot: %s",
                                 mirrorname, str(exc))
                    entry.mirror_state = "error"
                    await build.set_publish_failed()
                    session.commit()  # pylint: disable=no-member
                    # clean up the snapshot that failed to publish
                    await apt.mirror_snapshot_delete(base_mirror,
                                                     base_mirror_version,
                                                     mirror, version)
                    return

            if entry.mirror_state == "publishing":
                # poll the publish task until it finishes
                while True:
                    try:
                        upd_progress = await apt.mirror_get_progress(task_id)
                    except Exception as exc:
                        logger.error("error publishing mirror %s: %s",
                                     mirrorname, str(exc))
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        await apt.mirror_snapshot_delete(
                            base_mirror, base_mirror_version, mirror, version)
                        return

                    # States:
                    # 0: init, 1: running, 2: success, 3: failed
                    if upd_progress["State"] == 2:
                        break
                    if upd_progress["State"] == 3:
                        logger.error("error publishing mirror %s snapshot",
                                     mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        await apt.mirror_snapshot_delete(
                            base_mirror, base_mirror_version, mirror, version)
                        return

                    logger.info(
                        "published %d/%d packages (%.02f%%)",
                        upd_progress["TotalNumberOfPackages"] -
                        upd_progress["RemainingNumberOfPackages"],
                        upd_progress["TotalNumberOfPackages"],
                        upd_progress["PercentPackages"],
                    )

                    await asyncio.sleep(2)

            if entry.project.is_basemirror:
                # create a buildvariant + chroot build environment per
                # architecture; mirror_architectures is stored as
                # "{a,b,...}" so strip the braces before splitting
                for arch_name in entry.mirror_architectures[1:-1].split(","):
                    arch = (session.query(Architecture).filter(
                        Architecture.name == arch_name).first())  # pylint: disable=no-member
                    if not arch:
                        await build.set_publish_failed()
                        logger.error(
                            "finalize mirror: architecture '%s' not found",
                            arch_name)
                        return

                    buildvariant = BuildVariant(base_mirror=entry,
                                                architecture=arch)
                    session.add(buildvariant)  # pylint: disable=no-member

                    write_log(build.id,
                              "I: starting chroot environments build\n")

                    chroot_build = Build(
                        version=version,
                        git_ref=None,
                        ci_branch=None,
                        is_ci=None,
                        versiontimestamp=None,
                        sourcename=mirror,
                        buildstate="new",
                        buildtype="chroot",
                        projectversion_id=build.projectversion_id,
                        buildconfiguration=None,
                        parent_id=build.id,
                        sourcerepository=None,
                        maintainer=None,
                    )

                    session.add(chroot_build)
                    session.commit()
                    chroot_build.log_state("created")
                    await build_added(chroot_build)

                    await chroot_build.set_needs_build()
                    session.commit()

                    await chroot_build.set_scheduled()
                    session.commit()

                    chroot = Chroot(buildvariant=buildvariant, ready=False)
                    session.add(chroot)
                    session.commit()

                    # run the chroot creation asynchronously
                    loop = asyncio.get_event_loop()
                    loop.create_task(
                        create_schroot(
                            task_queue,
                            chroot.id,
                            chroot_build.id,
                            buildvariant.base_mirror.mirror_distribution,
                            buildvariant.base_mirror.project.name,
                            buildvariant.base_mirror.name,
                            buildvariant.architecture.name,
                        ))

            entry.is_locked = True
            entry.mirror_state = "ready"
            session.commit()  # pylint: disable=no-member

            await build.set_successful()
            session.commit()

            logger.info("mirror %s succesfully created", mirrorname)
            write_log_title(build.id, "Done")

    except Exception as exc:
        logger.exception(exc)