def test_deb_build_successful_other_failed():
    """
    Tests whether a debian build is set to successful correctly when there are
    multiple builds and the other build has failed
    """
    deb_build = Build(id=1337, buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")
    other_build = Build(buildtype="source")
    other_build.buildstate = "build_failed"
    deb_build.parent.children = [deb_build, other_build]
    deb_build.log_state = MagicMock()
    deb_build.parent.log_state = MagicMock()
    deb_build.parent.parent.log_state = MagicMock()

    with patch("molior.molior.buildlogger.write_log"), \
            patch("molior.molior.buildlogger.write_log_title"):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(deb_build.set_successful())

    assert deb_build.buildstate == "successful"
    assert deb_build.parent.parent.buildstate != "successful"


def test_src_build_failed():
    """
    Tests whether a source build is set to failed correctly
    """
    src_build = Build(buildtype="source")
    src_build.parent = Build(buildtype="build")
    src_build.log_state = MagicMock()
    src_build.parent.log_state = MagicMock()

    with patch("molior.molior.buildlogger.write_log"), \
            patch("molior.molior.buildlogger.write_log_title"):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(src_build.set_failed())

    assert src_build.buildstate == "build_failed"
    assert src_build.parent.buildstate == "build_failed"


def test_deb_build_publish_failed():
    """
    Tests whether a debian build is set to publish_failed when publishing failed
    """
    deb_build = Build(buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")
    deb_build.log_state = MagicMock()
    deb_build.parent.log_state = MagicMock()
    deb_build.parent.parent.log_state = MagicMock()

    with patch("molior.molior.buildlogger.write_log"), \
            patch("molior.molior.buildlogger.write_log_title"):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(deb_build.set_publish_failed())

    assert deb_build.buildstate == "publish_failed"
    assert deb_build.parent.parent.buildstate == "build_failed"


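# The three tests above share one pattern: build a small Build tree, stub the
# loggers, and drive a single state-transition coroutine to completion on an
# event loop. A minimal sketch of a reusable helper for that pattern is shown
# below; run_state_transition() is illustrative only and not part of the
# molior code base.
def run_state_transition(coro):
    """Run a build state-transition coroutine to completion (sketch)."""
    with patch("molior.molior.buildlogger.write_log"), \
            patch("molior.molior.buildlogger.write_log_title"):
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(coro)
        finally:
            loop.close()
# Example usage in a test: run_state_transition(deb_build.set_successful())

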
async def BuildProcess(task_queue, aptly_queue, parent_build_id, repo_id, git_ref, ci_branch):
    with Session() as session:
        parent = session.query(Build).filter(Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(parent_build_id))
            return

        write_log_title(parent_build_id, "Molior Build")

        repo = session.query(SourceRepository).filter(SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            write_log(parent_build_id, "E: source repository {} not found\n".format(repo_id))
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_failed()
            session.commit()
            return

        write_log(parent_build_id, "I: git checkout {}\n".format(git_ref))

        # Checkout
        ret = await asyncio.ensure_future(GitCheckout(repo.src_path, git_ref, parent_build_id))
        if not ret:
            write_log(parent_build_id, "E: git checkout failed\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        write_log(parent_build_id, "\nI: get build information\n")

        info = None
        try:
            info = await GetBuildInfo(repo.src_path, git_ref)
        except Exception as exc:
            logger.exception(exc)

        if not info:
            write_log(parent_build_id, "E: Error getting build information\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        targets = get_targets(info.plain_targets, repo, session)
        if not targets:
            repo.log_state("unknown target projectversions in debian/molior.yml")
            write_log(parent_build_id,
                      "E: the repository is not added to any projectversions referenced in debian/molior.yml\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            repo.set_ready()
            await parent.set_failed()
            session.commit()
            return

        # check if it is a CI build,
        # i.e. if the git tag does not match the version in debian/changelog
        is_ci = False
        gittag = ""

        async def outh(line):
            nonlocal gittag
            gittag += line

        process = Launchy(shlex.split("git describe --tags --abbrev=40"),
                          outh, outh, cwd=str(repo.src_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("error running git describe")
        else:
            v = strip_epoch_version(info.version)
            if not re.match("^v?{}$".format(v.replace("~", "-")), gittag):
                is_ci = True

        ci_cfg = Configuration().ci_builds
        ci_enabled = ci_cfg.get("enabled") if ci_cfg else False

        if is_ci and not ci_enabled:
            repo.log_state("CI builds are not enabled in configuration")
            write_log(parent_build_id, "E: CI builds are not enabled in configuration\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_successful()
            repo.set_ready()
            session.commit()
            return

        parent.is_ci = is_ci
        session.commit()

        if is_ci:
            # create CI version with git hash suffix
            info.origversion = info.version
            info.version += "+git{}.{}".format(info.tag_dt.strftime("%Y%m%d%H%M%S"), git_ref[:6])

            # check if CI builds are enabled in any project version
            found = False
            for target in targets:
                projectversion = session.query(ProjectVersion).filter(
                    ProjectVersion.ci_builds_enabled == True,  # noqa: E712
                    ProjectVersion.id == target.projectversion_id).first()
                if projectversion:
                    found = True
                    break

            if not found:
                repo.log_state("CI builds not enabled in specified projectversions, not building...")
                write_log(parent_build_id,
                          "E: CI builds not enabled in specified projectversions, not building...\n")
                write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
                await parent.set_successful()
                repo.set_ready()
                session.commit()
                return

        # Check if source build already exists
        build = session.query(Build).filter(
            Build.buildtype == "source",
            Build.sourcerepository == repo,
            Build.version == info.version).first()
        if build:
            repo.log_state("source package already built for version {}".format(info.version))
            write_log(parent_build_id,
                      "E: source package already built for version {}\n".format(info.version))
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            repo.set_ready()
            await parent.set_successful()
            session.commit()
            args = {"schedule": []}
            await task_queue.put(args)
            return

        # Use committer name as maintainer for CI builds
        if is_ci:
            t = info.author_name.split(" ", 2)
            if len(t) == 2:
                firstname = t[0]
                lastname = t[1]
            else:
                firstname = t[0]
                lastname = ""
            email = info.author_email
        else:
            firstname = info.firstname
            lastname = info.lastname
            email = info.email

        maintainer = session.query(Maintainer).filter(Maintainer.email == email).first()
        if not maintainer:
            repo.log_state("creating new maintainer: %s %s <%s>" % (firstname, lastname, email))
            write_log(parent_build_id,
                      "I: creating new maintainer: %s %s <%s>\n" % (firstname, lastname, email))
            maintainer = Maintainer(firstname=firstname, surname=lastname, email=email)
            session.add(maintainer)
            session.commit()

        # FIXME: assert version == git tag
        build = Build(
            version=info.version,
            git_ref=info.commit_hash,
            ci_branch=ci_branch,
            is_ci=is_ci,
            versiontimestamp=info.tag_stamp,
            sourcename=info.sourcename,
            buildstate="new",
            buildtype="source",
            buildconfiguration=None,
            parent_id=parent_build_id,
            sourcerepository=repo,
            maintainer=maintainer,
        )
        session.add(build)
        session.commit()
        build.log_state("created")
        await build_added(build)

        # add build order dependencies
        build_after = get_buildorder(repo.src_path)
        build_after_deps = []
        found = False
        for dep_git in build_after:
            dep_repo = session.query(SourceRepository).filter(SourceRepository.url == dep_git).first()
            if not dep_repo:
                build.log_state("Error: build after repo '%s' not found" % dep_git)
                write_log(parent_build_id, "E: build after repo '%s' not found\n" % dep_git)
                # FIXME: write to build log
                continue
            found = True
            build.log_state("adding build after dependency to: %s" % dep_git)
            write_log(parent_build_id, "I: adding build after dependency to: %s\n" % dep_git)
            build_after_deps.append(dep_repo)

        if found:
            build.build_after = build_after_deps
            session.commit()

        projectversion_ids = []
        build_configs = get_buildconfigs(targets, session)
        found = False
        for build_config in build_configs:
            projectversion_ids.extend([
                projectversion.id for projectversion in build_config.projectversions
            ])
            # FIXME: filter for buildtype?
            deb_build = session.query(Build).filter(
                Build.buildconfiguration == build_config,
                Build.versiontimestamp == info.tag_stamp,
                Build.version == info.version,
            ).first()
            if deb_build:
                logger.warning("already built %s", repo.name)
                write_log(parent_build_id, "E: already built {}\n".format(repo.name))
                continue

            # FIXME: why projectversion[0] ??
            if build_config.projectversions[0].is_locked:
                repo.log_state(
                    "build to locked projectversion '%s-%s' not permitted" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: build to locked projectversion '%s-%s' not permitted\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            if is_ci and not build_config.projectversions[0].ci_builds_enabled:
                repo.log_state(
                    "CI builds not enabled in projectversion '%s-%s'" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: CI builds not enabled in projectversion '%s-%s'\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            found = True

            write_log(
                parent_build_id,
                "I: creating build for projectversion '%s/%s'\n" % (
                    build_config.projectversions[0].project.name,
                    build_config.projectversions[0].name,
                ))

            deb_build = Build(
                version=info.version,
                git_ref=info.commit_hash,
                ci_branch=ci_branch,
                is_ci=is_ci,
                versiontimestamp=info.tag_stamp,
                sourcename=info.sourcename,
                buildstate="new",
                buildtype="deb",
                buildconfiguration=build_config,
                parent_id=build.id,
                sourcerepository=repo,
                maintainer=maintainer,
            )
            session.add(deb_build)
            session.commit()

            deb_build.log_state("created")
            await build_added(deb_build)

        # FIXME: if not found, abort?
        session.commit()

        # make list unique, filter duplicates (multiple archs)
        projectversion_ids = list(set(projectversion_ids))

        await build.set_building()
        session.commit()

        write_log(parent_build_id, "I: building source package\n")

        async def fail():
            write_log(parent_build_id, "E: building source package failed\n")
            write_log_title(build.id, "Done", no_footer_newline=True, no_header_newline=True)
            repo.set_ready()
            await build.set_failed()
            session.commit()
            # FIXME: cancel deb builds, or only create deb builds after source build ok

        # Build Source Package
        write_log_title(build.id, "Source Build")
        try:
            ret = await BuildDebSrc(repo_id, repo.src_path, build.id, info.version, is_ci,
                                    "{} {}".format(firstname, lastname), email)
        except Exception as exc:
            logger.exception(exc)
            await fail()
            return

        if not ret:
            await fail()
            return

        await build.set_needs_publish()
        session.commit()

        repo.set_ready()
        session.commit()

        write_log(parent_build_id, "I: publishing source package\n")
        await aptly_queue.put({"src_publish": [build.id, projectversion_ids]})


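# BuildProcess decides whether a build is a CI build by comparing the output
# of `git describe` against the version from debian/changelog: tags may carry
# a leading 'v' and use '-' where the Debian version uses '~', and a mismatch
# marks the build as CI and appends a "+git<timestamp>.<shorthash>" suffix.
# A minimal standalone sketch of that rule follows; _strip_epoch() is a
# simplified stand-in for molior's strip_epoch_version(), the function names
# are illustrative, and the sketch reuses this module's existing `re` import.
def _strip_epoch(version):
    """Drop a leading Debian epoch such as '1:' (simplified stand-in)."""
    return version.split(":", 1)[-1]


def is_ci_build(changelog_version, gittag):
    """Return True if the git tag does not match the changelog version (sketch)."""
    v = _strip_epoch(changelog_version)
    return not re.match("^v?{}$".format(v.replace("~", "-")), gittag)
# Examples: is_ci_build("1:1.2.3~rc1", "v1.2.3-rc1") is False (release build),
# while is_ci_build("1.2.3", "v1.2.3-5-gabcdef0") is True (CI build).

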
async def _create_mirror(self, args, session):
    (
        mirror,
        url,
        mirror_distribution,
        components,
        keys,
        keyserver,
        is_basemirror,
        architectures,
        version,
        key_url,
        basemirror_id,
        download_sources,
        download_installer,
    ) = args

    build = Build(
        version=version,
        git_ref=None,
        ci_branch=None,
        is_ci=False,
        versiontimestamp=None,
        sourcename=mirror,
        buildstate="new",
        buildtype="mirror",
        buildconfiguration=None,
        sourcerepository=None,
        maintainer=None,
    )

    build.log_state("created")
    session.add(build)
    await build_added(build)
    session.commit()

    write_log_title(build.id, "Create Mirror")

    mirror_project = (
        session.query(Project)  # pylint: disable=no-member
        .filter(Project.name == mirror, Project.is_mirror.is_(True)).first())
    if not mirror_project:
        mirror_project = Project(name=mirror, is_mirror=True, is_basemirror=is_basemirror)
        session.add(mirror_project)  # pylint: disable=no-member

    project_version = (
        session.query(ProjectVersion).join(Project).filter(  # pylint: disable=no-member
            Project.name == mirror, Project.is_mirror.is_(True)).filter(
            ProjectVersion.name == version).first())
    if project_version:
        write_log(build.id,
                  "W: mirror with name '%s' and version '%s' already exists\n" % (mirror, version))
        logger.error("mirror with name '%s' and version '%s' already exists", mirror, version)
        await build.set_successful()
        session.commit()
        return True

    base_mirror = None
    base_mirror_version = None
    db_buildvariant = None

    if not is_basemirror:
        db_basemirror = (
            session.query(ProjectVersion)  # pylint: disable=no-member
            .filter(ProjectVersion.id == basemirror_id).first())
        if not db_basemirror:
            write_log(build.id, "E: could not find a basemirror with id '%d'\n" % basemirror_id)
            logger.error("could not find a basemirror with id '%d'", basemirror_id)
            await build.set_failed()
            session.commit()
            return False

        base_mirror = db_basemirror.project.name
        base_mirror_version = db_basemirror.name
        db_buildvariant = (
            session.query(BuildVariant)  # pylint: disable=no-member
            .filter(BuildVariant.base_mirror_id == basemirror_id).first())
        if not db_buildvariant:
            write_log(build.id,
                      "E: could not find a buildvariant for basemirror with id '%d'\n" % db_basemirror.id)
            logger.error("could not find a buildvariant for basemirror with id '%d'", db_basemirror.id)
            await build.set_failed()
            session.commit()
            return False

    mirror_project_version = ProjectVersion(
        name=version,
        project=mirror_project,
        mirror_url=url,
        mirror_distribution=mirror_distribution,
        mirror_components=",".join(components),
        mirror_architectures="{" + ",".join(architectures) + "}",
        mirror_with_sources=download_sources,
        mirror_with_installer=download_installer,
    )

    if db_buildvariant:
        mirror_project_version.buildvariants.append(db_buildvariant)

    session.add(mirror_project_version)
    session.commit()

    build.projectversion_id = mirror_project_version.id
    session.commit()

    write_log(build.id, "I: adding GPG keys\n")

    apt = get_aptly_connection()
    if key_url:
        try:
            await apt.gpg_add_key(key_url=key_url)
        except AptlyError as exc:
            write_log(build.id, "E: Error adding keys from '%s'\n" % key_url)
            logger.error("key error: %s", exc)
            await build.set_failed()
            return False
    elif keyserver and keys:
        try:
            await apt.gpg_add_key(key_server=keyserver, keys=keys)
        except AptlyError as exc:
            write_log(build.id, "E: Error adding keys %s\n" % str(keys))
            logger.error("key error: %s", exc)
            await build.set_failed()
            return False

    write_log(build.id, "I: creating mirror\n")

    try:
        await apt.mirror_create(
            mirror,
            version,
            base_mirror,
            base_mirror_version,
            url,
            mirror_distribution,
            components,
            architectures,
            download_sources=download_sources,
            download_udebs=download_installer,
            download_installer=download_installer,
        )
    except NotFoundError as exc:
        write_log(build.id, "E: aptly seems to be not available: %s\n" % str(exc))
        logger.error("aptly seems to be not available: %s", str(exc))
        await build.set_failed()
        return False
    except AptlyError as exc:
        write_log(build.id, "E: failed to create mirror %s on aptly: %s\n" % (mirror, str(exc)))
        logger.error("failed to create mirror %s on aptly: %s", mirror, str(exc))
        await build.set_failed()
        return False

    args = {
        "update_mirror": [
            build.id,
            mirror_project_version.id,
            base_mirror,
            base_mirror_version,
            mirror,
            version,
            components,
        ]
    }
    await self.aptly_queue.put(args)


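# _create_mirror hands the long-running download off to the aptly worker as a
# single-key dict on aptly_queue, e.g. {"update_mirror": [build_id, ...]},
# mirroring the {"src_publish": [...]} and {"schedule": []} messages seen in
# BuildProcess. The sketch below shows one way such messages could be
# consumed; run_queue_consumer() and the handler table are illustrative
# assumptions, not molior's actual dispatcher.
async def run_queue_consumer(queue, handlers):
    """Pop single-key task dicts from `queue` and dispatch them (sketch)."""
    while True:
        task = await queue.get()
        if task is None:
            # shutdown sentinel (assumption, not part of the messages above)
            break
        for name, args in task.items():
            handler = handlers.get(name)
            if not handler:
                logger.error("no handler for task '%s'", name)
                continue
            try:
                await handler(*args)
            except Exception as exc:
                logger.exception(exc)
        queue.task_done()

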
async def finalize_mirror(task_queue, build_id, base_mirror, base_mirror_version,
                          mirror, version, components, task_id):
    """
    Finalizes a mirror after the initial download: waits for the aptly update
    task, creates and publishes a snapshot, and for base mirrors creates the
    chroot environment builds.
    """
    try:
        mirrorname = "{}-{}".format(mirror, version)
        logger.info("finalizing mirror %s task %d, build_%d", mirrorname, task_id, build_id)

        with Session() as session:
            # FIXME: get entry from build.projectversion_id
            query = session.query(ProjectVersion)  # pylint: disable=no-member
            query = query.join(Project, Project.id == ProjectVersion.project_id)
            query = query.filter(Project.is_mirror.is_(True))
            query = query.filter(ProjectVersion.name == version)
            entry = query.filter(Project.name == mirror).first()
            if not entry:
                logger.error("finalize mirror: mirror '%s' not found", mirrorname)
                return

            build = session.query(Build).filter(Build.id == build_id).first()
            if not build:
                logger.error("aptly worker: mirror build with id %d not found", build_id)
                return

            apt = get_aptly_connection()

            if entry.mirror_state == "updating":
                while True:
                    try:
                        upd_progress = await apt.mirror_get_progress(task_id)
                    except Exception as exc:
                        logger.error("update mirror %s get progress exception: %s", mirrorname, exc)
                        entry.mirror_state = "error"
                        await build.set_failed()
                        session.commit()  # pylint: disable=no-member
                        return

                    # 0: init, 1: running, 2: success, 3: failed
                    if upd_progress["State"] == 2:
                        break

                    if upd_progress["State"] == 3:
                        logger.error("update mirror %s progress error", mirrorname)
                        entry.mirror_state = "error"
                        await build.set_failed()
                        session.commit()  # pylint: disable=no-member
                        return

                    logger.info(
                        "mirrored %d/%d files (%.02f%%), %.02f/%.02fGB (%.02f%%)",
                        upd_progress["TotalNumberOfPackages"] - upd_progress["RemainingNumberOfPackages"],
                        upd_progress["TotalNumberOfPackages"],
                        upd_progress["PercentPackages"],
                        (upd_progress["TotalDownloadSize"] - upd_progress["RemainingDownloadSize"])
                        / 1024.0 / 1024.0 / 1024.0,
                        upd_progress["TotalDownloadSize"] / 1024.0 / 1024.0 / 1024.0,
                        upd_progress["PercentSize"],
                    )

                    await asyncio.sleep(2)

                await apt.delete_task(task_id)

            write_log(build.id, "I: creating snapshot\n")

            await build.set_publishing()
            session.commit()

            # snapshot after initial download
            logger.info("creating snapshot for: %s", mirrorname)
            try:
                task_id = await apt.mirror_snapshot(base_mirror, base_mirror_version, mirror, version)
            except AptlyError as exc:
                logger.error("error creating mirror %s snapshot: %s", mirrorname, exc)
                entry.mirror_state = "error"
                await build.set_publish_failed()
                session.commit()  # pylint: disable=no-member
                return

            while True:
                try:
                    task_state = await apt.get_task_state(task_id)
                except Exception:
                    logger.exception("error getting mirror %s state", mirrorname)
                    entry.mirror_state = "error"
                    await build.set_publish_failed()
                    session.commit()  # pylint: disable=no-member
                    return

                # States:
                # 0: init, 1: running, 2: success, 3: failed
                if task_state["State"] == 2:
                    break
                if task_state["State"] == 3:
                    logger.error("creating mirror %s snapshot failed", mirrorname)
                    entry.mirror_state = "error"
                    await build.set_publish_failed()
                    session.commit()  # pylint: disable=no-member
                    return

                # FIMXE: why sleep ?
                await asyncio.sleep(2)

            await apt.delete_task(task_id)

            entry.mirror_state = "publishing"
            session.commit()  # pylint: disable=no-member

            # publish new snapshot
            write_log(build.id, "I: publishing mirror\n")
            logger.info("publishing snapshot: %s", mirrorname)
            try:
                task_id = await apt.mirror_publish(base_mirror, base_mirror_version, mirror, version,
                                                   entry.mirror_distribution, components)
            except Exception as exc:
                logger.error("error publishing mirror %s snapshot: %s", mirrorname, str(exc))
                entry.mirror_state = "error"
                await build.set_publish_failed()
                session.commit()  # pylint: disable=no-member
                await apt.mirror_snapshot_delete(base_mirror, base_mirror_version, mirror, version)
                return

            if entry.mirror_state == "publishing":
                while True:
                    try:
                        upd_progress = await apt.mirror_get_progress(task_id)
                    except Exception as exc:
                        logger.error("error publishing mirror %s: %s", mirrorname, str(exc))
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        await apt.mirror_snapshot_delete(base_mirror, base_mirror_version, mirror, version)
                        return

                    # States:
                    # 0: init, 1: running, 2: success, 3: failed
                    if upd_progress["State"] == 2:
                        break
                    if upd_progress["State"] == 3:
                        logger.error("error publishing mirror %s snapshot", mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        await apt.mirror_snapshot_delete(base_mirror, base_mirror_version, mirror, version)
                        return

                    logger.info(
                        "published %d/%d packages (%.02f%%)",
                        upd_progress["TotalNumberOfPackages"] - upd_progress["RemainingNumberOfPackages"],
                        upd_progress["TotalNumberOfPackages"],
                        upd_progress["PercentPackages"],
                    )

                    await asyncio.sleep(2)

            if entry.project.is_basemirror:
                for arch_name in entry.mirror_architectures[1:-1].split(","):
                    arch = session.query(Architecture).filter(
                        Architecture.name == arch_name).first()  # pylint: disable=no-member
                    if not arch:
                        await build.set_publish_failed()
                        logger.error("finalize mirror: architecture '%s' not found", arch_name)
                        return

                    buildvariant = BuildVariant(base_mirror=entry, architecture=arch)
                    session.add(buildvariant)  # pylint: disable=no-member

                    write_log(build.id, "I: starting chroot environments build\n")

                    chroot_build = Build(
                        version=version,
                        git_ref=None,
                        ci_branch=None,
                        is_ci=None,
                        versiontimestamp=None,
                        sourcename=mirror,
                        buildstate="new",
                        buildtype="chroot",
                        projectversion_id=build.projectversion_id,
                        buildconfiguration=None,
                        parent_id=build.id,
                        sourcerepository=None,
                        maintainer=None,
                    )

                    session.add(chroot_build)
                    session.commit()
                    chroot_build.log_state("created")
                    await build_added(chroot_build)

                    await chroot_build.set_needs_build()
                    session.commit()

                    await chroot_build.set_scheduled()
                    session.commit()

                    chroot = Chroot(buildvariant=buildvariant, ready=False)
                    session.add(chroot)
                    session.commit()

                    loop = asyncio.get_event_loop()
                    loop.create_task(
                        create_schroot(
                            task_queue,
                            chroot.id,
                            chroot_build.id,
                            buildvariant.base_mirror.mirror_distribution,
                            buildvariant.base_mirror.project.name,
                            buildvariant.base_mirror.name,
                            buildvariant.architecture.name,
                        ))

            entry.is_locked = True
            entry.mirror_state = "ready"
            session.commit()  # pylint: disable=no-member

            await build.set_successful()
            session.commit()

            logger.info("mirror %s successfully created", mirrorname)
            write_log_title(build.id, "Done")

    except Exception as exc:
        logger.exception(exc)


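# finalize_mirror polls aptly the same way in several places: loop on the task
# or progress state, treat state 2 as success and 3 as failure, and sleep two
# seconds between polls. The sketch below factors that pattern into one
# helper; wait_for_aptly_task() is illustrative, not part of the current code
# base, and it assumes the apt.get_task_state() call used above.
async def wait_for_aptly_task(apt, task_id, interval=2):
    """Poll an aptly task until it finishes; return True on success (sketch)."""
    # aptly task states: 0: init, 1: running, 2: success, 3: failed
    while True:
        task_state = await apt.get_task_state(task_id)
        if task_state["State"] == 2:
            return True
        if task_state["State"] == 3:
            return False
        await asyncio.sleep(interval)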