def test_deb_build_successful_other_failed():
    """
    Tests whether a debian build is set to successful correctly when a
    sibling source build has failed: the deb build itself succeeds, but
    the top-level build must not be marked successful.
    """
    # NOTE(review): a test with this exact name is defined twice in this
    # module; pytest only collects the last definition, so one of the two
    # is silently shadowed — consider renaming.
    deb_build = Build(id=1337, buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")
    other_build = Build(buildtype="source")
    other_build.buildstate = "build_failed"
    deb_build.parent.children = [deb_build, other_build]
    # Use the shared logmock() helper instead of hand-rolled
    # MagicMock()/patch() boilerplate, consistent with the sibling tests
    # in this file (the identical test using logmock proves equivalence).
    logmock(deb_build)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(deb_build.set_successful())
    assert deb_build.buildstate == "successful"
    # The failed sibling must prevent the grandparent "build" from
    # becoming successful.
    assert deb_build.parent.parent.buildstate != "successful"
def test_src_build_failed():
    """Verify that failing a source build also fails its parent build."""
    source_build = Build(buildtype="source")
    source_build.parent = Build(buildtype="build")
    logmock(source_build)

    asyncio.get_event_loop().run_until_complete(source_build.set_failed())

    assert source_build.buildstate == "build_failed"
    assert source_build.parent.buildstate == "build_failed"
def test_deb_build_successful_other_failed():
    """
    Verify that a successful deb build does not mark the top-level build
    successful while a sibling source build has failed.
    """
    deb_build = Build(id=1337, buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")

    failed_sibling = Build(buildtype="source")
    failed_sibling.buildstate = "build_failed"
    deb_build.parent.children = [deb_build, failed_sibling]

    logmock(deb_build)

    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(deb_build.set_successful())

    assert deb_build.buildstate == "successful"
    assert deb_build.parent.parent.buildstate != "successful"
def test_deb_build_failed():
    """Verify that failing a deb build propagates up to the top-level build."""
    debian_build = Build(buildtype="deb")
    debian_build.parent = Build(buildtype="source")
    debian_build.parent.parent = Build(buildtype="build")
    logmock(debian_build)

    asyncio.get_event_loop().run_until_complete(debian_build.set_failed())

    assert debian_build.buildstate == "build_failed"
    assert debian_build.parent.parent.buildstate == "build_failed"
def test_deb_build_failed():
    """
    Tests whether a debian build was set to failed correctly, propagating
    the failure to the top-level build.
    """
    # NOTE(review): a test with this exact name is defined twice in this
    # module; the later definition shadows the earlier one — consider
    # renaming one of them.
    # Renamed local from the misleading 'src_build': this is a deb build.
    deb_build = Build(buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")
    # Use the shared logmock() helper instead of manual MagicMock()/patch()
    # boilerplate, consistent with the identical sibling test in this file.
    logmock(deb_build)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(deb_build.set_failed())
    assert deb_build.buildstate == "build_failed"
    assert deb_build.parent.parent.buildstate == "build_failed"
def test_deb_build_successful_only_build():
    """
    Verify that a single successful deb build (the only child) marks the
    whole build chain successful.
    """
    deb_build = Build(id=1337, buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")
    deb_build.parent.children = [deb_build]
    logmock(deb_build)

    loop = asyncio.get_event_loop()
    loop.run_until_complete(deb_build.set_successful())

    assert deb_build.buildstate == "successful"
    assert deb_build.parent.parent.buildstate == "successful"
def test_deb_build_publish_failed():
    """
    Tests whether a debian build is set to publish_failed when publishing
    fails, and the top-level build is marked build_failed.
    """
    deb_build = Build(buildtype="deb")
    deb_build.parent = Build(buildtype="source")
    deb_build.parent.parent = Build(buildtype="build")
    # Use the shared logmock() helper instead of manual MagicMock()/patch()
    # boilerplate, consistent with the sibling tests in this file.
    logmock(deb_build)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(deb_build.set_publish_failed())
    assert deb_build.buildstate == "publish_failed"
    # A publish failure surfaces as "build_failed" on the top-level build.
    assert deb_build.parent.parent.buildstate == "build_failed"
async def BuildProcess(task_queue, aptly_queue, parent_build_id, repo_id, git_ref, ci_branch):
    """
    Run the full build pipeline for one source repository checkout:
    git checkout, build-info extraction, CI detection, source-build and
    deb-build record creation, source package build, and hand-off to the
    aptly publish queue.

    Args:
        task_queue: asyncio queue for scheduling follow-up tasks.
        aptly_queue: asyncio queue feeding the aptly worker.
        parent_build_id (int): id of the top-level "build" Build record.
        repo_id (int): id of the SourceRepository to build.
        git_ref (str): git ref (tag/branch/hash) to check out.
        ci_branch (str): CI branch name recorded on created builds.
    """
    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return
        write_log_title(parent_build_id, "Molior Build")

        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            write_log(parent_build_id, "E: source repository {} not found\n".format(repo_id))
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_failed()
            session.commit()
            return

        write_log(parent_build_id, "I: git checkout {}\n".format(git_ref))

        # Checkout
        ret = await asyncio.ensure_future(
            GitCheckout(repo.src_path, git_ref, parent_build_id))
        if not ret:
            write_log(parent_build_id, "E: git checkout failed\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        write_log(parent_build_id, "\nI: get build information\n")
        info = None
        try:
            info = await GetBuildInfo(repo.src_path, git_ref)
        except Exception as exc:
            logger.exception(exc)

        if not info:
            write_log(parent_build_id, "E: Error getting build information\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        targets = get_targets(info.plain_targets, repo, session)
        if not targets:
            repo.log_state(
                "unknown target projectversions in debian/molior.yml")
            write_log(
                parent_build_id,
                "E: the repository is not added to any projectversions referenced in debian/molior.yml\n"
            )
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            repo.set_ready()
            await parent.set_failed()
            session.commit()
            return

        # check if it is a CI build
        # i.e. if gittag does not match version in debian/changelog
        is_ci = False
        gittag = ""

        async def outh(line):
            # Collect stdout/stderr of "git describe" into gittag.
            nonlocal gittag
            gittag += line

        process = Launchy(shlex.split("git describe --tags --abbrev=40"),
                          outh, outh, cwd=str(repo.src_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            # Non-fatal: without a tag we simply treat it as a non-CI build.
            logger.error("error running git describe")
        else:
            v = strip_epoch_version(info.version)
            # Tag may carry a leading "v" and uses "-" where the version
            # uses "~".
            if not re.match("^v?{}$".format(v.replace("~", "-")), gittag):
                is_ci = True

        ci_cfg = Configuration().ci_builds
        ci_enabled = ci_cfg.get("enabled") if ci_cfg else False

        if is_ci and not ci_enabled:
            repo.log_state("CI builds are not enabled in configuration")
            write_log(parent_build_id, "E: CI builds are not enabled in configuration\n")
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            # Deliberately "successful": a disabled CI build is not an error.
            await parent.set_successful()
            repo.set_ready()
            session.commit()
            return

        parent.is_ci = is_ci
        session.commit()

        if is_ci:
            # create CI version with git hash suffix
            info.origversion = info.version
            # NOTE(review): this inner "if is_ci" is redundant — we are
            # already inside an "if is_ci" branch.
            if is_ci:
                info.version += "+git{}.{}".format(
                    info.tag_dt.strftime("%Y%m%d%H%M%S"), git_ref[:6])

            # check if CI builds enabled in any project version
            found = False
            for target in targets:
                projectversion = session.query(ProjectVersion).filter(
                    ProjectVersion.ci_builds_enabled == True,  # noqa: E712
                    ProjectVersion.id == target.projectversion_id).first()
                if projectversion:
                    found = True
                    break
            if not found:
                repo.log_state(
                    "CI builds not enabled in specified projectversions, not building..."
                )
                write_log(
                    parent_build_id,
                    "E: CI builds not enabled in specified projectversions, not building...\n"
                )
                write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
                await parent.set_successful()
                repo.set_ready()
                session.commit()
                return

        # Check if source build already exists
        build = session.query(Build).filter(
            Build.buildtype == "source",
            Build.sourcerepository == repo,
            Build.version == info.version).first()
        if build:
            repo.log_state(
                "source package already built for version {}".format(
                    info.version))
            write_log(
                parent_build_id,
                "E: source package already built for version {}\n".format(
                    info.version))
            write_log_title(parent_build_id, "Done", no_footer_newline=True, no_header_newline=False)
            repo.set_ready()
            await parent.set_successful()
            session.commit()
            # Kick the scheduler so pending deb builds still progress.
            args = {"schedule": []}
            await task_queue.put(args)
            return

        # Use commiter name as maintainer for CI builds
        if is_ci:
            t = info.author_name.split(" ", 2)
            if len(t) == 2:
                firstname = t[0]
                lastname = t[1]
            else:
                firstname = t[0]
                lastname = ""
            email = info.author_email
        else:
            firstname = info.firstname
            lastname = info.lastname
            email = info.email

        maintainer = session.query(Maintainer).filter(
            Maintainer.email == email).first()
        if not maintainer:
            repo.log_state("creating new maintainer: %s %s <%s>" % (firstname, lastname, email))
            write_log(
                parent_build_id,
                "I: creating new maintainer: %s %s <%s>\n" % (firstname, lastname, email))
            maintainer = Maintainer(firstname=firstname, surname=lastname, email=email)
            session.add(maintainer)
            session.commit()

        # FIXME: assert version == git tag
        build = Build(
            version=info.version,
            git_ref=info.commit_hash,
            ci_branch=ci_branch,
            is_ci=is_ci,
            versiontimestamp=info.tag_stamp,
            sourcename=info.sourcename,
            buildstate="new",
            buildtype="source",
            buildconfiguration=None,
            parent_id=parent_build_id,
            sourcerepository=repo,
            maintainer=maintainer,
        )
        session.add(build)
        session.commit()
        build.log_state("created")
        await build_added(build)

        # add build order dependencies
        build_after = get_buildorder(repo.src_path)
        build_after_deps = []
        found = False
        for dep_git in build_after:
            dep_repo = session.query(SourceRepository).filter(
                SourceRepository.url == dep_git).first()
            if not dep_repo:
                build.log_state("Error: build after repo '%s' not found" % dep_git)
                write_log(parent_build_id, "E: build after repo '%s' not found\n" % dep_git)
                # FIXME: write to build log
                continue
            found = True
            build.log_state("adding build after dependency to: %s" % dep_git)
            write_log(parent_build_id, "I: adding build after dependency to: %s\n" % dep_git)
            build_after_deps.append(dep_repo)
        if found:
            build.build_after = build_after_deps
            session.commit()

        projectversion_ids = []
        build_configs = get_buildconfigs(targets, session)
        found = False
        for build_config in build_configs:
            projectversion_ids.extend([
                projectversion.id for projectversion in build_config.projectversions
            ])
            # FIXME: filter for buildtype?
            deb_build = (session.query(Build).filter(
                Build.buildconfiguration == build_config,
                Build.versiontimestamp == info.tag_stamp,
                Build.version == info.version,
            ).first())
            if deb_build:
                logger.warning("already built %s", repo.name)
                write_log(parent_build_id, "E: already built {}\n".format(repo.name))
                continue

            # FIXME: why projectversion[0] ??
            if build_config.projectversions[0].is_locked:
                repo.log_state(
                    "build to locked projectversion '%s-%s' not permitted" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: build to locked projectversion '%s-%s' not permitted\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            if is_ci and not build_config.projectversions[0].ci_builds_enabled:
                repo.log_state(
                    "CI builds not enabled in projectversion '%s-%s'" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: CI builds not enabled in projectversion '%s-%s'\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            found = True
            write_log(
                parent_build_id,
                "I: creating build for projectversion '%s/%s'\n" % (
                    build_config.projectversions[0].project.name,
                    build_config.projectversions[0].name,
                ))
            deb_build = Build(
                version=info.version,
                git_ref=info.commit_hash,
                ci_branch=ci_branch,
                is_ci=is_ci,
                versiontimestamp=info.tag_stamp,
                sourcename=info.sourcename,
                buildstate="new",
                buildtype="deb",
                buildconfiguration=build_config,
                parent_id=build.id,
                sourcerepository=repo,
                maintainer=maintainer,
            )
            session.add(deb_build)
            session.commit()
            deb_build.log_state("created")
            await build_added(deb_build)

        # FIXME: if not found, abort?
        session.commit()

        # make list unique, filter duplicates (multiple archs)
        projectversion_ids = list(set(projectversion_ids))

        await build.set_building()
        session.commit()

        write_log(parent_build_id, "I: building source package\n")

        async def fail():
            # Common failure path for the source-package build below.
            write_log(parent_build_id, "E: building source package failed\n")
            write_log_title(build.id, "Done", no_footer_newline=True, no_header_newline=True)
            repo.set_ready()
            await build.set_failed()
            session.commit()
            # FIXME: cancel deb builds, or only create deb builds after source build ok

        # Build Source Package
        write_log_title(build.id, "Source Build")
        try:
            ret = await BuildDebSrc(repo_id, repo.src_path, build.id, info.version,
                                    is_ci, "{} {}".format(firstname, lastname), email)
        except Exception as exc:
            logger.exception(exc)
            await fail()
            return
        if not ret:
            await fail()
            return

        await build.set_needs_publish()
        session.commit()

        repo.set_ready()
        session.commit()

        write_log(parent_build_id, "I: publishing source package\n")
        await aptly_queue.put({"src_publish": [build.id, projectversion_ids]})
async def post_add_repository(request):
    """
    Adds given sourcerepositories to the given
    projectversion.

    ---
    description: Adds given sourcerepositories to given projectversion.
    tags:
        - ProjectVersions
    consumes:
        - application/json
    parameters:
        - name: projectversion_id
          in: path
          required: true
          type: integer
        - name: sourcerepository_id
          in: path
          required: true
          type: integer
        - name: body
          in: body
          required: true
          schema:
            type: object
            properties:
                buildvariants:
                    type: array
                    example: [1, 2]
    produces:
        - text/json
    responses:
        "200":
            description: successful
        "400":
            description: Invalid data received.
    """
    params = await request.json()

    projectversion_id = request.match_info["projectversion_id"]
    sourcerepository_id = request.match_info["sourcerepository_id"]
    buildvariants = params.get("buildvariants", [])
    if not buildvariants:
        return web.Response(status=400, text="No buildvariants recieved.")

    projectversion_id = parse_int(projectversion_id)
    project_v = (
        request.cirrina.db_session.query(ProjectVersion)  # pylint: disable=no-member
        .filter(ProjectVersion.id == projectversion_id).first())
    if not project_v:
        return web.Response(status=400, text="Invalid data received.")

    parsed_id = parse_int(sourcerepository_id)
    if not parsed_id:
        return web.Response(status=400, text="Invalid data received.")

    src_repo = (
        request.cirrina.db_session.query(SourceRepository)  # pylint: disable=no-member
        .filter(SourceRepository.id == parsed_id).first())
    # Bug fix: a well-formed but unknown id previously slipped through,
    # appending None to project_v.sourcerepositories and crashing later on
    # src_repo.id / src_repo.url.
    if not src_repo:
        return web.Response(status=400, text="Invalid data received.")

    if src_repo not in project_v.sourcerepositories:
        project_v.sourcerepositories.append(src_repo)
        request.cirrina.db_session.commit()  # pylint: disable=no-member

    # Id of the association row linking this repo to this projectversion.
    sourepprover_id = ((
        request.cirrina.db_session.query(SouRepProVer)  # pylint: disable=no-member
        .filter(SouRepProVer.c.sourcerepository_id == src_repo.id).filter(
            SouRepProVer.c.projectversion_id == project_v.id)).first().id)

    for buildvariant in buildvariants:
        # if just the buildvariant id is given
        if buildvariant.get("id"):
            buildvar_id = parse_int(buildvariant.get("id"))
            buildvar = (
                request.cirrina.db_session.query(BuildVariant)  # pylint: disable=no-member
                .filter(BuildVariant.id == buildvar_id).first())
        # if basemirror and architecture is given
        elif buildvariant.get("architecture_id") and buildvariant.get(
                "base_mirror_id"):
            arch_id = parse_int(buildvariant.get("architecture_id"))
            base_mirror_id = parse_int(buildvariant.get("base_mirror_id"))
            buildvar = (
                request.cirrina.db_session.query(BuildVariant).filter(
                    BuildVariant.architecture_id == arch_id)  # pylint: disable=no-member
                .filter(BuildVariant.base_mirror_id == base_mirror_id).first())
        else:
            return web.Response(status=400, text="Invalid buildvariants received.")

        # NOTE(review): buildvar may still be None here if the lookup found
        # nothing; BuildConfiguration would then be created with
        # buildvariant=None — confirm whether that should also be a 400.
        buildconf = BuildConfiguration(
            buildvariant=buildvar,
            sourcerepositoryprojectversion_id=sourepprover_id)
        request.cirrina.db_session.add(buildconf)

    request.cirrina.db_session.commit()  # pylint: disable=no-member

    logger.info(
        "SourceRepository '%s' with id '%s' added to ProjectVersion '%s/%s'",
        src_repo.url,
        src_repo.id,
        project_v.project.name,
        project_v.name,
    )

    # A brand-new repository still needs an initial clone/build.
    if src_repo.state == "new":
        build = Build(
            version=None,
            git_ref=None,
            ci_branch=None,
            is_ci=None,
            versiontimestamp=None,
            sourcename=src_repo.name,
            buildstate="new",
            buildtype="build",
            buildconfiguration=None,
            sourcerepository=src_repo,
            maintainer=None,
        )
        request.cirrina.db_session.add(build)
        request.cirrina.db_session.commit()
        await build_added(build)

        token = uuid.uuid4()
        buildtask = BuildTask(build=build, task_id=str(token))
        request.cirrina.db_session.add(buildtask)
        request.cirrina.db_session.commit()

        args = {"clone": [build.id, src_repo.id]}
        await request.cirrina.task_queue.put(args)

    return web.Response(status=200, text="SourceRepository added.")
async def finalize_mirror(task_queue, build_id, base_mirror, base_mirror_version, mirror, version, components, task_id):
    """
    Drive an aptly mirror through its final states: wait for the update to
    finish, snapshot it, publish the snapshot, and — for base mirrors —
    create build variants and chroot builds for each architecture.
    Errors set the projectversion's mirror_state to "error" and fail the
    corresponding Build record.
    """
    try:
        mirrorname = "{}-{}".format(mirror, version)
        logger.info("finalizing mirror %s task %d, build_%d", mirrorname, task_id, build_id)

        with Session() as session:
            # FIXME: get entry from build.projectversion_id
            query = session.query(ProjectVersion)  # pylint: disable=no-member
            query = query.join(Project, Project.id == ProjectVersion.project_id)
            query = query.filter(Project.is_mirror.is_(True))
            query = query.filter(ProjectVersion.name == version)
            entry = query.filter(Project.name == mirror).first()

            if not entry:
                logger.error("finalize mirror: mirror '%s' not found", mirrorname)
                return

            build = session.query(Build).filter(Build.id == build_id).first()
            if not build:
                logger.error("aptly worker: mirror build with id %d not found", build_id)
                return

            apt = get_aptly_connection()

            if entry.mirror_state == "updating":
                # Poll aptly until the mirror update task finishes.
                while True:
                    try:
                        upd_progress = await apt.mirror_get_progress(task_id)
                    except Exception as exc:
                        logger.error(
                            "update mirror %s get progress exception: %s",
                            mirrorname, exc)
                        entry.mirror_state = "error"
                        await build.set_failed()
                        session.commit()  # pylint: disable=no-member
                        return

                    # 0: init, 1: running, 2: success, 3: failed
                    if upd_progress["State"] == 2:
                        break
                    if upd_progress["State"] == 3:
                        logger.error("update mirror %s progress error", mirrorname)
                        entry.mirror_state = "error"
                        await build.set_failed()
                        session.commit()  # pylint: disable=no-member
                        return

                    logger.info(
                        "mirrored %d/%d files (%.02f%%), %.02f/%.02fGB (%.02f%%)",
                        upd_progress["TotalNumberOfPackages"] - upd_progress["RemainingNumberOfPackages"],
                        upd_progress["TotalNumberOfPackages"],
                        upd_progress["PercentPackages"],
                        (upd_progress["TotalDownloadSize"] - upd_progress["RemainingDownloadSize"]) / 1024.0 / 1024.0 / 1024.0,
                        upd_progress["TotalDownloadSize"] / 1024.0 / 1024.0 / 1024.0,
                        upd_progress["PercentSize"],
                    )
                    await asyncio.sleep(2)

                await apt.delete_task(task_id)

                write_log(build.id, "I: creating snapshot\n")
                await build.set_publishing()
                session.commit()

                # snapshot after initial download
                logger.info("creating snapshot for: %s", mirrorname)
                try:
                    task_id = await apt.mirror_snapshot(
                        base_mirror, base_mirror_version, mirror, version)
                except AptlyError as exc:
                    logger.error("error creating mirror %s snapshot: %s", mirrorname, exc)
                    entry.mirror_state = "error"
                    await build.set_publish_failed()
                    session.commit()  # pylint: disable=no-member
                    return

                # Poll the snapshot task until it completes.
                while True:
                    try:
                        task_state = await apt.get_task_state(task_id)
                    except Exception:
                        logger.exception("error getting mirror %s state", mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        return
                    # States:
                    # 0: init, 1: running, 2: success, 3: failed
                    if task_state["State"] == 2:
                        break
                    if task_state["State"] == 3:
                        logger.error("creating mirror %s snapshot failed", mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        return
                    # FIMXE: why sleep ?
                    await asyncio.sleep(2)

                await apt.delete_task(task_id)

                entry.mirror_state = "publishing"
                session.commit()  # pylint: disable=no-member

                # publish new snapshot
                write_log(build.id, "I: publishing mirror\n")
                logger.info("publishing snapshot: %s", mirrorname)
                try:
                    task_id = await apt.mirror_publish(
                        base_mirror, base_mirror_version, mirror, version,
                        entry.mirror_distribution, components)
                except Exception as exc:
                    logger.error("error publishing mirror %s snapshot: %s", mirrorname, str(exc))
                    entry.mirror_state = "error"
                    await build.set_publish_failed()
                    session.commit()  # pylint: disable=no-member
                    # Roll back the snapshot on publish failure.
                    await apt.mirror_snapshot_delete(base_mirror, base_mirror_version, mirror, version)
                    return

            if entry.mirror_state == "publishing":
                # Poll aptly until the publish task finishes.
                while True:
                    try:
                        upd_progress = await apt.mirror_get_progress(task_id)
                    except Exception as exc:
                        logger.error("error publishing mirror %s: %s", mirrorname, str(exc))
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        await apt.mirror_snapshot_delete(
                            base_mirror, base_mirror_version, mirror, version)
                        return

                    # States:
                    # 0: init, 1: running, 2: success, 3: failed
                    if upd_progress["State"] == 2:
                        break
                    if upd_progress["State"] == 3:
                        logger.error("error publishing mirror %s snapshot", mirrorname)
                        entry.mirror_state = "error"
                        await build.set_publish_failed()
                        session.commit()  # pylint: disable=no-member
                        await apt.mirror_snapshot_delete(
                            base_mirror, base_mirror_version, mirror, version)
                        return

                    logger.info(
                        "published %d/%d packages (%.02f%%)",
                        upd_progress["TotalNumberOfPackages"] - upd_progress["RemainingNumberOfPackages"],
                        upd_progress["TotalNumberOfPackages"],
                        upd_progress["PercentPackages"],
                    )
                    await asyncio.sleep(2)

            if entry.project.is_basemirror:
                # mirror_architectures is stored as "{a,b,c}"; strip the
                # braces and create a buildvariant + chroot build per arch.
                for arch_name in entry.mirror_architectures[1:-1].split(","):
                    arch = (session.query(Architecture).filter(
                        Architecture.name == arch_name).first())  # pylint: disable=no-member
                    if not arch:
                        await build.set_publish_failed()
                        logger.error(
                            "finalize mirror: architecture '%s' not found", arch_name)
                        return

                    buildvariant = BuildVariant(base_mirror=entry, architecture=arch)
                    session.add(buildvariant)  # pylint: disable=no-member

                    write_log(build.id, "I: starting chroot environments build\n")

                    chroot_build = Build(
                        version=version,
                        git_ref=None,
                        ci_branch=None,
                        is_ci=None,
                        versiontimestamp=None,
                        sourcename=mirror,
                        buildstate="new",
                        buildtype="chroot",
                        projectversion_id=build.projectversion_id,
                        buildconfiguration=None,
                        parent_id=build.id,
                        sourcerepository=None,
                        maintainer=None,
                    )
                    session.add(chroot_build)
                    session.commit()
                    chroot_build.log_state("created")
                    await build_added(chroot_build)

                    await chroot_build.set_needs_build()
                    session.commit()

                    await chroot_build.set_scheduled()
                    session.commit()

                    chroot = Chroot(buildvariant=buildvariant, ready=False)
                    session.add(chroot)
                    session.commit()

                    # Fire-and-forget: schroot creation runs concurrently.
                    loop = asyncio.get_event_loop()
                    loop.create_task(
                        create_schroot(
                            task_queue,
                            chroot.id,
                            chroot_build.id,
                            buildvariant.base_mirror.mirror_distribution,
                            buildvariant.base_mirror.project.name,
                            buildvariant.base_mirror.name,
                            buildvariant.architecture.name,
                        ))

            entry.is_locked = True
            entry.mirror_state = "ready"
            session.commit()  # pylint: disable=no-member

            await build.set_successful()
            session.commit()

            logger.info("mirror %s succesfully created", mirrorname)
            write_log_title(build.id, "Done")

    except Exception as exc:
        # Last-resort guard: never let the worker die on an unexpected error.
        logger.exception(exc)
async def _create_mirror(self, args, session):
    """
    Create a new aptly mirror: the Build record, the (possibly new) mirror
    Project and ProjectVersion, GPG keys, and the aptly mirror itself,
    then enqueue the "update_mirror" task.

    Args:
        args: tuple of mirror parameters (see unpacking below).
        session: open database session.

    Returns:
        bool: True on success (or if the mirror already exists),
        False on failure.
    """
    (
        mirror,
        url,
        mirror_distribution,
        components,
        keys,
        keyserver,
        is_basemirror,
        architectures,
        version,
        key_url,
        basemirror_id,
        download_sources,
        download_installer,
    ) = args

    build = Build(
        version=version,
        git_ref=None,
        ci_branch=None,
        is_ci=False,
        versiontimestamp=None,
        sourcename=mirror,
        buildstate="new",
        buildtype="mirror",
        buildconfiguration=None,
        sourcerepository=None,
        maintainer=None,
    )
    build.log_state("created")
    session.add(build)
    await build_added(build)
    session.commit()
    write_log_title(build.id, "Create Mirror")

    # Reuse an existing mirror Project or create a new one.
    mirror_project = (
        session.query(Project)  # pylint: disable=no-member
        .filter(Project.name == mirror, Project.is_mirror.is_(True)).first())
    if not mirror_project:
        mirror_project = Project(name=mirror, is_mirror=True, is_basemirror=is_basemirror)
        session.add(mirror_project)  # pylint: disable=no-member

    # A mirror with the same name/version must not be created twice.
    project_version = (
        session.query(ProjectVersion).join(Project).filter(  # pylint: disable=no-member
            Project.name == mirror, Project.is_mirror.is_(True)).filter(
                ProjectVersion.name == version).first())
    if project_version:
        write_log(
            build.id,
            "W: mirror with name '%s' and version '%s' already exists\n" % (mirror, version))
        logger.error(
            "mirror with name '%s' and version '%s' already exists", mirror, version)
        # Treated as success: the mirror is already there.
        await build.set_successful()
        session.commit()
        return True

    base_mirror = None
    base_mirror_version = None
    db_buildvariant = None
    if not is_basemirror:
        # Non-base mirrors must reference an existing base mirror.
        db_basemirror = (
            session.query(ProjectVersion)  # pylint: disable=no-member
            .filter(ProjectVersion.id == basemirror_id).first())
        if not db_basemirror:
            write_log(
                build.id,
                "E: could not find a basemirror with id '%d'\n" % basemirror_id)
            logger.error("could not find a basemirror with id '%d'", basemirror_id)
            await build.set_failed()
            session.commit()
            return False

        base_mirror = db_basemirror.project.name
        base_mirror_version = db_basemirror.name
        db_buildvariant = (
            session.query(BuildVariant)  # pylint: disable=no-member
            .filter(BuildVariant.base_mirror_id == basemirror_id).first())

        if not db_buildvariant:
            write_log(
                build.id,
                "E: could not find a buildvariant for basemirror with id '%d'\n" % db_basemirror.id)
            logger.error(
                "could not find a buildvariant for basemirror with id '%d'",
                db_basemirror.id)
            await build.set_failed()
            session.commit()
            return False

    # mirror_architectures uses the "{a,b,c}" literal form.
    mirror_project_version = ProjectVersion(
        name=version,
        project=mirror_project,
        mirror_url=url,
        mirror_distribution=mirror_distribution,
        mirror_components=",".join(components),
        mirror_architectures="{" + ",".join(architectures) + "}",
        mirror_with_sources=download_sources,
        mirror_with_installer=download_installer,
    )

    if db_buildvariant:
        mirror_project_version.buildvariants.append(db_buildvariant)

    session.add(mirror_project_version)
    session.commit()

    build.projectversion_id = mirror_project_version.id
    session.commit()

    write_log(build.id, "I: adding GPG keys\n")

    apt = get_aptly_connection()
    # Key material comes either from a key URL or from a keyserver + key ids.
    if key_url:
        try:
            await apt.gpg_add_key(key_url=key_url)
        except AptlyError as exc:
            write_log(build.id, "E: Error adding keys from '%s'\n" % key_url)
            logger.error("key error: %s", exc)
            await build.set_failed()
            return False
    elif keyserver and keys:
        try:
            await apt.gpg_add_key(key_server=keyserver, keys=keys)
        except AptlyError as exc:
            write_log(build.id, "E: Error adding keys %s\n" % str(keys))
            logger.error("key error: %s", exc)
            await build.set_failed()
            return False

    write_log(build.id, "I: creating mirror\n")
    try:
        await apt.mirror_create(
            mirror,
            version,
            base_mirror,
            base_mirror_version,
            url,
            mirror_distribution,
            components,
            architectures,
            download_sources=download_sources,
            download_udebs=download_installer,
            download_installer=download_installer,
        )
    except NotFoundError as exc:
        write_log(build.id, "E: aptly seems to be not available: %s\n" % str(exc))
        logger.error("aptly seems to be not available: %s", str(exc))
        await build.set_failed()
        return False
    except AptlyError as exc:
        write_log(
            build.id,
            "E: failed to create mirror %s on aptly: %s\n" % (mirror, str(exc)))
        logger.error("failed to create mirror %s on aptly: %s", mirror, str(exc))
        await build.set_failed()
        return False

    # Hand off the download/update phase to the aptly worker queue.
    args = {
        "update_mirror": [
            build.id,
            mirror_project_version.id,
            base_mirror,
            base_mirror_version,
            mirror,
            version,
            components
        ]
    }
    await self.aptly_queue.put(args)
def test_build_changed_url_encoding():
    """
    Test that Build.build_changed processes a build whose version contains
    characters that need URL-encoding in webhook URL templates.
    """
    # Mocked maintainer attached to the build.
    maintainer = MagicMock()
    maintainer.firstname.return_value = "John"
    maintainer.lastname.return_value = "Snow"

    # Webhook whose URL template applies the |urlencode filter to the
    # build version.
    hook = MagicMock()
    hook.enabled.return_value = True
    hook.skip_ssl = True
    hook.method = "get"
    hook.url = "http://nonsense.invalid/get/{{ build.version|urlencode }}"
    hook.body = "[]"

    srcrepo = MagicMock()
    srcrepo.hooks = [hook]
    srcrepo.id.return_value = 111
    srcrepo.url.return_value = "git://url"
    srcrepo.name.return_value = "srcpkg"

    build = MagicMock()
    build.maintainer.return_value = maintainer
    build.sourcerepository = srcrepo
    build.startstamp = "NOW"
    build.endstamp = "NOW"
    build.id = 1337
    build.buildtype = "deb"
    build.ci_branch = "master"
    build.git_ref = "1337"
    build.sourcename = "srcpkg"
    # Version deliberately contains characters that must be URL-encoded.
    build.version = "0.0.0+git1-1337<>"
    build.buildstate = "successful"
    build.url = "/blah"
    build.raw_log_url = "/blub"

    with patch("molior.molior.notifier.Configuration") as cfg, patch(
            # "molior.molior.worker_notification.trigger_hook", side_effect=asyncio.coroutine(
            #     lambda method, url, skip_ssl, body: None)
            # ) as trigger_hook, patch(
            "molior.molior.worker_notification.app") as app, patch(
            "molior.molior.worker_notification.Session") as Session, patch(
            "molior.molior.configuration.open",
            mock_open(read_data="{'hostname': 'testhostname'}")):
        cfg.return_value.hostname = "localhost"
        # Wire up Session() as a context manager whose query chain
        # ultimately returns the mocked build.
        enter = MagicMock()
        session = MagicMock()
        query = MagicMock()
        qfilter = MagicMock()
        enter.__enter__.return_value = session
        query.filter.return_value = qfilter
        qfilter.first.return_value = build
        session.query.return_value = query
        Session.return_value = enter
        Session().__enter__().query().filter().first().return_value = build
        app.websocket_broadcast = Mock(
            side_effect=asyncio.coroutine(lambda msg: None))
        loop = asyncio.get_event_loop()
        notification_worker = NotificationWorker()
        asyncio.ensure_future(notification_worker.run())
        loop.run_until_complete(Build.build_changed(build))
        # The handler must have opened a database session.
        Session.assert_called()
async def trigger_build(request):
    """
    Triggers a build.

    ---
    description: Triggers a build
    tags:
        - TriggerBuild
    consumes:
        - application/x-www-form-urlencoded
    parameters:
        - name: repository
          in: body
          required: true
          type: string
        - name: git_ref
          in: body
          required: false
          type: string
        - name: git_branch
          in: body
          required: false
          type: string
    produces:
        - text/json
    responses:
        "200":
            description: successful
        "500":
            description: internal server error
    """
    data = await request.json()
    repository = data.get("repository")
    git_ref = data.get("git_ref")
    git_branch = data.get("git_branch")

    # Refuse new builds while the instance is in maintenance mode
    # (stored as a row in the metadata table).
    maintenance_mode = False
    query = "SELECT value from metadata where name = :key"
    result = request.cirrina.db_session.execute(query, {"key": "maintenance_mode"})
    for value in result:
        if value[0] == "true":
            maintenance_mode = True
        break

    if maintenance_mode:
        return web.Response(status=503, text="Maintenance Mode")

    if not repository:
        return web.Response(text="Bad Request", status=400)

    logger.info("build triggered: %s %s %s", repository, git_ref, git_branch)

    repo = (request.cirrina.db_session.query(SourceRepository).filter(
        SourceRepository.url == repository).first())
    if not repo:
        return web.Response(text="Repo not found", status=400)

    # Create the top-level "build" record; version is resolved later by
    # the build worker.
    build = Build(
        version=None,
        git_ref=git_ref,
        ci_branch=git_branch,
        is_ci=None,
        versiontimestamp=None,
        sourcename=repo.name,
        buildstate="new",
        buildtype="build",
        buildconfiguration=None,
        sourcerepository=repo,
        maintainer=None,
    )
    request.cirrina.db_session.add(build)
    request.cirrina.db_session.commit()
    await build_added(build)

    # Token lets external callers track this build via the returned id.
    token = uuid.uuid4()
    buildtask = BuildTask(build=build, task_id=str(token))
    request.cirrina.db_session.add(buildtask)
    request.cirrina.db_session.commit()

    # Empty git_ref means "build the latest ref".
    if git_ref == "":
        args = {"buildlatest": [repo.id, build.id]}
    else:
        args = {"build": [build.id, repo.id, git_ref, git_branch]}
    await request.cirrina.task_queue.put(args)

    return web.json_response({"build_token": str(token)})
async def process_push(request, data):
    """
    Process incoming PUSH event from a GitLab instance.

    Args:
        request: The request instance.
        data (dict): The received data.
    """
    # Log the identifying fields of the incoming event.
    user_username = data.get("user_username")
    logger.info("GitLab-API: PUSH (user_username): %s", user_username)
    user_name = data.get("user_name")
    logger.info("GitLab-API: PUSH (user_name): %s", user_name)
    user_email = data.get("user_email")
    logger.debug("GitLab-API: PUSH (user_email): %s", user_email)
    git_ref = data.get("ref")
    logger.info("GitLab-API: PUSH (git_ref): %s", git_ref)
    checkout_sha = data.get("checkout_sha")
    logger.info("GitLab-API: PUSH (checkout_sha): %s", checkout_sha)

    # Resolve the repository URL from the "project" section, preferring
    # the git-over-ssh variants.
    repository_url = ""
    project = data.get("project")
    if project:
        project_ssh_url = project.get("ssh_url")
        logger.debug("GitLab-API: PUSH (project_ssh_url): %s", project_ssh_url)
        project_name = project.get("name")
        logger.info("GitLab-API: PUSH (project_name): %s", project_name)
        project_url = project.get("url")
        logger.debug("GitLab-API: PUSH (project_url): %s", project_url)
        project_git_ssh_url = project.get("git_ssh_url")
        logger.debug("GitLab-API: PUSH (project_git_ssh_url): %s",
                     project_git_ssh_url)
        repository_url = (project_git_ssh_url or project_ssh_url
                          or project_url or "")

    # Fall back to the legacy "repository" section of the payload.
    if not repository_url:
        repository = data.get("repository")
        if repository:
            repository_urlx = repository.get("url")
            logger.debug("GitLab-API: PUSH (repository_urlx): %s",
                         repository_urlx)
            repository_git_ssh_url = repository.get("git_ssh_url")
            logger.debug("GitLab-API: PUSH (repository_git_ssh_url): %s",
                         repository_git_ssh_url)
            repository_url = (repository_git_ssh_url or repository_urlx
                              or "")

    if not repository_url:
        return "Missing GIT repository URL (ssh)", 400
    logger.info("GitLab-API: PUSH (repository_url): %s", repository_url)

    # Derive the CI branch from the ref; tag pushes are not handled here.
    ci_branch = None
    if git_ref:
        if git_ref.startswith("refs/tags/"):
            # Skip processing of TAG_PUSH events
            logger.debug("GitLab-API: PUSH unhandled due to TAG_PUSH object")
            return "No TAG_PUSH objects allowed here", 400
        if git_ref.startswith("refs/heads/"):
            ci_branch = git_ref[len("refs/heads/"):]

    # No further processing for API-tests with empty event_name
    # BEWARE: API-test from GitLab will have event_name set to 'push',
    # so be careful
    event_name = data.get("event_name")
    if not event_name:
        logger.info("GitLab-API: TEST: Tickle, tickle ... Hihihi")
        return "OK", 200

    logger.info("GitLab-API: PUSH (ci_branch): %s", ci_branch)

    repo = request.cirrina.db_session.query(SourceRepository).filter(
        SourceRepository.url == repository_url).first()
    if not repo:
        return "Repo not found", 400

    # Use pure hash for CI-builds, instead of git_ref/branch.
    build = Build(
        version=None,
        git_ref=checkout_sha,
        ci_branch=ci_branch,
        is_ci=False,
        versiontimestamp=None,
        sourcename=repo.name,
        buildstate="new",
        buildtype="build",
        buildconfiguration=None,
        sourcerepository=repo,
        maintainer=None,
    )
    request.cirrina.db_session.add(build)
    request.cirrina.db_session.commit()
    await build_added(build)

    token = uuid.uuid4()
    build_task = BuildTask(build=build, task_id=str(token))
    request.cirrina.db_session.add(build_task)
    request.cirrina.db_session.commit()
    logger.debug("GitLab-API: CI-BUILD (build_id): %s", build.id)

    if checkout_sha and repo.id:
        args = {"build": [build.id, repo.id, checkout_sha, ci_branch]}
        # Queue new build job
        # NOTE(review): asyncio.Queue.put() returns None, which would make
        # this condition always false (422 despite queueing) — confirm that
        # task_queue.put returns a truthy value.
        if await request.cirrina.task_queue.put(args):
            logger.info("GitLab-API: CI-BUILD triggered (sourcename): %s",
                        build.sourcename)
            return "OK", 200
    return "Unprocessable Entity", 422
async def process_tag_push(request, data):
    """
    Process incoming TAG_PUSH event from a GitLab instance.

    Args:
        request: The request instance.
        data (dict): The received data.
    """
    event_name = data.get("event_name")
    logger.info("GitLab-API: Incoming event (event_name): %s", event_name)
    user_username = data.get("user_username")
    logger.info("GitLab-API: TAG_PUSH (user_username): %s", user_username)
    user_name = data.get("user_name")
    logger.info("GitLab-API: TAG_PUSH (user_name): %s", user_name)
    git_ref = data.get("ref")
    logger.info("GitLab-API: TAG_PUSH (git_ref): %s", git_ref)
    user_email = data.get("user_email")
    logger.debug("GitLab-API: TAG_PUSH (user_email): %s", user_email)

    # Resolve the repository URL, preferring the git-over-ssh variants.
    repository_url = ""
    project = data.get("project")
    if project:
        project_name = project.get("name")
        logger.info("GitLab-API: TAG_PUSH (project_name): %s", project_name)
        url = project.get("url")
        logger.debug("GitLab-API: TAG_PUSH (url): %s", url)
        # BUGFIX: GitLab payloads carry "ssh_url"/"git_ssh_url" (as used in
        # process_push); the previous "ssl_url"/"git_ssl_url" keys never
        # exist, so the code always fell back to the legacy "url" key.
        ssh_url = project.get("ssh_url")
        logger.debug("GitLab-API: TAG_PUSH (ssh_url): %s", ssh_url)
        git_ssh_url = project.get("git_ssh_url")
        logger.debug("GitLab-API: TAG_PUSH (git_ssh_url): %s", git_ssh_url)
        if git_ssh_url:
            repository_url = git_ssh_url
        elif ssh_url:
            repository_url = ssh_url
        elif url:
            repository_url = url

    # Fall back to the legacy "repository" section of the payload.
    if not repository_url:
        repository = data.get("repository")
        if repository:
            url = repository.get("url")
            git_ssh_url = repository.get("git_ssh_url")
            if git_ssh_url:
                repository_url = git_ssh_url
            elif url:
                repository_url = url

    if not repository_url:
        return "Missing GIT repository URL (ssh)", 400
    logger.info("GitLab-API: TAG_PUSH (repository): %s", repository_url)

    # Prepare branch (used for UI only) from parsed data
    ui_branch = None
    if git_ref:
        tag_left = "refs/tags/"
        if git_ref.startswith(tag_left):
            ui_branch = git_ref[len(tag_left):]
        ref_left = "refs/heads/"
        if git_ref.startswith(ref_left):
            ui_branch = git_ref[len(ref_left):]

    repo = (request.cirrina.db_session.query(SourceRepository).filter(
        SourceRepository.url == repository_url).first())
    if not repo:
        return "Repo not found", 400

    build = Build(
        version=None,
        git_ref=git_ref,
        ci_branch=ui_branch,
        is_ci=False,
        versiontimestamp=None,
        sourcename=repo.name,
        buildstate="new",
        buildtype="build",
        buildconfiguration=None,
        sourcerepository=repo,
        maintainer=None,
    )
    request.cirrina.db_session.add(build)
    request.cirrina.db_session.commit()
    await build_added(build)

    token = uuid.uuid4()
    build_task = BuildTask(build=build, task_id=str(token))
    request.cirrina.db_session.add(build_task)
    request.cirrina.db_session.commit()
    logger.debug("GitLab-API: CI-BUILD (build_id): %s", build.id)

    if git_ref and repo.id:
        args = {"build": [build.id, repo.id, git_ref, ui_branch]}
        # Queue new build job
        # NOTE(review): asyncio.Queue.put() returns None — confirm that
        # task_queue.put returns a truthy value, otherwise this always
        # falls through to 422 despite queueing the job.
        if await request.cirrina.task_queue.put(args):
            logger.info("GitLab-API: BUILD triggered (sourcename): %s",
                        build.sourcename)
            return "OK", 200
    return "Unprocessable Entity", 422
async def trigger_build(request):
    """
    Triggers a build latest job on a sourcerepository.

    ---
    description: Triggers a build latest job on a sourcerepository.
    tags:
        - Builds
    consumes:
        - application/x-www-form-urlencoded
    parameters:
        - name: repository_id
          in: path
          required: true
          type: integer
    produces:
        - text/json
    responses:
        "200":
            description: successful
        "400":
            description: Incorrect value for repository_id
        "500":
            description: internal server error
    """
    repository_id = request.match_info.get("repository_id")
    try:
        repository_id = int(repository_id)
    except (ValueError, TypeError):
        logger.error(
            "trigger_build_latest error: invalid repository_id received")
        return web.Response(text="Incorrect value for repository_id",
                            status=400)

    repository = (request.cirrina.db_session.query(SourceRepository).filter(
        SourceRepository.id == repository_id).first())
    if not repository:
        # IDIOM: pass lazy %-style args to the logger instead of eager
        # string interpolation with the % operator.
        logger.error("trigger_build_latest error: repo %d not found",
                     repository_id)
        return web.Response(text="Repository not found", status=400)

    logger.info("trigger_build_latest for repo %d", repository_id)
    build = Build(
        version=None,
        git_ref=None,
        ci_branch=None,
        is_ci=None,
        versiontimestamp=None,
        sourcename=repository.name,
        buildstate="new",
        buildtype="build",
        buildconfiguration=None,
        sourcerepository=repository,
        maintainer=None,
    )
    request.cirrina.db_session.add(build)
    request.cirrina.db_session.commit()
    await build_added(build)
    await build.set_building()

    # The token lets the caller poll/identify this build task later.
    token = uuid.uuid4()
    buildtask = BuildTask(build=build, task_id=str(token))
    request.cirrina.db_session.add(buildtask)
    request.cirrina.db_session.commit()

    args = {"buildlatest": [repository_id, build.id]}
    await request.cirrina.task_queue.put(args)

    return web.json_response({"build_token": str(token)})