Example no. 1
async def BuildDebSrc(repo_id, repo_path, build_id, ci_version, is_ci, author,
                      email):
    write_log(build_id, "I: getting debian build information\n")
    src_package_name = await get_changelog_attr("Source", repo_path)
    version = await get_changelog_attr("Version", repo_path)
    repo_path = Path(repo_path)

    key = Configuration().debsign_gpg_email
    if not key:
        write_log(build_id, "E: Signing key not defined in configuration\n")
        logger.error("Signing key not defined in configuration")
        return False

    logger.info("%s: creating source package", src_package_name)
    write_log(
        build_id,
        "I: creating source package: %s (%s)\n" % (src_package_name, version))

    async def outh(line):
        line = line.strip()
        if line:
            write_log(build_id, "%s\n" % line)

    if is_ci:
        # in order to publish a source package for a CI build we need
        # to create a CI changelog with the correct version

        distribution = await get_changelog_attr("Distribution", repo_path)

        env = os.environ.copy()
        env["DEBFULLNAME"] = author
        env["DEBEMAIL"] = email
        dchcmd = "dch -v %s --distribution %s --force-distribution 'CI Build'" % (
            ci_version, distribution)
        version = ci_version

        process = Launchy(shlex.split(dchcmd),
                          outh,
                          outh,
                          cwd=str(repo_path),
                          env=env)
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("Error running dch for CI build")
            return False

    cmd = "dpkg-buildpackage -S -d -nc -I.git -pgpg1 -k{}".format(key)
    process = Launchy(shlex.split(cmd), outh, outh, cwd=str(repo_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        write_log(build_id, "E: Error building source package\n")
        logger.error("source packaging failed, dpkg-builpackage returned %d",
                     ret)
        return False

    logger.info("%s (%d): source package v%s created", src_package_name,
                repo_id, version)
    return True
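
A minimal, self-contained sketch (sample values only) of how the two command strings above are assembled and tokenized; `shlex.split` keeps the quoted 'CI Build' changelog message as a single argument to `dch`, and `-S -d -nc` tells `dpkg-buildpackage` to build source only, skip the build-dependency check and skip cleaning:

import shlex

# hypothetical sample values for illustration
ci_version = "1.2.3+git20240101120000.abc123"
distribution = "stable"
key = "builder@example.com"

dchcmd = "dch -v %s --distribution %s --force-distribution 'CI Build'" % (
    ci_version, distribution)
print(shlex.split(dchcmd))  # ..., '--force-distribution', 'CI Build']

cmd = "dpkg-buildpackage -S -d -nc -I.git -pgpg1 -k{}".format(key)
print(shlex.split(cmd))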
Example no. 2
    def run(self):
        self.backend = Backend().init()
        if not self.backend:
            return
        if not Auth().init():
            return

        Launchy.attach_loop(self.loop)

        worker = Worker()
        self.task_worker = asyncio.ensure_future(worker.run())

        backend_worker = BackendWorker()
        self.task_backend_worker = asyncio.ensure_future(backend_worker.run())

        aptly_worker = AptlyWorker()
        self.task_aptly_worker = asyncio.ensure_future(aptly_worker.run())

        notification_worker = NotificationWorker()
        self.task_notification_worker = asyncio.ensure_future(notification_worker.run())

        cfg = Configuration()
        daily_cleanup = cfg.aptly.get("daily_cleanup")
        if daily_cleanup is False or daily_cleanup == "off" or daily_cleanup == "disabled":
            return

        if not daily_cleanup:
            daily_cleanup = "04:00"
        cleanup_sched = Scheduler(locale="en_US")
        cleanup_job = CronJob(name='cleanup').every().day.at(daily_cleanup).go(self.cleanup_task)
        cleanup_sched.add_job(cleanup_job)
        self.task_cron = asyncio.ensure_future(cleanup_sched.start())

        app.set_context_functions(MoliorServer.create_cirrina_context, MoliorServer.destroy_cirrina_context)
        app.run(self.host, self.port, logger=self.logger, debug=self.debug)
Example no. 3
async def get_latest_tag(path, build_id):
    """
    Returns latest tag from given git
    repository.

    Args:
        path (str): Path to git repository

    Returns:
        tag (Git.tag): The latest git tag
    """
    ret = await run_git("git fetch --tags --force", str(path), build_id)
    if ret != 0:
        logger.error("error running git fetch: %s", str(path))
        return None

    git_tags = []

    async def outh(line):
        nonlocal git_tags
        git_tags.append(line.strip())

    process = Launchy(shlex.split("git tag"), outh, outh, cwd=str(path))
    await process.launch()
    await process.wait()

    valid_tags = {}

    # get commit timestamps
    for tag in git_tags:
        timestamp = None

        async def outh2(line):
            nonlocal timestamp
            timestamp = line.strip()

        process = Launchy(shlex.split(
            "git show -s --format=%ct {}".format(tag)),
                          outh2,
                          outh2,
                          cwd=str(path))
        await process.launch()
        await process.wait()

        if timestamp and validate_version_format(tag):
            valid_tags[timestamp] = tag

    if valid_tags:
        return max(valid_tags.items(), key=operator.itemgetter(0))[1]
    return None
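
The selection at the end keys a dict by commit timestamp (`git show -s --format=%ct` prints Unix epoch seconds) and returns the tag of the newest commit. A tiny self-contained sketch with hypothetical data; the keys are compared as strings, which orders correctly while all epoch values have the same number of digits:

import operator

# hypothetical epoch-second strings -> tag names
valid_tags = {
    "1690000000": "v1.9.0",
    "1700000000": "v2.0.0",
}
latest = max(valid_tags.items(), key=operator.itemgetter(0))[1]
print(latest)  # v2.0.0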
Example no. 4
async def run_git(cmd, cwd, build_id):
    async def outh(line):
        write_log(build_id, "%s\n" % line)

    process = Launchy(shlex.split(cmd), outh, outh, cwd=cwd)
    await process.launch()
    return await process.wait()
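
All examples rely on `Launchy` only for two things: it invokes the given callbacks once per decoded output line, and `wait()` returns the process exit code. A rough, self-contained approximation of that contract using plain asyncio (an illustration, not the actual Launchy implementation; the demo assumes `git` is on PATH):

import asyncio
import shlex


async def run_with_line_callbacks(cmd, on_stdout, on_stderr, cwd=None, env=None):
    # run cmd, feed each decoded output line to a callback, return the exit code
    proc = await asyncio.create_subprocess_exec(
        *shlex.split(cmd), cwd=cwd, env=env,
        stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)

    async def pump(stream, callback):
        while True:
            line = await stream.readline()
            if not line:
                break
            await callback(line.decode(errors="replace").rstrip("\n"))

    await asyncio.gather(pump(proc.stdout, on_stdout), pump(proc.stderr, on_stderr))
    return await proc.wait()


async def main():
    async def show(line):
        print("| " + line)
    print("exit code:", await run_with_line_callbacks("git --version", show, show))

asyncio.run(main())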
Example no. 5
async def get_changelog_attr(name, path):
    """
    Gets given changelog attribute from given
    repository path.

    Args:
        name (str): The attr's name.
        path (pathlib.Path): The repo's path.
    """
    attr = ""
    err = ""

    async def outh(line):
        nonlocal attr
        attr += line

    async def errh(line):
        nonlocal err
        err += line

    process = Launchy(shlex.split("dpkg-parsechangelog -S {}".format(name)),
                      outh,
                      errh,
                      cwd=str(path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("error occured while getting changelog attribute: %s",
                     str(err, "utf-8"))
        raise Exception("error running dpkg-parsechangelog")

    return attr.strip()
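
For reference, the same lookup can be reproduced synchronously with the standard library (a minimal sketch; it assumes `dpkg-parsechangelog` from dpkg-dev is installed and that `path` contains a `debian/changelog`):

import subprocess

def changelog_attr(name, path):
    # dpkg-parsechangelog -S <field> prints a single changelog field,
    # e.g. Source, Version or Distribution
    result = subprocess.run(["dpkg-parsechangelog", "-S", name],
                            cwd=str(path), capture_output=True,
                            text=True, check=True)
    return result.stdout.strip()

# usage with a hypothetical repository path:
# print(changelog_attr("Version", "/path/to/repo"))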
Example no. 6
async def DeleteBuildEnv(dist, name, version, arch):
    """
    Delete sbuild chroot and other build environments.

    Args:
        dist (str): The distrelease
        name (str): The name
        version (str): The version
        arch (str): The architecture

    Returns:
        bool: True on success
    """

    logger.info("deleting build environments for %s-%s-%s", dist, version,
                arch)

    async def outh(line):
        pass

    process = Launchy([
        "sudo", "run-parts", "-a", "remove", "-a", dist, "-a", name, "-a",
        version, "-a", arch, "/etc/molior/mirror-hooks.d"
    ], outh, outh)
    await process.launch()
    ret = await process.wait()

    if ret != 0:
        logger.error("error deleting build env")
        return False

    return True
Example no. 7
async def GitChangeUrl(old_repo_path, name, url):
    process = Launchy("git remote set-url origin {}".format(url),
                      None,
                      None,
                      cwd=str(old_repo_path))
    await process.launch()
    await process.wait()
    if os.path.exists(old_repo_path):
        os.rename(old_repo_path, os.path.dirname(old_repo_path) + "/" + name)
Example no. 8
async def run_git(cmd, cwd, build, write_output_log=True):
    await build.log("$: %s\n" % cmd)

    async def outh(line):
        if write_output_log:
            await build.log(line + "\n")

    async def errh(line):
        await build.log(line + "\n")

    env = os.environ.copy()
    env["GIT_SSL_NO_VERIFY"] = ""
    process = Launchy(cmd, outh, errh, cwd=cwd, env=env)
    await process.launch()
    ret = await process.wait()
    return ret == 0
Example no. 9
async def GetBuildInfo(repo_path, git_ref):
    class BuildInfo:
        pass

    info = BuildInfo()
    info.version = await get_changelog_attr("Version", repo_path)
    info.sourcename = await get_changelog_attr("Source", repo_path)

    gitinfo = None

    async def outh(line):
        nonlocal gitinfo
        gitinfo = line.strip()

    process = Launchy(shlex.split("git show -s --format='%H %cI %ae %an'"),
                      outh,
                      outh,
                      cwd=str(repo_path))
    await process.launch()
    await process.wait()

    gitinfos = gitinfo.split(" ", 3)
    if len(gitinfos) != 4:
        logger.error("Error parsing git info '%s'", gitinfos)
        return None

    info.commit_hash = gitinfos[0]
    d = gitinfos[1]
    info.author_email = gitinfos[2]
    info.author_name = gitinfos[3]

    ts = d[0:19] + d[19:25].replace(":", "")
    tag_dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S%z")

    info.tag_stamp = tag_dt.strftime("%Y-%m-%d %T%z")
    info.tag_dt = tag_dt

    try:
        info.firstname, info.lastname, info.email = await get_maintainer(
            repo_path)
    except MaintainerParseError as exc:
        logger.warning("could not get maintainer: %s" % str(exc))
        return None

    info.plain_targets = get_target_config(repo_path)

    return info
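
The single line returned by `git show -s --format='%H %cI %ae %an'` is split into exactly four fields (the author name may contain spaces, hence `split(" ", 3)`), and the colon in the ISO-8601 offset is stripped so `%z` accepts it. A self-contained sketch over a hypothetical sample line:

from datetime import datetime

# hypothetical output of: git show -s --format='%H %cI %ae %an'
gitinfo = "0123abcd 2024-01-02T10:20:30+01:00 jane@example.com Jane Q Developer"

commit_hash, d, author_email, author_name = gitinfo.split(" ", 3)

# "2024-01-02T10:20:30+01:00" -> "2024-01-02T10:20:30+0100"
ts = d[0:19] + d[19:25].replace(":", "")
tag_dt = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S%z")
print(commit_hash, author_name, tag_dt.strftime("%Y-%m-%d %T%z"))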
Example no. 10
async def GetBuildInfo(repo_path, git_ref):
    class BuildInfo:
        pass

    info = BuildInfo()
    info.version = await get_changelog_attr("Version", repo_path)
    info.sourcename = await get_changelog_attr("Source", repo_path)

    gitinfo = None

    async def outh(line):
        nonlocal gitinfo
        gitinfo = line.strip()

    process = Launchy("git show -s --format='%H %ae %an'",
                      outh,
                      outh,
                      cwd=str(repo_path))
    await process.launch()
    await process.wait()

    gitinfos = gitinfo.split(" ", 2)
    if len(gitinfos) != 3:
        logger.error("Error parsing git info '%s'", gitinfos)
        return None

    info.commit_hash = gitinfos[0]
    info.author_email = gitinfos[1]
    info.author_name = gitinfos[2]

    maintainer = await get_maintainer(repo_path)
    if not maintainer:
        # FIXME: log to build log
        logger.error("could not parse maintainer")
        return None

    info.firstname, info.lastname, info.email = maintainer
    info.plain_targets = get_target_config(repo_path)

    return info
Example no. 11
async def get_latest_tag(repo_path, build_id):
    """
    Returns latest tag from given git
    repository.

    Args:
        repo_path (str): Path to git repository
        build_id (int): The build id

    Returns:
        tag (Git.tag): The latest git tag
    """
    valid_tags = {}
    with Session() as session:
        build = session.query(Build).filter(Build.id == build_id).first()
        if not build:
            logger.error("get_latest_tag: build %d not found", build_id)
            return None

        if not await GitCleanLocal(repo_path, build):
            return None

        if not await run_git("git fetch --tags --prune --prune-tags --force",
                             str(repo_path),
                             build,
                             write_output_log=False):
            logger.error("error running git fetch: %s", str(repo_path))
            return None

        git_tags = []

        async def outh(line):
            nonlocal git_tags
            git_tags.append(line.strip())

        process = Launchy("git tag", outh, outh, cwd=str(repo_path))
        await process.launch()
        await process.wait()

        # get commit timestamps
        for tag in git_tags:
            timestamp = None

            async def outh2(line):
                nonlocal timestamp
                line = line.strip()
                if line:
                    timestamp = line

            process = Launchy("git log -1 --format=%ct {}".format(tag),
                              outh2,
                              outh2,
                              cwd=str(repo_path))
            await process.launch()
            await process.wait()

            if timestamp and validate_version_format(tag):
                valid_tags[timestamp] = tag

        if not valid_tags:
            logger.warning("no valid git tags found")
            return None

    return max(valid_tags.items(), key=operator.itemgetter(0))[1]
Example no. 12
async def publish_packages(build, out_path):
    """
    Publishes given packages to given
    publish point.

    Args:
        build (Build): The build model.
        out_path (Path): The build output path.

    Returns:
        bool: True if successful, otherwise False.
    """

    arch = build.buildconfiguration.buildvariant.architecture.name
    outfiles = debchanges_get_files(out_path, build.sourcename, build.version,
                                    arch)

    files2upload = []
    for f in outfiles:
        logger.info("publisher: adding %s", f)
        files2upload.append("{}/{}".format(out_path, f))

    count_files = len(files2upload)
    if count_files == 0:
        logger.error("publisher: build %d: no files to upload", build.id)
        write_log(build.id, "E: no debian packages found to upload\n")
        write_log(build.parent.parent.id, "E: build %d failed\n" % build.id)
        return False

    # FIXME: check on startup
    key = Configuration().debsign_gpg_email
    if not key:
        logger.error("Signing key not defined in configuration")
        write_log(build.id, "E: no signinig key defined in configuration\n")
        write_log(build.parent.parent.id, "E: build %d failed\n" % build.id)
        return False

    write_log(build.id, "I: Signing packages\n")

    async def outh(line):
        write_log(build.id, "%s\n" % line)

    cmd = "debsign -pgpg1 -k{} {}_{}_{}.changes".format(
        key, build.sourcename, build.version, arch)
    process = Launchy(shlex.split(cmd), outh, outh, cwd=str(out_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("debsign failed")
        return False

    logger.info("publisher: uploading %d file%s", count_files,
                "" if count_files == 1 else "s")
    projectversion = build.buildconfiguration.projectversions[0]

    basemirror_name = projectversion.buildvariants[0].base_mirror.project.name
    basemirror_version = projectversion.buildvariants[0].base_mirror.name
    project_name = projectversion.project.name
    project_version = projectversion.name
    archs = [bdv.architecture.name for bdv in projectversion.buildvariants]

    debian_repo = DebianRepository(basemirror_name, basemirror_version,
                                   project_name, project_version, archs)
    await debian_repo.add_packages(files2upload, ci_build=build.is_ci)

    files2delete = files2upload
    files2delete.append("{}/{}_{}_{}.changes".format(out_path,
                                                     build.sourcename,
                                                     build.version, arch))
    for f in files2delete:
        logger.info("publisher: removing %s", f)
        os.remove(f)

    return True
Example no. 13
async def CreateBuildEnv(chroot_id, build_id, dist, name, version, arch,
                         components, repo_url, mirror_keys):
    """
    Creates a sbuild chroot and other build environments.

    Args:
        dist (str): The distrelease
        version (str): The version
        arch (str): The architecture

    Returns:
        bool: True on success
    """

    with Session() as session:
        build = session.query(Build).filter(Build.id == build_id).first()
        if not build:
            logger.error("aptly worker: mirror build with id %d not found",
                         build_id)
            return False

        await build.logtitle("Chroot Environment")

        await build.set_building()
        session.commit()

        logger.info("creating build environments for %s-%s-%s", dist, version,
                    arch)
        await build.log("Creating build environments for %s-%s-%s\n\n" %
                        (dist, version, arch))

        async def outh(line):
            await build.log("%s\n" % line)

        process = Launchy([
            "sudo", "run-parts", "-a", "build", "-a", dist, "-a", name, "-a",
            version, "-a", arch, "-a", components, "-a", repo_url, "-a",
            mirror_keys, "/etc/molior/mirror-hooks.d"
        ], outh, outh)
        await process.launch()
        ret = await process.wait()

        if ret != 0:
            logger.error("error creating build env")
            await build.log("Error creating build environment\n")
            await build.log("\n")
            await build.logtitle("Done", no_footer_newline=True)
            await build.set_failed()
            await build.logdone()
            session.commit()
            return False

        await build.set_needs_publish()
        session.commit()

        await build.set_publishing()
        session.commit()

        process = Launchy([
            "sudo", "run-parts", "-a", "publish", "-a", dist, "-a", name, "-a",
            version, "-a", arch, "/etc/molior/mirror-hooks.d"
        ], outh, outh)
        await process.launch()
        ret = await process.wait()

        if ret != 0:
            logger.error("error publishing build env")
            await build.log("Error publishing build environment\n")
            await build.logtitle("Done", no_footer_newline=True)
            await build.set_publish_failed()
            await build.logdone()
            session.commit()
            return False

        await build.log("\n")
        await build.logtitle("Done", no_footer_newline=True)
        await build.set_successful()
        session.commit()

        chroot = session.query(Chroot).filter(Chroot.id == chroot_id).first()
        chroot.ready = True
        session.commit()

        # Schedule builds
        args = {"schedule": []}
        await enqueue_task(args)

        return True
Example no. 14
async def BuildProcess(task_queue, aptly_queue, parent_build_id, repo_id,
                       git_ref, ci_branch):
    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return

        write_log_title(parent_build_id, "Molior Build")

        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            write_log(parent_build_id,
                      "E: source repository {} not found\n".format(repo_id))
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            session.commit()
            return

        write_log(parent_build_id, "I: git checkout {}\n".format(git_ref))

        # Checkout
        ret = await asyncio.ensure_future(
            GitCheckout(repo.src_path, git_ref, parent_build_id))
        if not ret:
            write_log(parent_build_id, "E: git checkout failed\n")
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        write_log(parent_build_id, "\nI: get build information\n")
        info = None
        try:
            info = await GetBuildInfo(repo.src_path, git_ref)
        except Exception as exc:
            logger.exception(exc)

        if not info:
            write_log(parent_build_id, "E: Error getting build information\n")
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        targets = get_targets(info.plain_targets, repo, session)
        if not targets:
            repo.log_state(
                "unknown target projectversions in debian/molior.yml")
            write_log(
                parent_build_id,
                "E: the repository is not added to any projectversions referenced in debian/molior.yml\n"
            )
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            repo.set_ready()
            await parent.set_failed()
            session.commit()
            return

        # check if it is a CI build
        # i.e. if gittag does not match version in debian/changelog
        is_ci = False
        gittag = ""

        async def outh(line):
            nonlocal gittag
            gittag += line

        process = Launchy(shlex.split("git describe --tags --abbrev=40"),
                          outh,
                          outh,
                          cwd=str(repo.src_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("error running git describe")
        else:
            v = strip_epoch_version(info.version)
            if not re.match("^v?{}$".format(v.replace("~", "-")), gittag):
                is_ci = True

        ci_cfg = Configuration().ci_builds
        ci_enabled = ci_cfg.get("enabled") if ci_cfg else False

        if is_ci and not ci_enabled:
            repo.log_state("CI builds are not enabled in configuration")
            write_log(parent_build_id,
                      "E: CI builds are not enabled in configuration\n")
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_successful()
            repo.set_ready()
            session.commit()
            return

        parent.is_ci = is_ci
        session.commit()

        if is_ci:
            # create CI version with git hash suffix
            info.origversion = info.version
            info.version += "+git{}.{}".format(
                info.tag_dt.strftime("%Y%m%d%H%M%S"), git_ref[:6])

            # check if CI builds enabled in any project version
            found = False
            for target in targets:
                projectversion = session.query(ProjectVersion).filter(
                    ProjectVersion.ci_builds_enabled == True,  # noqa: E712
                    ProjectVersion.id == target.projectversion_id).first()
                if projectversion:
                    found = True
                    break
            if not found:
                repo.log_state(
                    "CI builds not enabled in specified projectversions, not building..."
                )
                write_log(
                    parent_build_id,
                    "E: CI builds not enabled in specified projectversions, not building...\n"
                )
                write_log_title(parent_build_id,
                                "Done",
                                no_footer_newline=True,
                                no_header_newline=False)
                await parent.set_successful()
                repo.set_ready()
                session.commit()
                return

        # Check if source build already exists
        build = session.query(Build).filter(
            Build.buildtype == "source", Build.sourcerepository == repo,
            Build.version == info.version).first()
        if build:
            repo.log_state(
                "source package already built for version {}".format(
                    info.version))
            write_log(
                parent_build_id,
                "E: source package already built for version {}\n".format(
                    info.version))
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            repo.set_ready()
            await parent.set_successful()
            session.commit()
            args = {"schedule": []}
            await task_queue.put(args)
            return

        # Use committer name as maintainer for CI builds
        if is_ci:
            t = info.author_name.split(" ", 2)
            if len(t) == 2:
                firstname = t[0]
                lastname = t[1]
            else:
                firstname = t[0]
                lastname = ""
            email = info.author_email
        else:
            firstname = info.firstname
            lastname = info.lastname
            email = info.email

        maintainer = session.query(Maintainer).filter(
            Maintainer.email == email).first()
        if not maintainer:
            repo.log_state("creating new maintainer: %s %s <%s>" %
                           (firstname, lastname, email))
            write_log(
                parent_build_id, "I: creating new maintainer: %s %s <%s>\n" %
                (firstname, lastname, email))
            maintainer = Maintainer(firstname=firstname,
                                    surname=lastname,
                                    email=email)
            session.add(maintainer)
            session.commit()

        # FIXME: assert version == git tag

        build = Build(
            version=info.version,
            git_ref=info.commit_hash,
            ci_branch=ci_branch,
            is_ci=is_ci,
            versiontimestamp=info.tag_stamp,
            sourcename=info.sourcename,
            buildstate="new",
            buildtype="source",
            buildconfiguration=None,
            parent_id=parent_build_id,
            sourcerepository=repo,
            maintainer=maintainer,
        )

        session.add(build)
        session.commit()
        build.log_state("created")
        await build_added(build)

        # add build order dependencies
        build_after = get_buildorder(repo.src_path)
        build_after_deps = []
        found = False
        for dep_git in build_after:
            dep_repo = session.query(SourceRepository).filter(
                SourceRepository.url == dep_git).first()
            if not dep_repo:
                build.log_state("Error: build after repo '%s' not found" %
                                dep_git)
                write_log(parent_build_id,
                          "E: build after repo '%s' not found\n" % dep_git)
                # FIXME: write to build log
                continue
            found = True
            build.log_state("adding build after dependency to: %s" % dep_git)
            write_log(parent_build_id,
                      "I: adding build after dependency to: %s\n" % dep_git)
            build_after_deps.append(dep_repo)

        if found:
            build.build_after = build_after_deps
            session.commit()

        projectversion_ids = []
        build_configs = get_buildconfigs(targets, session)
        found = False
        for build_config in build_configs:
            projectversion_ids.extend([
                projectversion.id
                for projectversion in build_config.projectversions
            ])
            # FIXME: filter for buildtype?
            deb_build = (session.query(Build).filter(
                Build.buildconfiguration == build_config,
                Build.versiontimestamp == info.tag_stamp,
                Build.version == info.version,
            ).first())
            if deb_build:
                logger.warning("already built %s", repo.name)
                write_log(parent_build_id,
                          "E: already built {}\n".format(repo.name))
                continue

            # FIXME: why projectversion[0] ??
            if build_config.projectversions[0].is_locked:
                repo.log_state(
                    "build to locked projectversion '%s-%s' not permitted" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: build to locked projectversion '%s-%s' not permitted\n"
                    % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            if is_ci and not build_config.projectversions[0].ci_builds_enabled:
                repo.log_state(
                    "CI builds not enabled in projectversion '%s-%s'" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: CI builds not enabled in projectversion '%s-%s'\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            found = True

            write_log(
                parent_build_id,
                "I: creating build for projectversion '%s/%s'\n" % (
                    build_config.projectversions[0].project.name,
                    build_config.projectversions[0].name,
                ))

            deb_build = Build(
                version=info.version,
                git_ref=info.commit_hash,
                ci_branch=ci_branch,
                is_ci=is_ci,
                versiontimestamp=info.tag_stamp,
                sourcename=info.sourcename,
                buildstate="new",
                buildtype="deb",
                buildconfiguration=build_config,
                parent_id=build.id,
                sourcerepository=repo,
                maintainer=maintainer,
            )

            session.add(deb_build)
            session.commit()

            deb_build.log_state("created")
            await build_added(deb_build)

        # FIXME: if not found, abort?

        session.commit()

        # make list unique, filter duplicates (multiple archs)
        projectversion_ids = list(set(projectversion_ids))

        await build.set_building()
        session.commit()

        write_log(parent_build_id, "I: building source package\n")

        async def fail():
            write_log(parent_build_id, "E: building source package failed\n")
            write_log_title(build.id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=True)
            repo.set_ready()
            await build.set_failed()
            session.commit()
            # FIXME: cancel deb builds, or only create deb builds after source build ok

        # Build Source Package
        write_log_title(build.id, "Source Build")
        try:
            ret = await BuildDebSrc(repo_id, repo.src_path, build.id,
                                    info.version, is_ci,
                                    "{} {}".format(firstname, lastname), email)
        except Exception as exc:
            logger.exception(exc)
            await fail()
            return

        if not ret:
            await fail()
            return

        await build.set_needs_publish()
        session.commit()

        repo.set_ready()
        session.commit()

        write_log(parent_build_id, "I: publishing source package\n")
        await aptly_queue.put({"src_publish": [build.id, projectversion_ids]})
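
For CI builds the version gets a `+git<timestamp>.<shortref>` suffix derived from the tag datetime and the first six characters of the git ref (see above). A tiny sketch with hypothetical values:

from datetime import datetime, timezone

# hypothetical sample values
version = "1.4.2"
tag_dt = datetime(2024, 1, 2, 10, 20, 30, tzinfo=timezone.utc)
git_ref = "0123abcdef0123abcdef0123abcdef0123abcdef"

ci_version = version + "+git{}.{}".format(tag_dt.strftime("%Y%m%d%H%M%S"),
                                          git_ref[:6])
print(ci_version)  # 1.4.2+git20240102102030.0123ab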
Example no. 15
def destroy_cirrina_context(cirrina):
    cirrina.db_session.close()


@click.command()
@click.option("--host",
              default="localhost",
              help="Hostname, examples: 'localhost' or '0.0.0.0'")
@click.option("--port", default=8888, help="Listen port")
@click.option("--debug", default=False, help="Enable debug")
def mainloop(host, port, debug):
    """
    Starts the molior-web app.
    """
    moliorapi.set_context_functions(create_cirrina_context,
                                    destroy_cirrina_context)
    moliorapi.run(host, port, debug=debug)


if __name__ == "__main__":
    logger.info("molior v%s", MOLIOR_VERSION)
    Launchy.attach_loop(loop)

    backend = Backend().init(backend_queue)
    if not backend:
        exit(1)
    if not Auth().init():
        exit(1)
    asyncio.ensure_future(main(backend))
    mainloop()  # pylint: disable=no-value-for-parameter
Example no. 16
async def publish_packages(build_id, buildtype, sourcename, version,
                           architecture, is_ci, basemirror_name,
                           basemirror_version, project_name, project_version,
                           archs, out_path):
    """
    Publishes given packages to given
    publish point.

    Args:
        build_id (int): The build id.
        out_path (Path): The build output path.

    Returns:
        bool: True if successful, otherwise False.
    """

    outfiles = await debchanges_get_files(out_path, sourcename, version,
                                          architecture)
    add_files(build_id, buildtype, version, outfiles)
    # FIXME: commit

    files2upload = []
    for f in outfiles:
        logger.debug("publisher: adding %s", f)
        files2upload.append("{}/{}".format(out_path, f))

    count_files = len(files2upload)
    if count_files == 0:
        logger.error("publisher: build %d: no files to upload", build_id)
        await buildlog(build_id, "E: no debian packages found to upload\n")
        return False

    # FIXME: check on startup
    key = Configuration().debsign_gpg_email
    if not key:
        logger.error("Signing key not defined in configuration")
        await buildlog(build_id,
                       "E: no signinig key defined in configuration\n")
        return False

    await buildlog(build_id, "Signing packages:\n")

    async def outh(line):
        if len(line.strip()) != 0:
            await buildlog(build_id, "%s\n" % re.sub(r"^ *", " - ", line))

    v = strip_epoch_version(version)
    changes_file = "{}_{}_{}.changes".format(sourcename, v, architecture)

    cmd = "debsign -pgpg1 -k{} {}".format(key, changes_file)
    process = Launchy(cmd, outh, outh, cwd=str(out_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("debsign failed")
        return False

    logger.debug("publisher: uploading %d file%s", count_files,
                 "" if count_files == 1 else "s")

    debian_repo = DebianRepository(basemirror_name, basemirror_version,
                                   project_name, project_version, archs)
    ret = False
    try:
        ret = await debian_repo.add_packages(files2upload, ci_build=is_ci)
    except Exception as exc:
        await buildlog(build_id, "E: error uploading files to repository\n")
        logger.exception(exc)

    files2delete = files2upload
    files2delete.append("{}/{}".format(out_path, changes_file))
    for f in files2delete:
        logger.debug("publisher: removing %s", f)
        os.remove(f)

    return ret
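
The `.changes` file name combines the source name, the version without its Debian epoch, and the architecture; Debian file names never include the epoch. A short sketch with a hypothetical stand-in for molior's `strip_epoch_version`:

# hypothetical stand-in for strip_epoch_version
def strip_epoch(version):
    # "1:2.0-1" -> "2.0-1"
    return version.split(":", 1)[-1]

sourcename = "hello"
version = "1:2.0-1"
architecture = "amd64"

changes_file = "{}_{}_{}.changes".format(sourcename, strip_epoch(version),
                                         architecture)
print(changes_file)  # hello_2.0-1_amd64.changes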
Example no. 17
async def GitCleanLocal(repo_path, build):
    if not await run_git_cmds(
        ["git reset --hard", "git clean -dffx", "git fetch -p"],
            repo_path,
            build,
            write_output_log=False):
        return False

    default_branch = None

    async def outh(line):
        nonlocal default_branch
        default_branch = line.strip()

    # checkout remote default branch
    process = Launchy("git symbolic-ref refs/remotes/origin/HEAD",
                      outh,
                      outh,
                      cwd=str(repo_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("error getting default brach")
        return False

    async def outh_null(line):
        pass

    default_branch = default_branch.replace("refs/remotes/", "")

    process = Launchy("git checkout {}".format(default_branch),
                      outh_null,
                      outh_null,
                      cwd=str(repo_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("error checking out '%s'", default_branch)
        return False

    # get all branches
    branches = []

    async def outh3(line):
        nonlocal branches
        if "HEAD detached" not in line:
            branches.append(line.strip())

    process = Launchy("git branch", outh3, outh3, cwd=str(repo_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("error getting all branches")
        return False

    # get all tags
    tags = []

    async def outh4(line):
        nonlocal tags
        tags.append(line.strip())

    process = Launchy("git tag", outh4, outh4, cwd=str(repo_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        logger.error("error getting all tags")
        return False

    # delete all local branches and tags
    for branch in branches:
        process = Launchy("git branch -D {}".format(branch),
                          outh_null,
                          outh_null,
                          cwd=str(repo_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("error deleting local branch '%s'", branch)
            return False

    for tag in tags:
        process = Launchy("git tag -d {}".format(tag),
                          outh_null,
                          outh_null,
                          cwd=str(repo_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("error deleting local tag '%s'", branch)
            return False

    return True
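
`git symbolic-ref refs/remotes/origin/HEAD` prints something like `refs/remotes/origin/main`; stripping the `refs/remotes/` prefix leaves `origin/main`, so the subsequent checkout lands on a detached HEAD, which is presumably why `outh3` filters the "HEAD detached" line out of the `git branch` output. The string handling in isolation:

# hypothetical output of: git symbolic-ref refs/remotes/origin/HEAD
default_branch = "refs/remotes/origin/main"
print(default_branch.replace("refs/remotes/", ""))  # origin/main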
Example no. 18
async def BuildProcess(parent_build_id,
                       repo_id,
                       git_ref,
                       ci_branch,
                       custom_targets,
                       force_ci=False):
    await buildlogtitle(parent_build_id, "Molior Build")
    info = None
    source_exists = False
    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return

        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            await parent.log(
                "E: source repository {} not found\n".format(repo_id))
            await parent.logtitle("Done",
                                  no_footer_newline=True,
                                  no_header_newline=False)
            await parent.logdone()
            await parent.set_failed()
            session.commit()
            return
        src_path = repo.src_path

        if parent.version:
            existing_src_build = session.query(Build).filter(
                Build.buildtype == "source", Build.sourcerepository == repo,
                Build.version == parent.version,
                Build.buildstate == "successful",
                Build.is_deleted.is_(False)).first()
            if existing_src_build:
                source_exists = True

                # create fake info
                class BuildInfo:
                    pass

                info = BuildInfo()
                info.version = parent.version
                info.plain_targets = []
                info.firstname = existing_src_build.maintainer.firstname
                info.lastname = existing_src_build.maintainer.surname
                info.email = existing_src_build.maintainer.email
                info.commit_hash = existing_src_build.git_ref
                info.sourcename = existing_src_build.sourcename

    if not source_exists:
        await buildlog(parent_build_id, "I: git checkout {}\n".format(git_ref))

        # Checkout
        ret = await asyncio.ensure_future(
            GitCheckout(src_path, git_ref, parent_build_id))

        if not ret:
            await buildlog(parent_build_id, "E: git checkout failed\n")
            await buildlogtitle(parent_build_id,
                                "Done",
                                no_footer_newline=True,
                                no_header_newline=False)
            await buildlogdone(parent_build_id)

        with Session() as session:
            parent = session.query(Build).filter(
                Build.id == parent_build_id).first()
            if not parent:
                logger.error("BuildProcess: parent build {} not found".format(
                    parent_build_id))
                return
            repo = session.query(SourceRepository).filter(
                SourceRepository.id == repo_id).first()
            if not repo:
                logger.error("source repository %d not found", repo_id)
                return

            if not ret:
                await parent.set_failed()
                repo.set_ready()
                session.commit()
                return

        await buildlog(parent_build_id, "\nI: get build information\n")

        try:
            info = await GetBuildInfo(repo.src_path, git_ref)
        except Exception as exc:
            logger.exception(exc)

        if not info:
            await buildlog(parent_build_id,
                           "E: Error getting build information\n")
            await buildlogtitle(parent_build_id,
                                "Done",
                                no_footer_newline=True,
                                no_header_newline=False)
            await buildlogdone(parent_build_id)

    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return
        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            return

        if not info:
            await parent.set_failed()
            if not source_exists:
                repo.set_ready()
            session.commit()
            return

        targets = get_targets(info.plain_targets, repo, custom_targets,
                              session)

        if not targets:
            repo.log_state(
                "unknown target projectversions in debian/molior.yml")
            await parent.log(
                "E: the repository is not added to any projectversions from debian/molior.yml:\n"
            )
            await parent.log("   %s\n" % str(info.plain_targets))
            await parent.logtitle("Done",
                                  no_footer_newline=True,
                                  no_header_newline=False)
            await parent.logdone()
            if not source_exists:
                repo.set_ready()
            await parent.set_nothing_done()
            session.commit()
            return

    is_ci = False
    if not source_exists:
        if force_ci:
            is_ci = True
        else:
            # check if it is a CI build
            # i.e. if gittag does not match version in debian/changelog
            gittag = ""

            async def outh(line):
                nonlocal gittag
                gittag += line

            process = Launchy("git describe --tags --abbrev=40",
                              outh,
                              outh,
                              cwd=str(src_path))
            await process.launch()
            ret = await process.wait()
            if ret != 0:
                logger.error("error running git describe: %s" % gittag.strip())
            else:
                v = strip_epoch_version(info.version)
                if not re.match(
                        "^v?{}$".format(
                            v.replace("~", "-").replace("+", "\\+")),
                        gittag) or "+git" in v:
                    is_ci = True

        ci_cfg = Configuration().ci_builds
        ci_enabled = ci_cfg.get("enabled") if ci_cfg else False

    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return
        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            return

        if is_ci and not ci_enabled:
            repo.log_state("CI builds are not enabled in configuration")
            await parent.log("E: CI builds are not enabled in configuration\n")
            await parent.logtitle("Done",
                                  no_footer_newline=True,
                                  no_header_newline=False)
            await parent.logdone()
            await parent.set_successful()
            repo.set_ready()
            session.commit()
            return

        parent.is_ci = is_ci
        session.commit()

        if is_ci:
            # create CI version with git hash suffix
            info.origversion = info.version
            info.version += "+git{}.{}".format(
                datetime.now().strftime("%Y%m%d%H%M%S"), info.commit_hash[:6])

            # check if CI builds enabled in any project version
            found = False
            for target in targets:
                projectversion = session.query(ProjectVersion).filter(
                    ProjectVersion.ci_builds_enabled.is_(True),
                    ProjectVersion.id == target.projectversion_id).first()
                if projectversion:
                    found = True
                    break
            if not found:
                repo.log_state(
                    "CI builds not enabled in specified projectversions, not building..."
                )
                await parent.log(
                    "E: CI builds not enabled in specified projectversions, not building...\n"
                )
                await parent.logtitle("Done",
                                      no_footer_newline=True,
                                      no_header_newline=False)
                await parent.logdone()
                await parent.set_nothing_done()
                repo.set_ready()
                session.commit()
                return

        # Check if already built completely
        missing_builds = False
        existing_src_build = session.query(Build).filter(
            Build.buildtype == "source", Build.sourcerepository == repo,
            Build.version == info.version, Build.buildstate == "successful",
            Build.is_deleted.is_(False)).first()
        if existing_src_build:
            # check for missing successful deb builds
            for target in targets:
                for arch in db2array(target.architectures):
                    # FIXME: check buildstates
                    deb_build = session.query(Build).filter(
                        Build.buildtype == "deb",
                        Build.sourcerepository == repo,
                        Build.version == info.version,
                        Build.projectversion_id == target.projectversion_id,
                        Build.architecture == arch).first()
                    if not deb_build:
                        missing_builds = True

            if not missing_builds:
                await parent.log(
                    "E: all debian builds already existing for version {}\n".
                    format(info.version))
                await parent.logtitle("Done",
                                      no_footer_newline=True,
                                      no_header_newline=False)
                await parent.logdone()
                repo.set_ready()
                if existing_src_build.parent and existing_src_build.parent.buildstate == "successful":
                    await parent.set_already_exists()
                else:
                    await parent.set_already_failed()
                session.commit()
                args = {"schedule": []}
                await enqueue_task(args)
                return

        # Use committer name as maintainer for CI builds
        if is_ci:
            t = info.author_name.split(" ", 2)
            if len(t) == 2:
                firstname = t[0]
                lastname = t[1]
            else:
                firstname = t[0]
                lastname = ""
            email = info.author_email
        else:
            firstname = info.firstname
            lastname = info.lastname
            email = info.email

        maintainer = session.query(Maintainer).filter(
            Maintainer.email == email).first()
        if not maintainer:
            maintainer = Maintainer(firstname=firstname,
                                    surname=lastname,
                                    email=email)
            session.add(maintainer)
            session.commit()

        # FIXME: assert version == git tag

        build = Build(
            version=info.version,
            git_ref=info.commit_hash,
            ci_branch=ci_branch,
            is_ci=is_ci,
            sourcename=info.sourcename,
            buildstate="new",
            buildtype="source",
            parent_id=parent_build_id,
            sourcerepository=repo,
            maintainer=maintainer,
        )

        # update parent
        parent.version = info.version
        parent.sourcerepository = repo
        parent.maintainer = maintainer
        parent.git_ref = info.commit_hash

        session.add(build)
        session.commit()
        await parent.build_changed()
        await build.build_added()

        # add build order dependencies
        build_after = get_buildorder(repo.src_path)
        if build_after:
            await build.parent.log("N: source needs to build after: %s\n" %
                                   ", ".join(build_after))
            build.builddeps = "{" + ",".join(build_after) + "}"
            session.commit()

        projectversion_ids = []
        found = False
        for target in targets:
            projectversion = session.query(ProjectVersion).filter(
                ProjectVersion.id == target.projectversion_id).first()
            if projectversion.is_locked:
                repo.log_state(
                    "build to locked projectversion '%s-%s' not permitted" % (
                        projectversion.project.name,
                        projectversion.name,
                    ))
                await parent.log(
                    "W: build to locked projectversion '%s-%s' not permitted\n"
                    % (
                        projectversion.project.name,
                        projectversion.name,
                    ))
                continue

            if is_ci and not projectversion.ci_builds_enabled:
                repo.log_state(
                    "CI builds not enabled in projectversion '%s-%s'" % (
                        projectversion.project.name,
                        projectversion.name,
                    ))
                await parent.log(
                    "W: CI builds not enabled in projectversion '%s-%s'\n" % (
                        projectversion.project.name,
                        projectversion.name,
                    ))
                continue

            architectures = db2array(target.architectures)
            for architecture in architectures:
                deb_build = session.query(Build).filter(
                    Build.sourcerepository_id == repo.id,
                    Build.projectversion == projectversion,
                    Build.version == info.version, Build.buildtype == "deb",
                    Build.architecture == architecture).first()
                if deb_build:
                    if deb_build.buildstate != "successful":
                        deb_build.buildstate = "needs_build"
                        session.commit()
                        found = True  # FIXME: should this be here ?
                        continue
                    await parent.log(
                        "W: packages already built for {} {}\n".format(
                            projectversion.fullname, architecture))
                    continue

                found = True

                # only add projectversions where a debian package will be built.
                # this allows deleting a source republish without deleting the original source package
                if projectversion.id not in projectversion_ids:
                    projectversion_ids.append(projectversion.id)

                await parent.log(
                    "I: creating build for projectversion '%s/%s'\n" % (
                        projectversion.project.name,
                        projectversion.name,
                    ))

                deb_build = Build(version=info.version,
                                  git_ref=info.commit_hash,
                                  ci_branch=ci_branch,
                                  is_ci=is_ci,
                                  sourcename=info.sourcename,
                                  buildstate="new",
                                  buildtype="deb",
                                  parent_id=build.id,
                                  sourcerepository=repo,
                                  maintainer=maintainer,
                                  projectversion_id=projectversion.id,
                                  architecture=architecture)

                session.add(deb_build)
                session.commit()

                await deb_build.build_added()

        if not found:
            await parent.log("E: no projectversion found to build for")
            await parent.logtitle("Done",
                                  no_footer_newline=True,
                                  no_header_newline=False)
            await parent.logdone()
            await parent.set_nothing_done()
            repo.set_ready()
            session.commit()
            return

        build.projectversions = array2db([str(p) for p in projectversion_ids])
        session.commit()

        build_id = build.id

    await enqueue_task({"src_build": [build_id]})
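
CI detection compares the output of `git describe --tags --abbrev=40` against the changelog version: a leading `v` is optional, tildes map to dashes (git tags cannot contain `~`), and plus signs are escaped for the regex. A self-contained sketch, with a hypothetical stand-in for `strip_epoch_version`:

import re


def strip_epoch(version):
    # hypothetical stand-in for molior's strip_epoch_version: "1:2.0-1" -> "2.0-1"
    return version.split(":", 1)[-1]


def looks_like_ci(changelog_version, gittag):
    v = strip_epoch(changelog_version)
    pattern = "^v?{}$".format(v.replace("~", "-").replace("+", "\\+"))
    return not re.match(pattern, gittag) or "+git" in v


print(looks_like_ci("1.2.3", "v1.2.3"))            # False: tag matches the version
print(looks_like_ci("1.2.3~rc1", "v1.2.3-rc1"))    # False: ~ in the version maps to - in the tag
print(looks_like_ci("1.2.3", "v1.2.3-4-g0123ab"))  # True: the tag has moved on -> CI build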
Example no. 19
async def DownloadDebSrc(repo_id, sourcedir, sourcename, build_id, version,
                         basemirror, projectversion):
    await buildlogtitle(build_id, "Source Package Republish")
    await buildlog(
        build_id, "I: downloading source package from {} ({})\n".format(
            projectversion, basemirror))
    cfg = Configuration()
    apt_url = cfg.aptly.get("apt_url")
    sources_url = "{}/{}/repos/{}/dists/stable/main/source/Sources".format(
        apt_url, basemirror, projectversion)

    # download Sources file
    Sources = ""
    try:
        async with aiohttp.ClientSession() as http:
            async with http.get(sources_url) as resp:
                if resp.status == 200:
                    Sources = await resp.text()
    except Exception:
        pass

    if not Sources:
        await buildlog(build_id,
                       "E: Error downloading {}\n".format(sources_url))
        return False

    # parse Sources file
    files = []
    directory = None
    found_package_entry = False
    found_directory_entry = False
    found_files_section = False
    for line in Sources.split('\n'):
        if not found_package_entry:
            if line != "Package: {}".format(sourcename):
                continue
            found_package_entry = True
            continue
        else:  # Package section
            if not found_directory_entry:
                if line == "":
                    break
                if not line.startswith("Directory: "):
                    continue
                found_directory_entry = True
                directory = line.split(" ")[1]
                continue
            elif not found_files_section:
                if line == "":
                    break
                if line != "Files:":
                    continue
                found_files_section = True
                continue
            else:  # Files section
                if line.startswith(" "):
                    files.append(line[1:].split(" "))
                else:
                    break

    if not found_directory_entry:
        await buildlog(
            build_id, "E: Could not find {}/{} in Sources file: {}\n".format(
                sourcename, version, sources_url))
        return False

    await buildlog(build_id, "I: found directory: {}\n".format(directory))
    await buildlog(build_id, "I: downloading source files:\n")
    sourcepath = None
    sourcetype = None
    source_files = []
    repopath = f"/var/lib/molior/repositories/{repo_id}"
    tmpdir = mkdtemp(dir=repopath)
    for f in files:
        await buildlog(build_id, " - {}\n".format(f[2]))

        file_url = "{}/{}/repos/{}/{}/{}".format(apt_url, basemirror,
                                                 projectversion, directory,
                                                 f[2])
        body = None
        async with aiohttp.ClientSession() as http:
            async with http.get(file_url) as resp:
                if resp.status != 200:
                    await buildlog(
                        build_id, "E: Error downloading {}\n".format(file_url))
                    continue
                body = await resp.read()

        filepath = f"{tmpdir}/{f[2]}"
        async with AIOFile(filepath, "wb") as afp:
            writer = Writer(afp)
            await writer(body)

        source_files.append(f[2])

        if filepath.endswith(".git"):
            sourcetype = "git"
            sourcepath = filepath
        elif filepath.endswith(".tar.gz") or filepath.endswith(".tar.xz"):
            sourcetype = "tar"
            sourcepath = filepath

    # extract source, if git, checkout version tag
    ret = None
    if sourcetype:
        output = ""

        async def outh(line):
            nonlocal output
            await buildlog(build_id, "{}\n".format(line))
            output += line

        if sourcetype == "tar":
            cmd = "tar xf {}".format(sourcepath)
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=tmpdir)
            await process.launch()
            ret = await process.wait()
        elif sourcetype == "git":
            cmd = f"git clone -b v{version.replace('~', '-')} {filepath} {sourcedir}"
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=tmpdir)
            await process.launch()
            ret = await process.wait()
            output = ""

        if ret == 0:
            cmd = "dpkg-genchanges -S"
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=f"{tmpdir}/{sourcedir}")
            await process.launch()
            ret = await process.wait()

        if ret == 0:
            cmd = "dpkg-genbuildinfo --build=source"
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=f"{tmpdir}/{sourcedir}")
            await process.launch()
            ret = await process.wait()

        source_files.append(f"{sourcename}_{version}_source.buildinfo")

    for source_file in source_files:
        try:
            os.rename(f"{tmpdir}/{source_file}", f"{repopath}/{source_file}")
        except Exception as exc:
            logger.exception(exc)

    try:
        rmtree(tmpdir)
    except Exception as exc:
        logger.exception(exc)

    return ret == 0
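
The parser above scans a Debian `Sources` index for the package stanza, its `Directory:` field and the entries under `Files:` (md5sum, size, file name). A self-contained sketch of the same state machine over a small hypothetical stanza:

# hypothetical excerpt of a Debian Sources index
Sources = """\
Package: hello
Binary: hello
Version: 2.0-1
Directory: pool/main/h/hello
Files:
 d41d8cd98f00b204e9800998ecf8427e 1012 hello_2.0-1.dsc
 0cc175b9c0f1b6a831c399e269772661 4321 hello_2.0.orig.tar.gz

Package: other
"""

sourcename = "hello"
files, directory = [], None
found_package = found_directory = found_files = False
for line in Sources.split("\n"):
    if not found_package:
        if line == "Package: {}".format(sourcename):
            found_package = True
        continue
    if not found_directory:
        if line == "":
            break
        if line.startswith("Directory: "):
            found_directory = True
            directory = line.split(" ")[1]
        continue
    if not found_files:
        if line == "":
            break
        if line == "Files:":
            found_files = True
        continue
    if line.startswith(" "):
        files.append(line[1:].split(" "))
    else:
        break

print(directory)              # pool/main/h/hello
print([f[2] for f in files])  # ['hello_2.0-1.dsc', 'hello_2.0.orig.tar.gz']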