示例#1
0
def test_load_config_non_existent():
    """
    Loading a missing config file must return None.
    """
    config = Configuration()
    with patch("molior.molior.configuration.logger"):
        result = config._load_config(Path("/non/existent"))
        assert result is None
示例#2
0
def test_get_config_attr():
    """
    Attribute access on Configuration proxies into the _config dict.
    """
    config = Configuration()
    config._config = {"test": "config"}
    assert getattr(config, "test") == "config"
示例#3
0
def test_config():
    """
    Configuration.config() returns the cached _config dict.
    """
    config = Configuration()
    with patch("molior.molior.configuration.Configuration._load_config"):
        config._config = {"test": "config"}
        expected = {"test": "config"}
        assert config.config() == expected
示例#4
0
def test_get_config_attr_no_cfg():
    """
    Attribute access triggers a config reload when _config is empty.
    """
    config = Configuration()
    config._config = {}
    target = "molior.molior.configuration.Configuration._load_config"
    with patch(target) as load_cfg:
        assert config.test == {}
        assert load_cfg.called
示例#5
0
def test_load_config():
    """
    _load_config parses the file content into _config.
    """
    config = Configuration()
    data = "{'test': 'config'}"
    with patch("molior.molior.configuration.open", mock_open(read_data=data)):
        config._load_config("/")
        assert config._config == {"test": "config"}
示例#6
0
    def get_apt_repo(self, url_only=False, dist="stable"):
        """
        Return the apt repository URL of the projectversion, or the full
        "deb ..." sources.list line.

        Args:
            url_only (bool): Return only the URL instead of the full line.
            dist (str): Distribution name used for plain project repos.

        Returns:
            str: Repo URL or complete apt source line; empty string when
                no basemirror is configured.
        """
        base_url = Configuration().aptly.get("apt_url")

        def deb_line(repo_url, distribution, components):
            # Assemble a single sources.list entry.
            return "deb {0} {1} {2}".format(repo_url, distribution,
                                            components)

        if self.project.is_basemirror:
            url = "{0}/{1}/{2}".format(base_url, self.project.name, self.name)
            full = deb_line(url, self.mirror_distribution,
                            self.mirror_components.replace(",", " "))
            return url if url_only else full

        if not self.buildvariants:
            logger.error("project version '%s' has no basemirror",
                         self.fullname)
            return str()

        bm = self.buildvariants[0].base_mirror
        base_mirror = "{}/{}".format(bm.project.name, bm.name)

        if self.project.is_mirror:
            url = "{0}/{1}/mirrors/{2}/{3}".format(base_url, base_mirror,
                                                   self.project.name,
                                                   self.name)
            full = deb_line(url, self.mirror_distribution,
                            self.mirror_components.replace(",", " "))
            return url if url_only else full

        url = "{0}/{1}/repos/{2}/{3}".format(base_url, base_mirror,
                                             self.project.name, self.name)
        return url if url_only else deb_line(url, dist, "main")
示例#7
0
async def BuildDebSrc(repo_id, repo_path, build_id, ci_version, is_ci, author,
                      email):
    """
    Build the Debian source package for a repository checkout.

    Args:
        repo_id (int): Source repository id (used for logging only).
        repo_path (str): Path to the checked-out repository.
        build_id (int): Build id used for build log output.
        ci_version (str): Version to use when this is a CI build.
        is_ci (bool): Whether this is a CI build (creates a CI changelog).
        author (str): Commit author name (DEBFULLNAME for CI changelog).
        email (str): Commit author email (DEBEMAIL for CI changelog).

    Returns:
        bool: True on success, False on any error.
    """
    write_log(build_id, "I: getting debian build information\n")
    src_package_name = await get_changelog_attr("Source", repo_path)
    version = await get_changelog_attr("Version", repo_path)
    repo_path = Path(repo_path)

    # The configured gpg key email is required to sign the source package.
    key = Configuration().debsign_gpg_email
    if not key:
        write_log(build_id, "E: Signing key not defined in configuration\n")
        logger.error("Signing key not defined in configuration")
        return False

    logger.info("%s: creating source package", src_package_name)
    write_log(
        build_id,
        "I: creating source package: %s (%s)\n" % (src_package_name, version))

    async def outh(line):
        # Forward non-empty subprocess output lines into the build log.
        line = line.strip()
        if line:
            write_log(build_id, "%s\n" % line)

    if is_ci:
        # in order to publish a sourcepackage for a ci build we need
        # to create a ci changelog with the correct version

        distribution = await get_changelog_attr("Distribution", repo_path)

        env = os.environ.copy()
        env["DEBFULLNAME"] = author
        env["DEBEMAIL"] = email
        dchcmd = "dch -v %s --distribution %s --force-distribution 'CI Build'" % (
            ci_version, distribution)
        version = ci_version

        process = Launchy(shlex.split(dchcmd),
                          outh,
                          outh,
                          cwd=str(repo_path),
                          env=env)
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("Error running dch for CI build")
            return False

    cmd = "dpkg-buildpackage -S -d -nc -I.git -pgpg1 -k{}".format(key)
    process = Launchy(shlex.split(cmd), outh, outh, cwd=str(repo_path))
    await process.launch()
    ret = await process.wait()
    if ret != 0:
        write_log(build_id, "E: Error building source package\n")
        # Fixed typo in log message: was "dpkg-builpackage".
        logger.error("source packaging failed, dpkg-buildpackage returned %d",
                     ret)
        return False

    logger.info("%s (%d): source package v%s created", src_package_name,
                repo_id, version)
    return True
示例#8
0
    def path(self):
        """
        Return the top level path of the sourcerepo.
        E.g. /var/lib/molior/repositories/1

        Returns:
            Path: The sourcerepo's top level path.
        """
        working_dir = Configuration().working_dir or DEFAULT_CWD
        return Path(working_dir, "repositories", str(self.id))
示例#9
0
async def schedule_build(build, session):
    """
    Puts the given build onto the backend task queue.

    Args:
        build (molior.model.build.Build): Build to schedule.
        session: Database session.

    Returns:
        bool: True when the build was sent to the backend, False when
            the chroot is not ready yet.
    """
    if not chroot_ready(build, session):
        return False

    # Register a build task with a fresh token so the backend can
    # authenticate uploads for this build.
    buildtask = BuildTask(build=build, task_id=str(uuid.uuid4()))
    session.add(buildtask)
    session.commit()

    buildvariant = build.buildconfiguration.buildvariant
    arch = buildvariant.architecture.name
    base_mirror_db = buildvariant.base_mirror
    distrelease_name = base_mirror_db.project.name
    distrelease_version = base_mirror_db.name

    # FIXME: why [0] ?
    project_version = build.buildconfiguration.projectversions[0]
    apt_urls = get_apt_repos(project_version, session, is_ci=build.is_ci)

    # Only build arch-independent packages on the target architecture.
    arch_any_only = arch != get_target_arch(build, session)

    apt_url = Configuration().aptly.get("apt_url")

    token = buildtask.task_id

    await build.set_scheduled()
    session.commit()  # pylint: disable=no-member

    await backend_queue.put({
        "schedule": [
            build.id,
            token,
            build.version,
            apt_url,
            arch,
            arch_any_only,
            distrelease_name,
            distrelease_version,
            "unstable" if build.is_ci else "stable",
            build.sourcename,
            project_version.project.name,
            project_version.name,
            apt_urls,
        ]
    })
    return True
示例#10
0
def is_gitlab_auth_token_valid(token):
    """
    Check a GitLab webhook token against the configured auth token.

    Access is granted when the tokens match, or when no auth token is
    configured at all.
    """
    # TODO: Implement more sophisticated user-based auth mechanism
    auth_token = Configuration().gitlab.get("auth_token")

    # Grant access when no token is set in config
    if not auth_token:
        return True

    if token == auth_token:
        logger.debug("GitLab-API: Access authorized")
        return True

    # Authentication failed
    return False
示例#11
0
def get_log_file_path(build_id):
    """Get log file path for the given build.

        Args:
            build_id (int): The build's id.

        Returns:
            str: Path to the build's log file.
    """
    buildout_path = Path(Configuration().working_dir) / "buildout"
    dir_path = buildout_path / str(build_id)
    # FIXME: do not create buildout directory here
    # exist_ok avoids a race between an is_dir() check and mkdir().
    dir_path.mkdir(parents=True, exist_ok=True)
    full_path = dir_path / "build.log"
    return str(full_path)
示例#12
0
文件: info.py 项目: szakalboss/molior
async def get_aptlyhostname(*_):
    """
    Returns the aptly hostname from the molior
    config file

    ---
    description: Returns the aptly hostname from the molior config file
    tags:
        - Info
    consumes:
        - application/x-www-form-urlencoded
    responses:
        "200":
            description: successful
    """
    # NOTE: the docstring above doubles as the swagger API spec and is
    # kept unchanged.
    hostname = Configuration().aptly.get("host")
    return web.Response(text=hostname)
示例#13
0
async def file_upload(request, tempfile, filename, size):
    """
    Handle a finished build file upload: look up the build by its task
    token and move the temporary file into the build's buildout
    directory.
    """
    token = request.match_info["token"]
    logger.debug("file uploaded: %s (%s) %dbytes, token %s", tempfile,
                 filename, size, token)

    with Session() as session:
        query = session.query(Build).join(BuildTask)
        build = query.filter(BuildTask.task_id == token).first()
        if not build:
            logger.error("file_upload: no build found for token '%s'", token)
            return web.Response(status=400, text="Invalid file upload.")

        buildout_path = (Path(Configuration().working_dir) / "buildout" /
                         str(build.id))
        # FIXME: do not overwrite
        os.rename(tempfile, str(buildout_path / filename))

    return web.Response(
        text="file uploaded: {} ({} bytes)".format(filename, size))
示例#14
0
async def auth_admin(request, user, passwd):
    """
    Authenticates admin user

    Args:
        user (str): The user's name.
        passwd (str): The user's password.

    Returns:
        bool: True if successfully authenticated, otherwise False.
    """
    if user.lower() != "admin":
        return False

    admin_pass = Configuration().admin.get("pass")
    if not admin_pass:
        logger.info("admin password is not set in configuration")
        return False

    if passwd != admin_pass:
        return False

    load_user("admin", request.cirrina.db_session)
    return True
示例#15
0
async def get_apt_sources(request):
    """
    Returns apt sources list for given project,
    projectversion and distrelease.

    ---
    description: Returns apt sources list.
    tags:
        - Projects
    consumes:
        - application/x-www-form-urlencoded
    parameters:
        - name: project_name
          in: path
          required: true
          type: str
        - name: projectver_name
          in: path
          required: true
          type: str
    produces:
        - text/json
    responses:
        "200":
            description: successful
        "400":
            description: Parameter missing
    """
    project_name = request.match_info.get("project_name")
    projectver_name = request.match_info.get("projectver_name")

    if not project_name or not projectver_name:
        return web.Response(text="Parameter missing", status=400)

    db = request.cirrina.db_session
    project = (
        db.query(Project)  # pylint: disable=no-member
        .filter(Project.name == project_name)
        .first()
    )
    if not project:
        return web.Response(text=str(), status=400)

    version = (
        db.query(ProjectVersion)  # pylint: disable=no-member
        .filter_by(project_id=project.id)
        .filter(ProjectVersion.name == projectver_name)
        .first()
    )
    if not version:
        return web.Response(text=str(), status=400)

    # The sources list covers the version itself plus its dependencies.
    deps = [version]
    deps += get_projectversion_deps_manually(version, to_dict=False)

    cfg = Configuration()
    apt_url = cfg.aptly.get("apt_url")
    keyfile = cfg.aptly.get("key")

    lines = ["# APT Sources for project {0} {1}\n".format(
        project_name, projectver_name)]
    lines.append("# GPG-Key: {0}/{1}\n".format(apt_url, keyfile))
    if not project.is_basemirror and version.buildvariants:
        lines.append("# Base Mirror\n")
        base_mirror = version.buildvariants[0].base_mirror
        lines.append("{}\n".format(base_mirror.get_apt_repo()))

    lines.append("# Project Sources\n")
    for dep in deps:
        lines.append("{}\n".format(dep.get_apt_repo()))

    return web.Response(text="".join(lines), status=200)
示例#16
0
async def BuildProcess(task_queue, aptly_queue, parent_build_id, repo_id,
                       git_ref, ci_branch):
    """
    Run the full source build process for a repository.

    Checks out the given git ref, gathers the build information, creates
    the source build plus one deb build per matching build configuration,
    builds the source package and finally queues it for publishing.

    Args:
        task_queue: Main task queue (gets a "schedule" poke on early exit
            when the source package was already built).
        aptly_queue: Aptly queue; receives the final "src_publish" request.
        parent_build_id (int): Id of the parent (top level) build.
        repo_id (int): Id of the source repository to build.
        git_ref (str): Git reference (tag or hash) to check out.
        ci_branch (str): Branch name recorded for CI builds.
    """
    with Session() as session:
        parent = session.query(Build).filter(
            Build.id == parent_build_id).first()
        if not parent:
            logger.error("BuildProcess: parent build {} not found".format(
                parent_build_id))
            return

        write_log_title(parent_build_id, "Molior Build")

        repo = session.query(SourceRepository).filter(
            SourceRepository.id == repo_id).first()
        if not repo:
            logger.error("source repository %d not found", repo_id)
            write_log(parent_build_id,
                      "E: source repository {} not found\n".format(repo_id))
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            session.commit()
            return

        write_log(parent_build_id, "I: git checkout {}\n".format(git_ref))

        # Checkout
        ret = await asyncio.ensure_future(
            GitCheckout(repo.src_path, git_ref, parent_build_id))
        if not ret:
            write_log(parent_build_id, "E: git checkout failed\n")
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        write_log(parent_build_id, "\nI: get build information\n")
        info = None
        try:
            info = await GetBuildInfo(repo.src_path, git_ref)
        except Exception as exc:
            logger.exception(exc)

        if not info:
            write_log(parent_build_id, "E: Error getting build information\n")
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_failed()
            repo.set_ready()
            session.commit()
            return

        # Resolve the target projectversions from debian/molior.yml.
        targets = get_targets(info.plain_targets, repo, session)
        if not targets:
            repo.log_state(
                "unknown target projectversions in debian/molior.yml")
            write_log(
                parent_build_id,
                "E: the repository is not added to any projectversions referenced in debian/molior.yml\n"
            )
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            repo.set_ready()
            await parent.set_failed()
            session.commit()
            return

        # check if it is a CI build
        # i.e. if gittag does not match version in debian/changelog
        is_ci = False
        gittag = ""

        async def outh(line):
            # Collect "git describe" output into gittag.
            nonlocal gittag
            gittag += line

        process = Launchy(shlex.split("git describe --tags --abbrev=40"),
                          outh,
                          outh,
                          cwd=str(repo.src_path))
        await process.launch()
        ret = await process.wait()
        if ret != 0:
            logger.error("error running git describe")
        else:
            v = strip_epoch_version(info.version)
            if not re.match("^v?{}$".format(v.replace("~", "-")), gittag):
                is_ci = True

        ci_cfg = Configuration().ci_builds
        ci_enabled = ci_cfg.get("enabled") if ci_cfg else False

        if is_ci and not ci_enabled:
            repo.log_state("CI builds are not enabled in configuration")
            write_log(parent_build_id,
                      "E: CI builds are not enabled in configuration\n")
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            await parent.set_successful()
            repo.set_ready()
            session.commit()
            return

        parent.is_ci = is_ci
        session.commit()

        if is_ci:
            # create CI version with git hash suffix
            info.origversion = info.version
            # NOTE(review): this inner "if is_ci" is redundant — we are
            # already inside the is_ci branch.
            if is_ci:
                info.version += "+git{}.{}".format(
                    info.tag_dt.strftime("%Y%m%d%H%M%S"), git_ref[:6])

            # check if CI builds enabled in any project version
            found = False
            for target in targets:
                projectversion = session.query(ProjectVersion).filter(
                    ProjectVersion.ci_builds_enabled == True,  # noqa: E712
                    ProjectVersion.id == target.projectversion_id).first()
                if projectversion:
                    found = True
                    break
            if not found:
                repo.log_state(
                    "CI builds not enabled in specified projectversions, not building..."
                )
                write_log(
                    parent_build_id,
                    "E: CI builds not enabled in specified projectversions, not building...\n"
                )
                write_log_title(parent_build_id,
                                "Done",
                                no_footer_newline=True,
                                no_header_newline=False)
                await parent.set_successful()
                repo.set_ready()
                session.commit()
                return

        # Check if source build already exists
        build = session.query(Build).filter(
            Build.buildtype == "source", Build.sourcerepository == repo,
            Build.version == info.version).first()
        if build:
            repo.log_state(
                "source package already built for version {}".format(
                    info.version))
            write_log(
                parent_build_id,
                "E: source package already built for version {}\n".format(
                    info.version))
            write_log_title(parent_build_id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=False)
            repo.set_ready()
            await parent.set_successful()
            session.commit()
            # Poke the scheduler anyway so pending builds get picked up.
            args = {"schedule": []}
            await task_queue.put(args)
            return

        # Use commiter name as maintainer for CI builds
        if is_ci:
            t = info.author_name.split(" ", 2)
            if len(t) == 2:
                firstname = t[0]
                lastname = t[1]
            else:
                firstname = t[0]
                lastname = ""
            email = info.author_email
        else:
            firstname = info.firstname
            lastname = info.lastname
            email = info.email

        # Look up the maintainer record, creating it on first sight.
        maintainer = session.query(Maintainer).filter(
            Maintainer.email == email).first()
        if not maintainer:
            repo.log_state("creating new maintainer: %s %s <%s>" %
                           (firstname, lastname, email))
            write_log(
                parent_build_id, "I: creating new maintainer: %s %s <%s>\n" %
                (firstname, lastname, email))
            maintainer = Maintainer(firstname=firstname,
                                    surname=lastname,
                                    email=email)
            session.add(maintainer)
            session.commit()

        # FIXME: assert version == git tag

        # Create the source build record.
        build = Build(
            version=info.version,
            git_ref=info.commit_hash,
            ci_branch=ci_branch,
            is_ci=is_ci,
            versiontimestamp=info.tag_stamp,
            sourcename=info.sourcename,
            buildstate="new",
            buildtype="source",
            buildconfiguration=None,
            parent_id=parent_build_id,
            sourcerepository=repo,
            maintainer=maintainer,
        )

        session.add(build)
        session.commit()
        build.log_state("created")
        await build_added(build)

        # add build order dependencies
        build_after = get_buildorder(repo.src_path)
        build_after_deps = []
        found = False
        for dep_git in build_after:
            dep_repo = session.query(SourceRepository).filter(
                SourceRepository.url == dep_git).first()
            if not dep_repo:
                build.log_state("Error: build after repo '%s' not found" %
                                dep_git)
                write_log(parent_build_id,
                          "E: build after repo '%s' not found\n" % dep_git)
                # FIXME: write to build log
                continue
            found = True
            build.log_state("adding build after dependency to: %s" % dep_git)
            write_log(parent_build_id,
                      "I: adding build after dependency to: %s\n" % dep_git)
            build_after_deps.append(dep_repo)

        if found:
            build.build_after = build_after_deps
            session.commit()

        # Create one deb build per matching build configuration.
        projectversion_ids = []
        build_configs = get_buildconfigs(targets, session)
        found = False
        for build_config in build_configs:
            projectversion_ids.extend([
                projectversion.id
                for projectversion in build_config.projectversions
            ])
            # FIXME: filter for buildtype?
            deb_build = (session.query(Build).filter(
                Build.buildconfiguration == build_config,
                Build.versiontimestamp == info.tag_stamp,
                Build.version == info.version,
            ).first())
            if deb_build:
                logger.warning("already built %s", repo.name)
                write_log(parent_build_id,
                          "E: already built {}\n".format(repo.name))
                continue

            # FIXME: why projectversion[0] ??
            if build_config.projectversions[0].is_locked:
                repo.log_state(
                    "build to locked projectversion '%s-%s' not permitted" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: build to locked projectversion '%s-%s' not permitted\n"
                    % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            if is_ci and not build_config.projectversions[0].ci_builds_enabled:
                repo.log_state(
                    "CI builds not enabled in projectversion '%s-%s'" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                write_log(
                    parent_build_id,
                    "W: CI builds not enabled in projectversion '%s-%s'\n" % (
                        build_config.projectversions[0].project.name,
                        build_config.projectversions[0].name,
                    ))
                continue

            found = True

            write_log(
                parent_build_id,
                "I: creating build for projectversion '%s/%s'\n" % (
                    build_config.projectversions[0].project.name,
                    build_config.projectversions[0].name,
                ))

            deb_build = Build(
                version=info.version,
                git_ref=info.commit_hash,
                ci_branch=ci_branch,
                is_ci=is_ci,
                versiontimestamp=info.tag_stamp,
                sourcename=info.sourcename,
                buildstate="new",
                buildtype="deb",
                buildconfiguration=build_config,
                parent_id=build.id,
                sourcerepository=repo,
                maintainer=maintainer,
            )

            session.add(deb_build)
            session.commit()

            deb_build.log_state("created")
            await build_added(deb_build)

        # FIXME: if not found, abort?

        session.commit()

        # make list unique, filter duplicates (multiple archs)
        projectversion_ids = list(set(projectversion_ids))

        await build.set_building()
        session.commit()

        write_log(parent_build_id, "I: building source package\n")

        async def fail():
            # Shared failure path for the source package build below.
            write_log(parent_build_id, "E: building source package failed\n")
            write_log_title(build.id,
                            "Done",
                            no_footer_newline=True,
                            no_header_newline=True)
            repo.set_ready()
            await build.set_failed()
            session.commit()
            # FIXME: cancel deb builds, or only create deb builds after source build ok

        # Build Source Package
        write_log_title(build.id, "Source Build")
        try:
            ret = await BuildDebSrc(repo_id, repo.src_path, build.id,
                                    info.version, is_ci,
                                    "{} {}".format(firstname, lastname), email)
        except Exception as exc:
            logger.exception(exc)
            await fail()
            return

        if not ret:
            await fail()
            return

        await build.set_needs_publish()
        session.commit()

        repo.set_ready()
        session.commit()

        write_log(parent_build_id, "I: publishing source package\n")
        await aptly_queue.put({"src_publish": [build.id, projectversion_ids]})
示例#17
0
import logging
import os
from aiohttp import web
from pathlib import Path

from molior.molior.configuration import Configuration
from molior.model.database import Session
from molior.model.build import Build
from molior.model.buildtask import BuildTask

from .app import app

logger = logging.getLogger("molior-web")  # pylint: disable=invalid-name

# Determine the upload directory for build artifacts. IS_SPHINX is
# presumably set during documentation builds where no real configuration
# exists — TODO confirm; in that case a non-existent path is used so the
# decorator below can still be evaluated.
if not os.environ.get("IS_SPHINX", False):
    config = Configuration()
    upload_dir = config.working_dir + "/upload/"
else:
    upload_dir = "/non/existent"


@app.http_upload("/internal/buildupload/{token}", upload_dir=upload_dir)
async def file_upload(request, tempfile, filename, size):
    token = request.match_info["token"]
    logger.debug("file uploaded: %s (%s) %dbytes, token %s", tempfile,
                 filename, size, token)

    with Session() as session:
        build = (session.query(Build).join(BuildTask).filter(
            BuildTask.task_id == token).first())
        if not build:
示例#18
0
Simple http/websocket based molior backend
"""
import asyncio
import json

from molior.molior.logger import get_logger
from molior.api.app import app
from molior.molior.worker_backend import backend_queue
from molior.molior.configuration import Configuration

logger = get_logger()
# Per-architecture backend state: known nodes, queued build tasks and
# nodes currently running a build.
registry = {"amd64": [], "arm64": []}
build_tasks = {"amd64": asyncio.Queue(), "arm64": asyncio.Queue()}
running_nodes = {"amd64": [], "arm64": []}

cfg = Configuration()
# Websocket ping timeout in seconds; taken from backend_http config,
# falling back to 5 when unset.
pt = cfg.backend_http.get("ping_timeout")
if pt:
    PING_TIMEOUT = int(pt)
else:
    PING_TIMEOUT = 5


async def watchdog(ws_client):
    try:
        arch = ws_client.molior_node_arch
        while True:
            if hasattr(ws_client, "molior_pong_pending"
                       ) and ws_client.molior_pong_pending == 1:
                logger.info("backend: ping timeout after %ds on %s/%s",
                            PING_TIMEOUT, ws_client.molior_node_arch,
示例#19
0
def test_load_config_non_existent():
    """
    Loading a non-existent config file returns None.
    """
    config = Configuration()
    result = config._load_config(Path("/non/existent"))
    assert result is None