Example #1
    def test_06_read_recipe(self):
        """Test reading a recipe from a commit"""
        commits = recipes.list_commits(self.repo, "master",
                                       "example-http-server.toml")
        self.assertEqual(len(commits), 1,
                         "Wrong number of commits: %s" % commits)

        recipe = recipes.read_recipe_commit(self.repo, "master",
                                            "example-http-server")
        self.assertNotEqual(recipe, None)
        self.assertEqual(recipe["name"], "example-http-server")

        # Read by commit id
        recipe = recipes.read_recipe_commit(self.repo, "master",
                                            "example-http-server",
                                            commits[0].commit)
        self.assertNotEqual(recipe, None)
        self.assertEqual(recipe["name"], "example-http-server")

        # Read the recipe and its commit id
        (commit_id,
         recipe) = recipes.read_recipe_and_id(self.repo, "master",
                                              "example-http-server",
                                              commits[0].commit)
        self.assertEqual(commit_id, commits[0].commit)
Example #2
def start_build(cfg,
                dnflock,
                gitlock,
                branch,
                recipe_name,
                compose_type,
                test_mode=0):
    """ Start the build

    :param cfg: Configuration object
    :type cfg: ComposerConfig
    :param dnflock: Lock and YumBase for depsolving
    :type dnflock: YumLock
    :param gitlock: Lock and git repo the recipe is read from
    :type gitlock: GitLock
    :param branch: Name of the git branch to read the recipe from
    :type branch: str
    :param recipe_name: Name of the recipe (blueprint) to build
    :type recipe_name: str
    :param compose_type: The type of output to create from the recipe
    :type compose_type: str
    :param test_mode: If > 0 the value is written to a TEST file in the results directory
    :type test_mode: int
    :returns: Unique ID for the build that can be used to track its status
    :rtype: str
    """
    share_dir = cfg.get("composer", "share_dir")
    lib_dir = cfg.get("composer", "lib_dir")

    # Make sure compose_type is valid
    if compose_type not in compose_types(share_dir):
        raise RuntimeError("Invalid compose type (%s), must be one of %s" %
                           (compose_type, compose_types(share_dir)))

    # Some image types (live-iso) need extra packages for composer to execute the output template
    with dnflock.lock:
        extra_pkgs = get_extra_pkgs(dnflock.dbo, share_dir, compose_type)
    log.debug("Extra packages needed for %s: %s", compose_type, extra_pkgs)

    with gitlock.lock:
        (commit_id, recipe) = read_recipe_and_id(gitlock.repo, branch,
                                                 recipe_name)

    # Combine modules and packages and depsolve the list
    module_nver = recipe.module_nver
    package_nver = recipe.package_nver
    package_nver.extend([(name, '*') for name in extra_pkgs])

    projects = sorted(set(module_nver + package_nver),
                      key=lambda p: p[0].lower())
    deps = []
    log.info("depsolving %s", recipe["name"])
    try:
        # This can possibly update repodata and reset the YumBase object.
        with dnflock.lock_check:
            (installed_size,
             deps) = projects_depsolve_with_size(dnflock.dbo,
                                                 projects,
                                                 recipe.group_names,
                                                 with_core=False)
    except ProjectsError as e:
        log.error("start_build depsolve: %s", str(e))
        raise RuntimeError("Problem depsolving %s: %s" %
                           (recipe["name"], str(e)))

    # Read the kickstart template for this type
    ks_template_path = joinpaths(share_dir, "composer", compose_type) + ".ks"
    ks_template = open(ks_template_path, "r").read()

    # How much space will the packages in the default template take?
    ks_version = makeVersion()
    ks = KickstartParser(ks_version,
                         errorsAreFatal=False,
                         missingIncludeIsFatal=False)
    ks.readKickstartFromString(ks_template + "\n%end\n")
    pkgs = [(name, "*") for name in ks.handler.packages.packageList]
    grps = [grp.name for grp in ks.handler.packages.groupList]
    try:
        with dnflock.lock:
            (template_size, _) = projects_depsolve_with_size(
                dnflock.dbo,
                pkgs,
                grps,
                with_core=not ks.handler.packages.nocore)
    except ProjectsError as e:
        log.error("start_build depsolve: %s", str(e))
        raise RuntimeError("Problem depsolving %s: %s" %
                           (recipe["name"], str(e)))
    log.debug("installed_size = %d, template_size=%d", installed_size,
              template_size)

    # Minimum LMC disk size is 1GiB, and anaconda bumps the estimated size up by 10% (which doesn't always work).
    installed_size = int((installed_size + template_size)) * 1.2
    log.debug("/ partition size = %d", installed_size)

    # Create the results directory
    build_id = str(uuid4())
    results_dir = joinpaths(lib_dir, "results", build_id)
    os.makedirs(results_dir)

    # Write the recipe commit hash
    commit_path = joinpaths(results_dir, "COMMIT")
    with open(commit_path, "w") as f:
        f.write(commit_id)

    # Write the original recipe
    recipe_path = joinpaths(results_dir, "blueprint.toml")
    with open(recipe_path, "w") as f:
        f.write(recipe.toml())

    # Write the frozen recipe
    frozen_recipe = recipe.freeze(deps)
    recipe_path = joinpaths(results_dir, "frozen.toml")
    with open(recipe_path, "w") as f:
        f.write(frozen_recipe.toml())

    # Write out the dependencies to the results dir
    deps_path = joinpaths(results_dir, "deps.toml")
    with open(deps_path, "w") as f:
        f.write(toml.dumps({"packages": deps}))

    # Save a copy of the original kickstart
    shutil.copy(ks_template_path, results_dir)

    with dnflock.lock:
        repos = list(dnflock.dbo.repos.iter_enabled())
    if not repos:
        raise RuntimeError("No enabled repos, canceling build.")

    # Create the git rpms, if any, and return the path to the repo under results_dir
    gitrpm_repo = create_gitrpm_repo(results_dir, recipe)

    # Create the final kickstart with repos and package list
    ks_path = joinpaths(results_dir, "final-kickstart.ks")
    with open(ks_path, "w") as f:
        ks_url = repo_to_ks(repos[0], "url")
        log.debug("url = %s", ks_url)
        f.write('url %s\n' % ks_url)
        for idx, r in enumerate(repos[1:]):
            ks_repo = repo_to_ks(r, "baseurl")
            log.debug("repo composer-%s = %s", idx, ks_repo)
            f.write('repo --name="composer-%s" %s\n' % (idx, ks_repo))

        if gitrpm_repo:
            log.debug("repo gitrpms = %s", gitrpm_repo)
            f.write('repo --name="gitrpms" --baseurl="file://%s"\n' %
                    gitrpm_repo)

        # Setup the disk for booting
        # TODO Add GPT and UEFI boot support
        f.write('clearpart --all --initlabel\n')

        # Write the root partition and its size in MiB (rounded up)
        f.write('part / --size=%d\n' % ceil(installed_size / 1024**2))

        # Some customizations modify the template before writing it
        f.write(customize_ks_template(ks_template, recipe))

        for d in deps:
            f.write(dep_nevra(d) + "\n")

        # Include the rpms from the gitrpm repo directory
        if gitrpm_repo:
            for rpm in glob(os.path.join(gitrpm_repo, "*.rpm")):
                f.write(os.path.basename(rpm)[:-4] + "\n")

        f.write("%end\n")

        # Other customizations can be appended to the kickstart
        add_customizations(f, recipe)

    # Setup the config to pass to novirt_install
    log_dir = joinpaths(results_dir, "logs/")
    cfg_args = compose_args(compose_type)

    # Get the title, project, and release version from the host
    if not os.path.exists("/etc/os-release"):
        log.error(
            "/etc/os-release is missing, cannot determine product or release version"
        )
    os_release = flatconfig("/etc/os-release")

    log.debug("os_release = %s", dict(os_release.items()))

    cfg_args["title"] = os_release.get("PRETTY_NAME", "")
    cfg_args["project"] = os_release.get("NAME", "")
    cfg_args["releasever"] = os_release.get("VERSION_ID", "")
    cfg_args["volid"] = ""
    cfg_args["extra_boot_args"] = get_kernel_append(recipe)

    cfg_args.update({
        "compression": "xz",
        "compress_args": [],
        "ks": [ks_path],
        "logfile": log_dir,
        "timeout": 60,  # 60 minute timeout
    })
    with open(joinpaths(results_dir, "config.toml"), "w") as f:
        f.write(toml.dumps(cfg_args))

    # Set the initial status
    open(joinpaths(results_dir, "STATUS"), "w").write("WAITING")

    # Set the test mode, if requested
    if test_mode > 0:
        open(joinpaths(results_dir, "TEST"), "w").write("%s" % test_mode)

    write_timestamp(results_dir, TS_CREATED)
    log.info("Adding %s (%s %s) to compose queue", build_id, recipe["name"],
             compose_type)
    os.symlink(results_dir, joinpaths(lib_dir, "queue/new/", build_id))

    return build_id
Example #3
File: compose.py  Project: stefwalter/lorax
def start_build(cfg,
                dnflock,
                gitlock,
                branch,
                recipe_name,
                compose_type,
                test_mode=0):
    """ Start the build

    :param cfg: Configuration object
    :type cfg: ComposerConfig
    :param dnflock: Lock and YumBase for depsolving
    :type dnflock: YumLock
    :param gitlock: Lock and git repo the recipe is read from
    :type gitlock: GitLock
    :param branch: Name of the git branch to read the recipe from
    :type branch: str
    :param recipe_name: Name of the recipe (blueprint) to build
    :type recipe_name: str
    :param compose_type: The type of output to create from the recipe
    :type compose_type: str
    :param test_mode: If > 0 the value is written to a TEST file in the results directory
    :type test_mode: int
    :returns: Unique ID for the build that can be used to track its status
    :rtype: str
    """
    share_dir = cfg.get("composer", "share_dir")
    lib_dir = cfg.get("composer", "lib_dir")

    # Make sure compose_type is valid
    if compose_type not in compose_types(share_dir):
        raise RuntimeError("Invalid compose type (%s), must be one of %s" %
                           (compose_type, compose_types(share_dir)))

    with gitlock.lock:
        (commit_id, recipe) = read_recipe_and_id(gitlock.repo, branch,
                                                 recipe_name)

    # Combine modules and packages and depsolve the list
    # TODO include the version/glob in the depsolving
    module_nver = recipe.module_nver
    package_nver = recipe.package_nver
    projects = sorted(set(module_nver + package_nver),
                      key=lambda p: p[0].lower())
    deps = []
    try:
        with dnflock.lock:
            (installed_size,
             deps) = projects_depsolve_with_size(dnflock.dbo,
                                                 projects,
                                                 with_core=False)
    except ProjectsError as e:
        log.error("start_build depsolve: %s", str(e))
        raise RuntimeError("Problem depsolving %s: %s" %
                           (recipe["name"], str(e)))

    # Read the kickstart template for this type
    ks_template_path = joinpaths(share_dir, "composer", compose_type) + ".ks"
    ks_template = open(ks_template_path, "r").read()

    # How much space will the packages in the default template take?
    ks_version = makeVersion()
    ks = KickstartParser(ks_version,
                         errorsAreFatal=False,
                         missingIncludeIsFatal=False)
    ks.readKickstartFromString(ks_template + "\n%end\n")
    ks_projects = [(name, "*") for name in ks.handler.packages.packageList]
    try:
        with dnflock.lock:
            (template_size, _) = projects_depsolve_with_size(
                dnflock.dbo,
                ks_projects,
                with_core=not ks.handler.packages.nocore)
    except ProjectsError as e:
        log.error("start_build depsolve: %s", str(e))
        raise RuntimeError("Problem depsolving %s: %s" %
                           (recipe["name"], str(e)))
    log.debug("installed_size = %d, template_size=%d", installed_size,
              template_size)

    # Minimum LMC disk size is 1GiB, and anaconda bumps the estimated size up by 10% (which doesn't always work).
    # XXX BUT Anaconda has a bug, it won't execute a kickstart on a disk smaller than 3000 MB
    # XXX There is an upstream patch pending, but until then, use that as the minimum
    installed_size = max(3e9, int((installed_size + template_size))) * 1.2
    log.debug("/ partition size = %d", installed_size)

    # Create the results directory
    build_id = str(uuid4())
    results_dir = joinpaths(lib_dir, "results", build_id)
    os.makedirs(results_dir)

    # Write the recipe commit hash
    commit_path = joinpaths(results_dir, "COMMIT")
    with open(commit_path, "w") as f:
        f.write(commit_id)

    # Write the original recipe
    recipe_path = joinpaths(results_dir, "blueprint.toml")
    with open(recipe_path, "w") as f:
        f.write(recipe.toml())

    # Write the frozen recipe
    frozen_recipe = recipe.freeze(deps)
    recipe_path = joinpaths(results_dir, "frozen.toml")
    with open(recipe_path, "w") as f:
        f.write(frozen_recipe.toml())

    # Write out the dependencies to the results dir
    deps_path = joinpaths(results_dir, "deps.toml")
    with open(deps_path, "w") as f:
        f.write(toml.dumps({"packages": deps}))

    # Save a copy of the original kickstart
    shutil.copy(ks_template_path, results_dir)

    with dnflock.lock:
        repos = list(dnflock.dbo.repos.iter_enabled())
    if not repos:
        raise RuntimeError("No enabled repos, canceling build.")

    # Create the final kickstart with repos and package list
    ks_path = joinpaths(results_dir, "final-kickstart.ks")
    with open(ks_path, "w") as f:
        ks_url = repo_to_ks(repos[0], "url")
        log.debug("url = %s", ks_url)
        f.write('url %s\n' % ks_url)
        for idx, r in enumerate(repos[1:]):
            ks_repo = repo_to_ks(r, "baseurl")
            log.debug("repo composer-%s = %s", idx, ks_repo)
            f.write('repo --name="composer-%s" %s\n' % (idx, ks_repo))

        # Write the root partition and its size in MiB (rounded up)
        f.write('part / --fstype="ext4" --size=%d\n' %
                ceil(installed_size / 1024**2))

        f.write(ks_template)

        for d in deps:
            f.write(dep_nevra(d) + "\n")
        f.write("%end\n")

        add_customizations(f, recipe)

    # Setup the config to pass to novirt_install
    log_dir = joinpaths(results_dir, "logs/")
    cfg_args = compose_args(compose_type)

    # Get the title, project, and release version from the host
    if not os.path.exists("/etc/os-release"):
        log.error(
            "/etc/os-release is missing, cannot determine product or release version"
        )
    os_release = SimpleConfigFile("/etc/os-release")
    os_release.read()

    log.debug("os_release = %s", os_release)

    cfg_args["title"] = os_release.get("PRETTY_NAME")
    cfg_args["project"] = os_release.get("NAME")
    cfg_args["releasever"] = os_release.get("VERSION_ID")
    cfg_args["volid"] = ""

    cfg_args.update({
        "compression": "xz",
        "compress_args": [],
        "ks": [ks_path],
        "logfile": log_dir,
        "timeout": 60,  # 60 minute timeout
    })
    with open(joinpaths(results_dir, "config.toml"), "w") as f:
        f.write(toml.dumps(cfg_args))

    # Set the initial status
    open(joinpaths(results_dir, "STATUS"), "w").write("WAITING")

    # Set the test mode, if requested
    if test_mode > 0:
        open(joinpaths(results_dir, "TEST"), "w").write("%s" % test_mode)

    log.info("Adding %s (%s %s) to compose queue", build_id, recipe["name"],
             compose_type)
    os.symlink(results_dir, joinpaths(lib_dir, "queue/new/", build_id))

    return build_id
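
The examples above define start_build but never show it being invoked. A minimal, hypothetical call sketch follows, assuming the cfg, dnflock and gitlock objects have already been created by the composer server startup code (which these excerpts do not include) and reusing the recipe name from Example #1:

# Hypothetical usage sketch: cfg, dnflock and gitlock come from the composer
# server setup, which is not shown in the excerpts above.
build_id = start_build(cfg, dnflock, gitlock,
                       branch="master",
                       recipe_name="example-http-server",
                       compose_type="qcow2")
print("queued compose:", build_id)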