Code example #1
def timestamp_dict(destdir):
    path = joinpaths(destdir, "times.toml")

    try:
        return toml.loads(open(path, "r").read())
    except IOError:
        return toml.loads("")
Code example #2
def uuid_info(cfg, uuid):
    """Return information about the composition

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :returns: dictionary of information about the composition or None
    :rtype: dict
    :raises: RuntimeError if there was a problem

    This will return a dict with the following fields populated:

    * id - The uuid of the composition
    * config - The configuration settings used to run Anaconda
    * blueprint - The depsolved blueprint used to generate the kickstart
    * commit - The (local) git commit hash for the blueprint used
    * deps - The NEVRA of all of the dependencies used in the composition
    * compose_type - The type of output generated (tar, iso, etc.)
    * queue_status - The final status of the composition (FINISHED or FAILED)
    * image_size - The size of the output image
    """
    uuid_dir = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid)
    if not os.path.exists(uuid_dir):
        return None

    # Load the compose configuration
    cfg_path = joinpaths(uuid_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % uuid)
    cfg_dict = toml.loads(open(cfg_path, "r").read())

    frozen_path = joinpaths(uuid_dir, "frozen.toml")
    if not os.path.exists(frozen_path):
        raise RuntimeError("Missing frozen.toml for %s" % uuid)
    frozen_dict = toml.loads(open(frozen_path, "r").read())

    deps_path = joinpaths(uuid_dir, "deps.toml")
    if not os.path.exists(deps_path):
        raise RuntimeError("Missing deps.toml for %s" % uuid)
    deps_dict = toml.loads(open(deps_path, "r").read())

    details = compose_detail(uuid_dir)

    commit_path = joinpaths(uuid_dir, "COMMIT")
    if not os.path.exists(commit_path):
        raise RuntimeError("Missing commit hash for %s" % uuid)
    commit_id = open(commit_path, "r").read().strip()

    return {
        "id": uuid,
        "config": cfg_dict,
        "blueprint": frozen_dict,
        "commit": commit_id,
        "deps": deps_dict,
        "compose_type": details["compose_type"],
        "queue_status": details["queue_status"],
        "image_size": details["image_size"]
    }
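
A small, hypothetical consumer of the dictionary returned above; the field names come from the return statement, while the helper itself is purely illustrative:

def summarize_compose(info):
    """Illustrative consumer of the dict returned by uuid_info()."""
    if info is None:
        return "unknown build id"
    # Field names match the keys assembled in uuid_info() above
    return "%s: %s compose, status %s, image size %s" % (
        info["id"], info["compose_type"], info["queue_status"], info["image_size"])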
Code example #3
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def gitrpm_repo_test(self):
        """Test creating a dnf repo of the git rpms"""
        recipe = toml.loads("""
            [[repos.git]]
            rpmname="repo-test-alpha"
            rpmversion="1.1.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="v1.1.0"
            destination="/srv/testing-alpha/"

            [[repos.git]]
            rpmname="repo-test-beta"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="v1.0.0"
            destination="/srv/testing-beta/"
        """ % (self.repodir, self.repodir))
        try:
            temp_dir = tempfile.mkdtemp(prefix="git-rpm-test.")
            repo_dir = create_gitrpm_repo(temp_dir, recipe)

            self.assertTrue(len(repo_dir) > 0)
            self.assertTrue(
                os.path.exists(
                    joinpaths(repo_dir, "repo-test-alpha-1.1.0-1.noarch.rpm")))
            self.assertTrue(
                os.path.exists(
                    joinpaths(repo_dir, "repo-test-beta-1.0.0-1.noarch.rpm")))

        finally:
            shutil.rmtree(temp_dir)
Code example #4
def write_timestamp(destdir, ty):
    path = joinpaths(destdir, "times.toml")

    try:
        contents = toml.loads(open(path, "r").read())
    except IOError:
        contents = toml.loads("")

    if ty == TS_CREATED:
        contents[TS_CREATED] = time.time()
    elif ty == TS_STARTED:
        contents[TS_STARTED] = time.time()
    elif ty == TS_FINISHED:
        contents[TS_FINISHED] = time.time()

    with open(path, "w") as f:
        f.write(toml.dumps(contents))
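
The same read-modify-write cycle can be exercised on its own. A self-contained sketch, using plain string keys in place of the TS_* constants (whose values are not shown in the snippet above):

import os
import tempfile
import time

import toml

# Sketch of the times.toml read-modify-write cycle used by write_timestamp().
destdir = tempfile.mkdtemp(prefix="times-sketch.")
path = os.path.join(destdir, "times.toml")

try:
    contents = toml.loads(open(path, "r").read())
except IOError:
    contents = toml.loads("")      # missing file -> empty dict

contents["created"] = time.time()  # stand-in for the TS_* keys
with open(path, "w") as f:
    f.write(toml.dumps(contents))

print(toml.loads(open(path, "r").read()))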
Code example #5
def recipe_from_toml(recipe_str):
    """Create a Recipe object from a toml string.

    :param recipe_str: The Recipe TOML string
    :type recipe_str: str
    :returns: A Recipe object
    :rtype: Recipe
    :raises: TomlError
    """
    recipe_dict = toml.loads(recipe_str)
    return recipe_from_dict(recipe_dict)
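
recipe_from_toml is a thin wrapper: toml.loads turns the blueprint text into a plain dict, which recipe_from_dict then turns into a Recipe object. A self-contained sketch of the parsing half (the blueprint content is illustrative only):

import toml

# Parse a small blueprint the way recipe_from_toml() does before handing
# the resulting dict to recipe_from_dict(). The blueprint content is made up.
recipe_str = """
name = "example-server"
description = "An illustrative blueprint"
version = "0.0.1"

[[packages]]
name = "openssh-server"
version = "*"
"""
recipe_dict = toml.loads(recipe_str)
print(recipe_dict["name"], [p["name"] for p in recipe_dict["packages"]])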
Code example #6
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def git_tag_test(self):
        """Test creating an archive from a git tag"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="v1.1.0"
            destination="/srv/testing-rpm/"
        """ % (self.repodir))
        archive = GitArchiveTarball(git_repo["repos"]["git"][0])
        self._check_tar(archive, "git-rpm-test/", "second")
Code example #7
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def git_branch_test(self):
        """Test creating an archive from a git branch"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="origin/custom-branch"
            destination="/srv/testing-rpm/"
        """ % self.repodir)
        archive = GitArchiveTarball(git_repo["repos"]["git"][0])
        self._check_tar(archive, "git-rpm-test/", "branch")
Code example #8
File: test_gitrpm.py Project: sitedata/lorax
    def test_git_commit(self):
        """Test creating an archive from a git commit hash"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="%s"
            destination="/srv/testing-rpm/"
        """ % (self.repodir, self.first_commit))
        archive = GitArchiveTarball(git_repo["repos"]["git"][0])
        self._check_tar(archive, "git-rpm-test/", "first")
Code example #9
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def git_fail_ref_test(self):
        """Test creating an archive from a bad ref"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="0297617d7b8baa263a69ae7dc901bbbcefd0eaa4"
            destination="/srv/testing-rpm/"
        """ % (self.repodir))
        with self.assertRaises(RuntimeError):
            archive = GitArchiveTarball(git_repo["repos"]["git"][0])
            self._check_tar(archive, "git-rpm-test/", None)
Code example #10
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def git_fail_repo_test(self):
        """Test creating an archive from a bad url"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="v1.1.0"
            destination="/srv/testing-rpm/"
        """ % ("/tmp/no-repo-here/"))
        with self.assertRaises(RuntimeError):
            archive = GitArchiveTarball(git_repo["repos"]["git"][0])
            self._check_tar(archive, "git-rpm-test/", None)
Code example #11
def uuid_tar(cfg, uuid, metadata=False, image=False, logs=False):
    """Return a tar of the build data

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :param metadata: Set to true to include all the metadata needed to reproduce the build
    :type metadata: bool
    :param image: Set to true to include the output image
    :type image: bool
    :param logs: Set to true to include the logs from the build
    :type logs: bool
    :returns: A stream of bytes from tar
    :rtype: A generator
    :raises: RuntimeError if there was a problem (eg. missing config file)

    This yields an uncompressed tar's data to the caller. It streams the selected
    data to the caller by returning the Popen stdout from the tar process.
    """
    uuid_dir = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid)
    if not os.path.exists(uuid_dir):
        raise RuntimeError("%s is not a valid build_id" % uuid)

    # Load the compose configuration
    cfg_path = joinpaths(uuid_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % uuid)
    cfg_dict = toml.loads(open(cfg_path, "r").read())
    image_name = cfg_dict["image_name"]

    def include_file(f):
        if f.endswith("/logs"):
            return logs
        if f.endswith(image_name):
            return image
        return metadata

    filenames = [
        os.path.basename(f) for f in glob(joinpaths(uuid_dir, "*"))
        if include_file(f)
    ]

    tar = Popen(["tar", "-C", uuid_dir, "-cf-"] + filenames, stdout=PIPE)
    return tar.stdout
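
uuid_tar hands back the pipe from ``tar -cf-``, so the caller is responsible for draining it. A self-contained sketch of that streaming pattern (directory contents and file names here are illustrative, not taken from a real compose):

import os
import tempfile
from subprocess import Popen, PIPE

# Stream tar output from a pipe to a file, the way a caller of uuid_tar() would.
workdir = tempfile.mkdtemp(prefix="uuid-tar-sketch.")
with open(os.path.join(workdir, "config.toml"), "w") as f:
    f.write('image_name = "disk.tar.xz"\n')

tar = Popen(["tar", "-C", workdir, "-cf-", "config.toml"], stdout=PIPE)
with open(os.path.join(workdir, "export.tar"), "wb") as out:
    for chunk in iter(lambda: tar.stdout.read(1024 * 1024), b""):
        out.write(chunk)
tar.wait()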
Code example #12
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def git_commit_test(self):
        """Test creating an rpm from a git commit hash"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="%s"
            destination="/srv/testing-rpm/"
        """ % (self.repodir, self.first_commit))
        try:
            rpm_dir = tempfile.mkdtemp(prefix="git-rpm-test.")
            rpm_file = make_git_rpm(git_repo["repos"]["git"][0], rpm_dir)
            self._check_rpm(git_repo["repos"]["git"][0], rpm_dir, rpm_file, "first")
        finally:
            shutil.rmtree(rpm_dir)
Code example #13
File: test_gitrpm.py Project: rasrivas-redhat/lorax
    def git_root_test(self):
        """Test creating an rpm with / as the destination"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="v1.1.0"
            destination="/"
        """ % (self.repodir))
        try:
            rpm_dir = tempfile.mkdtemp(prefix="git-rpm-test.")
            rpm_file = make_git_rpm(git_repo["repos"]["git"][0], rpm_dir)
            self._check_rpm(git_repo["repos"]["git"][0], rpm_dir, rpm_file, "second")
        finally:
            shutil.rmtree(rpm_dir)
Code example #14
File: test_gitrpm.py Project: sitedata/lorax
    def test_git_branch(self):
        """Test creating an rpm from a git branch"""
        git_repo = toml.loads("""
            [[repos.git]]
            rpmname="git-rpm-test"
            rpmversion="1.0.0"
            rpmrelease="1"
            summary="Testing the git rpm code"
            repo="file://%s"
            ref="origin/custom-branch"
            destination="/srv/testing-rpm/"
        """ % self.repodir)
        try:
            rpm_dir = tempfile.mkdtemp(prefix="git-rpm-test.")
            rpm_file = make_git_rpm(git_repo["repos"]["git"][0], rpm_dir)
            self._check_rpm(git_repo["repos"]["git"][0], rpm_dir, rpm_file, "branch")
        finally:
            shutil.rmtree(rpm_dir)
Code example #15
def get_image_name(uuid_dir):
    """Return the filename and full path of the build's image file

    :param uuid_dir: The directory containing the compose results
    :type uuid_dir: str
    :returns: The image filename and full path
    :rtype: tuple of strings
    :raises: RuntimeError if there was a problem (eg. invalid uuid, missing config file)
    """
    uuid = os.path.basename(os.path.abspath(uuid_dir))
    if not os.path.exists(uuid_dir):
        raise RuntimeError("%s is not a valid build_id" % uuid)

    # Load the compose configuration
    cfg_path = joinpaths(uuid_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % uuid)
    cfg_dict = toml.loads(open(cfg_path, "r").read())
    image_name = cfg_dict["image_name"]

    return (image_name, joinpaths(uuid_dir, image_name))
Code example #16
File: v1.py Project: dwlehman/lorax
def v1_projects_source_new():
    """Add a new package source, or change an existing one

    **POST /api/v1/projects/source/new**

      Add (or change) a source for use when depsolving blueprints and composing images.

      The ``proxy`` and ``gpgkey_urls`` entries are optional. All of the others are required. The supported
      types for the urls are:

      * ``yum-baseurl`` is a URL to a yum repository.
      * ``yum-mirrorlist`` is a URL for a mirrorlist.
      * ``yum-metalink`` is a URL for a metalink.

      If ``check_ssl`` is true the https certificates must be valid. If they are self-signed you can either set
      this to false, or add your Certificate Authority to the host system.

      If ``check_gpg`` is true the GPG key must either be installed on the host system, or ``gpgkey_urls``
      should point to it.

      You can edit an existing source (other than system sources), by doing a POST
      of the new version of the source. It will overwrite the previous one.

      Example::

          {
              "id": "custom-source-1",
              "name": "Custom Package Source #1",
              "url": "https://url/path/to/repository/",
              "type": "yum-baseurl",
              "check_ssl": true,
              "check_gpg": true,
              "gpgkey_urls": [
                  "https://url/path/to/gpg-key"
              ]
          }

    In v0 the ``name`` field was used for the id (a short name for the repo). In v1 ``name`` changed
    to ``id`` and ``name`` is now used for the longer descriptive name of the repository.
    """
    if request.headers['Content-Type'] == "text/x-toml":
        source = toml.loads(request.data)
    else:
        source = request.get_json(cache=False)

    # XXX TODO
    # Check for id in source, return error if not
    # Add test for that
    if "id" not in source:
        return jsonify(status=False,
                       errors=[{
                           "id":
                           UNKNOWN_SOURCE,
                           "msg":
                           "'id' field is missing from API v1 request."
                       }]), 400

    system_sources = get_repo_sources("/etc/yum.repos.d/*.repo")
    if source["id"] in system_sources:
        return jsonify(status=False,
                       errors=[{
                           "id":
                           SYSTEM_SOURCE,
                           "msg":
                           "%s is a system source, it cannot be changed." %
                           source["id"]
                       }]), 400

    try:
        # Remove it from the RepoDict (NOTE that this isn't explicitly supported by the DNF API)
        with api.config["DNFLOCK"].lock:
            dbo = api.config["DNFLOCK"].dbo
            # If this repo already exists, delete it and replace it with the new one
            repos = list(r.id for r in dbo.repos.iter_enabled())
            if source["id"] in repos:
                del dbo.repos[source["id"]]

            repo = source_to_repo(source, dbo.conf)
            dbo.repos.add(repo)

            log.info("Updating repository metadata after adding %s",
                     source["id"])
            dbo.fill_sack(load_system_repo=False)
            dbo.read_comps()

        # Write the new repo to disk, replacing any existing ones
        repo_dir = api.config["COMPOSER_CFG"].get("composer", "repo_dir")

        # Remove any previous sources with this id, ignore it if it isn't found
        try:
            delete_repo_source(joinpaths(repo_dir, "*.repo"), source["id"])
        except ProjectsError:
            pass

        # Make sure the source id can't contain a path traversal by taking the basename
        source_path = joinpaths(repo_dir,
                                os.path.basename("%s.repo" % source["id"]))
        with open(source_path, "w") as f:
            f.write(dnf_repo_to_file_repo(repo))
    except Exception as e:
        log.error("(v0_projects_source_add) adding %s failed: %s",
                  source["id"], str(e))

        # Cleanup the mess, if loading it failed we don't want to leave it in memory
        repos = list(r.id for r in dbo.repos.iter_enabled())
        if source["id"] in repos:
            with api.config["DNFLOCK"].lock:
                dbo = api.config["DNFLOCK"].dbo
                del dbo.repos[source["id"]]

                log.info("Updating repository metadata after adding %s failed",
                         source["id"])
                dbo.fill_sack(load_system_repo=False)
                dbo.read_comps()

        return jsonify(status=False,
                       errors=[{
                           "id": PROJECTS_ERROR,
                           "msg": str(e)
                       }]), 400

    return jsonify(status=True)
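
Because the endpoint accepts ``text/x-toml`` and parses the body with toml.loads, the example source in the docstring can equally be written as TOML. A sketch of the equivalent body (a direct translation of the JSON example above, not taken from the project documentation):

import toml

# TOML form of the docstring's JSON example; toml.loads() yields the same
# dict the endpoint would build from a JSON body.
source_toml = """
id = "custom-source-1"
name = "Custom Package Source #1"
url = "https://url/path/to/repository/"
type = "yum-baseurl"
check_ssl = true
check_gpg = true
gpgkey_urls = ["https://url/path/to/gpg-key"]
"""
source = toml.loads(source_toml)
assert source["id"] == "custom-source-1"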
Code example #17
File: v1.py Project: sitedata/lorax
def v1_projects_source_new():
    """Add a new package source, or change an existing one

    **POST /api/v1/projects/source/new**

      Add (or change) a source for use when depsolving blueprints and composing images.

      The ``proxy`` and ``gpgkey_urls`` entries are optional. All of the others are required. The supported
      types for the urls are:

      * ``yum-baseurl`` is a URL to a yum repository.
      * ``yum-mirrorlist`` is a URL for a mirrorlist.
      * ``yum-metalink`` is a URL for a metalink.

      If ``check_ssl`` is true the https certificates must be valid. If they are self-signed you can either set
      this to false, or add your Certificate Authority to the host system.

      If ``check_gpg`` is true the GPG key must either be installed on the host system, or ``gpgkey_urls``
      should point to it.

      You can edit an existing source (other than system sources), by doing a POST
      of the new version of the source. It will overwrite the previous one.

      Example::

          {
              "id": "custom-source-1",
              "name": "Custom Package Source #1",
              "url": "https://url/path/to/repository/",
              "type": "yum-baseurl",
              "check_ssl": true,
              "check_gpg": true,
              "gpgkey_urls": [
                  "https://url/path/to/gpg-key"
              ]
          }

    In v0 the ``name`` field was used for the id (a short name for the repo). In v1 ``name`` changed
    to ``id`` and ``name`` is now used for the longer descriptive name of the repository.
    """
    if request.headers['Content-Type'] == "text/x-toml":
        source = toml.loads(request.data)
    else:
        source = request.get_json(cache=False)

    # Check for id in source, return error if not
    if "id" not in source:
        return jsonify(status=False,
                       errors=[{
                           "id":
                           UNKNOWN_SOURCE,
                           "msg":
                           "'id' field is missing from API v1 request."
                       }]), 400

    system_sources = get_repo_sources("/etc/yum.repos.d/*.repo")
    if source["id"] in system_sources:
        return jsonify(status=False,
                       errors=[{
                           "id":
                           SYSTEM_SOURCE,
                           "msg":
                           "%s is a system source, it cannot be changed." %
                           source["id"]
                       }]), 400

    try:
        # Remove it from the RepoDict (NOTE that this isn't explicitly supported by the DNF API)
        with api.config["DNFLOCK"].lock:
            repo_dir = api.config["COMPOSER_CFG"].get("composer", "repo_dir")
            new_repo_source(api.config["DNFLOCK"].dbo, source["id"], source,
                            repo_dir)
    except Exception as e:
        return jsonify(status=False,
                       errors=[{
                           "id": PROJECTS_ERROR,
                           "msg": str(e)
                       }]), 400

    return jsonify(status=True)
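
A hypothetical client-side call for this endpoint, sending the body as TOML; the API_URL value is an assumption (lorax-composer is normally reached through composer-cli over a unix socket), while the path and Content-Type follow the docstring above:

import requests

API_URL = "http://localhost:4000"   # assumption: adjust to wherever the API is exposed

source_toml = """
id = "custom-source-1"
name = "Custom Package Source #1"
url = "https://url/path/to/repository/"
type = "yum-baseurl"
check_ssl = true
check_gpg = true
"""
# POST the TOML body; the endpoint parses it with toml.loads()
r = requests.post(API_URL + "/api/v1/projects/source/new",
                  data=source_toml,
                  headers={"Content-Type": "text/x-toml"})
print(r.status_code, r.json())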
Code example #18
def make_compose(cfg, results_dir):
    """Run anaconda with the final-kickstart.ks from results_dir

    :param cfg: Configuration settings
    :type cfg: DataHolder
    :param results_dir: The directory containing the metadata and results for the build
    :type results_dir: str
    :returns: Nothing
    :raises: May raise various exceptions

    This takes the final-kickstart.ks, and the settings in config.toml and runs Anaconda
    in no-virt mode (directly on the host operating system). Exceptions should be caught
    at the higher level.

    If there is a failure, the build artifacts will be cleaned up, and any logs will be
    moved into logs/anaconda/ and their ownership will be set to the user from the cfg
    object.
    """

    # Check on the ks's presence
    ks_path = joinpaths(results_dir, "final-kickstart.ks")
    if not os.path.exists(ks_path):
        raise RuntimeError("Missing kickstart file at %s" % ks_path)

    # Load the compose configuration
    cfg_path = joinpaths(results_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % results_dir)
    cfg_dict = toml.loads(open(cfg_path, "r").read())

    # The keys in cfg_dict correspond to the arguments setup in livemedia-creator
    # keys that define what to build should be setup in compose_args, and keys with
    # defaults should be setup here.

    # Make sure that image_name contains no path components
    cfg_dict["image_name"] = os.path.basename(cfg_dict["image_name"])

    # Only support novirt installation, set some other defaults
    cfg_dict["no_virt"] = True
    cfg_dict["disk_image"] = None
    cfg_dict["fs_image"] = None
    cfg_dict["keep_image"] = False
    cfg_dict["domacboot"] = False
    cfg_dict["anaconda_args"] = ""
    cfg_dict["proxy"] = ""
    cfg_dict["armplatform"] = ""
    cfg_dict["squashfs_args"] = None

    cfg_dict["lorax_templates"] = find_templates(cfg.share_dir)
    cfg_dict["tmp"] = cfg.tmp
    cfg_dict["dracut_args"] = None  # Use default args for dracut

    # TODO How to support other arches?
    cfg_dict["arch"] = None

    # Compose things in a temporary directory inside the results directory
    cfg_dict["result_dir"] = joinpaths(results_dir, "compose")
    os.makedirs(cfg_dict["result_dir"])

    install_cfg = DataHolder(**cfg_dict)

    # Some kludges for the 99-copy-logs %post, failure in it will crash the build
    for f in ["/tmp/NOSAVE_INPUT_KS", "/tmp/NOSAVE_LOGS"]:
        open(f, "w")

    # Placing a CANCEL file in the results directory will make execWithRedirect send anaconda a SIGTERM
    def cancel_build():
        return os.path.exists(joinpaths(results_dir, "CANCEL"))

    log.debug("cfg  = %s", install_cfg)
    try:
        test_path = joinpaths(results_dir, "TEST")
        write_timestamp(results_dir, TS_STARTED)
        if os.path.exists(test_path):
            # Pretend to run the compose
            time.sleep(5)
            try:
                test_mode = int(open(test_path, "r").read())
            except Exception:
                test_mode = 1
            if test_mode == 1:
                raise RuntimeError("TESTING FAILED compose")
            else:
                open(joinpaths(results_dir, install_cfg.image_name),
                     "w").write("TEST IMAGE")
        else:
            run_creator(install_cfg, cancel_func=cancel_build)

            # Extract the results of the compose into results_dir and cleanup the compose directory
            move_compose_results(install_cfg, results_dir)
    finally:
        # Make sure any remaining temporary directories are removed (eg. if there was an exception)
        for d in glob(joinpaths(cfg.tmp, "lmc-*")):
            if os.path.isdir(d):
                shutil.rmtree(d)
            elif os.path.isfile(d):
                os.unlink(d)

        # Make sure that everything under the results directory is owned by the user
        user = pwd.getpwuid(cfg.uid).pw_name
        group = grp.getgrgid(cfg.gid).gr_name
        log.debug("Install finished, chowning results to %s:%s", user, group)
        subprocess.call(["chown", "-R", "%s:%s" % (user, group), results_dir])