Example No. 1
def download_dependencies(prefer_offline=False, redownload=False):
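    # Walk the modules in the build definition: copy local ("./") modules
    # directly, clone remote modules at their pinned commit into the download
    # cache, then copy each one into its numbered out/steps/ directory.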
    print("\nModules:")
    counter = 1
    definition = get_definition()
    max_length = longest_module_name()
    for module in definition["build"]:
        name = module["name"]
        if name.startswith("./"):
            local_module_copy(module, counter, max_length)
            counter += 1
            continue
        commit = module["commit"]
        url = strip_right(module["repo"], ".git")
        commit_dir = get_download_path(module)
        if redownload:
            rm(commit_dir, missing_ok=True)
        if not os.path.exists(commit_dir):
            sh(f"git clone {url} {commit_dir}")
            sh(f"(cd {commit_dir} && git checkout {commit})")
        target = f"out/steps/{counter:03d}_{module['name']}_{commit}/"
        module["_directory"] = target
        module["_counter"] = counter
        subdirectory = module.get("subdirectory", None)
        if not subdirectory:
            cp(commit_dir, target)
        else:
            cp(os.path.join(commit_dir, subdirectory), target)
        print(
            f"{counter:03d} {pad_right(name, max_length)} @ {commit} (Downloaded)"
        )
        counter += 1
Example No. 2
def _download_dependencies(config, prefer_offline=False, redownload=False):
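    # Resolve every module in config["build"]: validate its commit hash,
    # download it (git clone or archive fetch with checksum verification
    # against the version index) into the downloads cache, then copy it
    # into its numbered out/steps/ directory.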
    # TODO: This function should be split in 2:
    #       1. Code for downloading things into ~/.cfengine
    #       2. Code for copying things into ./out
    print("\nModules:")
    counter = 1
    max_length = config.longest_module_name()
    downloads = os.path.join(cfbs_dir(), "downloads")
    for module in config["build"]:
        name = module["name"]
        if name.startswith("./"):
            local_module_copy(module, counter, max_length)
            counter += 1
            continue
        if "commit" not in module:
            user_error("module %s must have a commit property" % name)
        commit = module["commit"]
        if not is_a_commit_hash(commit):
            user_error("'%s' is not a commit reference" % commit)

        url = module.get("url") or module["repo"]
        url = strip_right(url, ".git")
        commit_dir = get_download_path(module)
        if redownload:
            rm(commit_dir, missing_ok=True)
        if "subdirectory" in module:
            module_dir = os.path.join(commit_dir, module["subdirectory"])
        else:
            module_dir = commit_dir
        if not os.path.exists(module_dir):
            if url.endswith(SUPPORTED_ARCHIVES):
                fetch_archive(url, commit)
            elif "index" in module:
                sh("git clone %s %s" % (url, commit_dir))
                sh("(cd %s && git checkout %s)" % (commit_dir, commit))
            else:
                versions = get_json(_VERSION_INDEX)
                try:
                    checksum = versions[name][
                        module["version"]]["archive_sha256"]
                except KeyError:
                    user_error("Cannot verify checksum of the '%s' module" %
                               name)
                module_archive_url = os.path.join(_MODULES_URL, name,
                                                  commit + ".tar.gz")
                fetch_archive(module_archive_url,
                              checksum,
                              directory=commit_dir,
                              with_index=False)
        target = "out/steps/%03d_%s_%s/" % (counter, module["name"], commit)
        module["_directory"] = target
        module["_counter"] = counter
        subdirectory = module.get("subdirectory", None)
        if not subdirectory:
            cp(commit_dir, target)
        else:
            cp(os.path.join(commit_dir, subdirectory), target)
        print("%03d %s @ %s (Downloaded)" %
              (counter, pad_right(name, max_length), commit))
        counter += 1
Example No. 3
def build_step(module, step, max_length):
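    # Apply a single build step ("copy", "run", "delete", "json" or "append")
    # from the module's staged directory into out/masterfiles.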
    step = step.split(" ")
    operation, args = step[0], step[1:]
    source = module["_directory"]
    counter = module["_counter"]
    destination = "out/masterfiles"

    prefix = f"{counter:03d} {pad_right(module['name'], max_length)} :"

    if operation == "copy":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print(f"{prefix} copy '{src}' 'masterfiles/{dst}'")
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        cp(src, dst)
    elif operation == "run":
        shell_command = " ".join(args)
        print(f"{prefix} run '{shell_command}'")
        sh(shell_command, source)
    elif operation == "delete":
        files = [args] if type(args) is str else args
        assert len(files) > 0
        as_string = " ".join([f"'{f}'" for f in files])
        print(f"{prefix} delete {as_string}")
        for file in files:
            rm(os.path.join(source, file))
    elif operation == "json":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print(f"{prefix} json '{src}' 'masterfiles/{dst}'")
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        extras, original = read_json(src), read_json(dst)
        assert extras is not None
        if not extras:
            print(
                f"Warning: '{os.path.basename(src)}' looks empty, adding nothing"
            )
        if original:
            merged = merge_json(original, extras)
        else:
            merged = extras
        write_json(dst, merged)
    elif operation == "append":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print(f"{prefix} append '{src}' 'masterfiles/{dst}'")
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        if not os.path.exists(dst):
            touch(dst)
        assert os.path.isfile(dst)
        sh(f"cat '{src}' >> '{dst}'")
    else:
        user_error(f"Unknown build step operation: {operation}")
Example No. 4
def install_command(destination=None) -> int:
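    # Build out/masterfiles if it does not exist yet, then replace the
    # destination (default /var/cfengine/masterfiles) with a fresh copy.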
    if not os.path.exists("out/masterfiles"):
        r = build_command()
        if r != 0:
            return r

    if not destination:
        destination = "/var/cfengine/masterfiles"
    rm(destination, missing_ok=True)
    cp("out/masterfiles", destination)
    return 0
Example No. 5
def install_command(args) -> int:
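    # Build out/masterfiles if needed, then install it to the destination
    # given on the command line, falling back to /var/cfengine/masterfiles
    # when running as root or ~/.cfagent/inputs otherwise.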
    if len(args) > 1:
        user_error(
            "Only one destination is allowed for command: cfbs install [destination]"
        )
    if not os.path.exists("out/masterfiles"):
        r = build_command()
        if r != 0:
            return r

    if len(args) > 0:
        destination = args[0]
    elif os.getuid() == 0:
        destination = "/var/cfengine/masterfiles"
    else:
        destination = os.path.join(os.environ["HOME"], ".cfagent/inputs")
    if not destination.startswith("/") and not destination.startswith("./"):
        destination = "./" + destination
    rm(destination, missing_ok=True)
    cp("out/masterfiles", destination)
    print("Installed to %s" % destination)
    return 0
Example No. 6
def init_build_folder():
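    # Start from a clean slate: remove any previous out/ directory and
    # recreate the out/masterfiles and out/steps build folders.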
    rm("out", missing_ok=True)
    mkdir("out")
    mkdir("out/masterfiles")
    mkdir("out/steps")
Example No. 7
def fetch_archive(url, checksum=None, directory=None, with_index=True):
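    # Download a .tar.* or .zip archive into the downloads cache, verify its
    # checksum, extract it into a directory named after that checksum, and
    # return either the path to its cfbs.json index (with_index=True) or the
    # extracted content, optionally rsynced into `directory`.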
    assert url.endswith(SUPPORTED_ARCHIVES)

    url_path = url[url.index("://") + 3:]
    archive_dirname = os.path.dirname(url_path)
    archive_filename = os.path.basename(url_path)

    for ext in SUPPORTED_ARCHIVES:
        if archive_filename.endswith(ext):
            archive_type = ext
            break
    else:
        user_error("Unsupported archive type: '%s'" % url)

    archive_name = strip_right(archive_filename, archive_type)
    downloads = os.path.join(cfbs_dir(), "downloads")

    archive_dir = os.path.join(downloads, archive_dirname)
    mkdir(archive_dir)

    archive_path = os.path.join(downloads, archive_dir, archive_filename)
    try:
        archive_checksum = fetch_url(url, archive_path, checksum)
    except FetchError as e:
        user_error(str(e))

    content_dir = os.path.join(downloads, archive_dir, archive_checksum)
    index_path = os.path.join(content_dir, "cfbs.json")
    if with_index and os.path.exists(index_path):
        # available already
        return (index_path, archive_checksum)
    else:
        mkdir(content_dir)

    # TODO: use Python modules instead of CLI tools?
    if archive_type.startswith(_SUPPORTED_TAR_TYPES):
        if shutil.which("tar"):
            sh("cd %s; tar -xf %s" % (content_dir, archive_path))
        else:
            user_error("Working with .tar archives requires the 'tar' utility")
    elif archive_type == (".zip"):
        if shutil.which("unzip"):
            sh("cd %s; unzip %s" % (content_dir, archive_path))
        else:
            user_error(
                "Working with .zip archives requires the 'unzip' utility")
    else:
        raise RuntimeError(
            "Unhandled archive type: '%s'. Please report this at %s." %
            (url, "https://github.com/cfengine/cfbs/issues"))

    os.unlink(archive_path)

    content_root_items = [
        os.path.join(content_dir, item) for item in os.listdir(content_dir)
    ]
    if (with_index and len(content_root_items) == 1
            and os.path.isdir(content_root_items[0]) and os.path.exists(
                os.path.join(content_root_items[0], "cfbs.json"))):
        # the archive contains a top-level folder, let's just move things one
        # level up from inside it
        sh("mv %s %s" %
           (os.path.join(content_root_items[0], "*"), content_dir))
        shutil.rmtree(content_root_items[0])

    if with_index:
        if os.path.exists(index_path):
            return (index_path, archive_checksum)
        else:
            user_error(
                "Archive '%s' doesn't contain a valid cfbs.json index file" %
                url)
    else:
        if directory is not None:
            directory = directory.rstrip("/")
            mkdir(os.path.dirname(directory))
            sh("rsync -a %s/ %s/" % (content_dir, directory))
            rm(content_dir)
            return (directory, archive_checksum)
        return (content_dir, archive_checksum)
Example No. 8
def _perform_build_step(module, step, max_length):
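    # Apply one build step from the module's staged directory into
    # out/masterfiles; in addition to copy/run/delete/json/append, this
    # version supports "directory", which walks a tree, copies .cf files,
    # registers them as inputs and merges any def.json files it finds.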
    step = step.split(" ")
    operation, args = step[0], step[1:]
    source = module["_directory"]
    counter = module["_counter"]
    destination = "out/masterfiles"

    prefix = "%03d %s :" % (counter, pad_right(module["name"], max_length))

    if operation == "copy":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("%s copy '%s' 'masterfiles/%s'" % (prefix, src, dst))
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        cp(src, dst)
    elif operation == "run":
        shell_command = " ".join(args)
        print("%s run '%s'" % (prefix, shell_command))
        sh(shell_command, source)
    elif operation == "delete":
        files = [args] if type(args) is str else args
        assert len(files) > 0
        as_string = " ".join(["'%s'" % f for f in files])
        print("%s delete %s" % (prefix, as_string))
        for file in files:
            rm(os.path.join(source, file))
    elif operation == "json":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("%s json '%s' 'masterfiles/%s'" % (prefix, src, dst))
        if not os.path.isfile(os.path.join(source, src)):
            user_error("'%s' is not a file" % src)
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        extras, original = read_json(src), read_json(dst)
        if not extras:
            print("Warning: '%s' looks empty, adding nothing" %
                  os.path.basename(src))
        if original:
            merged = merge_json(original, extras)
        else:
            merged = extras
        write_json(dst, merged)
    elif operation == "append":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("%s append '%s' 'masterfiles/%s'" % (prefix, src, dst))
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        if not os.path.exists(dst):
            touch(dst)
        assert os.path.isfile(dst)
        sh("cat '%s' >> '%s'" % (src, dst))
    elif operation == "directory":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("{} directory '{}' 'masterfiles/{}'".format(prefix, src, dst))
        dstarg = dst  # save this for adding .cf files to inputs
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        defjson = os.path.join(destination, "def.json")
        merged = read_json(defjson)
        if not merged:
            merged = {}
        if "classes" not in merged:
            merged["classes"] = {}
        if "services_autorun_bundles" not in merged["classes"]:
            merged["classes"]["services_autorun_bundles"] = ["any"]
        inputs = []
        for root, dirs, files in os.walk(src):
            for f in files:
                if f.endswith(".cf"):
                    inputs.append(os.path.join(dstarg, f))
                    cp(os.path.join(root, f),
                       os.path.join(destination, dstarg, f))
                elif f == "def.json":
                    extra = read_json(os.path.join(root, f))
                    if extra:
                        merged = merge_json(merged, extra)
                else:
                    cp(os.path.join(root, f),
                       os.path.join(destination, dstarg, f))
        if "inputs" in merged:
            merged["inputs"].extend(inputs)
        else:
            merged["inputs"] = inputs
        write_json(defjson, merged)
    else:
        user_error("Unknown build step operation: %s" % operation)