import os
import shutil

# NOTE: Helpers used below (sh, cp, rm, mkdir, touch, user_error, pad_right,
# strip_right, read_json, write_json, merge_json, get_json, fetch_url,
# FetchError, pretty_file, local_module_copy, get_download_path,
# is_a_commit_hash, ...) and constants (SUPPORTED_ARCHIVES,
# _SUPPORTED_TAR_TYPES, _MODULES_URL, _VERSION_INDEX) are defined elsewhere
# in this package.


def _download_dependencies(config, prefer_offline=False, redownload=False):
    # TODO: This function should be split in 2:
    #       1. Code for downloading things into ~/.cfengine
    #       2. Code for copying things into ./out
    print("\nModules:")
    counter = 1
    max_length = config.longest_module_name()
    downloads = os.path.join(cfbs_dir(), "downloads")
    for module in config["build"]:
        name = module["name"]
        if name.startswith("./"):
            local_module_copy(module, counter, max_length)
            counter += 1
            continue
        if "commit" not in module:
            user_error("module %s must have a commit property" % name)
        commit = module["commit"]
        if not is_a_commit_hash(commit):
            user_error("'%s' is not a commit reference" % commit)

        url = module.get("url") or module["repo"]
        url = strip_right(url, ".git")
        commit_dir = get_download_path(module)
        if redownload:
            rm(commit_dir, missing_ok=True)
        if "subdirectory" in module:
            module_dir = os.path.join(commit_dir, module["subdirectory"])
        else:
            module_dir = commit_dir
        if not os.path.exists(module_dir):
            if url.endswith(SUPPORTED_ARCHIVES):
                fetch_archive(url, commit)
            elif "index" in module:
                sh("git clone %s %s" % (url, commit_dir))
                sh("(cd %s && git checkout %s)" % (commit_dir, commit))
            else:
                versions = get_json(_VERSION_INDEX)
                try:
                    checksum = versions[name][module["version"]]["archive_sha256"]
                except KeyError:
                    user_error("Cannot verify checksum of the '%s' module" % name)
                module_archive_url = os.path.join(_MODULES_URL, name, commit + ".tar.gz")
                fetch_archive(
                    module_archive_url, checksum, directory=commit_dir, with_index=False
                )
        target = "out/steps/%03d_%s_%s/" % (counter, module["name"], commit)
        module["_directory"] = target
        module["_counter"] = counter
        subdirectory = module.get("subdirectory", None)
        if not subdirectory:
            cp(commit_dir, target)
        else:
            cp(os.path.join(commit_dir, subdirectory), target)
        print("%03d %s @ %s (Downloaded)" % (counter, pad_right(name, max_length), commit))
        counter += 1

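
# A minimal sketch of the kind of "build" entry the loop above consumes
# (field values are hypothetical, not from a real index):
#
#   {
#       "name": "example-module",
#       "repo": "https://github.com/example/example-module",
#       "commit": "0123456789abcdef0123456789abcdef01234567",
#       "subdirectory": "modules/example",
#       "steps": ["copy example.cf services/"]
#   }
#
# Local modules ("./...") are copied as-is; everything else is resolved via
# git clone + checkout or a checksummed archive download, and then copied
# into out/steps/<counter>_<name>_<commit>/.
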
def download_dependencies(prefer_offline=False, redownload=False):
    print("\nModules:")
    counter = 1
    definition = get_definition()
    max_length = longest_module_name()
    for module in definition["build"]:
        name = module["name"]
        if name.startswith("./"):
            local_module_copy(module, counter, max_length)
            counter += 1
            continue
        commit = module["commit"]
        url = strip_right(module["repo"], ".git")
        commit_dir = get_download_path(module)
        if redownload:
            rm(commit_dir, missing_ok=True)
        if not os.path.exists(commit_dir):
            sh(f"git clone {url} {commit_dir}")
            sh(f"(cd {commit_dir} && git checkout {commit})")
        target = f"out/steps/{counter:03d}_{module['name']}_{commit}/"
        module["_directory"] = target
        module["_counter"] = counter
        subdirectory = module.get("subdirectory", None)
        if not subdirectory:
            cp(commit_dir, target)
        else:
            cp(os.path.join(commit_dir, subdirectory), target)
        print(f"{counter:03d} {pad_right(name, max_length)} @ {commit} (Downloaded)")
        counter += 1

def build_step(module, step, max_length):
    step = step.split(" ")
    operation, args = step[0], step[1:]
    source = module["_directory"]
    counter = module["_counter"]
    destination = "out/masterfiles"
    prefix = f"{counter:03d} {pad_right(module['name'], max_length)} :"
    if operation == "copy":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print(f"{prefix} copy '{src}' 'masterfiles/{dst}'")
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        cp(src, dst)
    elif operation == "run":
        shell_command = " ".join(args)
        print(f"{prefix} run '{shell_command}'")
        sh(shell_command, source)
    elif operation == "delete":
        files = [args] if type(args) is str else args
        assert len(files) > 0
        as_string = " ".join([f"'{f}'" for f in files])
        print(f"{prefix} delete {as_string}")
        for file in files:
            rm(os.path.join(source, file))
    elif operation == "json":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print(f"{prefix} json '{src}' 'masterfiles/{dst}'")
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        extras, original = read_json(src), read_json(dst)
        assert extras is not None
        if not extras:
            print(f"Warning: '{os.path.basename(src)}' looks empty, adding nothing")
        if original:
            merged = merge_json(original, extras)
        else:
            merged = extras
        write_json(dst, merged)
    elif operation == "append":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print(f"{prefix} append '{src}' 'masterfiles/{dst}'")
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        if not os.path.exists(dst):
            touch(dst)
        assert os.path.isfile(dst)
        sh(f"cat '{src}' >> '{dst}'")
    else:
        user_error(f"Unknown build step operation: {operation}")

def perform_build_steps(config) -> int:
    print("\nSteps:")
    module_name_length = config.longest_module_name()
    for module in config["build"]:
        for step in module["steps"]:
            _perform_build_step(module, step, module_name_length)
    if os.path.isfile("out/masterfiles/def.json"):
        pretty_file("out/masterfiles/def.json")
    print("")
    print("Generating tarball...")
    sh("( cd out/ && tar -czf masterfiles.tgz masterfiles )")
    print("\nBuild complete, ready to deploy 🐿")
    print(" -> Directory: out/masterfiles")
    print(" -> Tarball: out/masterfiles.tgz")
    print("")
    print("To install on this machine: sudo cfbs install")
    print("To deploy on remote hub(s): cf-remote deploy")
    return 0

def clone_url_repo(repo_url):
    assert repo_url.startswith(("https://", "ssh://", "git://"))

    commit = None
    if "@" in repo_url and (repo_url.rindex("@") > repo_url.rindex(".")):
        # commit specified in the url
        repo_url, commit = repo_url.rsplit("@", 1)
        if not is_a_commit_hash(commit):
            user_error("'%s' is not a commit reference" % commit)

    downloads = os.path.join(cfbs_dir(), "downloads")

    repo_path = _get_path_from_url(repo_url)
    repo_dir = os.path.join(downloads, repo_path)
    os.makedirs(repo_dir, exist_ok=True)

    if commit is not None:
        commit_path = os.path.join(repo_dir, commit)
        _clone_and_checkout(repo_url, commit_path, commit)
    else:
        master_path = os.path.join(repo_dir, "master")
        sh("git clone %s %s" % (repo_url, master_path))
        commit = _get_git_repo_commit_sha(master_path)

        commit_path = os.path.join(repo_dir, commit)
        if os.path.exists(commit_path):
            # Already cloned in the commit dir, just remove the 'master' clone
            sh("rm -rf %s" % master_path)
        else:
            sh("mv %s %s" % (master_path, commit_path))

    json_path = os.path.join(commit_path, "cfbs.json")
    if os.path.exists(json_path):
        return (json_path, commit)
    else:
        user_error(
            "Repository '%s' doesn't contain a valid cfbs.json index file" % repo_url
        )

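
# Example of the URL convention handled above (hypothetical repository and
# hash): a trailing "@<commit>" after the last "." pins the clone to that
# commit; otherwise the default branch is cloned and its HEAD sha is used:
#
#   clone_url_repo("https://github.com/example/repo@0123456789abcdef0123456789abcdef01234567")
#   clone_url_repo("https://github.com/example/repo")
#
# Both return a (path_to_cfbs_json, commit_sha) tuple on success.
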
def fetch_archive(url, checksum=None, directory=None, with_index=True):
    assert url.endswith(SUPPORTED_ARCHIVES)

    url_path = url[url.index("://") + 3 :]
    archive_dirname = os.path.dirname(url_path)
    archive_filename = os.path.basename(url_path)

    for ext in SUPPORTED_ARCHIVES:
        if archive_filename.endswith(ext):
            archive_type = ext
            break
    else:
        user_error("Unsupported archive type: '%s'" % url)

    archive_name = strip_right(archive_filename, archive_type)
    downloads = os.path.join(cfbs_dir(), "downloads")

    archive_dir = os.path.join(downloads, archive_dirname)
    mkdir(archive_dir)

    # archive_dir is already an absolute path under the downloads directory,
    # so join relative to it directly
    archive_path = os.path.join(archive_dir, archive_filename)
    try:
        archive_checksum = fetch_url(url, archive_path, checksum)
    except FetchError as e:
        user_error(str(e))

    content_dir = os.path.join(archive_dir, archive_checksum)
    index_path = os.path.join(content_dir, "cfbs.json")
    if with_index and os.path.exists(index_path):
        # available already
        return (index_path, archive_checksum)
    else:
        mkdir(content_dir)

    # TODO: use Python modules instead of CLI tools?
    if archive_type.startswith(_SUPPORTED_TAR_TYPES):
        if shutil.which("tar"):
            sh("cd %s; tar -xf %s" % (content_dir, archive_path))
        else:
            user_error("Working with .tar archives requires the 'tar' utility")
    elif archive_type == ".zip":
        if shutil.which("unzip"):
            sh("cd %s; unzip %s" % (content_dir, archive_path))
        else:
            user_error("Working with .zip archives requires the 'unzip' utility")
    else:
        raise RuntimeError(
            "Unhandled archive type: '%s'. Please report this at %s."
            % (url, "https://github.com/cfengine/cfbs/issues")
        )

    os.unlink(archive_path)

    content_root_items = [
        os.path.join(content_dir, item) for item in os.listdir(content_dir)
    ]
    if (
        with_index
        and len(content_root_items) == 1
        and os.path.isdir(content_root_items[0])
        and os.path.exists(os.path.join(content_root_items[0], "cfbs.json"))
    ):
        # the archive contains a top-level folder, let's just move things one
        # level up from inside it
        sh("mv %s %s" % (os.path.join(content_root_items[0], "*"), content_dir))
        shutil.rmtree(content_root_items[0])

    if with_index:
        if os.path.exists(index_path):
            return (index_path, archive_checksum)
        else:
            user_error(
                "Archive '%s' doesn't contain a valid cfbs.json index file" % url
            )
    else:
        if directory is not None:
            directory = directory.rstrip("/")
            mkdir(os.path.dirname(directory))
            sh("rsync -a %s/ %s/" % (content_dir, directory))
            rm(content_dir)
            return (directory, archive_checksum)
        return (content_dir, archive_checksum)

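
# Usage sketch (hypothetical URL and checksum): download and unpack a module
# tarball into a target directory, letting fetch_url verify it against an
# expected checksum, and skipping the cfbs.json index lookup:
#
#   path, sha256 = fetch_archive(
#       "https://example.com/modules/foo/abcdef.tar.gz",
#       checksum="<expected sha256>",
#       directory="/tmp/foo",
#       with_index=False,
#   )
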
def _clone_and_checkout(url, path, commit):
    # NOTE: If any of these shell (git) commands fail, we will exit
    if not os.path.exists(os.path.join(path, ".git")):
        sh("git clone --no-checkout %s %s" % (url, path))
    sh("git checkout " + commit, directory=path)

def _perform_build_step(module, step, max_length):
    step = step.split(" ")
    operation, args = step[0], step[1:]
    source = module["_directory"]
    counter = module["_counter"]
    destination = "out/masterfiles"
    prefix = "%03d %s :" % (counter, pad_right(module["name"], max_length))
    if operation == "copy":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("%s copy '%s' 'masterfiles/%s'" % (prefix, src, dst))
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        cp(src, dst)
    elif operation == "run":
        shell_command = " ".join(args)
        print("%s run '%s'" % (prefix, shell_command))
        sh(shell_command, source)
    elif operation == "delete":
        files = [args] if type(args) is str else args
        assert len(files) > 0
        as_string = " ".join(["'%s'" % f for f in files])
        print("%s delete %s" % (prefix, as_string))
        for file in files:
            rm(os.path.join(source, file))
    elif operation == "json":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("%s json '%s' 'masterfiles/%s'" % (prefix, src, dst))
        if not os.path.isfile(os.path.join(source, src)):
            user_error("'%s' is not a file" % src)
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        extras, original = read_json(src), read_json(dst)
        if not extras:
            print("Warning: '%s' looks empty, adding nothing" % os.path.basename(src))
        if original:
            merged = merge_json(original, extras)
        else:
            merged = extras
        write_json(dst, merged)
    elif operation == "append":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("%s append '%s' 'masterfiles/%s'" % (prefix, src, dst))
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        if not os.path.exists(dst):
            touch(dst)
        assert os.path.isfile(dst)
        sh("cat '%s' >> '%s'" % (src, dst))
    elif operation == "directory":
        src, dst = args
        if dst in [".", "./"]:
            dst = ""
        print("{} directory '{}' 'masterfiles/{}'".format(prefix, src, dst))
        dstarg = dst  # save this for adding .cf files to inputs
        src, dst = os.path.join(source, src), os.path.join(destination, dst)
        defjson = os.path.join(destination, "def.json")
        merged = read_json(defjson)
        if not merged:
            merged = {}
        if "classes" not in merged:
            merged["classes"] = {}
        if "services_autorun_bundles" not in merged["classes"]:
            merged["classes"]["services_autorun_bundles"] = ["any"]
        inputs = []
        for root, dirs, files in os.walk(src):
            for f in files:
                if f.endswith(".cf"):
                    inputs.append(os.path.join(dstarg, f))
                    cp(os.path.join(root, f), os.path.join(destination, dstarg, f))
                elif f == "def.json":
                    extra = read_json(os.path.join(root, f))
                    if extra:
                        merged = merge_json(merged, extra)
                else:
                    cp(os.path.join(root, f), os.path.join(destination, dstarg, f))
        if "inputs" in merged:
            merged["inputs"].extend(inputs)
        else:
            merged["inputs"] = inputs
        write_json(defjson, merged)
    else:
        user_error("Unknown build step operation: %s" % operation)
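
# The build-step strings dispatched above come straight from a module's
# "steps" list. A hypothetical module exercising each operation:
#
#   "steps": [
#       "copy main.cf services/",
#       "json def.json def.json",
#       "append extra.cf promises.cf",
#       "delete README.md",
#       "run ./generate.sh",
#       "directory ./"
#   ]
#
# e.g. _perform_build_step(module, "copy main.cf services/", max_length)
# copies <module dir>/main.cf into out/masterfiles/services/.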