def download_dependencies(prefer_offline=False, redownload=False):
    """Fetch every module listed in the build definition into the download
    cache and copy each one into its numbered out/steps/ directory.

    :param prefer_offline: currently unused — NOTE(review): confirm whether
                           callers expect it to skip network access.
    :param redownload: when True, delete any cached copy and clone again.
    """
    print("\nModules:")
    definition = get_definition()
    max_length = longest_module_name()
    # The step counter advances once per module, local or remote, so
    # enumerate() is equivalent to the manual counter.
    for counter, module in enumerate(definition["build"], start=1):
        name = module["name"]
        if name.startswith("./"):
            # Local modules are copied straight from the working tree.
            local_module_copy(module, counter, max_length)
            continue
        commit = module["commit"]
        url = strip_right(module["repo"], ".git")
        commit_dir = get_download_path(module)
        if redownload:
            rm(commit_dir, missing_ok=True)
        if not os.path.exists(commit_dir):
            # Clone once per (repo, commit); later runs reuse the cache.
            sh(f"git clone {url} {commit_dir}")
            sh(f"(cd {commit_dir} && git checkout {commit})")
        target = f"out/steps/{counter:03d}_{module['name']}_{commit}/"
        module["_directory"] = target
        module["_counter"] = counter
        subdirectory = module.get("subdirectory", None)
        source = commit_dir if not subdirectory else os.path.join(commit_dir, subdirectory)
        cp(source, target)
        print(
            f"{counter:03d} {pad_right(name, max_length)} @ {commit} (Downloaded)"
        )
def _download_dependencies(config, prefer_offline=False, redownload=False):
    """Download all remote modules from *config*["build"] into the
    ~/.cfengine downloads cache and copy them into ./out/steps/.

    Fix: removed the local ``downloads`` variable — it was computed from
    ``cfbs_dir()`` but never used anywhere in the function.

    :param config: build configuration; iterable "build" list of module dicts.
    :param prefer_offline: currently unused — NOTE(review): confirm intent.
    :param redownload: when True, purge the cached copy and fetch again.
    """
    # TODO: This function should be split in 2:
    # 1. Code for downloading things into ~/.cfengine
    # 2. Code for copying things into ./out
    print("\nModules:")
    counter = 1
    max_length = config.longest_module_name()
    for module in config["build"]:
        name = module["name"]
        if name.startswith("./"):
            # Local modules come straight from the working tree.
            local_module_copy(module, counter, max_length)
            counter += 1
            continue
        if "commit" not in module:
            user_error("module %s must have a commit property" % name)
        commit = module["commit"]
        if not is_a_commit_hash(commit):
            user_error("'%s' is not a commit reference" % commit)
        url = module.get("url") or module["repo"]
        url = strip_right(url, ".git")
        commit_dir = get_download_path(module)
        if redownload:
            rm(commit_dir, missing_ok=True)
        if "subdirectory" in module:
            module_dir = os.path.join(commit_dir, module["subdirectory"])
        else:
            module_dir = commit_dir
        if not os.path.exists(module_dir):
            if url.endswith(SUPPORTED_ARCHIVES):
                # NOTE(review): 'commit' is passed as fetch_archive's checksum
                # argument — presumably for archive URLs the commit property
                # holds the archive checksum; confirm against callers.
                fetch_archive(url, commit)
            elif "index" in module:
                sh("git clone %s %s" % (url, commit_dir))
                sh("(cd %s && git checkout %s)" % (commit_dir, commit))
            else:
                # Indexed modules: verify the archive against the published
                # checksum before use.
                versions = get_json(_VERSION_INDEX)
                try:
                    checksum = versions[name][module["version"]]["archive_sha256"]
                except KeyError:
                    user_error("Cannot verify checksum of the '%s' module" % name)
                module_archive_url = os.path.join(
                    _MODULES_URL, name, commit + ".tar.gz")
                fetch_archive(
                    module_archive_url, checksum,
                    directory=commit_dir, with_index=False)
        target = "out/steps/%03d_%s_%s/" % (counter, module["name"], commit)
        module["_directory"] = target
        module["_counter"] = counter
        subdirectory = module.get("subdirectory", None)
        if not subdirectory:
            cp(commit_dir, target)
        else:
            cp(os.path.join(commit_dir, subdirectory), target)
        print("%03d %s @ %s (Downloaded)" %
              (counter, pad_right(name, max_length), commit))
        counter += 1
def get_download_path(module) -> str:
    """Return the per-commit cache directory for *module*, creating the
    repository directory under ~/.cfengine/downloads/github.com/ if needed.

    Only GitHub HTTPS repo URLs are accepted (asserted below).
    """
    commit = module["commit"]
    repo_url = strip_right(module["repo"], ".git")
    prefix = "https://github.com/"
    assert repo_url.startswith(prefix)
    # "<user>/<repo>" is whatever follows the github.com prefix.
    user, repo = strip_left(repo_url, prefix).split("/")
    repo_dir = os.path.join(cfbs_dir(), "downloads", "github.com", user, repo)
    mkdir(repo_dir)
    return os.path.join(repo_dir, commit)
def get_download_path(module) -> str:
    """Return the per-commit cache directory for *module* under
    ~/.cfengine/downloads/<host>/<path>/, creating it if needed.

    Validates that the module's commit property is a commit hash.
    """
    commit = module["commit"]
    if not is_a_commit_hash(commit):
        user_error("'%s' is not a commit reference" % commit)
    url = module.get("url") or module["repo"]
    if url.endswith(SUPPORTED_ARCHIVES):
        # Archives are cached under the archive's parent directory.
        url = os.path.dirname(url)
    else:
        url = strip_right(url, ".git")
    # Drop the protocol ("https://", etc.) to get a filesystem-friendly path.
    scheme_end = url.index("://") + 3
    repo_dir = os.path.join(cfbs_dir(), "downloads", url[scheme_end:])
    mkdir(repo_dir)
    return os.path.join(repo_dir, commit)
def _get_path_from_url(url):
    """Map a module URL to a filesystem-friendly relative path.

    https://, ssh:// and git:// URLs are supported; any other protocol is
    rejected via user_error().  A string without "://" is treated as a local
    path and returned with trailing slashes removed.  For ssh:// URLs the
    leading "user@" is dropped so the result is user-independent.

    Fix: trailing slashes are now stripped BEFORE the ".git" suffix, so
    URLs such as "https://host/repo.git/" reduce to "host/repo" instead of
    "host/repo.git".
    """
    if not url.startswith(("https://", "ssh://", "git://")):
        if "://" in url:
            return user_error("Unsupported URL protocol in '%s'" % url)
        else:
            # It's a path already, just remove trailing slashes (if any).
            return url.rstrip("/")

    path = None
    if url.startswith("ssh://"):
        match = re.match(r"ssh://(\w+)@(.+)", url)
        if match is not None:
            # Everything after "user@" is the host/path part.
            path = match[2]
    path = path or url[url.index("://") + 3:]
    path = path.rstrip("/")
    path = strip_right(path, ".git")
    return path
def fetch_archive(url, checksum=None, directory=None, with_index=True):
    """Download and unpack an archive module into the downloads cache.

    :param url: archive URL; must end with one of SUPPORTED_ARCHIVES.
    :param checksum: expected checksum, forwarded to fetch_url for
                     verification (None skips verification — presumably;
                     confirm fetch_url's contract).
    :param directory: when with_index is False, optional final destination
                      the extracted content is rsynced into.
    :param with_index: when True the archive must contain a cfbs.json index
                       and (index_path, checksum) is returned; when False
                       (directory-or-content_dir, checksum) is returned.
    """
    assert url.endswith(SUPPORTED_ARCHIVES)
    # Path part of the URL (everything after "://").
    url_path = url[url.index("://") + 3:]
    archive_dirname = os.path.dirname(url_path)
    archive_filename = os.path.basename(url_path)
    # Find which supported extension this archive uses; for/else runs
    # user_error only if no extension matched.
    for ext in SUPPORTED_ARCHIVES:
        if archive_filename.endswith(ext):
            archive_type = ext
            break
    else:
        user_error("Unsupported archive type: '%s'" % url)
    archive_name = strip_right(archive_filename, archive_type)
    downloads = os.path.join(cfbs_dir(), "downloads")
    archive_dir = os.path.join(downloads, archive_dirname)
    mkdir(archive_dir)
    # NOTE(review): archive_dir is already rooted at downloads, so the extra
    # downloads component is redundant (os.path.join keeps the absolute
    # archive_dir) — harmless, but worth confirming before simplifying.
    archive_path = os.path.join(downloads, archive_dir, archive_filename)
    try:
        # fetch_url returns the archive's checksum (verified against
        # 'checksum' when given — TODO confirm).
        archive_checksum = fetch_url(url, archive_path, checksum)
    except FetchError as e:
        user_error(str(e))
    # Extracted content lives in a directory named after the checksum.
    content_dir = os.path.join(downloads, archive_dir, archive_checksum)
    index_path = os.path.join(content_dir, "cfbs.json")
    if with_index and os.path.exists(index_path):
        # available already
        return (index_path, archive_checksum)
    else:
        mkdir(content_dir)
        # TODO: use Python modules instead of CLI tools?
        # _SUPPORTED_TAR_TYPES is presumably a tuple of tar extensions
        # (str.startswith accepts a tuple) — confirm its definition.
        if archive_type.startswith(_SUPPORTED_TAR_TYPES):
            if shutil.which("tar"):
                sh("cd %s; tar -xf %s" % (content_dir, archive_path))
            else:
                user_error("Working with .tar archives requires the 'tar' utility")
        elif archive_type == (".zip"):
            if shutil.which("unzip"):
                sh("cd %s; unzip %s" % (content_dir, archive_path))
            else:
                user_error(
                    "Working with .zip archives requires the 'unzip' utility")
        else:
            raise RuntimeError(
                "Unhandled archive type: '%s'. Please report this at %s."
                % (url, "https://github.com/cfengine/cfbs/issues"))
    # The compressed archive is no longer needed once extracted.
    os.unlink(archive_path)
    content_root_items = [
        os.path.join(content_dir, item) for item in os.listdir(content_dir)
    ]
    if (with_index and len(content_root_items) == 1
            and os.path.isdir(content_root_items[0]) and os.path.exists(
                os.path.join(content_root_items[0], "cfbs.json"))):
        # the archive contains a top-level folder, let's just move things one
        # level up from inside it
        sh("mv %s %s" % (os.path.join(content_root_items[0], "*"), content_dir))
        shutil.rmtree(content_root_items[0])
    if with_index:
        if os.path.exists(index_path):
            return (index_path, archive_checksum)
        else:
            user_error(
                "Archive '%s' doesn't contain a valid cfbs.json index file"
                % url)
    else:
        if directory is not None:
            directory = directory.rstrip("/")
            mkdir(os.path.dirname(directory))
            # Copy extracted content to the requested destination, then
            # drop the cache copy.
            sh("rsync -a %s/ %s/" % (content_dir, directory))
            rm(content_dir)
            return (directory, archive_checksum)
        return (content_dir, archive_checksum)