import hashlib
import multiprocessing
import os
from distutils.dir_util import copy_tree  # deprecated on Python 3.12+; a shutil.copytree(..., dirs_exist_ok=True) wrapper is the modern equivalent
from mimetypes import MimeTypes
from shutil import copyfile, rmtree

from git import Repo  # GitPython

# exists_in_cache, fetch_helm_archive, fetch_http_source, fetch_git_source,
# unpack_downloaded_file, safe_copy_file, safe_copy_tree,
# GitSubdirNotFoundError and logger are assumed to be defined elsewhere
# in this module.


def fetch_helm_chart(dep_mapping, save_dir, force):
    """
    downloads a helm chart and its subcharts from source, then untars
    and moves it to save_dir
    """
    source, deps = dep_mapping

    # hash the repo URL to avoid collisions between charts that share
    # the same source.chart_name/source.version but live in different repos
    path_hash = hashlib.sha256(source.repo.encode()).hexdigest()[:8]
    cached_repo_path = os.path.join(
        save_dir, path_hash, source.chart_name + "-" + (source.version or "latest")
    )

    if force or not exists_in_cache(cached_repo_path):
        fetch_helm_archive(
            source.helm_path, source.repo, source.chart_name, source.version, cached_repo_path
        )
    else:
        logger.debug("Using cached helm chart at %s", cached_repo_path)

    for dep in deps:
        output_path = dep["output_path"]
        parent_dir = os.path.dirname(output_path)
        if parent_dir != "":
            os.makedirs(parent_dir, exist_ok=True)

        if force:
            copy_tree(cached_repo_path, output_path)
        else:
            safe_copy_tree(cached_repo_path, output_path)

        logger.info("Dependency %s: saved to %s", source.chart_name, output_path)
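# Usage sketch for fetch_helm_chart (illustrative only). dep_mapping is a
# (source, deps) tuple: the function reads repo/chart_name/version/helm_path
# attributes off source, and deps is a list of dicts keyed by "output_path".
# The HelmSource namedtuple, repo URL and paths below are hypothetical
# stand-ins for whatever the real caller builds from its configuration.
def _example_fetch_helm_chart():  # pragma: no cover - illustrative sketch
    from collections import namedtuple

    HelmSource = namedtuple("HelmSource", ["repo", "chart_name", "version", "helm_path"])
    source = HelmSource(
        repo="https://charts.example.org",  # hypothetical chart repository
        chart_name="nginx",
        version="1.2.3",  # None would cache under "<chart_name>-latest"
        helm_path=None,  # assumed optional; passed through to fetch_helm_archive
    )
    deps = [{"output_path": "components/charts/nginx"}]
    fetch_helm_chart((source, deps), save_dir="/tmp/dependency-cache", force=False)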
def fetch_http_dependency(dep_mapping, save_dir, force, item_type="Dependency"):
    """
    fetches an http[s] file at source and saves it into save_dir, after which
    it is copied into the output_path stored in dep_mapping
    """
    source, deps = dep_mapping

    # hash the dirname to avoid collisions between sources sharing a basename
    path_hash = hashlib.sha256(os.path.dirname(source).encode()).hexdigest()[:8]
    cached_source_path = os.path.join(save_dir, path_hash + os.path.basename(source))

    if not exists_in_cache(cached_source_path) or force:
        content_type = fetch_http_source(source, cached_source_path, item_type)
    else:
        logger.debug("Using cached %s %s", item_type, cached_source_path)
        content_type = MimeTypes().guess_type(cached_source_path)[0]

    for dep in deps:
        output_path = dep["output_path"]
        if dep.get("unpack", False):
            # ensure that the directory we are extracting to exists
            os.makedirs(output_path, exist_ok=True)
            if force:
                is_unpacked = unpack_downloaded_file(cached_source_path, output_path, content_type)
            else:
                # unpack into a per-process staging directory first so that
                # files already present in output_path are never overwritten
                unpack_output = os.path.join(
                    save_dir, "extracted-" + str(multiprocessing.current_process().name)
                )
                os.makedirs(unpack_output)
                is_unpacked = unpack_downloaded_file(cached_source_path, unpack_output, content_type)
                safe_copy_tree(unpack_output, output_path)
                # delete the staging directory
                rmtree(unpack_output)

            if is_unpacked:
                logger.info("%s %s: extracted to %s", item_type, source, output_path)
            else:
                logger.info(
                    "%s %s: Content-Type %s is not supported for unpack. Ignoring save",
                    item_type,
                    source,
                    content_type,
                )
        else:
            # we are downloading a single file
            parent_dir = os.path.dirname(output_path)
            if parent_dir != "":
                os.makedirs(parent_dir, exist_ok=True)
            if force:
                copyfile(cached_source_path, output_path)
            else:
                safe_copy_file(cached_source_path, output_path)
            logger.info("%s %s: saved to %s", item_type, source, output_path)
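# Usage sketch for fetch_http_dependency (illustrative only). The URL and
# output paths are hypothetical. An entry with "unpack": True extracts the
# archive into output_path; without it the file is copied there as-is.
def _example_fetch_http_dependency():  # pragma: no cover - illustrative sketch
    source = "https://example.com/releases/manifests.tar.gz"
    deps = [
        {"output_path": "components/manifests", "unpack": True},  # extract archive
        {"output_path": "copies/manifests.tar.gz"},  # keep the raw file
    ]
    fetch_http_dependency((source, deps), save_dir="/tmp/dependency-cache", force=False)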
def fetch_git_dependency(dep_mapping, save_dir, force, item_type="Dependency"):
    """
    fetches a git repository at source into save_dir, and copies the
    repository into the output_path stored in dep_mapping. ref is used for
    checkout if it exists; the master branch is checked out by default.
    if subdir is specified, only that subdirectory is copied into output_path.
    """
    source, deps = dep_mapping

    # hash the dirname to avoid collisions between sources sharing a basename
    path_hash = hashlib.sha256(os.path.dirname(source).encode()).hexdigest()[:8]
    cached_repo_path = os.path.join(save_dir, path_hash + os.path.basename(source))

    if not exists_in_cache(cached_repo_path) or force:
        fetch_git_source(source, cached_repo_path, item_type)
    else:
        logger.debug("Using cached %s %s", item_type, cached_repo_path)

    for dep in deps:
        repo = Repo(cached_repo_path)
        output_path = dep["output_path"]
        copy_src_path = cached_repo_path

        if "ref" in dep:
            ref = dep["ref"]
            repo.git.checkout(ref)
        else:
            repo.git.checkout("master")  # default ref

        if "subdir" in dep:
            sub_dir = dep["subdir"]
            full_subdir = os.path.join(cached_repo_path, sub_dir)
            if os.path.isdir(full_subdir):
                copy_src_path = full_subdir
            else:
                raise GitSubdirNotFoundError(
                    "{} {}: subdir {} not found in repo".format(item_type, source, sub_dir)
                )

        if force:
            copy_tree(copy_src_path, output_path)
        else:
            safe_copy_tree(copy_src_path, output_path)

        logger.info("%s %s: saved to %s", item_type, source, output_path)
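# Usage sketch for fetch_git_dependency (illustrative only). The repository
# URL, ref and subdir below are hypothetical. Omitting "ref" falls back to
# checking out master; a "subdir" that does not exist in the repo raises
# GitSubdirNotFoundError.
def _example_fetch_git_dependency():  # pragma: no cover - illustrative sketch
    source = "https://github.com/example/config-repo.git"
    deps = [
        {
            "output_path": "components/config-repo",
            "ref": "v1.0.0",  # any ref accepted by `git checkout`
            "subdir": "manifests",  # copy only this subdirectory
        }
    ]
    fetch_git_dependency((source, deps), save_dir="/tmp/dependency-cache", force=False)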