def test_compile_helm_input(self):
    """compile targets with helm inputs in parallel

    Compiles the helm-input targets twice -- once in-process via main()
    and once through the standalone binary -- and asserts both produce
    byte-identical output trees.
    """
    temp_main = tempfile.mkdtemp()
    sys.argv = [
        "kapitan",
        "compile",
        "--output-path",
        temp_main,
        "-t",
        "nginx-ingress",
        "nginx-ingress-helm-params",
        "acs-engine-autoscaler",
    ]
    # Silence kapitan's compile chatter while running in-process.
    with contextlib.redirect_stdout(io.StringIO()):
        main()
    main_hash = directory_hash(temp_main + "/compiled")

    temp_bin = tempfile.mkdtemp()
    # check=True so a non-zero binary exit fails the test immediately,
    # instead of surfacing later as a confusing directory-hash error.
    subprocess.run(
        [
            "../../" + BINARY_PATH,
            "compile",
            "--output-path",
            temp_bin,
            "-t",
            "nginx-ingress",
            "nginx-ingress-helm-params",
            "acs-engine-autoscaler",
        ],
        stdout=subprocess.DEVNULL,
        check=True,
    )
    bin_hash = directory_hash(temp_bin + "/compiled")
    self.assertEqual(bin_hash, main_hash)
def test_compile(self):
    """Compile the docker inventory and compare against the golden tree."""
    sys.argv = ["kapitan", "compile"]
    main()
    cwd = os.getcwd()
    actual_hash = directory_hash(cwd + '/compiled')
    golden_hash = directory_hash(cwd + '/../../tests/test_docker_compiled')
    self.assertEqual(actual_hash, golden_hash)
def test_compile(self):
    """Compile the terraform inventory and compare against the golden tree."""
    sys.argv = ["kapitan", "compile"]
    main()
    cwd = os.getcwd()
    actual_hash = directory_hash(cwd + "/compiled")
    golden_hash = directory_hash(cwd + "/../../tests/test_terraform_compiled")
    self.assertEqual(actual_hash, golden_hash)
def test_compiled_copy_all_targets(self):
    """The copy input type must reproduce the source folder exactly."""
    sys.argv = ["kapitan", "compile"]
    main()
    source_hash = directory_hash(os.path.join("components", "busybox"))
    copied_hash = directory_hash(os.path.join("compiled", "busybox", "copy"))
    self.assertEqual(source_hash, copied_hash)
def test_compile(self):
    """Compile with caching (-c) and compare against the golden tree."""
    sys.argv = ["kapitan", "compile", "-c"]
    main()
    # The cache file is a -c artifact, not part of the golden output.
    os.remove('./compiled/.kapitan_cache')
    cwd = os.getcwd()
    actual_hash = directory_hash(cwd + '/compiled')
    golden_hash = directory_hash(cwd + '/../../tests/test_kubernetes_compiled')
    self.assertEqual(actual_hash, golden_hash)
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
    """
    generates the hashes for the inventory per target and jsonnet/jinja2 folders
    for caching purposes
    struct: {
        inventory:
            <target>:
                classes: <sha256>
                parameters: <sha256>
        folder:
            components: <sha256>
            docs: <sha256>
            lib: <sha256>
            scripts: <sha256>
            ...
    }
    """
    inv = inventory_reclass(inventory_path)
    cached.inv_cache = {}
    cached.inv_cache['inventory'] = {}
    cached.inv_cache['folder'] = {}

    if targets:
        # Only hash the explicitly requested targets.
        for target in targets:
            try:
                cached.inv_cache['inventory'][target] = {}
                cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
                cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])
            except KeyError:  # no unused `as e` binding; bare raise keeps traceback
                logger.error("'%s' target not found", target)
                raise
    else:
        # Hash every target, plus the base folder of each compile input path.
        for target in inv['nodes']:
            cached.inv_cache['inventory'][target] = {}
            cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
            cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])

            compile_obj = inv['nodes'][target]['parameters']['kapitan']['compile']
            for obj in compile_obj:
                for input_path in obj['input_paths']:
                    base_folder = os.path.dirname(input_path).split('/')[0]
                    if base_folder == '':
                        # input_path has no directory component; use its first segment
                        base_folder = os.path.basename(input_path).split('/')[0]
                    if base_folder not in cached.inv_cache['folder']:
                        if os.path.exists(base_folder) and os.path.isdir(base_folder):
                            cached.inv_cache['folder'][base_folder] = directory_hash(base_folder)

    # NOTE(review): the collapsed source does not show the indentation of the
    # two loops below; placed at function level per the comment flow — confirm.
    # Cache additional folders set by --cache-paths
    for path in cache_paths:
        if path not in cached.inv_cache['folder']:
            if os.path.exists(path) and os.path.isdir(path):
                cached.inv_cache['folder'][path] = directory_hash(path)

    # Most commonly changed but not referenced in input_paths
    for common in ('lib', 'vendor', 'secrets'):
        if common not in cached.inv_cache['folder']:
            if os.path.exists(common) and os.path.isdir(common):
                cached.inv_cache['folder'][common] = directory_hash(common)
def test_compile(self):
    """Compile twice with -c so the second run exercises the cache."""
    sys.argv = ["kapitan", "compile", "-c"]
    main()
    # Compile again to verify caching works as expected
    main()
    os.remove("./compiled/.kapitan_cache")
    cwd = os.getcwd()
    actual_hash = directory_hash(cwd + "/compiled")
    golden_hash = directory_hash(cwd + "/../../tests/test_kubernetes_compiled")
    self.assertEqual(actual_hash, golden_hash)
def test_cli_kubenetes_compile(self):
    """
    run $kapitan compile

    Compiles once in-process and once via the standalone binary (into a
    temp dir), then asserts both output trees hash identically.
    """
    compile_path = tempfile.mkdtemp()
    argv = ["kapitan", "compile"]
    sys.argv = argv
    # Silence kapitan's compile chatter while running in-process.
    with contextlib.redirect_stdout(io.StringIO()):
        main()
    # Reuse the same argv for the binary, redirecting its output path.
    argv[0] = "../../" + BINARY_PATH
    argv.extend(["--output-path", compile_path])
    # check=True so a non-zero binary exit fails the test immediately.
    subprocess.run(argv, stdout=subprocess.DEVNULL, check=True)
    main_compiled_dir_hash = directory_hash(os.getcwd() + "/compiled")
    binary_compiled_dir_hash = directory_hash(compile_path + "/compiled")
    self.assertEqual(main_compiled_dir_hash, binary_compiled_dir_hash)
def test_cli_terraform_compile(self):
    """
    run $kapitan compile

    First compares the in-process compile against the golden terraform
    tree, then compiles again through the standalone binary and checks
    binary output matches the in-process output.
    """
    sys.argv = ["kapitan", "compile"]
    main()
    compiled_dir_hash = directory_hash(os.getcwd() + '/compiled')
    test_compiled_dir_hash = directory_hash(
        os.getcwd() + '/../../tests/test_terraform_compiled')
    self.assertEqual(compiled_dir_hash, test_compiled_dir_hash)

    compile_path = tempfile.mkdtemp()
    argv = ["kapitan", "compile"]
    sys.argv = argv
    # Silence kapitan's compile chatter while running in-process.
    with contextlib.redirect_stdout(io.StringIO()):
        main()
    # Reuse the same argv for the binary, redirecting its output path.
    argv[0] = '../../' + BINARY_PATH
    argv.extend(["--output-path", compile_path])
    # check=True so a non-zero binary exit fails the test immediately.
    subprocess.run(argv, stdout=subprocess.DEVNULL, check=True)
    main_compiled_dir_hash = directory_hash(os.getcwd() + '/compiled')
    binary_compiled_dir_hash = directory_hash(compile_path + '/compiled')
    self.assertEqual(main_compiled_dir_hash, binary_compiled_dir_hash)
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
    """
    generates the hashes for the inventory per target and jsonnet/jinja2 folders
    for caching purposes
    struct: {
        inventory:
            <target>:
                classes: <sha256>
                parameters: <sha256>
        folder:
            components: <sha256>
            docs: <sha256>
            lib: <sha256>
            scripts: <sha256>
            ...
    }
    """
    inv = inventory_reclass(inventory_path)
    cached.inv_cache = {}
    cached.inv_cache["inventory"] = {}
    cached.inv_cache["folder"] = {}

    if targets:
        # Only hash the explicitly requested targets.
        for target in targets:
            try:
                cached.inv_cache["inventory"][target] = {}
                cached.inv_cache["inventory"][target]["classes"] = dictionary_hash(
                    inv["nodes"][target]["classes"])
                cached.inv_cache["inventory"][target]["parameters"] = dictionary_hash(
                    inv["nodes"][target]["parameters"])
            except KeyError:
                raise CompileError("target not found: {}".format(target))
    else:
        # Hash every target, plus the base folder of each compile input path.
        for target in inv["nodes"]:
            cached.inv_cache["inventory"][target] = {}
            cached.inv_cache["inventory"][target]["classes"] = dictionary_hash(
                inv["nodes"][target]["classes"])
            cached.inv_cache["inventory"][target]["parameters"] = dictionary_hash(
                inv["nodes"][target]["parameters"])

            compile_obj = inv["nodes"][target]["parameters"]["kapitan"]["compile"]
            for obj in compile_obj:
                for input_path in obj["input_paths"]:
                    base_folder = os.path.dirname(input_path).split("/")[0]
                    if base_folder == "":
                        # input_path has no directory component; use its first segment
                        base_folder = os.path.basename(input_path).split("/")[0]
                    if base_folder not in cached.inv_cache["folder"]:
                        if os.path.exists(base_folder) and os.path.isdir(base_folder):
                            cached.inv_cache["folder"][base_folder] = directory_hash(base_folder)

    # NOTE(review): the collapsed source does not show the indentation of the
    # two loops below; placed at function level per the comment flow — confirm.
    # Cache additional folders set by --cache-paths
    for path in cache_paths:
        if path not in cached.inv_cache["folder"]:
            if os.path.exists(path) and os.path.isdir(path):
                cached.inv_cache["folder"][path] = directory_hash(path)

    # Most commonly changed but not referenced in input_paths
    for common in ("lib", "vendor", "secrets"):
        if common not in cached.inv_cache["folder"]:
            if os.path.exists(common) and os.path.isdir(common):
                cached.inv_cache["folder"][common] = directory_hash(common)
def test_copy_folder_folder(self):
    """The copy compiler must reproduce a source folder tree exactly."""
    test_dirs_bootstrap_helper()
    self.copy_compiler.compile_file(file_path, compile_path, None)
    source_hash = directory_hash(file_path)
    copied_hash = directory_hash(compile_path)
    self.assertEqual(source_hash, copied_hash)