def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Write file_path (jsonnet evaluated) items as files to compile_path.
    ext_vars will be passed as parameters to jsonnet_file()
    kwargs:
        output: default 'yaml', accepts 'json'
        prune: default False, accepts True
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
        indent: default 2
    """
    def _search_imports(cwd, imp):
        return search_imports(cwd, imp, self.search_paths)

    json_output = jsonnet_file(file_path, import_callback=_search_imports,
                               native_callbacks=resource_callbacks(self.search_paths),
                               ext_vars=ext_vars)
    json_output = json.loads(json_output)

    output = kwargs.get('output', 'yaml')
    prune = kwargs.get('prune', False)
    reveal = kwargs.get('reveal', False)
    target_name = kwargs.get('target_name', None)
    indent = kwargs.get('indent', 2)

    if prune:
        json_output = prune_empty(json_output)
        logger.debug("Pruned output for: %s", file_path)

    for item_key, item_value in json_output.items():
        # write each item to disk
        if output == 'json':
            file_path = os.path.join(compile_path, '%s.%s' % (item_key, output))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_json(item_value)
        elif output == 'yaml':
            file_path = os.path.join(compile_path, '%s.%s' % (item_key, "yml"))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_yaml(item_value)
        else:
            raise ValueError('output is neither "json" nor "yaml"')
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Write file_path (kadet evaluated) items as files to compile_path.
    ext_vars is not used in Kadet
    kwargs:
        output: default 'yaml', accepts 'json'
        prune: default False
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
        indent: default 2
    """
    output = kwargs.get('output', 'yaml')
    prune = kwargs.get('prune', False)
    reveal = kwargs.get('reveal', False)
    target_name = kwargs.get('target_name', None)
    indent = kwargs.get('indent', 2)

    # These will be updated per target
    # XXX At the moment we have no other way of setting externals for modules...
    global search_paths
    search_paths = self.search_paths
    global inventory
    inventory = lambda: Dict(inventory_func(self.search_paths, target_name))  # noqa E731
    global inventory_global
    inventory_global = lambda: Dict(inventory_func(self.search_paths, None))  # noqa E731

    kadet_module, spec = module_from_path(file_path)
    sys.modules[spec.name] = kadet_module
    spec.loader.exec_module(kadet_module)
    logger.debug('Kadet.compile_file: spec.name: %s', spec.name)

    output_obj = kadet_module.main().to_dict()
    if prune:
        output_obj = prune_empty(output_obj)

    for item_key, item_value in output_obj.items():
        # write each item to disk
        if output == 'json':
            file_path = os.path.join(compile_path, '%s.%s' % (item_key, output))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_json(item_value)
        elif output == 'yaml':
            file_path = os.path.join(compile_path, '%s.%s' % (item_key, "yml"))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_yaml(item_value)
        elif output == 'plain':
            file_path = os.path.join(compile_path, '%s' % item_key)
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write(item_value)
        else:
            raise ValueError('output is neither "json", "yaml" nor "plain"')
        logger.debug("Pruned output for: %s", file_path)
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Write items in path as jinja2 rendered files to compile_path.
    path can be either a file or directory.
    kwargs:
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
    """
    reveal = kwargs.get('reveal', False)
    target_name = kwargs.get('target_name', None)

    # set ext_vars and inventory for jinja2 context
    context = ext_vars.copy()
    context["inventory"] = inventory(self.search_paths, target_name)
    context["inventory_global"] = inventory(self.search_paths, None)
    jinja2_filters = kwargs.get('jinja2_filters')

    for item_key, item_value in render_jinja2(file_path, context,
                                              jinja2_filters=jinja2_filters).items():
        full_item_path = os.path.join(compile_path, item_key)
        os.makedirs(os.path.dirname(full_item_path), exist_ok=True)
        with CompiledFile(full_item_path, self.ref_controller, mode="w", reveal=reveal,
                          target_name=target_name) as fp:
            fp.write(item_value["content"])
            mode = item_value["mode"]
            os.chmod(full_item_path, mode)
            logger.debug("Wrote %s with mode %.4o", full_item_path, mode)
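# A minimal sketch of the mapping that the jinja2 loop above consumes. The shape is inferred
# from the item_key / item_value["content"] / item_value["mode"] accesses; the literal paths,
# contents and modes below are hypothetical examples, not output of a real render_jinja2 call.
example_rendered = {
    "scripts/apply.sh": {"content": "#!/bin/bash\nkubectl apply -f manifests/\n", "mode": 0o755},
    "docs/README.md": {"content": "# my-target\n", "mode": 0o644},
}

for item_key, item_value in example_rendered.items():
    # item_key becomes the path relative to compile_path; "mode" is applied via os.chmod
    print(item_key, oct(item_value["mode"]))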
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Render templates in file_path/templates and write to compile_path.
    file_path must be a directory containing a helm chart.
    kwargs:
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
    """
    reveal = kwargs.get("reveal", False)
    target_name = kwargs.get("target_name", None)

    if self.file_path is not None:
        raise CompileError(
            "The same helm input was compiled with different input paths, which will give a wrong result."
            + f" The input paths found are: {self.file_path} and {file_path}."
            + f" The search paths were: {self.search_paths}."
        )
    self.file_path = file_path

    temp_dir = tempfile.mkdtemp()
    os.makedirs(os.path.dirname(compile_path), exist_ok=True)
    # save the template output to temp dir first
    error_message = self.render_chart(
        chart_dir=file_path,
        output_path=temp_dir,
        helm_path=self.helm_path,
        helm_params=self.helm_params,
        helm_values_file=self.helm_values_file,
        helm_values_files=self.helm_values_files,
    )
    if error_message:
        raise HelmTemplateError(error_message)

    walk_root_files = os.walk(temp_dir)
    for current_dir, _, files in walk_root_files:
        for file in files:
            # go through all the template files
            rel_dir = os.path.relpath(current_dir, temp_dir)
            rel_file_name = os.path.join(rel_dir, file)
            full_file_name = os.path.join(current_dir, file)
            with open(full_file_name, "r") as f:
                item_path = os.path.join(compile_path, rel_file_name)
                os.makedirs(os.path.dirname(item_path), exist_ok=True)
                with CompiledFile(
                    item_path,
                    self.ref_controller,
                    mode="w",
                    reveal=reveal,
                    target_name=target_name,
                ) as fp:
                    yml_obj = list(yaml.safe_load_all(f))
                    fp.write_yaml(yml_obj)
                    logger.debug("Wrote file %s to %s", full_file_name, item_path)

    self.helm_values_file = None  # reset this
    self.helm_params = {}
    self.helm_values_files = []
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Render templates in file_path/templates and write to compile_path.
    file_path must be a directory containing a helm chart.
    kwargs:
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
    """
    if not self.lib:
        raise HelmBindingUnavailableError(
            "Helm binding is not supported for {}, "
            "or the binding does not exist.".format(platform.system()))

    reveal = kwargs.get("reveal", False)
    target_name = kwargs.get("target_name", None)

    temp_dir = tempfile.mkdtemp()
    os.makedirs(os.path.dirname(compile_path), exist_ok=True)
    # save the template output to temp dir first
    error_message = self.render_chart(
        chart_dir=file_path,
        output_path=temp_dir,
        helm_values_file=self.helm_values_file,
        helm_values_files=self.helm_values_files,
        **self.helm_params,
    )
    if error_message:
        raise HelmTemplateError(error_message)

    walk_root_files = os.walk(temp_dir)
    for current_dir, _, files in walk_root_files:
        for file in files:
            # go through all the template files
            rel_dir = os.path.relpath(current_dir, temp_dir)
            rel_file_name = os.path.join(rel_dir, file)
            full_file_name = os.path.join(current_dir, file)
            with open(full_file_name, "r") as f:
                item_path = os.path.join(compile_path, rel_file_name)
                os.makedirs(os.path.dirname(item_path), exist_ok=True)
                with CompiledFile(item_path, self.ref_controller, mode="w", reveal=reveal,
                                  target_name=target_name) as fp:
                    yml_obj = list(yaml.safe_load_all(f))
                    fp.write_yaml(yml_obj)
                    logger.debug("Wrote file %s to %s", full_file_name, item_path)

    self.helm_values_file = None  # reset this
    self.helm_params = {}
    self.helm_values_files = []
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Render templates in file_path/templates and write to compile_path.
    file_path must be a directory containing a helm chart.
    kwargs:
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
    """
    reveal = kwargs.get('reveal', False)
    target_name = kwargs.get('target_name', None)

    temp_dir = tempfile.mkdtemp()
    os.makedirs(os.path.dirname(compile_path), exist_ok=True)
    # save the template output to temp dir first
    error_message = render_chart(chart_dir=file_path, output_path=temp_dir,
                                 helm_values_file=self.helm_values_file,
                                 **self.helm_params)
    if error_message:
        raise HelmTemplateError(error_message)

    walk_root_files = os.walk(temp_dir)
    for root, _, files in walk_root_files:
        for file in files:
            # go through all the template files
            with open(os.path.join(root, file), 'r') as f:
                item_path = os.path.join(compile_path, file)
                with CompiledFile(item_path, self.ref_controller, mode="w", reveal=reveal,
                                  target_name=target_name) as fp:
                    fp.write(f.read())
                    logger.debug("Wrote file %s to %s", os.path.join(file_path, file), item_path)

    self.helm_values_file = None  # reset this
    self.helm_params = {}
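# A small, self-contained illustration of the temp_dir -> compile_path mapping used by the
# two helm variants above that preserve the chart's directory layout (os.path.relpath plus
# os.path.join). The directory and file names below are hypothetical.
import os

temp_dir = "/tmp/kapitan-helm-abc123"        # hypothetical `helm template` output dir
compile_path = "compiled/my-target/helm"     # hypothetical kapitan compile path

current_dir = os.path.join(temp_dir, "mychart/templates")
file = "deployment.yaml"

rel_file_name = os.path.join(os.path.relpath(current_dir, temp_dir), file)
item_path = os.path.join(compile_path, rel_file_name)
print(item_path)  # compiled/my-target/helm/mychart/templates/deployment.yaml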
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Write items in path as jinja2 rendered files to compile_path.
    path can be either a file or directory.
    kwargs:
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
    """
    reveal = kwargs.get("reveal", False)
    target_name = kwargs.get("target_name", None)

    # set ext_vars and inventory for jinja2 context
    context = ext_vars.copy()
    context["inventory"] = inventory(self.search_paths, target_name)
    context["inventory_global"] = inventory(self.search_paths, None)
    context["input_params"] = self.input_params

    # reset between each compile if jinja2 component is used multiple times
    self.input_params = {}

    jinja2_filters = kwargs.get("jinja2_filters")

    for item_key, item_value in render_jinja2(
            file_path, context, jinja2_filters=jinja2_filters,
            search_paths=self.search_paths).items():
        full_item_path = os.path.join(compile_path, item_key)
        with CompiledFile(full_item_path, self.ref_controller, mode="w", reveal=reveal,
                          target_name=target_name) as fp:
            fp.write(item_value["content"])
            mode = item_value["mode"]
            os.chmod(full_item_path, mode)
            logger.debug("Wrote %s with mode %.4o", full_item_path, mode)
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Write file_path (kadet evaluated) items as files to compile_path.
    ext_vars is not used in Kadet
    kwargs:
        output: default 'yaml', accepts 'json'
        prune: default False
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
        indent: default 2
    """
    output = kwargs.get("output", "yaml")
    prune = kwargs.get("prune", False)
    reveal = kwargs.get("reveal", False)
    target_name = kwargs.get("target_name", None)
    inventory_path = kwargs.get("inventory_path", None)
    indent = kwargs.get("indent", 2)

    input_params = self.input_params
    # set compile_path allowing kadet functions to have context on where files
    # are being compiled on the current kapitan run
    input_params["compile_path"] = compile_path
    # reset between each compile if kadet component is used multiple times
    self.input_params = {}

    # These will be updated per target
    # XXX At the moment we have no other way of setting externals for modules...
    global search_paths
    search_paths = self.search_paths
    global inventory
    inventory = lambda: Dict(inventory_func(self.search_paths, target_name, inventory_path))  # noqa E731
    global inventory_global
    inventory_global = lambda: Dict(inventory_func(self.search_paths, None, inventory_path))  # noqa E731

    kadet_module, spec = module_from_path(file_path)
    sys.modules[spec.name] = kadet_module
    spec.loader.exec_module(kadet_module)
    logger.debug("Kadet.compile_file: spec.name: %s", spec.name)

    kadet_arg_spec = inspect.getfullargspec(kadet_module.main)
    logger.debug("Kadet main args: %s", kadet_arg_spec.args)

    if len(kadet_arg_spec.args) == 1:
        output_obj = kadet_module.main(input_params).to_dict()
    elif len(kadet_arg_spec.args) == 0:
        output_obj = kadet_module.main().to_dict()
    else:
        raise ValueError(f"Kadet {spec.name}: main() must take either 0 or 1 arguments")

    if prune:
        output_obj = prune_empty(output_obj)

    # Return None if output_obj has no output
    if output_obj is None:
        return None

    for item_key, item_value in output_obj.items():
        # write each item to disk
        if output == "json":
            file_path = os.path.join(compile_path, "%s.%s" % (item_key, output))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_json(item_value)
        elif output in ["yml", "yaml"]:
            file_path = os.path.join(compile_path, "%s.%s" % (item_key, output))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_yaml(item_value)
        elif output == "plain":
            file_path = os.path.join(compile_path, "%s" % item_key)
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write(item_value)
        else:
            raise ValueError(
                f"Output type defined in inventory for {file_path} is neither 'json', 'yaml' nor 'plain'"
            )
        logger.debug("Pruned output for: %s", file_path)
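# A minimal sketch of the contract the loader above expects from a kadet input module,
# inferred from the inspect.getfullargspec() check and the .to_dict() call: a module-level
# main() taking zero or one argument (the input_params dict) and returning an object whose
# to_dict() yields {output_name: document}. BaseObj is assumed to come from the kadet
# library kapitan's kadet input is built on; the Namespace resource is a made-up example.
from kadet import BaseObj


def main(input_params):
    output = BaseObj()
    # the "namespace" key becomes namespace.yaml (or .json / plain, per the output kwarg)
    output.root.namespace = {
        "apiVersion": "v1",
        "kind": "Namespace",
        "metadata": {"name": input_params.get("name", "example")},
    }
    return output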
def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
    """
    Write file_path (jsonnet evaluated) items as files to compile_path.
    ext_vars will be passed as parameters to jsonnet_file()
    kwargs:
        output: default 'yaml', accepts 'json'
        prune_input: default False, accepts True
        reveal: default False, set to reveal refs on compile
        target_name: default None, set to current target being compiled
        indent: default 2
    """
    def _search_imports(cwd, imp):
        return search_imports(cwd, imp, self.search_paths)

    json_output = jsonnet_file(
        file_path,
        import_callback=_search_imports,
        native_callbacks=resource_callbacks(self.search_paths),
        ext_vars=ext_vars,
    )
    output_obj = json.loads(json_output)

    output = kwargs.get("output", "yaml")
    prune = kwargs.get("prune_input", False)
    reveal = kwargs.get("reveal", False)
    target_name = kwargs.get("target_name", None)
    indent = kwargs.get("indent", 2)

    if prune:
        output_obj = prune_empty(output_obj)
        logger.debug("Pruned output for: %s", file_path)

    if not isinstance(output_obj, dict):
        tmp_output_obj = output_obj
        # assume that the output filename is the
        # same as the input jsonnet filename
        filename = os.path.splitext(os.path.basename(file_path))[0]
        output_obj = {}
        output_obj[filename] = tmp_output_obj

    for item_key, item_value in output_obj.items():
        # write each item to disk
        if output == "json":
            file_path = os.path.join(compile_path, "%s.%s" % (item_key, output))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_json(item_value)
        elif output in ["yml", "yaml"]:
            file_path = os.path.join(compile_path, "%s.%s" % (item_key, output))
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write_yaml(item_value)
        elif output == "plain":
            file_path = os.path.join(compile_path, "%s" % item_key)
            with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal,
                              target_name=target_name, indent=indent) as fp:
                fp.write(item_value)
        else:
            raise ValueError(
                f"Output type defined in inventory for {file_path} is neither 'json', 'yaml' nor 'plain'"
            )
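# A small runnable sketch of the non-dict fallback in the jsonnet variant above: when the
# evaluated jsonnet is not an object, it is wrapped under the input file's basename, which
# then becomes the output filename. The file path and evaluated value are hypothetical.
import os

file_path = "components/nginx/main.jsonnet"                  # hypothetical input file
output_obj = [{"kind": "Deployment"}, {"kind": "Service"}]   # jsonnet that evaluates to a list

if not isinstance(output_obj, dict):
    filename = os.path.splitext(os.path.basename(file_path))[0]
    output_obj = {filename: output_obj}

print(list(output_obj.keys()))  # ['main'] -> written as main.yaml when output="yaml"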