def inventory_reclass(inventory_path, ignore_class_notfound=False):
    """
    Runs a reclass inventory in inventory_path
    (same output as running ./reclass.py -b inv_base_uri/ --inventory)
    Will attempt to read reclass config from 'reclass-config.yml'
    otherwise it will fall back to the default config.
    Returns a reclass style dictionary
    Does not throw errors if a class is not found while --fetch flag is enabled

    :param inventory_path: base directory of the reclass inventory
    :param ignore_class_notfound: when True, reclass does not fail on missing classes
    :raises InventoryError: on any reclass error
    NOTE: the result is memoised in cached.inv, so later calls return the
    first result regardless of arguments.
    """
    if not cached.inv:
        # built-in defaults; entries from reclass-config.yml are merged on top
        reclass_config = {
            "storage_type": "yaml_fs",
            "inventory_base_uri": inventory_path,
            "nodes_uri": os.path.join(inventory_path, "targets"),
            "classes_uri": os.path.join(inventory_path, "classes"),
            "compose_node_name": False,
            "allow_none_override": True,
            "ignore_class_notfound": ignore_class_notfound,
        }

        try:
            cfg_file = os.path.join(inventory_path, "reclass-config.yml")
            with open(cfg_file) as reclass_cfg:
                file_config = yaml.load(reclass_cfg, Loader=YamlLoader) or {}
            # normalise relative nodes_uri and classes_uri paths, but only for
            # keys the file actually sets (the defaults are already joined)
            for uri in ("nodes_uri", "classes_uri"):
                if uri in file_config:
                    uri_path = os.path.join(inventory_path, file_config[uri])
                    file_config[uri] = os.path.normpath(uri_path)
            # merge instead of replacing wholesale: replacing dropped every
            # default (and the ignore_class_notfound argument) and raised
            # KeyError below when the file omitted e.g. storage_type
            reclass_config.update(file_config)
            logger.debug("Using reclass inventory config at: %s", cfg_file)
        except IOError as ex:
            # If file does not exist, ignore and keep the defaults
            if ex.errno == errno.ENOENT:
                logger.debug("Using reclass inventory config defaults")

        try:
            storage = reclass.get_storage(
                reclass_config["storage_type"],
                reclass_config["nodes_uri"],
                reclass_config["classes_uri"],
                reclass_config["compose_node_name"],
            )
            # class_mappings defaults to None (disabled)
            class_mappings = reclass_config.get("class_mappings")
            _reclass = reclass.core.Core(storage, class_mappings,
                                         reclass.settings.Settings(reclass_config))
            cached.inv = _reclass.inventory()
        except ReclassException as e:
            if isinstance(e, NotFoundError):
                logger.error("Inventory reclass error: inventory not found")
            else:
                logger.error("Inventory reclass error: %s", e.message)
            raise InventoryError(e.message)

    return cached.inv
def inventory(search_paths, target, inventory_path=None):
    """
    Reads inventory (set by inventory_path) in search_paths.
    set nodes_uri to change reclass nodes_uri the default value
    set target to None to return all target in the inventory
    set inventory_path to read custom path. None defaults to value set via cli
    Returns a dictionary with the inventory for target
    """
    if inventory_path is None:
        # grab inventory_path value from cli subcommand
        subcommand_args = cached.args.get("compile") or cached.args.get("inventory")
        inventory_path = subcommand_args.inventory_path

    # the absolute inventory_path wins; otherwise try each search path in order
    candidates = [os.path.abspath(inventory_path)]
    candidates.extend(os.path.join(path, inventory_path) for path in search_paths)

    for candidate in candidates:
        if os.path.exists(candidate):
            full_inv_path = candidate
            break
    else:
        raise InventoryError(f"Inventory not found in search paths: {search_paths}")

    nodes = inventory_reclass(full_inv_path)["nodes"]
    if target is None:
        return nodes
    return nodes[target]
def validate_matching_target_name(target_filename, target_obj, inventory_path):
    """Throws *InventoryError* if parameters.kapitan.vars.target is not set,
    or target does not have a corresponding yaml file in *inventory_path*
    """
    logger.debug("validating target name matches the name of yml file%s", target_filename)

    try:
        declared_name = target_obj["vars"]["target"]
    except KeyError:
        # parameters.kapitan.vars.target is not set at all
        missing_msg = ("Target missing: target \"{}\" is missing parameters.kapitan.vars.target\n"
                       "This parameter should be set to the target name")
        raise InventoryError(missing_msg.format(target_filename))

    if declared_name != target_filename:
        targets_dir = os.path.join(os.path.abspath(inventory_path), "targets")
        mismatch_msg = ("Target \"{}\" is missing the corresponding yml file in {}\n"
                        "Target name should match the name of the target yml file in inventory")
        raise InventoryError(mismatch_msg.format(declared_name, targets_dir))
def inventory_reclass(inventory_path):
    """
    Runs a reclass inventory in inventory_path
    (same output as running ./reclass.py -b inv_base_uri/ --inventory)
    Will attempt to read reclass config from 'reclass-config.yml'
    otherwise it will fall back to the default config.
    Returns a reclass style dictionary

    :param inventory_path: base directory of the reclass inventory
    :raises InventoryError: on any reclass error
    NOTE: the result is memoised in cached.inv across calls.
    """
    if not cached.inv:
        # built-in defaults; entries from reclass-config.yml are merged on top
        reclass_config = {
            'storage_type': 'yaml_fs',
            'inventory_base_uri': inventory_path,
            'nodes_uri': os.path.join(inventory_path, 'targets'),
            'classes_uri': os.path.join(inventory_path, 'classes'),
            'compose_node_name': False
        }

        try:
            cfg_file = os.path.join(inventory_path, 'reclass-config.yml')
            with open(cfg_file) as reclass_cfg:
                file_config = yaml.load(reclass_cfg, Loader=YamlLoader) or {}
            # normalise relative nodes_uri and classes_uri paths, but only for
            # keys the file actually sets (the defaults are already joined)
            for uri in ('nodes_uri', 'classes_uri'):
                if uri in file_config:
                    uri_path = os.path.join(inventory_path, file_config[uri])
                    file_config[uri] = os.path.normpath(uri_path)
            # merge instead of replacing wholesale: replacing dropped every
            # default and raised KeyError below when the file omitted
            # e.g. storage_type
            reclass_config.update(file_config)
            logger.debug("Using reclass inventory config at: %s", cfg_file)
        except IOError as ex:
            # If file does not exist, ignore and keep the defaults
            if ex.errno == errno.ENOENT:
                logger.debug("Using reclass inventory config defaults")

        try:
            storage = reclass.get_storage(reclass_config['storage_type'],
                                          reclass_config['nodes_uri'],
                                          reclass_config['classes_uri'],
                                          reclass_config['compose_node_name'])
            # class_mappings defaults to None (disabled)
            class_mappings = reclass_config.get('class_mappings')
            _reclass = reclass.core.Core(storage, class_mappings,
                                         reclass.settings.Settings(reclass_config))
            cached.inv = _reclass.inventory()
        except ReclassException as e:
            if isinstance(e, NotFoundError):
                logger.error("Inventory reclass error: inventory not found")
            else:
                logger.error("Inventory reclass error: %s", e.message)
            raise InventoryError(e.message)

    return cached.inv
def schema_validate_compiled(targets, compiled_path, inventory_path, schema_cache_path, parallel):
    """
    Validates compiled output according to schemas specified in the inventory.

    :param targets: target names to validate
    :param compiled_path: directory holding compiled output (must exist)
    :param inventory_path: base directory of the inventory
    :param schema_cache_path: directory for cached schemas (created if missing)
    :param parallel: number of worker processes
    :raises InventoryError: on reclass errors
    Exits with status 1 when compiled_path is missing or a worker fails.
    """
    if not os.path.isdir(compiled_path):
        logger.error("compiled-path {} not found".format(compiled_path))
        sys.exit(1)

    if not os.path.isdir(schema_cache_path):
        os.makedirs(schema_cache_path)
        logger.info("created schema-cache-path at {}".format(schema_cache_path))

    worker = partial(schema_validate_kubernetes_output, cache_dir=schema_cache_path)
    pool = multiprocessing.Pool(parallel)

    try:
        target_objs = load_target_inventory(inventory_path, targets)
        validate_map = create_validate_mapping(target_objs, compiled_path)

        # worker returns None on success, so p is only truthy on error
        [p.get() for p in pool.imap_unordered(worker, validate_map.items()) if p]
        pool.close()
    except ReclassException as e:
        if isinstance(e, NotFoundError):
            logger.error("Inventory reclass error: inventory not found")
        else:
            logger.error("Inventory reclass error: %s", e.message)
        # stop workers before raising, otherwise pool.join() in finally
        # fails (ValueError) on a pool that was never closed or terminated
        # and masks the InventoryError
        pool.terminate()
        raise InventoryError(e.message)
    except Exception as e:
        pool.terminate()
        logger.debug("Validate pool terminated")
        # only print traceback for errors we don't know about
        if not isinstance(e, KapitanError):
            logger.exception("Unknown (Non-Kapitan) Error occurred")
        logger.error("\n")
        logger.error(e)
        sys.exit(1)
    finally:
        # always wait for other worker processes to terminate
        pool.join()
def inventory(search_paths, target, inventory_path="inventory/"):
    """
    Reads inventory (set by inventory_path) in search_paths.
    set nodes_uri to change reclass nodes_uri the default value
    set target to None to return all target in the inventory
    Returns a dictionary with the inventory for target
    """
    # locate the first search path that actually contains the inventory
    found_path = None
    for base in search_paths:
        candidate = os.path.join(base, inventory_path)
        if os.path.exists(candidate):
            found_path = candidate
            break

    if found_path is None:
        raise InventoryError(f"Inventory not found in search paths: {search_paths}")

    nodes = inventory_reclass(found_path)["nodes"]
    return nodes if target is None else nodes[target]
def valid_target_obj(target_obj):
    """
    Validates a target_obj
    Returns a dict object if target is valid
    Otherwise raises ValidationError

    NOTE(review): schema violations are actually re-raised as InventoryError
    (see the except clause at the bottom), wrapping the jsonschema error.
    """
    # JSON schema for a kapitan target; "compile" is the only mandatory key
    schema = {
        "type": "object",
        "properties": {
            "vars": {"type": "object"},
            "secrets": {"type": "object"},
            # list of compile items: inputs, their type and output destination
            "compile": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "input_paths": {"type": "array"},
                        "input_type": {"type": "string"},
                        "output_path": {"type": "string"},
                        "output_type": {"type": "string"},
                        "helm_values": {"type": "object"},
                        "helm_params": {
                            "type": "object",
                            "properties": {
                                "namespace": {"type": "string"},
                                "name_template": {"type": "string"},
                                "release_name": {"type": "string"},
                            },
                            "additionalProperties": False,
                        },
                    },
                    "required": ["input_type", "input_paths", "output_path"],
                    "minItems": 1,
                    # jsonnet/kadet/copy inputs are restricted to the listed
                    # output types; jinja2/helm inputs carry no such restriction
                    "oneOf": [
                        {
                            "properties": {
                                "input_type": {"enum": ["jsonnet", "kadet", "copy"]},
                                "output_type": {"enum": ["yaml", "json", "plain"]},
                            },
                        },
                        {"properties": {"input_type": {"enum": ["jinja2", "helm"]}}},
                    ],
                },
            },
            # optional validate entries; only "kubernetes" type is accepted
            "validate": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "output_paths": {"type": "array"},
                        "type": {"type": "string", "enum": ["kubernetes"]},
                        "kind": {"type": "string"},
                        "version": {"type": "string"},
                    },
                    "required": ["output_paths", "type"],
                    "minItems": 1,
                    # kubernetes entries must also set "kind" and may not
                    # carry any extra keys
                    "allOf": [
                        {
                            "if": {"properties": {"type": {"const": "kubernetes"}}},
                            "then": {
                                "properties": {
                                    "type": {},
                                    "kind": {},
                                    "output_paths": {},
                                    "version": {}
                                },
                                "additionalProperties": False,
                                "required": ["kind"],
                            },
                        },
                    ],
                },
            },
            # dependency entries (git/http/https sources)
            "dependencies": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "type": {"type": "string", "enum": ["git", "http", "https"]},
                        "output_path": {"type": "string"},
                        "source": {"type": "string"},
                        "subdir": {"type": "string"},
                        "ref": {"type": "string"},
                        "unpack": {"type": "boolean"},
                    },
                    "required": ["type", "output_path", "source"],
                    "additionalProperties": False,
                    # http(s) sources must be URIs and only accept the keys
                    # whitelisted in "then"
                    "allOf": [
                        {
                            "if": {"properties": {"type": {"enum": ["http", "https"]}}},
                            "then": {
                                "properties": {
                                    "type": {},
                                    "source": {"format": "uri"},
                                    "output_path": {},
                                    "unpack": {},
                                },
                                "additionalProperties": False,
                            },
                        },
                    ],
                },
            },
        },
        "required": ["compile"],
    }

    try:
        jsonschema.validate(target_obj, schema, format_checker=jsonschema.FormatChecker())
    except jsonschema.exceptions.ValidationError as e:
        # re-raise with the offending instance pretty-printed for debugging
        raise InventoryError(
            "Invalid inventory structure\n\nError: {}\nOn instance:\n{}".
            format(e.message, json.dumps(e.instance, indent=2, sort_keys=False)))
def compile_targets(inventory_path, search_paths, output_path, parallel, targets, labels, ref_controller, **kwargs):
    """
    Searches and loads target files, and runs compile_target() on a
    multiprocessing pool with parallel number of processes.
    kwargs are passed to compile_target()

    :param inventory_path: base directory of the inventory
    :param search_paths: paths to search for compile inputs
    :param output_path: directory that will receive the "compiled" folder
    :param parallel: number of worker processes
    :param targets: target names to compile (empty means all)
    :param labels: target labels used by search_targets()
    :param ref_controller: passed through to compile_target()
    :raises InventoryError: on reclass errors
    Exits with status 1 on search or compile failures.
    """
    # temp_path will hold compiled items until they are moved into output_path
    temp_path = tempfile.mkdtemp(suffix=".kapitan")

    updated_targets = targets
    try:
        updated_targets = search_targets(inventory_path, targets, labels)
    except CompileError as e:
        logger.error(e)
        sys.exit(1)

    # If --cache is set
    if kwargs.get("cache"):
        additional_cache_paths = kwargs.get("cache_paths")
        generate_inv_cache_hashes(inventory_path, targets, additional_cache_paths)

        # with no explicit targets, only recompile what changed since last run
        if not targets:
            updated_targets = changed_targets(inventory_path, output_path)
            logger.debug("Changed targets since last compilation: %s", updated_targets)
            if len(updated_targets) == 0:
                logger.info("No changes since last compilation.")
                return

    pool = multiprocessing.Pool(parallel)

    try:
        target_objs = load_target_inventory(inventory_path, updated_targets)

        # append "compiled" to output_path so we can safely overwrite it
        compile_path = os.path.join(output_path, "compiled")
        worker = partial(
            compile_target,
            search_paths=search_paths,
            compile_path=temp_path,
            ref_controller=ref_controller,
            **kwargs,
        )

        if not target_objs:
            raise CompileError("Error: no targets found")

        if kwargs.get("fetch_dependencies", False):
            fetch_dependencies(target_objs, pool)

        # compile_target() returns None on success
        # so p is only not None when raising an exception
        [p.get() for p in pool.imap_unordered(worker, target_objs) if p]

        os.makedirs(compile_path, exist_ok=True)

        # if '-t' is set on compile or only a few changed, only override selected targets
        if updated_targets:
            for target in updated_targets:
                compile_path_target = os.path.join(compile_path, target)
                temp_path_target = os.path.join(temp_path, target)

                # makedirs then rmtree guarantees the target dir is emptied
                # whether or not it existed before the copy
                os.makedirs(compile_path_target, exist_ok=True)
                shutil.rmtree(compile_path_target)
                shutil.copytree(temp_path_target, compile_path_target)
                logger.debug("Copied %s into %s", temp_path_target, compile_path_target)
        # otherwise override all targets
        else:
            shutil.rmtree(compile_path)
            shutil.copytree(temp_path, compile_path)
            logger.debug("Copied %s into %s", temp_path, compile_path)

        # validate the compiled outputs
        if kwargs.get("validate", False):
            validate_map = create_validate_mapping(target_objs, compile_path)
            worker = partial(schema_validate_kubernetes_output,
                             cache_dir=kwargs.get("schemas_path", "./schemas"))
            [p.get() for p in pool.imap_unordered(worker, validate_map.items()) if p]

        # Save inventory and folders cache
        save_inv_cache(compile_path, targets)
        pool.close()

    except ReclassException as e:
        if isinstance(e, NotFoundError):
            logger.error("Inventory reclass error: inventory not found")
        else:
            logger.error("Inventory reclass error: %s", e.message)
        # stop workers before raising, otherwise pool.join() in finally
        # fails (ValueError) on a pool that was never closed or terminated
        # and masks the InventoryError
        pool.terminate()
        raise InventoryError(e.message)
    except Exception as e:
        # if compile worker fails, terminate immediately
        pool.terminate()
        logger.debug("Compile pool terminated")
        # only print traceback for errors we don't know about
        if not isinstance(e, KapitanError):
            logger.exception("Unknown (Non-Kapitan) Error occurred")
        logger.error("\n")
        logger.error(e)
        sys.exit(1)
    finally:
        # always wait for other worker processes to terminate
        pool.join()
        shutil.rmtree(temp_path)
        logger.debug("Removed %s", temp_path)
def valid_target_obj(target_obj, require_compile=True):
    """
    Validates a target_obj
    Returns a dict object if target is valid
    Otherwise raises ValidationError

    :param target_obj: dict to validate against the target schema
    :param require_compile: when True, "compile" becomes a mandatory key
    NOTE(review): schema violations are actually re-raised as InventoryError
    (see the except clause at the bottom), wrapping the jsonschema error.
    """
    # JSON schema for a kapitan target
    schema = {
        "type": "object",
        "properties": {
            "vars": {"type": "object"},
            # secrets backend settings; each backend key has its own sub-schema
            "secrets": {
                "type": "object",
                "properties": {
                    "gpg": {
                        "type": "object",
                        "properties": {
                            "recipients": {
                                "type": "array",
                                "items": {
                                    "type": "object",
                                    "properties": {
                                        "name": {"type": "string"},
                                        "fingerprint": {"type": "string"},
                                    },
                                },
                            },
                        },
                        "required": ["recipients"],
                    },
                    "gkms": {
                        "type": "object",
                        "properties": {"key": {"type": "string"}},
                        "required": ["key"],
                    },
                    "awskms": {
                        "type": "object",
                        "properties": {"key": {"type": "string"}},
                        "required": ["key"],
                    },
                    "azkms": {
                        "type": "object",
                        "properties": {"key": {"type": "string"}},
                        "required": ["key"],
                    },
                    "vaultkv": {
                        "type": "object",
                        "properties": {
                            "VAULT_ADDR": {"type": "string"},
                            "VAULT_NAMESPACE": {"type": "string"},
                            "VAULT_SKIP_VERIFY": {"type": "string"},
                            "VAULT_CLIENT_KEY": {"type": "string"},
                            "VAULT_CLIENT_CERT": {"type": "string"},
                            "auth": {
                                "enum": [
                                    "token", "userpass", "ldap", "github", "approle"
                                ]
                            },
                            "engine": {"type": "string"},
                            "mount": {"type": "string"},
                        },
                    },
                },
                # only the backends listed above are accepted
                "additionalProperties": False,
            },
            # list of compile items: inputs, their type and output destination
            "compile": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "input_paths": {"type": "array"},
                        "input_type": {"type": "string"},
                        "output_path": {"type": "string"},
                        "output_type": {"type": "string"},
                        "helm_values": {"type": "object"},
                        "helm_values_files": {"type": "array"},
                        "helm_params": {
                            "type": "object",
                            "properties": {
                                "namespace": {"type": "string"},
                                "name_template": {"type": "string"},
                                "release_name": {"type": "string"},
                            },
                            "additionalProperties": False,
                        },
                        "input_params": {"type": "object"},
                        "env_vars": {"type": "object"},
                        "args": {"type": "array"},
                    },
                    "required": ["input_type", "input_paths", "output_path"],
                    "minItems": 1,
                    # jsonnet/kadet/copy/remove inputs are restricted to the
                    # listed output types; jinja2/helm/external are not
                    "oneOf": [
                        {
                            "properties": {
                                "input_type": {
                                    "enum": ["jsonnet", "kadet", "copy", "remove"]
                                },
                                "output_type": {
                                    "enum": ["yml", "yaml", "json", "plain"]
                                },
                            },
                        },
                        {
                            "properties": {
                                "input_type": {"enum": ["jinja2", "helm", "external"]}
                            }
                        },
                    ],
                },
            },
            # optional validate entries; only "kubernetes" type is accepted
            "validate": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "output_paths": {"type": "array"},
                        "type": {"type": "string", "enum": ["kubernetes"]},
                        "kind": {"type": "string"},
                        "version": {"type": "string"},
                    },
                    "required": ["output_paths", "type"],
                    "minItems": 1,
                    # kubernetes entries must also set "kind" and may not
                    # carry any extra keys
                    "allOf": [
                        {
                            "if": {"properties": {"type": {"const": "kubernetes"}}},
                            "then": {
                                "properties": {
                                    "type": {},
                                    "kind": {},
                                    "output_paths": {},
                                    "version": {}
                                },
                                "additionalProperties": False,
                                "required": ["kind"],
                            },
                        },
                    ],
                },
            },
            # dependency entries (git/http/https/helm sources)
            "dependencies": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "chart_name": {"type": "string"},
                        "type": {"type": "string", "enum": ["git", "http", "https", "helm"]},
                        "output_path": {"type": "string"},
                        "source": {"type": "string"},
                        "subdir": {"type": "string"},
                        "ref": {"type": "string"},
                        "unpack": {"type": "boolean"},
                        "version": {"type": "string"},
                    },
                    "required": ["type", "output_path", "source"],
                    "additionalProperties": False,
                    # per-type restrictions: http(s) and helm sources must be
                    # URIs and only accept the keys whitelisted in "then";
                    # helm additionally requires chart_name
                    "allOf": [
                        {
                            "if": {"properties": {"type": {"enum": ["http", "https"]}}},
                            "then": {
                                "properties": {
                                    "type": {},
                                    "source": {"format": "uri"},
                                    "output_path": {},
                                    "unpack": {},
                                },
                                "additionalProperties": False,
                            },
                        },
                        {
                            "if": {"properties": {"type": {"enum": ["helm"]}}},
                            "then": {
                                "properties": {
                                    "type": {},
                                    "source": {"format": "uri"},
                                    "output_path": {},
                                    "unpack": {},
                                    "chart_name": {"type": "string"},
                                    "version": {"type": "string"},
                                },
                                "required": [
                                    "type", "output_path", "source", "chart_name"
                                ],
                                "additionalProperties": False,
                            },
                        },
                    ],
                },
            },
            # remote inventory entries (git/http/https sources)
            "inventory": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "type": {"type": "string", "enum": ["git", "http", "https"]},
                        "output_path": {"type": "string"},
                        "source": {"type": "string"},
                        "subdir": {"type": "string"},
                        "ref": {"type": "string"},
                        "unpack": {"type": "boolean"},
                    },
                    "required": ["type", "output_path", "source"],
                    "additionalProperties": False,
                    # http(s) sources must be URIs and only accept the keys
                    # whitelisted in "then"
                    "allOf": [
                        {
                            "if": {"properties": {"type": {"enum": ["http", "https"]}}},
                            "then": {
                                "properties": {
                                    "type": {},
                                    "source": {"format": "uri"},
                                    "output_path": {},
                                    "unpack": {},
                                },
                                "additionalProperties": False,
                            },
                        },
                    ],
                },
            },
        },
    }

    # "compile" is only mandatory when the caller asks for it
    if require_compile:
        schema["required"] = ["compile"]

    try:
        jsonschema.validate(target_obj, schema, format_checker=jsonschema.FormatChecker())
    except jsonschema.exceptions.ValidationError as e:
        # re-raise with the offending instance pretty-printed for debugging
        raise InventoryError(
            "Invalid inventory structure\n\nError: {}\nOn instance:\n{}".
            format(e.message, json.dumps(e.instance, indent=2, sort_keys=False)))