Example #1
    def compile_input_path(self, input_path, comp_obj, ext_vars, **kwargs):
        """
        compile and validate input_path in comp_obj
        kwargs are passed into compile_file()
        """
        target_name = ext_vars["target"]
        output_path = comp_obj["output_path"]
        output_dirname = comp_obj.get("output_dirname", target_name)
        output_type = comp_obj.get("output_type", self.default_output_type())
        file_found = False

        for path in self.search_paths:
            compile_file_sp = os.path.join(path, input_path)
            if os.path.exists(compile_file_sp):
                file_found = True
                logger.debug("Compiling %s", compile_file_sp)
                try:
                    _compile_path = os.path.join(self.compile_path, output_dirname, output_path)
                    self.compile_file(compile_file_sp, _compile_path, ext_vars, output=output_type,
                                      target_name=target_name, **kwargs)
                except KapitanError as e:
                    raise CompileError("{}\nCompile error: failed to compile target: {}".format(e, target_name))

        if not file_found:
            raise CompileError("Compile error: {} for target: {} not found in "
                               "search_paths: {}".format(input_path, target_name, self.search_paths))
Example #2
def search_targets(inventory_path, targets, labels):
    """returns a list of targets where the labels match, otherwise just return the original targets"""
    if not labels:
        return targets

    try:
        labels_dict = dict(label.split("=") for label in labels)
    except ValueError:
        raise CompileError(
            "Compile error: Failed to parse labels, should be formatted like: kapitan compile -l env=prod app=example"
        )

    targets_found = []
    inv = inventory_reclass(inventory_path)

    for target_name in inv["nodes"]:
        matched_all_labels = False
        for label, value in labels_dict.items():
            try:
                if inv["nodes"][target_name]["parameters"]["kapitan"]["labels"][label] == value:
                    matched_all_labels = True
                    continue
            except KeyError:
                logger.debug("search_targets: label %s=%s didn't match target %s", label, value, target_name)

            matched_all_labels = False
            break

        if matched_all_labels:
            targets_found.append(target_name)

    if len(targets_found) == 0:
        raise CompileError("No targets found with labels: {}".format(labels))

    return targets_found
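
The label selector format expected above is easiest to see with the parsing idiom in isolation. A minimal sketch (values are hypothetical, not part of the original listing):

labels = ["env=prod", "app=example"]   # as passed via `kapitan compile -l env=prod app=example`
labels_dict = dict(label.split("=") for label in labels)
assert labels_dict == {"env": "prod", "app": "example"}
# a malformed label such as "env" (no "=") makes dict() raise ValueError,
# which search_targets() converts into the CompileError shown above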
Example #3
def directory_hash(directory):
    """Return the sha256 hash for the file contents of a directory"""
    if not os.path.exists(directory):
        raise IOError(
            f"utils.directory_hash failed, {directory} dir doesn't exist")

    if not os.path.isdir(directory):
        raise IOError(
            f"utils.directory_hash failed, {directory} is not a directory")

    try:
        hash = sha256()
        for root, _, files in sorted(os.walk(directory)):
            for names in sorted(files):
                file_path = os.path.join(root, names)
                try:
                    with open(file_path, "r") as f:
                        file_hash = sha256(f.read().encode("UTF-8"))
                        hash.update(file_hash.hexdigest().encode("UTF-8"))
                except UnicodeDecodeError:
                    # not valid UTF-8 text, hash the raw bytes instead
                    with open(file_path, "rb") as f:
                        binary_file_hash = sha256(f.read())
                        hash.update(
                            binary_file_hash.hexdigest().encode("UTF-8"))
                except Exception as e:
                    raise CompileError(
                        f"utils.directory_hash failed to open {file_path}: {e}"
                    )
    except Exception as e:
        raise CompileError(f"utils.directory_hash failed: {e}")

    return hash.hexdigest()
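
A minimal usage sketch, assuming a local directory named "components" exists (the name is hypothetical): the function walks the tree in sorted order and returns a stable sha256 hex digest over the per-file hashes.

digest = directory_hash("components")   # hypothetical directory
assert len(digest) == 64                # sha256 hex digest length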
Example #4
def jinja2_render_file(search_paths, name, ctx):
    """
    Render jinja2 file name with context ctx.
    search_paths is used to find the file name
    as there is a limitation with jsonnet's native_callback approach:
    one can't access the current directory being evaluated
    """
    ctx = json.loads(ctx)
    _full_path = ""

    for path in search_paths:
        _full_path = os.path.join(path, name)
        logger.debug("jinja2_render_file trying file %s", _full_path)
        if os.path.exists(_full_path):
            logger.debug("jinja2_render_file found file at %s", _full_path)
            try:
                return render_jinja2_file(_full_path,
                                          ctx,
                                          search_paths=search_paths)
            except Exception as e:
                raise CompileError(
                    "Jsonnet jinja2 failed to render {}: {}".format(
                        _full_path, e))

    raise IOError(
        "jinja2 failed to render, could not find file: {}".format(_full_path))
Example #5
    def compile_obj(self, comp_obj, ext_vars, **kwargs):
        """
        Expand globbed input paths, taking into account provided search paths
        and run compile_input_path() for each resolved input_path.
        kwargs are passed into compile_input_path()
        """
        input_type = comp_obj["input_type"]
        assert input_type == self.type_name

        # expand any globbed paths, taking into account provided search paths
        input_paths = []
        for input_path in comp_obj["input_paths"]:
            globbed_paths = [
                glob.glob(os.path.join(path, input_path))
                for path in self.search_paths
            ]
            inputs = list(itertools.chain.from_iterable(globbed_paths))
            if len(inputs) == 0:
                raise CompileError(
                    "Compile error: {} for target: {} not found in "
                    "search_paths: {}".format(input_path, ext_vars["target"],
                                              self.search_paths))
            input_paths.extend(inputs)

        for input_path in input_paths:
            self.compile_input_path(input_path, comp_obj, ext_vars, **kwargs)
Example #6
def regex_search(value, regex, *args, **kwargs):
    """Perform re.search and return the list of matches or a backref"""
    groups = list()
    for arg in args:
        if arg.startswith('\\g'):
            match = re.match(r'\\g<(\S+)>', arg).group(1)
            groups.append(match)
        elif arg.startswith('\\'):
            match = int(re.match(r'\\(\d+)', arg).group(1))
            groups.append(match)
        else:
            raise CompileError('Unknown argument')

    flags = 0
    if kwargs.get('ignorecase'):
        flags |= re.I
    if kwargs.get('multiline'):
        flags |= re.M

    match = re.search(regex, value, flags)
    if match:
        if not groups:
            return match.group()
        else:
            items = list()
            for item in groups:
                items.append(match.group(item))
            return items
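
A short sketch of how the backref arguments are resolved (input values are hypothetical): with no extra args the full match is returned, numeric backrefs like "\\1" select positional groups, and "\\g<name>" selects named groups.

regex_search("release-1.2.3", r"(\d+)\.(\d+)")                   # -> "1.2"
regex_search("release-1.2.3", r"(\d+)\.(\d+)", "\\1", "\\2")     # -> ["1", "2"]
regex_search("HOST=web01", r"host=(?P<name>\w+)", "\\g<name>", ignorecase=True)  # -> ["web01"]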
Example #7
def compile_target(target_obj, search_paths, compile_path, ref_controller,
                   **kwargs):
    """Compiles target_obj and writes to compile_path"""
    start = time.time()

    compile_objs = target_obj["compile"]
    ext_vars = target_obj["vars"]
    target_name = ext_vars["target"]

    jinja2_compiler = Jinja2(compile_path, search_paths, ref_controller)
    jsonnet_compiler = Jsonnet(compile_path, search_paths, ref_controller)
    kadet_compiler = Kadet(compile_path, search_paths, ref_controller)

    for comp_obj in compile_objs:
        input_type = comp_obj["input_type"]
        output_path = comp_obj["output_path"]
        if input_type == "jinja2":
            input_compiler = jinja2_compiler
        elif input_type == "jsonnet":
            input_compiler = jsonnet_compiler
        elif input_type == "kadet":
            input_compiler = kadet_compiler
        else:
            err_msg = "Invalid input_type: \"{}\". Supported input_types: jsonnet, jinja2, kadet"
            raise CompileError(err_msg.format(input_type))

        input_compiler.make_compile_dirs(target_name, output_path)
        input_compiler.compile_obj(comp_obj, ext_vars, **kwargs)

    logger.info("Compiled %s (%.2fs)", target_name, time.time() - start)
Example #8
    def compile_input_path(self, input_path, comp_obj, ext_vars, **kwargs):
        """
        Compile validated input_path in comp_obj
        kwargs are passed into compile_file()
        """
        target_name = ext_vars["target"]
        output_path = comp_obj["output_path"]
        output_type = comp_obj.get("output_type", self.default_output_type())
        prune_input = comp_obj.get("prune", kwargs.get("prune", False))

        logger.debug("Compiling %s", input_path)
        try:
            _compile_path = os.path.join(self.compile_path, target_name,
                                         output_path)
            self.compile_file(
                input_path,
                _compile_path,
                ext_vars,
                output=output_type,
                target_name=target_name,
                prune_input=prune_input,
                **kwargs,
            )
        except KapitanError as e:
            raise CompileError(
                "{}\nCompile error: failed to compile target: {}".format(
                    e, target_name))
Example #9
def render_jinja2_dir(path, context):
    """
    Render files in path with context
    Returns a dict where the key is the filename (with subpath)
    and value is a dict with content and mode
    Empty paths will not be rendered
    Ignores hidden files (.filename)
    """
    rendered = {}
    for root, _, files in os.walk(path):
        for f in files:
            if f.startswith('.'):
                logger.debug('render_jinja2_dir: ignoring file %s', f)
                continue
            render_path = os.path.join(root, f)
            logger.debug("render_jinja2_dir rendering %s", render_path)
            # get subpath and filename, strip any leading/trailing /
            name = render_path[len(os.path.commonprefix([root, path])):].strip('/')
            try:
                rendered[name] = {"content": render_jinja2_file(render_path, context),
                                  "mode": file_mode(render_path)
                                 }
            except Exception as e:
                logger.error("Jinja2 error: failed to render %s: %s", render_path, str(e))
                raise CompileError(e)
    return rendered
Example #10
def strftime(string_format, second=None):
    """return current date string for format. See https://docs.python.org/3/library/time.html#time.strftime for format"""
    if second is not None:
        try:
            second = int(second)
        except Exception:
            raise CompileError("Invalid value for epoch value ({})".format(second))
    return time.strftime(string_format, time.localtime(second))
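
A quick sketch of the helper above (results depend on the local timezone and current time, so the outputs shown are only indicative):

strftime("%Y-%m-%d")               # current local date, e.g. "2021-05-04"
strftime("%d/%m/%Y %H:%M", 0)      # the Unix epoch rendered in local time
strftime("%Y", "not-a-number")     # raises CompileError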
Example #11
    def need(self, key, msg="key and value needed"):
        """
        requires that key is set in self.kwargs
        errors with msg if key not set
        """
        err_msg = '{}: "{}": {}'.format(self.__class__.__name__, key, msg)
        if key not in self.kwargs:
            raise CompileError(err_msg)
Example #12
def jsonnet_file(file_path, **kwargs):
    """
    Evaluate file_path jsonnet file.
    kwargs are documented in http://jsonnet.org/implementation/bindings.html
    """
    try:
        return jsonnet.evaluate_file(file_path, **kwargs)
    except Exception as e:
        raise CompileError("Jsonnet error: failed to compile {}:\n {}".format(file_path, e))
Example #13
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
    """
    generates the hashes for the inventory per target and jsonnet/jinja2 folders for caching purposes
    struct: {
        inventory:
            <target>:
                classes: <sha256>
                parameters: <sha256>
        folder:
            components: <sha256>
            docs: <sha256>
            lib: <sha256>
            scripts: <sha256>
            ...
    }
    """
    inv = inventory_reclass(inventory_path)
    cached.inv_cache = {}
    cached.inv_cache['inventory'] = {}
    cached.inv_cache['folder'] = {}

    if targets:
        for target in targets:
            try:
                cached.inv_cache['inventory'][target] = {}
                cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
                cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])
            except KeyError as e:
                raise CompileError("target not found: {}".format(target))
    else:
        for target in inv['nodes']:
            cached.inv_cache['inventory'][target] = {}
            cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
            cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])

            compile_obj = inv['nodes'][target]['parameters']['kapitan']['compile']
            for obj in compile_obj:
                for input_path in obj['input_paths']:
                    base_folder = os.path.dirname(input_path).split('/')[0]
                    if base_folder == '':
                        base_folder = os.path.basename(input_path).split('/')[0]

                    if base_folder not in cached.inv_cache['folder'].keys():
                        if os.path.exists(base_folder) and os.path.isdir(base_folder):
                            cached.inv_cache['folder'][base_folder] = directory_hash(base_folder)

                # Cache additional folders set by --cache-paths
                for path in cache_paths:
                    if path not in cached.inv_cache['folder'].keys():
                        if os.path.exists(path) and os.path.isdir(path):
                            cached.inv_cache['folder'][path] = directory_hash(path)

        # Most commonly changed but not referenced in input_paths
        for common in ('lib', 'vendor', 'secrets'):
            if common not in cached.inv_cache['folder'].keys():
                if os.path.exists(common) and os.path.isdir(common):
                    cached.inv_cache['folder'][common] = directory_hash(common)
Example #14
def compile_target(target_obj,
                   search_paths,
                   compile_path,
                   ref_controller,
                   globals_cached=None,
                   **kwargs):
    """Compiles target_obj and writes to compile_path"""
    start = time.time()
    compile_objs = target_obj["compile"]
    ext_vars = target_obj["vars"]
    target_name = ext_vars["target"]

    if globals_cached:
        cached.from_dict(globals_cached)

    for comp_obj in compile_objs:
        input_type = comp_obj["input_type"]
        output_path = comp_obj["output_path"]

        if input_type == "jinja2":
            input_compiler = Jinja2(compile_path, search_paths, ref_controller)
            if "input_params" in comp_obj:
                input_compiler.set_input_params(comp_obj["input_params"])
        elif input_type == "jsonnet":
            input_compiler = Jsonnet(compile_path, search_paths,
                                     ref_controller)
        elif input_type == "kadet":
            input_compiler = Kadet(compile_path, search_paths, ref_controller)
            if "input_params" in comp_obj:
                input_compiler.set_input_params(comp_obj["input_params"])
        elif input_type == "helm":
            input_compiler = Helm(compile_path, search_paths, ref_controller,
                                  comp_obj)
        elif input_type == "copy":
            ignore_missing = comp_obj.get("ignore_missing", False)
            input_compiler = Copy(compile_path, search_paths, ref_controller,
                                  ignore_missing)
        elif input_type == "remove":
            input_compiler = Remove(compile_path, search_paths, ref_controller)
        elif input_type == "external":
            input_compiler = External(compile_path, search_paths,
                                      ref_controller)
            if "args" in comp_obj:
                input_compiler.set_args(comp_obj["args"])
            if "env_vars" in comp_obj:
                input_compiler.set_env_vars(comp_obj["env_vars"])
        else:
            err_msg = 'Invalid input_type: "{}". Supported input_types: jsonnet, jinja2, kadet, helm, copy, remove, external'
            raise CompileError(err_msg.format(input_type))

        # logger.info("about to compile %s ", target_obj["target_full_path"])
        input_compiler.make_compile_dirs(target_name, output_path)
        input_compiler.compile_obj(comp_obj, ext_vars, **kwargs)

    logger.info("Compiled %s (%.2fs)", target_obj["target_full_path"],
                time.time() - start)
Example #15
def jsonnet_file(file_path, **kwargs):
    """
    Evaluate file_path jsonnet file.
    kwargs are documented in http://jsonnet.org/implementation/bindings.html
    """
    try:
        return jsonnet.evaluate_file(file_path, **kwargs)
    except Exception as e:
        logger.error("Jsonnet error: failed to compile %s:\n %s", file_path, str(e))
        raise CompileError(e)
Example #16
def compile_target(target_obj, search_paths, compile_path, ref_controller,
                   **kwargs):
    """Compiles target_obj and writes to compile_path"""
    start = time.time()
    compile_objs = target_obj["compile"]
    ext_vars = target_obj["vars"]
    target_name = ext_vars["target"]

    for comp_obj in compile_objs:
        input_type = comp_obj["input_type"]
        output_path = comp_obj["output_path"]

        if input_type == "jinja2":
            input_compiler = Jinja2(compile_path, search_paths, ref_controller)
        elif input_type == "jsonnet":
            input_compiler = Jsonnet(compile_path, search_paths,
                                     ref_controller)
        elif input_type == "kadet":
            input_compiler = Kadet(compile_path, search_paths, ref_controller)
            if "input_params" in comp_obj:
                input_compiler.set_input_params(comp_obj["input_params"])
        elif input_type == "helm":
            input_compiler = Helm(compile_path, search_paths, ref_controller)
            if "helm_values" in comp_obj:
                input_compiler.dump_helm_values(comp_obj["helm_values"])
            if "helm_params" in comp_obj:
                input_compiler.set_helm_params(comp_obj["helm_params"])
            if "helm_values_files" in comp_obj:
                input_compiler.set_helm_values_files(
                    comp_obj["helm_values_files"])
            if "kube_version" in comp_obj:
                input_compiler.set_kube_version(comp_obj["kube_version"])
        elif input_type == "copy":
            input_compiler = Copy(compile_path, search_paths, ref_controller)
        elif input_type == "remove":
            input_compiler = Remove(compile_path, search_paths, ref_controller)
        elif input_type == "external":
            input_compiler = External(compile_path, search_paths,
                                      ref_controller)
            if "args" in comp_obj:
                input_compiler.set_args(comp_obj["args"])
            if "env_vars" in comp_obj:
                input_compiler.set_env_vars(comp_obj["env_vars"])
        else:
            err_msg = 'Invalid input_type: "{}". Supported input_types: jsonnet, jinja2, kadet, helm, copy, remove, external'
            raise CompileError(err_msg.format(input_type))

        input_compiler.make_compile_dirs(target_name, output_path)
        input_compiler.compile_obj(comp_obj, ext_vars, **kwargs)

    logger.info("Compiled %s (%.2fs)", target_obj["target_full_path"],
                time.time() - start)
Example #17
def changed_targets(inventory_path, output_path):
    """returns a list of targets that have changed since last compilation"""
    targets = []
    inv = inventory_reclass(inventory_path)

    saved_inv_cache = None
    saved_inv_cache_path = os.path.join(output_path, "compiled/.kapitan_cache")
    if os.path.exists(saved_inv_cache_path):
        with open(saved_inv_cache_path, "r") as f:
            try:
                saved_inv_cache = yaml.safe_load(f)
            except Exception:
                raise CompileError("Failed to load kapitan cache: %s",
                                   saved_inv_cache_path)

    targets_list = list(inv['nodes'])

    # If .kapitan_cache doesn't exist or failed to load, recompile all targets
    if not saved_inv_cache:
        return targets_list
    else:
        for key, hash in cached.inv_cache['folder'].items():
            try:
                if hash != saved_inv_cache['folder'][key]:
                    logger.debug(
                        "%s folder hash changed, recompiling all targets", key)
                    return targets_list
            except KeyError:
                # Errors usually occur when saved_inv_cache doesn't contain a new folder
                # Recompile anyway to be safe
                return targets_list

        for target in targets_list:
            try:
                if cached.inv_cache['inventory'][target][
                        'classes'] != saved_inv_cache['inventory'][target][
                            'classes']:
                    logger.debug("classes hash changed in %s, recompiling",
                                 target)
                    targets.append(target)
                elif cached.inv_cache['inventory'][target][
                        'parameters'] != saved_inv_cache['inventory'][target][
                            'parameters']:
                    logger.debug("parameters hash changed in %s, recompiling",
                                 target)
                    targets.append(target)
            except KeyError:
                # Errors usually occur when saved_inv_cache doesn't contain a new target
                # Recompile anyway to be safe
                targets.append(target)

    return targets
Example #18
def yaml_load(search_paths, name):
    """returns content of yaml file as json string"""
    for path in search_paths:
        _full_path = os.path.join(path, name)
        logger.debug("yaml_load trying file %s", _full_path)
        if os.path.exists(_full_path) and (name.endswith(".yml") or name.endswith(".yaml")):
            logger.debug("yaml_load found file at %s", _full_path)
            try:
                with open(_full_path) as f:
                    return json.dumps(yaml.safe_load(f.read()))
            except Exception as e:
                raise CompileError("Parse yaml failed to parse {}: {}".format(_full_path, e))

    raise IOError("could not find any input yaml file: {}".format(_full_path))
Example #19
def yaml_load_stream(search_paths, name):
    """returns contents of yaml file as generator"""
    for path in search_paths:
        _full_path = os.path.join(path, name)
        logger.debug("yaml_load_stream trying file %s", _full_path)
        if os.path.exists(_full_path) and (name.endswith(".yml") or name.endswith(".yaml")):
            logger.debug("yaml_load_stream found file at %s", _full_path)
            try:
                with open(_full_path) as f:
                    _obj = yaml.load_all(f.read(), Loader=yaml.SafeLoader)
                    return json.dumps(list(_obj))
            except Exception as e:
                raise CompileError("Parse yaml failed to parse {}: {}".format(_full_path, e))

    raise IOError("could not find any input yaml file: {}".format(_full_path))
Example #20
def go_jsonnet_file(file_path, **kwargs):
    """
    Evaluate file_path jsonnet file using gojsonnet.
    kwargs are documented in http://jsonnet.org/implementation/bindings.html
    """
    try:
        if "_gojsonnet" not in sys.modules:
            import _gojsonnet
        return sys.modules["_gojsonnet"].evaluate_file(file_path, **kwargs)
    except ImportError:
        logger.info(
            "Note: Go-jsonnet is not installed or running on an unsupported architecture."
            " See https://kapitan.dev/compile/#jsonnet"
        )
    except Exception as e:
        raise CompileError(f"Jsonnet error: failed to compile {file_path}:\n {e}")
Example #21
def compile_target(target_obj, search_paths, compile_path, ref_controller, inventory_path, **kwargs):
    """Compiles target_obj and writes to compile_path"""
    start = time.time()
    compile_objs = target_obj["compile"]
    ext_vars = target_obj["vars"]
    target_name = ext_vars["target"]

    jinja2_compiler = Jinja2(compile_path, search_paths, ref_controller, inventory_path)
    jsonnet_compiler = Jsonnet(compile_path, search_paths, ref_controller)
    kadet_compiler = Kadet(compile_path, search_paths, ref_controller)
    helm_compiler = Helm(compile_path, search_paths, ref_controller)
    copy_compiler = Copy(compile_path, search_paths, ref_controller)
    remove_compiler = Remove(compile_path, search_paths, ref_controller)

    for comp_obj in compile_objs:
        input_type = comp_obj["input_type"]
        output_path = comp_obj["output_path"]
        if input_type == "jinja2":
            input_compiler = jinja2_compiler
        elif input_type == "jsonnet":
            input_compiler = jsonnet_compiler
        elif input_type == "kadet":
            input_compiler = kadet_compiler
            if "input_params" in comp_obj:
                kadet_compiler.set_input_params(comp_obj["input_params"])
        elif input_type == "helm":
            if "helm_values" in comp_obj:
                helm_compiler.dump_helm_values(comp_obj["helm_values"])
            if "helm_params" in comp_obj:
                helm_compiler.set_helm_params(comp_obj["helm_params"])
            if "helm_values_files" in comp_obj:
                helm_compiler.set_helm_values_files(comp_obj["helm_values_files"])
            input_compiler = helm_compiler
        elif input_type == "copy":
            input_compiler = copy_compiler
        elif input_type == "remove":
            input_compiler = remove_compiler
        else:
            err_msg = (
                'Invalid input_type: "{}". Supported input_types: jsonnet, jinja2, kadet, helm, copy, remove'
            )
            raise CompileError(err_msg.format(input_type))

        input_compiler.make_compile_dirs(target_name, output_path)
        input_compiler.compile_obj(comp_obj, ext_vars, **kwargs)

    logger.info("Compiled %s (%.2fs)", target_obj["target_full_path"], time.time() - start)
Example #22
def render_jinja2(path,
                  context,
                  jinja2_filters=defaults.DEFAULT_JINJA2_FILTERS_PATH,
                  search_paths=None):
    """
    Render files in path with context
    Returns a dict where the key is the filename (with subpath)
    and value is a dict with content and mode
    Empty paths will not be rendered
    Path can be a single file or directory
    Ignores hidden files (.filename)
    """
    rendered = {}
    walk_root_files = []
    if os.path.isfile(path):
        dirname = os.path.dirname(path)
        basename = os.path.basename(path)
        walk_root_files = [(dirname, None, [basename])]
    else:
        walk_root_files = os.walk(path)

    for root, _, files in walk_root_files:
        for f in files:
            if f.startswith("."):
                logger.debug("render_jinja2: ignoring file %s", f)
                continue
            render_path = os.path.join(root, f)
            logger.debug("render_jinja2 rendering %s", render_path)
            # get subpath and filename, strip any leading/trailing /
            name = render_path[len(os.path.commonprefix([root, path])):].strip(
                "/")
            try:
                rendered[name] = {
                    "content":
                    render_jinja2_file(render_path,
                                       context,
                                       jinja2_filters=jinja2_filters,
                                       search_paths=search_paths),
                    "mode":
                    file_mode(render_path),
                }
            except Exception as e:
                raise CompileError(
                    f"Jinja2 error: failed to render {render_path}: {e}")

    return rendered
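
A sketch of the returned structure, assuming a hypothetical templates directory and context: each key is the file's subpath and each value carries the rendered content plus the original file mode.

rendered = render_jinja2("templates/scripts", {"target": "prod"})  # hypothetical inputs
for name, item in rendered.items():
    print(name, oct(item["mode"]), len(item["content"]))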
Example #23
def jinja2_render_file(search_path, name, ctx):
    """
    Render jinja2 file name with context ctx.
    search_path is used to find the file name
    as there is a limitation with jsonnet's native_callback approach:
    one can't access the current directory being evaluated
    """
    ctx = json.loads(ctx)
    _full_path = os.path.join(search_path, name)
    logger.debug("jinja2_render_file trying file %s", _full_path)
    try:
        if os.path.exists(_full_path):
            logger.debug("jinja2_render_file found file at %s", _full_path)
            return render_jinja2_file(_full_path, ctx)
        else:
            raise IOError("Could not find file %s" % name)
    except Exception as e:
        logger.error("Jsonnet jinja2 failed to render %s: %s", _full_path, str(e))
        raise CompileError(e)
Example #24
    def update_root(self, file_path):
        """
        update self.root with YAML/JSON content in file_path
        raises CompileError if file_path does not end with .yaml, .yml or .json
        """
        with open(file_path) as fp:
            if file_path.endswith(".yaml") or file_path.endswith(".yml"):
                yaml_obj = yaml.safe_load(fp)
                _copy = dict(self.root)
                _copy.update(yaml_obj)
                self.root = Dict(_copy)

            elif file_path.endswith(".json"):
                json_obj = json.load(fp)
                _copy = dict(self.root)
                _copy.update(json_obj)
                self.root = Dict(_copy)
            else:
                raise CompileError("file_path is neither JSON or YAML: {}".format(file_path))
Example #25
def render_jinja2(path, context):
    """
    Render files in path with context
    Returns a dict where the key is the filename (with subpath)
    and value is a dict with content and mode
    Empty paths will not be rendered
    Path can be a single file or directory
    Ignores hidden files (.filename)
    """
    rendered = {}
    walk_root_files = []
    if os.path.isfile(path):
        dirname = os.path.dirname(path)
        basename = os.path.basename(path)
        walk_root_files = [(dirname, None, [basename])]
    else:
        walk_root_files = os.walk(path)

    for root, _, files in walk_root_files:
        for f in files:
            if f.startswith('.'):
                logger.debug('render_jinja2: ignoring file %s', f)
                continue
            render_path = os.path.join(root, f)
            logger.debug("render_jinja2 rendering %s", render_path)
            # get subpath and filename, strip any leading/trailing /
            name = render_path[len(os.path.commonprefix([root, path])):].strip(
                '/')
            try:
                rendered[name] = {
                    "content": render_jinja2_file(render_path, context),
                    "mode": file_mode(render_path)
                }
            except Exception as e:
                raise CompileError(
                    "Jinja2 error: failed to render {}: {}".format(
                        render_path, e))

    return rendered
Example #26
def render_jinja2_file(name,
                       context,
                       jinja2_filters=defaults.DEFAULT_JINJA2_FILTERS_PATH,
                       search_paths=None):
    """Render jinja2 file name with context"""
    path, filename = os.path.split(name)
    search_paths = [path or "./"] + (search_paths or [])
    env = jinja2.Environment(
        undefined=jinja2.StrictUndefined,
        loader=jinja2.FileSystemLoader(search_paths),
        trim_blocks=True,
        lstrip_blocks=True,
        extensions=["jinja2.ext.do"],
    )
    load_jinja2_filters(env)
    load_jinja2_filters_from_file(env, jinja2_filters)
    try:
        return env.get_template(filename).render(context)
    except jinja2.TemplateError as e:
        # Exception misses the line number info. Retrieve it from the traceback
        err_info = _jinja_error_info(traceback.extract_tb(sys.exc_info()[2]))
        raise CompileError(
            f"Jinja2 TemplateError: {e}, at {err_info[0]}:{err_info[1]}")
Example #27
def compile_target(target_obj, search_paths, compile_path, **kwargs):
    """Compiles target_obj and writes to compile_path"""
    start = time.time()

    ext_vars = target_obj["vars"]
    target_name = ext_vars["target"]
    compile_obj = target_obj["compile"]

    for obj in compile_obj:
        input_type = obj["input_type"]
        input_paths = obj["input_paths"]
        output_path = obj["output_path"]

        if input_type == "jsonnet":
            _compile_path = os.path.join(compile_path, target_name,
                                         output_path)
            # support writing to an already existent dir
            try:
                os.makedirs(_compile_path)
            except OSError as ex:
                # if the directory already exists, continue; re-raise anything else
                if ex.errno != errno.EEXIST:
                    raise

            output_type = obj[
                "output_type"]  # output_type is mandatory in jsonnet
            for input_path in input_paths:
                jsonnet_file_found = False
                for path in search_paths:
                    compile_file_sp = os.path.join(path, input_path)
                    if os.path.exists(compile_file_sp):
                        jsonnet_file_found = True
                        logger.debug("Compiling %s", compile_file_sp)
                        try:
                            compile_jsonnet(compile_file_sp,
                                            _compile_path,
                                            search_paths,
                                            ext_vars,
                                            output=output_type,
                                            target_name=target_name,
                                            **kwargs)
                        except CompileError as e:
                            logger.error(
                                "Compile error: failed to compile target: %s",
                                target_name)
                            raise e

                if not jsonnet_file_found:
                    logger.error(
                        "Compile error: %s for target: %s not found in " +
                        "search_paths: %s", input_path, target_name,
                        search_paths)
                    raise CompileError()

        if input_type == "jinja2":
            _compile_path = os.path.join(compile_path, target_name,
                                         output_path)
            # support writing to an already existent dir
            try:
                os.makedirs(_compile_path)
            except OSError as ex:
                # if the directory already exists, continue; re-raise anything else
                if ex.errno != errno.EEXIST:
                    raise
            for input_path in input_paths:
                jinja2_file_found = False
                for path in search_paths:
                    compile_path_sp = os.path.join(path, input_path)
                    if os.path.exists(compile_path_sp):
                        jinja2_file_found = True
                        # copy ext_vars to dedicated jinja2 context so we can update it
                        ctx = ext_vars.copy()
                        ctx["inventory"] = inventory(search_paths, target_name)
                        ctx["inventory_global"] = inventory(search_paths, None)
                        try:
                            compile_jinja2(compile_path_sp,
                                           ctx,
                                           _compile_path,
                                           target_name=target_name,
                                           **kwargs)
                        except CompileError as e:
                            logger.error(
                                "Compile error: failed to compile target: %s",
                                target_name)
                            raise e

                if not jinja2_file_found:
                    logger.error(
                        "Compile error: %s for target: %s not found in " +
                        "search_paths: %s", input_path, target_name,
                        search_paths)
                    raise CompileError()

    logger.info("Compiled %s (%.2fs)", target_name, time.time() - start)
Example #28
def compile_targets(inventory_path, search_paths, output_path, parallel,
                    targets, labels, ref_controller, **kwargs):
    """
    Searches and loads target files, and runs compile_target() on a
    multiprocessing pool with parallel number of processes.
    kwargs are passed to compile_target()
    """
    # temp_path will hold compiled items
    temp_path = tempfile.mkdtemp(suffix=".kapitan")

    updated_targets = targets
    try:
        updated_targets = search_targets(inventory_path, targets, labels)
    except CompileError as e:
        logger.error(e)
        sys.exit(1)

    # If --cache is set
    if kwargs.get("cache"):
        additional_cache_paths = kwargs.get("cache_paths")
        generate_inv_cache_hashes(inventory_path, targets,
                                  additional_cache_paths)

        if not targets:
            updated_targets = changed_targets(inventory_path, output_path)
            logger.debug("Changed targets since last compilation: %s",
                         updated_targets)
            if len(updated_targets) == 0:
                logger.info("No changes since last compilation.")
                return

    pool = multiprocessing.Pool(parallel)

    try:
        target_objs = load_target_inventory(inventory_path, updated_targets)

        # append "compiled" to output_path so we can safely overwrite it
        compile_path = os.path.join(output_path, "compiled")
        worker = partial(
            compile_target,
            search_paths=search_paths,
            compile_path=temp_path,
            ref_controller=ref_controller,
            **kwargs,
        )

        if not target_objs:
            raise CompileError("Error: no targets found")

        if kwargs.get("fetch_dependencies", False):
            fetch_dependencies(target_objs, pool)

        # compile_target() returns None on success
        # so p is only not None when raising an exception
        [p.get() for p in pool.imap_unordered(worker, target_objs) if p]

        os.makedirs(compile_path, exist_ok=True)

        # if '-t' is set on compile or only a few changed, only override selected targets
        if updated_targets:
            for target in updated_targets:
                compile_path_target = os.path.join(compile_path, target)
                temp_path_target = os.path.join(temp_path, target)

                os.makedirs(compile_path_target, exist_ok=True)

                shutil.rmtree(compile_path_target)
                shutil.copytree(temp_path_target, compile_path_target)
                logger.debug("Copied %s into %s", temp_path_target,
                             compile_path_target)
        # otherwise override all targets
        else:
            shutil.rmtree(compile_path)
            shutil.copytree(temp_path, compile_path)
            logger.debug("Copied %s into %s", temp_path, compile_path)

        # validate the compiled outputs
        if kwargs.get("validate", False):
            validate_map = create_validate_mapping(target_objs, compile_path)
            worker = partial(schema_validate_kubernetes_output,
                             cache_dir=kwargs.get("schemas_path", "./schemas"))
            [
                p.get()
                for p in pool.imap_unordered(worker, validate_map.items()) if p
            ]

        # Save inventory and folders cache
        save_inv_cache(compile_path, targets)
        pool.close()

    except ReclassException as e:
        if isinstance(e, NotFoundError):
            logger.error("Inventory reclass error: inventory not found")
        else:
            logger.error("Inventory reclass error: %s", e.message)
        raise InventoryError(e.message)
    except Exception as e:
        # if compile worker fails, terminate immediately
        pool.terminate()
        logger.debug("Compile pool terminated")
        # only print traceback for errors we don't know about
        if not isinstance(e, KapitanError):
            logger.exception("Unknown (Non-Kapitan) Error occurred")

        logger.error("\n")
        logger.error(e)
        sys.exit(1)
    finally:
        # always wait for other worker processes to terminate
        pool.join()
        shutil.rmtree(temp_path)
        logger.debug("Removed %s", temp_path)
Example #29
def compile_targets(inventory_path, search_paths, output_path, parallel, targets, ref_controller, **kwargs):
    """
    Searches and loads target files, and runs compile_target() on a
    multiprocessing pool with parallel number of processes.
    kwargs are passed to compile_target()
    """
    # temp_path will hold compiled items
    temp_path = tempfile.mkdtemp(suffix='.kapitan')

    updated_targets = targets
    # If --cache is set
    if kwargs.get('cache'):
        additional_cache_paths = kwargs.get('cache_paths')
        generate_inv_cache_hashes(inventory_path, targets, additional_cache_paths)

        if not targets:
            updated_targets = changed_targets(inventory_path, output_path)
            logger.debug("Changed targets since last compilation: %s", updated_targets)
            if len(updated_targets) == 0:
                logger.info("No changes since last compilation.")
                return

    target_objs = load_target_inventory(inventory_path, updated_targets)

    pool = multiprocessing.Pool(parallel)
    # append "compiled" to output_path so we can safely overwrite it
    compile_path = os.path.join(output_path, "compiled")
    worker = partial(compile_target, search_paths=search_paths, compile_path=temp_path, ref_controller=ref_controller,
                     **kwargs)

    try:
        if target_objs == []:
            raise CompileError("Error: no targets found")
        # compile_target() returns None on success
        # so p is only not None when raising an exception
        [p.get() for p in pool.imap_unordered(worker, target_objs) if p]

        if not os.path.exists(compile_path):
            os.makedirs(compile_path)

        # if '-t' is set on compile or only a few changed, only override selected targets
        if updated_targets:
            for target in updated_targets:
                compile_path_target = os.path.join(compile_path, target)
                temp_path_target = os.path.join(temp_path, target)

                if not os.path.exists(compile_path_target):
                    os.makedirs(compile_path_target)

                shutil.rmtree(compile_path_target)
                shutil.copytree(temp_path_target, compile_path_target)
                logger.debug("Copied %s into %s", temp_path_target, compile_path_target)
        # otherwise override all targets
        else:
            shutil.rmtree(compile_path)
            shutil.copytree(temp_path, compile_path)
            logger.debug("Copied %s into %s", temp_path, compile_path)

        # Save inventory and folders cache
        save_inv_cache(compile_path, targets)

    except Exception as e:
        # if compile worker fails, terminate immediately
        pool.terminate()
        pool.join()
        logger.debug("Compile pool terminated")
        # only print traceback for errors we don't know about
        if not isinstance(e, KapitanError):
            logger.error("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            traceback.print_exc()

        logger.error("\n")
        logger.error(e)
        sys.exit(1)
    finally:
        shutil.rmtree(temp_path)
        logger.debug("Removed %s", temp_path)