Example #1
0
def secret_update(args, ref_controller):
    """Update a secret's gpg recipients (gpg:) or KMS key (gkms:/awskms:).

    The token to update comes from ``args.update`` and must be prefixed with
    its backend ("gpg:", "gkms:" or "awskms:"). When ``--target`` is given,
    recipients/keys are read from the target's inventory instead of the CLI.
    """
    # TODO --update *might* mean something else for other types

    def _inventory_secret_param(backend, param):
        # Look up parameters.kapitan.secrets.<backend>.<param> for --target.
        # Shared by all three backend branches below.
        inv = inventory_reclass(args.inventory_path)
        kap_inv_params = inv['nodes'][args.target_name]['parameters']['kapitan']
        if 'secrets' not in kap_inv_params:
            raise KapitanError("parameters.kapitan.secrets not defined in {}".format(args.target_name))
        return kap_inv_params['secrets'][backend][param]

    token_name = args.update
    if token_name.startswith("gpg:"):
        # args.recipients is a list of names, convert to recipient dicts
        recipients = [{"name": name} for name in args.recipients]
        if args.target_name:
            # --target takes precedence over --recipients
            recipients = _inventory_secret_param('gpg', 'recipients')
        if not recipients:
            raise KapitanError("No GPG recipients specified. Use --recipients or specify them in " +
                               "parameters.kapitan.secrets.gpg.recipients and use --target")
        type_name, token_path = token_name.split(":")
        tag = '?{{gpg:{}}}'.format(token_path)
        secret_obj = ref_controller[tag]
        secret_obj.update_recipients(recipients)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("gkms:"):
        key = args.key
        if args.target_name:
            key = _inventory_secret_param('gkms', 'key')
        if not key:
            raise KapitanError("No KMS key specified. Use --key or specify it in parameters.kapitan.secrets.gkms.key and use --target")
        type_name, token_path = token_name.split(":")
        tag = '?{{gkms:{}}}'.format(token_path)
        secret_obj = ref_controller[tag]
        secret_obj.update_key(key)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("awskms:"):
        key = args.key
        if args.target_name:
            key = _inventory_secret_param('awskms', 'key')
        if not key:
            raise KapitanError("No KMS key specified. Use --key or specify it in parameters.kapitan.secrets.awskms.key and use --target")
        type_name, token_path = token_name.split(":")
        tag = '?{{awskms:{}}}'.format(token_path)
        secret_obj = ref_controller[tag]
        secret_obj.update_key(key)
        ref_controller[tag] = secret_obj

    else:
        fatal_error("Invalid token: {name}. Try using gpg/gkms/awskms:{name}".format(name=token_name))
Example #2
0
File: cli.py  Project: zhenyulin/kapitan
def secret_write(args, ref_controller):
    """Write a secret to ref_controller based on cli args.

    Reads the secret data from ``--file`` (``-`` means stdin), then stores it
    under the token in ``args.write`` via the matching backend (gpg or gkms).
    """
    token_name = args.write
    file_name = args.file

    if file_name is None:
        fatal_error('--file is required with --write')
    if file_name == '-':
        # '-' means read the secret data from stdin
        data = sys.stdin.read()
    else:
        with open(file_name) as fp:
            data = fp.read()

    if token_name.startswith("gpg:"):
        type_name, token_path = token_name.split(":")
        # args.recipients is a list of names, convert to recipient dicts
        recipients = [{"name": name} for name in args.recipients]
        if args.target_name:
            inv = inventory_reclass(args.inventory_path)
            try:
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['gpg']['recipients']
            except KeyError:
                # TODO: Keeping gpg recipients backwards-compatible until we make a breaking release
                logger.warning(
                    "WARNING: parameters.kapitan.secrets.recipients is deprecated, "
                    "please move them to parameters.kapitan.secrets.gpg.recipients")
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
        secret_obj = GPGSecret(data, recipients, encode_base64=args.base64)
        tag = '?{{gpg:{}}}'.format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("gkms:"):
        type_name, token_path = token_name.split(":")
        key = args.key
        if args.target_name:
            inv = inventory_reclass(args.inventory_path)
            key = inv['nodes'][args.target_name]['parameters']['kapitan'][
                'secrets']['gkms']['key']
        if not key:
            raise KapitanError(
                "No KMS key specified. Use --key or specify in parameters.kapitan.secrets.gkms.key and use --target"
            )
        secret_obj = GoogleKMSSecret(data, key, encode_base64=args.base64)
        tag = '?{{gkms:{}}}'.format(token_path)
        ref_controller[tag] = secret_obj
    else:
        fatal_error("Invalid token: {}".format(token_name))
Example #3
0
File: cli.py  Project: zhenyulin/kapitan
def secret_update(args, ref_controller):
    """Update recipients of the gpg secret named by ``args.update``."""
    # TODO --update *might* mean something else for other types
    token_name = args.update
    if token_name.startswith("gpg:"):
        # args.recipients is a list of names, convert to recipient dicts
        recipients = [{"name": name} for name in args.recipients]
        if args.target_name:
            inv = inventory_reclass(args.inventory_path)
            # BUG FIX: this try/except was dedented outside the `if` above,
            # so running without --target raised NameError on `inv`.
            try:
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['gpg']['recipients']
            except KeyError:
                # TODO: Keeping gpg recipients backwards-compatible until we make a breaking release
                logger.warning(
                    "WARNING: parameters.kapitan.secrets.recipients is deprecated, "
                    "please move them to parameters.kapitan.secrets.gpg.recipients")
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
        type_name, token_path = token_name.split(":")
        tag = '?{{gpg:{}}}'.format(token_path)
        secret_obj = ref_controller[tag]
        secret_obj.update_recipients(recipients)
        ref_controller[tag] = secret_obj
    # TODO: Implement --update for KMS
    else:
        fatal_error("Invalid token: {}".format(token_name))
Example #4
0
def search_targets(inventory_path, targets, labels):
    """Return the targets whose kapitan labels match all given labels.

    With no labels, the original targets list is returned unchanged.
    """
    if not labels:
        return targets

    try:
        wanted = dict(item.split("=") for item in labels)
    except ValueError:
        raise CompileError(
            "Compile error: Failed to parse labels, should be formatted like: kapitan compile -l env=prod app=example"
        )

    inventory = inventory_reclass(inventory_path)
    matches = []

    for node_name, node in inventory["nodes"].items():
        is_match = True
        for label, value in wanted.items():
            try:
                if node["parameters"]["kapitan"]["labels"][label] != value:
                    is_match = False
                    break
            except KeyError:
                logger.debug("search_targets: label %s=%s didn't match target %s", label, value, node_name)
                is_match = False
                break
        if is_match:
            matches.append(node_name)

    if not matches:
        raise CompileError("No targets found with labels: {}".format(labels))

    return matches
Example #5
0
def load_target_inventory(inventory_path, targets, ignore_class_notfound=False):
    """Return a list of valid kapitan target objects from the inventory."""
    inv = inventory_reclass(inventory_path, ignore_class_notfound)

    # if '-t' is set on compile, only loop through selected targets
    names = targets if targets else inv["nodes"]

    target_objs = []
    for name in names:
        try:
            node = inv["nodes"][name]
            obj = node["parameters"]["kapitan"]
            obj["target_full_path"] = node["__reclass__"]["node"].replace("./", "")
            # compile obj is only mandatory when classes must resolve
            valid_target_obj(obj, not ignore_class_notfound)
            validate_matching_target_name(name, obj, inventory_path)
            logger.debug(
                "load_target_inventory: found valid kapitan target %s", name)
            target_objs.append(obj)
        except KeyError:
            logger.debug(
                "load_target_inventory: target %s has no kapitan compile obj", name)

    return target_objs
Example #6
0
def load_target_inventory(inventory_path, targets):
    """Return a list of valid kapitan target objects from the inventory."""
    inv = inventory_reclass(inventory_path)

    # if '-t' is set on compile, only loop through selected targets
    names = targets if targets else inv['nodes']

    found = []
    for name in names:
        try:
            obj = inv['nodes'][name]['parameters']['kapitan']
            valid_target_obj(obj)
            logger.debug(
                "load_target_inventory: found valid kapitan target %s", name)
            found.append(obj)
        except KeyError:
            logger.debug(
                "load_target_inventory: target %s has no kapitan compile obj", name)

    return found
Example #7
0
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
    """
    generates the hashes for the inventory per target and jsonnet/jinja2 folders for caching purposes
    struct: {
        inventory:
            <target>:
                classes: <sha256>
                parameters: <sha256>
        folder:
            components: <sha256>
            docs: <sha256>
            lib: <sha256>
            scripts: <sha256>
            ...
    }
    """
    inv = inventory_reclass(inventory_path)
    cached.inv_cache = {}
    cached.inv_cache['inventory'] = {}
    cached.inv_cache['folder'] = {}

    def _hash_folder(folder):
        # Hash each folder at most once; silently skip non-existent paths
        if folder not in cached.inv_cache['folder']:
            if os.path.exists(folder) and os.path.isdir(folder):
                cached.inv_cache['folder'][folder] = directory_hash(folder)

    if targets:
        # Only hash the selected targets' inventory (no folder hashes here,
        # matching the original behavior of this branch)
        for target in targets:
            try:
                cached.inv_cache['inventory'][target] = {}
                cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
                cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])
            except KeyError:
                logger.error("'%s' target not found", target)
                raise
    else:
        for target in inv['nodes']:
            cached.inv_cache['inventory'][target] = {}
            cached.inv_cache['inventory'][target]['classes'] = dictionary_hash(inv['nodes'][target]['classes'])
            cached.inv_cache['inventory'][target]['parameters'] = dictionary_hash(inv['nodes'][target]['parameters'])

            # Hash the top-level folder of every compile input path
            compile_obj = inv['nodes'][target]['parameters']['kapitan']['compile']
            for obj in compile_obj:
                for input_path in obj['input_paths']:
                    base_folder = os.path.dirname(input_path).split('/')[0]
                    if base_folder == '':
                        base_folder = os.path.basename(input_path).split('/')[0]
                    _hash_folder(base_folder)

            # Cache additional folders set by --cache-paths.
            # Hoisted out of the per-compile-obj loop: each folder is hashed
            # only once anyway, so re-running it per obj was wasted work.
            for path in cache_paths:
                _hash_folder(path)

        # Most commonly changed but not referenced in input_paths
        for common in ('lib', 'vendor', 'secrets'):
            _hash_folder(common)
Example #8
0
def compile(config, cluster_id):
    """Compile the catalog for `cluster_id` and update the catalog repo.

    Orchestration only: sets up repos, resolves component versions from the
    kapitan inventory, fetches jsonnet dependencies, compiles with kapitan,
    postprocesses components and updates the catalog.
    NOTE(review): this function shadows the `compile` builtin.
    """
    # In local mode reuse the existing checkout; otherwise start from a
    # clean working tree.
    if config.local:
        catalog_repo = _local_setup(config, cluster_id, TARGET)
    else:
        clean_working_tree(config)
        catalog_repo = _regular_setup(config, cluster_id, TARGET)

    # Compile kapitan inventory to extract component versions. Component
    # versions are assumed to be defined in the inventory key
    # 'parameters.component_versions'
    reset_reclass_cache()
    kapitan_inventory = inventory_reclass("inventory")["nodes"][TARGET]
    versions = kapitan_inventory["parameters"].get("component_versions", None)
    if versions and not config.local:
        set_component_overrides(config, versions)
        update_target(config, TARGET)
    # Rebuild reclass inventory to use new version of components
    reset_reclass_cache()
    kapitan_inventory = inventory_reclass("inventory")["nodes"][TARGET]
    # Optional jsonnet libraries declared under parameters.commodore.jsonnet_libs
    jsonnet_libs = (kapitan_inventory["parameters"].get("commodore", {}).get(
        "jsonnet_libs", None))
    if jsonnet_libs and not config.local:
        fetch_jsonnet_libs(config, jsonnet_libs)

    if not config.local:
        write_jsonnetfile(config)
        fetch_jsonnet_libraries()

    clean_catalog(catalog_repo)

    # Generate Kapitan secret references from refs found in inventory
    # parameters
    update_refs(config, kapitan_inventory["parameters"])

    kapitan_compile(config, search_paths=["./vendor/"])

    postprocess_components(config, kapitan_inventory, TARGET,
                           config.get_components())

    update_catalog(config, TARGET, catalog_repo)

    click.secho("Catalog compiled! 🎉", bold=True)
Example #9
0
def changed_targets(inventory_path, output_path):
    """returns a list of targets that have changed since last compilation"""
    targets = []
    inv = inventory_reclass(inventory_path)

    # Load the inventory cache saved by the previous compile run, if any
    saved_inv_cache = None
    saved_inv_cache_path = os.path.join(output_path, "compiled/.kapitan_cache")
    if os.path.exists(saved_inv_cache_path):
        with open(saved_inv_cache_path, "r") as f:
            try:
                saved_inv_cache = yaml.safe_load(f)
            except Exception:
                # best-effort: a broken cache just forces a full recompile below
                logger.error("Failed to load kapitan cache: %s",
                             saved_inv_cache_path)

    targets_list = list(inv['nodes'])

    # If .kapitan_cache doesn't exist or failed to load, recompile all targets
    if not saved_inv_cache:
        return targets_list

    # Any changed folder hash (lib, components, ...) invalidates all targets
    for key, folder_hash in cached.inv_cache['folder'].items():
        try:
            if folder_hash != saved_inv_cache['folder'][key]:
                logger.debug(
                    "%s folder hash changed, recompiling all targets", key)
                return targets_list
        except KeyError:
            # Errors usually occur when saved_inv_cache doesn't contain a new folder
            # Recompile anyway to be safe
            return targets_list

    # Otherwise recompile only targets whose classes/parameters hash changed
    for target in targets_list:
        try:
            current = cached.inv_cache['inventory'][target]
            saved = saved_inv_cache['inventory'][target]
            if current['classes'] != saved['classes']:
                logger.debug("classes hash changed in %s, recompiling", target)
                targets.append(target)
            elif current['parameters'] != saved['parameters']:
                logger.debug("parameters hash changed in %s, recompiling",
                             target)
                targets.append(target)
        except KeyError:
            # Errors usually occur when saved_inv_cache doesn't contain a new target
            # Recompile anyway to be safe
            targets.append(target)

    return targets
Example #10
0
def compile(config, cluster_id):
    """Compile the catalog for `cluster_id` and update the catalog repo.

    Orchestration only: sets up repos, resolves component versions from the
    kapitan inventory, fetches jsonnet libs, compiles with kapitan,
    postprocesses components and updates the catalog.
    NOTE(review): shadows the `compile` builtin.
    """
    # In local mode reuse the existing checkout; otherwise start from a
    # clean working tree.
    if config.local:
        cluster, target_name, catalog_repo = _local_setup(config, cluster_id)
    else:
        clean_working_tree(config)
        cluster, target_name, catalog_repo = _regular_setup(config, cluster_id)

    # Compile kapitan inventory to extract component versions. Component
    # versions are assumed to be defined in the inventory key
    # 'parameters.component_versions'
    # NOTE(review): unlike the newer variant of this function, the reclass
    # cache is not reset before this first read — confirm that is intended.
    kapitan_inventory = inventory_reclass('inventory')['nodes'][target_name]
    versions = kapitan_inventory['parameters'].get('component_versions', None)
    if versions and not config.local:
        set_component_overrides(config, versions)
        update_target(config, cluster)
    # Rebuild reclass inventory to use new version of components
    reset_reclass_cache()
    kapitan_inventory = inventory_reclass('inventory')['nodes'][target_name]
    # Optional jsonnet libraries declared under parameters.commodore.jsonnet_libs
    jsonnet_libs = kapitan_inventory['parameters'].get('commodore', {}).get(
        'jsonnet_libs', None)
    if jsonnet_libs and not config.local:
        fetch_jsonnet_libs(config, jsonnet_libs)

    clean_catalog(catalog_repo)

    # Generate Kapitan secret references from refs found in inventory
    # parameters
    update_refs(config, kapitan_inventory['parameters'])

    kapitan_compile(config)

    postprocess_components(config, kapitan_inventory, target_name,
                           config.get_components())

    update_catalog(config, target_name, catalog_repo)

    click.secho('Catalog compiled! 🎉', bold=True)
Example #11
0
    def test_compile_vars_target_missing(self):
        """Deleting parameters.kapitan.vars.target must raise InventoryError."""
        inventory_path = "inventory"
        target_filename = "minikube-es"
        target_obj = inventory_reclass(inventory_path)["nodes"][target_filename]["parameters"]["kapitan"]
        # delete vars.target
        del target_obj["vars"]["target"]

        with self.assertRaises(InventoryError) as ie:
            validate_matching_target_name(target_filename, target_obj, inventory_path)

        error_message = (
            'Target missing: target "{}" is missing parameters.kapitan.vars.target\n'
            "This parameter should be set to the target name"
        )
        # BUG FIX: assertTrue(expr, msg) treats the second argument as the
        # failure message and passes for ANY truthy expr (a non-empty string
        # here), so the original assertion could never fail. Use assertEqual
        # to actually compare expected vs. raised message.
        self.assertEqual(error_message.format(target_filename), ie.exception.args[0])
Example #12
0
File: cli.py  Project: zhenyulin/kapitan
def secret_update_validate(args, ref_controller):
    """Validate and/or update gpg recipients of all target secrets.

    Scans ``args.secrets_path`` for per-target secret tokens; for each target,
    compares the secret's gpg fingerprints against the inventory recipients
    and either reports mismatches (--validate-targets, exit code 1) or
    rewrites the secret with the inventory fingerprints.
    Always terminates the process via sys.exit.
    """
    # update recipients for all secrets in secrets_path
    # use --secrets-path to set scanning path
    inv = inventory_reclass(args.inventory_path)
    targets = set(inv['nodes'].keys())
    secrets_path = os.path.abspath(args.secrets_path)
    target_token_paths = search_target_token_paths(secrets_path, targets)
    ret_code = 0
    ref_controller.register_backend(
        GPGBackend(secrets_path))  # override gpg backend for new secrets_path
    # TODO: Implement --update-targets and --validate-targets for KMS
    for target_name, token_paths in target_token_paths.items():
        try:
            # Prefer the new gpg.recipients location; fall back to the
            # deprecated parameters.kapitan.secrets.recipients key.
            try:
                recipients = inv['nodes'][target_name]['parameters'][
                    'kapitan']['secrets']['gpg']['recipients']
            except KeyError:
                # TODO: Keeping gpg recipients backwards-compatible until we make a breaking release
                logger.warning(
                    "WARNING: parameters.kapitan.secrets.recipients is deprecated, "
                    +
                    "please move them to parameters.kapitan.secrets.gpg.recipients"
                )
                recipients = inv['nodes'][target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
            for token_path in token_paths:
                secret_obj = ref_controller.backends['gpg'][token_path]
                # Compare fingerprint sets: order/duplicates don't matter
                target_fingerprints = set(lookup_fingerprints(recipients))
                secret_fingerprints = set(
                    lookup_fingerprints(secret_obj.recipients))
                if target_fingerprints != secret_fingerprints:
                    if args.validate_targets:
                        # validate-only mode: report and flag failure exit code
                        logger.info("%s recipient mismatch", token_path)
                        ret_code = 1
                    else:
                        # re-encrypt the secret for the inventory fingerprints
                        new_recipients = [
                            dict([
                                ("fingerprint", f),
                            ]) for f in target_fingerprints
                        ]
                        secret_obj.update_recipients(new_recipients)
                        ref_controller.backends['gpg'][token_path] = secret_obj
        except KeyError:
            # NOTE(review): this outer handler also swallows any KeyError
            # raised inside the token loop above, not just missing inventory
            # recipients — confirm that is intended.
            logger.debug(
                "secret_gpg_update_target: target: %s has no inventory recipients, skipping",
                target_name)
    sys.exit(ret_code)
Example #13
0
def _discover_components(cfg, inventory_path):
    """
    Discover components in `inventory_path/`. Parse all classes found in
    inventory_path and look for class includes starting with `components.`.
    """
    reset_reclass_cache()
    cluster_node = inventory_reclass(
        inventory_path, ignore_class_notfound=True)["nodes"]["cluster"]
    found = set()
    for cls_name in cluster_node["classes"]:
        if not cls_name.startswith("components."):
            continue
        component = cls_name.split(".")[1]
        if cfg.debug:
            click.echo(f"   > Found component {component}")
        found.add(component)
    return sorted(found)
Example #14
0
def compile(config, cluster_id):
    """Compile the catalog for `cluster_id` and update the catalog repo.

    Oldest variant of the compile orchestration: no reclass cache reset, and
    kapitan_compile returns a process result checked via returncode.
    NOTE(review): shadows the `compile` builtin.
    """
    # In local mode reuse the existing checkout; otherwise clean first.
    if config.local:
        cluster, target_name, catalog_repo = _local_setup(config, cluster_id)
    else:
        clean(config)
        cluster, target_name, catalog_repo = _regular_setup(config, cluster_id)

    # Compile kapitan inventory to extract component versions. Component
    # versions are assumed to be defined in the inventory key
    # 'parameters.component_versions'
    kapitan_inventory = inventory_reclass('inventory')['nodes'][target_name]
    versions = kapitan_inventory['parameters'].get('component_versions', None)
    if versions and not config.local:
        set_component_versions(config, versions)
        update_target(config, cluster)

    # Optional jsonnet libraries declared under parameters.commodore.jsonnet_libs
    jsonnet_libs = kapitan_inventory['parameters'].get('commodore', {}).get(
        'jsonnet_libs', None)
    if jsonnet_libs and not config.local:
        fetch_jsonnet_libs(jsonnet_libs)

    clean_catalog(catalog_repo)

    # Generate Kapitan secret references from refs found in inventory
    # parameters
    update_refs(config, kapitan_inventory['parameters'])

    # kapitan_compile returns the subprocess result; abort on failure
    p = kapitan_compile()
    if p.returncode != 0:
        raise click.ClickException('Kapitan catalog compilation failed.')

    postprocess_components(kapitan_inventory, target_name,
                           config.get_components())

    update_catalog(config, target_name, catalog_repo)

    click.secho('Catalog compiled! 🎉', bold=True)
Example #15
0
File: cli.py  Project: jannaspam/kapitan
def main():
    """main function for command line usage"""
    parser = argparse.ArgumentParser(prog=PROJECT_NAME,
                                     description=DESCRIPTION)
    parser.add_argument('--version', action='version', version=VERSION)
    subparser = parser.add_subparsers(help="commands")

    eval_parser = subparser.add_parser('eval', help='evaluate jsonnet file')
    eval_parser.add_argument('jsonnet_file', type=str)
    eval_parser.add_argument('--output',
                             type=str,
                             choices=('yaml', 'json'),
                             default=from_dot_kapitan('eval', 'output',
                                                      'yaml'),
                             help='set output format, default is "yaml"')
    eval_parser.add_argument('--vars',
                             type=str,
                             default=from_dot_kapitan('eval', 'vars', []),
                             nargs='*',
                             metavar='VAR',
                             help='set variables')
    eval_parser.add_argument('--search-paths',
                             '-J',
                             type=str,
                             nargs='+',
                             default=from_dot_kapitan('eval', 'search-paths',
                                                      ['.']),
                             metavar='JPATH',
                             help='set search paths, default is ["."]')

    compile_parser = subparser.add_parser('compile', help='compile targets')
    compile_parser.add_argument('--search-paths',
                                '-J',
                                type=str,
                                nargs='+',
                                default=from_dot_kapitan(
                                    'compile', 'search-paths', ['.', 'lib']),
                                metavar='JPATH',
                                help='set search paths, default is ["."]')
    compile_parser.add_argument(
        '--jinja2-filters',
        '-J2F',
        type=str,
        default=from_dot_kapitan('compile', 'jinja2-filters',
                                 default_jinja2_filters_path),
        metavar='FPATH',
        help='load custom jinja2 filters from any file, default is to put\
                                them inside lib/jinja2_filters.py')
    compile_parser.add_argument('--verbose',
                                '-v',
                                help='set verbose mode',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'verbose', False))
    compile_parser.add_argument('--prune',
                                help='prune jsonnet output',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'prune', False))
    compile_parser.add_argument('--quiet',
                                help='set quiet mode, only critical output',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'quiet', False))
    compile_parser.add_argument('--output-path',
                                type=str,
                                default=from_dot_kapitan(
                                    'compile', 'output-path', '.'),
                                metavar='PATH',
                                help='set output path, default is "."')
    compile_parser.add_argument('--fetch',
                                help='fetches external dependencies',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'fetch', False))
    compile_parser.add_argument(
        '--validate',
        help=
        'validate compile output against schemas as specified in inventory',
        action='store_true',
        default=from_dot_kapitan('compile', 'validate', False))
    compile_parser.add_argument('--targets',
                                '-t',
                                help='targets to compile, default is all',
                                type=str,
                                nargs='+',
                                default=from_dot_kapitan(
                                    'compile', 'targets', []),
                                metavar='TARGET')
    compile_parser.add_argument(
        '--parallelism',
        '-p',
        type=int,
        default=from_dot_kapitan('compile', 'parallelism', 4),
        metavar='INT',
        help='Number of concurrent compile processes, default is 4')
    compile_parser.add_argument(
        '--indent',
        '-i',
        type=int,
        default=from_dot_kapitan('compile', 'indent', 2),
        metavar='INT',
        help='Indentation spaces for YAML/JSON, default is 2')
    compile_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default=from_dot_kapitan('compile', 'secrets-path', './secrets'))
    compile_parser.add_argument(
        '--reveal',
        help='reveal secrets (warning: this will write sensitive data)',
        action='store_true',
        default=from_dot_kapitan('compile', 'reveal', False))
    compile_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('compile', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    compile_parser.add_argument(
        '--cache',
        '-c',
        help='enable compilation caching to .kapitan_cache, default is False',
        action='store_true',
        default=from_dot_kapitan('compile', 'cache', False))
    compile_parser.add_argument(
        '--cache-paths',
        type=str,
        nargs='+',
        default=from_dot_kapitan('compile', 'cache-paths', []),
        metavar='PATH',
        help='cache additional paths to .kapitan_cache, default is []')
    compile_parser.add_argument('--ignore-version-check',
                                help='ignore the version from .kapitan',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'ignore-version-check', False))
    compile_parser.add_argument(
        '--schemas-path',
        default=from_dot_kapitan('validate', 'schemas-path', './schemas'),
        help='set schema cache path, default is "./schemas"')

    inventory_parser = subparser.add_parser('inventory', help='show inventory')
    inventory_parser.add_argument(
        '--target-name',
        '-t',
        default=from_dot_kapitan('inventory', 'target-name', ''),
        help='set target name, default is all targets')
    inventory_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('inventory', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    inventory_parser.add_argument('--flat',
                                  '-F',
                                  help='flatten nested inventory variables',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'inventory', 'flat', False))
    inventory_parser.add_argument(
        '--pattern',
        '-p',
        default=from_dot_kapitan('inventory', 'pattern', ''),
        help=
        'filter pattern (e.g. parameters.mysql.storage_class, or storage_class,'
        + ' or storage_*), default is ""')
    inventory_parser.add_argument('--verbose',
                                  '-v',
                                  help='set verbose mode',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'inventory', 'verbose', False))

    searchvar_parser = subparser.add_parser(
        'searchvar', help='show all inventory files where var is declared')
    searchvar_parser.add_argument(
        'searchvar',
        type=str,
        metavar='VARNAME',
        help=
        'e.g. parameters.mysql.storage_class, or storage_class, or storage_*')
    searchvar_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('searchvar', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    searchvar_parser.add_argument('--verbose',
                                  '-v',
                                  help='set verbose mode',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'searchvar', 'verbose', False))
    searchvar_parser.add_argument('--pretty-print',
                                  '-p',
                                  help='Pretty print content of var',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'searchvar', 'pretty-print', False))

    secrets_parser = subparser.add_parser('secrets', help='manage secrets')
    secrets_parser.add_argument(
        '--write',
        '-w',
        help='write secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument(
        '--update',
        help='update recipients for secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument('--update-targets',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'update-targets', False),
                                help='update target secrets')
    secrets_parser.add_argument('--validate-targets',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'validate-targets', False),
                                help='validate target secrets')
    secrets_parser.add_argument('--base64',
                                '-b64',
                                help='base64 encode file content',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'base64', False))
    secrets_parser.add_argument('--reveal',
                                '-r',
                                help='reveal secrets',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'reveal', False))
    secrets_parser.add_argument(
        '--file',
        '-f',
        help='read file or directory, set "-" for stdin',
        metavar='FILENAME')
    secrets_parser.add_argument('--target-name',
                                '-t',
                                help='grab recipients from target name')
    secrets_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('secrets', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    secrets_parser.add_argument('--recipients',
                                '-R',
                                help='set GPG recipients',
                                type=str,
                                nargs='+',
                                default=from_dot_kapitan(
                                    'secrets', 'recipients', []),
                                metavar='RECIPIENT')
    secrets_parser.add_argument('--key',
                                '-K',
                                help='set KMS key',
                                default=from_dot_kapitan('secrets', 'key', ''),
                                metavar='KEY')
    secrets_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default=from_dot_kapitan('secrets', 'secrets-path', './secrets'))
    secrets_parser.add_argument(
        '--verbose',
        '-v',
        help='set verbose mode (warning: this will show sensitive data)',
        action='store_true',
        default=from_dot_kapitan('secrets', 'verbose', False))

    lint_parser = subparser.add_parser('lint',
                                       help='linter for inventory and secrets')
    lint_parser.add_argument(
        '--fail-on-warning',
        default=from_dot_kapitan('lint', 'fail-on-warning', False),
        action='store_true',
        help='exit with failure code if warnings exist, default is False')
    lint_parser.add_argument(
        '--skip-class-checks',
        action='store_true',
        help='skip checking for unused classes, default is False',
        default=from_dot_kapitan('lint', 'skip-class-checks', False))
    lint_parser.add_argument(
        '--skip-yamllint',
        action='store_true',
        help='skip running yamllint on inventory, default is False',
        default=from_dot_kapitan('lint', 'skip-yamllint', False))
    lint_parser.add_argument(
        '--search-secrets',
        default=from_dot_kapitan('lint', 'search-secrets', False),
        action='store_true',
        help='searches for plaintext secrets in inventory, default is False')
    lint_parser.add_argument('--secrets-path',
                             help='set secrets path, default is "./secrets"',
                             default=from_dot_kapitan('lint', 'secrets-path',
                                                      './secrets'))
    lint_parser.add_argument('--compiled-path',
                             default=from_dot_kapitan('lint', 'compiled-path',
                                                      './compiled'),
                             help='set compiled path, default is "./compiled"')
    lint_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('lint', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')

    init_parser = subparser.add_parser(
        'init',
        help=
        'initialize a directory with the recommended kapitan project skeleton.'
    )
    init_parser.add_argument(
        '--directory',
        default=from_dot_kapitan('init', 'directory', '.'),
        help=
        'set path, in which to generate the project skeleton, assumes directory already exists. default is "./"'
    )

    validate_parser = subparser.add_parser(
        'validate',
        help=
        'validates the compile output against schemas as specified in inventory'
    )
    validate_parser.add_argument(
        '--compiled-path',
        default=from_dot_kapitan('compile', 'compiled-path', './compiled'),
        help='set compiled path, default is "./compiled')
    validate_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('compile', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    validate_parser.add_argument('--targets',
                                 '-t',
                                 help='targets to validate, default is all',
                                 type=str,
                                 nargs='+',
                                 default=from_dot_kapitan(
                                     'compile', 'targets', []),
                                 metavar='TARGET'),
    validate_parser.add_argument(
        '--schemas-path',
        default=from_dot_kapitan('validate', 'schemas-path', './schemas'),
        help='set schema cache path, default is "./schemas"')
    validate_parser.add_argument(
        '--parallelism',
        '-p',
        type=int,
        default=from_dot_kapitan('validate', 'parallelism', 4),
        metavar='INT',
        help='Number of concurrent validate processes, default is 4')
    args = parser.parse_args()

    logger.debug('Running with args: %s', args)

    try:
        cmd = sys.argv[1]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    if hasattr(args, 'verbose') and args.verbose:
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
    elif hasattr(args, 'quiet') and args.quiet:
        logging.basicConfig(level=logging.CRITICAL, format="%(message)s")
    else:
        logging.basicConfig(level=logging.INFO, format="%(message)s")

    if cmd == 'eval':
        file_path = args.jsonnet_file
        search_paths = [os.path.abspath(path) for path in args.search_paths]
        ext_vars = {}
        if args.vars:
            ext_vars = dict(var.split('=') for var in args.vars)
        json_output = None

        def _search_imports(cwd, imp):
            return search_imports(cwd, imp, search_paths)

        json_output = jsonnet_file(
            file_path,
            import_callback=_search_imports,
            native_callbacks=resource_callbacks(search_paths),
            ext_vars=ext_vars)
        if args.output == 'yaml':
            json_obj = json.loads(json_output)
            yaml.safe_dump(json_obj, sys.stdout, default_flow_style=False)
        elif json_output:
            print(json_output)

    elif cmd == 'compile':
        search_paths = [os.path.abspath(path) for path in args.search_paths]

        if not args.ignore_version_check:
            check_version()

        ref_controller = RefController(args.secrets_path)

        compile_targets(args.inventory_path,
                        search_paths,
                        args.output_path,
                        args.parallelism,
                        args.targets,
                        ref_controller,
                        prune=(args.prune),
                        indent=args.indent,
                        reveal=args.reveal,
                        cache=args.cache,
                        cache_paths=args.cache_paths,
                        fetch_dependencies=args.fetch,
                        validate=args.validate,
                        schemas_path=args.schemas_path,
                        jinja2_filters=args.jinja2_filters)

    elif cmd == 'inventory':
        if args.pattern and args.target_name == '':
            parser.error("--pattern requires --target_name")
        try:
            inv = inventory_reclass(args.inventory_path)
            if args.target_name != '':
                inv = inv['nodes'][args.target_name]
                if args.pattern != '':
                    pattern = args.pattern.split(".")
                    inv = deep_get(inv, pattern)
            if args.flat:
                inv = flatten_dict(inv)
                yaml.dump(inv,
                          sys.stdout,
                          width=10000,
                          default_flow_style=False)
            else:
                yaml.dump(inv,
                          sys.stdout,
                          Dumper=PrettyDumper,
                          default_flow_style=False)
        except Exception as e:
            if not isinstance(e, KapitanError):
                logger.exception("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            sys.exit(1)

    elif cmd == 'searchvar':
        searchvar(args.searchvar, args.inventory_path, args.pretty_print)

    elif cmd == 'lint':
        start_lint(args.fail_on_warning, args.skip_class_checks,
                   args.skip_yamllint, args.inventory_path,
                   args.search_secrets, args.secrets_path, args.compiled_path)

    elif cmd == 'init':
        initialise_skeleton(args.directory)

    elif cmd == 'secrets':
        ref_controller = RefController(args.secrets_path)

        if args.write is not None:
            secret_write(args, ref_controller)
        elif args.reveal:
            secret_reveal(args, ref_controller)
        elif args.update:
            secret_update(args, ref_controller)
        elif args.update_targets or args.validate_targets:
            secret_update_validate(args, ref_controller)

    elif cmd == 'validate':
        schema_validate_compiled(args.targets,
                                 inventory_path=args.inventory_path,
                                 compiled_path=args.compiled_path,
                                 schema_cache_path=args.schemas_path,
                                 parallel=args.parallelism)
예제 #16
0
def generate_inv_cache_hashes(inventory_path, targets, cache_paths):
    """
    generates the hashes for the inventory per target and jsonnet/jinja2 folders for caching purposes
    struct: {
        inventory:
            <target>:
                classes: <sha256>
                parameters: <sha256>
        folder:
            components: <sha256>
            docs: <sha256>
            lib: <sha256>
            scripts: <sha256>
            ...
    }
    """
    # Render the reclass inventory once, then rebuild the module-level
    # cache (cached.inv_cache) from scratch.
    inv = inventory_reclass(inventory_path)
    cached.inv_cache = {}
    cached.inv_cache["inventory"] = {}
    cached.inv_cache["folder"] = {}

    if targets:
        # Explicit target list: hash only those targets' classes/parameters.
        for target in targets:
            try:
                cached.inv_cache["inventory"][target] = {}
                cached.inv_cache["inventory"][target][
                    "classes"] = dictionary_hash(
                        inv["nodes"][target]["classes"])
                cached.inv_cache["inventory"][target][
                    "parameters"] = dictionary_hash(
                        inv["nodes"][target]["parameters"])
            except KeyError:
                # The requested target name is absent from the rendered
                # inventory nodes.
                raise CompileError("target not found: {}".format(target))
    else:
        # No explicit targets: hash every target's inventory, plus the
        # top-level folders referenced by each target's compile input_paths.
        for target in inv["nodes"]:
            cached.inv_cache["inventory"][target] = {}
            cached.inv_cache["inventory"][target]["classes"] = dictionary_hash(
                inv["nodes"][target]["classes"])
            cached.inv_cache["inventory"][target][
                "parameters"] = dictionary_hash(
                    inv["nodes"][target]["parameters"])

            compile_obj = inv["nodes"][target]["parameters"]["kapitan"][
                "compile"]
            for obj in compile_obj:
                for input_path in obj["input_paths"]:
                    # Take the first path component of the input path; if the
                    # path has no directory part, fall back to the first
                    # component of its basename.
                    base_folder = os.path.dirname(input_path).split("/")[0]
                    if base_folder == "":
                        base_folder = os.path.basename(input_path).split(
                            "/")[0]

                    # Hash each existing folder only once.
                    if base_folder not in cached.inv_cache["folder"].keys():
                        if os.path.exists(base_folder) and os.path.isdir(
                                base_folder):
                            cached.inv_cache["folder"][
                                base_folder] = directory_hash(base_folder)

                # Cache additional folders set by --cache-paths
                for path in cache_paths:
                    if path not in cached.inv_cache["folder"].keys():
                        if os.path.exists(path) and os.path.isdir(path):
                            cached.inv_cache["folder"][path] = directory_hash(
                                path)

        # Most commonly changed but not referenced in input_paths
        # NOTE(review): this block only runs when no explicit targets were
        # given (it sits inside the else branch) — confirm that is intended.
        for common in ("lib", "vendor", "secrets"):
            if common not in cached.inv_cache["folder"].keys():
                if os.path.exists(common) and os.path.isdir(common):
                    cached.inv_cache["folder"][common] = directory_hash(common)
예제 #17
0
파일: cli.py 프로젝트: slimakcz/kapitan
def main():
    """Main function for command line usage.

    Builds the argparse command tree (eval / compile / inventory /
    searchvar / secrets), parses ``sys.argv`` and dispatches to the
    matching command handler. Exits with a non-zero status on error.
    """
    parser = argparse.ArgumentParser(prog=PROJECT_NAME,
                                     description=DESCRIPTION)
    parser.add_argument('--version', action='version', version=VERSION)
    subparser = parser.add_subparsers(help="commands")

    eval_parser = subparser.add_parser('eval', help='evaluate jsonnet file')
    eval_parser.add_argument('jsonnet_file', type=str)
    eval_parser.add_argument('--output',
                             type=str,
                             choices=('yaml', 'json', 'str'),
                             default=from_dot_kapitan('eval', 'output',
                                                      'yaml'),
                             help='set output format, default is "yaml"')
    eval_parser.add_argument('--vars',
                             type=str,
                             default=from_dot_kapitan('eval', 'vars', []),
                             nargs='*',
                             metavar='VAR',
                             help='set variables')
    eval_parser.add_argument('--search-paths',
                             '-J',
                             type=str,
                             nargs='+',
                             default=from_dot_kapitan('eval', 'search-paths',
                                                      ['.']),
                             metavar='JPATH',
                             help='set search paths, default is ["."]')

    compile_parser = subparser.add_parser('compile', help='compile targets')
    compile_parser.add_argument('--search-paths',
                                '-J',
                                type=str,
                                nargs='+',
                                default=from_dot_kapitan(
                                    'compile', 'search-paths', ['.']),
                                metavar='JPATH',
                                help='set search paths, default is ["."]')
    compile_parser.add_argument('--verbose',
                                '-v',
                                help='set verbose mode',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'verbose', False))
    compile_parser.add_argument('--prune',
                                help='prune jsonnet output',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'prune', False))
    compile_parser.add_argument('--quiet',
                                help='set quiet mode, only critical output',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'quiet', False))
    compile_parser.add_argument('--output-path',
                                type=str,
                                default=from_dot_kapitan(
                                    'compile', 'output-path', '.'),
                                metavar='PATH',
                                help='set output path, default is "."')
    compile_parser.add_argument('--targets',
                                '-t',
                                help='targets to compile, default is all',
                                type=str,
                                nargs='+',
                                default=from_dot_kapitan(
                                    'compile', 'targets', []),
                                metavar='TARGET')
    compile_parser.add_argument(
        '--parallelism',
        '-p',
        type=int,
        default=from_dot_kapitan('compile', 'parallelism', 4),
        metavar='INT',
        help='Number of concurrent compile processes, default is 4')
    compile_parser.add_argument(
        '--indent',
        '-i',
        type=int,
        default=from_dot_kapitan('compile', 'indent', 2),
        metavar='INT',
        help='Indentation spaces for YAML/JSON, default is 2')
    compile_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default=from_dot_kapitan('compile', 'secrets-path', './secrets'))
    compile_parser.add_argument(
        '--reveal',
        help='reveal secrets (warning: this will write sensitive data)',
        action='store_true',
        default=from_dot_kapitan('compile', 'reveal', False))
    compile_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('compile', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    compile_parser.add_argument(
        '--cache',
        '-c',
        help='enable compilation caching to .kapitan_cache, default is False',
        action='store_true',
        default=from_dot_kapitan('compile', 'cache', False))
    compile_parser.add_argument(
        '--cache-paths',
        type=str,
        nargs='+',
        default=from_dot_kapitan('compile', 'cache-paths', []),
        metavar='PATH',
        help='cache additional paths to .kapitan_cache, default is []')
    compile_parser.add_argument('--ignore-version-check',
                                help='ignore the version from .kapitan',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'compile', 'ignore-version-check', False))

    inventory_parser = subparser.add_parser('inventory', help='show inventory')
    inventory_parser.add_argument(
        '--target-name',
        '-t',
        default=from_dot_kapitan('inventory', 'target-name', ''),
        help='set target name, default is all targets')
    inventory_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('inventory', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    inventory_parser.add_argument('--flat',
                                  '-F',
                                  help='flatten nested inventory variables',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'inventory', 'flat', False))
    inventory_parser.add_argument(
        '--pattern',
        '-p',
        default=from_dot_kapitan('inventory', 'pattern', ''),
        help=
        'filter pattern (e.g. parameters.mysql.storage_class, or storage_class,'
        + ' or storage_*), default is ""')
    inventory_parser.add_argument('--verbose',
                                  '-v',
                                  help='set verbose mode',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'inventory', 'verbose', False))

    searchvar_parser = subparser.add_parser(
        'searchvar', help='show all inventory files where var is declared')
    searchvar_parser.add_argument(
        'searchvar',
        type=str,
        metavar='VARNAME',
        help=
        'e.g. parameters.mysql.storage_class, or storage_class, or storage_*')
    searchvar_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('searchvar', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    searchvar_parser.add_argument('--verbose',
                                  '-v',
                                  help='set verbose mode',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'searchvar', 'verbose', False))
    searchvar_parser.add_argument('--pretty-print',
                                  '-p',
                                  help='Pretty print content of var',
                                  action='store_true',
                                  default=from_dot_kapitan(
                                      'searchvar', 'pretty-print', False))

    secrets_parser = subparser.add_parser('secrets', help='manage secrets')
    secrets_parser.add_argument(
        '--write',
        '-w',
        help='write secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument(
        '--update',
        help='update recipients for secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument('--update-targets',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'update-targets', False),
                                help='update target secrets')
    secrets_parser.add_argument('--validate-targets',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'validate-targets', False),
                                help='validate target secrets')
    secrets_parser.add_argument('--base64',
                                '-b64',
                                help='base64 encode file content',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'base64', False))
    secrets_parser.add_argument('--reveal',
                                '-r',
                                help='reveal secrets',
                                action='store_true',
                                default=from_dot_kapitan(
                                    'secrets', 'reveal', False))
    secrets_parser.add_argument(
        '--file',
        '-f',
        help='read file or directory, set "-" for stdin',
        metavar='FILENAME')
    secrets_parser.add_argument('--target-name',
                                '-t',
                                help='grab recipients from target name')
    secrets_parser.add_argument(
        '--inventory-path',
        default=from_dot_kapitan('secrets', 'inventory-path', './inventory'),
        help='set inventory path, default is "./inventory"')
    secrets_parser.add_argument('--recipients',
                                '-R',
                                help='set recipients',
                                type=str,
                                nargs='+',
                                default=from_dot_kapitan(
                                    'secrets', 'recipients', []),
                                metavar='RECIPIENT')
    secrets_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default=from_dot_kapitan('secrets', 'secrets-path', './secrets'))
    secrets_parser.add_argument('--backend',
                                help='set secrets backend, default is "gpg"',
                                type=str,
                                choices=('gpg', ),
                                default=from_dot_kapitan(
                                    'secrets', 'backend', 'gpg'))
    secrets_parser.add_argument(
        '--verbose',
        '-v',
        help='set verbose mode (warning: this will show sensitive data)',
        action='store_true',
        default=from_dot_kapitan('secrets', 'verbose', False))

    args = parser.parse_args()

    logger.debug('Running with args: %s', args)

    # The subcommand name is not stored on args, so recover it from argv;
    # a bare invocation prints help and exits with failure.
    try:
        cmd = sys.argv[1]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    if cmd == 'eval':
        # Evaluate a single jsonnet file and print it as yaml/json/str.
        file_path = args.jsonnet_file
        search_paths = [os.path.abspath(path) for path in args.search_paths]
        ext_vars = {}
        if args.vars:
            ext_vars = dict(var.split('=') for var in args.vars)
        json_output = None

        def _search_imports(cwd, imp):
            return search_imports(cwd, imp, search_paths)

        json_output = jsonnet_file(
            file_path,
            import_callback=_search_imports,
            native_callbacks=resource_callbacks(search_paths),
            ext_vars=ext_vars)
        if args.output == 'yaml':
            json_obj = json.loads(json_output)
            yaml.safe_dump(json_obj, sys.stdout, default_flow_style=False)
        elif json_output:
            print(json_output)

    elif cmd == 'compile':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        elif args.quiet:
            logging.basicConfig(level=logging.CRITICAL, format="%(message)s")
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")

        search_paths = [os.path.abspath(path) for path in args.search_paths]

        if not args.ignore_version_check:
            check_version()

        ref_controller = RefController(args.secrets_path)

        compile_targets(args.inventory_path,
                        search_paths,
                        args.output_path,
                        args.parallelism,
                        args.targets,
                        ref_controller,
                        prune=(args.prune),
                        indent=args.indent,
                        reveal=args.reveal,
                        cache=args.cache,
                        cache_paths=args.cache_paths)

    elif cmd == 'inventory':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")

        if args.pattern and args.target_name == '':
            # Fixed: the defined option is --target-name, not --target_name.
            parser.error("--pattern requires --target-name")
        try:
            inv = inventory_reclass(args.inventory_path)
            if args.target_name != '':
                inv = inv['nodes'][args.target_name]
                if args.pattern != '':
                    pattern = args.pattern.split(".")
                    inv = deep_get(inv, pattern)
            if args.flat:
                inv = flatten_dict(inv)
                yaml.dump(inv, sys.stdout, width=10000)
            else:
                yaml.dump(inv,
                          sys.stdout,
                          Dumper=PrettyDumper,
                          default_flow_style=False)
        except Exception as e:
            # KapitanError is expected to have been reported already; only
            # print a traceback for unexpected exceptions.
            if not isinstance(e, KapitanError):
                logger.error("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                traceback.print_exc()
            sys.exit(1)

    elif cmd == 'searchvar':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")

        searchvar(args.searchvar, args.inventory_path, args.pretty_print)

    elif cmd == 'secrets':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")

        ref_controller = RefController(args.secrets_path)

        if args.write is not None:
            if args.file is None:
                parser.error('--file is required with --write')
            data = None
            # args.recipients is a list of names; the backend expects dicts.
            recipients = [{"name": name} for name in args.recipients]
            if args.target_name:
                inv = inventory_reclass(args.inventory_path)
                # TODO move into kapitan:secrets:gpg:recipients key
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
            if args.file == '-':
                data = sys.stdin.read()
            else:
                with open(args.file) as fp:
                    data = fp.read()
            # TODO deprecate backend and move to passing ref tags in command line
            if args.backend == "gpg":
                secret_obj = GPGSecret(data, recipients, args.base64)
                ref_controller.backends['gpg'][args.write] = secret_obj
        elif args.reveal:
            revealer = Revealer(ref_controller)
            if args.file is None:
                parser.error('--file is required with --reveal')
            if args.file == '-':
                # TODO deal with RefHashMismatchError or KeyError exceptions
                # NOTE(review): None appears to signal stdin to
                # reveal_raw_file — confirm against its implementation.
                out = revealer.reveal_raw_file(None)
                sys.stdout.write(out)
            elif args.file:
                for rev_obj in revealer.reveal_path(args.file):
                    sys.stdout.write(rev_obj.content)
        elif args.update:
            # update recipients for secret tag
            # args.recipients is a list, convert to recipients dict
            recipients = [{"name": name} for name in args.recipients]
            if args.target_name:
                inv = inventory_reclass(args.inventory_path)
                # TODO move into kapitan:secrets:gpg:recipients key
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
            if args.backend == "gpg":
                secret_obj = ref_controller.backends['gpg'][args.update]
                secret_obj.update_recipients(recipients)
                ref_controller.backends['gpg'][args.update] = secret_obj
        elif args.update_targets or args.validate_targets:
            # update recipients for all secrets in secrets_path
            # use --secrets-path to set scanning path
            inv = inventory_reclass(args.inventory_path)
            targets = set(inv['nodes'].keys())
            secrets_path = os.path.abspath(args.secrets_path)
            target_token_paths = search_target_token_paths(
                secrets_path, targets)
            ret_code = 0
            # override gpg backend for new secrets_path
            ref_controller.register_backend(GPGBackend(secrets_path))
            for target_name, token_paths in target_token_paths.items():
                try:
                    recipients = inv['nodes'][target_name]['parameters'][
                        'kapitan']['secrets']['recipients']
                    for token_path in token_paths:
                        secret_obj = ref_controller.backends['gpg'][token_path]
                        # Compare recipient sets by fingerprint; update (or
                        # just report, with --validate-targets) on mismatch.
                        target_fingerprints = set(
                            lookup_fingerprints(recipients))
                        secret_fingerprints = set(
                            lookup_fingerprints(secret_obj.recipients))
                        if target_fingerprints != secret_fingerprints:
                            if args.validate_targets:
                                logger.info("%s recipient mismatch",
                                            token_path)
                                ret_code = 1
                            else:
                                new_recipients = [{
                                    "fingerprint": f
                                } for f in target_fingerprints]
                                secret_obj.update_recipients(new_recipients)
                                ref_controller.backends['gpg'][
                                    token_path] = secret_obj
                except KeyError:
                    # Target has no recipients declared in inventory; nothing
                    # to validate or update for it.
                    logger.debug(
                        "secret_gpg_update_target: target: %s has no inventory recipients, skipping",
                        target_name)
            sys.exit(ret_code)
예제 #18
0
def secret_update_validate(args, ref_controller):
    "Validate and/or update target secrets"
    # update gpg recipients/gkms/awskms key for all secrets in secrets_path
    # use --refs-path to set scanning path
    inv = inventory_reclass(args.inventory_path)
    targets = set(inv["nodes"].keys())
    secrets_path = os.path.abspath(args.refs_path)
    # Map of target_name -> list of ref token paths found under secrets_path.
    target_token_paths = search_target_token_paths(secrets_path, targets)
    ret_code = 0

    for target_name, token_paths in target_token_paths.items():
        kap_inv_params = inv["nodes"][target_name]["parameters"]["kapitan"]
        if "secrets" not in kap_inv_params:
            raise KapitanError(
                "parameters.kapitan.secrets not defined in {}".format(
                    target_name))

        # Each backend's parameters are optional in the inventory; a missing
        # key simply means refs of that type are skipped for this target.
        try:
            recipients = kap_inv_params["secrets"]["gpg"]["recipients"]
        except KeyError:
            recipients = None
        try:
            gkey = kap_inv_params["secrets"]["gkms"]["key"]
        except KeyError:
            gkey = None
        try:
            awskey = kap_inv_params["secrets"]["awskms"]["key"]
        except KeyError:
            awskey = None
        # NOTE(review): vaultkv is looked up but never used below — vaultkv
        # refs fall through to the final "Invalid secret" branch. Confirm
        # whether vaultkv validation is intentionally unimplemented.
        try:
            vaultkv = kap_inv_params["secrets"]["vaultkv"]["auth"]
        except KeyError:
            vaultkv = None

        for token_path in token_paths:
            if token_path.startswith("?{gpg:"):
                if not recipients:
                    logger.debug(
                        "secret_update_validate: target: %s has no inventory gpg recipients, skipping %s",
                        target_name,
                        token_path,
                    )
                    continue
                secret_obj = ref_controller[token_path]
                # Compare by fingerprint sets so recipient ordering and
                # name-vs-fingerprint representation differences don't matter.
                target_fingerprints = set(lookup_fingerprints(recipients))
                secret_fingerprints = set(
                    lookup_fingerprints(secret_obj.recipients))
                if target_fingerprints != secret_fingerprints:
                    if args.validate_targets:
                        # Validate-only mode: report the drift, don't rewrite.
                        logger.info("%s recipient mismatch", token_path)
                        to_remove = secret_fingerprints.difference(
                            target_fingerprints)
                        to_add = target_fingerprints.difference(
                            secret_fingerprints)
                        if to_remove:
                            logger.info("%s needs removal", to_remove)
                        if to_add:
                            logger.info("%s needs addition", to_add)
                        ret_code = 1
                    else:
                        # Update mode: re-encrypt the ref for the inventory's
                        # recipient set and write it back.
                        new_recipients = [
                            dict([
                                ("fingerprint", f),
                            ]) for f in target_fingerprints
                        ]
                        secret_obj.update_recipients(new_recipients)
                        ref_controller[token_path] = secret_obj

            elif token_path.startswith("?{gkms:"):
                if not gkey:
                    logger.debug(
                        "secret_update_validate: target: %s has no inventory gkms key, skipping %s",
                        target_name,
                        token_path,
                    )
                    continue
                secret_obj = ref_controller[token_path]
                if gkey != secret_obj.key:
                    if args.validate_targets:
                        logger.info("%s key mismatch", token_path)
                        ret_code = 1
                    else:
                        secret_obj.update_key(gkey)
                        ref_controller[token_path] = secret_obj

            elif token_path.startswith("?{awskms:"):
                if not awskey:
                    logger.debug(
                        "secret_update_validate: target: %s has no inventory awskms key, skipping %s",
                        target_name,
                        token_path,
                    )
                    continue
                secret_obj = ref_controller[token_path]
                if awskey != secret_obj.key:
                    if args.validate_targets:
                        logger.info("%s key mismatch", token_path)
                        ret_code = 1
                    else:
                        secret_obj.update_key(awskey)
                        ref_controller[token_path] = secret_obj

            else:
                logger.info("Invalid secret %s, could not get type, skipping",
                            token_path)

    # Exit code 1 signals at least one mismatch in validate mode.
    sys.exit(ret_code)
예제 #19
0
def _target_secrets_params(args):
    """Return parameters.kapitan.secrets for args.target_name.

    Raises KapitanError if the target's inventory does not define
    parameters.kapitan.secrets.
    """
    inv = inventory_reclass(args.inventory_path)
    kap_inv_params = inv["nodes"][args.target_name]["parameters"]["kapitan"]
    if "secrets" not in kap_inv_params:
        raise KapitanError(
            "parameters.kapitan.secrets not defined in inventory of target {}"
            .format(args.target_name))
    return kap_inv_params["secrets"]


def ref_write(args, ref_controller):
    "Write ref to ref_controller based on cli args"
    token_name = args.write
    file_name = args.file
    data = None

    if file_name is None:
        fatal_error("--file is required with --write")
    if file_name == "-":
        # Read stdin in one call instead of concatenating line-by-line
        # (string += in a loop is quadratic in the worst case).
        data = sys.stdin.read()
    else:
        with open(file_name) as fp:
            data = fp.read()

    # All branches split with maxsplit=1 so the token path itself may
    # contain ':' characters.
    if token_name.startswith("gpg:"):
        type_name, token_path = token_name.split(":", 1)
        # args.recipients is a list of names; convert to recipient dicts.
        recipients = [{"name": name} for name in args.recipients]
        if args.target_name:
            # Inventory recipients take precedence over --recipients.
            recipients = _target_secrets_params(args)["gpg"]["recipients"]
        if not recipients:
            raise KapitanError(
                "No GPG recipients specified. Use --recipients or specify them in "
                +
                "parameters.kapitan.secrets.gpg.recipients and use --target-name"
            )
        secret_obj = GPGSecret(data, recipients, encode_base64=args.base64)
        tag = "?{{gpg:{}}}".format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("gkms:"):
        type_name, token_path = token_name.split(":", 1)
        key = args.key
        if args.target_name:
            key = _target_secrets_params(args)["gkms"]["key"]
        if not key:
            raise KapitanError(
                "No KMS key specified. Use --key or specify it in parameters.kapitan.secrets.gkms.key and use --target-name"
            )
        secret_obj = GoogleKMSSecret(data, key, encode_base64=args.base64)
        tag = "?{{gkms:{}}}".format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("awskms:"):
        type_name, token_path = token_name.split(":", 1)
        key = args.key
        if args.target_name:
            key = _target_secrets_params(args)["awskms"]["key"]
        if not key:
            raise KapitanError(
                "No KMS key specified. Use --key or specify it in parameters.kapitan.secrets.awskms.key and use --target-name"
            )
        secret_obj = AWSKMSSecret(data, key, encode_base64=args.base64)
        tag = "?{{awskms:{}}}".format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("base64:"):
        type_name, token_path = token_name.split(":", 1)
        _data = data.encode()
        encoding = "original"
        if args.base64:
            # Store base64-encoded bytes and record the encoding so reveal
            # knows how to decode.
            _data = base64.b64encode(_data).decode()
            _data = _data.encode()
            encoding = "base64"
        ref_obj = Base64Ref(_data, encoding=encoding)
        tag = "?{{base64:{}}}".format(token_path)
        ref_controller[tag] = ref_obj

    elif token_name.startswith("vaultkv:"):
        type_name, token_path = token_name.split(":", 1)
        _data = data.encode()
        vault_params = {}
        if args.target_name:
            vault_params = _target_secrets_params(args)["vaultkv"]
        if args.vault_auth:
            # --vault-auth overrides the inventory's auth setting.
            vault_params["auth"] = args.vault_auth
        if vault_params.get("auth") is None:
            raise KapitanError(
                "No Authentication type parameter specified. Specify it"
                " in parameters.kapitan.secrets.vaultkv.auth and use --target-name or use --vault-auth"
            )

        secret_obj = VaultSecret(_data, vault_params)
        tag = "?{{vaultkv:{}}}".format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("plain:"):
        type_name, token_path = token_name.split(":", 1)
        _data = data.encode()
        encoding = "original"
        if args.base64:
            _data = base64.b64encode(_data).decode()
            _data = _data.encode()
            encoding = "base64"
        ref_obj = PlainRef(_data, encoding=encoding)
        tag = "?{{plain:{}}}".format(token_path)
        ref_controller[tag] = ref_obj

    else:
        fatal_error(
            "Invalid token: {name}. Try using gpg/gkms/awskms/vaultkv/base64/plain:{name}"
            .format(name=token_name))
예제 #20
0
def main():
    """main function for command line usage"""
    parser = argparse.ArgumentParser(prog=PROJECT_NAME,
                                     description=DESCRIPTION)
    parser.add_argument("--version", action="version", version=VERSION)
    subparser = parser.add_subparsers(help="commands")

    # --- eval subcommand ---
    eval_parser = subparser.add_parser("eval", help="evaluate jsonnet file")
    eval_parser.add_argument("jsonnet_file", type=str)
    eval_parser.add_argument(
        "--output",
        type=str,
        choices=("yaml", "json"),
        default=from_dot_kapitan("eval", "output", "yaml"),
        help='set output format, default is "yaml"',
    )
    eval_parser.add_argument(
        "--vars",
        type=str,
        default=from_dot_kapitan("eval", "vars", []),
        nargs="*",
        metavar="VAR",
        help="set variables",
    )
    eval_parser.add_argument(
        "--search-paths",
        "-J",
        type=str,
        nargs="+",
        default=from_dot_kapitan("eval", "search-paths", ["."]),
        metavar="JPATH",
        help='set search paths, default is ["."]',
    )

    # --- compile subcommand ---
    compile_parser = subparser.add_parser("compile", help="compile targets")
    compile_parser.add_argument(
        "--search-paths",
        "-J",
        type=str,
        nargs="+",
        default=from_dot_kapitan("compile", "search-paths", [".", "lib"]),
        metavar="JPATH",
        # Fixed: help text previously claimed the default is ["."], but the
        # actual default is [".", "lib"].
        help='set search paths, default is [".", "lib"]',
    )
    compile_parser.add_argument(
        "--jinja2-filters",
        "-J2F",
        type=str,
        default=from_dot_kapitan("compile", "jinja2-filters",
                                 defaults.DEFAULT_JINJA2_FILTERS_PATH),
        metavar="FPATH",
        help="load custom jinja2 filters from any file, default is to put\
                                them inside lib/jinja2_filters.py",
    )
    compile_parser.add_argument(
        "--verbose",
        "-v",
        help="set verbose mode",
        action="store_true",
        default=from_dot_kapitan("compile", "verbose", False),
    )
    compile_parser.add_argument(
        "--prune",
        help="prune jsonnet output",
        action="store_true",
        default=from_dot_kapitan("compile", "prune", False),
    )
    compile_parser.add_argument(
        "--quiet",
        help="set quiet mode, only critical output",
        action="store_true",
        default=from_dot_kapitan("compile", "quiet", False),
    )
    compile_parser.add_argument(
        "--output-path",
        type=str,
        default=from_dot_kapitan("compile", "output-path", "."),
        metavar="PATH",
        help='set output path, default is "."',
    )
    compile_parser.add_argument(
        "--fetch",
        help="fetches external dependencies",
        action="store_true",
        default=from_dot_kapitan("compile", "fetch", False),
    )
    compile_parser.add_argument(
        "--validate",
        help=
        "validate compile output against schemas as specified in inventory",
        action="store_true",
        default=from_dot_kapitan("compile", "validate", False),
    )
    compile_parser.add_argument(
        "--parallelism",
        "-p",
        type=int,
        default=from_dot_kapitan("compile", "parallelism", 4),
        metavar="INT",
        help="Number of concurrent compile processes, default is 4",
    )
    compile_parser.add_argument(
        "--indent",
        "-i",
        type=int,
        default=from_dot_kapitan("compile", "indent", 2),
        metavar="INT",
        help="Indentation spaces for YAML/JSON, default is 2",
    )
    compile_parser.add_argument(
        "--refs-path",
        help='set refs path, default is "./refs"',
        default=from_dot_kapitan("compile", "refs-path", "./refs"),
    )
    compile_parser.add_argument(
        "--reveal",
        help=
        "reveal refs (warning: this will potentially write sensitive data)",
        action="store_true",
        default=from_dot_kapitan("compile", "reveal", False),
    )
    compile_parser.add_argument(
        "--inventory-path",
        default=from_dot_kapitan("compile", "inventory-path", "./inventory"),
        help='set inventory path, default is "./inventory"',
    )
    compile_parser.add_argument(
        "--cache",
        "-c",
        help="enable compilation caching to .kapitan_cache, default is False",
        action="store_true",
        default=from_dot_kapitan("compile", "cache", False),
    )
    compile_parser.add_argument(
        "--cache-paths",
        type=str,
        nargs="+",
        default=from_dot_kapitan("compile", "cache-paths", []),
        metavar="PATH",
        help="cache additional paths to .kapitan_cache, default is []",
    )
    compile_parser.add_argument(
        "--ignore-version-check",
        help="ignore the version from .kapitan",
        action="store_true",
        default=from_dot_kapitan("compile", "ignore-version-check", False),
    )
    compile_parser.add_argument(
        "--schemas-path",
        default=from_dot_kapitan("validate", "schemas-path", "./schemas"),
        help='set schema cache path, default is "./schemas"',
    )

    # --targets and --labels are mutually exclusive target selectors.
    compile_selector_parser = compile_parser.add_mutually_exclusive_group()
    compile_selector_parser.add_argument(
        "--targets",
        "-t",
        help="targets to compile, default is all",
        type=str,
        nargs="+",
        default=from_dot_kapitan("compile", "targets", []),
        metavar="TARGET",
    )
    compile_selector_parser.add_argument(
        "--labels",
        "-l",
        help="compile targets matching the labels, default is all",
        type=str,
        nargs="*",
        default=from_dot_kapitan("compile", "labels", []),
        metavar="key=value",
    )

    # --- inventory subcommand ---
    inventory_parser = subparser.add_parser("inventory", help="show inventory")
    inventory_parser.add_argument(
        "--target-name",
        "-t",
        default=from_dot_kapitan("inventory", "target-name", ""),
        help="set target name, default is all targets",
    )
    inventory_parser.add_argument(
        "--inventory-path",
        default=from_dot_kapitan("inventory", "inventory-path", "./inventory"),
        help='set inventory path, default is "./inventory"',
    )
    inventory_parser.add_argument(
        "--flat",
        "-F",
        help="flatten nested inventory variables",
        action="store_true",
        default=from_dot_kapitan("inventory", "flat", False),
    )
    inventory_parser.add_argument(
        "--pattern",
        "-p",
        default=from_dot_kapitan("inventory", "pattern", ""),
        help=
        "filter pattern (e.g. parameters.mysql.storage_class, or storage_class,"
        + ' or storage_*), default is ""',
    )
    inventory_parser.add_argument(
        "--verbose",
        "-v",
        help="set verbose mode",
        action="store_true",
        default=from_dot_kapitan("inventory", "verbose", False),
    )

    # --- searchvar subcommand ---
    searchvar_parser = subparser.add_parser(
        "searchvar", help="show all inventory files where var is declared")
    searchvar_parser.add_argument(
        "searchvar",
        type=str,
        metavar="VARNAME",
        help=
        "e.g. parameters.mysql.storage_class, or storage_class, or storage_*",
    )
    searchvar_parser.add_argument(
        "--inventory-path",
        default=from_dot_kapitan("searchvar", "inventory-path", "./inventory"),
        help='set inventory path, default is "./inventory"',
    )
    searchvar_parser.add_argument(
        "--verbose",
        "-v",
        help="set verbose mode",
        action="store_true",
        default=from_dot_kapitan("searchvar", "verbose", False),
    )
    searchvar_parser.add_argument(
        "--pretty-print",
        "-p",
        help="Pretty print content of var",
        action="store_true",
        default=from_dot_kapitan("searchvar", "pretty-print", False),
    )

    subparser.add_parser("secrets", help="(DEPRECATED) please use refs")

    # --- refs subcommand ---
    refs_parser = subparser.add_parser("refs", help="manage refs")
    refs_parser.add_argument(
        "--write",
        "-w",
        help="write ref token",
        metavar="TOKENNAME",
    )
    refs_parser.add_argument(
        "--update",
        help="update GPG recipients for ref token",
        metavar="TOKENNAME",
    )
    refs_parser.add_argument(
        "--update-targets",
        action="store_true",
        default=from_dot_kapitan("refs", "update-targets", False),
        help="update target secret refs",
    )
    refs_parser.add_argument(
        "--validate-targets",
        action="store_true",
        default=from_dot_kapitan("refs", "validate-targets", False),
        help="validate target secret refs",
    )
    refs_parser.add_argument(
        "--base64",
        "-b64",
        help="base64 encode file content",
        action="store_true",
        default=from_dot_kapitan("refs", "base64", False),
    )
    refs_parser.add_argument(
        "--reveal",
        "-r",
        help="reveal refs",
        action="store_true",
        default=from_dot_kapitan("refs", "reveal", False),
    )
    refs_parser.add_argument("--file",
                             "-f",
                             help='read file or directory, set "-" for stdin',
                             metavar="FILENAME")
    refs_parser.add_argument("--target-name",
                             "-t",
                             help="grab recipients from target name")
    refs_parser.add_argument(
        "--inventory-path",
        default=from_dot_kapitan("refs", "inventory-path", "./inventory"),
        help='set inventory path, default is "./inventory"',
    )
    refs_parser.add_argument(
        "--recipients",
        "-R",
        help="set GPG recipients",
        type=str,
        nargs="+",
        default=from_dot_kapitan("refs", "recipients", []),
        metavar="RECIPIENT",
    )
    refs_parser.add_argument("--key",
                             "-K",
                             help="set KMS key",
                             default=from_dot_kapitan("refs", "key", ""),
                             metavar="KEY")
    refs_parser.add_argument(
        "--vault-auth",
        help="set authentication type for vaultkv secrets",
        default=from_dot_kapitan("refs", "vault-auth", ""),
        metavar="AUTH",
    )
    refs_parser.add_argument(
        "--refs-path",
        help='set refs path, default is "./refs"',
        default=from_dot_kapitan("refs", "refs-path", "./refs"),
    )
    refs_parser.add_argument(
        "--verbose",
        "-v",
        help=
        "set verbose mode (warning: this will potentially show sensitive data)",
        action="store_true",
        default=from_dot_kapitan("refs", "verbose", False),
    )

    # --- lint subcommand ---
    lint_parser = subparser.add_parser("lint",
                                       help="linter for inventory and refs")
    lint_parser.add_argument(
        "--fail-on-warning",
        default=from_dot_kapitan("lint", "fail-on-warning", False),
        action="store_true",
        help="exit with failure code if warnings exist, default is False",
    )
    lint_parser.add_argument(
        "--skip-class-checks",
        action="store_true",
        help="skip checking for unused classes, default is False",
        default=from_dot_kapitan("lint", "skip-class-checks", False),
    )
    lint_parser.add_argument(
        "--skip-yamllint",
        action="store_true",
        help="skip running yamllint on inventory, default is False",
        default=from_dot_kapitan("lint", "skip-yamllint", False),
    )
    lint_parser.add_argument(
        "--search-secrets",
        default=from_dot_kapitan("lint", "search-secrets", False),
        action="store_true",
        help="searches for plaintext secrets in inventory, default is False",
    )
    lint_parser.add_argument(
        "--refs-path",
        help='set refs path, default is "./refs"',
        default=from_dot_kapitan("lint", "refs-path", "./refs"),
    )
    lint_parser.add_argument(
        "--compiled-path",
        default=from_dot_kapitan("lint", "compiled-path", "./compiled"),
        help='set compiled path, default is "./compiled"',
    )
    lint_parser.add_argument(
        "--inventory-path",
        default=from_dot_kapitan("lint", "inventory-path", "./inventory"),
        help='set inventory path, default is "./inventory"',
    )

    # --- init subcommand ---
    init_parser = subparser.add_parser(
        "init",
        help=
        "initialize a directory with the recommended kapitan project skeleton."
    )
    init_parser.add_argument(
        "--directory",
        default=from_dot_kapitan("init", "directory", "."),
        help="set path, in which to generate the project skeleton,"
        'assumes directory already exists. default is "./"',
    )

    # --- validate subcommand ---
    validate_parser = subparser.add_parser(
        "validate",
        help=
        "validates the compile output against schemas as specified in inventory"
    )
    # NOTE(review): the next three options read the "compile" section of
    # .kapitan rather than "validate" — confirm this is intentional.
    validate_parser.add_argument(
        "--compiled-path",
        default=from_dot_kapitan("compile", "compiled-path", "./compiled"),
        # Fixed: help string was missing its closing double quote.
        help='set compiled path, default is "./compiled"',
    )
    validate_parser.add_argument(
        "--inventory-path",
        default=from_dot_kapitan("compile", "inventory-path", "./inventory"),
        help='set inventory path, default is "./inventory"',
    )
    # Fixed: removed a stray trailing comma here that turned the statement
    # into a one-element tuple expression.
    validate_parser.add_argument(
        "--targets",
        "-t",
        help="targets to validate, default is all",
        type=str,
        nargs="+",
        default=from_dot_kapitan("compile", "targets", []),
        metavar="TARGET",
    )
    validate_parser.add_argument(
        "--schemas-path",
        default=from_dot_kapitan("validate", "schemas-path", "./schemas"),
        help='set schema cache path, default is "./schemas"',
    )
    validate_parser.add_argument(
        "--parallelism",
        "-p",
        type=int,
        default=from_dot_kapitan("validate", "parallelism", 4),
        metavar="INT",
        help="Number of concurrent validate processes, default is 4",
    )
    args = parser.parse_args()

    logger.debug("Running with args: %s", args)

    try:
        cmd = sys.argv[1]
    except IndexError:
        # No subcommand given: show usage and exit non-zero.
        parser.print_help()
        sys.exit(1)

    # cache args where key is subcommand
    cached.args[cmd] = args

    if hasattr(args, "verbose") and args.verbose:
        logging.basicConfig(
            level=logging.DEBUG,
            format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s")
    elif hasattr(args, "quiet") and args.quiet:
        logging.basicConfig(level=logging.CRITICAL, format="%(message)s")
    else:
        logging.basicConfig(level=logging.INFO, format="%(message)s")

    if cmd == "eval":
        file_path = args.jsonnet_file
        search_paths = [os.path.abspath(path) for path in args.search_paths]
        ext_vars = {}
        if args.vars:
            ext_vars = dict(var.split("=") for var in args.vars)
        json_output = None

        def _search_imports(cwd, imp):
            return search_imports(cwd, imp, search_paths)

        json_output = jsonnet_file(
            file_path,
            import_callback=_search_imports,
            native_callbacks=resource_callbacks(search_paths),
            ext_vars=ext_vars,
        )
        if args.output == "yaml":
            json_obj = json.loads(json_output)
            yaml.safe_dump(json_obj, sys.stdout, default_flow_style=False)
        elif json_output:
            print(json_output)

    elif cmd == "compile":
        search_paths = [os.path.abspath(path) for path in args.search_paths]

        if not args.ignore_version_check:
            check_version()

        ref_controller = RefController(args.refs_path)
        # cache controller for use in reveal_maybe jinja2 filter
        cached.ref_controller_obj = ref_controller
        cached.revealer_obj = Revealer(ref_controller)

        compile_targets(
            args.inventory_path,
            search_paths,
            args.output_path,
            args.parallelism,
            args.targets,
            args.labels,
            ref_controller,
            prune=(args.prune),
            indent=args.indent,
            reveal=args.reveal,
            cache=args.cache,
            cache_paths=args.cache_paths,
            fetch_dependencies=args.fetch,
            validate=args.validate,
            schemas_path=args.schemas_path,
            jinja2_filters=args.jinja2_filters,
        )

    elif cmd == "inventory":
        if args.pattern and args.target_name == "":
            # Fixed: error message previously referred to "--target_name";
            # the actual flag is spelled --target-name.
            parser.error("--pattern requires --target-name")
        try:
            inv = inventory_reclass(args.inventory_path)
            if args.target_name != "":
                inv = inv["nodes"][args.target_name]
                if args.pattern != "":
                    pattern = args.pattern.split(".")
                    inv = deep_get(inv, pattern)
            if args.flat:
                inv = flatten_dict(inv)
                yaml.dump(inv,
                          sys.stdout,
                          width=10000,
                          default_flow_style=False)
            else:
                yaml.dump(inv,
                          sys.stdout,
                          Dumper=PrettyDumper,
                          default_flow_style=False)
        except Exception as e:
            # Only log a traceback for unexpected errors; KapitanErrors are
            # considered user-facing and already explained.
            if not isinstance(e, KapitanError):
                logger.exception("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            sys.exit(1)

    elif cmd == "searchvar":
        searchvar(args.searchvar, args.inventory_path, args.pretty_print)

    elif cmd == "lint":
        start_lint(
            args.fail_on_warning,
            args.skip_class_checks,
            args.skip_yamllint,
            args.inventory_path,
            args.search_secrets,
            args.refs_path,
            args.compiled_path,
        )

    elif cmd == "init":
        initialise_skeleton(args.directory)

    elif cmd == "secrets":
        logger.error(
            "Secrets have been renamed to refs, please refer to: '$ kapitan refs --help'"
        )
        sys.exit(1)

    elif cmd == "refs":
        ref_controller = RefController(args.refs_path)

        if args.write is not None:
            ref_write(args, ref_controller)
        elif args.reveal:
            ref_reveal(args, ref_controller)
        elif args.update:
            secret_update(args, ref_controller)
        elif args.update_targets or args.validate_targets:
            secret_update_validate(args, ref_controller)

    elif cmd == "validate":
        schema_validate_compiled(
            args.targets,
            inventory_path=args.inventory_path,
            compiled_path=args.compiled_path,
            schema_cache_path=args.schemas_path,
            parallel=args.parallelism,
        )
예제 #21
0
def compile_component(config: Config, component_path, value_files,
                      search_paths, output_path):
    """Compile a single component in an isolated, throwaway kapitan workspace.

    Builds a fake inventory (fake parameters class, a 'test' target, a stub
    Argo CD lib) in a temp dir, runs kapitan compile against it, then
    postprocesses the output. The temp dir is removed unless config.trace
    is set.
    """
    # Resolve all input to absolute paths to fix symlinks
    component_path = P(component_path).resolve()
    value_files = [P(f).resolve() for f in value_files]
    search_paths = [P(d).resolve() for d in search_paths]
    output_path = P(output_path).resolve()
    # Ignore 'component-' prefix in dir name
    component_name = component_path.stem.replace('component-', '')

    click.secho(f"Compile component {component_name}...", bold=True)

    temp_dir = P(tempfile.mkdtemp(prefix='component-')).resolve()
    original_working_dir = os.getcwd()
    # Work inside the temp workspace; restored in the finally block below.
    os.chdir(temp_dir)
    try:
        if config.debug:
            click.echo(f"   > Created temp workspace: {temp_dir}")

        _prepare_fake_inventory(temp_dir, component_name, component_path,
                                value_files)

        # Create class for fake parameters
        # (placeholder cloud/cluster/customer values so component classes
        # that reference them can render).
        with open(temp_dir / 'inventory/classes/fake.yml', 'w') as file:
            file.write("""
parameters:
  cloud:
    provider: cloudscale
    region: rma1
  cluster:
    catalog_url: ssh://[email protected]/org/repo.git
    dist: test-distribution
    name: c-green-test-1234
  customer:
    name: t-silent-test-1234
  argocd:
    namespace: test

  kapitan:
    vars:
      target: test
      namespace: test
""")

        # Create test target
        # NOTE(review): assumes the component provides defaults.<name> and
        # components.<name> inventory classes — confirm against
        # _prepare_fake_inventory.
        with open(temp_dir / 'inventory/targets/test.yml', 'w') as file:
            value_classes = "\n".join([f"- {c.stem}" for c in value_files])
            file.write(f"""
classes:
- fake
- defaults.{component_name}
- components.{component_name}
{value_classes}
""")

        # Fake Argo CD lib
        # (no-op ArgoApp/ArgoProject so components depending on argocd
        # compile without a real Argo CD setup).
        (temp_dir / 'dependencies/lib').mkdir(exist_ok=True)
        with open(temp_dir / 'dependencies/lib/argocd.libjsonnet',
                  'w') as file:
            file.write("""
local ArgoApp(component, namespace, project='', secrets=true) = {};
local ArgoProject(name) = {};

{
  App: ArgoApp,
  Project: ArgoProject,
}
""")

        # Fetch Jsonnet libs
        fetch_jsonnet_libs(config, libs)

        # Compile component
        kapitan_compile(config,
                        target='test',
                        output_dir=output_path,
                        search_paths=search_paths,
                        fake_refs=True,
                        reveal=True)
        click.echo(f" > Component compiled to {output_path / 'compiled/test'}")

        # prepare inventory and fake component object for postprocess
        inventory = inventory_reclass(temp_dir / 'inventory')['nodes']['test']
        component = Component(component_name, Repo(component_path),
                              'https://fake.repo.url/', 'master')
        # We change the working directory to the output_path directory here,
        # as postprocess expects to find `compiled/<target>` in the working
        # directory.
        os.chdir(output_path)
        postprocess_components(config, inventory, 'test',
                               {component_name: component})
    finally:
        # Always restore the caller's working directory; keep the temp dir
        # only when tracing is enabled.
        os.chdir(original_working_dir)
        if config.trace:
            click.echo(f" > Temp dir left in place {temp_dir}")
        else:
            if config.debug:
                click.echo(f" > Remove temp dir {temp_dir}")
            shutil.rmtree(temp_dir)
예제 #22
0
파일: cli.py 프로젝트: marcelomata/kapitan
def main():
    """Entry point for command line usage.

    Builds the argparse CLI (eval / compile / inventory / searchvar /
    secrets sub-commands), parses sys.argv and dispatches to the matching
    handler.

    Fixes vs. previous revision:
    - print statements converted to single-argument print() calls so the
      module parses under both Python 2 and Python 3
    - sys.argv[1] access guarded so running with no sub-command prints
      usage and exits 1 instead of raising IndexError
    """
    parser = argparse.ArgumentParser(prog=PROJECT_NAME,
                                     description=DESCRIPTION)
    parser.add_argument('--version', action='version', version=VERSION)
    subparser = parser.add_subparsers(help="commands")

    # 'eval': render a single jsonnet file to yaml or json
    eval_parser = subparser.add_parser('eval', help='evaluate jsonnet file')
    eval_parser.add_argument('jsonnet_file', type=str)
    eval_parser.add_argument('--output',
                             type=str,
                             choices=('yaml', 'json'),
                             default='yaml',
                             help='set output format, default is "yaml"')
    eval_parser.add_argument('--vars',
                             type=str,
                             default=[],
                             nargs='*',
                             metavar='VAR',
                             help='set variables')
    eval_parser.add_argument('--search-path',
                             '-J',
                             type=str,
                             default='.',
                             metavar='JPATH',
                             help='set search path, default is "."')

    # 'compile': compile explicit target files in parallel
    compile_parser = subparser.add_parser('compile',
                                          help='compile target files')
    compile_parser.add_argument('--target-file',
                                '-f',
                                type=str,
                                nargs='+',
                                default=[],
                                metavar='TARGET',
                                help='target files')
    compile_parser.add_argument('--search-path',
                                '-J',
                                type=str,
                                default='.',
                                metavar='JPATH',
                                help='set search path, default is "."')
    compile_parser.add_argument('--verbose',
                                '-v',
                                help='set verbose mode',
                                action='store_true',
                                default=False)
    compile_parser.add_argument('--no-prune',
                                help='do not prune jsonnet output',
                                action='store_true',
                                default=False)
    compile_parser.add_argument('--quiet',
                                help='set quiet mode, only critical output',
                                action='store_true',
                                default=False)
    compile_parser.add_argument(
        '--output-path',
        type=str,
        default='compiled',
        metavar='PATH',
        help='set output path, default is "./compiled"')
    compile_parser.add_argument(
        '--parallelism',
        '-p',
        type=int,
        default=4,
        metavar='INT',
        help='Number of concurrent compile processes, default is 4')
    compile_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default='./secrets',
    )
    compile_parser.add_argument(
        '--reveal',
        help='reveal secrets (warning: this will write sensitive data)',
        action='store_true',
        default=False)

    # 'inventory': dump the reclass inventory (optionally one target, flat)
    inventory_parser = subparser.add_parser('inventory', help='show inventory')
    inventory_parser.add_argument(
        '--target-name',
        '-t',
        default='',
        help='set target name, default is all targets')
    inventory_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')
    inventory_parser.add_argument('--flat',
                                  '-F',
                                  help='flatten nested inventory variables',
                                  action='store_true',
                                  default=False)

    # 'searchvar': locate inventory files declaring a variable
    searchvar_parser = subparser.add_parser(
        'searchvar', help='show all inventory files where var is declared')
    searchvar_parser.add_argument(
        'searchvar',
        type=str,
        metavar='VARNAME',
        help='flattened full variable name. Example: ' +
        'parameters.cluster.type')
    searchvar_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')

    # 'secrets': write or reveal gpg-encrypted secrets
    secrets_parser = subparser.add_parser('secrets', help='manage secrets')
    secrets_parser.add_argument(
        '--write',
        '-w',
        help='write secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument('--reveal',
                                '-r',
                                help='reveal secrets',
                                action='store_true',
                                default=False)
    secrets_parser.add_argument('--file',
                                '-f',
                                help='read file, set "-" for stdin',
                                required=True,
                                metavar='FILENAME')
    secrets_parser.add_argument('--target-name',
                                '-t',
                                help='grab recipients from target name')
    secrets_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')
    secrets_parser.add_argument('--recipients',
                                '-R',
                                help='set recipients',
                                type=str,
                                nargs='+',
                                default=[],
                                metavar='RECIPIENT')
    secrets_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default='./secrets',
    )
    secrets_parser.add_argument('--backend',
                                help='set secrets backend, default is "gpg"',
                                type=str,
                                choices=('gpg', ),
                                default='gpg')
    secrets_parser.add_argument(
        '--verbose',
        '-v',
        help='set verbose mode (warning: this will show sensitive data)',
        action='store_true',
        default=False)
    secrets_parser.add_argument('--no-verify',
                                help='do not verify secret hashes on reveal',
                                action='store_true',
                                default=False)

    args = parser.parse_args()

    logger.debug('Running with args: %s', args)

    # Guard against running with no sub-command at all: sys.argv[1] would
    # raise IndexError, so print usage and exit non-zero instead.
    try:
        cmd = sys.argv[1]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    if cmd == 'eval':
        file_path = args.jsonnet_file
        search_path = os.path.abspath(args.search_path)
        ext_vars = {}
        if args.vars:
            # --vars entries are KEY=VALUE pairs
            ext_vars = dict(var.split('=') for var in args.vars)
        json_output = None
        _search_imports = lambda cwd, imp: search_imports(
            cwd, imp, search_path)
        json_output = jsonnet_file(
            file_path,
            import_callback=_search_imports,
            native_callbacks=resource_callbacks(search_path),
            ext_vars=ext_vars)
        if args.output == 'yaml':
            json_obj = json.loads(json_output)
            yaml_output = yaml.safe_dump(json_obj, default_flow_style=False)
            print(yaml_output)
        elif json_output:
            print(json_output)
    elif cmd == 'compile':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        elif args.quiet:
            logging.basicConfig(level=logging.CRITICAL, format="%(message)s")
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")
        search_path = os.path.abspath(args.search_path)
        gpg_obj = secret_gpg_backend()
        if args.target_file:
            # fan each target file out to a worker process
            pool = multiprocessing.Pool(args.parallelism)
            worker = partial(compile_target_file,
                             search_path=search_path,
                             output_path=args.output_path,
                             prune=(not args.no_prune),
                             secrets_path=args.secrets_path,
                             secrets_reveal=args.reveal,
                             gpg_obj=gpg_obj)
            try:
                pool.map(worker, args.target_file)
            except RuntimeError:
                # if compile worker fails, terminate immediately
                pool.terminate()
                raise
        else:
            logger.error("Nothing to compile")
    elif cmd == 'inventory':
        inv = inventory_reclass(args.inventory_path)
        if args.target_name != '':
            inv = inv['nodes'][args.target_name]
        if args.flat:
            inv = flatten_dict(inv)
            print(yaml.dump(inv, width=10000))
        else:
            print(yaml.dump(inv, Dumper=PrettyDumper, default_flow_style=False))
    elif cmd == 'searchvar':
        searchvar(args.searchvar, args.inventory_path)
    elif cmd == 'secrets':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")
        gpg_obj = secret_gpg_backend()
        if args.write is not None:
            data = None
            recipients = args.recipients
            if args.target_name:
                # recipients from the target's inventory override --recipients
                inv = inventory_reclass(args.inventory_path)
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
            if args.file == '-':
                data = ''
                for line in sys.stdin:
                    data += line
            else:
                with open(args.file) as fp:
                    data = fp.read()
            secret_gpg_write(gpg_obj, args.secrets_path, args.write, data,
                             recipients)
        elif args.reveal:
            if args.file == '-':
                secret_gpg_reveal(gpg_obj,
                                  args.secrets_path,
                                  None,
                                  verify=(not args.no_verify))
            elif args.file:
                # TODO if it is a directory, reveal every file there
                # NOTE(review): fp is unused — the open() only fails fast on
                # an unreadable file; confirm secret_gpg_reveal reads by path
                with open(args.file) as fp:
                    secret_gpg_reveal(gpg_obj,
                                      args.secrets_path,
                                      args.file,
                                      verify=(not args.no_verify))
예제 #23
0
def secret_update_validate(args, ref_controller):
    """Validate and/or update secrets for every inventory target.

    Scans the --secrets-path directory for per-target secret tokens, then
    for each target either reports mismatches against the inventory
    (--validate-targets) or rewrites the secrets to match it. Exits the
    process with status 1 on any mismatch or unrecognised secret type,
    0 otherwise.
    """
    inv = inventory_reclass(args.inventory_path)
    scan_path = os.path.abspath(args.secrets_path)
    known_targets = set(inv['nodes'].keys())
    tokens_by_target = search_target_token_paths(scan_path, known_targets)
    exit_status = 0

    for tgt_name, tokens in tokens_by_target.items():
        tgt_params = inv['nodes'][tgt_name]['parameters']['kapitan']
        if 'secrets' not in tgt_params:
            raise KapitanError(
                "parameters.kapitan.secrets not defined in {}".format(tgt_name))

        # Prefer the new gpg.recipients location; fall back (with a
        # deprecation warning) to the legacy flat recipients list.
        try:
            try:
                gpg_recipients = tgt_params['secrets']['gpg']['recipients']
            except KeyError:
                # TODO: Keeping gpg recipients backwards-compatible until we make a breaking release
                logger.warning(
                    "WARNING: parameters.kapitan.secrets.recipients is deprecated, "
                    "please use parameters.kapitan.secrets.gpg.recipients")
                gpg_recipients = tgt_params['secrets']['recipients']
        except KeyError:
            gpg_recipients = None

        try:
            gkms_key = tgt_params['secrets']['gkms']['key']
        except KeyError:
            gkms_key = None

        for token in tokens:
            if token.startswith("?{gpg:"):
                if not gpg_recipients:
                    logger.debug(
                        "secret_update_validate: target: %s has no inventory gpg recipients, skipping %s",
                        tgt_name, token)
                    continue
                secret = ref_controller[token]
                wanted = set(lookup_fingerprints(gpg_recipients))
                current = set(lookup_fingerprints(secret.recipients))
                if wanted != current:
                    if args.validate_targets:
                        # report-only mode: log the delta, flag failure
                        logger.info("%s recipient mismatch", token)
                        stale = current - wanted
                        missing = wanted - current
                        if stale:
                            logger.info("%s needs removal", stale)
                        if missing:
                            logger.info("%s needs addition", missing)
                        exit_status = 1
                    else:
                        # re-encrypt for the inventory's fingerprints
                        secret.update_recipients(
                            [{"fingerprint": fp} for fp in wanted])
                        ref_controller[token] = secret

            elif token.startswith("?{gkms:"):
                if not gkms_key:
                    logger.debug(
                        "secret_update_validate: target: %s has no inventory gkms key, skipping %s",
                        tgt_name, token)
                    continue
                secret = ref_controller[token]
                if gkms_key != secret.key:
                    if args.validate_targets:
                        logger.info("%s key mismatch", token)
                        exit_status = 1
                    else:
                        secret.update_key(gkms_key)
                        ref_controller[token] = secret

            else:
                logger.info("Invalid secret %s, could not get type, skipping",
                            token)
                exit_status = 1

    sys.exit(exit_status)
예제 #24
0
def main():
    """Command line entry point.

    Builds the argparse CLI with the eval, compile, inventory, searchvar
    and secrets sub-commands, parses sys.argv and dispatches on the
    sub-command name.
    """
    parser = argparse.ArgumentParser(prog=PROJECT_NAME,
                                     description=DESCRIPTION)
    parser.add_argument('--version', action='version', version=VERSION)
    subparser = parser.add_subparsers(help="commands")

    # 'eval': render a single jsonnet file to yaml or json
    eval_parser = subparser.add_parser('eval', help='evaluate jsonnet file')
    eval_parser.add_argument('jsonnet_file', type=str)
    eval_parser.add_argument('--output',
                             type=str,
                             choices=('yaml', 'json'),
                             default='yaml',
                             help='set output format, default is "yaml"')
    eval_parser.add_argument('--vars',
                             type=str,
                             default=[],
                             nargs='*',
                             metavar='VAR',
                             help='set variables')
    eval_parser.add_argument('--search-path',
                             '-J',
                             type=str,
                             default='.',
                             metavar='JPATH',
                             help='set search path, default is "."')

    # 'compile': compile inventory targets (all by default)
    compile_parser = subparser.add_parser('compile', help='compile targets')
    compile_parser.add_argument('--search-path',
                                '-J',
                                type=str,
                                default='.',
                                metavar='JPATH',
                                help='set search path, default is "."')
    compile_parser.add_argument('--verbose',
                                '-v',
                                help='set verbose mode',
                                action='store_true',
                                default=False)
    compile_parser.add_argument('--no-prune',
                                help='do not prune jsonnet output',
                                action='store_true',
                                default=False)
    compile_parser.add_argument('--quiet',
                                help='set quiet mode, only critical output',
                                action='store_true',
                                default=False)
    compile_parser.add_argument('--output-path',
                                type=str,
                                default='.',
                                metavar='PATH',
                                help='set output path, default is "."')
    compile_parser.add_argument('--targets',
                                '-t',
                                help='targets to compile, default is all',
                                type=str,
                                nargs='+',
                                default=[],
                                metavar='TARGET')
    compile_parser.add_argument(
        '--parallelism',
        '-p',
        type=int,
        default=4,
        metavar='INT',
        help='Number of concurrent compile processes, default is 4')
    compile_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default='./secrets',
    )
    compile_parser.add_argument(
        '--reveal',
        help='reveal secrets (warning: this will write sensitive data)',
        action='store_true',
        default=False)
    compile_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')

    # 'inventory': show (part of) the reclass inventory
    inventory_parser = subparser.add_parser('inventory', help='show inventory')
    inventory_parser.add_argument(
        '--target-name',
        '-t',
        default='',
        help='set target name, default is all targets')
    inventory_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')
    inventory_parser.add_argument('--flat',
                                  '-F',
                                  help='flatten nested inventory variables',
                                  action='store_true',
                                  default=False)
    inventory_parser.add_argument(
        '--pattern',
        '-p',
        default='',
        help='filter pattern (e.g. parameters.mysql), default is ""')

    # 'searchvar': locate inventory files declaring a variable
    searchvar_parser = subparser.add_parser(
        'searchvar', help='show all inventory files where var is declared')
    searchvar_parser.add_argument(
        'searchvar',
        type=str,
        metavar='VARNAME',
        help='flattened full variable name. Example: ' +
        'parameters.cluster.type')
    searchvar_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')

    # 'secrets': write, reveal, update or validate gpg secrets
    secrets_parser = subparser.add_parser('secrets', help='manage secrets')
    secrets_parser.add_argument(
        '--write',
        '-w',
        help='write secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument(
        '--update',
        help='update recipients for secret token',
        metavar='TOKENNAME',
    )
    secrets_parser.add_argument('--update-targets',
                                action='store_true',
                                default=False,
                                help='update target secrets')
    secrets_parser.add_argument('--validate-targets',
                                action='store_true',
                                default=False,
                                help='validate target secrets')
    secrets_parser.add_argument('--base64',
                                '-b64',
                                help='base64 encode file content',
                                action='store_true',
                                default=False)
    secrets_parser.add_argument('--reveal',
                                '-r',
                                help='reveal secrets',
                                action='store_true',
                                default=False)
    secrets_parser.add_argument(
        '--file',
        '-f',
        help='read file or directory, set "-" for stdin',
        metavar='FILENAME')
    secrets_parser.add_argument('--target-name',
                                '-t',
                                help='grab recipients from target name')
    secrets_parser.add_argument(
        '--inventory-path',
        default='./inventory',
        help='set inventory path, default is "./inventory"')
    secrets_parser.add_argument('--recipients',
                                '-R',
                                help='set recipients',
                                type=str,
                                nargs='+',
                                default=[],
                                metavar='RECIPIENT')
    secrets_parser.add_argument(
        '--secrets-path',
        help='set secrets path, default is "./secrets"',
        default='./secrets',
    )
    secrets_parser.add_argument('--backend',
                                help='set secrets backend, default is "gpg"',
                                type=str,
                                choices=('gpg', ),
                                default='gpg')
    secrets_parser.add_argument(
        '--verbose',
        '-v',
        help='set verbose mode (warning: this will show sensitive data)',
        action='store_true',
        default=False)
    secrets_parser.add_argument('--no-verify',
                                help='do not verify secret hashes on reveal',
                                action='store_true',
                                default=False)

    args = parser.parse_args()

    logger.debug('Running with args: %s', args)

    # No sub-command given: show usage instead of crashing on sys.argv[1]
    try:
        cmd = sys.argv[1]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    if cmd == 'eval':
        file_path = args.jsonnet_file
        search_path = os.path.abspath(args.search_path)
        ext_vars = {}
        if args.vars:
            # --vars entries are KEY=VALUE pairs
            ext_vars = dict(var.split('=') for var in args.vars)
        json_output = None
        _search_imports = lambda cwd, imp: search_imports(
            cwd, imp, search_path)
        json_output = jsonnet_file(
            file_path,
            import_callback=_search_imports,
            native_callbacks=resource_callbacks(search_path),
            ext_vars=ext_vars)
        if args.output == 'yaml':
            json_obj = json.loads(json_output)
            yaml.safe_dump(json_obj, sys.stdout, default_flow_style=False)
        elif json_output:
            print(json_output)

    elif cmd == 'compile':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        elif args.quiet:
            logging.basicConfig(level=logging.CRITICAL, format="%(message)s")
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")
        search_path = os.path.abspath(args.search_path)
        gpg_obj = secret_gpg_backend()

        compile_targets(args.inventory_path,
                        search_path,
                        args.output_path,
                        args.parallelism,
                        args.targets,
                        prune=(not args.no_prune),
                        secrets_path=args.secrets_path,
                        secrets_reveal=args.reveal,
                        gpg_obj=gpg_obj)

    elif cmd == 'inventory':
        try:
            logging.basicConfig(level=logging.INFO, format="%(message)s")
            inv = inventory_reclass(args.inventory_path)
            if args.target_name != '':
                inv = inv['nodes'][args.target_name]
                # --pattern only takes effect when a target is selected
                if args.pattern != '':
                    pattern = args.pattern.split(".")
                    inv = deep_get(inv, pattern)
            if args.flat:
                inv = flatten_dict(inv)
                yaml.dump(inv, sys.stdout, width=10000)
            else:
                yaml.dump(inv,
                          sys.stdout,
                          Dumper=PrettyDumper,
                          default_flow_style=False)
        except Exception as e:
            # NOTE(review): tracebacks are only printed for non-Kapitan
            # errors — presumably KapitanError reports its own message;
            # either way the command exits non-zero
            if not isinstance(e, KapitanError):
                logger.error("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
                traceback.print_exc()
            sys.exit(1)

    elif cmd == 'searchvar':
        searchvar(args.searchvar, args.inventory_path)

    elif cmd == 'secrets':
        if args.verbose:
            logging.basicConfig(
                level=logging.DEBUG,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        else:
            logging.basicConfig(level=logging.INFO, format="%(message)s")
        gpg_obj = secret_gpg_backend()
        if args.write is not None:
            if args.file is None:
                parser.error('--file is required with --write')
            data = None
            recipients = [dict((("name", name), )) for name in args.recipients]
            if args.target_name:
                # recipients from the target's inventory override --recipients
                inv = inventory_reclass(args.inventory_path)
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
            if args.file == '-':
                data = ''
                for line in sys.stdin:
                    data += line
            else:
                with open(args.file) as fp:
                    data = fp.read()
            secret_gpg_write(gpg_obj, args.secrets_path, args.write, data,
                             args.base64, recipients)
        elif args.reveal:
            if args.file is None:
                parser.error('--file is required with --reveal')
            if args.file == '-':
                secret_gpg_reveal_raw(gpg_obj,
                                      args.secrets_path,
                                      None,
                                      verify=(not args.no_verify))
            elif args.file:
                # dispatch on path type: single file vs whole directory
                if os.path.isfile(args.file):
                    out = secret_gpg_reveal_file(gpg_obj,
                                                 args.secrets_path,
                                                 args.file,
                                                 verify=(not args.no_verify))
                    sys.stdout.write(out)
                elif os.path.isdir(args.file):
                    secret_gpg_reveal_dir(gpg_obj,
                                          args.secrets_path,
                                          args.file,
                                          verify=(not args.no_verify))
        elif args.update:
            # update recipients for secret tag
            recipients = [
                dict([
                    ("name", name),
                ]) for name in args.recipients
            ]
            if args.target_name:
                inv = inventory_reclass(args.inventory_path)
                recipients = inv['nodes'][args.target_name]['parameters'][
                    'kapitan']['secrets']['recipients']
            secret_gpg_update_recipients(gpg_obj, args.secrets_path,
                                         args.update, recipients)
        elif args.update_targets or args.validate_targets:
            # update recipients for all secrets in secrets_path
            # use --secrets-path to set scanning path
            inv = inventory_reclass(args.inventory_path)
            targets = set(inv['nodes'].keys())
            secrets_path = os.path.abspath(args.secrets_path)
            target_token_paths = search_target_token_paths(
                secrets_path, targets)
            ret_code = 0
            for target_name, token_paths in target_token_paths.items():
                try:
                    recipients = inv['nodes'][target_name]['parameters'][
                        'kapitan']['secrets']['recipients']
                    for token_path in token_paths:
                        # compare inventory fingerprints against those the
                        # secret is currently encrypted for
                        target_fingerprints = set(
                            lookup_fingerprints(gpg_obj, recipients))
                        secret_fingerprints = set(
                            secret_gpg_raw_read_fingerprints(
                                secrets_path, token_path))
                        if target_fingerprints != secret_fingerprints:
                            if args.validate_targets:
                                logger.info("%s recipient mismatch",
                                            token_path)
                                ret_code = 1
                            else:
                                new_recipients = [
                                    dict([
                                        ("fingerprint", f),
                                    ]) for f in target_fingerprints
                                ]
                                secret_gpg_update_recipients(
                                    gpg_obj, secrets_path, token_path,
                                    new_recipients)
                except KeyError:
                    # target declares no recipients in its inventory
                    logger.debug(
                        "secret_gpg_update_target: target: %s has no inventory recipients, skipping",
                        target_name)
            sys.exit(ret_code)
예제 #25
0
파일: cli.py 프로젝트: jannaspam/kapitan
def secret_write(args, ref_controller):
    """Write a secret to ref_controller based on cli args.

    Reads the secret payload from --file (or stdin when --file is '-'),
    wraps it in the backend object selected by the --write token prefix
    (gpg:/gkms:/awskms:/ref:), and stores it under the '?{<type>:<path>}' tag.

    Raises:
        KapitanError: when required recipients/keys are missing, or when
            parameters.kapitan.secrets is not defined for --target.
    """
    token_name = args.write
    file_name = args.file
    data = None

    if file_name is None:
        fatal_error('--file is required with --write')
    if file_name == '-':
        # '-' means: read the secret payload from stdin
        data = sys.stdin.read()
    else:
        with open(file_name) as fp:
            data = fp.read()

    if token_name.startswith("gpg:"):
        type_name, token_path = token_name.split(":")
        # cli --recipients is the default; a --target inventory overrides it below
        recipients = [{"name": name} for name in args.recipients]
        if args.target_name:
            inv = inventory_reclass(args.inventory_path)
            kap_inv_params = inv['nodes'][
                args.target_name]['parameters']['kapitan']
            if 'secrets' not in kap_inv_params:
                raise KapitanError(
                    "parameters.kapitan.secrets not defined in {}".format(
                        args.target_name))

            recipients = kap_inv_params['secrets']['gpg']['recipients']
        if not recipients:
            raise KapitanError(
                "No GPG recipients specified. Use --recipients or specify them in "
                + "parameters.kapitan.secrets.gpg.recipients and use --target")
        secret_obj = GPGSecret(data, recipients, encode_base64=args.base64)
        tag = '?{{gpg:{}}}'.format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("gkms:"):
        type_name, token_path = token_name.split(":")
        # cli --key is the default; a --target inventory overrides it below
        key = args.key
        if args.target_name:
            inv = inventory_reclass(args.inventory_path)
            kap_inv_params = inv['nodes'][
                args.target_name]['parameters']['kapitan']
            if 'secrets' not in kap_inv_params:
                raise KapitanError(
                    "parameters.kapitan.secrets not defined in {}".format(
                        args.target_name))

            key = kap_inv_params['secrets']['gkms']['key']
        if not key:
            raise KapitanError(
                "No KMS key specified. Use --key or specify it in parameters.kapitan.secrets.gkms.key and use --target"
            )
        secret_obj = GoogleKMSSecret(data, key, encode_base64=args.base64)
        tag = '?{{gkms:{}}}'.format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("awskms:"):
        type_name, token_path = token_name.split(":")
        # cli --key is the default; a --target inventory overrides it below
        key = args.key
        if args.target_name:
            inv = inventory_reclass(args.inventory_path)
            kap_inv_params = inv['nodes'][
                args.target_name]['parameters']['kapitan']
            if 'secrets' not in kap_inv_params:
                raise KapitanError(
                    "parameters.kapitan.secrets not defined in {}".format(
                        args.target_name))

            key = kap_inv_params['secrets']['awskms']['key']
        if not key:
            raise KapitanError(
                "No KMS key specified. Use --key or specify it in parameters.kapitan.secrets.awskms.key and use --target"
            )
        secret_obj = AWSKMSSecret(data, key, encode_base64=args.base64)
        tag = '?{{awskms:{}}}'.format(token_path)
        ref_controller[tag] = secret_obj

    elif token_name.startswith("ref:"):
        type_name, token_path = token_name.split(":")
        _data = data.encode()
        encoding = 'original'
        if args.base64:
            # b64encode already returns bytes; no str round-trip needed
            _data = base64.b64encode(_data)
            encoding = 'base64'
        ref_obj = Ref(_data, encoding=encoding)
        tag = '?{{ref:{}}}'.format(token_path)
        ref_controller[tag] = ref_obj

    else:
        fatal_error(
            "Invalid token: {name}. Try using gpg/gkms/awskms/ref:{name}".
            format(name=token_name))