Example #1
 def register_args(self, parser):
     parser.add_argument("source",
                         nargs="+",
                         help=dedent("""
         The source directory where configuration files will be merged from.
         When multiple source directories are provided, start with the most general and end
         with the specific; later sources will override values from the earlier ones.
         Supports wildcards so a typical Unix ``conf.d/##-NAME`` directory structure works well."""
                                     )).completer = DirectoriesCompleter()
     parser.add_argument("--target",
                         "-t",
                         help=dedent("""
         Directory where the merged files will be stored.
         Typically either 'default' or 'local'""")
                         ).completer = DirectoriesCompleter()
     parser.add_argument("--dry-run",
                         "-D",
                         default=False,
                         action="store_true",
                         help=dedent("""
         Enable dry-run mode.
         Instead of writing to TARGET, preview changes as a 'diff'.
         If TARGET doesn't exist, then show the merged file."""))
     parser.add_argument(
         "--banner",
         "-b",
         default=
         " **** WARNING: This file is managed by 'ksconf combine', do "
         "not hand-edit this file! ****",
         help=
         "A banner or warning comment added to the top of the TARGET file. "
         "Used to discourage Splunk admins from editing an auto-generated "
         "file.")
Example #2
    def register_args(self, parser):
        import argparse
        parser.add_argument("conf", metavar="FILE", nargs="+",
                            default=["-"],
                            help="Input file to sort, or standard input."
                            ).completer = conf_files_completer

        # Pick mode:  target (sysout) vs inplace
        mode = parser.add_mutually_exclusive_group()
        mode.add_argument("--target", "-t", metavar="FILE",
                          type=argparse.FileType('w'), default=self.stdout,
                          help="File to write results to.  Defaults to standard output."
                          ).completer = conf_files_completer
        mode.add_argument("--inplace", "-i",
                          action="store_true", default=False, help=dedent("""\
                          Replace the input file with a sorted version.

                          WARNING:  This is a potentially destructive operation that
                          may move/remove comments."""))

        # Inplace update arguments
        grp1 = parser.add_argument_group("In-place update arguments")
        grp1.add_argument("-F", "--force", action="store_true",
                          help=dedent("""\
                          Force file sorting for all files, even for files containing the special
                          'KSCONF-NO-SORT' marker."""))
        grp1.add_argument("-q", "--quiet", action="store_true",
                          help=dedent("""\
                          Reduce the output.
                          Reports only updated or invalid files.
                          This is useful for pre-commit hooks, for example."""))

        parser.add_argument("-n", "--newlines", metavar="LINES", type=int, default=1,
                            help="Number of lines between stanzas.")
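A quick, standalone illustration (plain argparse, not ksconf code; the program name is made up) of the two grouping mechanisms used above: add_mutually_exclusive_group() makes --target and --inplace reject each other, while add_argument_group() only changes how the options are grouped in --help output.

import argparse

parser = argparse.ArgumentParser(prog="demo-sort")
mode = parser.add_mutually_exclusive_group()
mode.add_argument("--target", "-t", metavar="FILE")
mode.add_argument("--inplace", "-i", action="store_true")

grp = parser.add_argument_group("In-place update arguments")
grp.add_argument("--force", "-F", action="store_true")

print(parser.parse_args(["--inplace", "--force"]))
# parser.parse_args(["--inplace", "--target", "out.conf"])  # would exit: options not allowed together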
Example #3
    def register_args(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument("conf", metavar="CONF", nargs="+",
                            type=ConfFileType("r", "load", parse_profile=PARSECONF_LOOSE),
                            help="Configuration file(s) to export settings from."
                            ).completer = conf_files_completer
        parser.add_argument("--output", "-t", metavar="FILE",
                            type=FileType("w"), default=sys.stdout,
                            help="Save the shell script output to this file.  "
                                 "If not provided, the output is written to standard output.")

        prsout = parser.add_argument_group("Output Control")

        '''
        prsout.add_argument("--syntax", choices=["curl", "powershell"],  # curl-windows?
                            default="curl",
                            help="Pick the output syntax mode.  "
                                 "Currently only 'curl' is supported.")
        '''
        prsout.add_argument("--disable-auth-output", action="store_true", default=False,
                            help="Omit sample login curl commands from the output.")
        prsout.add_argument("--pretty-print", "-p", action="store_true", default=False,
                            help=dedent("""\
            Enable pretty-printing.
            Make shell output a bit more readable by splitting entries across lines."""))

        parsg1 = parser.add_mutually_exclusive_group(required=False)
        parsg1.add_argument("-u", "--update", action="store_true", default=False,
                            help="Assume that the REST entities already exist.  "
                                 "By default, output assumes stanzas are being created.")
        parsg1.add_argument("-D", "--delete", action="store_true", default=False,
                            help=dedent("""\
            Remove existing REST entities.  This is a destructive operation.
            In this mode, stanza attributes are unnecessary and ignored.
            NOTE:  This works for 'local' entities only; the default folder cannot be updated.
            """))

        parser.add_argument("--url", default="https://localhost:8089",
                            help="URL of Splunkd.  Default:  %(default)s")
        parser.add_argument("--app", default="$SPLUNK_APP",
                            help="Set the namespace (app name) for the endpoint")

        parser.add_argument("--user", help="Deprecated.  Use --owner instead.")
        parser.add_argument("--owner", default="nobody",
                            help="Set the object owner.  Typically, the default of 'nobody' is "
                                 "ideal if you want to share the configurations at the app-level.")
        parser.add_argument("--conf", dest="conf_type", metavar="TYPE",
                            help=dedent("""\
            Explicitly set the configuration file type.  By default, this is derived from CONF, but
            sometimes it's helpful to set this explicitly.  Can be any valid Splunk conf file type.
            Examples include: 'app', 'props', 'tags', 'savedsearches', etc."""))

        parser.add_argument("--extra-args", action="append",
                            help=dedent("""\
            Extra arguments to pass to all CURL commands.
            Quote arguments on the command line to prevent confusion between arguments to ksconf vs
            curl."""))
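The --extra-args option above uses argparse's append action, which collects one list entry per repetition of the flag. A minimal sketch (standard library only; the option name is reused purely for illustration):

import argparse

parser = argparse.ArgumentParser(prog="demo-rest-export")
parser.add_argument("--extra-args", action="append", default=[],
                    help="Extra arguments to pass to all curl commands.")

# The '=' form keeps dash-prefixed values from being mistaken for options.
args = parser.parse_args(["--extra-args=-k", "--extra-args=--silent"])
print(args.extra_args)    # ['-k', '--silent']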
Example #4
    def register_args(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument(
            "conf",
            metavar="CONF",
            nargs="+",
            type=ConfFileType("r", "load", parse_profile=PARSECONF_LOOSE),
            help="Configuration file(s) to export settings from."
        ).completer = conf_files_completer

        parser.add_argument("--conf",
                            dest="conf_type",
                            metavar="TYPE",
                            help=dedent("""\
            Explicitly set the configuration file type.  By default, this is derived from CONF, but
            sometimes it's helpful to set this explicitly. Can be any valid Splunk conf file type.
            Examples include: 'app', 'props', 'tags', 'savedsearches', etc.""")
                            )
        parser.add_argument(
            "-m",
            "--meta",
            action="append",
            help=
            "Specify one or more ``.meta`` files to determine the desired read & "
            "write ACLs, owner, and sharing for objects in the CONF file.")

        #add_splunkd_namespace(
        #    add_splunkd_access_args(parser.add_argument("Splunkd endpoint")))

        add_splunkd_namespace(add_splunkd_access_args(parser))

        parsg1 = parser.add_mutually_exclusive_group(required=False)
        '''
        parsg1.add_argument("-u", "--update", action="store_true", default=False,
                            help="Assume that the REST entities already exist.")
        parsg1.add_argument("--update-only", action="store_true", default=False,
                            help="Only update existing entities.  "
                                 "Non-existent entries will be skipped.")
        '''
        parsg1.add_argument("-D",
                            "--delete",
                            action="store_true",
                            default=False,
                            help=dedent("""\
            Remove existing REST entities.  This is a destructive operation.
            In this mode, stanza attributes are unnecessary.
            NOTE:  This works for 'local' entities only; the default folder cannot be updated.
            """))
Example #5
 def register_args(self, parser):
     parser.add_argument("source",
                         nargs="+",
                         help=dedent("""
         The source directory where configuration files will be merged from.
         When multiple source directories are provided, start with the most general and end
         with the specific; later sources will override values from the earlier ones.
         Supports wildcards so a typical Unix ``conf.d/##-NAME`` directory structure works well."""
                                     )).completer = DirectoriesCompleter()
     parser.add_argument("--target",
                         "-t",
                         help=dedent("""
         Directory where the merged files will be stored.
         Typically either 'default' or 'local'""")
                         ).completer = DirectoriesCompleter()
     parser.add_argument("--dry-run",
                         "-D",
                         default=False,
                         action="store_true",
                         help=dedent("""
         Enable dry-run mode.
         Instead of writing to TARGET, preview changes as a 'diff'.
         If TARGET doesn't exist, then show the merged file."""))
     parser.add_argument(
         "--follow-symlink",
         "-l",
         action="store_true",
         default=False,
         help="Follow symbolic links pointing to directories.  "
         "Symbolic links to files are always followed.")
     parser.add_argument(
         "--banner",
         "-b",
         default=
         " **** WARNING: This file is managed by 'ksconf combine', do "
         "not hand-edit this file! ****",
         help=
         "A banner or warning comment added to the top of the TARGET file. "
         "Used to discourage Splunk admins from editing an auto-generated "
         "file.")
     parser.add_argument("--disable-marker",
                         action="store_true",
                         default=False,
                         help=dedent("""
         Prevents the creation of, or checking for, the '{}' marker file safety check.
         This file is typically used to indicate that the destination folder is managed by ksconf.
         This option should be reserved for well-controlled batch processing scenarios.
         """.format(CONTROLLED_DIR_MARKER)))
Example #6
class DiffCmd(KsconfCmd):
    help = "Compare settings differences between two .conf files ignoring spacing and sort order"
    description = dedent("""
    Compares the content differences of two .conf files.

    This command ignores textual differences (like order, spacing, and comments) and
    focuses strictly on comparing stanzas, keys, and values.  Note that spaces
    within any given value will be compared.  Multiline fields are compared in a more
    traditional 'diff' output format so that long savedsearches and macros can be
    compared more easily.
    """)
    format = "manual"

    def register_args(self, parser):
        parser.add_argument("conf1",
                            metavar="CONF1",
                            help="Left side of the comparison",
                            type=ConfFileType("r",
                                              "load",
                                              parse_profile=PARSECONF_MID_NC)
                            ).completer = conf_files_completer
        parser.add_argument("conf2",
                            metavar="CONF2",
                            help="Right side of the comparison",
                            type=ConfFileType("r",
                                              "load",
                                              parse_profile=PARSECONF_MID_NC)
                            ).completer = conf_files_completer
        parser.add_argument(
            "-o",
            "--output",
            metavar="FILE",
            type=argparse.FileType('w'),
            default=self.stdout,
            help="File where difference is stored.  Defaults to standard out.")
        parser.add_argument(
            "--comments",
            "-C",
            action="store_true",
            default=False,
            help=
            "Enable comparison of comments.  (Unlikely to work consistently)")

    def run(self, args):
        ''' Compare two configuration files. '''
        args.conf1.set_parser_option(keep_comments=args.comments)
        args.conf2.set_parser_option(keep_comments=args.comments)

        cfg1 = args.conf1.data
        cfg2 = args.conf2.data

        diffs = compare_cfgs(cfg1, cfg2)
        rc = show_diff(args.output,
                       diffs,
                       headers=(args.conf1.name, args.conf2.name))
        if rc == EXIT_CODE_DIFF_EQUAL:
            self.stderr.write("Files are the same.\n")
        elif rc == EXIT_CODE_DIFF_NO_COMMON:
            self.stderr.write("No common stanzas between files.\n")
        return rc
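The description above says the comparison operates on parsed stanzas, keys, and values rather than on raw text. A toy sketch of that idea (this is only an illustration, not ksconf's compare_cfgs implementation):

def diff_confs(left, right):
    """Return (stanza, key, left_value, right_value) for every differing key."""
    ops = []
    for stanza in sorted(set(left) | set(right)):
        l, r = left.get(stanza, {}), right.get(stanza, {})
        for key in sorted(set(l) | set(r)):
            if l.get(key) != r.get(key):
                ops.append((stanza, key, l.get(key), r.get(key)))
    return ops

a = {"web": {"port": "8000", "ssl": "true"}}
b = {"web": {"ssl": "true", "port": "8443"}}    # different key order, one changed value
print(diff_confs(a, b))                          # [('web', 'port', '8000', '8443')]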
Example #7
 def register_args(self, parser):
     parser.add_argument(
         "path",
         metavar="PATH",
         nargs="+",
         type=str,
         help="Directory from which to load configuration files.  "
         "All .conf and .meta files are included recursively."
     ).completer = DirectoriesCompleter()
     parser.add_argument("--output",
                         "-o",
                         metavar="FILE",
                         type=FileType("w"),
                         default=self.stdout,
                         help=dedent("""\
         Save the snapshot to the named file.  If not provided, the snapshot is written to
         standard output.""")).completer = FilesCompleter(
                             allowednames=["*.json"])
     parser.add_argument(
         "--minimize",
         action="store_true",
         default=False,
         help=
         "Reduce the size of the JSON output by removing whitespace.  Reduces readability."
     )
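A small sketch (standard library only; the program name is made up) of the "--output defaults to standard output" pattern used above: argparse.FileType("w") opens a named file for writing, while the default keeps the already-open stream.

import argparse
import sys

parser = argparse.ArgumentParser(prog="demo-snapshot")
parser.add_argument("--output", "-o", metavar="FILE",
                    type=argparse.FileType("w"), default=sys.stdout)

args = parser.parse_args([])                       # no --output given
args.output.write("written to standard output\n")  # with --output FILE, this goes to FILE instead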
Example #8
 def register_args(self, parser):
     parser.add_argument("conf", metavar="FILE", nargs="+", help=dedent("""\
         One or more configuration files to check.
         If '-' is given, then read a list of files to validate from standard input""")
                      ).completer = conf_files_completer
     parser.add_argument("--quiet", "-q", default=False, action="store_true",
                         help="Reduce the volume of output.")
     ''' # Do we really need this?
Example #9
    def register_args(self, parser):
        parser.add_argument(
            "conf",
            metavar="FILE",
            nargs="+",
            help="The source configuration file(s) to collect settings from."
        ).completer = conf_files_completer
        parser.add_argument("--target",
                            "-t",
                            metavar="FILE",
                            type=ConfFileType("r+",
                                              "none",
                                              parse_profile=PARSECONF_STRICT),
                            default=ConfFileProxy("<stdout>", "w",
                                                  self.stdout),
                            help=dedent("""\
            Save the merged configuration files to this target file.
            If not provided, the merged conf is written to standard output.""")
                            ).completer = conf_files_completer

        # This is helpful when writing bash expressions like MyApp/{default,local}/props.conf;
        # when either default or local may not be present.
        parser.add_argument("--ignore-missing",
                            "-s",
                            default=False,
                            action="store_true",
                            help="Silently ignore any missing CONF files.")

        parser.add_argument("--dry-run",
                            "-D",
                            default=False,
                            action="store_true",
                            help=dedent("""\
            Enable dry-run mode.
            Instead of writing to TARGET, preview changes in 'diff' format.
            If TARGET doesn't exist, then show the merged file."""))
        parser.add_argument("--banner",
                            "-b",
                            default="",
                            help=dedent("""\
            A banner or warning comment added to the top of the TARGET file.
            Used to discourage Splunk admins from editing an auto-generated file."""
                                        ))
Example #10
class MergeCmd(KsconfCmd):
    help = "Merge two or more .conf files"
    description = dedent("""\
    Merge two or more .conf files into a single combined .conf file.  This could be
    used to merge the props.conf file from ALL technology addons into a single file:

    ksconf merge --target=all-ta-props.conf etc/apps/*TA*/{default,local}/props.conf

    """)
    format = "manual"
    maturity = "stable"

    def register_args(self, parser):
        parser.add_argument(
            "conf",
            metavar="FILE",
            nargs="+",
            type=ConfFileType("r", "load", parse_profile=PARSECONF_MID),
            help="The source configuration file to pull changes from."
        ).completer = conf_files_completer
        parser.add_argument("--target",
                            "-t",
                            metavar="FILE",
                            type=ConfFileType("r+",
                                              "none",
                                              parse_profile=PARSECONF_STRICT),
                            default=ConfFileProxy("<stdout>", "w",
                                                  self.stdout),
                            help="""
            Save the merged configuration files to this target file.
            If not provided, the merged conf is written to standard output."""
                            ).completer = conf_files_completer
        parser.add_argument("--dry-run",
                            "-D",
                            default=False,
                            action="store_true",
                            help="""
            Enable dry-run mode.
            Instead of writing to TARGET, preview changes in 'diff' format.
            If TARGET doesn't exist, then show the merged file.""")
        parser.add_argument("--banner",
                            "-b",
                            default="",
                            help="""
            A banner or warning comment added to the top of the TARGET file.
            This is often used to discourage Splunk admins from editing an auto-generated file."""
                            )

    def run(self, args):
        ''' Merge multiple configuration files into one '''
        merge_conf_files(args.target,
                         args.conf,
                         dry_run=args.dry_run,
                         banner_comment=args.banner)
        return EXIT_CODE_SUCCESS
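The merge behavior described in these commands (later sources override earlier ones, stanza by stanza) can be pictured with a toy dictionary merge. This is an illustration only, not ksconf's merge_conf_files:

def merge_layers(*layers):
    """Merge stanza dictionaries; later layers win on a per-key basis."""
    merged = {}
    for layer in layers:                        # earlier layers first, later layers override
        for stanza, attrs in layer.items():
            merged.setdefault(stanza, {}).update(attrs)
    return merged

default = {"search": {"dispatch.ttl": "10m", "enableSched": "0"}}
local = {"search": {"dispatch.ttl": "1h"}}
print(merge_layers(default, local))
# {'search': {'dispatch.ttl': '1h', 'enableSched': '0'}}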
Example #11
class SnapshotCmd(KsconfCmd):
    help = "Snapshot .conf file directories into a JSON dump format"
    description = dedent("""\
    Build a static snapshot of various configuration files stored within a structured json export
    format.  If the .conf files being captured are within a standard Splunk directory structure,
    then certain metadata and namespace information is assumed based on typical path locations.
    Individual apps or conf files can be collected as well, but less metadata may be extracted.
    """)

    def register_args(self, parser):
        parser.add_argument(
            "path",
            metavar="PATH",
            nargs="+",
            type=str,
            help="Directory from which to load configuration files.  "
            "All .conf and .meta files are included recursively."
        ).completer = DirectoriesCompleter()
        parser.add_argument("--output",
                            "-o",
                            metavar="FILE",
                            type=FileType("w"),
                            default=self.stdout,
                            help=dedent("""\
            Save the snapshot to the named file.  If not provided, the snapshot is written to
            standard output.""")).completer = FilesCompleter(
                                allowednames=["*.json"])
        parser.add_argument(
            "--minimize",
            action="store_true",
            default=False,
            help=
            "Reduce the size of the JSON output by removing whitespace.  Reduces readability."
        )

    def run(self, args):
        ''' Snapshot multiple configuration files into a single json snapshot. '''
        cfg = ConfSnapshotConfig()
        confSnap = ConfSnapshot(cfg)
        for path in args.path:
            if os.path.isfile(path):
                confSnap.snapshot_file_conf(path)
            elif os.path.isdir(path):
                confSnap.snapshot_dir(path)
            else:
                self.stderr.write(
                    "No such file or directory {}\n".format(path))
                return EXIT_CODE_NO_SUCH_FILE

        if args.minimize:
            confSnap.write_snapshot(args.output)
        else:
            confSnap.write_snapshot(args.output, indent=2)
        return EXIT_CODE_SUCCESS
Example #12
 def register_args(self, parser):
     parser.add_argument("xml",
                         metavar="FILE",
                         nargs="+",
                         help=dedent("""\
         One or more XML files to check.
         If '-' is given, then a list of files is read from standard input"""
                                     )).completer = conf_files_completer
     parser.add_argument(
         "--indent",
         type=int,
         default=2,
         help="Number of spaces.  This is only used if indentation cannot be "
         "guessed from the existing file.")
     parser.add_argument("--quiet",
                         "-q",
                         default=False,
                         action="store_true",
                         help="Reduce the volume of output.")
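The following standalone sketch (not part of the ksconf source; DemoCmd and its argument are invented) shows how a register_args() method like the ones on this page is typically wired into a parser, and how the .completer attribute set on each add_argument() result is consumed by argcomplete to provide shell tab completion.

import argparse
from textwrap import dedent

import argcomplete
from argcomplete.completers import DirectoriesCompleter


class DemoCmd:  # hypothetical stand-in for a KsconfCmd subclass
    def register_args(self, parser):
        parser.add_argument("source", nargs="+",
                            help=dedent("""\
                            One or more source directories to merge.""")
                            ).completer = DirectoriesCompleter()


parser = argparse.ArgumentParser(prog="demo")
DemoCmd().register_args(parser)
argcomplete.autocomplete(parser)   # enables tab completion for the completers set above
args = parser.parse_args(["etc/apps/demo/default.d"])
print(args.source)                 # ['etc/apps/demo/default.d']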
Example #13
class PromoteCmd(KsconfCmd):
    help = dedent("""\
    Promote .conf settings between layers using either batch mode (all
    changes) or interactive mode.

    Frequently this is used to promote conf changes made via the UI (stored in
    the ``local`` folder) to a version-controlled directory, often ``default``.
    """)
    description = dedent("""\
    Propagate .conf settings applied in one file to another.  Typically this is used
    to move ``local`` changes (made via the UI) into another layer, such as the
    ``default`` or a named ``default.d/50-xxxxx`` folder.

    Promote has two modes:  batch and interactive.  In batch mode all changes are
    applied automatically and the (now empty) source file is removed.  In interactive
    mode the user is prompted to select stanzas to promote.  This way local changes
    can be held without being promoted.

    NOTE: Changes are *MOVED* not copied, unless ``--keep`` is used.
    """)
    format = "manual"
    maturity = "beta"

    def register_args(self, parser):
        parser.set_defaults(mode="ask")
        parser.add_argument(
            "source",
            metavar="SOURCE",
            type=ConfFileType("r+", "load", parse_profile=PARSECONF_STRICT_NC),
            help="The source configuration file to pull changes from. "
            "(Typically the :file:`local` conf file)"
        ).completer = conf_files_completer
        parser.add_argument("target",
                            metavar="TARGET",
                            type=ConfFileType("r+",
                                              "none",
                                              accept_dir=True,
                                              parse_profile=PARSECONF_STRICT),
                            help=dedent("""\
            Configuration file or directory to push the changes into.
            (Typically the :file:`default` folder)
            """)).completer = conf_files_completer
        grp1 = parser.add_mutually_exclusive_group()
        grp1.add_argument("--batch",
                          "-b",
                          action="store_const",
                          dest="mode",
                          const="batch",
                          help=dedent("""\
            Use batch mode where all configuration settings are automatically promoted.
            All changes are removed from source and applied to target.
            The source file will be removed, unless
            ``--keep-empty`` is used."""))
        grp1.add_argument("--interactive",
                          "-i",
                          action="store_const",
                          dest="mode",
                          const="interactive",
                          help=dedent("""\
            Enable interactive mode where the user will be prompted to approve
            the promotion of specific stanzas and attributes.
            The user will be able to apply, skip, or edit the changes being promoted."""
                                      ))
        parser.add_argument(
            "--force",
            "-f",
            action="store_true",
            default=False,
            help="Disable safety checks. "
            "Don't check to see if SOURCE and TARGET share the same basename.")
        parser.add_argument("--keep",
                            "-k",
                            action="store_true",
                            default=False,
                            help=dedent("""\
            Keep conf settings in the source file.
            All changes will be copied into the target file instead of being moved there.
            This is typically a bad idea since local always overrides default."""
                                        ))
        parser.add_argument("--keep-empty",
                            action="store_true",
                            default=False,
                            help=dedent("""\
            Keep the source file, even if after the settings promotions the file has no content.
            By default, SOURCE will be removed after all content has been moved into TARGET.
            Splunk will re-create any necessary local files on the fly."""))

    def run(self, args):
        if isinstance(args.target, ConfDirProxy):
            # If a directory is given instead of a target file, then assume the source filename
            # and target filename are the same.
            # Also handle local/default meta:     e.g.:   ksconf promote local.meta .
            source_basename = os.path.basename(args.source.name)
            if source_basename == "local.meta":
                args.target = args.target.get_file("default.meta")
            else:
                args.target = args.target.get_file(source_basename)
            del source_basename

        if not os.path.isfile(args.target.name):
            self.stdout.write(
                "Target file {} does not exist.  Moving source file {} to the target.\n"
                .format(args.target.name, args.source.name))
            # For windows:  Close out any open file descriptors first
            args.target.close()
            args.source.close()
            if args.keep:
                shutil.copy2(args.source.name, args.target.name)
            else:
                shutil.move(args.source.name, args.target.name)
            return

        # If src/dest are the same, then the file ends up being deleted.  Whoops!
        if _samefile(args.source.name, args.target.name):
            self.stderr.write(
                "Aborting.  SOURCE and TARGET are the same file!\n")
            return EXIT_CODE_FAILED_SAFETY_CHECK

        fp_source = file_fingerprint(args.source.name)
        fp_target = file_fingerprint(args.target.name)

        # Todo: Add a safety check prevent accidental merge of unrelated files.
        # Scenario: promote local/props.conf into default/transforms.conf
        # Possible check (1) Are the basenames different?  (props.conf vs transforms.conf)
        # Possible check (2) Are there keys in common? (DEST_KEY vs REPORT)
        # Using #1 for now, consider if there's value in #2
        bn_source = os.path.basename(args.source.name)
        bn_target = os.path.basename(args.target.name)
        if bn_source.endswith(".meta") and bn_target.endswith(".meta"):
            # Allow local.meta -> default.meta without --force or a warning message
            pass
        elif bn_source != bn_target:
            # Todo: Allow for interactive prompting when in interactive but not force mode.
            if args.force:
                self.stderr.write(
                    "Promoting content across conf file types ({0} --> {1}) because the "
                    "'--force' CLI option was set.\n".format(
                        bn_source, bn_target))
            else:
                self.stderr.write(
                    "Refusing to promote content between different types of configuration "
                    "files.  {0} --> {1}  If this is intentional, override this safety "
                    "check with '--force'\n".format(bn_source, bn_target))
                return EXIT_CODE_FAILED_SAFETY_CHECK

        # Todo:  Preserve comments in the TARGET file.  Worry with promoting of comments later...
        # Parse all config files
        cfg_src = args.source.data
        cfg_tgt = args.target.data

        if not cfg_src:
            self.stderr.write(
                "No settings in {}.  Nothing to promote.\n".format(
                    args.source.name))
            return EXIT_CODE_NOTHING_TO_DO

        if args.mode == "ask":
            # Show a summary of how many new stanzas would be copied across; how many key changes.
            # And either accept all (batch) or pick selectively (interactive)
            delta = compare_cfgs(cfg_tgt, cfg_src, allow_level0=False)
            delta = [op for op in delta if op.tag != DIFF_OP_DELETE]
            summarize_cfg_diffs(delta, self.stderr)

            while True:
                resp = input("Would you like to apply ALL changes?  (y/n/d/q)")
                resp = resp[:1].lower()
                if resp == 'q':
                    return EXIT_CODE_USER_QUIT
                elif resp == 'd':
                    show_diff(self.stdout,
                              delta,
                              headers=(args.source.name, args.target.name))
                elif resp == 'y':
                    args.mode = "batch"
                    break
                elif resp == 'n':
                    args.mode = "interactive"
                    break

        if args.mode == "interactive":
            (cfg_final_src, cfg_final_tgt) = self._do_promote_interactive(
                cfg_src, cfg_tgt, args)
        else:
            (cfg_final_src, cfg_final_tgt) = self._do_promote_automatic(
                cfg_src, cfg_tgt, args)

        # Minimize race condition:  Do file mtime/hash check here.  Abort on external change.
        # Todo: Eventually use temporary files and atomic renames to further minimize the risk
        # Todo: Make backup '.bak' files (user configurable)
        # Todo: Avoid rewriting files if NO changes were made. (preserve prior backups)
        # Todo: Restore file modes and such

        if file_fingerprint(args.source.name, fp_source):
            self.stderr.write(
                "Aborting!  External source file changed: {0}\n".format(
                    args.source.name))
            return EXIT_CODE_EXTERNAL_FILE_EDIT
        if file_fingerprint(args.target.name, fp_target):
            self.stderr.write(
                "Aborting!  External target file changed: {0}\n".format(
                    args.target.name))
            return EXIT_CODE_EXTERNAL_FILE_EDIT
        # Reminder:  conf entries are being removed from source and promoted into target
        args.target.dump(cfg_final_tgt)
        if not args.keep:
            # If --keep is set, we never touch the source file.
            if cfg_final_src:
                args.source.dump(cfg_final_src)
            else:
                # Config file is empty.  Should we write an empty file, or remove it?
                if args.keep_empty:
                    args.source.dump(cfg_final_src)
                else:
                    args.source.unlink()

    @staticmethod
    def _do_promote_automatic(cfg_src, cfg_tgt, args):
        # Promote ALL entries;  simple, isn't it...  ;-)
        final_cfg = merge_conf_dicts(cfg_tgt, cfg_src)
        return ({}, final_cfg)

    def _do_promote_interactive(self, cfg_src, cfg_tgt, args):
        ''' Interactively "promote" settings from one configuration file into another

        Modeled after git's "patch" mode, from the git docs:

        This lets you choose one path out of a status-like selection. After choosing the path, it
        presents the diff between the index and the working tree file and asks you if you want to
        stage the change of each hunk. You can select one of the following options and type return:

           y - stage this hunk
           n - do not stage this hunk
           q - quit; do not stage this hunk or any of the remaining ones
           a - stage this hunk and all later hunks in the file
           d - do not stage this hunk or any of the later hunks in the file
           g - select a hunk to go to
           / - search for a hunk matching the given regex
           j - leave this hunk undecided, see next undecided hunk
           J - leave this hunk undecided, see next hunk
           k - leave this hunk undecided, see previous undecided hunk
           K - leave this hunk undecided, see previous hunk
           s - split the current hunk into smaller hunks
           e - manually edit the current hunk
           ? - print help


        Note:  In git's "edit" mode you are literally editing a patch file, so you can modify both
        the working tree file as well as the file that's being staged.  While this is nifty,
        git's own documentation points out (in other places) that "some changes may have confusing
        results".  Therefore, it probably makes sense to limit what the user can edit.

        ============================================================================================

        Options we may be able to support:

           Pri k   Description
           --- -   -----------
           [1] y - stage this section or key
           [1] n - do not stage this section or key
           [1] q - quit; do not stage this or any of the remaining sections or attributes
           [2] a - stage this section or key and all later sections in the file
           [2] d - do not stage this section or key or any of the later section or key in the file
           [1] s - split the section into individual attributes
           [3] e - edit the current section or key
           [2] ? - print help

        Q:  Is it less confusing to the user to adopt the 'local' and 'default' paradigm here?
        Even though we know that change promotions will not *always* be between default and local.
        (We can and should assume some familiarity with Splunk conf terms, less so than familiarity
        with git lingo.)
        '''
        def prompt_yes_no(prompt):
            while True:
                r = input(prompt + " (y/n)")
                if r.lower().startswith("y"):
                    return True
                elif r.lower().startswith("n"):
                    return False

        out_src = deepcopy(cfg_src)
        out_cfg = deepcopy(cfg_tgt)
        ###  Todo:  IMPLEMENT A MANUAL MERGE/DIFF HERE:
        # Whatever is migrated, move it OUT of cfg_src, and into cfg_tgt

        diff = compare_cfgs(cfg_tgt, cfg_src, allow_level0=False)
        for op in diff:
            if op.tag == DIFF_OP_DELETE:
                # This is normal.   Not all default entries will be updated in local.
                continue
            elif op.tag == DIFF_OP_EQUAL:
                # Q:  Should we simply remove everything from the source file that already lines
                #     up with the target?  (Probably?)  For now just skip...
                if prompt_yes_no("Remove matching entry {0}  ".format(
                        op.location)):
                    if isinstance(op.location, DiffStanza):
                        del out_src[op.location.stanza]
                    else:
                        del out_src[op.location.stanza][op.location.key]
            else:
                '''
                self.stderr.write("Found change:  <{0}> {1!r}\n-{2!r}\n+{3!r}\n\n\n"
                    .format(op.tag, op.location, op.b, op.a))
                '''
                if isinstance(op.location, DiffStanza):
                    # Move entire stanza
                    show_diff(self.stdout, [op])
                    if prompt_yes_no("Apply  [{0}]".format(
                            op.location.stanza)):
                        out_cfg[op.location.stanza] = op.a
                        del out_src[op.location.stanza]
                else:
                    show_diff(self.stdout, [op])
                    if prompt_yes_no("Apply [{0}] {1}".format(
                            op.location.stanza, op.location.key)):
                        # Move key
                        out_cfg[op.location.stanza][op.location.key] = op.a
                        del out_src[op.location.stanza][op.location.key]
                        # If it's the last remaining key in the src stanza, then delete the entire stanza
                        if not out_src[op.location.stanza]:
                            del out_src[op.location.stanza]
        return (out_src, out_cfg)
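PromoteCmd's --batch and --interactive flags above share a single dest through store_const, with set_defaults() supplying the 'ask' fallback. A standalone sketch of that pattern (plain argparse, not ksconf code):

import argparse

parser = argparse.ArgumentParser(prog="demo-promote")
parser.set_defaults(mode="ask")
grp = parser.add_mutually_exclusive_group()
grp.add_argument("--batch", "-b", action="store_const", dest="mode", const="batch")
grp.add_argument("--interactive", "-i", action="store_const", dest="mode", const="interactive")

print(parser.parse_args([]).mode)           # ask
print(parser.parse_args(["--batch"]).mode)  # batch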
Example #14
class UnarchiveCmd(KsconfCmd):
    help = "Install or upgrade an existing app in a git-friendly and safe way"
    description = dedent("""
    Install or overwrite an existing app in a git-friendly way.
    If the app already exists, steps will be taken to upgrade it safely.

    The 'default' folder can be redirected to another path (e.g., 'default.d/10-upstream'),
    which is helpful if you're using the ksconf 'combine' mode.

    Supports tarballs (.tar.gz, .spl) and less common zip files (.zip).
    """)
    format = "manual"
    maturity = "beta"

    def register_args(self, parser):
        parser.add_argument("tarball", metavar="SPL",
                            help="The path to the archive to install."
                            ).completer = FilesCompleter(allowednames=allowed_extentions)
        parser.add_argument("--dest", metavar="DIR", default=".", help="""
            Set the destination path where the archive will be extracted.
            By default the current directory is used, but sane values include etc/apps,
            etc/deployment-apps, and so on.
            This could also be a git repository working tree where splunk apps are stored."""
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--app-name", metavar="NAME", default=None, help="""
            The app name to use when expanding the archive.
            By default, the app name is taken from the archive as the top-level path included
            in the archive (by convention).
            Expanding archives that contain multiple (ITSI) or nested apps (NIX, ES)
            is not supported.""")
        parser.add_argument("--default-dir", default="default", metavar="DIR", help="""
            Name of the directory where the default contents will be stored.
            This is a useful feature for apps that use a dynamic default directory
            that's created and managed by the 'combine' mode."""
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--exclude", "-e", action="append", default=[], help="""
            Add a file pattern to exclude.  Splunk's pseudo-glob patterns are supported here.
            '*' for any non-directory match,
            '...' for ANY (including directories),
            and '?' for a single character.""")
        parser.add_argument("--keep", "-k", action="append", default=[],
                            help="""
            Specify a pattern for files to preserve during an upgrade.
            Repeat this argument to keep multiple patterns.""")
        parser.add_argument("--allow-local", default=False, action="store_true", help="""
            Allow local/ and local.meta files to be extracted from the archive.
            Shipping local files is a Splunk app packaging violation so local files are blocked
            to prevent content from being overridden.""")
        parser.add_argument("--git-sanity-check",
                            choices=["off", "changed", "untracked", "ignored"],
                            default="untracked", help="""
            By default 'git status' is run on the destination folder to detect working tree or
            index modifications before the unarchive process starts, but this is configurable.
            Sanity check choices go from least restrictive to most thorough:
            Use 'off' to prevent any 'git status' safety checks.
            Use 'changed' to abort only upon local modifications to files tracked by git.
            Use 'untracked' (the default) to look for changed and untracked files before
            considering the tree clean.
            Use 'ignored' to enable the most thorough safety check, which will abort if local
            changes, untracked files, or ignored files are found.
            NOTE:  Sanity checks are automatically disabled if the app is not in a git working
            tree, or git is not installed.""")
        parser.add_argument("--git-mode", default="stage",
                            choices=["nochange", "stage", "commit"], help="""
            Set the desired level of git integration.
            The default mode is 'stage', where new, updated, or removed files are automatically
            handled for you.
            If 'commit' mode is selected, then files are committed with an auto-generated
            commit message.
            To prevent any 'git add' or 'git rm' commands from being run, pick the
            'nochange' mode.
            Notes:  (1) The git mode is irrelevant if the app is not in a git working tree.
            (2) If a git commit is incorrect, simply roll it back with 'git reset' or fix
            it with a 'git commit --amend' before the changes are pushed anywhere else.
            (That's why you're using git in the first place, right?)""")
        parser.add_argument("--no-edit",
                            action="store_true", default=False, help="""
            Tell git to skip opening your editor.
            By default you will be prompted to review/edit the commit message.
            (Git Tip:  Delete the content of the message to abort the commit.)""")
        parser.add_argument("--git-commit-args", "-G", default=[], action="append")

    def run(self, args):
        """ Install / upgrade a Splunk app from an archive file """
        # Handle ignored files by preserving them as much as possible.
        # Add --dry-run mode?  j/k - that's what git is for!

        if not os.path.isfile(args.tarball):
            self.stderr.write("No such file or directory {}\n".format(args.tarball))
            return EXIT_CODE_FAILED_SAFETY_CHECK

        if not os.path.isdir(args.dest):
            self.stderr.write("Destination directory does not exist: {}\n".format(args.dest))
            return EXIT_CODE_FAILED_SAFETY_CHECK

        f_hash = file_hash(args.tarball)
        self.stdout.write("Inspecting archive:               {}\n".format(args.tarball))

        new_app_name = args.app_name
        # ARCHIVE PRE-CHECKS:  Archive must contain only one app, no weird paths, ...
        app_name = set()
        app_conf = {}
        files = 0
        local_files = set()
        a = extract_archive(args.tarball, extract_filter=gaf_filter_name_like("app.conf"))
        for gaf in sanity_checker(a):
            gaf_app, gaf_relpath = gaf.path.split("/", 1)
            files += 1
            if gaf.path.endswith("app.conf") and gaf.payload:
                conffile = StringIO(gaf.payload.decode(default_encoding))
                conffile.name = os.path.join(args.tarball, gaf.path)
                app_conf = parse_conf(conffile, profile=PARSECONF_LOOSE)
                del conffile
            elif gaf_relpath.startswith("local") or gaf_relpath.endswith("local.meta"):
                local_files.add(gaf_relpath)
            app_name.add(gaf.path.split("/", 1)[0])
            del gaf_app, gaf_relpath
        if len(app_name) > 1:
            self.stderr.write("The 'unarchive' command only supports extracting a single Splunk"
                              " app at a time.\nHowever, the archive {} contains {} apps:  {}\n"
                              "".format(args.tarball, len(app_name), ", ".join(app_name)))
            return EXIT_CODE_FAILED_SAFETY_CHECK
        else:
            app_name = app_name.pop()
        del a
        if local_files:
            self.stderr.write("Found {} local files in the archive.  ".format(len(local_files)))
            if args.allow_local:
                self.stderr.write("Keeping these due to the '--allow-local' flag\n")
            else:
                self.stderr.write("Excluding local files by default.  "
                                  "Use '--allow-local' to override.\n")

        if not new_app_name and True:  # if not --no-app-name-fixes
            if app_name.endswith("-master"):
                self.stdout.write("Automatically dropping '-master' from the app name.  "
                                  "This is often the result of a github export.\n")
                # Trick, but it works...
                new_app_name = app_name[:-7]
            mo = re.search(r"(.*)-\d+\.[\d.-]+$", app_name)
            if mo:
                self.stdout.write("Automatically removing the version suffix from the app name.  "
                                  "'{}' will be extracted as '{}'\n".format(app_name, mo.group(1)))
                new_app_name = mo.group(1)

        app_basename = new_app_name or app_name
        dest_app = os.path.join(args.dest, app_basename)
        self.stdout.write("Inspecting destination folder:    {}\n".format(os.path.abspath(dest_app)))

        # FEEDBACK TO THE USER:   UPGRADE VS INSTALL, GIT?, APP RENAME, ...
        app_name_msg = app_name
        vc_msg = "without version control support"

        old_app_conf = {}
        if os.path.isdir(dest_app):
            mode = "upgrade"
            is_git = git_is_working_tree(dest_app)
            try:
                # Ignoring the 'local' entries since distributed apps shouldn't contain local
                old_app_conf_file = os.path.join(dest_app, args.default_dir or "default", "app.conf")
                old_app_conf = parse_conf(old_app_conf_file, profile=PARSECONF_LOOSE)
            except ConfParserException:
                self.stderr.write("Unable to read app.conf from existing install.\n")
        else:
            mode = "install"
            is_git = git_is_working_tree(args.dest)
        if is_git:
            vc_msg = "with git support"
        if new_app_name and new_app_name != app_name:
            app_name_msg = "{} (renamed from {})".format(new_app_name, app_name)

        def show_pkg_info(conf, label):
            self.stdout.write("{} packaging info:    '{}' by {} (version {})\n".format(
                label,
                conf.get("ui", {}).get("label", "Unknown"),
                conf.get("launcher", {}).get("author", "Unknown"),
                conf.get("launcher", {}).get("version", "Unknown")))

        if old_app_conf:
            show_pkg_info(old_app_conf, " Installed app")
        if app_conf:
            show_pkg_info(app_conf, "   Tarball app")

        self.stdout.write("About to {} the {} app {}.\n".format(mode, app_name_msg, vc_msg))

        existing_files = set()
        if mode == "upgrade":
            if is_git:
                existing_files.update(git_ls_files(dest_app))
                if not existing_files:
                    self.stderr.write("App is in a git repository but no files have been staged "
                                      "or committed.  Either commit or remove '{}' and try again."
                                      "\n".format(dest_app))
                    return EXIT_CODE_FAILED_SAFETY_CHECK
                if args.git_sanity_check == "off":
                    self.stdout.write("The 'git status' safety checks have been disabled via CLI "
                                      "argument.  Skipping.\n")
                else:
                    d = {
                        #        untracked, ignored
                        "changed": (False, False),
                        "untracked": (True, False),
                        "ignored": (True, True)
                    }
                    is_clean = git_is_clean(dest_app, *d[args.git_sanity_check])
                    del d
                    if is_clean:
                        self.stdout.write("Git folder is clean.  "
                                          "Okay to proceed with the upgrade.\n")
                    else:
                        self.stderr.write("Unable to move forward without a clean working tree.\n"
                                          "Clean up and try again.  "
                                          "Modifications are listed below.\n\n")
                        self.stderr.flush()
                        if args.git_sanity_check == "changed":
                            git_status_ui(dest_app, "--untracked-files=no")
                        elif args.git_sanity_check == "ignored":
                            git_status_ui(dest_app, "--ignored")
                        else:
                            git_status_ui(dest_app)
                        return EXIT_CODE_FAILED_SAFETY_CHECK
            else:
                for (root, dirs, filenames) in os.walk(dest_app):
                    for fn in filenames:
                        existing_files.add(os.path.join(root, fn))
            self.stdout.write("Before upgrade.  App has {} files\n".format(len(existing_files)))
        elif is_git:
            self.stdout.write("Git clean check skipped.  Not needed for a fresh app install.\n")

        def fixup_pattern_bw(patterns, prefix=None):
            modified = []
            for pattern in patterns:
                if pattern.startswith("./"):
                    if prefix:
                        pattern = "{0}/{1}".format(prefix, pattern[2:])
                    else:
                        pattern = pattern[2:]
                    modified.append(pattern)
                # If a pattern like 'tags.conf' or '*.bak' is provided, use a basename match (any dir)
                elif "/" not in pattern:
                    modified.append("(^|.../)" + pattern)
                else:
                    modified.append(pattern)
            return modified

        # PREP ARCHIVE EXTRACTION
        installed_files = set()
        excludes = list(args.exclude)
        '''
        for pattern in args.exclude:
            # If a pattern like 'default.meta' or '*.bak' is provided, assume it's a basename match.
            if "/" not in pattern:
                excludes.append(".../" + pattern)
            else:
                excludes.append(pattern)
        '''
        if not args.allow_local:
            for pattern in local_files:
                excludes.append("./" + pattern)
        excludes = fixup_pattern_bw(excludes, app_basename)
        self.stderr.write("Extraction exclude patterns:  {!r}\n".format(excludes))
        path_rewrites = []
        files_iter = extract_archive(args.tarball)
        if True:
            files_iter = sanity_checker(files_iter)
        if args.default_dir:
            rep = "/{}/".format(args.default_dir.strip("/"))
            path_rewrites.append(("/default/", rep))
            del rep
        if new_app_name:
            # We do have the "app_name" extracted from our first pass above, but
            regex = re.compile(r'^([^/]+)(?=/)')
            path_rewrites.append((regex, new_app_name))
        if path_rewrites:
            files_iter = gen_arch_file_remapper(files_iter, path_rewrites)

        self.stdout.write("Extracting app now...\n")
        for gaf in files_iter:
            if match_bwlist(gaf.path, excludes, escape=False):
                self.stdout.write("Skipping [blacklist] {}\n".format(gaf.path))
                continue
            if not is_git or args.git_mode in ("nochange", "stage"):
                self.stdout.write("{0:60s} {2:o} {1:-6d}\n".format(gaf.path, gaf.size, gaf.mode))
            installed_files.add(gaf.path.split("/", 1)[1])
            full_path = os.path.join(args.dest, gaf.path)
            dir_exists(os.path.dirname(full_path))
            with open(full_path, "wb") as fp:
                fp.write(gaf.payload)
            os.chmod(full_path, gaf.mode)
            del fp, full_path

        files_new, files_upd, files_del = _cmp_sets(installed_files, existing_files)
        '''
        print "New: \n\t{}".format("\n\t".join(sorted(files_new)))
        print "Existing: \n\t{}".format("\n\t".join(sorted(files_upd)))
        print "Removed:  \n\t{}".format("\n\t".join(sorted(files_del)))
        '''

        self.stdout.write("Extracted {} files:  {} new, {} existing, and {} removed\n".format(
            len(installed_files), len(files_new), len(files_upd), len(files_del)))

        # Filter out "removed" files, and let us keep some based on a keep-whitelist.  This should
        # include things like local, ".gitignore", ".gitattributes" and so on

        keep_list = [".git*"]
        keep_list.extend(args.keep)
        if not args.allow_local:
            keep_list += ["local/...", "local.meta"]
        keep_list = fixup_pattern_bw(keep_list)
        self.stderr.write("Keep file patterns:  {!r}\n".format(keep_list))

        files_to_delete = []
        files_to_keep = []
        for fn in files_del:
            if match_bwlist(fn, keep_list, escape=False):
                # How to handle a keep of "default.d/..." when we DO want to cleanup the default
                # redirect folder of "default.d/10-upstream"?
                # This may be an academic question since most apps will continue to send
                # an ever increasing list of default files (to mask out old/unused ones)
                self.stdout.write("Keeping {}\n".format(fn))
                files_to_keep.append(fn)
            else:
                files_to_delete.append(fn)
        if files_to_keep:
            self.stdout.write("Keeping {} of {} files marked for deletion due to whitelist.\n"
                              .format(len(files_to_keep), len(files_del)))
        git_rm_queue = []

        if files_to_delete:
            self.stdout.write("Removing files not present in the upgraded version of the app.\n")
        for fn in files_to_delete:
            path = os.path.join(dest_app, fn)
            if is_git and args.git_mode in ("stage", "commit"):
                self.stdout.write("git rm -f {}\n".format(path))
                git_rm_queue.append(fn)
            else:
                self.stdout.write("rm -f {}\n".format(path))
                os.unlink(path)

        if git_rm_queue:
            # Run "git rm file1 file2 file3 ..." (using an xargs-like mechanism)
            git_cmd_iterable(["rm"], git_rm_queue, cwd=dest_app)
        del git_rm_queue

        if is_git:
            if args.git_mode in ("stage", "commit"):
                git_cmd(["add", os.path.basename(dest_app)], cwd=os.path.dirname(dest_app))
                # self.stdout.write("git add {}\n".format(os.path.basename(dest_app)))
            '''
            else:
                self.stdout.write("git add {}\n".format(dest_app))
            '''

            # Is there anything to stage/commit?
            if git_is_clean(os.path.dirname(dest_app), check_untracked=False):
                self.stderr.write("No changes detected.  Nothing to {}\n".format(args.git_mode))
                return

            git_commit_app_name = app_conf.get("ui", {}).get("label", os.path.basename(dest_app))
            git_commit_new_version = app_conf.get("launcher", {}).get("version", None)
            if mode == "install":
                git_commit_message = "Install {}".format(git_commit_app_name)

                if git_commit_new_version:
                    git_commit_message += " version {}".format(git_commit_new_version)
            else:
                # Todo:  Specify Upgrade/Downgrade/Refresh
                git_commit_message = "Upgrade {}".format(
                    git_commit_app_name)
                git_commit_old_version = old_app_conf.get("launcher", {}).get("version", None)
                if git_commit_old_version and git_commit_new_version:
                    git_commit_message += " version {} (was {})".format(git_commit_new_version,
                                                                        git_commit_old_version)
                elif git_commit_new_version:
                    git_commit_message += " to version {}".format(git_commit_new_version)
            # Could possibly include some CLI arg details, like what file patterns were excluded
            git_commit_message += "\n\nSHA256 {} {}\n\nSplunk-App-managed-by: ksconf" \
                .format(f_hash, os.path.basename(args.tarball))
            git_commit_cmd = ["commit", os.path.basename(dest_app), "-m", git_commit_message]

            if not args.no_edit:
                git_commit_cmd.append("--edit")

            git_commit_cmd.extend(args.git_commit_args)

            if args.git_mode == "commit":
                capture_std = True if args.no_edit else False
                proc = git_cmd(git_commit_cmd, cwd=os.path.dirname(dest_app),
                               capture_std=capture_std)
                if proc.returncode == 0:
                    self.stderr.write(dedent("""\
                    Your changes have been committed.  Please review before pushing.  If you
                    find any issues, here are some possible solutions:


                    To fix issues in the last commit, edit and add the files to be fixed, then run:

                        git commit --amend

                    To roll back the last commit but KEEP the app upgrade, run:

                        git reset --soft HEAD^1

                    To roll back the last commit and REVERT the app upgrade, run:

                        git reset --hard HEAD^1

                    NOTE:  Make sure you have *no* other uncommitted changes before running 'reset'.
                    """))
                else:
                    self.stderr.write("Git commit failed.  Return code {}.  Git args:  git {}\n"
                                      .format(proc.returncode, list2cmdline(git_commit_cmd)))
                    return EXIT_CODE_GIT_FAILURE
            elif args.git_mode == "stage":
                self.stdout.write("To commit later, use the following\n")
                self.stdout.write(
                    "\tgit {}\n".format(list2cmdline(git_commit_cmd).replace("\n", "\\n")))
Example No. 15
class CombineCmd(KsconfCmd):
    help = dedent("""\
    Combine configuration files across multiple source directories into a single
    destination directory.  This allows for an arbitrary number of Splunk
    configuration layers to coexist within a single app.  Useful for both ongoing
    merges and one-time ad-hoc use.
    """)
    description = dedent("""\
    Merge .conf settings from multiple source directories into a combined target
    directory.  Configuration files can be stored in an ``/etc/*.d``-like directory
    structure and consolidated back into a single 'default' directory.

    This command supports both one-time operations and recurring merge jobs.  For
    example, this command can be used to combine all users' knowledge objects (stored
    in 'etc/users') after a server migration, or to merge a single user's settings
    after their account has been renamed.  Recurring operations assume some type
    of external scheduler is being used.  A best effort is made to only write to
    target files as needed.

    The 'combine' command takes your logical layers of configs (upstream, corporate,
    splunk admin fixes, and power user knowledge objects, ...) expressed as
    individual folders and merges them all back into the single ``default`` folder
    that Splunk reads from.  One way to keep the 'default' folder up-to-date is
    using client-side git hooks.

    No directory layout is mandatory, but one simple approach is to model your
    layers using a prioritized 'default.d' directory structure. (This idea is
    borrowed from the Unix System V concept where many services natively read their
    config files from ``/etc/*.d`` directories.)
    """)
    format = "manual"
    maturity = "beta"

    def register_args(self, parser):
        parser.add_argument("source",
                            nargs="+",
                            help=dedent("""
            The source directory where configuration files will be merged from.
            When multiple source directories are provided, start with the most general and end
            with the most specific; later sources will override values from the earlier ones.
            Supports wildcards so a typical Unix ``conf.d/##-NAME`` directory structure works well."""
                                        )).completer = DirectoriesCompleter()
        parser.add_argument("--target",
                            "-t",
                            help=dedent("""
            Directory where the merged files will be stored.
            Typically either 'default' or 'local'""")
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--dry-run",
                            "-D",
                            default=False,
                            action="store_true",
                            help=dedent("""
            Enable dry-run mode.
            Instead of writing to TARGET, preview changes as a 'diff'.
            If TARGET doesn't exist, then show the merged file."""))
        parser.add_argument(
            "--banner",
            "-b",
            default=
            " **** WARNING: This file is managed by 'ksconf combine', do "
            "not edit hand-edit this file! ****",
            help=
            "A banner or warning comment added to the top of the TARGET file. "
            "Used to discourage Splunk admins from editing an auto-generated "
            "file.")

    def run(self, args):
        # Matching is case-sensitive.  If you're on Windows, name your files accordingly.
        conf_file_re = re.compile(r"([a-z]+\.conf|(default|local)\.meta)$")

        if args.target is None:
            self.stderr.write("Must provide the '--target' directory.\n")
            return EXIT_CODE_MISSING_ARG

        self.stderr.write("Combining conf files into directory {}\n".format(
            args.target))
        args.source = list(_expand_glob_list(args.source))
        for src in args.source:
            self.stderr.write(
                "Reading conf files from directory {}\n".format(src))

        marker_file = os.path.join(args.target, CONTROLLED_DIR_MARKER)
        if os.path.isdir(args.target):
            if not os.path.isfile(marker_file):
                self.stderr.write(
                    "Target directory already exists, but it appears to have been "
                    "created by some other means.  Marker file missing.\n")
                return EXIT_CODE_COMBINE_MARKER_MISSING
        elif args.dry_run:
            self.stderr.write(
                "Skipping creating destination directory {0} (dry-run)\n".
                format(args.target))
        else:
            self.stderr.write("Creating destination directory {0}\n".format(
                args.target))
            os.mkdir(args.target)
            with open(marker_file, "w") as marker:
                marker.write("This directory is managed by KSCONF.  Don't touch\n")

        # Build a common tree of all src files.
        src_file_index = defaultdict(list)
        for src_root in args.source:
            for (root, dirs, files) in relwalk(src_root):
                for fn in files:
                    # Todo: Add blacklist CLI support:  defaults to consider: *sw[po], .git*, .bak, .~
                    if fn.endswith(".swp") or fn.endswith("*.bak"):
                        continue  # pragma: no cover  (peephole optimization)
                    src_file = os.path.join(root, fn)
                    src_path = os.path.join(src_root, root, fn)
                    src_file_index[src_file].append(src_path)

        # Find a set of files that exist in the target folder, but in NO source folder (for cleanup)
        target_extra_files = set()
        for (root, dirs, files) in relwalk(args.target):
            for fn in files:
                tgt_file = os.path.join(root, fn)
                if tgt_file not in src_file_index:
                    # Todo:  Add support for additional blacklist wildcards (using fnmatch)
                    if fn == CONTROLLED_DIR_MARKER or fn.endswith(".bak"):
                        continue  # pragma: no cover (peephole optimization)
                    target_extra_files.add(tgt_file)

        for (dest_fn, src_files) in sorted(src_file_index.items()):
            # Source file must be in sort order (10-x is lower prio and therefore replaced by 90-z)
            src_files = sorted(src_files)
            dest_path = os.path.join(args.target, dest_fn)

            # Make missing destination folder, if missing
            dest_dir = os.path.dirname(dest_path)
            if not os.path.isdir(dest_dir) and not args.dry_run:
                os.makedirs(dest_dir)

            # Handle conf files and non-conf files separately
            if not conf_file_re.search(dest_fn):
                # self.stderr.write("Considering {0:50}  NON-CONF Copy from source:  {1!r}\n".format(dest_fn, src_files[-1]))
                # Always use the last file in the list (since last directory always wins)
                src_file = src_files[-1]
                if args.dry_run:
                    if os.path.isfile(dest_path):
                        if file_compare(src_file, dest_path):
                            smart_rc = SMART_NOCHANGE
                        else:
                            if (_is_binary_file(src_file)
                                    or _is_binary_file(dest_path)):
                                # Binary files.  Can't compare...
                                smart_rc = "DRY-RUN (NO-DIFF=BIN)"
                            else:
                                show_text_diff(self.stdout, dest_path,
                                               src_file)
                                smart_rc = "DRY-RUN (DIFF)"
                    else:
                        smart_rc = "DRY-RUN (NEW)"
                else:
                    smart_rc = smart_copy(src_file, dest_path)
                if smart_rc != SMART_NOCHANGE:
                    self.stderr.write("Copy <{0}>   {1:50}  from {2}\n".format(
                        smart_rc, dest_path, src_file))
            else:
                try:
                    # Handle merging conf files
                    dest = ConfFileProxy(os.path.join(args.target, dest_fn),
                                         "r+",
                                         parse_profile=PARSECONF_MID)
                    srcs = [
                        ConfFileProxy(sf, "r", parse_profile=PARSECONF_STRICT)
                        for sf in src_files
                    ]
                    # self.stderr.write("Considering {0:50}  CONF MERGE from source:  {1!r}\n".format(dest_fn, src_files[0]))
                    smart_rc = merge_conf_files(dest,
                                                srcs,
                                                dry_run=args.dry_run,
                                                banner_comment=args.banner)
                    if smart_rc != SMART_NOCHANGE:
                        self.stderr.write(
                            "Merge <{0}>   {1:50}  from {2!r}\n".format(
                                smart_rc, dest_path, src_files))
                finally:
                    # Protect against any dangling open files:  (ResourceWarning: unclosed file)
                    dest.close()
                    for src in srcs:
                        src.close()

        if target_extra_files:  # Todo: Allow for cleanup to be disabled via CLI
            self.stderr.write(
                "Cleaning up extra files not part of source tree(s):  {0} files.\n"
                .format(len(target_extra_files)))
            for dest_fn in target_extra_files:
                self.stderr.write("Remove unwanted file {0}\n".format(dest_fn))
                os.unlink(os.path.join(args.target, dest_fn))
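The `_expand_glob_list` helper called at the top of `run()` is not part of this snippet.  Here is a minimal sketch under the assumption that it simply expands shell-style wildcards in each source argument and keeps matches sorted, so that numeric prefixes (``default.d/10-upstream``, ``default.d/90-local``) preserve their intended precedence:

import glob

def _expand_glob_list(patterns):
    # Hypothetical sketch:  expand wildcard patterns, pass literal paths through.
    for pattern in patterns:
        if any(ch in pattern for ch in "*?["):
            for match in sorted(glob.glob(pattern)):
                yield match
        else:
            yield pattern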
Example No. 16
class SortCmd(KsconfCmd):
    help = "Sort a Splunk .conf file creating a normalized format appropriate for version control"
    description = dedent("""\
    Sort a Splunk .conf file.  Sort has two modes:  (1) by default, the sorted
    config file will be echoed to the screen.  (2) the config files are updated
    in-place when the ``-i`` option is used.

    Manually managed conf files can be protected against changes by adding a comment containing the
    string ``KSCONF-NO-SORT`` to the top of any .conf file.
    """)
    format = "manual"
    maturity = "stable"

    def register_args(self, parser):
        import argparse
        parser.add_argument("conf", metavar="FILE", nargs="+",
                            default=["-"],
                            help="Input file to sort, or standard input."
                            ).completer = conf_files_completer

        # Pick mode:  target (sysout) vs inplace
        mode = parser.add_mutually_exclusive_group()
        mode.add_argument("--target", "-t", metavar="FILE",
                          type=argparse.FileType('w'), default=self.stdout,
                          help="File to write results to.  Defaults to standard output."
                          ).completer = conf_files_completer
        mode.add_argument("--inplace", "-i",
                          action="store_true", default=False, help=dedent("""\
                          Replace the input file with a sorted version.

                          WARNING:  This is a potentially destructive operation that
                          may move/remove comments."""))

        # Inplace update arguments
        grp1 = parser.add_argument_group("In-place update arguments")
        grp1.add_argument("-F", "--force", action="store_true",
                          help=dedent("""\
                          Force file sorting for all files, even for files containing the special
                          'KSCONF-NO-SORT' marker."""))
        grp1.add_argument("-q", "--quiet", action="store_true",
                          help=dedent("""\
                          Reduce the output.
                          Reports only updated or invalid files.
                          This is useful for pre-commit hooks, for example."""))

        parser.add_argument("-n", "--newlines", metavar="LINES", type=int, default=1,
                            help="Number of lines between stanzas.")

    def pre_run(self, args):
        # For Windows users, expand any glob patterns as needed.
        args.conf = list(expand_glob_list(args.conf))

    def run(self, args):
        ''' Sort one or more configuration files. '''
        stanza_delims = "\n" * args.newlines
        self.parse_profile = PARSECONF_STRICT
        if args.inplace:
            failure = False
            changes = 0
            for conf in args.conf:
                try:
                    if not args.force and _has_nosort_marker(conf):
                        if not args.quiet:
                            self.stderr.write("Skipping no-sort file {}\n".format(conf))
                        continue
                    c = self.parse_conf(conf, mode='r+', raw_exec=True)
                    #c = parse_conf(conf, profile=PARSECONF_STRICT)
                    data = c.data
                    smart_rc = c.dump(c.data, stanza_delim=stanza_delims, sort=True)
                    #smart_rc = smart_write_conf(conf, data, stanza_delim=stanza_delims,
                    #                            sort=True)
                except ConfParserException as e:
                    smart_rc = None
                    self.stderr.write("Error trying to process file {0}.  "
                                      "Error:  {1}\n".format(conf, e))
                    failure = True
                if smart_rc == SMART_NOCHANGE:
                    if not args.quiet:
                        self.stderr.write("Nothing to update.  "
                                          "File {0} is already sorted\n".format(conf))
                else:
                    self.stderr.write("Replaced file {0} with sorted content.\n".format(conf))
                    changes += 1
            if failure:
                return EXIT_CODE_BAD_CONF_FILE
            if changes:
                return EXIT_CODE_SORT_APPLIED
        else:
            for conf in args.conf:
                if len(args.conf) > 1:
                    args.target.write("---------------- [ {0} ] ----------------\n\n"
                                      .format(conf))
                data = self.parse_conf(conf).data
                write_conf(args.target, data, stanza_delim=stanza_delims, sort=True)
            return EXIT_CODE_SUCCESS
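The `_has_nosort_marker` check is referenced but not defined in this snippet.  A minimal sketch, assuming the marker is just the literal string ``KSCONF-NO-SORT`` appearing in a comment near the top of the file:

def _has_nosort_marker(path, max_lines=10):
    # Hypothetical sketch:  look for the KSCONF-NO-SORT marker in the first few lines.
    with open(path) as stream:
        for line_number, line in enumerate(stream):
            if line_number >= max_lines:
                break
            if "KSCONF-NO-SORT" in line:
                return True
    return False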
Example No. 17
class MergeCmd(KsconfCmd):
    help = "Merge two or more .conf files"
    description = dedent("""\
    Merge two or more .conf files into a single combined .conf file.
    This is similar to the way that Splunk logically combines the ``default`` and ``local``
    folders at runtime.
    """)
    maturity = "stable"

    def register_args(self, parser):
        parser.add_argument(
            "conf",
            metavar="FILE",
            nargs="+",
            help="The source configuration file(s) to collect settings from."
        ).completer = conf_files_completer
        parser.add_argument("--target",
                            "-t",
                            metavar="FILE",
                            type=ConfFileType("r+",
                                              "none",
                                              parse_profile=PARSECONF_STRICT),
                            default=ConfFileProxy("<stdout>", "w",
                                                  self.stdout),
                            help=dedent("""\
            Save the merged configuration files to this target file.
            If not provided, the merged conf is written to standard output.""")
                            ).completer = conf_files_completer

        # This is helpful when writing bash expressions like MyApp/{default,local}/props.conf,
        # where either default or local may not be present.
        parser.add_argument("--ignore-missing",
                            "-s",
                            default=False,
                            action="store_true",
                            help="Silently ignore any missing CONF files.")

        parser.add_argument("--dry-run",
                            "-D",
                            default=False,
                            action="store_true",
                            help=dedent("""\
            Enable dry-run mode.
            Instead of writing to TARGET, preview changes in 'diff' format.
            If TARGET doesn't exist, then show the merged file."""))
        parser.add_argument("--banner",
                            "-b",
                            default="",
                            help=dedent("""\
            A banner or warning comment added to the top of the TARGET file.
            Used to discourage Splunk admins from editing an auto-generated file."""
                                        ))

    def run(self, args):
        ''' Merge multiple configuration files into one '''
        self.parse_profile = PARSECONF_MID

        if args.ignore_missing:
            cfgs = [
                self.parse_conf(c) for c in args.conf
                if os.path.isfile(c) or c == "-"
            ]
        else:
            cfgs = [self.parse_conf(conf) for conf in args.conf]

        merge_conf_files(args.target,
                         cfgs,
                         dry_run=args.dry_run,
                         banner_comment=args.banner)
        return EXIT_CODE_SUCCESS
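The merge itself is delegated to `merge_conf_files()`, which is not shown here.  As a rough illustration of the last-file-wins layering that the description compares to Splunk's ``default``/``local`` behavior (a simplified sketch only, not the actual ksconf merge logic, which also handles comments and banners):

def merge_layers(*layers):
    # Simplified sketch:  later layers override earlier ones, stanza by stanza
    # and attribute by attribute.
    merged = {}
    for layer in layers:
        for stanza, attrs in layer.items():
            merged.setdefault(stanza, {}).update(attrs)
    return merged

default_layer = {"web": {"httpport": "8000", "enableSplunkWebSSL": "false"}}
local_layer = {"web": {"enableSplunkWebSSL": "true"}}
print(merge_layers(default_layer, local_layer))
# {'web': {'httpport': '8000', 'enableSplunkWebSSL': 'true'}}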
Example No. 18
class RestPublishCmd(KsconfCmd):
    help = "Publish .conf settings to a live Splunk instance via REST"
    description = dedent("""\
    Publish stanzas in a .conf file to a running Splunk instance via REST.  This requires access to
    the HTTPS endpoint of Splunk.  By default, ksconf will handle both the creation of new stanzas
    and the update of existing stanzas.

    This can be used to push full configuration stanzas where you only have REST access and can't
    directly publish an app.

    Only attributes present in the conf file are pushed.  While this may seem obvious, this fact can
    have profound implications in certain situations, like when using this command for continuous
    updates.  This means that it's possible for the source .conf to ultimately differ from what ends
    up in the server's .conf file.  One way to avoid this is to explicitly remove the object using
    ``--delete`` mode first, and then insert a new copy of the object.  Of course this means that
    the object will be unavailable.  The other impact is that diffs only compare and show a subset
    of attributes.

    Be aware that, for consistency, the configs/conf-TYPE endpoint is used for this command.
    Therefore, a reload may be required for the server to use the published config settings.
    """)

    maturity = "alpha"

    def __init__(self, *args, **kwargs):
        super(RestPublishCmd, self).__init__(*args, **kwargs)
        self._service = None
        self.meta = None        # type: MetaData

    @classmethod
    def _handle_imports(cls):
        g = globals()
        if globals()["splunklib"]:
            return
        import splunklib.client
        g["splunklib"] = splunklib

    def register_args(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument("conf", metavar="CONF", nargs="+",
                            type=ConfFileType("r", "load", parse_profile=PARSECONF_LOOSE),
                            help="Configuration file(s) to export settings from."
                            ).completer = conf_files_completer

        parser.add_argument("--conf", dest="conf_type", metavar="TYPE",
                            help=dedent("""\
            Explicitly set the configuration file type.  By default this is derived from CONF, but
            sometimes it's helpful to set this explicitly.  Can be any valid Splunk conf file type;
            examples include 'app', 'props', 'tags', 'savedsearches', and so on."""))
        parser.add_argument("-m", "--meta", action="append",
                            help=
                            "Specify one or more ``.meta`` files to determine the desired read & "
                            "write ACLs, owner, and sharing for objects in the CONF file.")

        #add_splunkd_namespace(
        #    add_splunkd_access_args(parser.add_argument("Splunkd endpoint")))

        add_splunkd_namespace(
            add_splunkd_access_args(parser))

        parsg1 = parser.add_mutually_exclusive_group(required=False)
        '''
        parsg1.add_argument("-u", "--update", action="store_true", default=False,
                            help="Assume that the REST entities already exist.")
        parsg1.add_argument("--update-only", action="store_true", default=False,
                            help="Only update existing entities.  "
                                 "Non-existent entries will be skipped.")
        '''
        parsg1.add_argument("-D", "--delete", action="store_true", default=False,
                            help=dedent("""\
            Remove existing REST entities.  This is a destructive operation.
            In this mode, stanza attributes are unnecessary.
            NOTE:  This works for 'local' entities only; the default folder cannot be updated.
            """))

    @staticmethod
    def make_boolean(stanza, attr="disabled"):
        if attr in stanza:
            stanza[attr] = "1" if conf_attr_boolean(stanza[attr]) else "0"

    def connect_splunkd(self, args):
        # Take username/password from the URL, if encoded there; otherwise use defaults from argparse
        up = urlparse(args.url)
        username = up.username or args.user
        password = up.password or args.password
        self._service = splunklib.client.connect(
            hostname=up.hostname, port=up.port, username=username, password=password,
            owner=args.owner, app=args.app, sharing=args.sharing)

    def handle_conf_file(self, args, conf_proxy):
        if args.conf_type:
            conf_type = args.conf_type
        else:
            conf_type = os.path.basename(conf_proxy.name).replace(".conf", "")

        if isinstance(conf_type, six.text_type):
            conf_type = conf_type.encode("utf-8")

        config_file = self._service.confs[conf_type]
        conf = conf_proxy.data

        # Sorting stanzas for consistent processing of large files.  No CLI option for now.
        # XXX:  Support stanza order preservation after new parser is created (long-term)
        for stanza_name in sorted(conf):
            stanza_data = conf[stanza_name]

            if stanza_name is GLOBAL_STANZA:
                # XXX:  Research proper handling of default/global stanzas..
                # As-is, curl returns an HTTP error, but yet the new entry is added to the
                # conf file.  So I suppose we could ignore the exit code?!    ¯\_(ツ)_/¯
                sys.stderr.write("Refusing to touch the [default] stanza.  Too much could go wrong.\n")
                continue

            if args.delete:
                action, info = self.delete_conf(stanza_name, stanza_data, config_file)
            else:
                action, info = self.publish_conf(stanza_name, stanza_data, config_file)

            print("{:50} {:8}   (delta size: {})".format("[{}]".format(stanza_name), action, len(info.get("delta",[]))))

            update_time = info.get("updated", 0)
            ###headers = (conf_proxy.name, "{}/{}".format(args.url, config_file.path))
            #rest_header = DiffHeader("{}/{}".format(args.url, info.get("path", config_file.path), update_time))
            rest_header = DiffHeader(info.get("path", config_file.path), update_time)
            if action != "nochange" and "delta" in info:
                show_diff(self.stdout, info["delta"], headers=(conf_proxy.name, rest_header))

            if "meta" in info:
                print(info["meta"])

            if "acl_delta" in info:
                show_diff(self.stdout, info["acl_delta"])

    def publish_conf(self, stanza_name, stanza_data, config_file):
        if self.meta:
            metadata = self.meta.get(config_file.name, stanza_name)
            owner = metadata.get("owner", None)
            app = config_file.service.namespace.app
            if metadata.get("export", None) == "system":
                sharing = "global"
            else:
                # Could still be "user" technically; but it seems unlikely that '--meta' would be given
                # in that case.  Still, there's possible room for improvement.
                sharing = "app"
        else:
            metadata = {}
            owner = None
            sharing = None
            app = None

        res = {}
        # XXX:  Move boolean comparison stuff to the core delta detection library....
        self.make_boolean(stanza_data)

        try:
            stz = config_file[stanza_name]
        except KeyError:
            stz = None

        if stz is not None:
            ## print("Stanza {} already exists on server.  Checking to see if update is needed.".format(stanza_name))
            # When pulling do we need to specify this?  (owner=owner, app=app, sharing=sharing);  If meta is given and where these are different than the defaults on the CLI?...
            stz_data = stz.content

            # Diff printing really doesn't like 'None's...
            stz_data = { k:v or "" for k,v in six.iteritems(stz_data) }
            self.make_boolean(stz_data)
            res["path"] = stz.path
            try:
                res["updated"] = stz.state["updated"]
            except KeyError:
                pass
            ## print("VALUE NOW:   (FROM SERVER)   {}".format(stz.content))  ## VERY NOISY!
            data = reduce_stanza(stz_data, stanza_data)
            ## print("VALUE NOW:   (FILTERED TO OUR ATTRS)   {}".format(data))
            delta = res["delta"] = compare_stanzas(stanza_data, data, stanza_name)
            if is_equal(delta):
                ## print("NO CHANGE NEEDED.")
                res["delta"] = []
                action = "nochange"
            else:
                stz.update(**stanza_data)
                # Any need to call .refresh() here to grab the state from the server?
                action = "update"
        else:
            ## print("Stanza {} new -- publishing!".format(stanza_name))
            stz = config_file.create(stanza_name, owner=owner, app=app, sharing=sharing, **stanza_data)
            res["delta"] = compare_stanzas({}, stanza_data, stanza_name)
            res["path"] = stz.path
            action = "new"

        # METADATA PUSH

        if not self.meta:
            return (action, res)

        if not int(stz.access["can_change_perms"]):
            res["meta"] = "Can't change meta according to 'can_change_perms'"
            return (action, res)

        # NOTE:  We don't support attribute-level metadata here (Need it?  2 words:  pull request)
        if not metadata:
            res["meta"] = "No metadata found for [{}/{}]".format(config_file.name, stanza_name)
            return (action, res)
        final_meta = {}
        if "access.read" in metadata:
            final_meta["perms.read"] = ",".join(metadata["access.read"])
        if "access.write" in metadata:
            final_meta["perms.write"] = ",".join(metadata["access.write"])
        if "owner" in metadata:
            final_meta["owner"] = metadata["owner"]
        else:
            final_meta["owner"] = "nobody"
        export = metadata.get("export", "")
        if export == "system":
            final_meta["sharing"] = "global"
        else:
            # Could still be "user" technically; but it seems unlikely that '--meta' would be given
            # in that case.  Still, there's possible room for improvement.
            final_meta["sharing"] = "app"

        # Build access dict for comparison purpose
        access = {}
        for x in ("owner", "app", "sharing"):
            access[x] = stz.access[x]
        for x in ("read", "write"):
            try:
                access["perms." + x] = ",".join(stz.access["perms"][x])
            except (KeyError, TypeError):
                access["perms." + x] = ""
        # print("[{}] fm={} access:  {}".format(stanza_name, final_meta, access))

        acl_delta = compare_stanzas(reduce_stanza(access, final_meta), final_meta,
                                    stanza_name + "/acl")
        if is_equal(acl_delta):
            res["acl_delta"] = []
            return (action, res)
        else:
            res["acl_delta"] = acl_delta

        resource = None
        try:
            # Wonky workaround.  See https://github.com/splunk/splunk-sdk-python/issues/207
            # config_file.service.http.post()
            # response = Endpoint(config_file.service, stz.path + "acl/").post(**final_meta)

            svc = config_file.service
            all_headers = svc.additional_headers + svc._auth_headers
            resource = svc.authority + \
                       svc._abspath(stz.path + "acl",
                                    owner=svc.namespace.owner, app=svc.namespace.app,
                                    sharing=svc.namespace.sharing)
            #logger.debug("request to do the ACL THING!  (Round trip debugging)")
            response = svc.http.post(resource, all_headers, **final_meta)

            res["meta_response"] = response
        except Exception:
            # Don't die on exceptions for ACLs...  print the error and move on (too many things can go wrong here)
            print("Failed hitting:  {}  ARGS={}".format(resource, final_meta))
            import traceback
            traceback.print_exc()
            # XXX:  Do better

        return (action, res)

    def delete_conf(self, stanza_name, stanza_data, config_file):
        res = {}
        if stanza_name in config_file:
            stz = config_file[stanza_name]
            stz_data = stz.content
            res["path"] = stz.path
            try:
                res["updated"] = stz.state["updated"]
            except KeyError:
                # Doesn't matter
                pass

            self.make_boolean(stz_data)
            ## print("Found {}".format(stz_data))
            data = reduce_stanza(stz_data, stanza_data)
            config_file.delete(stanza_name)
            res["delta"] = compare_stanzas(data, {}, stanza_name)
            return ("deleted", res)
        else:
            res["delta"] = []
            return ("nochange", res)

    def run(self, args):
        if args.insecure:
            raise NotImplementedError("Need to implement -k feature")

        if args.meta:
            self.meta = MetaData()
            for meta_file in args.meta:
                print("Loading metadata from {}".format(meta_file))
                self.meta.feed_file(meta_file)

        self.connect_splunkd(args)
        for conf_proxy in args.conf:    # type: ConfFileProxy
            self.handle_conf_file(args, conf_proxy)

        return EXIT_CODE_SUCCESS
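As the description notes, only attributes present in the local .conf file are pushed; that is what the `reduce_stanza()` call accomplishes before `compare_stanzas()` runs.  A simplified sketch of the idea (the real helper is defined elsewhere in ksconf):

def reduce_stanza_sketch(server_stanza, local_stanza):
    # Keep only the server-side attributes that the local stanza also defines, so the
    # delta comparison ignores settings this command was never asked to manage.
    return {key: value for key, value in server_stanza.items() if key in local_stanza}

server_side = {"disabled": "0", "search": "index=main", "cron_schedule": "*/5 * * * *"}
local_file = {"search": "index=main sourcetype=access_combined"}
print(reduce_stanza_sketch(server_side, local_file))   # {'search': 'index=main'}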
Example No. 19
class XmlFormatCmd(KsconfCmd):
    help = "Normalize XML view and nav files"
    description = dedent("""
    Normalize and apply consistent XML indentation and CDATA usage for XML dashboards and
    navigation files.

    Technically this could be used on *any* XML file, but certain element names specific to Splunk's
    simple XML dashboards are handled specially, so running this on other XML files could produce
    unusable results.

    The expected indentation level is guessed based on the first element indentation, but can be
    explicitly set if not detectable.
    """)
    maturity = "alpha"

    @classmethod
    def _handle_imports(cls):
        g = globals()
        if globals()["etree"]:
            return
        from lxml import etree
        cls.version_extra = "lxml {}".format(etree.__version__)
        g["etree"] = etree

    def register_args(self, parser):
        parser.add_argument("xml",
                            metavar="FILE",
                            nargs="+",
                            help=dedent("""\
            One or more XML files to check.
            If '-' is given, then a list of files is read from standard input"""
                                        )).completer = conf_files_completer
        parser.add_argument(
            "--indent",
            type=int,
            default=2,
            help="Number of spaces.  This is only used if indentation cannot be "
            "guessed from the existing file.")
        parser.add_argument("--quiet",
                            "-q",
                            default=False,
                            action="store_true",
                            help="Reduce the volume of output.")

    def run(self, args):
        formatter = SplunkSimpleXmlFormatter()
        # Should we read a list of conf files from STDIN?
        if len(args.xml) == 1 and args.xml[0] == "-":
            files = _stdin_iter()
        else:
            files = args.xml
        c = Counter()
        exit_code = EXIT_CODE_SUCCESS
        for fn in files:
            c["checked"] += 1
            if not os.path.isfile(fn):
                self.stderr.write("Skipping missing file:  {0}\n".format(fn))
                c["missing"] += 1
                continue
            try:
                if formatter.format_xml(fn, fn, args.indent):
                    self.stderr.write(
                        "Replaced file {0} with formatted content\n".format(
                            fn))
                    c["changed"] += 1
                else:
                    if not args.quiet:
                        self.stderr.write("Already formatted {0}\n".format(fn))
                    c["no-action"] += 1
                self.stderr.flush()
            except etree.ParseError as e:
                self.stderr.write("Error parsing file {0}:  {1}\n".format(
                    fn, e))
                self.stderr.flush()
                c["error"] += 1
                exit_code = EXIT_CODE_BAD_CONF_FILE
            except Exception as e:  # pragma: no cover
                self.stderr.write(
                    "Unhandled top-level exception while parsing {0}.  "
                    "Aborting.\n{1}\n".format(fn, e))
                debug_traceback()
                c["error"] += 1
                exit_code = EXIT_CODE_INTERNAL_ERROR
                break

        if not exit_code and c["changed"] > 0:
            exit_code = EXIT_CODE_FORMAT_APPLIED

        if True:  # show stats or verbose
            self.stdout.write(
                "Completed formatting {0[checked]} files.  rc={1} Breakdown:\n"
                "   {0[changed]} files were formatted successfully.\n"
                "   {0[no-action]} files were already formatted.\n"
                "   {0[error]} files failed.\n".format(c, exit_code))
        return exit_code
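The description says the indentation level is guessed from the first element's indentation and that `--indent` is only a fallback.  A rough sketch of how such a guess could work (an illustration only; the real logic lives in `SplunkSimpleXmlFormatter`):

def guess_indent(xml_text, fallback=2):
    # Use the leading whitespace of the first indented tag as the indent width;
    # fall back to the CLI default when nothing is indented.
    for line in xml_text.splitlines():
        stripped = line.lstrip(" ")
        if stripped.startswith("<") and len(stripped) < len(line):
            return len(line) - len(stripped)
    return fallback

sample = "<dashboard>\n    <label>Example</label>\n</dashboard>"
print(guess_indent(sample))   # 4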
Example No. 20
class CheckCmd(KsconfCmd):
    help = "Perform basic syntax and sanity checks on .conf files"
    description = dedent("""
    Provide basic syntax and sanity checking for Splunk's .conf
    files.  Use Splunk's built-in ``btool check`` for a more robust
    validation of attributes and values.

    Consider using this utility as part of a pre-commit hook.""")
    maturity = "stable"

    def register_args(self, parser):
        parser.add_argument("conf",
                            metavar="FILE",
                            nargs="+",
                            help=dedent("""\
            One or more configuration files to check.
            If '-' is given, then read a list of files to validate from standard input"""
                                        )).completer = conf_files_completer
        parser.add_argument("--quiet",
                            "-q",
                            default=False,
                            action="store_true",
                            help="Reduce the volume of output.")
        ''' # Do we really need this?
        parser.add_argument("--max-errors", metavar="INT", type=int, default=0, help=
            "Abort check if more than this many files fail validation.  "
            "Useful for a pre-commit hook where any failure is unacceptable.")
        '''

    def run(self, args):
        # Should we read a list of conf files from STDIN?
        if len(args.conf) == 1 and args.conf[0] == "-":
            confs = _stdin_iter()
        else:
            confs = args.conf
        c = Counter()
        exit_code = EXIT_CODE_SUCCESS
        for conf in confs:
            c["checked"] += 1
            if not os.path.isfile(conf):
                self.stderr.write("Skipping missing file:  {0}\n".format(conf))
                c["missing"] += 1
                continue
            try:
                parse_conf(conf, profile=PARSECONF_STRICT_NC)
                c["okay"] += 1
                if not args.quiet:
                    self.stdout.write("Successfully parsed {0}\n".format(conf))
                    self.stdout.flush()
            except ConfParserException as e:
                self.stderr.write("Error in file {0}:  {1}\n".format(conf, e))
                self.stderr.flush()
                exit_code = EXIT_CODE_BAD_CONF_FILE
                # TODO:  Break out counts by error type/category (there's only a few of them)
                c["error"] += 1
            except Exception as e:  # pragma: no cover
                self.stderr.write(
                    "Unhandled top-level exception while parsing {0}.  "
                    "Aborting.\n{1}\n".format(conf, e))
                debug_traceback()
                exit_code = EXIT_CODE_INTERNAL_ERROR
                c["error"] += 1
                break
        if True:  # show stats or verbose
            self.stdout.write(
                "Completed checking {0[checked]} files.  rc={1} Breakdown:\n"
                "   {0[okay]} files were parsed successfully.\n"
                "   {0[error]} files failed.\n".format(c, exit_code))
        return exit_code
Example No. 21
class SortCmd(KsconfCmd):
    help = "Sort a Splunk .conf file creating a normalized format appropriate for version control"
    description = dedent("""\
    Sort a Splunk .conf file.  Sort has two modes:  (1) by default, the sorted
    config file will be echoed to the screen.  (2) the config files are updated
    in-place when the '-i' option is used.

    Manually managed conf files can be blacklisted by adding a comment containing the
    string 'KSCONF-NO-SORT' to the top of any .conf file.

    To recursively sort all files:

        find . -name '*.conf' | xargs ksconf sort -i
    """)
    format = "manual"
    maturity = "stable"

    def register_args(self, parser):
        import argparse
        parser.add_argument("conf", metavar="FILE", nargs="+",
                            type=argparse.FileType('r'), default=[self.stdin],
                            help="Input file to sort, or standard input."
                            ).completer = conf_files_completer

        # Pick mode:  target (sysout) vs inplace
        mode = parser.add_mutually_exclusive_group()
        mode.add_argument("--target", "-t", metavar="FILE",
                          type=argparse.FileType('w'), default=self.stdout,
                          help="File to write results to.  Defaults to standard output."
                          ).completer = conf_files_completer
        mode.add_argument("--inplace", "-i",
                          action="store_true", default=False, help="""
                          Replace the input file with a sorted version.
                          Warning:  this is a potentially destructive operation that may
                          move/remove comments.""")

        # Inplace update arguments
        grp1 = parser.add_argument_group("In-place update arguments")
        grp1.add_argument("-F", "--force", action="store_true",
                          help="""
                          Force file sorting for all files, even for files containing the special
                          'KSCONF-NO-SORT' marker.""")
        grp1.add_argument("-q", "--quiet", action="store_true",
                          help="""Reduce the output.
                          Reports only updated or invalid files.
                          This is useful for pre-commit hooks, for example.""")

        parser.add_argument("-n", "--newlines", metavar="LINES", type=int, default=1,
                            help="Lines between stanzas.")


    def run(self, args):
        ''' Sort one or more configuration files. '''
        stanza_delims = "\n" * args.newlines
        if args.inplace:
            failure = False
            changes = 0
            for conf in args.conf:
                try:
                    if not args.force and _has_nosort_marker(conf.name):
                        if not args.quiet:
                            self.stderr.write("Skipping blacklisted file {}\n".format(conf.name))
                        continue
                    data = parse_conf(conf, profile=PARSECONF_STRICT)
                    conf.close()
                    smart_rc = smart_write_conf(conf.name, data, stanza_delim=stanza_delims,
                                                sort=True)
                except ConfParserException as e:
                    smart_rc = None
                    self.stderr.write("Error trying to process file {0}.  "
                                      "Error:  {1}\n".format(conf.name, e))
                    failure = True
                if smart_rc == SMART_NOCHANGE:
                    if not args.quiet:
                        self.stderr.write("Nothing to update.  "
                                          "File {0} is already sorted\n".format(conf.name))
                else:
                    self.stderr.write("Replaced file {0} with sorted content.\n".format(conf.name))
                    changes += 1
            if failure:
                return EXIT_CODE_BAD_CONF_FILE
            if changes:
                return EXIT_CODE_SORT_APPLIED
        else:
            for conf in args.conf:
                if len(args.conf) > 1:
                    args.target.write("---------------- [ {0} ] ----------------\n\n"
                                      .format(conf.name))
                try:
                    data = parse_conf(conf, profile=PARSECONF_STRICT)
                    write_conf(args.target, data, stanza_delim=stanza_delims, sort=True)
                except ConfParserException as e:
                    self.stderr.write("Error trying processing {0}.  Error:  {1}\n".
                                      format(conf.name, e))
                    return EXIT_CODE_BAD_CONF_FILE
            return EXIT_CODE_SUCCESS
Example No. 22
    def register_args(self, parser):
        # type: (argparse.ArgumentParser) -> None
        parser.add_argument("conf",
                            metavar="CONF",
                            help="Input conf file",
                            nargs="+",
                            type=ConfFileType("r",
                                              parse_profile=PARSECONF_MID_NC)
                            ).completer = conf_files_completer
        parser.add_argument(
            "-o",
            "--output",
            metavar="FILE",
            type=argparse.FileType('w'),
            default=self.stdout,
            help="File where the filtered results are written.  "
            "Defaults to standard out.")
        parser.add_argument(
            "--comments",
            "-C",
            action="store_true",
            default=False,
            help="Preserve comments.  Comments are discarded by default.")
        parser.add_argument("--verbose",
                            action="store_true",
                            default=False,
                            help="Enable additional output.")

        parser.add_argument(
            "--match",
            "-m",  # metavar="MODE",
            choices=["regex", "wildcard", "string"],
            default="wildcard",
            help=dedent("""\
            Specify pattern matching mode.
            Defaults to 'wildcard' allowing for ``*`` and  ``?`` matching.
            Use 'regex' for more power but watch out for shell escaping.
            Use 'string' to enable literal matching."""))
        parser.add_argument("--ignore-case",
                            "-i",
                            action="store_true",
                            help=dedent("""\
            Ignore case when comparing or matching strings.
            By default matches are case-sensitive."""))
        parser.add_argument("--invert-match",
                            "-v",
                            action="store_true",
                            help=dedent("""\
            Invert match results.
            This can be used to show what content does NOT match,
            or make a backup copy of excluded content."""))

        pg_out = parser.add_argument_group(
            "Output mode",
            dedent("""\
            Select an alternate output mode.
            If any of the following options are used, the stanza output is not shown.
            """))
        pg_out.add_argument(
            "--files-with-matches",
            "-l",
            action="store_true",
            help="List files that match the given search criteria")
        pg_om1 = pg_out.add_mutually_exclusive_group()
        pg_om1.add_argument("--count",
                            "-c",
                            action="store_true",
                            help="Count matching stanzas")
        pg_om1.add_argument("--brief",
                            "-b",
                            action="store_true",
                            help="List name of matching stanzas")

        pg_sel = parser.add_argument_group(
            "Stanza selection",
            dedent("""\
            Include or exclude entire stanzas using these filter options.

            All filter options can be provided multiple times.
            If you have a long list of filters, they can be saved in a file and referenced using
            the special ``file://`` prefix.  One entry per line."""))

        pg_sel.add_argument("--stanza",
                            metavar="PATTERN",
                            action="append",
                            default=[],
                            help=dedent("""
            Match any stanza whose name matches the given pattern.
            PATTERN supports bulk patterns via the ``file://`` prefix."""))

        pg_sel.add_argument("--attr-present",
                            metavar="ATTR",
                            action="append",
                            default=[],
                            help=dedent("""\
            Match any stanza that includes the ATTR attribute.
            ATTR supports bulk attribute patterns via the ``file://`` prefix."""
                                        ))
        '''# Add next
        pg_sel.add_argument("--attr-eq", metavar=("ATTR", "PATTERN"), nargs=2, action="append",
                            default=[],
                            help="""
            Match any stanza that includes an attribute matching the pattern.
            PATTERN supports the special ``file://filename`` syntax.""")
        '''
        ''' # This will be more difficult
        pg_sel.add_argument("--attr-ne",  metavar=("ATTR", "PATTERN"), nargs=2, action="append",
                            default=[],
                            help="""
            Match any stanza that includes an attribute matching the pattern.
            PATTERN supports the special ``file://`` syntax.""")
        '''

        pg_con = parser.add_argument_group(
            "Attribute selection",
            dedent("""\
            Include or exclude attributes passed through.
            By default, all attributes are preserved.
            Allowlist (keep) operations are performed before blocklist (reject) operations."""
                   ))

        pg_con.add_argument("--keep-attrs",
                            metavar="WC-ATTR",
                            default=[],
                            action="append",
                            help=dedent("""\
            Select which attribute(s) will be preserved.
            This space-separated list of attributes indicates what to preserve.
            Supports wildcards."""))

        pg_con.add_argument("--reject-attrs",
                            metavar="WC-ATTR",
                            default=[],
                            action="append",
                            help=dedent("""\
            Select which attribute(s) will be discarded.
            This space-separated list of attributes indicates what to discard.
            Supports wildcards."""))
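The ``--match`` option above switches between wildcard, regex, and literal string matching.  One plausible way to normalize all three modes into a compiled regular expression (a sketch only; the helper name and behavior here are illustrative, not ksconf's actual filter machinery):

import fnmatch
import re

def build_matcher(pattern, mode="wildcard", ignore_case=False):
    # Sketch:  translate the requested matching mode into a compiled regex.
    if mode == "wildcard":
        regex = fnmatch.translate(pattern)
    elif mode == "string":
        regex = re.escape(pattern) + r"\Z"
    else:  # mode == "regex"
        regex = pattern
    flags = re.IGNORECASE if ignore_case else 0
    return re.compile(regex, flags)

matcher = build_matcher("props*", ignore_case=True)
print(bool(matcher.match("PROPS.CONF")))   # True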
Example No. 23
class MinimizeCmd(KsconfCmd):
    help = "Minimize the target file by removing entries duplicated in the default conf(s)"
    description = dedent("""\
    Minimize a conf file by removing any duplicated default settings.

    Reduce a local conf file to only your intended changes without manually tracking
    which entries you've edited.  Minimizing local conf files makes your local
    customizations easier to read and often results in cleaner upgrades.
    """)
    maturity = "beta"

    def register_args(self, parser):
        parser.add_argument(
            "conf",
            metavar="CONF",
            nargs="+",
            type=ConfFileType("r", "load", parse_profile=PARSECONF_LOOSE),
            help=
            "The default configuration file(s) used to determine what base or settings are. "
            "The base settings determine what is unnecessary to repeat in target file."
        ).completer = conf_files_completer
        parser.add_argument(
            "--target",
            "-t",
            metavar="TARGET",
            type=ConfFileType("r+", "load", parse_profile=PARSECONF_STRICT),
            help=
            "The local file that you wish to remove duplicate settings from.  "
            "This file will be read from and then replaced with a minimized version."
        ).completer = conf_files_completer
        grp1 = parser.add_mutually_exclusive_group()
        grp1.add_argument(
            "--dry-run",
            "-D",
            default=False,
            action="store_true",
            help="Enable dry-run mode.  "
            "Instead of writing the minimizing the TARGET file, preview what would be removed "
            "the form of a 'diff'.")
        grp1.add_argument(
            "--output",
            type=ConfFileType("w", "none", parse_profile=PARSECONF_STRICT),
            default=None,
            help=
            "Write the minimized output to a separate file instead of updating TARGET."
        ).completer = conf_files_completer
        parser.add_argument(
            "--explode-default",
            "-E",
            default=False,
            action="store_true",
            help="Enable minimization across stanzas for special use-cases.  "
            "Helpful when dealing with stanzas downloaded from a REST endpoint or "
            "``btool list`` output.")
        parser.add_argument(
            "-k",
            "--preserve-key",
            action="append",
            default=[],
            help="Specify attributes that should always be kept.")

    def run(self, args):
        if args.explode_default:
            # Is this the SAME as exploding the defaults AFTER the merge?;
            # I think NOT.  Needs testing
            cfgs = [explode_default_stanza(conf.data) for conf in args.conf]
        else:
            cfgs = [conf.data for conf in args.conf]
        # Merge all config files:
        default_cfg = merge_conf_dicts(*cfgs)
        del cfgs
        local_cfg = args.target.data
        orig_cfg = dict(args.target.data)

        if args.explode_default:
            # Make a skeleton default dict; at the highest level, this ensures that
            # every stanza in the target picks up the merged default settings.
            default_stanza = default_cfg.get(GLOBAL_STANZA,
                                             default_cfg.get("default"))
            skeleton_default = dict([(k, {}) for k in args.target.data])
            skeleton_default = explode_default_stanza(skeleton_default,
                                                      default_stanza)
            default_cfg = merge_conf_dicts(skeleton_default, default_cfg)

            local_cfg = explode_default_stanza(local_cfg)
            local_cfg = explode_default_stanza(local_cfg, default_stanza)

        minz_cfg = dict(local_cfg)

        # This may be a bit too simplistic.  Weird interplay may exist when a [default] stanza
        # and a local [Upstream] stanza line up, but [Upstream] in our default file does not exist.
        # XXX:  Add a unit test!

        diffs = compare_cfgs(default_cfg, local_cfg, allow_level0=False)

        for op in diffs:
            if op.tag == DIFF_OP_DELETE:
                # This is normal.  Don't expect all default content to be mirrored into local
                continue
            elif op.tag == DIFF_OP_EQUAL:
                if isinstance(op.location, DiffStanza):
                    del minz_cfg[op.location.stanza]
                else:
                    # Todo: Only preserve keys for stanzas where at least 1 key has been modified
                    if match_bwlist(op.location.key, args.preserve_key):
                        '''
                        self.stderr.write("Skipping key [PRESERVED]  [{0}] key={1} value={2!r}\n"
                                     "".format(op.location.stanza, op.location.key, op.a))
                        '''
                        continue  # pragma: no cover  (peephole optimization)
                    del minz_cfg[op.location.stanza][op.location.key]
                    # If that was the last remaining key in the stanza, delete the entire stanza
                    if not _drop_stanza_comments(minz_cfg[op.location.stanza]):
                        del minz_cfg[op.location.stanza]
            elif op.tag == DIFF_OP_INSERT:
                '''
                self.stderr.write("Keeping local change:  <{0}> {1!r}\n-{2!r}\n+{3!r}\n\n\n".format(
                    op.tag, op.location, op.b, op.a))
                '''
                continue
            elif op.tag == DIFF_OP_REPLACE:
                '''
                self.stderr.write("Keep change:  <{0}> {1!r}\n-{2!r}\n+{3!r}\n\n\n".format(
                    op.tag, op.location, op.b, op.a))
                '''
                continue

        if args.dry_run:
            if args.explode_default:
                rc = show_diff(self.stdout,
                               compare_cfgs(orig_cfg, minz_cfg),
                               headers=(args.target.name,
                                        args.target.name + "-new"))
            else:
                rc = show_diff(self.stdout,
                               compare_cfgs(local_cfg, default_cfg),
                               headers=(args.target.name,
                                        args.target.name + "-new"))
            return rc

        if args.output:
            args.output.dump(minz_cfg)
        else:
            args.target.dump(minz_cfg)
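
For reference, the loop above boils down to "drop anything in local that merely repeats default."  A minimal sketch of that idea using plain dicts, without ksconf's compare_cfgs/DIFF_OP machinery (names below are illustrative, not the ksconf API):

def minimize_sketch(default_cfg, local_cfg, preserve_keys=()):
    """Drop stanzas/keys from local_cfg that merely repeat default_cfg (illustrative only)."""
    minimized = {}
    for stanza, attrs in local_cfg.items():
        default_attrs = default_cfg.get(stanza, {})
        kept = {k: v for k, v in attrs.items()
                if k in preserve_keys or default_attrs.get(k) != v}
        if kept:
            minimized[stanza] = kept
    return minimized

# minimize_sketch({"web": {"disabled": "0", "interval": "60"}},
#                 {"web": {"disabled": "0", "interval": "300"}})
# -> {"web": {"interval": "300"}}
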
Ejemplo n.º 24
0
 def register_args(self, parser):
     parser.set_defaults(mode="ask")
     parser.add_argument(
         "source",
         metavar="SOURCE",
         type=ConfFileType("r+", "load", parse_profile=PARSECONF_STRICT_NC),
         help="The source configuration file to pull changes from. "
         "Typically the :file:`local` conf file)"
     ).completer = conf_files_completer
     parser.add_argument("target",
                         metavar="TARGET",
                         type=ConfFileType("r+",
                                           "none",
                                           accept_dir=True,
                                           parse_profile=PARSECONF_STRICT),
                         help=dedent("""\
         Configuration file or directory to push the changes into.
         (Typically the :file:`default` folder)
         """)).completer = conf_files_completer
     grp1 = parser.add_mutually_exclusive_group()
     grp1.add_argument("--batch",
                       "-b",
                       action="store_const",
                       dest="mode",
                       const="batch",
                       help=dedent("""\
         Use batch mode where all configuration settings are automatically promoted.
         All changes are removed from source and applied to target.
         The source file will be removed, unless
         ``--keep-empty`` is used."""))
     grp1.add_argument("--interactive",
                       "-i",
                       action="store_const",
                       dest="mode",
                       const="interactive",
                       help=dedent("""\
         Enable interactive mode where the user will be prompted to approve
         the promotion of specific stanzas and attributes.
         The user will be able to apply, skip, or edit the changes being promoted."""
                                   ))
     parser.add_argument(
         "--force",
         "-f",
         action="store_true",
         default=False,
         help="Disable safety checks. "
         "Don't check to see if SOURCE and TARGET share the same basename.")
     parser.add_argument("--keep",
                         "-k",
                         action="store_true",
                         default=False,
                         help=dedent("""\
         Keep conf settings in the source file.
         All changes will be copied into the target file instead of being moved there.
         This is typically a bad idea since local always overrides default."""
                                     ))
     parser.add_argument("--keep-empty",
                         action="store_true",
                         default=False,
                         help=dedent("""\
         Keep the source file, even if after the settings promotions the file has no content.
         By default, SOURCE will be removed after all content has been moved into TARGET.
         Splunk will re-create any necessary local files on the fly."""))
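
The mode selection above hinges on set_defaults(mode="ask") plus store_const flags inside a mutually exclusive group.  A small standalone sketch of that argparse pattern (generic names, not ksconf's):

import argparse

parser = argparse.ArgumentParser()
parser.set_defaults(mode="ask")
grp = parser.add_mutually_exclusive_group()
grp.add_argument("--batch", "-b", action="store_const", dest="mode", const="batch")
grp.add_argument("--interactive", "-i", action="store_const", dest="mode", const="interactive")

print(parser.parse_args([]).mode)             # ask
print(parser.parse_args(["--batch"]).mode)    # batch
# parser.parse_args(["-b", "-i"]) errors out: the two flags are mutually exclusive
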
Ejemplo n.º 25
0
class CombineCmd(KsconfCmd):
    help = dedent("""\
    Combine configuration files across multiple source directories into a single
    destination directory.  This allows for an arbitrary number of Splunk
    configuration layers to coexist within a single app.  Useful in both ongoing
    merge and one-time ad-hoc use.

    For example, combine can consolidate the 'users' directory across several instances
    after a phased server migration.
    """)
    description = dedent("""\
    Merge .conf settings from multiple source directories into a combined target
    directory.   Configuration files can be stored in a '/etc/*.d' like directory
    structure and consolidated back into a single 'default' directory.

    This command supports both one-time operations and recurring merge jobs.  For
    example, this command can be used to combine all users' knowledge objects (stored
    in 'etc/users') after a server migration, or to merge a single user's settings
    after their account has been renamed.  Recurring operations assume some type
    of external scheduler is being used.  A best-effort is made to only write to
    target files as needed.

    The 'combine' command takes your logical layers of configs (upstream, corporate,
    splunk admin fixes, and power user knowledge objects, ...) expressed as
    individual folders and merges them all back into the single 'default' folder
    that Splunk reads from.  One way to keep the 'default' folder up-to-date is
    using client-side git hooks.

    No directory layout is mandatory, but one simple approach is to model your
    layers using a prioritized 'default.d' directory structure. (This idea is
    borrowed from the Unix System V concept where many services natively read their
    config files from '/etc/*.d' directories.)


    THE PROBLEM:

    In a typical enterprise deployment of Splunk, a single app can easily have
    multiple logical sources of configuration:  (1) The upstream app developer, (2)
    a local app developer adds organization-specific customizations or
    fixes, (3) a Splunk admin tweaks inappropriate 'indexes.conf' settings, and
    (4) custom knowledge objects added by your subject matter experts.  Ideally we'd
    like to version control these, but doing so is complicated because normally you
    have to manage all 4 of these logical layers in one 'default' folder.  (Splunk
    requires that app settings be located either in 'default' or 'local'; and
    managing local files with version control leads to merge conflicts; so
    effectively, all version controlled settings need to be in 'default', or risk
    merge conflicts.)  So when a new upstream version is released, someone has to
    manually upgrade the app being careful to preserve all custom configurations.
    The solution provided by the 'combine' functionality is that all of these
    logical sources can be stored separately in their own physical directories
    allowing changes to be managed independently.  (This also allows for different
    layers to be mixed-and-matched by selectively including which layers to
    combine.)  While this doesn't completely remove the need for a human to review
    app upgrades, it does lower the overhead enough that updates can be pulled in
    more frequently, thus reducing the divergence potential.  (Merge frequently.)


    NOTES:

    The 'combine' command is similar to running the 'merge' subcommand recursively
    against a set of directories.  One key difference is that this command also
    handles non-conf files gracefully.

    EXAMPLE:

        Splunk_CiscoSecuritySuite/
        ├── README
        ├── default.d
        │   ├── 10-upstream
        │   │   ├── app.conf
        │   │   ├── data
        │   │   │   └── ui
        │   │   │       ├── nav
        │   │   │       │   └── default.xml
        │   │   │       └── views
        │   │   │           ├── authentication_metrics.xml
        │   │   │           ├── cisco_security_overview.xml
        │   │   │           ├── getting_started.xml
        │   │   │           ├── search_ip_profile.xml
        │   │   │           ├── upgrading.xml
        │   │   │           └── user_tracking.xml
        │   │   ├── eventtypes.conf
        │   │   ├── macros.conf
        │   │   ├── savedsearches.conf
        │   │   └── transforms.conf
        │   ├── 20-my-org
        │   │   └── savedsearches.conf
        │   ├── 50-splunk-admin
        │   │   ├── indexes.conf
        │   │   ├── macros.conf
        │   │   └── transforms.conf
        │   └── 70-firewall-admins
        │       ├── data
        │       │   └── ui
        │       │       └── views
        │       │           ├── attacks_noc_bigscreen.xml
        │       │           ├── device_health.xml
        │       │           └── user_tracking.xml
        │       └── eventtypes.conf

    Commands:

        cd Splunk_CiscoSecuritySuite
        ksconf combine default.d/* --target=default
        """)
    format = "manual"

    def register_args(self, parser):
        parser.add_argument("source", nargs="+", help="""
            The source directory where configuration files will be merged from.
            When multiple sources directories are provided, start with the most general and end
            with the specific; later sources will override values from the earlier ones.
            Supports wildcards so a typical Unix 'conf.d/##-NAME' directory structure works well."""
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--target", "-t", help="""
            Directory where the merged files will be stored.
            Typically either 'default' or 'local'"""
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--dry-run", "-D", default=False, action="store_true", help="""
            Enable dry-run mode.
            Instead of writing to TARGET, preview changes as a 'diff'.
            If TARGET doesn't exist, then show the merged file.""")
        parser.add_argument("--banner", "-b",
                            default=" **** WARNING: This file is managed by 'ksconf combine', do "
                                    "not edit hand-edit this file! ****",
                            help="A warning banner to discourage manual editing of conf files.")

    def run(self, args):
        # Note: this match is case-sensitive.  If you're on Windows, name your files accordingly.
        conf_file_re = re.compile(r"([a-z]+\.conf|(default|local)\.meta)$")

        if args.target is None:
            self.stderr.write("Must provide the '--target' directory.\n")
            return EXIT_CODE_MISSING_ARG

            self.stderr.write("Combining conf files into {}\n".format(args.target))
        args.source = list(_expand_glob_list(args.source))
        for src in args.source:
            self.stderr.write("Reading conf files from {}\n".format(src))

        marker_file = os.path.join(args.target, CONTROLLED_DIR_MARKER)
        if os.path.isdir(args.target):
            if not os.path.isfile(os.path.join(args.target, CONTROLLED_DIR_MARKER)):
                self.stderr.write("Target directory already exists, but it appears to have been"
                                  "created by some other means.  Marker file missing.\n")
                return EXIT_CODE_COMBINE_MARKER_MISSING
        elif args.dry_run:
            self.stderr.write(
                "Skipping creating destination folder {0} (dry-run)\n".format(args.target))
        else:
            self.stderr.write("Creating destination folder {0}\n".format(args.target))
            os.mkdir(args.target)
            open(marker_file, "w").write("This directory is managed by KSCONF.  Don't touch\n")

        # Build a common tree of all src files.
        src_file_index = defaultdict(list)
        for src_root in args.source:
            for (root, dirs, files) in relwalk(src_root):
                for fn in files:
                    # Todo: Add blacklist CLI support:  defaults to consider: *sw[po], .git*, .bak, .~
                    if fn.endswith(".swp") or fn.endswith("*.bak"):
                        continue  # pragma: no cover  (peephole optimization)
                    src_file = os.path.join(root, fn)
                    src_path = os.path.join(src_root, root, fn)
                    src_file_index[src_file].append(src_path)

        # Find a set of files that exist in the target folder, but in NO source folder (for cleanup)
        target_extra_files = set()
        for (root, dirs, files) in relwalk(args.target):
            for fn in files:
                tgt_file = os.path.join(root, fn)
                if tgt_file not in src_file_index:
                    # Todo:  Add support for additional blacklist wildcards (using fnmatch)
                    if fn == CONTROLLED_DIR_MARKER or fn.endswith(".bak"):
                        continue  # pragma: no cover (peephole optimization)
                    target_extra_files.add(tgt_file)

        for (dest_fn, src_files) in sorted(src_file_index.items()):
            dest_path = os.path.join(args.target, dest_fn)

            # Make missing destination folder, if missing
            dest_dir = os.path.dirname(dest_path)
            if not os.path.isdir(dest_dir) and not args.dry_run:
                os.makedirs(dest_dir)

            # Handle conf files and non-conf files separately
            if not conf_file_re.search(dest_fn):
                # self.stderr.write("Considering {0:50}  NON-CONF Copy from source:  {1!r}\n".format(dest_fn, src_files[-1]))
                # Always use the last file in the list (since last directory always wins)
                src_file = src_files[-1]
                if args.dry_run:
                    if os.path.isfile(dest_path):
                        if file_compare(src_file, dest_path):
                            smart_rc = SMART_NOCHANGE
                        else:
                            if (_is_binary_file(src_file) or _is_binary_file(dest_path)):
                                # Binary files.  Can't compare...
                                smart_rc = "DRY-RUN (NO-DIFF=BIN)"
                            else:
                                show_text_diff(self.stdout, dest_path, src_file)
                                smart_rc = "DRY-RUN (DIFF)"
                    else:
                        smart_rc = "DRY-RUN (NEW)"
                else:
                    smart_rc = smart_copy(src_file, dest_path)
                if smart_rc != SMART_NOCHANGE:
                    self.stderr.write(
                        "Copy <{0}>   {1:50}  from {2}\n".format(smart_rc, dest_path, src_file))
            else:
                # Handle merging conf files
                dest = ConfFileProxy(os.path.join(args.target, dest_fn), "r+",
                                     parse_profile=PARSECONF_MID)
                srcs = [ConfFileProxy(sf, "r", parse_profile=PARSECONF_STRICT) for sf in src_files]
                # self.stderr.write("Considering {0:50}  CONF MERGE from source:  {1!r}\n".format(dest_fn, src_files[0]))
                smart_rc = merge_conf_files(dest, srcs, dry_run=args.dry_run,
                                            banner_comment=args.banner)
                if smart_rc != SMART_NOCHANGE:
                    self.stderr.write(
                        "Merge <{0}>   {1:50}  from {2!r}\n".format(smart_rc, dest_path,
                                                                    src_files))

        if True and target_extra_files:  # Todo: Allow for cleanup to be disabled via CLI
            self.stderr.write("Cleaning up extra files not part of source tree(s):  {0} files.\n".
                format(len(target_extra_files)))
            for dest_fn in target_extra_files:
                self.stderr.write("Remove unwanted file {0}\n".format(dest_fn))
                os.unlink(os.path.join(args.target, dest_fn))
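
_expand_glob_list() is referenced above but not shown in this snippet.  A plausible sketch of what such a helper needs to do — expand each shell glob while preserving the caller's layer order so later layers still win — follows; this is an assumption, not the actual ksconf implementation:

import glob

def expand_glob_list(patterns):
    # Expand each pattern in the order given; sort matches within a pattern so
    # 'default.d/10-upstream' is merged before 'default.d/70-firewall-admins'.
    for pattern in patterns:
        for path in sorted(glob.glob(pattern)):
            yield path

# list(expand_glob_list(["default.d/*"]))
# -> ['default.d/10-upstream', 'default.d/20-my-org', 'default.d/50-splunk-admin', ...]
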
Ejemplo n.º 26
0
class FilterCmd(KsconfCmd):
    help = "A stanza-aware GREP tool for conf files"
    description = dedent("""
    Filter the contents of a conf file in various ways.  Stanzas can be included
    or excluded based on a provided filter or based on the presence or value of a key.

    Where possible, this command supports GREP-like arguments to bring a familiar feel.
    """)

    # format = "manual"
    maturity = "alpha"

    def __init__(self, *args, **kwargs):
        super(FilterCmd, self).__init__(*args, **kwargs)
        self.stanza_filters = None
        self.attr_presence_filters = None

    def register_args(self, parser):
        # type: (argparse.ArgumentParser) -> None
        parser.add_argument("conf",
                            metavar="CONF",
                            help="Input conf file",
                            nargs="+",
                            type=ConfFileType("r",
                                              parse_profile=PARSECONF_MID_NC)
                            ).completer = conf_files_completer
        parser.add_argument(
            "-o",
            "--output",
            metavar="FILE",
            type=argparse.FileType('w'),
            default=self.stdout,
            help="File where the filtered results are written.  "
            "Defaults to standard out.")
        parser.add_argument(
            "--comments",
            "-C",
            action="store_true",
            default=False,
            help="Preserve comments.  Comments are discarded by default.")
        parser.add_argument("--verbose",
                            action="store_true",
                            default=False,
                            help="Enable additional output.")

        parser.add_argument(
            "--match",
            "-m",  # metavar="MODE",
            choices=["regex", "wildcard", "string"],
            default="wildcard",
            help=dedent("""\
            Specify pattern matching mode.
            Defaults to 'wildcard' allowing for ``*`` and  ``?`` matching.
            Use 'regex' for more power but watch out for shell escaping.
            Use 'string' to enable literal matching."""))
        parser.add_argument("--ignore-case",
                            "-i",
                            action="store_true",
                            help=dedent("""\
            Ignore case when comparing or matching strings.
            By default matches are case-sensitive."""))
        parser.add_argument("--invert-match",
                            "-v",
                            action="store_true",
                            help=dedent("""\
            Invert match results.
            This can be used to show what content does NOT match,
            or make a backup copy of excluded content."""))

        pg_out = parser.add_argument_group(
            "Output mode",
            dedent("""\
            Select an alternate output mode.
            If any of the following options are used, the stanza output is not shown.
            """))
        pg_out.add_argument(
            "--files-with-matches",
            "-l",
            action="store_true",
            help="List files that match the given search criteria")
        pg_om1 = pg_out.add_mutually_exclusive_group()
        pg_om1.add_argument("--count",
                            "-c",
                            action="store_true",
                            help="Count matching stanzas")
        pg_om1.add_argument("--brief",
                            "-b",
                            action="store_true",
                            help="List name of matching stanzas")

        pg_sel = parser.add_argument_group(
            "Stanza selection",
            dedent("""\
            Include or exclude entire stanzas using these filter options.

            All filter options can be provided multiple times.
            If you have a long list of filters, they can be saved in a file and referenced using
            the special ``file://`` prefix.  One entry per line."""))

        pg_sel.add_argument("--stanza",
                            metavar="PATTERN",
                            action="append",
                            default=[],
                            help=dedent("""
            Match any stanza whose name matches the given pattern.
            PATTERN supports bulk patterns via the ``file://`` prefix."""))

        pg_sel.add_argument("--attr-present",
                            metavar="ATTR",
                            action="append",
                            default=[],
                            help=dedent("""\
            Match any stanza that includes the ATTR attribute.
            ATTR supports bulk attribute patterns via the ``file://`` prefix."""
                                        ))
        '''# Add next
        pg_sel.add_argument("--attr-eq", metavar=("ATTR", "PATTERN"), nargs=2, action="append",
                            default=[],
                            help="""
            Match any stanza that includes an attribute matching the pattern.
            PATTERN supports the special ``file://filename`` syntax.""")
        '''
        ''' # This will be more difficult
        pg_sel.add_argument("--attr-ne",  metavar=("ATTR", "PATTERN"), nargs=2, action="append",
                            default=[],
                            help="""
            Match any stanza that includes an attribute matching the pattern.
            PATTERN supports the special ``file://`` syntax.""")
        '''

        pg_con = parser.add_argument_group(
            "Attribute selection",
            dedent("""\
            Include or exclude attributes passed through.
            By default, all attributes are preserved.
            Allowlist (keep) operations are performed before blocklist (reject) operations."""
                   ))

        pg_con.add_argument("--keep-attrs",
                            metavar="WC-ATTR",
                            default=[],
                            action="append",
                            help=dedent("""\
            Select which attribute(s) will be preserved.
            This space separated list of attributes indicates what to preserve.
            Supports wildcards."""))

        pg_con.add_argument("--reject-attrs",
                            metavar="WC-ATTR",
                            default=[],
                            action="append",
                            help=dedent("""\
            Select which attribute(s) will be discarded.
            This space separated list of attributes indicates what to discard.
            Supports wildcards."""))

    def prep_filters(self, args):
        flags = 0
        if args.ignore_case:
            flags |= FilteredList.IGNORECASE
        if args.verbose:
            flags |= FilteredList.VERBOSE

        self.stanza_filters = create_filtered_list(args.match,
                                                   flags).feedall(args.stanza)
        self.attr_presence_filters = create_filtered_list(args.match, flags)
        self.attr_presence_filters.feedall(args.attr_present)

        if args.keep_attrs or args.reject_attrs:
            self.attrs_keep_filter = FilterListWildcard(flags)
            for attrs in args.keep_attrs:
                self.attrs_keep_filter.feedall(attrs.split(" "))
            self.attrs_reject_filter = FilterListWildcard(FilteredList.INVERT
                                                          | flags)
            for attrs in args.reject_attrs:
                self.attrs_reject_filter.feedall(attrs.split(" "))
        else:
            # Bypass filter
            self.filter_attrs = lambda x: x

    def _test_stanza(self, stanza, attributes):
        if self.stanza_filters.match(stanza):
            # If there are no attribute level filters, automatically keep (preserves empty stanzas)
            if not self.attr_presence_filters.has_rules:
                return True
            # See if any of the attributes we are looking for exist, if so keep the entire stanza
            for attr in attributes:
                if self.attr_presence_filters.match(attr):
                    return True
        return False

    def filter_attrs(self, content):
        d = {}
        for (attr, value) in content.items():
            if self.attrs_keep_filter.match(
                    attr) and self.attrs_reject_filter.match(attr):
                d[attr] = content[attr]
        return d

    def output(self, args, matches, filename):
        if args.files_with_matches:
            if matches:
                if args.count:
                    args.output.write("{} has {} matching stanza(s)\n".format(
                        filename, len(matches)))
                elif args.brief:
                    for stanza_name in matches:
                        args.output.write("{}: {}\n".format(
                            filename, stanza_name))
                else:
                    # Just show a single file
                    args.output.write("{}\n".format(filename))
            elif args.verbose:
                self.stderr.write(
                    "No matching stanzas in {}\n".format(filename))
        elif args.count:
            args.output.write("{}\n".format(len(matches)))
        elif args.brief:
            for stanza_name in matches:
                args.output.write("{}\n".format(stanza_name))
        else:
            if len(args.conf) > 1:
                args.output.write("#  {}\n".format(filename))
            if matches:
                write_conf_stream(args.output, matches)
            elif args.verbose:
                self.stderr.write(
                    "No matching stanzas in {}\n".format(filename))
            if args.verbose:
                sys.stderr.write("Matched {} stanzas from {}\n".format(
                    len(matches), filename))

    def run(self, args):
        ''' Filter configuration files. '''
        self.prep_filters(args)

        # Allowing multiple input CONF files means we could end up with duplicate stanzas
        # (not detected by the parser).  For now that just means duplicate stanzas in the
        # output, which may be problematic.  It's up to the invoker to decide whether they
        # care; either way this is still handy for a quick "grep" across many files.

        for conf in args.conf:
            conf.set_parser_option(keep_comments=args.comments)
            cfg = conf.data
            # Should this be an ordered dict?
            cfg_out = dict()
            for stanza_name, attributes in cfg.items():
                keep = self._test_stanza(stanza_name,
                                         attributes) ^ args.invert_match
                if keep:
                    cfg_out[stanza_name] = self.filter_attrs(attributes)

            self.output(args, cfg_out, conf.name)
            # Explicit flush used to resolve a CLI unittest timing issue in pypy
            args.output.flush()

        return EXIT_CODE_SUCCESS
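
The 'wildcard' match mode above is implemented by FilterListWildcard, which is not shown here.  One common way to get that kind of matching is fnmatch.translate; the sketch below is a rough approximation, not the ksconf internals:

import fnmatch
import re

def wildcard_matcher(patterns, ignore_case=False):
    # Compile shell-style patterns ('*' and '?') into a single alternation regex.
    flags = re.IGNORECASE if ignore_case else 0
    combined = "|".join(fnmatch.translate(p) for p in patterns) or r"(?!)"
    rx = re.compile(combined, flags)
    return lambda text: bool(rx.match(text))

match = wildcard_matcher(["props:*", "transforms:extract_*"], ignore_case=True)
print(match("PROPS:access_combined"))    # True
print(match("eventtypes:failed_login"))  # False
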
Ejemplo n.º 27
0
class RestExportCmd(KsconfCmd):
    help = "Export .conf settings as a curl script to apply to a Splunk instance later (via REST)"
    description = dedent("""\
    Build an executable script of the stanzas in a configuration file that can be later applied to
    a running Splunk instance via the Splunkd REST endpoint.

    This can be helpful when pushing complex props and transforms to an instance where you only have
    UI access and can't directly publish an app.

    """)
    format = "manual"
    maturity = "beta"

    def register_args(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument("conf", metavar="CONF", nargs="+",
                            type=ConfFileType("r", "load", parse_profile=PARSECONF_LOOSE),
                            help="Configuration file(s) to export settings from."
                            ).completer = conf_files_completer
        parser.add_argument("--output", "-t", metavar="FILE",
                            type=FileType("w"), default=sys.stdout,
                            help="Save the shell script output to this file.  "
                                 "If not provided, the output is written to standard output.")

        prsout = parser.add_argument_group("Output Control")

        '''
        prsout.add_argument("--syntax", choices=["curl", "powershell"],  # curl-windows?
                            default="curl",
                            help="Pick the output syntax mode.  "
                                 "Currently only 'curl' is supported.")
        '''
        prsout.add_argument("--disable-auth-output", action="store_true", default=False,
                            help="Turn off sample login curl commands from the output.")
        prsout.add_argument("--pretty-print", "-p", action="store_true", default=False,
                            help=dedent("""\
            Enable pretty-printing.
            Make shell output a bit more readable by splitting entries across lines."""))

        parsg1 = parser.add_mutually_exclusive_group(required=False)
        parsg1.add_argument("-u", "--update", action="store_true", default=False,
                            help="Assume that the REST entities already exist.  "
                                 "By default, output assumes stanzas are being created.")
        parsg1.add_argument("-D", "--delete", action="store_true", default=False,
                            help=dedent("""\
            Remove existing REST entities.  This is a destructive operation.
            In this mode, stanza attributes are unnecessary and ignored.
            NOTE:  This works for 'local' entities only; the default folder cannot be updated.
            """))

        parser.add_argument("--url", default="https://localhost:8089",
                            help="URL of Splunkd.  Default:  %(default)s")
        parser.add_argument("--app", default="$SPLUNK_APP",
                            help="Set the namespace (app name) for the endpoint")

        parser.add_argument("--user", help="Deprecated.  Use --owner instead.")
        parser.add_argument("--owner", default="nobody",
                            help="Set the object owner.  Typically, the default of 'nobody' is "
                                 "ideal if you want to share the configurations at the app-level.")
        parser.add_argument("--conf", dest="conf_type", metavar="TYPE",
                            help=dedent("""\
            Explicitly set the configuration file type.  By default, this is derived from CONF, but
            sometimes it's helpful to set this explicitly.  Can be any valid Splunk conf file type.
            Examples include: 'app', 'props', 'tags', 'savedsearches', etc."""))

        parser.add_argument("--extra-args", action="append",
                            help=dedent("""\
            Extra arguments to pass to all CURL commands.
            Quote arguments on the command line to prevent confusion between arguments to ksconf vs
            curl."""))

    @staticmethod
    def build_rest_url(base, owner, app, conf):
        # XXX: Quote owner & app; however, for now we're still allowing the user to pass through an
        #  environment variable as-is, and quoting would break that.   Need to make a decision;
        # for now this is not likely to be a big issue given app and user name restrictions.
        return build_rest_url(base, "configs/conf-{}".format(conf), owner, app)

    def run(self, args):
        ''' Convert a conf file into a bunch of CURL commands'''
        """

        Some inspiration in the form of CURL commands...

        [single_quote_kv]
        REGEX = ([^=\s]+)='([^']+)'
        FORMAT = $1::$2
        MV_ADD = 0

        CREATE NEW:

        curl -k https://SPLUNK:8089/servicesNS/nobody/my_app/configs/conf-transforms \
         -H "Authorization: Splunk $SPLUNKDAUTH" -X POST \
         -d name=single_quote_kv \
         -d REGEX="(%5B%5E%3D%5Cs%5D%2B)%3D%27(%5B%5E%27%5D%2B)%27" \
         -d FORMAT='$1::$2'

        UPDATE EXISTING:  (note the change in URL/name attribute)

        curl -k https://SPLUNK:8089/servicesNS/nobody/my_app/configs/conf-transforms/single_quote_kv \
         -H "Authorization: Splunk $SPLUNKDAUTH" -X POST \
         -d REGEX="(%5B%5E%3D%5Cs%5D%2B)%3D%27(%5B%5E%27%5D%2B)%27" \
         -d FORMAT='$1::$2' \
         -d MV_ADD=0
        """
        stream = args.output

        if args.user:       # pragma: no cover
            from warnings import warn
            warn("Use '--owner' instead of '--user'", DeprecationWarning)
            if args.owner != "nobody":
                raise ValueError("Can't use both --user and --owner at the same time!")
            args.owner = args.user

        if args.pretty_print:
            line_breaks = 2
        else:
            line_breaks = 1

        if args.disable_auth_output is False:
            # Make this preamble optional
            stream.write("## Example of creating a local SPLUNKDAUTH token\n")
            stream.write("export SPLUNKDAUTH=$("
                         "curl -ks {}/services/auth/login -d username=admin -d password=changeme "
                         "| grep sessionKey "
                         r"| sed -E 's/[ ]*<sessionKey>(.*)<.sessionKey>/\1/')".format(args.url))
            stream.write('; [[ -n $SPLUNKDAUTH ]] && echo "Login token created"')
            stream.write("\n\n\n")

        for conf_proxy in args.conf:
            conf = conf_proxy.data
            if args.conf_type:
                conf_type = args.conf_type
            else:
                conf_type = os.path.basename(conf_proxy.name).replace(".conf", "")

            stream.write("# CURL REST commands for {}\n".format(conf_proxy.name))

            for stanza_name, stanza_data in conf.items():
                cc = CurlCommand()
                cc.pretty_format = args.pretty_print
                cc.url = self.build_rest_url(args.url, args.owner, args.app, conf_type)
                if args.extra_args:
                    for extra_arg in args.extra_args:
                        cc.extend_args(extra_arg)

                if stanza_name is GLOBAL_STANZA:
                    # XXX:  Research proper handling of default/global stanzas..
                    # As-is, curl returns an HTTP error, but yet the new entry is added to the
                    # conf file.  So I suppose we could ignore the exit code?!    ¯\_(ツ)_/¯
                    stream.write("### WARN:  Writing to the default stanza may not work as "
                                 "expected.  Or it may work, but be reported as a failure.  "
                                 "Patches welcome!\n")
                    cc.url += "/default"
                elif args.update or args.delete:
                    cc.url += "/" + quote(stanza_name, "")  # Must quote '/'s too.
                else:
                    cc.data["name"] = stanza_name

                if args.delete:
                    cc.method = "DELETE"
                else:
                    # Add individual keys
                    for (key, value) in stanza_data.items():
                        cc.data[key] = value

                cc.headers["Authorization"] = "Splunk $SPLUNKDAUTH"

                stream.write(cc.get_command())
                stream.write("\n" * line_breaks)
            stream.write("\n\n" * line_breaks)
        stream.write("\n")

        return EXIT_CODE_SUCCESS
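
The per-stanza command construction above goes through ksconf's CurlCommand and build_rest_url helpers, which aren't shown in this snippet.  A rough, hypothetical sketch of the same idea — note that quote(name, safe="") also percent-encodes '/', so stanza names survive as a single URL path segment:

from urllib.parse import quote, urlencode

def stanza_to_curl(base_url, owner, app, conf_type, stanza, attrs, update=False):
    # Illustrative helper only; not part of ksconf.
    url = "{}/servicesNS/{}/{}/configs/conf-{}".format(base_url, owner, app, conf_type)
    data = dict(attrs)
    if update:
        url += "/" + quote(stanza, safe="")   # existing entity: name goes in the URL
    else:
        data["name"] = stanza                 # new entity: name goes in the POST body
    return ("curl -k {} -H 'Authorization: Splunk $SPLUNKDAUTH' "
            "-X POST -d '{}'".format(url, urlencode(data)))

print(stanza_to_curl("https://localhost:8089", "nobody", "my_app", "transforms",
                     "single_quote_kv", {"REGEX": r"([^=\s]+)='([^']+)'", "FORMAT": "$1::$2"}))
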
Ejemplo n.º 28
0
    def register_args(self, parser):
        # type: (argparse.ArgumentParser) -> None
        parser.set_defaults(mode="ask")
        parser.add_argument("source", metavar="SOURCE",
                            type=ConfFileType("r+", "load", parse_profile=PARSECONF_STRICT_NC),
                            help="The source configuration file to pull changes from. "
                                 "(Typically the :file:`local` conf file)"
                            ).completer = conf_files_completer
        parser.add_argument("target", metavar="TARGET",
                            type=ConfFileType("r+", "none", accept_dir=True,
                                              parse_profile=PARSECONF_STRICT), help=dedent("""\
            Configuration file or directory to push the changes into.
            (Typically the :file:`default` folder)
            """)
                            ).completer = conf_files_completer
        grp1 = parser.add_mutually_exclusive_group()
        grp1.add_argument("--batch", "-b", action="store_const",
                          dest="mode", const="batch", help=dedent("""\
            Use batch mode where all configuration settings are automatically promoted.
            All changes are removed from source and applied to target.
            The source file will be removed unless
            ``--keep-empty`` is used."""))
        grp1.add_argument("--interactive", "-i",
                          action="store_const",
                          dest="mode", const="interactive", help=dedent("""\
            Enable interactive mode where the user will be prompted to approve
            the promotion of specific stanzas and attributes.
            The user will be able to apply, skip, or edit the changes being promoted."""))
        grp1.add_argument("--summary", "-s",
                          action="store_const",
                          dest="mode", const="summary",
                          help="Summarize content that could be promoted.")

        parser.add_argument("--verbose", action="store_true", default=False,
                            help="Enable additional output.")

        pg_ftr = parser.add_argument_group("Automatic filtering options", dedent("""\
            Include or exclude stanzas to promote using these filter options.
            Stanzas selected by these filters will be promoted.

            All filter options can be provided multiple times.
            If you have a long list of filters, they can be saved in a file and
            referenced using the special ``file://`` prefix.  One entry per line."""))
        pg_ftr.add_argument("--match", "-m",
                            choices=["regex", "wildcard", "string"],
                            default="wildcard",
                            help=dedent("""\
            Specify pattern matching mode.
            Defaults to 'wildcard' allowing for ``*`` and  ``?`` matching.
            Use 'regex' for more power but watch out for shell escaping.
            Use 'string' to enable literal matching."""))
        pg_ftr.add_argument("--ignore-case", action="store_true",
                            help=dedent("""\
            Ignore case when comparing or matching strings.
            By default matches are case-sensitive."""))
        pg_ftr.add_argument("--invert-match", "-v", action="store_true",
                            help=dedent("""\
            Invert match results.
            This can be used to prevent content from being promoted."""))
        pg_ftr.add_argument("--stanza", metavar="PATTERN", action="append", default=[],
                            help=dedent("""\
            Promote any stanza with a name matching the given pattern.
            PATTERN supports bulk patterns via the ``file://`` prefix."""))

        parser.add_argument("--force", "-f",
                            action="store_true", default=False,
                            help=
            "Disable safety checks. "
            "Don't check to see if SOURCE and TARGET share the same basename.")
        parser.add_argument("--keep", "-k",
                            action="store_true", default=False, help=dedent("""\
            Keep conf settings in the source file.
            All changes will be copied into the TARGET file instead of being moved there.
            This is typically a bad idea since local always overrides default."""))
        parser.add_argument("--keep-empty",
                            action="store_true", default=False, help=dedent("""\
            Keep the source file, even if after the settings promotions the file has no content.
            By default, SOURCE will be removed after all content has been moved into TARGET.
            Splunk will re-create any necessary local files on the fly."""))
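
Several of the filter options above accept a ``file://`` prefix so long pattern lists can live in a file, one entry per line.  A hedged sketch of how that expansion could work (this is not the ksconf FilteredList implementation):

def expand_patterns(values):
    # Replace any 'file://PATH' entry with the non-empty lines of that file.
    out = []
    for value in values:
        if value.startswith("file://"):
            with open(value[len("file://"):]) as fh:
                out.extend(line.strip() for line in fh if line.strip())
        else:
            out.append(value)
    return out

# expand_patterns(["props:*", "file://stanza_allowlist.txt"])
# -> ['props:*', <each non-empty line of stanza_allowlist.txt>]
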
Ejemplo n.º 29
0
class MinimizeCmd(KsconfCmd):
    help = "Minimize the target file by removing entries duplicated in the default conf(s)"
    description = dedent("""\
    Minimize a conf file by removing the default settings

    Reduce a local conf file to only your intended changes without manually tracking
    which entries you've edited.  Minimizing local conf files makes your local
    customizations easier to read and often results in cleaner add-on upgrades.

    A typical scenario & why this matters:

    To customize a Splunk app or add-on, start by copying the conf file from
    default to local and then applying your changes to the local file.  That's good.
    But stopping here may complicate future upgrades, because the local file
    doesn't contain *just* your settings; it contains all the default settings too.
    Fixes published by the app creator may be masked by your local settings.  A
    better approach is to reduce the local conf file, leaving only the stanzas and
    settings that you intended to change.  This makes your conf files easier to read
    and makes upgrades easier, but it's tedious to do by hand.

    For special cases, the '--explode-default' mode reduces duplication between
    entries in normal stanzas and global/default entries.  If 'disabled = 0' is a
    global default, it's technically safe to remove that setting from individual
    stanzas.  But sometimes it's preferable to be explicit, and this behavior may be
    too heavy-handed for general use, so it's off by default.  Use this mode if your
    conf file has been fully expanded (i.e., conf entries downloaded via REST,
    or the output of "btool list").  This isn't perfect, since many apps push their
    settings into the global namespace, but it can help.


    Example usage:

        cd Splunk_TA_nix
        cp default/inputs.conf local/inputs.conf

        # Edit 'disabled' and 'interval' settings in-place
        vi local/inputs.conf

        # Remove all the extra (unmodified) bits
        ksconf minimize --target=local/inputs.conf default/inputs.conf
    """)
    format = "manual"
    maturity = "beta"
    ''' Make sure this works before advertising (same file as target and source????)
    # Note:  Use the 'merge' command to "undo"
    ksconf merge --target=local/inputs.conf default/inputs local/inputs.conf
    '''
    def register_args(self, parser):
        parser.add_argument("conf",
                            metavar="FILE",
                            nargs="+",
                            type=ConfFileType("r",
                                              "load",
                                              parse_profile=PARSECONF_LOOSE),
                            help="""
            The default configuration file(s) used to determine what base settings are
            unnecessary to keep in the target file."""
                            ).completer = conf_files_completer
        parser.add_argument("--target",
                            "-t",
                            metavar="FILE",
                            type=ConfFileType("r+",
                                              "load",
                                              parse_profile=PARSECONF_STRICT),
                            help="""
            This is the local file that you wish to remove the duplicate settings from.
            By default, this file will be read and then updated with a minimized version."""
                            ).completer = conf_files_completer
        grp1 = parser.add_mutually_exclusive_group()
        grp1.add_argument("--dry-run",
                          "-D",
                          default=False,
                          action="store_true",
                          help="""
            Enable dry-run mode.
            Instead of writing the minimized TARGET file, preview what would be removed in
            the form of a 'diff'.""")
        grp1.add_argument("--output",
                          type=ConfFileType("w",
                                            "none",
                                            parse_profile=PARSECONF_STRICT),
                          default=None,
                          help="""
            Write the minimized output to a separate file instead of updating TARGET.
            This can be used to preview changes if dry-run produces a large diff.
            This may also be helpful in other workflows."""
                          ).completer = conf_files_completer
        parser.add_argument("--explode-default",
                            "-E",
                            default=False,
                            action="store_true",
                            help="""
            Enable minimization across stanzas as well as files for special use-cases.
            This mode will not only minimize the same stanza across multiple config files,
            it will also attempt to minimize any default values stored in the [default] or global
            stanza as well.
            Example:  Trim out cruft in savedsearches.conf by pointing to
            etc/system/default/savedsearches.conf""")
        parser.add_argument("-k",
                            "--preserve-key",
                            action="append",
                            default=[],
                            help="""
            Specify a key that should be allowed to be a duplicate but should be preserved
            within the minimized output.  For example, it may be desirable to keep the
            'disabled' setting in the local file, even if it's enabled by default."""
                            )

    def run(self, args):
        if args.explode_default:
            # Is this the SAME as exploding the defaults AFTER the merge?;
            # I think NOT.  Needs testing
            cfgs = [explode_default_stanza(conf.data) for conf in args.conf]
        else:
            cfgs = [conf.data for conf in args.conf]
        # Merge all config files:
        default_cfg = merge_conf_dicts(*cfgs)
        del cfgs
        local_cfg = args.target.data
        orig_cfg = dict(args.target.data)

        if args.explode_default:
            # Make a skeleton default dict so that the global default values are
            # represented for every stanza present in the target file.
            default_stanza = default_cfg.get(GLOBAL_STANZA,
                                             default_cfg.get("default"))
            skeleton_default = dict([(k, {}) for k in args.target.data])
            skeleton_default = explode_default_stanza(skeleton_default,
                                                      default_stanza)
            default_cfg = merge_conf_dicts(skeleton_default, default_cfg)

            local_cfg = explode_default_stanza(local_cfg)
            local_cfg = explode_default_stanza(local_cfg, default_stanza)

        minz_cfg = dict(local_cfg)

        # This may be a bit too simplistic.  Weird interplay may exist when a [default] stanza
        # and a local [Upstream] stanza line up, but [Upstream] in our default file does not.
        # XXX:  Add a unit test!

        diffs = compare_cfgs(default_cfg, local_cfg, allow_level0=False)

        for op in diffs:
            if op.tag == DIFF_OP_DELETE:
                # This is normal.  Don't expect all default content to be mirrored into local
                continue
            elif op.tag == DIFF_OP_EQUAL:
                if isinstance(op.location, DiffStanza):
                    del minz_cfg[op.location.stanza]
                else:
                    # Todo: Only preserve keys for stanzas where at least 1 key has been modified
                    if match_bwlist(op.location.key, args.preserve_key):
                        '''
                        self.stderr.write("Skipping key [PRESERVED]  [{0}] key={1} value={2!r}\n"
                                     "".format(op.location.stanza, op.location.key, op.a))
                        '''
                        continue  # pragma: no cover  (peephole optimization)
                    del minz_cfg[op.location.stanza][op.location.key]
                    # If that was the last remaining key in the stanza, delete the entire stanza
                    if not _drop_stanza_comments(minz_cfg[op.location.stanza]):
                        del minz_cfg[op.location.stanza]
            elif op.tag == DIFF_OP_INSERT:
                '''
                self.stderr.write("Keeping local change:  <{0}> {1!r}\n-{2!r}\n+{3!r}\n\n\n".format(
                    op.tag, op.location, op.b, op.a))
                '''
                continue
            elif op.tag == DIFF_OP_REPLACE:
                '''
                self.stderr.write("Keep change:  <{0}> {1!r}\n-{2!r}\n+{3!r}\n\n\n".format(
                    op.tag, op.location, op.b, op.a))
                '''
                continue

        if args.dry_run:
            if args.explode_default:
                rc = show_diff(self.stdout,
                               compare_cfgs(orig_cfg, minz_cfg),
                               headers=(args.target.name,
                                        args.target.name + "-new"))
            else:
                rc = show_diff(self.stdout,
                               compare_cfgs(local_cfg, default_cfg),
                               headers=(args.target.name,
                                        args.target.name + "-new"))
            return rc

        if args.output:
            args.output.dump(minz_cfg)
        else:
            args.target.dump(minz_cfg)
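
explode_default_stanza() is used heavily above but isn't shown in these snippets.  Based on the '--explode-default' help text, it presumably copies [default]/global values into every stanza that doesn't already override them.  A hedged sketch of that idea (plain dicts, no GLOBAL_STANZA sentinel):

def explode_default_stanza_sketch(conf, default_stanza=None):
    # Push default-stanza values down into each stanza unless the stanza overrides them.
    defaults = default_stanza or conf.get("default", {})
    exploded = {}
    for stanza, attrs in conf.items():
        if stanza == "default":
            exploded[stanza] = dict(attrs)
            continue
        merged = dict(defaults)
        merged.update(attrs)
        exploded[stanza] = merged
    return exploded

# explode_default_stanza_sketch({"default": {"disabled": "0"}, "web": {"interval": "300"}})
# -> {"default": {"disabled": "0"}, "web": {"disabled": "0", "interval": "300"}}
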
Ejemplo n.º 30
0
    def register_args(self, parser):
        parser.add_argument(
            "tarball",
            metavar="SPL",
            help="The path to the archive to install."
        ).completer = FilesCompleter(allowednames=allowed_extentions)
        parser.add_argument("--dest",
                            metavar="DIR",
                            default=".",
                            help=dedent("""\
            Set the destination path where the archive will be extracted.
            By default, the current directory is used.  Sane values include: etc/apps,
            etc/deployment-apps, and so on.""")
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--app-name",
                            metavar="NAME",
                            default=None,
                            help=dedent("""\
            The app name to use when expanding the archive.
            By default, the app name is taken from the archive as the top-level path included
            in the archive (by convention).
            """))
        parser.add_argument("--default-dir",
                            default="default",
                            metavar="DIR",
                            help=dedent("""\
            Name of the directory where the default contents will be stored.
            This is a useful feature for apps that use a dynamic default directory
            that's created and managed by the 'combine' mode.""")
                            ).completer = DirectoriesCompleter()
        parser.add_argument("--exclude",
                            "-e",
                            action="append",
                            default=[],
                            help=dedent("""\
            Add a file pattern to exclude from extraction.
            Splunk's pseudo-glob patterns are supported here.
            ``*`` for any non-directory match,
            ``...`` for ANY (including directories),
            and ``?`` for a single character."""))
        parser.add_argument("--keep",
                            "-k",
                            action="append",
                            default=[],
                            help=dedent("""\
            Specify a pattern for files to preserve during an upgrade.
            Repeat this argument to keep multiple patterns."""))
        parser.add_argument("--allow-local",
                            default=False,
                            action="store_true",
                            help=dedent("""\
            Allow local/* and local.meta files to be extracted from the archive.
            """))
        parser.add_argument("--git-sanity-check",
                            choices=["off", "changed", "untracked", "ignored"],
                            default="untracked",
                            help=dedent("""\
            By default, 'git status' is run on the destination folder to detect working tree or
            index modifications before the unarchive process starts.

            Sanity check choices go from least restrictive to most thorough:

            'off' prevents all safety checks.
            'changed' aborts only upon local modifications to files tracked by git.
            'untracked' (the default) looks for changed and untracked files.
            'ignored' aborts if any local changes, untracked, or ignored files are found.
            """))
        parser.add_argument("--git-mode",
                            default="stage",
                            choices=["nochange", "stage", "commit"],
                            help=dedent("""\
            Set the desired level of git integration.
            The default mode is *stage*, where new, updated, or removed files are automatically
            handled for you.

            To prevent any ``git add`` or ``git rm`` commands from being run, pick the
            'nochange' mode.
            """))
        parser.add_argument("--no-edit",
                            action="store_true",
                            default=False,
                            help=dedent("""\
            Tell git to skip opening your editor on commit.
            By default, you will be prompted to review/edit the commit message.
            (Git Tip:  Delete the content of the default message to abort the commit.)"""
                                        ))
        parser.add_argument("--git-commit-args",
                            "-G",
                            default=[],
                            action="append",
                            help="Extra arguments to pass to 'git'")