def test_write_with_compare(self):
    """Round-trip a metadata file through MetaData and confirm the output
    is semantically identical to the input."""
    source = self.sample01
    dest = self.twd.get_path("metadata/local.meta")
    metadata = MetaData()
    metadata.feed_file(source)
    with open(dest, "w", encoding="utf-8") as out:
        metadata.write_stream(out)
    # Compare parsed content rather than raw bytes; a single diff record
    # tagged "equal" means both files carry the same configuration.
    diffs = compare_cfgs(parse_conf(source), parse_conf(dest))
    self.assertEqual(len(diffs), 1)
    self.assertEqual(diffs[0].tag, DIFF_OP_EQUAL)
def parse_string(text, profile=None, **kwargs):
    """Parse conf-formatted *text* (dedented first) and return the result.

    With *profile*, delegate to parse_conf(); otherwise fall through to
    parse_conf_stream() passing along any extra keyword arguments.
    """
    stream = StringIO(dedent(text))
    if profile:
        return parse_conf(stream, profile)
    return parse_conf_stream(stream, **kwargs)
def get_conf(self, profile=None, **kwargs):
    """ Parse stdout as a .conf file"""
    stream = StringIO(self.stdout)
    if profile:
        return parse_conf(stream, profile)
    return parse_conf_stream(stream, **kwargs)
def run(self, args):
    ''' Sort one or more configuration file. '''
    stanza_delims = "\n" * args.newlines
    if args.inplace:
        failure = False
        changes = 0
        for conf in args.conf:
            try:
                if not args.force and _has_nosort_marker(conf.name):
                    if not args.quiet:
                        self.stderr.write("Skipping blacklisted file {}\n".format(conf.name))
                    continue
                data = parse_conf(conf, profile=PARSECONF_STRICT)
                conf.close()
                smart_rc = smart_write_conf(conf.name, data,
                                            stanza_delim=stanza_delims, sort=True)
            except ConfParserException as e:
                self.stderr.write("Error trying to process file {0}. "
                                  "Error: {1}\n".format(conf.name, e))
                failure = True
                # BUG FIX: previously this path fell through to the status
                # reporting below, incorrectly claiming "Replaced file ... with
                # sorted content" and counting a change even though the file
                # was never written.  Skip to the next file instead.
                continue
            if smart_rc == SMART_NOCHANGE:
                if not args.quiet:
                    self.stderr.write("Nothing to update. "
                                      "File {0} is already sorted\n".format(conf.name))
            else:
                self.stderr.write("Replaced file {0} with sorted content.\n".format(conf.name))
                changes += 1
        if failure:
            return EXIT_CODE_BAD_CONF_FILE
        if changes:
            return EXIT_CODE_SORT_APPLIED
    else:
        # Non-inplace mode: write sorted output to the target stream, with a
        # banner separating each file when more than one was given.
        for conf in args.conf:
            if len(args.conf) > 1:
                args.target.write("---------------- [ {0} ] ----------------\n\n"
                                  .format(conf.name))
            try:
                data = parse_conf(conf, profile=PARSECONF_STRICT)
                write_conf(args.target, data, stanza_delim=stanza_delims, sort=True)
            except ConfParserException as e:
                self.stderr.write("Error trying processing {0}. Error: {1}\n".
                                  format(conf.name, e))
                return EXIT_CODE_BAD_CONF_FILE
        return EXIT_CODE_SUCCESS
def load(self, profile=None):
    """Parse the underlying stream and return the conf data.

    Entries from *profile*, when given, override this instance's parse
    profile.  Raises ValueError if the stream is not opened for reading.
    """
    if not self.readable():
        # Q: Should we mimic the exception caused by doing a read() on a write-only file object?
        raise ValueError("Unable to load() from {} with mode '{}'".format(
            self._type(), self._mode))
    effective_profile = dict(self._parse_profile)
    if profile:
        effective_profile.update(profile)
    return parse_conf(self.stream, profile=effective_profile)
def test_read_file(self):
    """ Confirm that parse_conf() works with an OS-level file. """
    original = {
        "stanza1": {"key1": "value1", "key2": "value2"},
        "stanza2": {"monkey": "banana", "dog": "cat"},
        "stanza3": {"key_with_no_value": ''},
    }
    buf = StringIO()
    write_conf(buf, original, sort=False)
    buf.seek(0)
    # Round trip: whatever write_conf() emitted must parse back unchanged.
    self.assertDictEqual(original, parse_conf(buf))
def run(self, args):
    """Syntax-check one or more conf files; returns an exit code summarizing
    the worst problem encountered (success / bad conf / internal error)."""
    # Should we read a list of conf files from STDIN?
    if len(args.conf) == 1 and args.conf[0] == "-":
        confs = _stdin_iter()
    else:
        confs = args.conf
    # Tallies: checked / missing / okay / error
    c = Counter()
    exit_code = EXIT_CODE_SUCCESS
    for conf in confs:
        c["checked"] += 1
        if not os.path.isfile(conf):
            self.stderr.write("Skipping missing file: {0}\n".format(conf))
            c["missing"] += 1
            continue
        try:
            # Parse for side effects only; a clean parse means the file is valid.
            parse_conf(conf, profile=PARSECONF_STRICT_NC)
            c["okay"] += 1
            if not args.quiet:
                self.stdout.write("Successfully parsed {0}\n".format(conf))
                self.stdout.flush()
        except ConfParserException as e:
            # A bad file is reported but does not stop the scan of later files.
            self.stderr.write("Error in file {0}: {1}\n".format(conf, e))
            self.stderr.flush()
            exit_code = EXIT_CODE_BAD_CONF_FILE
            # TODO: Break out counts by error type/category (there's only a few of them)
            c["error"] += 1
        except Exception as e:  # pragma: no cover
            # Unexpected failures abort the whole run.
            self.stderr.write(
                "Unhandled top-level exception while parsing {0}. "
                "Aborting.\n{1}\n".format(conf, e))
            debug_traceback()
            exit_code = EXIT_CODE_INTERNAL_ERROR
            c["error"] += 1
            break
    if True:  # show stats or verbose
        self.stdout.write(
            "Completed checking {0[checked]} files. rc={1} Breakdown:\n"
            " {0[okay]} files were parsed successfully.\n"
            " {0[error]} files failed.\n".format(c, exit_code))
    return exit_code
def test_write_nonstr(self):
    """Verify non-string primitives are written out as their string forms.

    Types are not preserved on round trip, but values must not be lost.
    """
    source = {"stanza": {"boolean1": True, "boolean2": False,
                         "int1": 99, "int2": 0, "none": None}}
    buf = StringIO()
    write_conf(buf, source)
    buf.seek(0)
    stanza = parse_conf(buf)["stanza"]
    expected = {"boolean1": "True", "boolean2": "False",
                "int1": "99", "int2": "0", "none": ""}
    for key, value in expected.items():
        self.assertEqual(stanza[key], value)
def snapshot_file_conf(self, path):
    """Capture a parsed snapshot of one .conf file into self._data."""
    # XXX: If we are unable to read the file (IOError/OSError) that should be
    # reported via metadata or via 'failure'
    record = {
        "meta": self._decode_path_meta(path),
        "file": self._get_file_info(path),
    }
    try:
        parsed = parse_conf(path, profile=PARSECONF_MID_NC)
    except ConfParserException as e:
        record["conf"] = None
        record["failure"] = "{}".format(e)
    else:
        # May need to format this differently.  Specifically need some way to
        # textually indicate the global stanza
        stanzas = []
        for stanza_name, attributes in parsed.items():
            entry = {"stanza": stanza_name, "attributes": attributes}
            if stanza_name is GLOBAL_STANZA:
                entry["stanza"] = "**GLOBAL_STANZA**"
            stanzas.append(entry)
        record["conf"] = stanzas
    self._data.append(record)
def read_conf(self, rel_path, profile=PARSECONF_MID):
    """Parse and return the conf file at *rel_path*, relative to this work dir."""
    return parse_conf(self.get_path(rel_path), profile=profile)
def run(self, args):
    """ Install / upgrade a Splunk app from an archive file """
    # Handle ignored files by preserving them as much as possible.
    # Add --dry-run mode? j/k - that's what git is for!

    # --- SAFETY CHECKS: archive and destination must exist -------------------
    if not os.path.isfile(args.tarball):
        self.stderr.write("No such file or directory {}\n".format(args.tarball))
        return EXIT_CODE_FAILED_SAFETY_CHECK
    if not os.path.isdir(args.dest):
        self.stderr.write("Destination directory does not exist: {}\n".format(args.dest))
        return EXIT_CODE_FAILED_SAFETY_CHECK
    f_hash = file_hash(args.tarball)
    self.stdout.write("Inspecting archive: {}\n".format(args.tarball))
    new_app_name = args.app_name

    # ARCHIVE PRE-CHECKS: Archive must contain only one app, no weird paths, ...
    # First pass over the archive (filtered to app.conf) collects: the set of
    # top-level app folder names, the parsed app.conf, and any local files.
    app_name = set()
    app_conf = {}
    files = 0
    local_files = set()
    a = extract_archive(args.tarball, extract_filter=gaf_filter_name_like("app.conf"))
    for gaf in sanity_checker(a):
        gaf_app, gaf_relpath = gaf.path.split("/", 1)
        files += 1
        if gaf.path.endswith("app.conf") and gaf.payload:
            conffile = StringIO(gaf.payload.decode(default_encoding))
            conffile.name = os.path.join(args.tarball, gaf.path)
            app_conf = parse_conf(conffile, profile=PARSECONF_LOOSE)
            del conffile
        elif gaf_relpath.startswith("local") or gaf_relpath.endswith("local.meta"):
            local_files.add(gaf_relpath)
        app_name.add(gaf.path.split("/", 1)[0])
        del gaf_app, gaf_relpath
    if len(app_name) > 1:
        self.stderr.write("The 'unarchive' command only supports extracting a single splunk"
                          " app at a time.\nHowever the archive {} contains {} apps: {}\n"
                          "".format(args.tarball, len(app_name), ", ".join(app_name)))
        return EXIT_CODE_FAILED_SAFETY_CHECK
    else:
        app_name = app_name.pop()
    del a
    if local_files:
        self.stderr.write("Local {} files found in the archive. ".format(len(local_files)))
        if args.allow_local:
            self.stderr.write("Keeping these due to the '--allow-local' flag\n")
        else:
            self.stderr.write("Excluding local files by default. "
                              "Use '--allow-local' to override.")

    # --- APP NAME FIXUPS: strip "-master" / version suffixes -----------------
    if not new_app_name and True:  # if not --no-app-name-fixes
        if app_name.endswith("-master"):
            self.stdout.write("Automatically dropping '-master' from the app name. "
                              "This is often the result of a github export.\n")
            # Trick, but it works...
            new_app_name = app_name[:-7]
        mo = re.search(r"(.*)-\d+\.[\d.-]+$", app_name)
        if mo:
            self.stdout.write("Automatically removing the version suffix from the app name. "
                              "'{}' will be extracted as '{}'\n".format(app_name, mo.group(1)))
            new_app_name = mo.group(1)
    app_basename = new_app_name or app_name
    dest_app = os.path.join(args.dest, app_basename)
    self.stdout.write("Inspecting destination folder: {}\n".format(os.path.abspath(dest_app)))

    # FEEDBACK TO THE USER: UPGRADE VS INSTALL, GIT?, APP RENAME, ...
    app_name_msg = app_name
    vc_msg = "without version control support"
    old_app_conf = {}
    if os.path.isdir(dest_app):
        mode = "upgrade"
        is_git = git_is_working_tree(dest_app)
        try:
            # Ignoring the 'local' entries since distributed apps shouldn't contain local
            old_app_conf_file = os.path.join(dest_app, args.default_dir or "default",
                                             "app.conf")
            old_app_conf = parse_conf(old_app_conf_file, profile=PARSECONF_LOOSE)
        except ConfParserException:
            self.stderr.write("Unable to read app.conf from existing install.\n")
    else:
        mode = "install"
        is_git = git_is_working_tree(args.dest)
    if is_git:
        vc_msg = "with git support"
    if new_app_name and new_app_name != app_name:
        app_name_msg = "{} (renamed from {})".format(new_app_name, app_name)

    def show_pkg_info(conf, label):
        # Print label/author/version pulled from an app.conf dict (old or new).
        self.stdout.write("{} packaging info: '{}' by {} (version {})\n".format(
            label,
            conf.get("ui", {}).get("label", "Unknown"),
            conf.get("launcher", {}).get("author", "Unknown"),
            conf.get("launcher", {}).get("version", "Unknown")))
    if old_app_conf:
        show_pkg_info(old_app_conf, " Installed app")
    if app_conf:
        show_pkg_info(app_conf, " Tarball app")
    self.stdout.write("About to {} the {} app {}.\n".format(mode, app_name_msg, vc_msg))

    # --- GIT SANITY CHECKS (upgrade only) ------------------------------------
    existing_files = set()
    if mode == "upgrade":
        if is_git:
            existing_files.update(git_ls_files(dest_app))
            if not existing_files:
                self.stderr.write("App is in a git repository but no files have been staged "
                                  "or committed. Either commit or remove '{}' and try again."
                                  "\n".format(dest_app))
                return EXIT_CODE_FAILED_SAFETY_CHECK
            if args.git_sanity_check == "off":
                self.stdout.write("The 'git status' safety checks have been disabled via CLI"
                                  "argument. Skipping.\n")
            else:
                d = {
                    # untracked, ignored
                    "changed": (False, False),
                    "untracked": (True, False),
                    "ignored": (True, True)
                }
                is_clean = git_is_clean(dest_app, *d[args.git_sanity_check])
                del d
                if is_clean:
                    self.stdout.write("Git folder is clean. "
                                      "Okay to proceed with the upgrade.\n")
                else:
                    self.stderr.write("Unable to move forward without a clean working tree.\n"
                                      "Clean up and try again. "
                                      "Modifications are listed below.\n\n")
                    self.stderr.flush()
                    if args.git_sanity_check == "changed":
                        git_status_ui(dest_app, "--untracked-files=no")
                    elif args.git_sanity_check == "ignored":
                        git_status_ui(dest_app, "--ignored")
                    else:
                        git_status_ui(dest_app)
                    return EXIT_CODE_FAILED_SAFETY_CHECK
        else:
            # Not a git tree: snapshot the existing file list via os.walk()
            for (root, dirs, filenames) in os.walk(dest_app):
                for fn in filenames:
                    existing_files.add(os.path.join(root, fn))
        self.stdout.write("Before upgrade. App has {} files\n".format(len(existing_files)))
    elif is_git:
        self.stdout.write("Git clean check skipped. Not needed for a fresh app install.\n")

    def fixup_pattern_bw(patterns, prefix=None):
        # Normalize black/white-list patterns: "./x" becomes app-prefixed (or
        # bare), and bare basenames match in any directory.
        modified = []
        for pattern in patterns:
            if pattern.startswith("./"):
                if prefix:
                    pattern = "{0}/{1}".format(prefix, pattern[2:])
                else:
                    pattern = pattern[2:]
                modified.append(pattern)
            # If a pattern like 'tags.conf' or '*.bak' is provided, ues basename match (any dir)
            elif "/" not in pattern:
                modified.append("(^|.../)" + pattern)
            else:
                modified.append(pattern)
        return modified

    # PREP ARCHIVE EXTRACTION
    installed_files = set()
    excludes = list(args.exclude)
    '''
    for pattern in args.exclude:
        # If a pattern like 'default.meta' or '*.bak' is provided, assume it's a basename match.
        if "/" not in pattern:
            excludes.append(".../" + pattern)
        else:
            excludes.append(pattern)
    '''
    if not args.allow_local:
        for pattern in local_files:
            excludes.append("./" + pattern)
    excludes = fixup_pattern_bw(excludes, app_basename)
    self.stderr.write("Extraction exclude patterns: {!r}\n".format(excludes))
    path_rewrites = []
    files_iter = extract_archive(args.tarball)
    if True:
        files_iter = sanity_checker(files_iter)
    if args.default_dir:
        # Redirect "default/" content into the user-specified default dir.
        rep = "/{}/".format(args.default_dir.strip("/"))
        path_rewrites.append(("/default/", rep))
        del rep
    if new_app_name:
        # We do have the "app_name" extracted from our first pass above, but
        regex = re.compile(r'^([^/]+)(?=/)')
        path_rewrites.append((regex, new_app_name))
    if path_rewrites:
        files_iter = gen_arch_file_remapper(files_iter, path_rewrites)

    # --- EXTRACTION ----------------------------------------------------------
    self.stdout.write("Extracting app now...\n")
    for gaf in files_iter:
        if match_bwlist(gaf.path, excludes, escape=False):
            self.stdout.write("Skipping [blacklist] {}\n".format(gaf.path))
            continue
        if not is_git or args.git_mode in ("nochange", "stage"):
            self.stdout.write("{0:60s} {2:o} {1:-6d}\n".format(gaf.path, gaf.size, gaf.mode))
        installed_files.add(gaf.path.split("/", 1)[1])
        full_path = os.path.join(args.dest, gaf.path)
        dir_exists(os.path.dirname(full_path))
        with open(full_path, "wb") as fp:
            fp.write(gaf.payload)
        os.chmod(full_path, gaf.mode)
        del fp, full_path

    files_new, files_upd, files_del = _cmp_sets(installed_files, existing_files)
    '''
    print "New: \n\t{}".format("\n\t".join(sorted(files_new)))
    print "Existing: \n\t{}".format("\n\t".join(sorted(files_upd)))
    print "Removed: \n\t{}".format("\n\t".join(sorted(files_del)))
    '''
    self.stdout.write("Extracted {} files: {} new, {} existing, and {} removed\n".format(
        len(installed_files), len(files_new), len(files_upd), len(files_del)))

    # Filer out "removed" files; and let us keep some based on a keep-whitelist: This should
    # include things like local, ".gitignore", ".gitattributes" and so on
    keep_list = [".git*"]
    keep_list.extend(args.keep)
    if not args.allow_local:
        keep_list += ["local/...", "local.meta"]
    keep_list = fixup_pattern_bw(keep_list)
    self.stderr.write("Keep file patterns: {!r}\n".format(keep_list))
    files_to_delete = []
    files_to_keep = []
    for fn in files_del:
        if match_bwlist(fn, keep_list, escape=False):
            # How to handle a keep of "default.d/..." when we DO want to cleanup the default
            # redirect folder of "default.d/10-upstream"?
            # This may be an academic question since most apps will continue to send
            # an ever increasing list of default files (to mask out old/unused ones)
            self.stdout.write("Keeping {}\n".format(fn))
            files_to_keep.append(fn)
        else:
            files_to_delete.append(fn)
    if files_to_keep:
        self.stdout.write("Keeping {} of {} files marked for deletion due to whitelist.\n"
                          .format(len(files_to_keep), len(files_del)))
    git_rm_queue = []
    if files_to_delete:
        self.stdout.write("Removing files not present in the upgraded version of the app.\n")
    for fn in files_to_delete:
        path = os.path.join(dest_app, fn)
        if is_git and args.git_mode in ("stage", "commit"):
            self.stdout.write("git rm -f {}\n".format(path))
            git_rm_queue.append(fn)
        else:
            self.stdout.write("rm -f {}\n".format(path))
            os.unlink(path)
    if git_rm_queue:
        # Run 'git rm file1 file2 file3 ..." (using an xargs like mechanism)
        git_cmd_iterable(["rm"], git_rm_queue, cwd=dest_app)
    del git_rm_queue

    # --- GIT STAGE / COMMIT --------------------------------------------------
    if is_git:
        if args.git_mode in ("stage", "commit"):
            git_cmd(["add", os.path.basename(dest_app)], cwd=os.path.dirname(dest_app))
            # self.stdout.write("git add {}\n".format(os.path.basename(dest_app)))
        '''
        else:
            self.stdout.write("git add {}\n".format(dest_app))
        '''
        # Is there anything to stage/commit?
        if git_is_clean(os.path.dirname(dest_app), check_untracked=False):
            self.stderr.write("No changes detected. Nothing to {}\n".format(args.git_mode))
            return
        # Build a commit message from the app.conf packaging info.
        git_commit_app_name = app_conf.get("ui", {}).get("label",
                                                         os.path.basename(dest_app))
        git_commit_new_version = app_conf.get("launcher", {}).get("version", None)
        if mode == "install":
            git_commit_message = "Install {}".format(git_commit_app_name)
            if git_commit_new_version:
                git_commit_message += " version {}".format(git_commit_new_version)
        else:
            # Todo: Specify Upgrade/Downgrade/Refresh
            git_commit_message = "Upgrade {}".format(git_commit_app_name)
            git_commit_old_version = old_app_conf.get("launcher", {}).get("version", None)
            if git_commit_old_version and git_commit_new_version:
                git_commit_message += " version {} (was {})".format(git_commit_new_version,
                                                                    git_commit_old_version)
            elif git_commit_new_version:
                git_commit_message += " to version {}".format(git_commit_new_version)
        # Could possibly include some CLI arg details, like what file patterns were excluded
        git_commit_message += "\n\nSHA256 {} {}\n\nSplunk-App-managed-by: ksconf" \
            .format(f_hash, os.path.basename(args.tarball))
        git_commit_cmd = ["commit", os.path.basename(dest_app), "-m", git_commit_message]
        if not args.no_edit:
            git_commit_cmd.append("--edit")
        git_commit_cmd.extend(args.git_commit_args)
        if args.git_mode == "commit":
            capture_std = True if args.no_edit else False
            proc = git_cmd(git_commit_cmd, cwd=os.path.dirname(dest_app),
                           capture_std=capture_std)
            if proc.returncode == 0:
                self.stderr.write(dedent("""\
                Your changes have been committed. Please review before pushing. If you find
                any issues, here are some possible solutions:

                To fix issues in the last commit, edit and add the files to be fixed, then run:

                    git commit --amend

                To roll back the last commit but KEEP the app upgrade, run:

                    git reset --soft HEAD^1

                To roll back the last commit and REVERT the app upgrade, run:

                    git reset --hard HEAD^1

                NOTE: Make sure you have *no* other uncommitted changes before running 'reset'.
                """))
            else:
                self.stderr.write("Git commit failed. Return code {}. Git args: git {}\n"
                                  .format(proc.returncode, list2cmdline(git_commit_cmd)))
                return EXIT_CODE_GIT_FAILURE
        elif args.git_mode == "stage":
            self.stdout.write("To commit later, use the following\n")
            self.stdout.write(
                "\tgit {}\n".format(list2cmdline(git_commit_cmd).replace("\n", "\\n")))
def feed_file(self, stream):
    """Parse *stream* as a conf file and feed the result into feed_conf()."""
    self.feed_conf(parse_conf(stream))
def test_combine_3dir(self):
    """Combine three layered default.d directories and verify precedence:
    later layers (20-corp, 60-dept) override earlier ones (10-upstream)."""
    twd = TestWorkDir()
    twd.write_file("etc/apps/Splunk_TA_aws/default.d/10-upstream/props.conf", """
[aws:config]
SHOULD_LINEMERGE = false
TRUNCATE = 8388608
TIME_PREFIX = configurationItemCaptureTime"\s*:\s*"
TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%3NZ
TZ = GMT
MAX_TIMESTAMP_LOOKAHEAD = 28
KV_MODE = json
ANNOTATE_PUNCT = false
FIELDALIAS-dest = resourceType AS dest
FIELDALIAS-object = resourceId AS object
FIELDALIAS-object_id = ARN AS object_id
EVAL-change_type = "configuration"
EVAL-dvc = "AWS Config"
EVAL-status="success"
LOOKUP-action= aws_config_action_lookup status AS configurationItemStatus OUTPUT action
LOOKUP-object_category = aws_config_object_category_lookup type AS resourceType OUTPUT object_category
# unify account ID field
FIELDALIAS-aws-account-id = awsAccountId as aws_account_id
FIELDALIAS-region-for-aws-config = awsRegion AS region
""")
    twd.write_file("etc/apps/Splunk_TA_aws/default.d/10-upstream/data/ui/nav/default.xml", """
<nav search_view="search" color="#65A637">
<view name="Inputs" default="true" label="Inputs" />
<view name="Configuration" default="false" label="Configuration" />
<view name="search" default="false" label="Search" />
</nav>
""")
    # In the future there will be a more efficient way to handle the global 'ANNOTATE_PUCT' scenario
    twd.write_file("etc/apps/Splunk_TA_aws/default.d/20-corp/props.conf", """
[aws:config]
TZ = UTC
# Corp want's punct to be enabled globally
ANNOTATE_PUNCT = true
""")
    twd.write_file("etc/apps/Splunk_TA_aws/default.d/60-dept/props.conf", """
[aws:config]
# Our config is bigger than yours!
TRUNCATE = 9999999
""")
    twd.write_file("etc/apps/Splunk_TA_aws/default.d/60-dept/data/ui/nav/default.xml", """
<nav search_view="search" color="#65A637">
<view name="My custom view" />
<view name="Inputs" default="true" label="Inputs" />
<view name="Configuration" default="false" label="Configuration" />
<view name="search" default="false" label="Search" />
</nav>
""")
    default = twd.get_path("etc/apps/Splunk_TA_aws/default")
    with ksconf_cli:
        ko = ksconf_cli("combine", "--target", default, default + ".d/*")
        cfg = parse_conf(twd.get_path("etc/apps/Splunk_TA_aws/default/props.conf"))
        self.assertIn("aws:config", cfg)
        # 20-corp overrides upstream's ANNOTATE_PUNCT; 60-dept overrides TRUNCATE.
        self.assertEqual(cfg["aws:config"]["ANNOTATE_PUNCT"], "true")
        self.assertEqual(cfg["aws:config"]["EVAL-change_type"], '"configuration"')
        self.assertEqual(cfg["aws:config"]["TRUNCATE"], '9999999')
        nav_content = twd.read_file("etc/apps/Splunk_TA_aws/default/data/ui/nav/default.xml")
        self.assertIn("My custom view", nav_content)
    # NOTE(review): these two layers use slightly different directory names
    # ("99-theforce" vs "99-the-force") — looks unintentional; confirm whether
    # both should be the same layer.
    twd.write_conf("etc/apps/Splunk_TA_aws/default.d/99-theforce/props.conf", {
        "aws:config": {"TIME_FORMAT": "%Y-%m-%dT%H:%M:%S.%6NZ"}})
    twd.write_file("etc/apps/Splunk_TA_aws/default.d/99-the-force/data/ui/nav/default.xml", """
<nav search_view="search" color="#65A637">
<view name="My custom view" />
<view name="Inputs" default="true" label="Inputs" />
<view name="Configuration" default="false" label="Configuration" />
</nav>
""")
    twd.write_file("etc/apps/Splunk_TA_aws/default/data/dead.conf", "# File to remove")
    with ksconf_cli:
        # Dry-run should report the pending nav removal and TIME_FORMAT change.
        ko = ksconf_cli("combine", "--dry-run", "--target", default, default + ".d/*")
        self.assertRegex(ko.stdout, r'[\r\n][-]\s*<view name="search"')
        self.assertRegex(ko.stdout, r"[\r\n][+]TIME_FORMAT = [^\r\n]+%6N")
    with ksconf_cli:
        ko = ksconf_cli("combine", "--target", default, default + ".d/*")