def dump_ds(d, space=""):
    """Print a human-readable description of a datasource to stdout.

    Walks the datasource *d* (an insights spec/datasource object) and prints
    its class, delegate flags, and type-specific details (path, command,
    candidate paths, glob patterns, ignore pattern) plus any filters that
    have been registered against it.

    Parameters:
        d: the datasource object to describe.
        space: leading indent string prepended to every printed line.

    Returns: None (output goes to stdout only).
    """
    # NOTE(review): `space * 2` is "" when space is "" — nested items only
    # indent further when a non-empty prefix is passed in. Preserved as-is.
    dbl_space = space * 2
    delegate = dr.get_delegate(d)
    # Some objects may lack these attributes; best-effort printing is
    # intentional, but catch Exception rather than a bare except so
    # KeyboardInterrupt / SystemExit still propagate.
    try:
        print(space + "Class: %s" % d.__class__)
    except Exception:
        pass
    try:
        print(space + "Filtered: %s" % delegate.filterable)
    except Exception:
        pass
    print(space + "Raw: %s" % delegate.raw)
    print(space + "Multioutput: %s" % delegate.multi_output)
    if isinstance(d, sf.simple_file):
        print(space + "Path: %s" % d.path)
    if isinstance(d, (sf.simple_command, sf.foreach_execute)):
        print(space + "Command: %s" % d.cmd)
    if isinstance(d, sf.first_file):
        print(space + "Paths:")
        for p in d.paths:
            print(dbl_space + p)
    if isinstance(d, sf.glob_file):
        print(space + "Patterns:")
        for p in d.patterns:
            print(dbl_space + p)
        print(space + "Ignore: %s" % d.ignore)
    if isinstance(d, sf.foreach_collect):
        print(space + "Path: %s" % d.path)
        print(space + "Ignore: %s" % d.ignore)
    filters = get_filters(d)
    if filters:
        print(space + "Filters:")
        for f in filters:
            print(dbl_space + f)
# Rewrite the uploader.json spec list with the filter patterns currently
# registered against each RegistryPoint spec, and stamp a new version.
# NOTE(review): `fp` is an already-open file handle created outside this
# chunk — presumably the uploader.json being updated; confirm at caller.
# OrderedDict preserves the original key order of the JSON document.
uploader_json = json.load(fp, object_pairs_hook=OrderedDict)
# Loading these component packages registers their filters as a side effect.
dr.load_components("insights.specs.default")
dr.load_components("insights.parsers")
dr.load_components("insights.combiners")
# Any extra packages named on the command line get loaded too
# (argv[0] is the script, argv[1] presumably consumed earlier — verify).
for package in sys.argv[2:]:
    dr.load_components(package)
# NOTE(review): at this point `filters` is the filters *module*; its dump()
# return value is discarded here — looks like a side-effect/debug call.
filters.dump()
specs = sorted(vars(Specs))
# From here on, `filters` is rebound to a dict, shadowing the module above —
# any later module use would break; ordering of these statements matters.
filters = {}
for spec in specs:
    s = getattr(Specs, spec)
    # NOTE(review): exact-type check excludes RegistryPoint subclasses;
    # possibly deliberate (isinstance would be the usual idiom) — confirm.
    if type(s) == RegistryPoint:
        f = get_filters(s)
        if f:
            filters[spec] = sorted(f)
# Attach the collected patterns to every matching entry across the three
# spec sections of the uploader document.
for spec in chain.from_iterable(uploader_json[i] for i in ("commands", "files", "globs")):
    if spec["symbolic_name"] in filters:
        spec["pattern"] = filters[spec["symbolic_name"]]
uploader_json["version"] = datetime.now().isoformat()
# Strip the trailing space after commas that older (Python 2 era)
# json.dumps(indent=...) emitted, so output diffs stay clean.
pattern = re.compile(", $")
output = "\n".join(
    pattern.sub(",", l) for l in json.dumps(uploader_json, indent=4).splitlines())