def cmd_dump(self, items: Iterable, args: str) -> Iterable:
    """Usage: | dump [--json] [--private]

    Dumps details about the resources. Optionally dump them as one JSON object.
    Beware that dumping large datasets as JSON requires the entire dataset to
    be in memory.
    If --private is given private resource attributes (those starting with _)
    will be included in the dump.
    """
    dump_json = False
    json_out = []
    args = args.split(" ")
    if "--json" in args:
        dump_json = True
    exclude_private = "--private" not in args

    for item in items:
        if not isinstance(item, BaseResource):
            raise RuntimeError(
                f"Item {item} is not a valid resource - dumping failed"
            )
        out = resource2dict(item, exclude_private, self.graph)
        if dump_json:
            json_out.append(out)
        else:
            yield ("---\n" + yaml.dump(out, Dumper=yaml.Dumper).strip())
    if dump_json:
        yield (fmt_json(json_out))
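# resource2dict() is not defined in this snippet. Judging by the older inline
# variants of cmd_dump further down (which collect the same fields by hand),
# it plausibly combines the resource's attributes with its cloud/account/
# region/zone ancestry and its graph neighbours. The following is only a
# minimal sketch under that assumption, reusing get_resource_attributes() and
# the BaseResource helpers as they are used elsewhere in this file; it is not
# the library's actual implementation.
def resource2dict(item, exclude_private: bool, graph) -> dict:
    out = get_resource_attributes(item, exclude_private=exclude_private)
    # Resolve the resource's ancestry in the graph and record ids and names.
    for ancestor in ("cloud", "account", "region", "zone"):
        node = getattr(item, ancestor)(graph)
        out[f"{ancestor}_id"] = node.id
        out[f"{ancestor}_name"] = node.name
    out["event_log"] = item.event_log
    out["predecessors"] = [i.sha256 for i in item.predecessors(graph)]
    out["successors"] = [i.sha256 for i in item.successors(graph)]
    return out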
def cmd_jq(self, items: Iterable, args: str) -> Iterable:
    """Usage: | jq <jq filter> |

    Run jq JSON processor against the input, each item of which must be
    either a Cloudkeeper resource or a JSON string.
    """
    compiled_jq = jq.compile(args)
    for item in items:
        if isinstance(item, BaseResource):
            item = fmt_json(resource2dict(item, True, self.graph))
        elif not isinstance(item, str):
            continue
        yield from compiled_jq.input(text=item).all()
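# Minimal illustration of the jq binding calls cmd_jq relies on: compile the
# filter once, feed each JSON string via input(text=...), and collect every
# output with all(). The filter and input document below are made up for the
# example.
import jq

program = jq.compile(".name")
outputs = program.input(text='{"name": "i-0123456789"}').all()
print(outputs)  # ["i-0123456789"]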
def cmd_dump(self, items: Iterable, args: str) -> Iterable:
    """Usage: | dump [--json] [--private]

    Dumps details about the resources. Optionally dump them as one JSON object.
    Beware that dumping large datasets as JSON requires the entire dataset to
    be in memory.
    If --private is given private resource attributes (those starting with _)
    will be included in the dump.
    """
    dump_json = False
    json_out = []
    args = args.split(" ")
    if "--json" in args:
        dump_json = True
    exclude_private = "--private" not in args

    for item in items:
        if not isinstance(item, BaseResource):
            raise RuntimeError(
                f"Item {item} is not a valid resource - dumping failed"
            )
        out = get_resource_attributes(item, exclude_private=exclude_private)
        cloud = item.cloud(self.graph)
        account = item.account(self.graph)
        region = item.region(self.graph)
        zone = item.zone(self.graph)
        out["cloud_id"] = cloud.id
        out["account_id"] = account.id
        out["region_id"] = region.id
        out["zone_id"] = zone.id
        out["cloud_name"] = cloud.name
        out["account_name"] = account.name
        out["region_name"] = region.name
        out["zone_name"] = zone.name
        out["event_log"] = item.event_log
        out["predecessors"] = [i.sha256 for i in item.predecessors(self.graph)]
        out["successors"] = [i.sha256 for i in item.successors(self.graph)]
        if dump_json:
            json_out.append(out)
        else:
            yield (pformat(out))
    if dump_json:
        yield (fmt_json(json_out))
def cmd_dump(self, items: Iterable, args: str) -> Iterable:
    """Usage: | dump [--json]

    Dumps details about the resources. Optionally dump them as one JSON object.
    Beware that dumping large datasets as JSON requires the entire dataset to
    be in memory.
    """
    dump_json = False
    json_out = []
    if args == "--json":
        dump_json = True
    for item in items:
        if not isinstance(item, BaseResource):
            raise RuntimeError(
                f"Item {item} is not a valid resource - dumping failed"
            )
        out = get_resource_attributes(item)
        cloud = item.cloud(self.graph)
        account = item.account(self.graph)
        region = item.region(self.graph)
        out["cloud_id"] = cloud.id
        out["account_id"] = account.id
        out["region_id"] = region.id
        out["cloud_name"] = cloud.name
        out["account_name"] = account.name
        out["region_name"] = region.name
        out["event_log"] = item.event_log
        out["predecessors"] = [i.sha256 for i in item.predecessors(self.graph)]
        out["successors"] = [i.sha256 for i in item.successors(self.graph)]
        if dump_json:
            json_out.append(out)
        else:
            yield (pformat(out))
    if dump_json:
        yield (fmt_json(json_out))
def cmd_debug_proc_info(self, items: Iterable, args: str) -> Iterable:
    """Usage: debug_proc_info

    Show system information.
    """
    yield fmt_json(get_stats(self.graph))
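# fmt_json() is used by several of the commands above but is not defined in
# this snippet. A minimal sketch, assuming it is a thin wrapper around
# json.dumps() that tolerates non-JSON-serializable attribute values; the
# exact formatting options here are an assumption, not the library's real code.
import json

def fmt_json(value) -> str:
    return json.dumps(value, indent=4, sort_keys=True, default=str)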