Example #1
    def report_cleanup(self, event: Event):
        graph = event.data

        now = datetime.utcnow().replace(tzinfo=timezone.utc)
        report_file_prefix = f'cleanup_report_{now.strftime("%Y-%m-%d_%H-%M-%S")}.'
        report_file = self.report_cleanups_path / (
            report_file_prefix + ArgumentParser.args.report_cleanups_format)

        log.info(f'Writing Cleanup Report to {report_file}')
        rows = []
        with graph.lock.read_access:
            for node in graph.nodes:
                if isinstance(node, BaseResource) and node.cleaned:
                    cloud = node.cloud(graph)
                    account = node.account(graph)
                    region = node.region(graph)

                    if not isinstance(cloud, BaseCloud) or not isinstance(
                            account, BaseAccount) or not isinstance(
                                region, BaseRegion):
                        log.error(
                            f'Unable to determine cloud ({cloud}), account ({account}) or region ({region}) for node {node.dname}'
                        )
                        continue

                    row = {
                        'datetime': now.isoformat(),
                        'cloud': cloud.name,
                        'account': account.name,
                        'region': region.name,
                        **get_resource_attributes(node)
                    }
                    rows.append(row)

        with report_file.open('w') as report_file_io:
            if ArgumentParser.args.report_cleanups_format == 'csv':
                fieldnames = [
                    'datetime', 'cloud', 'account', 'region', 'resource_type',
                    'id', 'name', 'ctime'
                ]
                fieldnames.extend(ArgumentParser.args.report_cleanups_add_attr)
                # for CSV we remove any unwanted attributes and initialize wanted missing ones
                # for JSON we leave them all intact
                for row in rows:
                    for attr in list(row.keys()):
                        if attr not in fieldnames:
                            del row[attr]
                    for attr in fieldnames:
                        if attr not in row:
                            row[attr] = ''

                writer = csv.DictWriter(report_file_io, fieldnames=fieldnames)
                writer.writeheader()
                writer.writerows(rows)
            elif ArgumentParser.args.report_cleanups_format == 'json':
                json.dump(rows, report_file_io)
            else:
                log.error(
                    f'Unknown output format: {ArgumentParser.args.report_cleanups_format}'
                )
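
A minimal, self-contained sketch of the CSV row normalization commented above (drop attributes not listed in fieldnames, fill missing ones with empty strings so csv.DictWriter gets a complete row); the sample fieldnames and row are hypothetical, not taken from a real cleanup run:

import csv
import sys

fieldnames = ["datetime", "cloud", "account", "region", "id", "name"]
rows = [
    # hypothetical resource row: one extra attribute, several missing ones
    {"datetime": "2021-01-01T00:00:00+00:00", "cloud": "aws", "id": "i-123",
     "extra_attr": "dropped for CSV"},
]

for row in rows:
    # remove attributes csv.DictWriter was not configured for
    for attr in list(row.keys()):
        if attr not in fieldnames:
            del row[attr]
    # initialize wanted but missing attributes so every column is present
    for attr in fieldnames:
        if attr not in row:
            row[attr] = ""

writer = csv.DictWriter(sys.stdout, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(rows)
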
Example #2
    def __init__(self, project: GCPProject) -> None:
        self.project = project
        self.credentials = Credentials.get(self.project.id)
        self.root = self.project
        self.graph = Graph()
        resource_attr = get_resource_attributes(self.root)
        self.graph.add_node(self.root, label=self.root.name, **resource_attr)

        self.mandatory_collectors = {
            "regions": self.collect_regions,
            "zones": self.collect_zones,
        }
        self.global_collectors = {
            "networks": self.collect_networks,
            "instances": self.collect_instances,
            "disk_types": self.collect_disk_types,
            "disks": self.collect_disks,
        }
        self.region_collectors = {}
        self.zone_collectors = {}
        self.all_collectors = dict(self.mandatory_collectors)
        self.all_collectors.update(self.global_collectors)
        self.all_collectors.update(self.region_collectors)
        self.all_collectors.update(self.zone_collectors)
        self.collector_set = set(self.all_collectors.keys())
Example #3
    def __init__(self) -> None:
        super().__init__()
        self.name = str(self.cloud)
        self.root = Cloud(self.cloud, {})
        self.graph = Graph()
        self.finished = False
        resource_attributes = get_resource_attributes(self.root)
        self.graph.add_node(self.root, label=self.root.id, **resource_attributes)
Example #4
    def cmd_dump(self, items: Iterable, args: str) -> Iterable:
        """Usage: | dump [--json] [--private]

        Dumps details about the resources.
        Optionally dump them as one JSON object.
        Beware that dumping large datasets as JSON requires
        the entire dataset to be in memory.

        If --private is given, private resource attributes
        (those starting with _) will be included in the dump.
        """
        dump_json = False
        json_out = []
        args = args.split(" ")
        if "--json" in args:
            dump_json = True
        exclude_private = "--private" not in args

        for item in items:
            if not isinstance(item, BaseResource):
                raise RuntimeError(
                    f"Item {item} is not a valid resource - dumping failed")
            out = get_resource_attributes(item,
                                          exclude_private=exclude_private)
            cloud = item.cloud(self.graph)
            account = item.account(self.graph)
            region = item.region(self.graph)
            zone = item.zone(self.graph)
            out["cloud_id"] = cloud.id
            out["account_id"] = account.id
            out["region_id"] = region.id
            out["zone_id"] = zone.id
            out["cloud_name"] = cloud.name
            out["account_name"] = account.name
            out["region_name"] = region.name
            out["zone_name"] = zone.name
            out["event_log"] = item.event_log
            out["predecessors"] = [
                i.sha256 for i in item.predecessors(self.graph)
            ]
            out["successors"] = [i.sha256 for i in item.successors(self.graph)]
            if dump_json:
                json_out.append(out)
            else:
                yield (pformat(out))
        if dump_json:
            yield (fmt_json(json_out))
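
The docstring above says --private includes attributes whose names start with an underscore; a tiny standalone sketch of that filtering behaviour (a hypothetical helper with made-up attribute names, not the project's actual get_resource_attributes):

def filter_private_attributes(attributes: dict, exclude_private: bool = True) -> dict:
    # hypothetical helper: drop attributes starting with "_" unless --private was given
    if not exclude_private:
        return dict(attributes)
    return {k: v for k, v in attributes.items() if not k.startswith("_")}


attrs = {"id": "i-123", "name": "web-1", "_private_note": "hidden by default"}
print(filter_private_attributes(attrs))                         # {'id': 'i-123', 'name': 'web-1'}
print(filter_private_attributes(attrs, exclude_private=False))  # includes _private_note
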
Example #5
    def cmd_dump(self, items: Iterable, args: str) -> Iterable:
        '''Usage: | dump [--json]

        Dumps details about the resources.
        Optionally dump them as one JSON object.
        Beware that dumping large datasets as JSON requires
        the entire dataset to be in memory.
        '''
        dump_json = False
        json_out = []
        if args == '--json':
            dump_json = True

        for item in items:
            if not isinstance(item, BaseResource):
                raise RuntimeError(
                    f'Item {item} is not a valid resource - dumping failed')
            out = get_resource_attributes(item)
            cloud = item.cloud(self.graph)
            account = item.account(self.graph)
            region = item.region(self.graph)
            out['cloud_id'] = cloud.id
            out['account_id'] = account.id
            out['region_id'] = region.id
            out['cloud_name'] = cloud.name
            out['account_name'] = account.name
            out['region_name'] = region.name
            out['event_log'] = item.event_log
            out['predecessors'] = [
                i.sha256 for i in item.predecessors(self.graph)
            ]
            out['successors'] = [i.sha256 for i in item.successors(self.graph)]
            if dump_json:
                json_out.append(out)
            else:
                yield (pformat(out))
        if dump_json:
            yield (json.dumps(json_out,
                              default=json_default,
                              skipkeys=True,
                              indent=4,
                              separators=(',', ': '),
                              sort_keys=True))
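
The json.dumps call above relies on a fallback serializer named json_default that is defined elsewhere in the codebase; a plausible minimal stand-in (an assumption, not the project's actual implementation) converts values the json module cannot handle, such as datetimes and sets:

import json
from datetime import datetime, timezone


def json_default(o):
    # hypothetical fallback serializer; the real json_default may behave differently
    if isinstance(o, datetime):
        return o.isoformat()
    if isinstance(o, set):
        return sorted(o)
    return str(o)


sample = {"ctime": datetime(2021, 1, 1, tzinfo=timezone.utc), "tags": {"env", "prod"}}
print(json.dumps(sample, default=json_default, skipkeys=True, indent=4,
                 separators=(",", ": "), sort_keys=True))
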
Example #6
    def cmd_dump(self, items: Iterable, args: str) -> Iterable:
        """Usage: | dump [--json]

        Dumps details about the resources.
        Optionally dump them as one JSON object.
        Beware that dumping large datasets as JSON requires
        the entire dataset to be in memory.
        """
        dump_json = False
        json_out = []
        if args == "--json":
            dump_json = True

        for item in items:
            if not isinstance(item, BaseResource):
                raise RuntimeError(
                    f"Item {item} is not a valid resource - dumping failed")
            out = get_resource_attributes(item)
            cloud = item.cloud(self.graph)
            account = item.account(self.graph)
            region = item.region(self.graph)
            out["cloud_id"] = cloud.id
            out["account_id"] = account.id
            out["region_id"] = region.id
            out["cloud_name"] = cloud.name
            out["account_name"] = account.name
            out["region_name"] = region.name
            out["event_log"] = item.event_log
            out["predecessors"] = [
                i.sha256 for i in item.predecessors(self.graph)
            ]
            out["successors"] = [i.sha256 for i in item.successors(self.graph)]
            if dump_json:
                json_out.append(out)
            else:
                yield (pformat(out))
        if dump_json:
            yield (fmt_json(json_out))
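
This variant delegates serialization to fmt_json; given the inline json.dumps call in Example #5, fmt_json is presumably a thin wrapper along these lines (an assumption, the project's actual helper may differ):

import json


def fmt_json(obj) -> str:
    # hypothetical wrapper mirroring the json.dumps call in Example #5;
    # json_default is the project's fallback serializer (sketched after Example #5)
    return json.dumps(obj, default=json_default, skipkeys=True, indent=4,
                      separators=(",", ": "), sort_keys=True)
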
Example #7
    def cmd_format(self, items: Iterable, args: str) -> Iterable:
        """Usage: | format "{attr}: {attr.name}" |

        String format the output.
        Any resource attribute returned in the dump output can be used.
        If a resource is missing a referenced attribute, an empty string
        will be printed instead.

        Example:
            > match resource_type = gcp_instance |
              format {name} {instance_cores} {instance_memory/1024}

        See
            https://docs.python.org/3.8/library/string.html#formatspec
        for more on the Python Format Specification Mini-Language.
        """
        for item in items:
            if not isinstance(item, BaseResource):
                raise RuntimeError(
                    f"Item {item} is not a valid resource - formatting failed"
                )
            fmt = defaultdict(str)
            out = get_resource_attributes(item)
            cloud = item.cloud(self.graph)
            account = item.account(self.graph)
            region = item.region(self.graph)
            zone = item.zone(self.graph)
            out["cloud"] = cloud
            out["account"] = account
            out["region"] = region
            out["zone"] = zone
            out["predecessors"] = [i.sha256 for i in item.predecessors(self.graph)]
            out["successors"] = [i.sha256 for i in item.successors(self.graph)]
            out["resource"] = item
            fmt.update(out)
            fmt_item = args.format_map(fmt)
            yield fmt_item
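
The empty-string behaviour described in the docstring comes from wrapping the attribute dict in a defaultdict(str) before calling format_map; a minimal standalone demonstration with made-up attribute names:

from collections import defaultdict

fmt = defaultdict(str)
fmt.update({"name": "web-1", "instance_cores": 2})

# "instance_memory" is missing, so defaultdict(str) substitutes an empty string
print("{name} cores={instance_cores} mem={instance_memory}".format_map(fmt))
# -> web-1 cores=2 mem=
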
Example #8
    def __init__(self, project: GCPProject) -> None:
        """
        Args:
            project: The GCP project resource object this project collector
                is going to collect.
        """
        self.project = project
        self.credentials = Credentials.get(self.project.id)
        self.root = self.project
        self.graph = Graph()
        resource_attr = get_resource_attributes(self.root)
        self.graph.add_node(self.root, label=self.root.name, **resource_attr)

        # Mandatory collectors are always collected regardless of whether
        # they were included by --gcp-collect or excluded by --gcp-no-collect
        self.mandatory_collectors = {
            "regions": self.collect_regions,
            "zones": self.collect_zones,
        }
        # Global collectors collect resources that are either defined at the
        # global level (as opposed to per region or per zone) or are
        # region/zone resources that provide an aggregatedList() call
        # returning all resources across all regions/zones.
        self.global_collectors = {
            "networks": self.collect_networks,
            "subnetworks": self.collect_subnetworks,
            "routers": self.collect_routers,
            "routes": self.collect_routes,
            "health_checks": self.collect_health_checks,
            "http_health_checks": self.collect_http_health_checks,
            "https_health_checks": self.collect_https_health_checks,
            "machine_types": self.collect_machine_types,
            "instances": self.collect_instances,
            "disk_types": self.collect_disk_types,
            "disks": self.collect_disks,
            "target_vpn_gateways": self.collect_target_vpn_gateways,
            "vpn_gateways": self.collect_vpn_gateways,
            "vpn_tunnels": self.collect_vpn_tunnels,
            "security_policies": self.collect_security_policies,
            "snapshots": self.collect_snapshots,
            "ssl_certificates": self.collect_ssl_certificates,
            "network_endpoint_groups": self.collect_network_endpoint_groups,
            "instance_groups": self.collect_instance_groups,
            "instance_group_managers": self.collect_instance_group_managers,
            "autoscalers": self.collect_autoscalers,
            "backend_services": self.collect_backend_services,
            "url_maps": self.collect_url_maps,
            "target_pools": self.collect_target_pools,
            "target_instances": self.collect_target_instances,
            "target_http_proxies": self.collect_target_http_proxies,
            "target_https_proxies": self.collect_target_https_proxies,
            "target_ssl_proxies": self.collect_target_ssl_proxies,
            "target_tcp_proxies": self.collect_target_tcp_proxies,
            "target_grpc_proxies": self.collect_target_grpc_proxies,
            "forwarding_rules": self.collect_forwarding_rules,
            "buckets": self.collect_buckets,
            "databases": self.collect_databases,
        }
        # Region collectors collect resources in a single region.
        # They are passed the GCPRegion resource object as the `region` arg.
        self.region_collectors = {
            "region_ssl_certificates": self.collect_region_ssl_certificates,
            "region_target_http_proxies": self.collect_region_target_http_proxies,
            "region_target_https_proxies": self.collect_region_target_https_proxies,
        }
        # Zone collectors are called once per zone.
        # They are passed the GCPZone resource object as the `zone` arg.
        self.zone_collectors = {}
        self.all_collectors = dict(self.mandatory_collectors)
        self.all_collectors.update(self.global_collectors)
        self.all_collectors.update(self.region_collectors)
        self.all_collectors.update(self.zone_collectors)
        self.collector_set = set(self.all_collectors.keys())
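
A sketch of how the --gcp-collect / --gcp-no-collect flags mentioned in the comments could be applied to collector_set; the actual filtering happens elsewhere in the collector and may differ, but mandatory collectors are always kept:

def filter_collectors(collector_set: set, mandatory: set,
                      collect: list = None, no_collect: list = None) -> set:
    # hypothetical helper: narrow by --gcp-collect, drop --gcp-no-collect,
    # then force the mandatory collectors back in
    wanted = set(collector_set)
    if collect:
        wanted &= set(collect)
    if no_collect:
        wanted -= set(no_collect)
    return wanted | mandatory


# e.g. only collect instances and disks, never buckets; regions/zones stay mandatory
print(filter_collectors({"regions", "zones", "instances", "disks", "buckets"},
                        mandatory={"regions", "zones"},
                        collect=["instances", "disks"]))
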