def _transform(self, resource_from_api):
    """Create an iterator of AppEngine applications to load into database.

    Args:
        resource_from_api (dict): AppEngine applications, keyed by
            project id, from GCP API.

    Yields:
        dict: One AppEngine application per project, flattened for
            database loading.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, app in resource_from_api.items():
        yield {
            'project_id': project_id,
            'name': app.get('name'),
            'app_id': app.get('id'),
            'dispatch_rules': parser.json_stringify(
                app.get('dispatchRules', [])),
            'auth_domain': app.get('authDomain'),
            'location_id': app.get('locationId'),
            'code_bucket': app.get('codeBucket'),
            'default_cookie_expiration': app.get('defaultCookieExpiration'),
            'serving_status': app.get('servingStatus'),
            'default_hostname': app.get('defaultHostname'),
            'default_bucket': app.get('defaultBucket'),
            'iap': parser.json_stringify(app.get('iap', {})),
            'gcr_domain': app.get('gcrDomain'),
            'raw_application': parser.json_stringify(app),
        }
def _transform(self, resource_from_api):
    """Create an iterator of instance groups to load into database.

    Args:
        resource_from_api (dict): Instance groups, keyed by project id,
            from GCP API.

    Yields:
        dict: Instance group properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, instance_groups in resource_from_api.items():
        for instance_group in instance_groups:
            yield {
                'project_id': project_id,
                'id': instance_group.get('id'),
                'creation_timestamp': parser.format_timestamp(
                    instance_group.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'name': instance_group.get('name'),
                'description': instance_group.get('description'),
                'named_ports': parser.json_stringify(
                    instance_group.get('namedPorts', [])),
                'network': instance_group.get('network'),
                'region': instance_group.get('region'),
                'size': self._to_int(instance_group.get('size')),
                'subnetwork': instance_group.get('subnetwork'),
                'zone': instance_group.get('zone'),
                'raw_instance_group': parser.json_stringify(instance_group),
            }
def _transform(self, resource_from_api):
    """Create an iterator of instance templates to load into database.

    Args:
        resource_from_api (dict): Instance templates, keyed by project id,
            from GCP API.

    Yields:
        dict: Instance template properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, instance_templates in resource_from_api.items():
        for instance_template in instance_templates:
            yield {
                'project_id': project_id,
                'id': instance_template.get('id'),
                'creation_timestamp': parser.format_timestamp(
                    instance_template.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'name': instance_template.get('name'),
                'description': instance_template.get('description'),
                'properties': parser.json_stringify(
                    instance_template.get('properties', {})),
                'raw_instance_template':
                    parser.json_stringify(instance_template),
            }
def _transform(self, resource_from_api):
    """Create an iterator of instance group managers to load into database.

    Args:
        resource_from_api (dict): Instance group managers, keyed by
            project id, from GCP API.

    Yields:
        dict: Instance group manager properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, igms in resource_from_api.items():
        for igm in igms:
            yield {
                'project_id': project_id,
                'id': igm.get('id'),
                'creation_timestamp': parser.format_timestamp(
                    igm.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'name': igm.get('name'),
                'description': igm.get('description'),
                'base_instance_name': igm.get('baseInstanceName'),
                'current_actions': parser.json_stringify(
                    igm.get('currentActions', {})),
                'instance_group': igm.get('instanceGroup'),
                'instance_template': igm.get('instanceTemplate'),
                'named_ports': parser.json_stringify(
                    igm.get('namedPorts', [])),
                'region': igm.get('region'),
                'target_pools': parser.json_stringify(
                    igm.get('targetPools', [])),
                'target_size': igm.get('targetSize'),
                'zone': igm.get('zone'),
                'raw_instance_group_manager': parser.json_stringify(igm),
            }
def _transform(firewall_dict, project_id=None, validate=None):
    """Transforms firewall dictionary into FirewallRule.

    Args:
        firewall_dict (dict): A dictionary with firewall field names
            matching the API field names.
        project_id (str): A project id string.
        validate (bool): Whether to validate this FirewallRule or not.

    Returns:
        FirewallRule: A FirewallRule created from the input dictionary.
    """
    in_dict = {
        'firewall_rule_id': firewall_dict.get('id'),
        'firewall_rule_name': firewall_dict.get('name'),
        'firewall_rule_description': firewall_dict.get('description'),
        'firewall_rule_kind': firewall_dict.get('kind'),
        'firewall_rule_network': firewall_dict.get('network'),
        'firewall_rule_priority': firewall_dict.get('priority'),
        'firewall_rule_direction': firewall_dict.get('direction'),
        'firewall_rule_source_ranges':
            parser.json_stringify(firewall_dict.get('sourceRanges')),
        'firewall_rule_destination_ranges':
            parser.json_stringify(firewall_dict.get('destinationRanges')),
        'firewall_rule_source_tags':
            parser.json_stringify(firewall_dict.get('sourceTags')),
        'firewall_rule_target_tags':
            parser.json_stringify(firewall_dict.get('targetTags')),
        'firewall_rule_source_service_accounts':
            parser.json_stringify(firewall_dict.get('sourceServiceAccounts')),
        'firewall_rule_target_service_accounts':
            parser.json_stringify(firewall_dict.get('targetServiceAccounts')),
        'firewall_rule_allowed':
            parser.json_stringify(firewall_dict.get('allowed')),
        'firewall_rule_denied':
            parser.json_stringify(firewall_dict.get('denied')),
        'firewall_rule_self_link':
            parser.json_stringify(firewall_dict.get('selfLink')),
        # BUGFIX: the raw timestamp string is passed straight to
        # format_timestamp. Previously it was wrapped in json_stringify
        # first, which JSON-quotes the value ('"2017-..."') and breaks
        # timestamp parsing; the loader pipeline passes the raw value.
        'firewall_rule_create_time': parser.format_timestamp(
            firewall_dict.get('creationTimestamp'),
            FirewallRule.MYSQL_DATETIME_FORMAT),
    }
    if project_id:
        in_dict['project_id'] = project_id
    return FirewallRule(validate=validate, **in_dict)
def _transform(self, resource_from_api):
    """Transform firewall rules map into loadable format for Cloud SQL.

    Loading the project id as project number is not supported by the
    GCP firewall api.

    Args:
        resource_from_api (dict): A dict mapping projects with a list of
            their firewall rules.
            {project_id1: [firewall_rule1a, firewall_rule1b],
             project_id2: [firewall_rule2a, firewall_rule2b],
             project_id3: [firewall_rule3a, firewall_rule3b]}

    Yields:
        dict: Loadable firewall rules, as a per-firewall dictionary.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, firewall_rules in resource_from_api.items():
        for firewall_rule in firewall_rules:
            yield {
                'firewall_rule_id': firewall_rule.get('id'),
                'project_id': project_id,
                'firewall_rule_name': firewall_rule.get('name'),
                'firewall_rule_description':
                    firewall_rule.get('description'),
                'firewall_rule_kind': firewall_rule.get('kind'),
                'firewall_rule_network': firewall_rule.get('network'),
                'firewall_rule_priority': firewall_rule.get('priority'),
                'firewall_rule_direction': firewall_rule.get('direction'),
                'firewall_rule_source_ranges': parser.json_stringify(
                    firewall_rule.get('sourceRanges')),
                'firewall_rule_destination_ranges': parser.json_stringify(
                    firewall_rule.get('destinationRanges')),
                'firewall_rule_source_tags': parser.json_stringify(
                    firewall_rule.get('sourceTags')),
                'firewall_rule_target_tags': parser.json_stringify(
                    firewall_rule.get('targetTags')),
                'firewall_rule_source_service_accounts':
                    parser.json_stringify(
                        firewall_rule.get('sourceServiceAccounts')),
                'firewall_rule_target_service_accounts':
                    parser.json_stringify(
                        firewall_rule.get('targetServiceAccounts')),
                'firewall_rule_allowed': parser.json_stringify(
                    firewall_rule.get('allowed')),
                'firewall_rule_denied': parser.json_stringify(
                    firewall_rule.get('denied')),
                'firewall_rule_self_link': firewall_rule.get('selfLink'),
                'firewall_rule_create_time': parser.format_timestamp(
                    firewall_rule.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'raw_firewall_rule': parser.json_stringify(firewall_rule),
            }
def _transform(self, resource_from_api):
    """Create an iterator of forwarding rules to load into database.

    TODO: truncate the region and target.

    Args:
        resource_from_api (dict): Forwarding rules, keyed by project id,
            from GCP API.

    Yields:
        dict: Forwarding rule properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, forwarding_rules in resource_from_api.items():
        for rule in forwarding_rules:
            yield {
                'project_id': project_id,
                'id': rule.get('id'),
                'creation_timestamp': parser.format_timestamp(
                    rule.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'name': rule.get('name'),
                'description': rule.get('description'),
                'region': rule.get('region'),
                # The API uses 'IPAddress'/'IPProtocol' capitalization.
                'ip_address': rule.get('IPAddress'),
                'ip_protocol': rule.get('IPProtocol'),
                'port_range': rule.get('portRange'),
                'ports': parser.json_stringify(rule.get('ports', [])),
                'target': rule.get('target'),
                'load_balancing_scheme': rule.get('loadBalancingScheme'),
                'subnetwork': rule.get('subnetwork'),
                'network': rule.get('network'),
                'backend_service': rule.get('backendService'),
                'raw_forwarding_rule': parser.json_stringify(rule),
            }
def _transform(self, resource_from_api):
    """Yield an iterator of loadable projects.

    Args:
        resource_from_api (iterable): Resource manager project list
            response pages.
            https://cloud.google.com/resource-manager/reference/rest/v1/projects/list#response-body

    Yields:
        dict: Loadable projects, as a per-project dictionary.
    """
    # NOTE: the docstring previously said "loadable iam policies", but
    # this method yields project rows. Flattened the nested generator
    # expression into plain loops for readability.
    for response_page in resource_from_api:
        for project in response_page.get('projects', []):
            yield {
                'project_number': project.get('projectNumber'),
                'project_id': project.get('projectId'),
                'project_name': project.get('name'),
                'lifecycle_state': project.get('lifecycleState'),
                'parent_type': project.get('parent', {}).get('type'),
                'parent_id': project.get('parent', {}).get('id'),
                'raw_project': parser.json_stringify(project),
                'create_time': parser.format_timestamp(
                    project.get('createTime'),
                    self.MYSQL_DATETIME_FORMAT),
            }
def _transform(self, resource_from_api):
    """Yield an iterator of loadable dataset access rows.

    Args:
        resource_from_api (list): Tuples in the form of
            [(project_id, dataset_id, {dataset_access_object}), ...]

    Yields:
        dict: project_id, dataset_id, and access detail for each ACL
            entry of each dataset.
    """
    # NOTE: the docstring previously said "loadable groups", but this
    # method yields BigQuery dataset ACL rows.
    for project_id, dataset_id, access in resource_from_api:
        for acl in access:
            yield {
                'project_id': project_id,
                'dataset_id': dataset_id,
                'access_domain': acl.get('domain'),
                'access_user_by_email': acl.get('userByEmail'),
                'access_special_group': acl.get('specialGroup'),
                'access_group_by_email': acl.get('groupByEmail'),
                'role': acl.get('role'),
                'access_view_project_id':
                    acl.get('view', {}).get('projectId'),
                'access_view_table_id': acl.get('view', {}).get('tableId'),
                'access_view_dataset_id':
                    acl.get('view', {}).get('datasetId'),
                'raw_access_map': parser.json_stringify(acl),
            }
def _transform(self, resource_from_api):
    """Yield an iterator of loadable organizations.

    Args:
        resource_from_api: An iterable of resource manager org search
            response pages.
            https://cloud.google.com/resource-manager/reference/rest/v1/organizations/search
            https://cloud.google.com/resource-manager/reference/rest/v1/organizations#Organization

    Yields:
        dict: One loadable org per organization found.
    """
    for response_page in resource_from_api:
        for org in response_page.get('organizations', []):
            # The 'name' field uniquely identifies the org, formatted
            # as "organizations/<organization_id>"; strip the
            # "<RESOURCE_NAME>/" prefix to recover the bare id.
            org_name = org.get('name')
            prefix = '%s/' % self.RESOURCE_NAME
            yield {
                'org_id': org_name[len(prefix):],
                'name': org_name,
                'display_name': org.get('displayName'),
                'lifecycle_state': org.get('lifecycleState'),
                'raw_org': parser.json_stringify(org),
                'creation_time': parser.format_timestamp(
                    org.get('creationTime'),
                    self.MYSQL_DATETIME_FORMAT),
            }
def _transform(self, resource_from_api):
    """Create an iterator of backend services to load into database.

    Args:
        resource_from_api (dict): Backend services, keyed by project id,
            from GCP API.

    Yields:
        dict: Backend service properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, backend_services in resource_from_api.items():
        for backend_service in backend_services:
            yield {
                'project_id': project_id,
                'id': backend_service.get('id'),
                'creation_timestamp': parser.format_timestamp(
                    backend_service.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'name': backend_service.get('name'),
                'description': backend_service.get('description'),
                'affinity_cookie_ttl_sec': self._to_int(
                    backend_service.get('affinityCookieTtlSec')),
                'backends': parser.json_stringify(
                    backend_service.get('backends', [])),
                'cdn_policy': parser.json_stringify(
                    backend_service.get('cdnPolicy', {})),
                'connection_draining': parser.json_stringify(
                    backend_service.get('connectionDraining', {})),
                'enable_cdn': self._to_bool(
                    backend_service.get('enableCDN')),
                'health_checks': parser.json_stringify(
                    backend_service.get('healthChecks', [])),
                'iap': parser.json_stringify(
                    backend_service.get('iap', {})),
                'load_balancing_scheme': backend_service.get(
                    'loadBalancingScheme'),
                'port': self._to_int(backend_service.get('port')),
                'port_name': backend_service.get('portName'),
                'protocol': backend_service.get('protocol'),
                'region': backend_service.get('region'),
                'session_affinity': backend_service.get('sessionAffinity'),
                'timeout_sec': backend_service.get('timeoutSec'),
            }
def _write_temp_attachment(self):
    """Write the attachment to a temp file.

    Returns:
        str: The output filename for the violations json just written.
    """
    filename = self._get_output_filename()
    filepath = '{}/{}'.format(TEMP_DIR, filename)
    serialized_violations = parser.json_stringify(self.violations)
    with open(filepath, 'w+') as attachment_file:
        attachment_file.write(serialized_violations)
    return filename
def run(self):
    """Generate the temporary json file and upload to GCS."""
    with tempfile.NamedTemporaryFile() as violations_file:
        violations_file.write(parser.json_stringify(self.violations))
        violations_file.flush()
        destination = '{}/{}'.format(self.pipeline_config['gcs_path'],
                                     self._get_output_filename())
        # Only upload when the configured path is actually a GCS URI.
        if destination.startswith('gs://'):
            storage.StorageClient().put_text_file(
                violations_file.name, destination)
def _transform(self, resource_from_api):
    """Create an iterator of instances to load into database.

    Args:
        resource_from_api (dict): A dict of instances, keyed by
            project id, from GCP API.

    Yields:
        dict: Instance properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, instances in resource_from_api.items():
        for instance in instances:
            yield {
                'project_id': project_id,
                'id': instance.get('id'),
                'creation_timestamp': parser.format_timestamp(
                    instance.get('creationTimestamp'),
                    self.MYSQL_DATETIME_FORMAT),
                'name': instance.get('name'),
                'description': instance.get('description'),
                'can_ip_forward': self._to_bool(
                    instance.get('canIpForward', 0)),
                'cpu_platform': instance.get('cpuPlatform'),
                'disks': parser.json_stringify(instance.get('disks', [])),
                'machine_type': instance.get('machineType'),
                'metadata': parser.json_stringify(
                    instance.get('metadata', {})),
                'network_interfaces': parser.json_stringify(
                    instance.get('networkInterfaces', [])),
                'scheduling': parser.json_stringify(
                    instance.get('scheduling', {})),
                'service_accounts': parser.json_stringify(
                    instance.get('serviceAccounts', [])),
                'status': instance.get('status'),
                'status_message': instance.get('statusMessage'),
                'tags': parser.json_stringify(instance.get('tags')),
                'zone': instance.get('zone'),
                'raw_instance': parser.json_stringify(instance),
            }
def _transform(self, resource_from_api):
    """Create an iterator of KE services to load into database.

    Args:
        resource_from_api (dict): KE services from GCP API, keyed by
            project id, with list of clusters as values.
            {project1: [clusters], project2: [clusters],
             project3: [clusters]}
            Each cluster has additional server_config data included.

    Yields:
        dict: KE service properties.
    """
    # items() (not the py2-only iteritems()) so this runs on Python 2 and 3.
    for project_id, clusters in resource_from_api.items():
        for cluster in clusters:
            # Redact credentials before serializing. NOTE: this mutates
            # the caller's cluster dict in place.
            cluster['masterAuth'] = '<not saved in forseti inventory>'
            yield {
                'project_id': project_id,
                'addons_config': parser.json_stringify(
                    cluster.get('addonsConfig')),
                'cluster_ipv4_cidr': cluster.get('clusterIpv4Cidr'),
                'create_time': cluster.get('createTime'),
                'current_master_version':
                    cluster.get('currentMasterVersion'),
                'current_node_count': cluster.get('currentNodeCount'),
                'current_node_version': cluster.get('currentNodeVersion'),
                'endpoint': cluster.get('endpoint'),
                'initial_cluster_version':
                    cluster.get('initialClusterVersion'),
                'instance_group_urls': cluster.get('instanceGroupUrls'),
                'legacy_abac': parser.json_stringify(
                    cluster.get('legacyAbac')),
                'locations': cluster.get('locations'),
                'logging_service': cluster.get('loggingService'),
                'monitoring_service': cluster.get('monitoringService'),
                'name': cluster.get('name'),
                'network': cluster.get('network'),
                'node_config': parser.json_stringify(
                    cluster.get('nodeConfig')),
                'node_ipv4_cidr_size': cluster.get('nodeIpv4CidrSize'),
                'node_pools': parser.json_stringify(
                    cluster.get('nodePools')),
                'self_link': cluster.get('selfLink'),
                'services_ipv4_cidr': cluster.get('servicesIpv4Cidr'),
                'status': cluster.get('status'),
                'subnetwork': cluster.get('subnetwork'),
                'zone': cluster.get('zone'),
                'server_config': parser.json_stringify(
                    cluster.get('serverConfig')),
                'raw_cluster': parser.json_stringify(cluster),
            }
def _make_attachment(self):
    """Create the attachment object.

    Returns:
        attachment: SendGrid attachment object, or `None` in case of
            failure.
    """
    attachment = None
    with tempfile.NamedTemporaryFile() as violations_file:
        violations_file.write(parser.json_stringify(self.violations))
        violations_file.flush()
        attachment = self.mail_util.create_attachment(
            file_location=violations_file.name,
            content_type='text/json',
            filename=self._get_output_filename(),
            disposition='attachment',
            content_id='Violations')
    return attachment
def run(self, violations, gcs_path):
    """Generate the temporary json file and upload to GCS.

    Args:
        violations (dict): Violations to be uploaded as findings.
        gcs_path (str): The GCS bucket to upload the findings.
    """
    LOGGER.info('Running CSCC findings notification.')
    findings = self._transform_to_findings(violations)
    with tempfile.NamedTemporaryFile() as findings_file:
        findings_file.write(parser.json_stringify(findings))
        findings_file.flush()
        destination = '{}/{}'.format(gcs_path,
                                     self._get_output_filename())
        # Only upload when the configured path is actually a GCS URI.
        if destination.startswith('gs://'):
            storage.StorageClient().put_text_file(
                findings_file.name, destination)
    LOGGER.info('Completed CSCC findings notification.')
'ip_address': '10.10.10.1', 'region': 'https://www.googleapis.com/compute/v1/projects/project1/regions/us-central1', 'backend_service': None, 'load_balancing_scheme': 'EXTERNAL', 'target': 'https://www.googleapis.com/compute/v1/projects/project1/regions/us-central1/targetPools/project1-pool', 'port_range': '80-80', 'ports': '[]', 'ip_protocol': 'TCP', 'creation_timestamp': '2017-05-05 12:00:01', 'id': '111111111111', 'name': 'project1-rule', 'network': None, 'subnetwork': None, 'raw_forwarding_rule': parser.json_stringify(FAKE_FORWARDING_RULE1), }, { 'project_id': 'project2', 'description': '', 'ip_address': '10.10.10.2', 'region': 'https://www.googleapis.com/compute/v1/projects/project2/regions/us-central1', 'backend_service': None, 'load_balancing_scheme': 'EXTERNAL', 'target': 'https://www.googleapis.com/compute/v1/projects/project2/regions/us-central1/targetPools/project2-pool', 'port_range': '80-80', 'ports': '[]', 'ip_protocol': 'TCP', 'creation_timestamp': '2017-05-05 12:00:01',
'name': 'apps/project1', 'default_cookie_expiration': None, 'code_bucket': 'staging.project1.appspot.com', 'auth_domain': 'gmail.com', 'project_id': 'project1', 'iap': '{}', 'default_bucket': 'project1.appspot.com', 'raw_application': parser.json_stringify(FAKE_PROJECT_APPLICATIONS_MAP['project1']) }] FAKE_SERVICES = [{ 'split': { 'allocations': { '1': 1 } }, 'name': 'apps/fakescanner/services/default', 'id': 'aaaaaa11111' }] EXPECTED_LOADABLE_SERVICES = [{ 'project_id': 'project1',
# Test fixture: one project mapping to a single service account entry
# (as returned by the GCP API) with an empty 'keys' list attached.
FAKE_PROJECT_SERVICE_ACCOUNTS_MAP_WITH_KEYS = {
    'project1': [
        {
            'name': '*****@*****.**',
            'email': '*****@*****.**',
            'oauth2ClientId': '12345',
            'keys': []
        },
    ]
}

# Test fixture: service account keys, keyed by service account email.
FAKE_SERVICE_ACCOUNT_KEYS = {
    '*****@*****.**': {
        'keys': [],
    }
}

# Expected loadable rows the transform should produce from the
# fixtures above (API camelCase fields flattened to snake_case columns,
# nested structures JSON-stringified).
EXPECTED_LOADABLE_SERVICE_ACCOUNTS = [
    {
        'project_id': 'project1',
        'name': '*****@*****.**',
        'email': '*****@*****.**',
        'oauth2_client_id': '12345',
        'account_keys': parser.json_stringify(
            FAKE_SERVICE_ACCOUNT_KEYS['*****@*****.**']['keys']),
        'raw_service_account': parser.json_stringify(
            FAKE_PROJECT_SERVICE_ACCOUNTS_MAP_WITH_KEYS['project1'][0]),
    }
]
u'codeBucket': u'staging.project1.appspot.com', u'defaultBucket': u'project1.appspot.com', u'defaultHostname': u'project1.appspot.com', u'gcrDomain': u'us.gcr.io', u'id': u'project1', u'locationId': u'us-central', u'name': u'apps/project1', u'servingStatus': u'SERVING' } } EXPECTED_LOADABLE_APPLICATIONS = [ { 'default_hostname': u'project1.appspot.com', 'app_id': u'project1', 'serving_status': u'SERVING', 'gcr_domain': u'us.gcr.io', 'location_id': u'us-central', 'dispatch_rules': '[]', 'name': u'apps/project1', 'default_cookie_expiration': None, 'code_bucket': u'staging.project1.appspot.com', 'auth_domain': u'gmail.com', 'project_id': 'project1', 'iap': '{}', 'default_bucket': u'project1.appspot.com', 'raw_application': parser.json_stringify( FAKE_PROJECT_APPLICATIONS_MAP['project1']) } ]
def _retrieve(self):
    """Retrieve AppEngine applications from GCP.

    Get all the projects in the current snapshot and retrieve the
    AppEngine applications, services, versions and instances for each.

    Returns:
        tuple: (apps, loadable_services, loadable_versions,
            loadable_instances), where apps is a dict mapping project id
            to its application dict and the other three are lists of
            loadable row dicts.
    """
    projects = (proj_dao.ProjectDao(self.global_configs).get_projects(
        self.cycle_timestamp))
    apps = {}
    loadable_services = []
    loadable_versions = []
    loadable_instances = []
    for project in projects:
        app = self.safe_api_call('get_app', project.id)
        if not app:
            continue
        apps[project.id] = app
        app_id = app.get('id')
        services = self.safe_api_call('list_services', project.id)
        if not services:
            continue
        for service in services:
            service_id = service.get('id')
            loadable_services.append({
                'project_id': project.id,
                'app_id': app_id,
                'service_id': service_id,
                'service': parser.json_stringify(service)
            })
            versions = self.safe_api_call('list_versions', project.id,
                                          service_id)
            if not versions:
                continue
            for version in versions:
                version_id = version.get('id')
                loadable_versions.append({
                    'project_id': project.id,
                    'app_id': app_id,
                    'service_id': service_id,
                    'version_id': version_id,
                    'version': parser.json_stringify(version)
                })
                instances = self.safe_api_call(
                    'list_instances', project.id, service_id, version_id)
                if not instances:
                    continue
                for instance in instances:
                    # NOTE: a stray parser.json_stringify(instance) call
                    # whose result was discarded has been removed.
                    loadable_instances.append({
                        'project_id': project.id,
                        'app_id': app_id,
                        'service_id': service_id,
                        'version_id': version_id,
                        'instance_id': instance.get('id'),
                        'instance': parser.json_stringify(instance)
                    })
    return apps, loadable_services, loadable_versions, loadable_instances
# Expected loadable AppEngine application row, derived from
# FAKE_PROJECT_APPLICATIONS_MAP (defined elsewhere in this module):
# API camelCase fields flattened to snake_case columns, with nested
# structures JSON-stringified.
EXPECTED_LOADABLE_APPLICATIONS = [{
    'default_hostname': u'project1.appspot.com',
    'app_id': u'project1',
    'serving_status': u'SERVING',
    'gcr_domain': u'us.gcr.io',
    'location_id': u'us-central',
    'dispatch_rules': '[]',
    'name': u'apps/project1',
    'default_cookie_expiration': None,
    'code_bucket': u'staging.project1.appspot.com',
    'auth_domain': u'gmail.com',
    'project_id': 'project1',
    'iap': '{}',
    'default_bucket': u'project1.appspot.com',
    'raw_application':
        parser.json_stringify(FAKE_PROJECT_APPLICATIONS_MAP['project1'])
}]
def _transform_data(cloudsql_instances_map):
    """Yield an iterator of loadable CloudSQL instances.

    Args:
        cloudsql_instances_map (iterable): Instances as per-project
            dictionaries, e.g.
            {'project_number': 11111, 'instances': instances_dict}

    Yields:
        dict: A flattened, loadable CloudSQL instance.
    """
    # NOTE: fixed the docstring typo ('cloudsql_instnces_maps') and
    # hoisted the repeated item.get('settings', {}) style lookups;
    # the produced rows are unchanged.
    for instances_map in cloudsql_instances_map:
        project_number = instances_map['project_number']
        for item in instances_map['instances'].get('items', []):
            settings = item.get('settings', {})
            failover = item.get('failoverReplica', {})
            on_prem = item.get('onPremisesConfiguration', {})
            backup_config = settings.get('backupConfiguration', {})
            ip_config = settings.get('ipConfiguration', {})
            location_pref = settings.get('locationPreference', {})
            yield {
                'project_number': project_number,
                'name': item.get('name'),
                'project': item.get('project'),
                'backend_type': item.get('backendType'),
                'connection_name': item.get('connectionName'),
                'current_disk_size': int(item.get('currentDiskSize', 0)),
                'database_version': item.get('databaseVersion'),
                'failover_replica_available': failover.get('available'),
                'failover_replica_name': failover.get('name'),
                'instance_type': item.get('instanceType'),
                'ipv6_address': item.get('ipv6Address'),
                'kind': item.get('kind'),
                'master_instance_name': item.get('masterInstanceName'),
                'max_disk_size': int(item.get('maxDiskSize', 0)),
                'on_premises_configuration_host_port':
                    on_prem.get('hostPort'),
                'on_premises_configuration_kind': on_prem.get('kind'),
                'region': item.get('region'),
                'replica_configuration':
                    json.dumps(item.get('replicaConfiguration')),
                'replica_names': json.dumps(item.get('replicaNames')),
                'self_link': item.get('selfLink'),
                'server_ca_cert': json.dumps(item.get('serverCaCert')),
                'service_account_email_address':
                    item.get('serviceAccountEmailAddress'),
                'settings_activation_policy':
                    settings.get('activationPolicy'),
                'settings_authorized_gae_applications':
                    json.dumps(settings.get('authorizedGaeApplications')),
                'settings_availability_type':
                    settings.get('availabilityType'),
                'settings_backup_configuration_binary_log_enabled':
                    backup_config.get('binaryLogEnabled'),
                'settings_backup_configuration_enabled':
                    backup_config.get('enabled'),
                'settings_backup_configuration_kind':
                    backup_config.get('kind'),
                'settings_backup_configuration_start_time':
                    backup_config.get('startTime'),
                'settings_crash_safe_replication_enabled':
                    settings.get('crashSafeReplicationEnabled'),
                'settings_data_disk_size_gb':
                    int(settings.get('dataDiskSizeGb', 0)),
                'settings_data_disk_type': settings.get('dataDiskType'),
                'settings_database_flags':
                    json.dumps(settings.get('databaseFlags')),
                'settings_database_replication_enabled':
                    settings.get('databaseReplicationEnabled', {}),
                'settings_ip_configuration_ipv4_enabled':
                    ip_config.get('ipv4Enabled', {}),
                'settings_ip_configuration_require_ssl':
                    ip_config.get('requireSsl', {}),
                'settings_kind': settings.get('kind'),
                'settings_labels': json.dumps(settings.get('labels')),
                'settings_location_preference_follow_gae_application':
                    location_pref.get('followGaeApplication'),
                'settings_location_preference_kind':
                    location_pref.get('kind'),
                'settings_location_preference_zone':
                    location_pref.get('zone'),
                'settings_maintenance_window':
                    json.dumps(settings.get('maintenanceWindow')),
                'settings_pricing_plan': settings.get('pricingPlan'),
                'settings_replication_type':
                    settings.get('replicationType'),
                'settings_settings_version':
                    int(settings.get('settingsVersion', 0)),
                'settings_storage_auto_resize':
                    settings.get('storageAutoResize'),
                'settings_storage_auto_resize_limit':
                    int(settings.get('storageAutoResizeLimit', 0)),
                'settings_tier': settings.get('tier'),
                'state': item.get('state'),
                'suspension_reason':
                    json.dumps(item.get('suspensionReason')),
                'raw_cloudsql_instance': parser.json_stringify(item)
            }