def get_zones(self, **kwargs):
    """
    Certain services require to be polled per-zone. In these cases, this
    method will return a list of zones to poll.

    :return: list of zone names, cached on self.zones
    """
    computeengine_client = gcp_connect_service(service='computeengine')
    if not self.zones:
        # Get the zone list from the first project that has the Compute Engine API enabled
        for project in self.projects:
            try:
                zones = computeengine_client.zones().list(
                    project=project['projectId']).execute()['items']
                self.zones = [zone['name'] for zone in zones]
            except HttpError:
                # Project doesn't have the Compute Engine API enabled -- try the next one
                pass
            except Exception as e:
                print_exception(e)
            if self.zones:
                break
    return self.zones
def get_regions(self, **kwargs):
    """
    Certain services require to be polled per-region. In these cases, this
    method will return a list of regions to poll.

    :return: list of region names, cached on self.regions
    """
    computeengine_client = gcp_connect_service(service='computeengine')
    if not self.regions:
        # Get the region list from the first project that has the Compute Engine API enabled
        for project in self.projects:
            try:
                regions = computeengine_client.regions().list(
                    project=project['projectId']).execute()['items']
                self.regions = [region['name'] for region in regions]
            except HttpError:
                # Project doesn't have the Compute Engine API enabled -- try the next one
                pass
            except Exception as e:
                # NOTE(review): sibling get_zones() uses print_exception; confirm
                # both helpers are imported before unifying on one of them.
                printException(e)
            if self.regions:
                break
    return self.regions
def fetch_all(self, credentials, regions=None, partition_name='aws', targets=None):
    """
    Fetch the configuration of the service across the targeted regions.

    :param credentials:     Credentials used to connect to the cloud provider
    :param regions:         Names of regions to fetch data from; defaults to all
    :param partition_name:  AWS partition to connect to
    :param targets:         Type of resources to be fetched; defaults to all.
    :return:
    """
    regions = [] if regions is None else regions
    global status, formatted_string
    # Initialize targets
    if not targets:
        targets = type(self).targets
    print_info('Fetching %s config...' % format_service_name(self.service))
    formatted_string = None
    # FIXME the below should be in moved to each provider's code
    # Connect to the service
    if self._is_provider('aws'):
        if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
            # Keep one client per region; the generic api_client is an arbitrary one of them
            api_clients = {}
            for region in build_region_list(self.service, regions, partition_name):
                api_clients[region] = connect_service('s3', credentials, region, silent=True)
            api_client = api_clients[list(api_clients.keys())[0]]
        elif self.service == 'route53domains':
            api_client = connect_service(self.service, credentials, 'us-east-1',
                                         silent=True)  # TODO: use partition's default region
        else:
            api_client = connect_service(self.service, credentials, silent=True)
    elif self._is_provider('gcp'):
        api_client = gcp_connect_service(service=self.service, credentials=credentials)
    elif self._is_provider('azure'):
        api_client = azure_connect_service(service=self.service, credentials=credentials)
    # Threading to fetch & parse resources (queue consumer)
    params = {'api_client': api_client}
    if self._is_provider('aws'):
        if self.service in ['s3']:
            params['api_clients'] = api_clients
    # Threading to parse resources (queue feeder)
    target_queue = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])
    # Threading to list resources (queue feeder)
    params = {'api_client': api_client, 'q': target_queue}
    if self._is_provider('aws'):
        if self.service in ['s3']:
            params['api_clients'] = api_clients
    service_queue = self._init_threading(self.__fetch_service, params, self.thread_config['list'])
    # Init display
    self.fetchstatuslogger = FetchStatusLogger(targets)
    # Go
    for target in targets:
        service_queue.put(target)
    # Blocks until all items in the queue have been gotten and processed.
    service_queue.join()
    target_queue.join()
    # Show completion and force newline
    if self._is_provider('aws'):
        # Show completion and force newline
        if self.service != 'iam':
            self.fetchstatuslogger.show(True)
    else:
        self.fetchstatuslogger.show(True)
    # Threads should stop running as queues are empty
    self.run_target_threads = False
    self.run_service_threads = False
    # Put x items in the queues to ensure threads run one last time (and exit)
    for i in range(self.thread_config['parse']):
        target_queue.put(None)
    for j in range(self.thread_config['list']):
        service_queue.put(None)
def fetch_all(self, credentials, regions=None, partition_name='aws', targets=None):
    """
    Fetch the configuration of the service across the targeted regions.

    :param credentials:     Credentials used to connect to the cloud provider
    :param regions:         Names of regions to fetch data from; defaults to all
    :param partition_name:  AWS partition to connect to
    :param targets:         Type of resources to be fetched; defaults to all.
    :return:
    """
    # Guard against the mutable-default-argument pitfall: the previous
    # `regions=[]` default was shared across calls.
    regions = [] if regions is None else regions
    global status, formatted_string
    # Initialize targets
    if not targets:
        targets = type(self).targets
    printInfo('Fetching %s config...' % format_service_name(self.service))
    formatted_string = None
    # Connect to the service
    if self._is_provider('aws'):
        if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
            # Keep one client per region; the generic api_client is an arbitrary one of them
            api_clients = {}
            for region in build_region_list(self.service, regions, partition_name):
                api_clients[region] = connect_service('s3', credentials, region, silent=True)
            api_client = api_clients[list(api_clients.keys())[0]]
        elif self.service == 'route53domains':
            api_client = connect_service(self.service, credentials, 'us-east-1',
                                         silent=True)  # TODO: use partition's default region
        else:
            api_client = connect_service(self.service, credentials, silent=True)
    elif self._is_provider('gcp'):
        api_client = gcp_connect_service(service=self.service, credentials=credentials)
    elif self._is_provider('azure'):
        api_client = azure_connect_service(service=self.service, credentials=credentials)
    # Threading to fetch & parse resources (queue consumer)
    params = {'api_client': api_client}
    if self._is_provider('aws'):
        if self.service in ['s3']:
            params['api_clients'] = api_clients
    # Threading to parse resources (queue feeder)
    target_queue = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])
    # Threading to list resources (queue feeder)
    params = {'api_client': api_client, 'q': target_queue}
    if self._is_provider('aws'):
        if self.service in ['s3']:
            params['api_clients'] = api_clients
    service_queue = self._init_threading(self.__fetch_service, params, self.thread_config['list'])
    # Init display
    self.fetchstatuslogger = FetchStatusLogger(targets)
    # Go
    for target in targets:
        service_queue.put(target)
    # Join
    service_queue.join()
    target_queue.join()
    if self._is_provider('aws'):
        # Show completion and force newline
        if self.service != 'iam':
            self.fetchstatuslogger.show(True)
    else:
        self.fetchstatuslogger.show(True)
def _get_projects(self, parent_type, parent_id):
    """
    Return all ACTIVE projects under a given organization or folder.
    For a project id, only that project's details are returned; for a
    service account, all projects visible to it are returned.

    :param parent_type: one of 'project', 'organization', 'folder', 'service-account'
    :param parent_id:   identifier of the parent resource
    :return:            list of project resource dicts, or None for an unknown parent_type
    """
    if parent_type not in ['project', 'organization', 'folder', 'service-account']:
        return None
    projects = []
    # FIXME can't currently be done with the google-cloud resource_manager client
    # library as it consumes v1 of the API, which doesn't support folders
    resource_manager_client_v1 = gcp_connect_service(service='cloudresourcemanager',
                                                     credentials=self.credentials)
    resource_manager_client_v2 = gcp_connect_service(service='cloudresourcemanager-v2',
                                                     credentials=self.credentials)
    if parent_type == 'project':
        project_response = resource_manager_client_v1.projects().list(
            filter='id:%s' % parent_id).execute()
        if 'projects' in project_response:
            for project in project_response['projects']:
                if project['lifecycleState'] == "ACTIVE":
                    projects.append(project)
    elif parent_type == 'service-account':
        project_response = resource_manager_client_v1.projects().list().execute()
        if 'projects' in project_response:
            for project in project_response['projects']:
                if project['lifecycleState'] == "ACTIVE":
                    projects.append(project)
    else:
        # Get the parent's direct child projects, following pagination
        request = resource_manager_client_v1.projects().list(filter='parent.id:%s' % parent_id)
        while request is not None:
            response = request.execute()
            if 'projects' in response:
                for project in response['projects']:
                    if project['lifecycleState'] == "ACTIVE":
                        projects.append(project)
            request = resource_manager_client_v1.projects().list_next(
                previous_request=request, previous_response=response)
        # Get projects in child folders recursively
        folder_response = resource_manager_client_v2.folders().list(
            parent='%ss/%s' % (parent_type, parent_id)).execute()
        if 'folders' in folder_response:
            for folder in folder_response['folders']:
                # BUGFIX: the previous `folder['name'].strip(u'folders/')` removed any
                # of the characters 'f,o,l,d,e,r,s,/' from both ends -- not the
                # 'folders/' prefix -- and only worked by accident on numeric ids.
                # Extract the id component after the last slash instead.
                folder_id = folder['name'].split('/')[-1]
                projects.extend(self._get_projects("folder", folder_id))
    return projects