def reduced_inventory(self, session, inventory_index_id, types,
                      fetch_category=Categories.resource):
    """Get the inventory rows of the given types.

    Args:
        session (Session): Database session.
        inventory_index_id (int64): Id of the inventory index.
        types (list): Types of the resources to fetch.
        fetch_category (Categories): Category of data to fetch.

    Returns:
        list: Matching inventory rows.
    """
    result = [x for x in DataAccess.iter(session,
                                         inventory_index_id,
                                         types,
                                         fetch_category)]
    return result
def verify_resource_timestamps_from_storage(storage):
    """Check that every stored resource row carries a timestamp.

    This reads like a nested test helper, so ``self`` below is
    assumed to be the test case captured from the enclosing scope.

    Args:
        storage (Storage): Inventory storage to read from.

    Returns:
        int: Number of rows verified.
    """
    session = storage.session
    inventory_index_id = storage.inventory_index.id
    for i, item in enumerate(
            DataAccess.iter(session, inventory_index_id, list()),
            start=1):
        self.assertTrue('timestamp' in item.get_other())
    return i
def list(self):
    """List stored inventory.

    Yields:
        object: Inventory metadata
    """
    with self.config.scoped_session() as session:
        for item in DataAccess.list(session):
            yield item
def purge(self, retention_days):
    """Purge the gcp_inventory data that's older than the retention days.

    Args:
        retention_days (string): Days of inventory tables to retain.

    Returns:
        str: Purge result.
    """
    LOGGER.info('retention_days is: %s', retention_days)

    if not retention_days:
        LOGGER.info('retention_days is not specified. Will use '
                    'configuration default.')
        retention_days = (
            self.config.inventory_config.retention_days)
    retention_days = int(retention_days)

    if retention_days < 0:
        result_message = 'Purge is disabled. Nothing will be purged.'
        LOGGER.info(result_message)
        return result_message

    utc_now = date_time.get_utc_now_datetime()
    cutoff_datetime = (
        utc_now - datetime.timedelta(days=retention_days))
    LOGGER.info('Cut-off datetime to start purging is: %s',
                cutoff_datetime)

    with self.config.scoped_session() as session:
        inventory_indexes_to_purge = (
            DataAccess.get_inventory_indexes_older_than_cutoff(
                session, cutoff_datetime))

    if not inventory_indexes_to_purge:
        result_message = 'No inventory to be purged.'
        LOGGER.info(result_message)
        return result_message

    purged_inventory_indexes = []
    for inventory_index in inventory_indexes_to_purge:
        _ = self.delete(inventory_index.id)
        purged_inventory_indexes.append(str(inventory_index.id))

    purged_inventory_indexes_as_str = ', '.join(purged_inventory_indexes)
    result_message = (
        'Inventory data from these inventory indexes have '
        'been purged: {}').format(purged_inventory_indexes_as_str)
    LOGGER.info(result_message)

    return result_message
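# To make the retention arithmetic above concrete, here is a minimal,
# self-contained sketch of the cutoff computation. It uses the standard
# library directly; treating date_time.get_utc_now_datetime() as
# equivalent to datetime.datetime.utcnow() is an assumption.

import datetime

retention_days = 30
utc_now = datetime.datetime.utcnow()  # stand-in for date_time helper
cutoff_datetime = utc_now - datetime.timedelta(days=retention_days)

# Inventory indexes created before cutoff_datetime are eligible for
# purging; a negative retention_days disables purging entirely.
print('Purging inventory created before:', cutoff_datetime)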
def delete(self, inventory_id):
    """Delete an inventory by id.

    Args:
        inventory_id (str): Id of the inventory.

    Returns:
        object: Inventory object that was deleted
    """
    with self.config.scoped_session() as session:
        result = DataAccess.delete(session, inventory_id)
        return result
def get(self, inventory_id):
    """Get inventory metadata by id.

    Args:
        inventory_id (str): Id of the inventory.

    Returns:
        object: Inventory metadata
    """
    with self.config.scoped_session() as session:
        result = DataAccess.get(session, inventory_id)
        return result
def get_user_emails(service_config, member_types=None):
    """Retrieves the list of user email addresses from inventory.

    Args:
        service_config (ServiceConfig): The service configuration.
        member_types (list): Member types to query in storage.
            Defaults to ['gsuite_user'].

    Returns:
        list: User e-mail addresses.
    """
    if not member_types:
        member_types = ['gsuite_user']
    emails = []
    with service_config.scoped_session() as session:
        inventory_index_id = (
            DataAccess.get_latest_inventory_index_id(session))
        for inventory_row in DataAccess.iter(session,
                                             inventory_index_id,
                                             type_list=member_types):
            emails.append(
                inventory_row.get_resource_data()['primaryEmail'])
    return emails
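# The extraction step above is a plain key lookup per row. A
# self-contained sketch of the same pattern, with dicts standing in
# for inventory rows (illustrative stand-ins, not Forseti objects):

# Each dict mimics inventory_row.get_resource_data() for a
# gsuite_user resource, which is assumed to carry 'primaryEmail'.
resource_data_rows = [
    {'primaryEmail': 'alice@example.com'},
    {'primaryEmail': 'bob@example.com'},
]
emails = [row['primaryEmail'] for row in resource_data_rows]
assert emails == ['alice@example.com', 'bob@example.com']

# Note that any type passed in member_types must expose
# 'primaryEmail' in its resource data, or the lookup raises KeyError.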
def run(self):
    """Runs the import.

    Raises:
        NotImplementedError: If the importer encounters an unknown
            inventory type.
    """
    autocommit = self.session.autocommit
    autoflush = self.session.autoflush
    try:
        self.session.autocommit = False
        self.session.autoflush = True

        root = DataAccess.get_root(self.readonly_session,
                                   self.inventory_index_id)
        inventory_index = DataAccess.get(self.readonly_session,
                                         self.inventory_index_id)

        description = {
            'source': 'inventory',
            'source_info': {
                'inventory_index_id': self.inventory_index_id},
            'source_root': self._type_name(root),
            'pristine': True,
            'gsuite_enabled': DataAccess.type_exists(
                self.readonly_session,
                self.inventory_index_id,
                ['gsuite_group', 'gsuite_user'])
        }
        LOGGER.debug('Model description: %s', description)
        self.model.add_description(json.dumps(description,
                                              sort_keys=True))

        if root.get_resource_type() in ['organization']:
            LOGGER.debug('Root resource is organization: %s', root)
        else:
            LOGGER.debug('Root resource is not organization: %s.', root)

        item_counter = 0

        LOGGER.debug('Start storing resources into models.')
        for resource in DataAccess.iter(self.readonly_session,
                                        self.inventory_index_id,
                                        GCP_TYPE_LIST):
            item_counter += 1
            self._store_resource(resource)
            if not item_counter % 1000:
                # Flush database every 1000 resources
                LOGGER.debug('Flushing model write session: %s',
                             item_counter)
                self._flush_session()
            if not item_counter % 100000:
                # Commit every 100k resources while iterating
                # through all the resources.
                LOGGER.debug('Committing model write session: %s',
                             item_counter)
                self._commit_session()
        self._commit_session()
        LOGGER.debug('Finished storing resources into models.')

        item_counter += self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            ['role']),
            self._convert_role
        )

        item_counter += self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            GCP_TYPE_LIST,
                            fetch_category=Categories.dataset_policy),
            self._convert_dataset_policy
        )

        item_counter += self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            GCP_TYPE_LIST,
                            fetch_category=Categories.gcs_policy),
            self._convert_gcs_policy
        )

        item_counter += self.model_action_wrapper(
            DataAccess.iter(
                self.readonly_session,
                self.inventory_index_id,
                GCP_TYPE_LIST,
                fetch_category=Categories.kubernetes_service_config),
            self._convert_service_config
        )

        self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            GSUITE_TYPE_LIST),
            self._store_gsuite_principal
        )

        self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            GCP_TYPE_LIST,
                            fetch_category=Categories.enabled_apis),
            self._convert_enabled_apis
        )

        self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            MEMBER_TYPE_LIST,
                            with_parent=True),
            self._store_gsuite_membership,
            post_action=self._store_gsuite_membership_post
        )

        self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            GROUPS_SETTINGS_LIST),
            self._store_groups_settings
        )

        self.dao.denorm_group_in_group(self.session)

        self.model_action_wrapper(
            DataAccess.iter(self.readonly_session,
                            self.inventory_index_id,
                            GCP_TYPE_LIST,
                            fetch_category=Categories.iam_policy),
            self._store_iam_policy
        )

        self.dao.expand_special_members(self.session)
    except Exception as e:  # pylint: disable=broad-except
        LOGGER.exception(e)
        buf = StringIO()
        traceback.print_exc(file=buf)
        buf.seek(0)
        message = buf.read()
        LOGGER.debug('Importer has an exception: %s', message)
        self.model.set_error(message)
    else:
        LOGGER.debug('Set model status.')
        for row in inventory_index.warning_messages:
            self.model.add_warning('{}: {}'.format(
                row.resource_full_name, row.warning_message))
        self.model.set_done(item_counter)
    finally:
        LOGGER.debug('Finished running importer.')
        self.session.commit()
        self.session.autocommit = autocommit
        self.session.autoflush = autoflush
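# The flush/commit cadence in the resource loop above trades memory
# for transaction size: frequent flushes keep the session's pending
# set small, while rarer commits bound how much work a rollback can
# lose. A minimal, self-contained sketch of the same pattern,
# assuming SQLAlchemy 1.4+; the model and thresholds are
# illustrative, not Forseti's.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Resource(Base):
    __tablename__ = 'resource'
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

for counter in range(1, 5001):
    session.add(Resource(name='resource-{}'.format(counter)))
    if not counter % 1000:
        # Flush: send pending rows to the database, but keep the
        # transaction open.
        session.flush()
    if not counter % 100000:
        # Commit: close the transaction so a later failure loses less.
        session.commit()
session.commit()  # Final commit for the remainder.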
def run(inventory_index_id,
        scanner_index_id,
        progress_queue,
        service_config=None):
    """Run the notifier.

    Entry point when the notifier is run as a library.

    Args:
        inventory_index_id (int64): Inventory index id.
        scanner_index_id (int64): Scanner index id.
        progress_queue (Queue): The progress queue.
        service_config (ServiceConfig): Forseti 2.0 service configs.

    Returns:
        int: Status code.
    """
    # pylint: disable=too-many-locals
    global_configs = service_config.get_global_config()
    notifier_configs = service_config.get_notifier_config()

    with service_config.scoped_session() as session:
        if scanner_index_id:
            inventory_index_id = (
                DataAccess.get_inventory_index_id_by_scanner_index_id(
                    session, scanner_index_id))
        else:
            if not inventory_index_id:
                inventory_index_id = (
                    DataAccess.get_latest_inventory_index_id(session))
            scanner_index_id = scanner_dao.get_latest_scanner_index_id(
                session, inventory_index_id)

        if not scanner_index_id:
            LOGGER.error(
                'No success or partial success scanner index found for '
                'inventory index: "%s".', str(inventory_index_id))
        else:
            # get violations
            violation_access = scanner_dao.ViolationAccess(session)
            violations = violation_access.list(
                scanner_index_id=scanner_index_id)

            violations_as_dict = []
            for violation in violations:
                violations_as_dict.append(
                    scanner_dao.convert_sqlalchemy_object_to_dict(
                        violation))

            violations_as_dict = convert_to_timestamp(violations_as_dict)

            violation_map = scanner_dao.map_by_resource(violations_as_dict)

            for retrieved_v in violation_map:
                log_message = (
                    'Retrieved {} violations for resource \'{}\''.format(
                        len(violation_map[retrieved_v]), retrieved_v))
                LOGGER.info(log_message)
                progress_queue.put(log_message)

            # build notification notifiers
            notifiers = []
            for resource in notifier_configs['resources']:
                if violation_map.get(resource['resource']) is None:
                    log_message = (
                        'Resource \'{}\' has no violations'.format(
                            resource['resource']))
                    progress_queue.put(log_message)
                    LOGGER.info(log_message)
                    continue
                if not resource['should_notify']:
                    LOGGER.debug('Not notifying for: %s',
                                 resource['resource'])
                    continue
                for notifier in resource['notifiers']:
                    log_message = (
                        'Running \'{}\' notifier for resource '
                        '\'{}\''.format(notifier['name'],
                                        resource['resource']))
                    progress_queue.put(log_message)
                    LOGGER.info(log_message)
                    chosen_pipeline = find_notifiers(notifier['name'])
                    notifiers.append(chosen_pipeline(
                        resource['resource'],
                        inventory_index_id,
                        violation_map[resource['resource']],
                        global_configs,
                        notifier_configs,
                        notifier.get('configuration')))

            # Run the notifiers.
            for notifier in notifiers:
                notifier.run()

            # Run the CSCC notifier.
            violation_configs = notifier_configs.get('violation')
            if violation_configs:
                if violation_configs.get('cscc').get('enabled'):
                    source_id = violation_configs.get('cscc').get(
                        'source_id')
                    if source_id:
                        # beta mode
                        LOGGER.debug(
                            'Running CSCC notifier with beta API. '
                            'source_id: %s', source_id)
                        (cscc_notifier.CsccNotifier(inventory_index_id)
                         .run(violations_as_dict, source_id=source_id))
                    else:
                        # alpha mode
                        LOGGER.debug('Running CSCC notifier with alpha API.')
                        gcs_path = (
                            violation_configs.get('cscc').get('gcs_path'))
                        mode = violation_configs.get('cscc').get('mode')
                        organization_id = (
                            violation_configs.get('cscc').get(
                                'organization_id'))
                        (cscc_notifier.CsccNotifier(inventory_index_id)
                         .run(violations_as_dict, gcs_path, mode,
                              organization_id))

        InventorySummary(service_config, inventory_index_id).run()

        log_message = 'Notification completed!'
        progress_queue.put(log_message)
        progress_queue.put(None)
        LOGGER.info(log_message)
        return 0
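# For reference, the shape of notifier_configs that run() depends on,
# reconstructed only from the keys accessed above. Illustrative only:
# the real configuration comes from Forseti's notifier YAML, and all
# values below are placeholders.
notifier_configs = {
    'resources': [
        {
            'resource': 'policy_violations',  # placeholder resource name
            'should_notify': True,
            'notifiers': [
                {
                    'name': 'email_violations',  # resolved via find_notifiers()
                    'configuration': {},  # notifier-specific settings
                },
            ],
        },
    ],
    'violation': {
        'cscc': {
            'enabled': True,
            # A source_id selects the beta API path; without it, the
            # alpha path reads the three keys below instead.
            'source_id': 'organizations/111/sources/222',
            'gcs_path': None,
            'mode': None,
            'organization_id': None,
        },
    },
}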
def reduced_inventory(self, session, inventory_index_id, types):
    """Get the inventory rows of the given types.

    Args:
        session (Session): Database session.
        inventory_index_id (int64): Id of the inventory index.
        types (list): Types of the resources to fetch.

    Returns:
        list: Matching inventory rows.
    """
    result = [x for x in DataAccess.iter(session,
                                         inventory_index_id,
                                         types)]
    return result