def execute(self):
    """
    Run a feed sync, then re-scan any images loaded during the sync window.

    Feed syncs will update the images with any new cves that are pulled in by
    the sync. As such, any images that are loaded while the sync itself is in
    progress need to be re-scanned for cves since the transaction ordering can
    result in the images being loaded with data prior to sync but not included
    in the sync process itself.

    :return: dict of feed sync results as returned by DataFeeds.sync()
    :raises: re-raises any sync or rescan failure after logging it
    """
    log.info('Starting feed update')

    start_time = datetime.datetime.utcnow()
    try:
        f = DataFeeds.instance()
        f.vuln_fn = FeedsUpdateTask.process_updated_vulnerability
        f.vuln_flush_fn = FeedsUpdateTask.flush_vulnerability_matches
        updated_dict = f.sync(to_sync=self.feeds, full_flush=self.full_flush)
        log.info('Feed sync complete. Results = {}'.format(updated_dict))
        return updated_dict
    except Exception:
        log.exception('Failure refreshing and syncing feeds')
        raise
    finally:
        end_time = datetime.datetime.utcnow()
        try:
            # Catch images added while the sync was running; their vuln state
            # may predate the data pulled in by this sync.
            self.rescan_images_created_between(from_time=start_time, to_time=end_time)
        except Exception:
            log.exception(
                'Unexpected exception rescanning vulns for images added during the feed sync'
            )
            raise
        finally:
            end_session()
def execute(self):
    """
    Run a feed sync, then re-scan any images loaded during the sync window.

    Feed syncs will update the images with any new cves that are pulled in by
    the sync. As such, any images that are loaded while the sync itself is in
    progress need to be re-scanned for cves since the transaction ordering can
    result in the images being loaded with data prior to sync but not included
    in the sync process itself.

    :return: flat list of images updated across all feeds/groups
    :raises: re-raises any sync failure after logging it
    """
    log.info('Starting feed update')

    start_time = datetime.datetime.utcnow()
    try:
        f = DataFeeds.instance()
        f.vuln_fn = FeedsUpdateTask.process_updated_vulnerability
        f.vuln_flush_fn = FeedsUpdateTask.flush_vulnerability_matches
        updated_dict = f.sync(to_sync=self.feeds, full_flush=self.full_flush)

        # Response is dict with feed name and dict for each group mapped to list of images updated
        log.info('Updated: {}'.format(updated_dict))
        updated = []
        for feed in updated_dict:
            for updated_imgs in updated_dict[feed].values():
                updated += updated_imgs

        log.info('Feed sync complete')
        return updated
    except Exception:
        log.exception('Failure refreshing and syncing feeds')
        raise
    finally:
        end_time = datetime.datetime.utcnow()
        try:
            # Catch images added while the sync was running
            self.rescan_images_created_between(from_time=start_time, to_time=end_time)
        finally:
            # Guarantee session cleanup even if the rescan itself fails
            end_session()
def execute(self):
    """
    Run a feed sync and aggregate the updated records across all groups.

    :return: flat list of updated records from every synced group
    :raises: re-raises any sync failure after logging it
    """
    log.info('Starting feed update')
    try:
        f = DataFeeds.instance()
        updated = []
        updated_dict = f.sync(to_sync=self.feeds)
        for g in updated_dict:
            updated += updated_dict[g]

        log.info('Feed sync complete')
        return updated
    except Exception:
        log.exception('Failure refreshing and syncing feeds')
        raise
    finally:
        end_session()
def teardown_session(exception=None):
    """
    Teardown function to ensure no leaked db session prior to request termination.

    Registered as a Flask request-teardown hook; always ends the thread-scoped
    db session regardless of whether the request raised.

    :param exception: exception raised during request handling, if any (unused here)
    :return: None
    """
    flask_app.logger.debug('Session teardown on request teardown')
    end_session()
def test_data_env(anchore_db):
    """
    Fixture for a test data env.

    Initializes the test environment, yields it to the test, and always ends
    the db session on teardown.

    :param anchore_db: initialized anchore db fixture
    :return: yields the test environment object
    """
    try:
        env = _init_te(True)
        yield env
    finally:
        logger.info("Cleaning up after test env")
        end_session()
def _load_images(test_env):
    """
    Load every image export from the test env into the system via ImageLoadTask.

    :param test_env: test environment exposing image_exports() -> iterable of (img_id, path)
    :return: None
    """
    logger.info('Loading images')
    try:
        for img_id, path in test_env.image_exports():
            logger.info('Loading {}'.format(img_id))
            file_url = 'file://' + path
            i = ImageLoadTask(user_id='0', image_id=img_id, url=file_url).execute()
            if not i:
                # Falsy result means the image was already present or the load failed
                logger.info('Could not load image {}, already in system, or an exception'.format(img_id))
        logger.info('Load complete')
    finally:
        end_session()
def test_data_env_with_images_loaded(test_data_env):
    """
    Fixture: the test data env with the ruby and node images pre-loaded.

    Yields the env to the test, then ends the db session so the next
    init_db() call initializes fully.
    """
    logger.info("Running test setup")
    for image_name in ("ruby", "node"):
        _load_image(image_name, test_data_env)
    yield test_data_env
    logger.info("Ending db session")
    # Ensure the next init_db() call initializes fully
    end_session()
def test_data_env_with_images_loaded(test_data_env):
    """
    Fixture yielding the test data env after loading the ruby and node images.

    Teardown ends the db session so the next init_db() call initializes fully.
    """
    logger.info('Running test setup')
    _load_image('ruby', test_data_env)
    _load_image('node', test_data_env)

    env = test_data_env
    yield env

    logger.info('Ending db session')
    # Ensure the next init_db() call initializes fully
    end_session()
def execute(self):
    """
    Perform the initial bulk feed sync and aggregate the updated records.

    :return: flat list of updated records from every synced group
    :raises: re-raises any sync failure after logging it
    """
    log.info('Starting initial feed sync')
    try:
        # Instantiate inside the try so end_session() runs even if instance() fails
        f = DataFeeds.instance()
        updated = []
        updated_dict = f.bulk_sync(to_sync=self.feed_list)
        for g in updated_dict:
            updated += updated_dict[g]

        log.info('Initial feed sync complete')
        return updated
    except Exception:
        log.exception('Failure refreshing and syncing feeds')
        raise
    finally:
        end_session()
def check_all_imgs_vuln():
    """
    Recompute and persist vulnerability matches for every image in the db.

    Best-effort: any error is logged and swallowed; the thread-scoped db
    session is always closed.
    """
    db = get_thread_scoped_session()
    try:
        for img in db.query(Image).all():
            # NOTE: the original also re-checked `if not img` here, but a falsy
            # img would already have raised on the .id access — dead code removed.
            logger.info('Checking vulnerabilities for image: {}'.format(img.id))
            vulns = vulnerabilities.vulnerabilities_for_image(img)
            for v in vulns:
                db.merge(v)
            db.commit()
            logger.info('Found: {}'.format(vulns))
    except Exception as e:
        logger.info('Error! {}'.format(e))
    finally:
        # Always release the session, success or failure
        end_session()
def execute(self):
    """
    Run a feed sync, emitting catalog events for start/failure/completion, then
    re-scan any images loaded during the sync window.

    Feed syncs will update the images with any new cves that are pulled in by
    the sync. As such, any images that are loaded while the sync itself is in
    progress need to be re-scanned for cves since the transaction ordering can
    result in the images being loaded with data prior to sync but not included
    in the sync process itself.

    :return: dict of feed sync results as returned by DataFeeds.sync()
    :raises: re-raises any sync or rescan failure after logging it
    """
    log.info('Starting feed update')

    # Create feed task begin event
    error = None
    with session_scope() as session:
        mgr = identities.manager_factory.for_session(session)

    catalog_client = internal_client_for(CatalogClient, userId=None)

    # Event emission is best-effort; never block the sync on it
    try:
        catalog_client.add_event(
            FeedSyncTaskStarted(groups=self.feeds if self.feeds else 'all'))
    except Exception:
        log.exception('Ignoring event generation error before feed sync')

    start_time = datetime.datetime.utcnow()
    try:
        f = DataFeeds.instance()
        f.vuln_fn = FeedsUpdateTask.process_updated_vulnerability
        f.vuln_flush_fn = FeedsUpdateTask.flush_vulnerability_matches
        updated_dict = f.sync(to_sync=self.feeds,
                              full_flush=self.full_flush,
                              catalog_client=catalog_client)
        log.info('Feed sync complete. Results = {}'.format(updated_dict))
        return updated_dict
    except Exception as e:
        error = e
        log.exception('Failure refreshing and syncing feeds')
        raise
    finally:
        end_time = datetime.datetime.utcnow()

        # log feed sync event (best-effort)
        try:
            if error:
                catalog_client.add_event(
                    FeedSyncTaskFailed(
                        groups=self.feeds if self.feeds else 'all',
                        error=error))
            else:
                catalog_client.add_event(
                    FeedSyncTaskCompleted(
                        groups=self.feeds if self.feeds else 'all'))
        except Exception:
            log.exception('Ignoring event generation error after feed sync')

        try:
            # Catch images added while the sync was running
            self.rescan_images_created_between(from_time=start_time, to_time=end_time)
        except Exception:
            log.exception(
                'Unexpected exception rescanning vulns for images added during the feed sync'
            )
            raise
        finally:
            end_session()
def execute(self) -> List[FeedSyncResult]:
    """
    Run a feed sync across the grype and legacy vulnerability providers,
    emitting notification events for start/failure/completion, then re-scan
    any images loaded during the sync window.

    Feed syncs will update the images with any new cves that are pulled in by
    the sync. As such, any images that are loaded while the sync itself is in
    progress need to be re-scanned for cves since the transaction ordering can
    result in the images being loaded with data prior to sync but not included
    in the sync process itself.

    :return: list of FeedSyncResult, one per synced feed
    :raises: re-raises any sync or rescan failure after logging it
    """
    logger.info("Starting feed sync. (operation_id={})".format(self.uuid))

    # Create feed task begin event
    error = None
    with session_scope() as session:
        mgr = identities.manager_factory.for_session(session)

    catalog_client = internal_client_for(CatalogClient, userId=None)

    # Start-event emission is best-effort; never block the sync on it
    try:
        notify_event(
            FeedSyncTaskStarted(groups=list(self.sync_configs.keys())
                                if self.sync_configs else "all"),
            catalog_client,
            self.uuid,
        )
    except Exception:
        logger.exception(
            "Ignoring event generation error before feed sync. (operation_id={})"
            .format(self.uuid))

    start_time = datetime.datetime.utcnow()
    try:
        # Sync both providers with identical arguments; keep the call in one
        # place instead of duplicating it per provider.
        updated_data_feeds = list()
        for provider in (GrypeProvider(), LegacyProvider()):
            updated_data_feeds.extend(
                DataFeeds.sync(
                    sync_util_provider=provider.get_sync_utils(self.sync_configs),
                    full_flush=self.full_flush,
                    catalog_client=catalog_client,
                    operation_id=self.uuid,
                ))
        logger.info("Feed sync complete (operation_id={})".format(self.uuid))
        return updated_data_feeds
    except Exception as e:
        error = e
        logger.exception(
            "Failure refreshing and syncing feeds. (operation_id={})".format(
                self.uuid))
        raise
    finally:
        end_time = datetime.datetime.utcnow()

        # log feed sync event (best-effort)
        try:
            if error:
                notify_event(
                    FeedSyncTaskFailed(
                        groups=list(self.sync_configs.keys())
                        if self.sync_configs else "all",
                        error=error,
                    ),
                    catalog_client,
                    self.uuid,
                )
            else:
                notify_event(
                    FeedSyncTaskCompleted(groups=list(self.sync_configs.keys())
                                          if self.sync_configs else "all"),
                    catalog_client,
                    self.uuid,
                )
        except Exception:
            logger.exception(
                "Ignoring event generation error after feed sync (operation_id={})"
                .format(self.uuid))

        try:
            # Catch images added while the sync was running
            get_vulnerabilities_provider().rescan_images_loaded_during_feed_sync(
                self.uuid, from_time=start_time, to_time=end_time)
        except Exception:
            logger.exception(
                "Unexpected exception rescanning vulns for images added during the feed sync. (operation_id={})"
                .format(self.uuid))
            raise
        finally:
            end_session()
def tearDownClass(cls):
    # Nothing to tear down since tests run against an in-mem db; just make
    # sure the thread-scoped db session is released.
    end_session()