def sync_feeds(test_env, up_to=None):
    if up_to:
        test_env.set_max_feed_time(up_to)

    logger.info("Syncing vuln and packages")
    DataFeeds.__scratch_dir__ = "/tmp"
    DataFeeds.sync(["vulnerabilities", "packages"], feed_client=test_env.feed_client)
    logger.info("Sync complete")
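# --- Illustrative usage (not part of the original suite) -------------------
# A minimal sketch of driving sync_feeds with a mocked environment. MagicMock
# stands in for the LocalTestDataEnvironment that the real fixtures provide;
# with a mocked feed_client no real network I/O occurs, though deeper
# DataFeeds internals may still object to mock return values.
def _example_sync_feeds_usage():
    import datetime
    from unittest.mock import MagicMock

    env = MagicMock()
    # Arbitrary example cutoff; sync_feeds forwards it to set_max_feed_time.
    sync_feeds(env, up_to=datetime.datetime(2019, 1, 1))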
def cls_fully_loaded_test_env(cls_test_data_env2, request):
    """
    Load the test env, including a feed sync and image analysis.
    Places the env in the class's test_env and test_image vars.

    :param cls_test_data_env2:
    :param request:
    :return:
    """
    _init_distro_mappings()
    DataFeeds.__scratch_dir__ = "/tmp"
    DataFeeds.sync(
        to_sync=["vulnerabilities", "packages", "nvdv2", "vulndb"],
        feed_client=request.cls.test_env.feed_client,
    )
    load_images(request)
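# Hypothetical consumer of the fixture above (class and test names are
# assumptions for illustration): cls_test_data_env2 is expected to have placed
# a test_env on the class before this fixture syncs feeds and loads images.
#
#   @pytest.mark.usefixtures("cls_fully_loaded_test_env")
#   class TestWithLoadedEnv:
#       def test_env_is_ready(self):
#           assert self.test_env is not None
#           assert self.test_image is not None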
def test_sync_fail(test_data_env):
    DataFeeds.__scratch_dir__ = "/tmp"

    # No such feed
    result = DataFeeds.sync(to_sync=["nvd"], feed_client=test_data_env.feed_client)
    assert len(result) == 1
    assert result[0]["status"] == "failure"

    DataFeeds.__scratch_dir__ = "/tmp"
    result = DataFeeds.sync(
        to_sync=["vulnerabilities", "packages", "nvdv2", "vulndb"],
        feed_client=test_data_env.feed_client,
    )
    assert len(result) == 4
    assert not any(x["status"] == "failure" for x in result)
def run_legacy_sync(
    test_env: LocalTestDataEnvironment, to_sync: List[str]
) -> List[FeedSyncResult]:
    DataFeeds.__scratch_dir__ = "/tmp"
    feed_url = os.getenv("ANCHORE_GRYPE_DB_URL", "https://ancho.re/v1/service/feeds")
    data_clause = {}
    for feed_name in to_sync:
        data_clause[feed_name] = {"enabled": True, "url": feed_url}

    config = {
        "provider": "legacy",
        "sync": {
            "enabled": os.getenv("ANCHORE_FEEDS_ENABLED", True),
            "ssl_verify": os.getenv("ANCHORE_FEEDS_SSL_VERIFY", True),
            "connection_timeout_seconds": 3,
            "read_timeout_seconds": 60,
            "data": data_clause,
        },
    }

    vulnerabilities_provider = LegacyProvider()
    default_sync_config = vulnerabilities_provider.get_default_sync_config()
    sync_configs = compute_selected_configs_to_sync(
        provider="legacy",
        vulnerabilities_config=config,
        default_provider_sync_config=default_sync_config,
    )
    sync_utils = vulnerabilities_provider.get_sync_utils(sync_configs)
    sync_utils.get_client = MagicMock(return_value=test_env.feed_client)
    return DataFeeds.sync(sync_util_provider=sync_utils)
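# Example invocation (an assumption mirroring the tests in this module): run
# the legacy sync for the same feed set used elsewhere and check that results
# came back. FeedSyncResult's field names are not shown in this module, so the
# check stays deliberately shallow.
#
#   results = run_legacy_sync(test_env, ["vulnerabilities", "packages"])
#   assert results, "expected at least one FeedSyncResult"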
def test_vuln_sync(test_data_env):
    with session_scope() as db:
        vcount = db.query(Vulnerability).count()

    logger.info("Starting with {} vuln records".format(vcount))
    assert vcount == 0, "Not starting with empty table"

    logger.info("Syncing vulnerabilities")
    t = time.time()
    DataFeeds.__scratch_dir__ = "/tmp"
    DataFeeds.sync(to_sync=["vulnerabilities"], feed_client=test_data_env.feed_client)
    t = time.time() - t
    logger.info("Done with vulnerabilities. Took: {} sec".format(t))

    with session_scope() as db:
        logger.info("Has {} vuln records".format(db.query(Vulnerability).count()))
def test_package_sync(test_data_env):
    with session_scope() as db:
        ncount = db.query(NpmMetadata).count()
        gcount = db.query(GemMetadata).count()

    assert ncount == 0, "Not starting with empty table"
    assert gcount == 0, "Not starting with empty table"

    logger.info("Syncing packages")
    t = time.time()
    DataFeeds.__scratch_dir__ = "/tmp"
    DataFeeds.sync(to_sync=["packages"], feed_client=test_data_env.feed_client)
    t = time.time() - t
    logger.info("Done with packages. Took: {} sec".format(t))

    with session_scope() as db:
        ncount = db.query(NpmMetadata).count()
        gcount = db.query(GemMetadata).count()
        logger.info("Has {} npm records".format(ncount))
        logger.info("Has {} gem records".format(gcount))
def execute(self):
    logger.info("Starting feed sync. (operation_id={})".format(self.uuid))

    # Feed syncs will update the images with any new cves that are pulled in
    # for the sync. As such, any images that are loaded while the sync itself
    # is in progress need to be re-scanned for cves, since the transaction
    # ordering can result in the images being loaded with data prior to sync
    # but not included in the sync process itself.

    # Create feed task begin event
    error = None
    with session_scope() as session:
        mgr = identities.manager_factory.for_session(session)
        catalog_client = internal_client_for(CatalogClient, userId=None)

        try:
            notify_event(
                FeedSyncTaskStarted(groups=self.feeds if self.feeds else "all"),
                catalog_client,
                self.uuid,
            )
        except Exception:
            logger.exception(
                "Ignoring event generation error before feed sync. (operation_id={})".format(
                    self.uuid
                )
            )

        start_time = datetime.datetime.utcnow()
        try:
            start_time = datetime.datetime.utcnow()
            updated_dict = DataFeeds.sync(
                to_sync=self.feeds,
                full_flush=self.full_flush,
                catalog_client=catalog_client,
                operation_id=self.uuid,
            )
            logger.info("Feed sync complete (operation_id={})".format(self.uuid))
            return updated_dict
        except Exception as e:
            error = e
            logger.exception(
                "Failure refreshing and syncing feeds. (operation_id={})".format(
                    self.uuid
                )
            )
            raise
        finally:
            end_time = datetime.datetime.utcnow()
            # log feed sync event
            try:
                if error:
                    notify_event(
                        FeedSyncTaskFailed(
                            groups=self.feeds if self.feeds else "all",
                            error=error,
                        ),
                        catalog_client,
                        self.uuid,
                    )
                else:
                    notify_event(
                        FeedSyncTaskCompleted(
                            groups=self.feeds if self.feeds else "all"
                        ),
                        catalog_client,
                        self.uuid,
                    )
            except Exception:
                logger.exception(
                    "Ignoring event generation error after feed sync (operation_id={})".format(
                        self.uuid
                    )
                )

            try:
                self.rescan_images_created_between(
                    from_time=start_time, to_time=end_time
                )
            except Exception:
                logger.exception(
                    "Unexpected exception rescanning vulns for images added during the feed sync. (operation_id={})".format(
                        self.uuid
                    )
                )
                raise
            finally:
                end_session()
def execute(self) -> List[FeedSyncResult]:
    logger.info("Starting feed sync. (operation_id={})".format(self.uuid))

    # Feed syncs will update the images with any new cves that are pulled in
    # for the sync. As such, any images that are loaded while the sync itself
    # is in progress need to be re-scanned for cves, since the transaction
    # ordering can result in the images being loaded with data prior to sync
    # but not included in the sync process itself.

    # Create feed task begin event
    error = None
    with session_scope() as session:
        mgr = identities.manager_factory.for_session(session)
        catalog_client = internal_client_for(CatalogClient, userId=None)

        try:
            notify_event(
                FeedSyncTaskStarted(
                    groups=list(self.sync_configs.keys())
                    if self.sync_configs
                    else "all"
                ),
                catalog_client,
                self.uuid,
            )
        except Exception:
            logger.exception(
                "Ignoring event generation error before feed sync. (operation_id={})".format(
                    self.uuid
                )
            )

        start_time = datetime.datetime.utcnow()
        try:
            start_time = datetime.datetime.utcnow()
            updated_data_feeds = list()
            updated_data_feeds.extend(
                DataFeeds.sync(
                    sync_util_provider=GrypeProvider().get_sync_utils(
                        self.sync_configs
                    ),
                    full_flush=self.full_flush,
                    catalog_client=catalog_client,
                    operation_id=self.uuid,
                )
            )
            updated_data_feeds.extend(
                DataFeeds.sync(
                    sync_util_provider=LegacyProvider().get_sync_utils(
                        self.sync_configs
                    ),
                    full_flush=self.full_flush,
                    catalog_client=catalog_client,
                    operation_id=self.uuid,
                )
            )
            logger.info("Feed sync complete (operation_id={})".format(self.uuid))
            return updated_data_feeds
        except Exception as e:
            error = e
            logger.exception(
                "Failure refreshing and syncing feeds. (operation_id={})".format(
                    self.uuid
                )
            )
            raise
        finally:
            end_time = datetime.datetime.utcnow()
            # log feed sync event
            try:
                if error:
                    notify_event(
                        FeedSyncTaskFailed(
                            groups=list(self.sync_configs.keys())
                            if self.sync_configs
                            else "all",
                            error=error,
                        ),
                        catalog_client,
                        self.uuid,
                    )
                else:
                    notify_event(
                        FeedSyncTaskCompleted(
                            groups=list(self.sync_configs.keys())
                            if self.sync_configs
                            else "all"
                        ),
                        catalog_client,
                        self.uuid,
                    )
            except Exception:
                logger.exception(
                    "Ignoring event generation error after feed sync (operation_id={})".format(
                        self.uuid
                    )
                )

            try:
                get_vulnerabilities_provider().rescan_images_loaded_during_feed_sync(
                    self.uuid, from_time=start_time, to_time=end_time
                )
            except Exception:
                logger.exception(
                    "Unexpected exception rescanning vulns for images added during the feed sync. (operation_id={})".format(
                        self.uuid
                    )
                )
                raise
            finally:
                end_session()
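# Sketch of a driver for execute() above (assumptions flagged): the method is
# written as if it belongs to a feed-sync task object carrying `uuid`,
# `sync_configs`, and `full_flush`. The task class name and constructor
# arguments below are hypothetical, for illustration only.
#
#   task = FeedsUpdateTask(sync_configs=sync_configs, full_flush=False)
#   results = task.execute()  # -> List[FeedSyncResult]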