def create_feed_update(notification):
    """
    Creates a feed data update notification and executes a sync of the selected feeds.

    :param notification: notification payload (dict parsed from the JSON request body)
    :return: (flask json response of 'feed/group' strings synced, 200) on success;
             aborts with 400 on a non-JSON request or 500 on a sync failure
    """
    if not connexion.request.is_json:
        abort(400)

    notification = FeedUpdateNotification.from_dict(notification)
    result = []
    try:
        feeds = get_selected_feeds_to_sync(localconfig.get_config())
        task = FeedsUpdateTask(feeds_to_sync=feeds)
        result = task.execute()
    except HTTPException:
        # Already a proper HTTP error (e.g. from abort()); let it propagate as-is.
        raise
    except Exception as e:
        log.exception('Error executing feed update task')
        # BUG FIX: the original formatted e.message, but Python 3 exceptions have
        # no .message attribute, so building this error response raised
        # AttributeError instead of returning the intended 500 payload.
        abort(
            Response(status=500,
                     response=json.dumps({
                         'error': 'feed sync failure',
                         'details': 'Failure syncing feed: {}'.format(e)
                     }),
                     mimetype='application/json'))

    # result is a list of (feed_name, group_name) tuples
    return jsonify(['{}/{}'.format(x[0], x[1]) for x in result]), 200
def sync_feeds(sync=True, force_flush=False):
    """
    POST /feeds?sync=True&force_flush=True

    :param sync: Boolean. If true, do a sync. If false, don't sync.
    :param force_flush: Boolean. If true, remove all previous data and replace with data from upstream source
    :return: (flask json response, http status code) tuple
    """
    # Guard clause: nothing to do when no sync was requested.
    if not sync:
        return jsonify([]), 200

    try:
        updated = FeedsUpdateTask.run_feeds_update(force_flush=force_flush)
    except (LeaseAcquisitionFailedError, LeaseUnavailableError) as e:
        # Another sync holds the distributed lease; report conflict rather than failing hard.
        log.exception(
            'Could not acquire lock on feed sync, likely another sync already in progress'
        )
        return make_response_error(
            'Feed sync lock already held',
            in_httpcode=409,
            details={
                'error_codes': [AnchoreError.FEED_SYNC_ALREADY_IN_PROGRESS.name],
                'message': AnchoreError.FEED_SYNC_ALREADY_IN_PROGRESS.value
            }), 409
    except Exception as e:
        log.exception('Error executing feed update task')
        return jsonify(make_response_error(e, in_httpcode=500)), 500

    return jsonify(updated), 200
def do_feed_sync(msg):
    """
    Handle a queued feed-sync message by running a full feed update task.

    :param msg: message dict; msg.get('data') (if present) is forwarded to the update task
    :return: None; records a success/fail runtime metric in all cases
    """
    # BUG FIX: the original guarded these imports with
    # "if 'NAME' not in locals()", which is always true at function entry
    # (no locals are bound yet), so the conditionals were dead code.
    # Import unconditionally; Python's module cache makes repeat imports cheap.
    from anchore_engine.services.policy_engine.engine.tasks import FeedsUpdateTask
    from anchore_engine.services.policy_engine.engine.feeds import get_selected_feeds_to_sync

    handler_success = False
    timer = time.time()
    logger.info("FIRING: feed syncer")
    try:
        # feeds is computed only for the informational log line; the task itself
        # derives its sync set from the message payload.
        feeds = get_selected_feeds_to_sync(localconfig.get_config())
        logger.info('Syncing configured feeds: {}'.format(feeds))
        result = FeedsUpdateTask.run_feeds_update(json_obj=msg.get('data'))
        if result is not None:
            handler_success = True
        else:
            # logger.warning() replaces the deprecated logger.warn() alias
            logger.warning('Feed sync task marked as disabled, so skipping')
    except ValueError:
        logger.warning('Received msg of wrong type')
    except Exception as err:
        logger.warning("failure in feed sync handler - exception: " + str(err))

    # Single metrics call with a computed status replaces the original's
    # duplicated success/fail branches.
    anchore_engine.subsys.metrics.summary_observe(
        'anchore_monitor_runtime_seconds',
        time.time() - timer,
        function='do_feed_sync',
        status="success" if handler_success else "fail")
def sync_feeds(sync=True, force_flush=False):
    """
    POST /feeds?sync=True&force_flush=True

    :param sync: Boolean. If true, do a sync. If false, don't sync.
    :param force_flush: Boolean. If true, remove all previous data and replace with data from upstream source
    :return: (flask json response, http status code) tuple
    """
    synced = []
    if sync:
        try:
            synced = FeedsUpdateTask.run_feeds_update(force_flush=force_flush)
        except LeaseAcquisitionFailedError:
            # Another sync already holds the lease; surface a conflict instead of an error.
            log.exception(
                'Could not acquire lock on feed sync, likely another sync already in progress'
            )
            return make_response_error(
                'Failed to execute feed sync',
                in_httpcode=409,
                detail=
                'Could not acquire lock on feed sync, likely another sync already in progress'
            ), 409
        except Exception:
            log.exception('Error executing feed update task')
            return make_response_error(
                'Failed to execute feed sync due to an internal error',
                in_httpcode=500), 500

    # Render each (feed, group) tuple as "feed/group"
    return jsonify(['{}/{}'.format(pair[0], pair[1]) for pair in synced]), 200
def do_feed_sync(msg):
    """
    Handle a queued feed-sync message by running a full feed update task.

    :param msg: message dict; msg.get("data") (if present) is forwarded to the update task
    :return: None; records a success/fail runtime metric in all cases
    """
    # BUG FIX: "if 'FeedsUpdateTask' not in locals()" is always true at function
    # entry (no locals are bound yet), so the guard was dead code.
    # Import unconditionally; Python's module cache makes repeat imports cheap.
    from anchore_engine.services.policy_engine.engine.tasks import FeedsUpdateTask

    handler_success = False
    timer = time.time()
    logger.info("FIRING: feed syncer")
    try:
        result = FeedsUpdateTask.run_feeds_update(json_obj=msg.get("data"))
        if result is not None:
            handler_success = True
        else:
            # logger.warning() replaces the deprecated logger.warn() alias
            logger.warning("Feed sync task marked as disabled, so skipping")
    except ValueError:
        logger.warning("Received msg of wrong type")
    except Exception as err:
        logger.warning("failure in feed sync handler - exception: " + str(err))

    # Single metrics call with a computed status replaces the original's
    # duplicated success/fail branches.
    anchore_engine.subsys.metrics.summary_observe(
        "anchore_monitor_runtime_seconds",
        time.time() - timer,
        function="do_feed_sync",
        status="success" if handler_success else "fail",
    )
def sync_feeds(sync=True, force_flush=False):
    """
    POST /feeds?sync=True&force_flush=True

    :param sync: Boolean. If true, do a sync. If false, don't sync.
    :param force_flush: Boolean. If true, remove all previous data and replace with data from upstream source
    :return: (flask json response of 'feed/group' strings, 200) on success
    """
    updated = []
    if sync:
        try:
            updated = FeedsUpdateTask.run_feeds_update(force_flush=force_flush)
        except HTTPException:
            # Already a proper HTTP error; propagate unchanged.
            raise
        except Exception as e:
            log.exception('Error executing feed update task')
            # Older exception types may still expose .message; fall back to the
            # exception itself otherwise.
            detail = e.message if hasattr(e, 'message') else e
            payload = json.dumps({
                'error': 'feed sync failure',
                'details': 'Failure syncing feed: {}'.format(detail)
            })
            abort(Response(status=500, response=payload, mimetype='application/json'))

    return jsonify(['{}/{}'.format(pair[0], pair[1]) for pair in updated]), 200
def cls_fully_loaded_test_env(cls_test_data_env2, request):
    """
    Load the test env, including a feed sync and image analysis. Places the env
    in the class's test_env and test_image vars

    :param cls_test_data_env:
    :param request:
    :return:
    """
    _init_distro_mappings()
    from anchore_engine.services.policy_engine.engine.tasks import FeedsUpdateTask

    # Sync the full set of feeds the tests rely on
    feed_names = ['vulnerabilities', 'packages', 'nvdv2', 'nvd', 'vulndb']
    FeedsUpdateTask(feeds_to_sync=feed_names).execute()

    # Analyze every exported image fixture from the test environment
    for img_id, export_path in request.cls.test_env.image_exports():
        logger.info('Ensuring loaded: image id: {} from file: {}'.format(img_id, export_path))
        ImageLoadTask(image_id=img_id, user_id='0', url='file://' + export_path).execute()

    # Stash the default image record on the test class for convenient access
    session = get_thread_scoped_session()
    default_name = request.cls.__default_image__
    image_key = (request.cls.test_env.get_images_named(default_name)[0][0], '0')
    request.cls.test_image = session.query(Image).get(image_key)
    session.rollback()
def test_feed_task(test_data_env, anchore_db):
    """Run a full feed sync and verify the expected feed/group/record counts."""
    logger.info('Running a feed sync with config: {}'.format(localconfig.get_config()))
    FeedsUpdateTask().execute()

    with session_scope() as db:
        all_feeds = db.query(FeedMetadata).all()
        logger.info('{}'.format(all_feeds))
        assert len(all_feeds) == 4  # packages, vulns, snyk, nvd

        all_groups = db.query(FeedGroupMetadata).all()
        # See the test/data/test_data_env/feeds dir for the proper count here
        logger.info('{}'.format(all_groups))
        assert len(all_groups) == 11

        # ToDo: set the source data to a small number and make this an exact count
        assert db.query(Vulnerability).count() > 0
        assert db.query(NpmMetadata).count() > 0
        assert db.query(GemMetadata).count() > 0
        assert db.query(NvdMetadata).count() == 0
# Fragment of a vulnerability-update exercise: persist the image and package,
# feed the CVE through the update task, then begin building a second CVE record.
# NOTE(review): this appears to be an excerpt of a larger test body
# (test_img_id, test_user_id, test_image, test_package, test_cve are bound
# upstream) — confirm against the enclosing function.

# Persist the test image and its package; a duplicate row is tolerated.
db = get_session()
try:
    db.add(test_image)
    db.add(test_package)
    db.commit()
except sqlalchemy.exc.IntegrityError as e:
    # Fixture rows already present from a prior run; continue with existing data.
    db.rollback()
except Exception as e:
    log.exception('Unexpected failure')
    raise

# Insert the CVE and run the vulnerability-matching update; best-effort:
# failures are logged and rolled back rather than propagated.
db = get_session()
try:
    db.add(test_cve)
    FeedsUpdateTask.process_updated_vulnerability(db, test_cve)
    db.commit()
except:
    log.exception('Failed!')
    db.rollback()
finally:
    # Always report the image's resulting vulnerability matches on a fresh session.
    db = get_session()
    i = db.query(Image).get((test_img_id, test_user_id))
    print(('Vulns: {}'.format(i.vulnerabilities())))
    db.commit()

# Second revision of the same CVE id (severity downgraded to Medium) for the
# update/merge path exercised later.
test_cve2 = Vulnerability(id='CVE123', namespace_name='centos:7')
test_cve2.severity = 'Medium'
test_cve2.description = 'some test cve'
test_cve2.cvss2_score = '1.0'
test_cve2.metadata_json = {}
def testFeedLoader(self):
    """Execute a full feed update task end-to-end."""
    task = FeedsUpdateTask()
    task.execute()
def test_cve_updates(test_data_env):
    """
    Exercise FeedsUpdateTask.process_updated_vulnerability: build an image with
    one vulnerable package, insert a matching CVE, then re-process an updated
    revision of the same CVE and print the image's vulnerability matches after
    each pass.

    :param test_data_env: test environment fixture providing feed initialization
    """
    test_env = test_data_env
    test_env.init_feeds()

    test_user_id = 'test1'
    test_img_id = 'img1'

    # --- Image fixture: a minimal analyzed centos:7 image record ---
    test_image = Image(user_id=test_user_id, id=test_img_id, distro_name='centos', distro_version='7')
    test_image.familytree_json = [test_img_id]
    test_image.layers_json = [test_img_id]
    test_image.layer_info_json = ['somelayer_here']
    test_image.like_distro = 'centos'
    test_image.state = 'analyzed'
    test_image.digest = 'digest1'
    test_image.anchore_type = 'undefined'
    test_image.dockerfile_mode = 'Guessed'
    test_image.docker_history_json = ['line1', 'line2']
    test_image.docker_data_json = {'Config': {}, 'ContainerConfig': {}}
    test_image.dockerfile_contents = 'FROM BLAH'

    # --- Package fixture: version 1.0 RPM attached to the image ---
    test_package = ImagePackage(image_user_id=test_user_id, image_id=test_img_id, name='testpackage', version='1.0', pkg_type='RPM')
    test_package.src_pkg = 'testpackage'
    test_package.distro_name = 'centos'
    test_package.distro_version = '7'
    test_package.like_distro = 'centos'
    test_package.license = 'apache2'
    test_package.fullversion = '1.0'
    test_package.normalized_src_pkg = '1.0'
    test_package.release = ''
    test_package.size = 1000
    test_package.origin = 'upstream'
    test_package.arch = 'x86_64'
    test_package.image = test_image

    # --- CVE fixture: High severity, fixed in 1.1, vulnerable at 0.9 ---
    test_cve = Vulnerability(id='CVE123', namespace_name='centos:7')
    test_cve.severity = 'High'
    test_cve.description = 'some test cve'
    test_cve.cvss2_score = '1.0'
    test_cve.metadata_json = {}
    test_cve.cvss2_vectors = ''
    test_cve.link = 'http://mitre.com/cve123'

    # Fix record: package is fixed at 1.1, so the installed 1.0 matches
    test_fixedin = FixedArtifact(vulnerability_id=test_cve.id)
    test_fixedin.name = 'testpackage'
    test_fixedin.version = '1.1'
    test_fixedin.version_format = 'rpm'
    test_fixedin.epochless_version = '1.1'
    test_fixedin.include_later_versions = True
    test_fixedin.parent = test_cve
    test_cve.fixed_in = [test_fixedin]

    # Vulnerable-artifact record pinned to exactly version 0.9
    test_vulnin = VulnerableArtifact(vulnerability_id=test_cve.id)
    test_vulnin.name = 'testpackage'
    test_vulnin.version = '0.9'
    test_vulnin.epochless_version = '0.9'
    test_vulnin.namespace_name = 'centos:7'
    test_vulnin.version_format = 'rpm'
    test_vulnin.include_previous_versions = False
    test_vulnin.parent = test_cve
    test_cve.vulnerable_in = [test_vulnin]

    # Persist the image and package; a duplicate row from a prior run is tolerated.
    db = get_session()
    try:
        db.add(test_image)
        db.add(test_package)
        db.commit()
    except sqlalchemy.exc.IntegrityError as e:
        # Fixture rows already exist; continue with existing data.
        db.rollback()
    except Exception as e:
        logger.exception('Unexpected failure')
        raise

    # First pass: insert the CVE and run vulnerability matching. Best-effort:
    # failures are logged and rolled back rather than propagated.
    db = get_session()
    try:
        db.add(test_cve)
        FeedsUpdateTask.process_updated_vulnerability(db, test_cve)
        db.commit()
    except:
        logger.exception('Failed!')
        db.rollback()
    finally:
        # Always report the image's resulting matches on a fresh session.
        db = get_session()
        i = db.query(Image).get((test_img_id, test_user_id))
        print(('Vulns: {}'.format(i.vulnerabilities())))
        db.commit()

    # --- Updated revision of the same CVE id: severity downgraded, different fix ---
    test_cve2 = Vulnerability(id='CVE123', namespace_name='centos:7')
    test_cve2.severity = 'Medium'
    test_cve2.description = 'some test cve'
    test_cve2.cvss2_score = '1.0'
    test_cve2.metadata_json = {}
    test_cve2.cvss2_vectors = ''
    test_cve2.link = 'http://mitre.com/cve123'
    fix2 = FixedArtifact(name='pkg2', version='1.2', epochless_version='1.2')
    fix2.namespace_name = 'centos:7'
    fix2.vulnerability_id = test_cve2.id
    test_cve2.fixed_in = [fix2]

    # Second pass: merge the updated CVE (same primary key) and re-process it.
    db = get_session()
    try:
        t2 = db.merge(test_cve2)
        db.add(t2)
        FeedsUpdateTask.process_updated_vulnerability(db, t2)
        db.commit()
    except:
        logger.exception('Failed!')
        db.rollback()
    finally:
        db = get_session()
        i = db.query(Image).get((test_img_id, test_user_id))
        print(('Vulns: {}'.format(i.vulnerabilities())))
        db.commit()