def test_analyze_layer_missing_storage(self):
    """
    Tests trying to analyze a layer with missing storage.

    The layer should end up marked as analyzed (engine version 1) but with
    `security_indexed` False, since the blob cannot be fetched.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # NOTE: assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(-1, layer.security_indexed_engine)

    # Delete the storage for the layer.
    path = model.storage.get_layer_path(layer.storage)
    locations = app.config["DISTRIBUTED_STORAGE_PREFERENCE"]
    storage.remove(locations, path)
    storage.remove(locations, "all_files_exist")

    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, 1)
def test_layer_gc(self):
    """
    Tests that garbage-collecting a layer also removes it from the
    security scanner.
    """
    tag_image = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                        include_storage=True)

    # Delete the prod tag so that only the `latest` tag remains.
    model.tag.delete_tag(ADMIN_ACCESS_USER, SIMPLE_REPO, "prod")

    with fake_security_scanner() as security_scanner:
        # Analyze the layer and verify the scanner now knows about it.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(tag_image)

        tag_image = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(tag_image, security_scanner, True, 1)
        self.assertTrue(
            security_scanner.has_layer(security_scanner.layer_id(tag_image)))

        # Make deleted tags expire immediately so GC can reclaim the image.
        namespace_user = model.user.get_user(ADMIN_ACCESS_USER)
        model.user.change_user_tag_expiration(namespace_user, 0)

        # Delete the tag in the repository and GC.
        model.tag.delete_tag(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        time.sleep(1)

        gc_target = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
        model.gc.garbage_collect_repo(gc_target)

        # Ensure that the security scanner no longer has the image.
        self.assertFalse(
            security_scanner.has_layer(security_scanner.layer_id(tag_image)))
def test_analyze_layer_unsupported_parent(self):
    """
    Tests that attempting to analyze a layer whose parent is unanalyzable,
    results in the layer being marked as analyzed, but failed.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        # Make it so trying to analyze the parent will fail.
        security_scanner.set_fail_layer_id(
            security_scanner.layer_id(layer.parent))

        # Attempt to analyze the layer and its parents. This should mark the layer
        # itself as unanalyzable.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, 1)
def test_notification_new_layers_not_vulnerable(self):
    """
    Tests that a security-scanner notification for a newly-analyzed layer with
    no vulnerabilities does not enqueue any repository events.
    """
    # Quotes normalized to double quotes for consistency with the rest of this file.
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    layer_id = "%s.%s" % (layer.docker_image_id, layer.storage.uuid)

    # Add a repo event for the layer.
    repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    model.notification.create_repo_notification(repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})

    # Ensure that there are no event queue items for the layer.
    self.assertIsNone(notification_queue.get())

    # Fire off the notification processing.
    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

        # Add a notification for the layer.
        notification_data = security_scanner.add_notification([layer_id], [], {}, {})

        # Process the notification.
        # NOTE(review): `test_notification_new_layers` calls this with
        # `(self.api, notification_data)` — confirm the expected signature.
        self.assertTrue(process_notification_data(notification_data))

        # Ensure that there are no event queue items for the layer.
        self.assertIsNone(notification_queue.get())
def test_analyze_layer_unexpected_status(self):
    """
    Tests that a response from a scanner with an unexpected status code fails correctly.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        # Make it so trying to analyze the parent will fail with an error.
        security_scanner.set_unexpected_status_layer_id(
            security_scanner.layer_id(layer.parent))

        # Try to analyze the layer and its parents, but with one request causing an error.
        analyzer = LayerAnalyzer(app.config, self.api)
        with self.assertRaises(APIRequestFailure):
            analyzer.analyze_recursively(layer)

        # Make sure it isn't analyzed.
        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, -1)
def test_notification_no_new_layers(self):
    """
    Tests that a security-scanner notification carrying no new layers does
    not enqueue any repository events.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)

    # Register a vulnerability_found event on the repository.
    repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    model.notification.create_repo_notification(repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})

    # The event queue must start empty.
    self.assertIsNone(notification_queue.get())

    # Fire off the notification processing.
    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

        # Add a notification that names no layers at all.
        notification_data = security_scanner.add_notification([], [], {}, {})

        # Processing should succeed...
        self.assertTrue(process_notification_data(notification_data))

        # ...and still leave the event queue empty.
        self.assertIsNone(notification_queue.get())
def test_analyze_layer_error(self):
    """
    Tests that failing to analyze a layer (because it 400s) marks it as analyzed but failed.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        # Make it so trying to analyze the parent will fail with an error.
        security_scanner.set_error_layer_id(
            security_scanner.layer_id(layer.parent))

        # Try to analyze the layer and its parents, but with one request causing an error.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        # Make sure it is marked as analyzed, but in a failed state.
        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, 1)
def __init__(self, app, instance_keys, storage):
    """
    Wires up the legacy (Clair V2) security scanner API and its layer analyzer.

    Args:
        app: the Flask-style application object; its `config` mapping is read
            for all scanner settings.
        instance_keys: passed through to `SecurityScannerAPI` for signing.
        storage: the distributed storage engine handed to the API client.

    Raises:
        InvalidConfigurationException: if the V2 scanner configuration fails
            validation.
    """
    self.app = app
    self._legacy_secscan_api = None

    # Validate the feature flag + endpoint before constructing anything.
    validator = V2SecurityConfigValidator(
        app.config.get("FEATURE_SECURITY_SCANNER", False),
        app.config.get("SECURITY_SCANNER_ENDPOINT"),
    )
    if not validator.valid():
        msg = "Failed to validate security scanner V2 configuration"
        logger.warning(msg)
        raise InvalidConfigurationException(msg)

    url_scheme_and_hostname = URLSchemeAndHostname(
        app.config["PREFERRED_URL_SCHEME"], app.config["SERVER_HOSTNAME"])

    # The URI creator needs a request context to build blob download URLs.
    self._legacy_secscan_api = SecurityScannerAPI(
        app.config,
        storage,
        app.config["SERVER_HOSTNAME"],
        app.config["HTTPCLIENT"],
        uri_creator=get_blob_download_uri_getter(
            app.test_request_context("/"), url_scheme_and_hostname),
        instance_keys=instance_keys,
    )

    # NOTE: This import is in here because otherwise this class would depend upon app.
    # Its not great, but as this is intended to be legacy until its removed, its okay.
    from util.secscan.analyzer import LayerAnalyzer

    self._target_version = app.config.get(
        "SECURITY_SCANNER_ENGINE_VERSION_TARGET", 3)
    self._analyzer = LayerAnalyzer(app.config, self._legacy_secscan_api)
def test_analyze_layer_success(self):
    """
    Tests that analyzing a layer successfully marks it as analyzed.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)
def test_analyze_layer_nodirectdownload_success(self):
    """
    Tests analyzing a layer when direct download is disabled.
    """
    # Disable direct download in fake storage.
    storage.put_content(["local_us"], "supports_direct_download", "false")

    try:
        app.register_blueprint(v2_bp, url_prefix="/v2")
    except Exception:
        # Already registered. (Narrowed from a bare `except:`, which would
        # also swallow KeyboardInterrupt/SystemExit.)
        pass

    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    # Ensure that the download is a registry+JWT download.
    uri, auth_header = self.api._get_image_url_and_auth(layer)
    self.assertIsNotNone(uri)
    self.assertIsNotNone(auth_header)

    # Ensure the download doesn't work without the header.
    rv = self.app.head(uri)
    self.assertEqual(rv.status_code, 401)

    # Ensure the download works with the header. Note we use a HEAD here, as GET causes DB
    # access which messes with the test runner's rollback.
    rv = self.app.head(uri, headers=[("authorization", auth_header)])
    self.assertEqual(rv.status_code, 200)

    # Ensure the code works when called via analyze.
    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)
def test_analyze_layer_failure(self):
    """
    Tests that failing to analyze a layer (because it 422s) marks it as analyzed but failed.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        security_scanner.set_fail_layer_id(
            security_scanner.layer_id(layer))

        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, 1)
def test_analyze_layer_internal_error(self):
    """
    Tests that failing to analyze a layer (because it 500s) marks it as not analyzed.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        security_scanner.set_internal_error_layer_id(
            security_scanner.layer_id(layer))

        analyzer = LayerAnalyzer(app.config, self.api)
        with self.assertRaises(APIRequestFailure):
            analyzer.analyze_recursively(layer)

        # Engine version stays -1: an internal error must not mark the layer analyzed.
        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, -1)
def test_analyze_layer_missing_parent_handled(self):
    """
    Tests that a missing parent causes an automatic reanalysis, which succeeds.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        # Analyze the layer and its parents.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        # Make sure it was analyzed.
        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

        # Mark the layer as not yet scanned.
        layer.security_indexed_engine = IMAGE_NOT_SCANNED_ENGINE_VERSION
        layer.security_indexed = False
        layer.save()

        # Remove the layer's parent entirely from the security scanner.
        security_scanner.remove_layer(
            security_scanner.layer_id(layer.parent))

        # Analyze again, which should properly re-analyze the missing parent and this layer.
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)
def __init__(self):
    """
    Configures the security worker: validates the scanner settings and, when
    valid, schedules periodic indexing of unscanned images.

    If validation fails the worker is left unconfigured (no operation added)
    rather than raising.
    """
    super(SecurityWorker, self).__init__()
    validator = SecurityConfigValidator(
        app.config.get("FEATURE_SECURITY_SCANNER", False),
        app.config.get("SECURITY_SCANNER_ENDPOINT"),
    )
    if not validator.valid():
        logger.warning("Failed to validate security scan configuration")
        return

    # Engine version we index toward; 3 is the configured default.
    self._target_version = app.config.get("SECURITY_SCANNER_ENGINE_VERSION_TARGET", 3)
    self._analyzer = LayerAnalyzer(app.config, secscan_api)
    # Pagination token for resuming candidate iteration between runs.
    self._next_token = None

    interval = app.config.get("SECURITY_SCANNER_INDEXING_INTERVAL", DEFAULT_INDEXING_INTERVAL)
    self.add_operation(self._index_images, interval)
def test_analyze_layer_invalid_parent(self):
    """
    Tests that trying to reanalyze a parent that is invalid causes the layer to be marked
    as analyzed, but failed.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    with fake_security_scanner() as security_scanner:
        # Analyze the layer and its parents.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        # Make sure it was analyzed.
        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

        # Mark the layer as not yet scanned.
        layer.security_indexed_engine = IMAGE_NOT_SCANNED_ENGINE_VERSION
        layer.security_indexed = False
        layer.save()

        # Remove the layer's parent entirely from the security scanner.
        security_scanner.remove_layer(
            security_scanner.layer_id(layer.parent))

        # Make it so trying to analyze the parent will fail.
        security_scanner.set_error_layer_id(
            security_scanner.layer_id(layer.parent))

        # Try to analyze again, which should try to reindex the parent and fail.
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, False, 1)
def test_notification_new_layers(self):
    """
    Tests that a notification naming a newly-vulnerable layer queues a
    repository event carrying the vulnerability details.
    """
    layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                              include_storage=True)
    layer_id = "%s.%s" % (layer.docker_image_id, layer.storage.uuid)

    # Add a repo event for the layer.
    repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    model.notification.create_repo_notification(repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})

    # Ensure that there are no event queue items for the layer.
    self.assertIsNone(notification_queue.get())

    # Fire off the notification processing.
    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

        vuln_info = {
            "Name": "CVE-TEST",
            "Namespace": "debian:8",
            "Description": "Some service",
            "Link": "https://security-tracker.debian.org/tracker/CVE-2014-9471",
            "Severity": "Low",
            "FixedIn": {
                "Version": "9.23-5"
            },
        }
        security_scanner.set_vulns(layer_id, [vuln_info])

        # Add a notification for the layer.
        notification_data = security_scanner.add_notification([], [layer_id],
                                                              vuln_info, vuln_info)

        # Process the notification.
        self.assertTrue(
            process_notification_data(self.api, notification_data))

        # Ensure an event was written for the tag.
        time.sleep(1)
        queue_item = notification_queue.get()
        self.assertIsNotNone(queue_item)

        # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
        item_body = json.loads(queue_item.body)
        self.assertEqual(sorted(["prod", "latest"]),
                         sorted(item_body["event_data"]["tags"]))
        self.assertEqual("CVE-TEST",
                         item_body["event_data"]["vulnerability"]["id"])
        self.assertEqual(
            "Low", item_body["event_data"]["vulnerability"]["priority"])
        self.assertTrue(
            item_body["event_data"]["vulnerability"]["has_fix"])
def assert_notification_worker_offset_pages(self, indexed=False):
    """
    Exercises the notification worker against a paginated notification whose
    'Old' and 'New' layer lists overlap across page boundaries, and asserts
    exactly one event is queued (the shared layer must not be double-notified).

    Args:
        indexed: when True, the fake scanner serves indexed (LayerName/Index)
            layer-ID entries; when False, plain layer-ID lists.
    """
    layer1 = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                     include_storage=True)
    layer2 = model.tag.get_tag_image(ADMIN_ACCESS_USER, COMPLEX_REPO, "prod",
                                     include_storage=True)

    # Add a repo events for the layers.
    simple_repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    complex_repo = model.repository.get_repository(ADMIN_ACCESS_USER, COMPLEX_REPO)

    model.notification.create_repo_notification(simple_repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})
    model.notification.create_repo_notification(complex_repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})

    # Ensure that there are no event queue items for the layer.
    self.assertIsNone(notification_queue.get())

    with fake_security_scanner() as security_scanner:
        # Test with an unknown notification.
        worker = SecurityNotificationWorker(None)
        self.assertFalse(
            worker.perform_notification_work(
                {"Name": "unknownnotification"}))

        # Add some analyzed layers.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer1)
        analyzer.analyze_recursively(layer2)

        # Add a notification with pages of data.
        new_vuln_info = {
            "Name": "CVE-TEST",
            "Namespace": "debian:8",
            "Description": "Some service",
            "Link": "https://security-tracker.debian.org/tracker/CVE-2014-9471",
            "Severity": "Critical",
            "FixedIn": {
                "Version": "9.23-5"
            },
        }

        security_scanner.set_vulns(security_scanner.layer_id(layer1), [new_vuln_info])
        security_scanner.set_vulns(security_scanner.layer_id(layer2), [new_vuln_info])

        # Define offsetting sets of layer IDs, to test cross-pagination support. In this test, we
        # will only serve 2 layer IDs per page: the first page will serve both of the 'New' layer IDs,
        # but since the first 2 'Old' layer IDs are "earlier" than the shared ID of
        # `devtable/simple:latest`, they won't get served in the 'New' list until the *second* page.
        # The notification handling system should correctly not notify for this layer, even though it
        # is marked 'New' on page 1 and marked 'Old' on page 2. Clair will serve these
        # IDs sorted in the same manner.
        idx_old_layer_ids = [
            {
                "LayerName": "old1",
                "Index": 1
            },
            {
                "LayerName": "old2",
                "Index": 2
            },
            {
                "LayerName": security_scanner.layer_id(layer1),
                "Index": 3
            },
        ]

        idx_new_layer_ids = [
            {
                "LayerName": security_scanner.layer_id(layer1),
                "Index": 3
            },
            {
                "LayerName": security_scanner.layer_id(layer2),
                "Index": 4
            },
        ]

        old_layer_ids = [t["LayerName"] for t in idx_old_layer_ids]
        new_layer_ids = [t["LayerName"] for t in idx_new_layer_ids]

        # When not testing indexed delivery, drop the indexed forms entirely.
        if not indexed:
            idx_old_layer_ids = None
            idx_new_layer_ids = None

        notification_data = security_scanner.add_notification(
            old_layer_ids,
            new_layer_ids,
            None,
            new_vuln_info,
            max_per_page=2,
            indexed_old_layer_ids=idx_old_layer_ids,
            indexed_new_layer_ids=idx_new_layer_ids,
        )

        # Test with a known notification with pages.
        data = {
            "Name": notification_data["Name"],
        }

        worker = SecurityNotificationWorker(None)
        self.assertTrue(
            worker.perform_notification_work(data, layer_limit=2))

        # Make sure all pages were processed by ensuring we have only one notification. If the second
        # page was not processed, then the `Old` entry for layer1 will not be found, and we'd get two
        # notifications.
        time.sleep(1)
        self.assertIsNotNone(notification_queue.get())
        self.assertIsNone(notification_queue.get())
def test_notification_worker(self):
    """
    Tests the notification worker end-to-end: an unknown notification is
    rejected, and a known paginated notification covering two vulnerable
    layers (in two different repos) queues two repository events.
    """
    layer1 = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                     include_storage=True)
    layer2 = model.tag.get_tag_image(ADMIN_ACCESS_USER, COMPLEX_REPO, "prod",
                                     include_storage=True)

    # Add a repo events for the layers.
    simple_repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    complex_repo = model.repository.get_repository(ADMIN_ACCESS_USER, COMPLEX_REPO)

    model.notification.create_repo_notification(simple_repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})
    model.notification.create_repo_notification(complex_repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})

    # Ensure that there are no event queue items for the layer.
    self.assertIsNone(notification_queue.get())

    with fake_security_scanner() as security_scanner:
        # Test with an unknown notification.
        worker = SecurityNotificationWorker(None)
        self.assertFalse(
            worker.perform_notification_work(
                {"Name": "unknownnotification"}))

        # Add some analyzed layers.
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer1)
        analyzer.analyze_recursively(layer2)

        # Add a notification with pages of data.
        new_vuln_info = {
            "Name": "CVE-TEST",
            "Namespace": "debian:8",
            "Description": "Some service",
            "Link": "https://security-tracker.debian.org/tracker/CVE-2014-9471",
            "Severity": "Critical",
            "FixedIn": {
                "Version": "9.23-5"
            },
        }

        security_scanner.set_vulns(security_scanner.layer_id(layer1), [new_vuln_info])
        security_scanner.set_vulns(security_scanner.layer_id(layer2), [new_vuln_info])

        layer_ids = [
            security_scanner.layer_id(layer1),
            security_scanner.layer_id(layer2)
        ]
        notification_data = security_scanner.add_notification(
            [], layer_ids, None, new_vuln_info)

        # Test with a known notification with pages.
        data = {
            "Name": notification_data["Name"],
        }

        worker = SecurityNotificationWorker(None)
        # layer_limit=2 forces pagination through both layers.
        self.assertTrue(
            worker.perform_notification_work(data, layer_limit=2))

        # Make sure all pages were processed by ensuring we have two notifications.
        time.sleep(1)
        self.assertIsNotNone(notification_queue.get())
        self.assertIsNotNone(notification_queue.get())
def test_notification_no_new_layers_increased_severity(self):
    """
    Tests that a severity upgrade on an existing layer (same layer listed as
    both 'Old' and 'New') queues an event, and that event-level filters on
    repo notifications behave as expected.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    layer_id = "%s.%s" % (layer.docker_image_id, layer.storage.uuid)

    # Add a repo event for the layer.
    repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    notification = model.notification.create_repo_notification(
        repo, "vulnerability_found", "quay_notification", {}, {"level": 100})

    # Ensure that there are no event queue items for the layer.
    self.assertIsNone(notification_queue.get())

    # Fire off the notification processing.
    with fake_security_scanner() as security_scanner:
        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

        old_vuln_info = {
            "Name": "CVE-TEST",
            "Namespace": "debian:8",
            "Description": "Some service",
            "Link": "https://security-tracker.debian.org/tracker/CVE-2014-9471",
            "Severity": "Low",
        }

        new_vuln_info = {
            "Name": "CVE-TEST",
            "Namespace": "debian:8",
            "Description": "Some service",
            "Link": "https://security-tracker.debian.org/tracker/CVE-2014-9471",
            "Severity": "Critical",
            "FixedIn": {
                "Version": "9.23-5"
            },
        }

        security_scanner.set_vulns(layer_id, [new_vuln_info])

        # Add a notification for the layer.
        notification_data = security_scanner.add_notification(
            [layer_id], [layer_id], old_vuln_info, new_vuln_info)

        # Process the notification.
        self.assertTrue(process_notification_data(notification_data))

        # Ensure an event was written for the tag.
        time.sleep(1)
        queue_item = notification_queue.get()
        self.assertIsNotNone(queue_item)

        # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
        item_body = json.loads(queue_item.body)
        self.assertEqual(sorted(["prod", "latest"]),
                         sorted(item_body["event_data"]["tags"]))
        self.assertEqual("CVE-TEST",
                         item_body["event_data"]["vulnerability"]["id"])
        self.assertEqual(
            "Critical", item_body["event_data"]["vulnerability"]["priority"])
        self.assertTrue(
            item_body["event_data"]["vulnerability"]["has_fix"])

        # Verify that an event would be raised.
        event_data = item_body["event_data"]
        notification = self.notification_tuple(notification)
        self.assertTrue(VulnerabilityFoundEvent().should_perform(
            event_data, notification))

        # Create another notification with a matching level and verify it will be raised.
        notification = model.notification.create_repo_notification(
            repo, "vulnerability_found", "quay_notification", {}, {"level": 1})
        notification = self.notification_tuple(notification)
        self.assertTrue(VulnerabilityFoundEvent().should_perform(
            event_data, notification))

        # Create another notification with a higher level and verify it won't be raised.
        notification = model.notification.create_repo_notification(
            repo, "vulnerability_found", "quay_notification", {}, {"level": 0})
        notification = self.notification_tuple(notification)
        self.assertFalse(VulnerabilityFoundEvent().should_perform(
            event_data, notification))
def assert_analyze_layer_notify(self, security_indexed_engine, security_indexed,
                                expect_notification):
    """
    Analyzes a layer whose pre-analysis state is forced to the given values and
    asserts whether a vulnerability_found event is (or is not) queued.

    Args:
        security_indexed_engine: engine version to stamp on the layer before analysis.
        security_indexed: indexed flag to stamp on the layer before analysis.
        expect_notification: whether an event is expected on the queue afterward.
    """
    layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest",
                                    include_storage=True)
    self.assertFalse(layer.security_indexed)
    # assertEquals is deprecated (removed in Python 3.12); assertEqual is canonical.
    self.assertEqual(-1, layer.security_indexed_engine)

    # Ensure there are no existing events.
    self.assertIsNone(notification_queue.get())

    # Add a repo event for the layer.
    repo = model.repository.get_repository(ADMIN_ACCESS_USER, SIMPLE_REPO)
    model.notification.create_repo_notification(repo, "vulnerability_found",
                                                "quay_notification", {},
                                                {"level": 100})

    # Update the layer's state before analyzing.
    layer.security_indexed_engine = security_indexed_engine
    layer.security_indexed = security_indexed
    layer.save()

    with fake_security_scanner() as security_scanner:
        security_scanner.set_vulns(
            security_scanner.layer_id(layer),
            [
                {
                    "Name": "CVE-2014-9471",
                    "Namespace": "debian:8",
                    "Description": "Some service",
                    "Link": "https://security-tracker.debian.org/tracker/CVE-2014-9471",
                    "Severity": "Low",
                    "FixedBy": "9.23-5",
                },
                {
                    "Name": "CVE-2016-7530",
                    "Namespace": "debian:8",
                    "Description": "Some other service",
                    "Link": "https://security-tracker.debian.org/tracker/CVE-2016-7530",
                    "Severity": "Unknown",
                    "FixedBy": "19.343-2",
                },
            ],
        )

        analyzer = LayerAnalyzer(app.config, self.api)
        analyzer.analyze_recursively(layer)

        layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
        self.assertAnalyzed(layer, security_scanner, True, 1)

    # Ensure an event was written for the tag (if necessary).
    time.sleep(1)
    queue_item = notification_queue.get()

    if expect_notification:
        self.assertIsNotNone(queue_item)

        body = json.loads(queue_item.body)
        self.assertEqual(set(["latest", "prod"]),
                         set(body["event_data"]["tags"]))
        self.assertEqual("CVE-2014-9471",
                         body["event_data"]["vulnerability"]["id"])
        self.assertEqual("Low",
                         body["event_data"]["vulnerability"]["priority"])
        self.assertTrue(body["event_data"]["vulnerability"]["has_fix"])

        self.assertEqual("CVE-2014-9471",
                         body["event_data"]["vulnerabilities"][0]["id"])
        self.assertEqual(2, len(body["event_data"]["vulnerabilities"]))

        # Ensure we get the correct event message out as well.
        event = VulnerabilityFoundEvent()
        msg = "1 Low and 1 more vulnerabilities were detected in repository devtable/simple in 2 tags"
        self.assertEqual(msg, event.get_summary(body["event_data"], {}))
        self.assertEqual("info", event.get_level(body["event_data"], {}))
    else:
        self.assertIsNone(queue_item)

    # Ensure its security indexed engine was updated.
    updated_layer = model.tag.get_tag_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest")
    self.assertEqual(updated_layer.id, layer.id)
    self.assertTrue(updated_layer.security_indexed_engine > 0)
class V2SecurityScanner(SecurityScannerInterface):
    """
    Implementation of the security scanner interface for Clair V2 API-compatible
    implementations.

    NOTE: This is a legacy implementation and is intended to be removed once
    everyone is moved to the more modern V4 API. (Yes, we skipped V3)
    """

    def __init__(self, app, instance_keys, storage):
        """
        Validates the V2 scanner configuration and wires up the legacy API
        client and layer analyzer.

        Raises:
            InvalidConfigurationException: if the V2 configuration is invalid.
        """
        self.app = app
        self._legacy_secscan_api = None

        validator = V2SecurityConfigValidator(
            app.config.get("FEATURE_SECURITY_SCANNER", False),
            app.config.get("SECURITY_SCANNER_ENDPOINT"),
        )
        if not validator.valid():
            msg = "Failed to validate security scanner V2 configuration"
            logger.warning(msg)
            raise InvalidConfigurationException(msg)

        url_scheme_and_hostname = URLSchemeAndHostname(
            app.config["PREFERRED_URL_SCHEME"], app.config["SERVER_HOSTNAME"])

        # The URI creator needs a request context to build blob download URLs.
        self._legacy_secscan_api = SecurityScannerAPI(
            app.config,
            storage,
            app.config["SERVER_HOSTNAME"],
            app.config["HTTPCLIENT"],
            uri_creator=get_blob_download_uri_getter(
                app.test_request_context("/"), url_scheme_and_hostname),
            instance_keys=instance_keys,
        )

        # NOTE: This import is in here because otherwise this class would depend upon app.
        # Its not great, but as this is intended to be legacy until its removed, its okay.
        from util.secscan.analyzer import LayerAnalyzer

        self._target_version = app.config.get(
            "SECURITY_SCANNER_ENGINE_VERSION_TARGET", 3)
        self._analyzer = LayerAnalyzer(app.config, self._legacy_secscan_api)

    @property
    def legacy_api_handler(self):
        """
        Exposes the legacy security scan API for legacy workers that need it.
        """
        return self._legacy_secscan_api

    def register_model_cleanup_callbacks(self, data_model_config):
        # Ensure deleted images are also removed from the scanner's state.
        if self._legacy_secscan_api is not None:
            data_model_config.register_image_cleanup_callback(
                self._legacy_secscan_api.cleanup_layers)

    def load_security_information(self, manifest_or_legacy_image, include_vulnerabilities=False):
        """
        Loads the security information for the given manifest or legacy image,
        mapping the stored scan status onto a SecurityInformationLookupResult.
        """
        status = registry_model.get_security_status(manifest_or_legacy_image)
        if status is None:
            return SecurityInformationLookupResult.with_status(
                ScanLookupStatus.UNKNOWN_MANIFEST_OR_IMAGE)

        if status == SecurityScanStatus.FAILED:
            return SecurityInformationLookupResult.with_status(
                ScanLookupStatus.FAILED_TO_INDEX)

        if status == SecurityScanStatus.UNSUPPORTED:
            return SecurityInformationLookupResult.with_status(
                ScanLookupStatus.UNSUPPORTED_FOR_INDEXING)

        if status == SecurityScanStatus.QUEUED:
            return SecurityInformationLookupResult.with_status(
                ScanLookupStatus.NOT_YET_INDEXED)

        assert status == SecurityScanStatus.SCANNED

        try:
            if include_vulnerabilities:
                data = self._legacy_secscan_api.get_layer_data(
                    manifest_or_legacy_image, include_vulnerabilities=True)
            else:
                data = self._legacy_secscan_api.get_layer_data(
                    manifest_or_legacy_image, include_features=True)
        except APIRequestFailure as arf:
            return SecurityInformationLookupResult.for_request_error(str(arf))

        if data is None:
            # If no data was found but we reached this point, then it indicates we have incorrect security
            # status for the manifest or legacy image. Mark the manifest or legacy image as unindexed
            # so it automatically gets re-indexed.
            if self.app.config.get("REGISTRY_STATE", "normal") == "normal":
                registry_model.reset_security_status(manifest_or_legacy_image)

            return SecurityInformationLookupResult.with_status(
                ScanLookupStatus.NOT_YET_INDEXED)

        return SecurityInformationLookupResult.for_data(
            SecurityInformation.from_dict(data))

    def _candidates_to_scan(self, start_token=None):
        """
        Returns an (iterator, next_token) pair over images eligible for
        scanning, or (None, None) when there is nothing to scan.
        """
        target_version = self._target_version

        def batch_query():
            return get_images_eligible_for_scan(target_version)

        # Find the minimum ID.
        min_id = None
        if start_token is not None:
            min_id = start_token.min_id
        else:
            min_id = self.app.config.get("SECURITY_SCANNER_INDEXING_MIN_ID")
            if min_id is None:
                min_id = get_min_id_for_sec_scan(target_version)

        # Get the ID of the last image we can analyze. Will be None if there are no images in the
        # database.
        max_id = get_max_id_for_sec_scan()
        if max_id is None:
            return (None, None)

        if min_id is None or min_id > max_id:
            return (None, None)

        # 4^log10(total) gives us a scalable batch size into the billions.
        batch_size = int(4**log10(max(10, max_id - min_id)))

        # TODO: Once we have a clean shared NamedTuple for Images, send that to the secscan analyzer
        # rather than the database Image itself.
        iterator = yield_random_entries(
            batch_query,
            get_image_pk_field(),
            batch_size,
            max_id,
            min_id,
        )

        return (iterator, ScanToken(max_id + 1))

    def perform_indexing(self, start_token=None):
        """
        Performs indexing of the next set of unindexed manifests/images.

        If start_token is given, the indexing should resume from that point. Returns a new start
        index for the next iteration of indexing. The tokens returned and given are assumed to be
        opaque outside of this implementation and should not be relied upon by the caller to conform
        to any particular format.
        """
        # NOTE: This import is in here because otherwise this class would depend upon app.
        # Its not great, but as this is intended to be legacy until its removed, its okay.
        from util.secscan.analyzer import PreemptedException

        iterator, next_token = self._candidates_to_scan(start_token)
        if iterator is None:
            logger.debug("Found no additional images to scan")
            return None

        with UseThenDisconnect(self.app.config):
            for candidate, abt, num_remaining in iterator:
                try:
                    self._analyzer.analyze_recursively(candidate)
                except PreemptedException:
                    # Another worker claimed this layer first; stop this batch.
                    logger.debug("Another worker pre-empted us for layer: %s",
                                 candidate.id)
                    abt.set()
                except APIRequestFailure:
                    logger.exception("Security scanner service unavailable")
                    return

                unscanned_images.set(num_remaining)

        return next_token