def test_expected_feed_sync_get_feeds(
    self,
    expected_feed,
    expected_group,
    expected_count,
    expected_content,
    initial_feed_sync_resp,
    is_legacy_test,
):
    """
    Tests that the list feeds endpoint returns expected content after a feed
    sync has been completed

    Does this using parameterized list of expected feeds, groups, and the
    expected counts of that group
    """
    # sync feeds and verify that the feed sync was a success
    # (removed redundant `initial_feed_sync_resp = initial_feed_sync_resp` self-assignment)
    assert initial_feed_sync_resp == http_utils.APIResponse(200)

    # call get all feeds
    feeds_get_resp = policy_engine_api.feeds.get_feeds(True)

    # assert that expected feed is present in found list and enabled
    actual_feed = self._find_by_attr("name", feeds_get_resp.body, expected_feed)
    assert actual_feed is not None
    assert actual_feed["enabled"] is True

    # Verify that the expected group is present, enabled, and has the
    # expected record count
    actual_group = self._find_by_attr("name", actual_feed["groups"], expected_group)
    assert actual_group is not None
    assert actual_group["enabled"]
    assert actual_group["record_count"] == expected_count
def test_get_vulnerabilities_schema(
    self, image_digest, image_digest_id_map, ingress_image, schema_validator
):
    """Validate the vulnerability report for an ingressed image against its JSON schema."""
    # ingress the image so its vulnerabilities can be queried
    ingress_image(image_digest)

    image_id = image_digest_id_map[image_digest]
    images_vuln_resp: http_utils.APIResponse = (
        policy_engine_api.users.get_image_vulnerabilities(image_id)
    )
    assert images_vuln_resp == http_utils.APIResponse(200)

    # validate the response body, listing every schema violation on failure
    validator = schema_validator("vulnerability_report.schema.json")
    assert validator.is_valid(images_vuln_resp.body), "\n".join(
        str(error) for error in validator.iter_errors(images_vuln_resp.body)
    )
def test_get_raw_object(self, expected_content, bucket, archive_id, filename):
    """Fetch a raw object and verify both the status code and the payload."""
    expected = expected_content(filename)
    response = objects.get_raw_object(bucket, archive_id)
    assert response == http_utils.APIResponse(200)
    assert response.body == expected
def test_expected_feed_sync_post(
    self,
    expected_feed,
    expected_group,
    expected_count,
    expected_content,
    initial_feed_sync_resp,
    is_legacy_test,
):
    """
    Tests that the post endpoint to trigger a feed sync returns expected content

    Does this using parameterized list of expected feeds, groups, and the
    expected counts of that group. In addition to asserting the feed, group,
    and count are accurate, it also verifies each was a "success".

    Uses the initial sync fixture for the resp
    """
    assert initial_feed_sync_resp == http_utils.APIResponse(200)

    # the synced feed must be reported and marked successful
    actual_feed = self._find_by_attr(
        "feed", initial_feed_sync_resp.body, expected_feed
    )
    assert actual_feed is not None
    assert actual_feed["status"] == "success"

    # the group must be reported successful with the expected record count
    actual_group = self._find_by_attr("group", actual_feed["groups"], expected_group)
    assert actual_group is not None
    assert actual_group["status"] == "success"
    assert actual_group["updated_record_count"] == expected_count
    assert actual_group["total_time_seconds"] > 0
def test_query_vulnerabilities(self, query, expected_content):
    """Query vulnerabilities by id/metadata and compare against the expected output file."""
    resp = policy_engine_api.query_vulnerabilities.get_vulnerabilities(
        query.id, **asdict(query.query_metadata)
    )
    assert resp == http_utils.APIResponse(200)
    assert resp.body == expected_content(query.expected_output_file)
def test_image_load_content(self, image_digest, ingress_image, expected_content):
    """Ingress an image and verify the load succeeded with no reported problems."""
    image_load_resp: http_utils.APIResponse = ingress_image(image_digest)
    assert image_load_resp == http_utils.APIResponse(200)

    # any problems are surfaced in the assertion message
    problems = image_load_resp.body["problems"]
    assert len(problems) == 0, problems
    assert image_load_resp.body["status"] == "loaded"
def test_post_raw_object(self, expected_content, bucket, archive_id, filename):
    """
    Creates a raw object and verifies the response echoes the bucket and
    archive id in the returned object URI.
    """
    content = expected_content(filename)
    create_doc_resp = objects.create_raw_object(bucket, archive_id, content)
    assert create_doc_resp == http_utils.APIResponse(200)
    # response body is a URI whose last two path segments are <bucket>/<archive_id>
    # (simplified `[-2::1]` to the equivalent `[-2:]`; dropped commented-out code)
    resp_bucket, resp_archive = create_doc_resp.body.split("/")[-2:]
    assert resp_bucket == bucket
    assert resp_archive == archive_id
def test_sync_timestamps_updated(self, initial_feed_sync_resp):
    """
    Verifies that when a feed sync is triggered, the last sync and last
    updated timestamps are all updated

    Should not sync any new data that is not already present from initial feed
    sync, so this is testing the case in which nothing changes other than the
    timestamps
    """
    first_feeds_get_resp = policy_engine_api.feeds.get_feeds(True)
    assert first_feeds_get_resp == http_utils.APIResponse(200)

    # From the first sync, build a map where key is feed name and value
    # contains timestamps of the feed and its groups
    # (fixed docstring/comment typo "has" -> "map")
    first_feed_sync = {}
    for feed in first_feeds_get_resp.body:
        first_feed_sync[feed["name"]] = {
            "last_full_sync": feed["last_full_sync"],
            "group_timestamps": {
                group["name"]: group["last_sync"] for group in feed["groups"]
            },
        }

    # Sync again and call the get endpoint
    second_feeds_post_resp = policy_engine_api.feeds.feeds_sync()
    assert second_feeds_post_resp == http_utils.APIResponse(200)
    second_feeds_get_resp = policy_engine_api.feeds.get_feeds(True)
    assert second_feeds_get_resp == http_utils.APIResponse(200)

    # verify length of feeds is the same between first and second sync
    assert len(second_feeds_get_resp.body) == len(first_feeds_get_resp.body)

    # loop over second feed sync and verify the timestamps are all greater
    # than the first
    for second_feed in second_feeds_get_resp.body:
        first_feed = first_feed_sync[second_feed["name"]]
        assert first_feed is not None
        assert first_feed["last_full_sync"] < second_feed["last_full_sync"]
        for second_group in second_feed["groups"]:
            assert (
                first_feed["group_timestamps"][second_group["name"]]
                < second_group["last_sync"]
            )
def test_query_vulnerabilities(self, query, expected_content):
    """Query vulnerabilities and compare order-normalized results to the expected file."""
    vulnerabilities_resp = (
        policy_engine_api.query_vulnerabilities.get_vulnerabilities(
            query.id, **asdict(query.query_metadata)
        )
    )
    assert vulnerabilities_resp == http_utils.APIResponse(200)

    # sort both sides by (id, namespace) so ordering differences cannot fail the test
    def sort_key(record):
        return (record.get("id"), record.get("namespace"))

    expected = expected_content(query.expected_output_file)
    if expected:
        expected.sort(key=sort_key)
    results = vulnerabilities_resp.body
    if results:
        results.sort(key=sort_key)
    assert results == expected
def test_no_grypedb_feed_if_legacy(self, initial_feed_sync_resp):
    """
    If is legacy test, verify that grypedb feed is not returned when getting
    list feeds
    """
    assert initial_feed_sync_resp == http_utils.APIResponse(200)

    # the sync response must not mention a grypedb feed
    assert (
        self._find_by_attr(
            "feed", initial_feed_sync_resp.body, GrypeDBFeed.__feed_name__
        )
        is None
    )

    # nor should the list-feeds endpoint return one
    feeds_get_resp = policy_engine_api.feeds.get_feeds(True)
    assert (
        self._find_by_attr("name", feeds_get_resp.body, GrypeDBFeed.__feed_name__)
        is None
    )
def test_get_vulnerabilities_content(
    self, image_digest, image_digest_id_map, ingress_image, expected_content
):
    """Compare an ingressed image's vulnerability matches against the expected fixture."""
    # ingress the image and resolve its id
    ingress_image(image_digest)
    image_id = image_digest_id_map[image_digest]

    # get the vulnerabilities for the image
    images_vuln_resp: http_utils.APIResponse = (
        policy_engine_api.users.get_image_vulnerabilities(image_id)
    )
    assert images_vuln_resp == http_utils.APIResponse(200)

    # one canonical ordering shared by expected and actual matches
    def match_key(match):
        return (
            match["vulnerability"]["vulnerability_id"],
            match["vulnerability"]["feed_group"],
            match["artifact"]["name"],
            match["artifact"]["pkg_path"],
        )

    # expected content is a list of vulnerability matches for this digest
    expected_results = expected_content(image_digest)
    expected_results.sort(key=match_key)

    actual_results = images_vuln_resp.body["results"]
    actual_results.sort(key=match_key)

    # same number of matches on both sides
    assert len(expected_results) == len(actual_results)

    # compare only vulnerability and artifact; "match" contains a dynamic date
    for expected_match, actual_match in zip(expected_results, actual_results):
        assert expected_match["vulnerability"] == actual_match["vulnerability"]
        assert expected_match["artifact"] == actual_match["artifact"]
def test_image_load_schema(self, image_digest, ingress_image, schema_validator):
    """Ingress an image and validate the load response against the ingress schema."""
    image_load_resp: http_utils.APIResponse = ingress_image(image_digest)
    assert image_load_resp == http_utils.APIResponse(200)

    # schema validation, listing every violation on failure
    validator = schema_validator("ingress_image.schema.json")
    assert validator.is_valid(image_load_resp.body), "\n".join(
        str(error) for error in validator.iter_errors(image_load_resp.body)
    )

    # the load must report no problems
    problems = image_load_resp.body["problems"]
    assert len(problems) == 0, problems
def test_expected_feed_sync(
    self, expected_feed, expected_group, expected_content, sync_feeds
):
    """
    Syncs feeds and verifies the expected feed and group are present and
    enabled, the group's record count matches the expected data file, and the
    expected vulnerabilities are queryable from the system.
    """
    # sync feeds and verify that the feed was a success
    feed_sync_resp = sync_feeds
    assert feed_sync_resp == http_utils.APIResponse(200)
    # guard against a missing feed before indexing (original would TypeError on None)
    synced_feed = self._find_by_attr("feed", feed_sync_resp.body, expected_feed["name"])
    assert synced_feed is not None
    assert synced_feed["status"] == "success"

    # call get all feeds; assert that expected feed is present and enabled
    feeds_get_resp = policy_engine_api.feeds.get_feeds(True)
    actual_feed = self._find_by_attr(
        "name", feeds_get_resp.body, expected_feed["name"]
    )
    assert actual_feed is not None
    assert actual_feed["enabled"]

    # Verify that the expected group is present and enabled
    actual_group = self._find_by_attr(
        "name", actual_feed["groups"], expected_group["name"]
    )
    assert actual_group is not None
    assert actual_group["enabled"]

    # get expected cves and verify the count in the get feeds response
    expected_vulns = expected_content(
        os.path.join(
            FEEDS_DATA_PATH_PREFIX,
            expected_feed["name"],
            expected_group["name"],
        )
    )["data"]
    assert actual_group["record_count"] == len(expected_vulns)

    # using expected cves, query the vulnerabilities endpoint to verify they
    # are in the system
    vuln_ids = self._get_vuln_ids(expected_vulns)
    vuln_response = policy_engine_api.query_vulnerabilities.get_vulnerabilities(
        vuln_ids, namespace=expected_group["name"]
    )
    assert len(vuln_response.body) == len(expected_vulns)
    # ids must also be unique
    assert len({x["id"] for x in vuln_response.body}) == len(expected_vulns)
def test_query_vulnerabilities(self, query, expected_content, is_legacy_test):
    """
    Queries vulnerabilities and verifies each returned record matches the
    query filters (id, namespace, affected package and version); in legacy
    mode the full response is also compared against the expected output file.
    """
    vulnerabilities_resp = (
        policy_engine_api.query_vulnerabilities.get_vulnerabilities(
            query.id, **asdict(query.query_metadata)
        )
    )
    assert vulnerabilities_resp == http_utils.APIResponse(200)

    for vuln in vulnerabilities_resp.body:
        assert vuln["id"] in query.id
        if query.query_metadata.namespace:
            assert vuln["namespace"] == query.query_metadata.namespace
        if query.query_metadata.affected_package:
            # build dict where key is name and value is array of affected versions
            # (defaultdict(list) replaces the needless lambda)
            package_versions = defaultdict(list)
            for package in vuln["affected_packages"]:
                package_versions[package["name"]].append(package["version"])
            assert query.query_metadata.affected_package in package_versions
            if query.query_metadata.affected_package_version:
                versions = package_versions[query.query_metadata.affected_package]
                # "*" means every version of the package is affected
                assert (
                    query.query_metadata.affected_package_version in versions
                    or "*" in versions
                )

    if is_legacy_test:
        expected = expected_content(query.expected_output_file)
        if expected:
            expected.sort(key=lambda x: (x.get("id"), x.get("namespace")))
        results = vulnerabilities_resp.body
        if results:
            results.sort(key=lambda x: (x.get("id"), x.get("namespace")))
        assert results == expected
def test_get_vulnerabilities_content(self, image_digest, image_digest_id_map,
                                     ingress_image, expected_content):
    """
    Compares the legacy report rows and cpe report of an ingressed image's
    vulnerability response against the expected content for that digest.
    """
    # ingress the image and get the image id
    ingress_image(image_digest)
    image_id = image_digest_id_map[image_digest]

    # get the vulnerabilities for the image
    images_vuln_resp: http_utils.APIResponse = (
        policy_engine_api.users.get_image_vulnerabilities(image_id)
    )
    assert images_vuln_resp == http_utils.APIResponse(200)

    # get the expected results without shadowing the expected_content fixture
    expected = expected_content(image_digest)

    # check that the row count in the legacy response matches
    actual_legacy_result = images_vuln_resp.body["legacy_report"]["multi"]["result"]
    expected_legacy_result = expected["legacy_report"]["multi"]["result"]
    assert actual_legacy_result["rowcount"] == expected_legacy_result["rowcount"]

    # check that the legacy rows match (these are OS pkg vulns usually)
    expected_legacy_rows = {tuple(x) for x in expected_legacy_result["rows"]}
    actual_legacy_rows = {tuple(x) for x in actual_legacy_result["rows"]}
    assert (
        actual_legacy_rows == expected_legacy_rows
    ), f"Expected vulnerabilities missing:\n{expected_legacy_rows - actual_legacy_rows}"

    # check that the cpe reports match (non-os pkg vulns); serialize each
    # entry to make the dicts hashable for set comparison
    expected_cpes = {json.dumps(x) for x in expected["cpe_report"]}
    actual_cpes = {json.dumps(x) for x in images_vuln_resp.body["cpe_report"]}
    assert (
        actual_cpes == expected_cpes
    ), f"Expected vulnerabilities missing:\n{expected_cpes - actual_cpes}"
def test_image_load_content(self, image_digest, ingress_image, expected_content):
    """
    Ingresses an image and compares the legacy report rows and cpe report in
    the load response against the expected content for that digest.
    """
    # ingress image and check that response is 200
    image_load_resp: http_utils.APIResponse = ingress_image(image_digest)
    assert image_load_resp == http_utils.APIResponse(200)

    # check that there are no errors in response
    assert len(image_load_resp.body["problems"]) == 0, image_load_resp.body[
        "problems"
    ]

    # get the expected results without shadowing the expected_content fixture
    expected = expected_content(image_digest)
    vuln_report = image_load_resp.body["vulnerability_report"]

    # check that the row count in the legacy response matches
    actual_legacy_result = vuln_report["legacy_report"]["multi"]["result"]
    expected_legacy_result = expected["legacy_report"]["multi"]["result"]
    assert actual_legacy_result["rowcount"] == expected_legacy_result["rowcount"]

    # check that the legacy rows match (these are OS pkg vulns usually)
    expected_legacy_rows = {tuple(x) for x in expected_legacy_result["rows"]}
    actual_legacy_rows = {tuple(x) for x in actual_legacy_result["rows"]}
    assert (
        actual_legacy_rows == expected_legacy_rows
    ), f"Expected vulnerabilities missing:\n{expected_legacy_rows - actual_legacy_rows}"

    # check that the cpe reports match (non-os pkg vulns); serialize each
    # entry to make the dicts hashable for set comparison
    expected_cpes = {json.dumps(x) for x in expected["cpe_report"]}
    actual_cpes = {json.dumps(x) for x in vuln_report["cpe_report"]}
    assert (
        actual_cpes == expected_cpes
    ), f"Expected vulnerabilities missing:\n{expected_cpes - actual_cpes}"