def begin_import() -> APIResponse:
    """Fixture-style generator: create an import operation, yield its
    response, then delete the operation on teardown (best-effort: a failed
    delete is only logged, never raised)."""
    logger.info("Creating import operation.")
    created = http_post(["imports", "images"], payload={})
    yield created
    # Teardown: remove the operation that was created above.
    logger.info("Deleting import operation.")
    removal = http_del(["imports", "images", created.body["uuid"]])
    if removal.code == 200:
        logger.info("Deletion successful.")
    else:
        logger.info(f"Deletion failed! Response: {removal.body}")
def test_upload_dockerfile(
    self,
    syft_json_name: str,
    dockerfile_name: str,
    module_path: str,
    begin_import: APIResponse,
):
    """Uploading a dockerfile to an open import operation returns HTTP 200."""
    contents = imports.load_file(module_path, dockerfile_name)
    response: APIResponse = http_post(
        ["imports", "images", begin_import.body["uuid"], "dockerfile"],
        payload=contents,
    )
    assert response.code == 200
def test_upload_packages(
    self,
    syft_json_name: str,
    dockerfile_name: str,
    module_path: str,
    begin_import: APIResponse,
):
    """Uploading the parsed syft package list to an open import operation
    returns HTTP 200."""
    raw_syft = imports.load_file(module_path, syft_json_name)
    response: APIResponse = http_post(
        ["imports", "images", begin_import.body["uuid"], "packages"],
        payload=json.loads(raw_syft),
    )
    assert response.code == 200
def feeds_sync(force_flush=None, feed=None):
    """Trigger a feed sync through the policy engine API.

    Raises http_utils.RequestFailedError on any non-200 response; otherwise
    returns the APIResponse from the sync call.
    """
    body = {
        "force_flush": force_flush,
        "feed": feed,
        "user_id": policy_engine_api_conf().get("ANCHORE_API_USER"),
    }
    # NOTE: config is passed as the callable itself (not its result), matching
    # the convention used elsewhere in this suite.
    response = http_utils.http_post(["feeds"], body, config=policy_engine_api_conf)
    if response.code != 200:
        raise http_utils.RequestFailedError(response.url, response.code, response.body)
    return response
def test_list_dockerfiles(
    self,
    syft_json_name: str,
    dockerfile_name: str,
    module_path: str,
    begin_import: APIResponse,
):
    """After uploading a dockerfile, listing dockerfiles for the operation
    returns it with a matching digest."""
    uuid = begin_import.body["uuid"]
    contents = imports.load_file(module_path, dockerfile_name)

    upload_resp: APIResponse = http_post(
        ["imports", "images", uuid, "dockerfile"], payload=contents
    )
    assert upload_resp.code == 200

    listing = http_get(["imports", "images", uuid, "dockerfile"])
    assert listing.code == 200
    # The first listed entry should be the dockerfile we just uploaded.
    assert listing.body[0]["digest"] == upload_resp.body["digest"]
def add_subscription(api_conf: callable):
    """Create the docker.io/alpine:latest tag_update subscription.

    If the subscription already exists (API answers 500 with a specific
    message), look it up via the subscriptions listing instead. Any other
    non-200 response raises RequestFailedError. Returns the subscription
    dict, or None if the duplicate could not be found in the listing.
    """
    found = None
    create_resp = http_post(
        ["subscriptions"], ALPINE_LATEST_SUBSCRIPTION, config=api_conf
    )
    duplicate = (
        create_resp.code == 500
        and create_resp.body.get("message") == "subscription already exists in DB"
    )
    if duplicate:
        # Already exists — recover the existing subscription from the listing.
        listing = http_get(["subscriptions"], config=api_conf)
        for candidate in listing.body:
            if (
                candidate.get("subscription_type") == "tag_update"
                and candidate.get("subscription_key") == "docker.io/alpine:latest"
            ):
                found = candidate
                break
    elif create_resp.code != 200:
        raise RequestFailedError(create_resp.url, create_resp.code, create_resp.body)
    else:
        found = create_resp.body[0]
    return found
def add_document(bucket: str, archiveid: str, object: Dict) -> http_utils.APIResponse:
    """Store a document in the catalog object store.

    Raises ValueError when any argument is falsy, and RequestFailedError
    when the API answers with a non-200 code; otherwise returns the
    APIResponse.

    NOTE(review): the parameter name ``object`` shadows the builtin; it is
    kept unchanged because callers may pass it by keyword.
    """
    if not bucket:
        raise ValueError("Cannot add document to object store without bucket")
    if not archiveid:
        raise ValueError("Cannot add document to object store without archiveid")
    if not object:
        raise ValueError("Cannot add document to object store without object")

    response = http_utils.http_post(
        ["objects", bucket, archiveid], object, config=catalog_api_conf
    )
    if response.code != 200:
        raise http_utils.RequestFailedError(response.url, response.code, response.body)
    return response
def create_and_teardown_archive_rule(request):
    """
    In order to interact with the archives API, a rule must be added first,
    which depends on there being an image added as well:
    1. Add node:latest image (this isn't currently depended upon in other tests)
    2. Add Archive Rule
    Note: This appears to only work for the root user ATM, so don't run w/ ft_user
    """
    # Step 1: add the image the archive rule's selector will match, and wait
    # for its analysis to complete before any archive operations.
    _logger.info("Adding alpine:edge Image for analysis")
    add_image_resp = http_post(["images"], {"tag": "alpine:edge"},
                               config=request.param)
    if add_image_resp.code != 200:
        raise RequestFailedError(add_image_resp.url, add_image_resp.code,
                                 add_image_resp.body)
    wait_for_image_to_analyze(get_image_id(add_image_resp), request.param)

    # Step 2: add an archive rule whose selector targets the image above.
    # analysis_age_days=0 / tag_versions_newer=0 make it eligible immediately.
    archive_rule_json = {
        "analysis_age_days": 0,
        "created_at": "2020-08-25T17:15:16.865Z",
        "last_updated": "2020-08-25T17:15:16.865Z",
        "selector": {
            "registry": "docker.io",
            "repository": "alpine",
            "tag": "edge"
        },
        "system_global": True,
        "tag_versions_newer": 0,
        "transition": "archive",
    }
    _logger.info("Adding Archive Rule")
    archive_rule_resp = http_post(["archives", "rules"], archive_rule_json,
                                  config=request.param)
    if archive_rule_resp.code != 200:
        raise RequestFailedError(archive_rule_resp.url, archive_rule_resp.code,
                                 archive_rule_resp.body)

    # Step 3: archive the image by digest (API takes a list of digests).
    archive_resp = http_post(["archives", "images"],
                             [get_image_digest(add_image_resp)],
                             config=request.param)
    if archive_resp.code != 200:
        raise RequestFailedError(archive_resp.url, archive_resp.code,
                                 archive_resp.body)

    def teardown():
        # Reverse order of setup: image, then rule, then archived image copy.
        _logger.info("Removing alpine:edge image from anchore")
        # NOTE(review): this delete (and the rule delete below) omits
        # config=request.param unlike the setup calls — presumably relying on
        # a default config; confirm this is intentional.
        remove_image_resp = http_del(
            ["images", "by_id", get_image_id(add_image_resp)],
            query={"force": True})
        if remove_image_resp.code != 200:
            raise RequestFailedError(remove_image_resp.url,
                                     remove_image_resp.code,
                                     remove_image_resp.body)
        _logger.info("Removing Archive Rule: rule_id={}".format(
            archive_rule_resp.body["rule_id"]))
        remove_rule_resp = http_del(
            ["archives", "rules", archive_rule_resp.body["rule_id"]])
        if remove_rule_resp.code != 200:
            raise RequestFailedError(remove_rule_resp.url,
                                     remove_rule_resp.code,
                                     remove_rule_resp.body)
        delete_archive_image_resp = http_del(
            ["archives", "images", get_image_digest(add_image_resp)],
            config=request.param,
        )
        if delete_archive_image_resp.code != 200:
            raise RequestFailedError(
                delete_archive_image_resp.url,
                delete_archive_image_resp.code,
                delete_archive_image_resp.body,
            )

    request.addfinalizer(teardown)
    # Callers get the three setup responses plus the config used to create them.
    return add_image_resp, archive_rule_resp, archive_resp, request.param
def create_functional_test_account_with_teardown(request):
    _logger = logging.getLogger("conftest")
    """
    This fixture implicitly tests get_by_account_name, create, update state, and delete operations, but
    essentially, creates a functional_test account with a user ('ft_user' unless overridden by environment
    variables), and then deletes this account (blocking until deletion is complete) at the end of the test
    session
    """

    def disable_and_delete_functional_test_account():
        """
        This method wil dynamically, and in a blocking fashion, handle account deletion, which requires that
        the functional_test account be disabled before deletion.

        If the functional_test account is currently enabled, it will disable and then delete the account,
        waiting for the deletion to complete.
        If the functional_test account is already disabled, it will delete the account, and wait for the
        deletion to complete.
        If the functional_test account is currently awaiting deletion, it will wait for the deletion to
        complete.
        If the functional_test account is not found, it will exit.
        """

        def await_account_deletion():
            """
            This method is helpful for awaiting account deletion of the functional_test account, with a
            timeout governed by DELETE_ACCOUNT_TIMEOUT_SEC. It awaits in 5 second intervals.
            """
            start_time_sec = time.time()
            # Poll every 5s until the account GET returns 404 (fully deleted).
            result = 200
            while result != 404:
                time.sleep(5)
                ft_get_account_resp = http_get(["accounts", FT_ACCOUNT])
                _logger.info(
                    "Waiting for functional_test account to fully delete. Time Elapsed={}sec".format(
                        int(time.time() - start_time_sec)
                    )
                )
                # Anything other than 200 (still deleting) or 404 (gone) is a
                # hard failure.
                if not (
                    ft_get_account_resp.code == 200
                    or ft_get_account_resp.code == 404
                ):
                    _logger.error(ft_get_account_resp)
                    raise RequestFailedError(
                        ft_get_account_resp.url,
                        ft_get_account_resp.code,
                        ft_get_account_resp.body,
                    )
                if time.time() - start_time_sec >= DELETE_ACCOUNT_TIMEOUT_SEC:
                    raise TimeoutError(
                        "Timed out waiting for functional_test account to delete"
                    )
                result = ft_get_account_resp.code

        ft_account_resp = http_get(["accounts", FT_ACCOUNT])
        if ft_account_resp.code == 404:
            # Nothing to clean up.
            _logger.info("functional_test account not found")
            return
        state = ft_account_resp.body.get("state")
        if state == "enabled":
            # Accounts must be disabled before they can be deleted.
            _logger.info("functional_test account found, and enabled. Disabling")
            disable_account_resp = http_put(
                ["accounts", FT_ACCOUNT, "state"], {"state": "disabled"}
            )
            if disable_account_resp.code != 200:
                raise RequestFailedError(
                    disable_account_resp.url,
                    disable_account_resp.code,
                    disable_account_resp.body,
                )
        elif state == "deleting":
            # Deletion already in flight — just wait for it to finish.
            _logger.info(
                "functional_test account found, but is currently being deleted"
            )
            await_account_deletion()
            return
        _logger.info("Deleting functional_test account")
        delete_resp = http_del(["accounts", FT_ACCOUNT])
        # 404 is tolerated here: the account may have vanished between checks.
        if not (delete_resp.code == 200 or delete_resp.code == 404):
            raise RequestFailedError(
                delete_resp.url, delete_resp.code, delete_resp.body
            )
        await_account_deletion()

    # Delete the account if it exists already for some reason (sanity check)
    disable_and_delete_functional_test_account()
    _logger.info("Creating functional_test account")
    create_resp = http_post(
        ["accounts"], {"name": FT_ACCOUNT, "email": "*****@*****.**"}
    )
    if create_resp.code != 200:
        raise RequestFailedError(create_resp.url, create_resp.code, create_resp.body)
    ft_user = get_ft_user()
    _logger.info("Creating functional_test user: {}".format(ft_user["username"]))
    create_user_resp = http_post(["accounts", FT_ACCOUNT, "users"], ft_user)
    if create_user_resp.code != 200:
        raise RequestFailedError(
            create_user_resp.url,
            create_user_resp.code,
            create_user_resp.body
        )
    # Tear the account down again when the test session ends.
    request.addfinalizer(disable_and_delete_functional_test_account)
    return ft_user
def _add_image_for_analysis(tag):
    """Submit *tag* to the images endpoint for analysis and return the raw
    APIResponse.

    NOTE(review): ``request`` is a free name here — presumably this helper is
    nested inside a fixture that provides it; confirm against the caller.
    """
    payload = {"tag": tag}
    return http_post(["images"], payload, config=request.param)
def test_add_repository(self, api_conf):
    """Adding docker.io/alpine as a watched repository returns HTTP 200."""
    response = http_post(
        ["repositories"],
        None,
        query={"repository": "docker.io/alpine"},
        config=api_conf,
    )
    assert response == APIResponse(200)
def test_add_user(self, api_conf):
    """Creating a user under the functional-test account returns HTTP 200;
    the user is removed again afterwards."""
    credentials = {"username": "******", "password": "******"}
    response = http_post(
        ["accounts", FT_ACCOUNT, "users"], credentials, config=api_conf
    )
    assert response == APIResponse(200)
    # Clean up the user created above.
    delete_ft_account_user("creation_test", api_conf)
def test_full_upload(
    self,
    syft_json_name: str,
    dockerfile_name: str,
    module_path: str,
    begin_import: APIResponse,
):
    """End-to-end import flow: upload dockerfile, packages, manifest, and
    image config to the operation, then create the image from the collected
    digests and verify it is retrievable."""
    op_uuid = begin_import.body["uuid"]
    syft_json = imports.load_file(module_path, syft_json_name)
    dockerfile = imports.load_file(module_path, dockerfile_name)
    metadata = imports.extract_syft_metadata(syft_json)

    # Dockerfile (text payload)
    dockerfile_resp: APIResponse = http_post(
        ["imports", "images", op_uuid, "dockerfile"],
        payload=dockerfile,
    )
    assert dockerfile_resp.code == 200

    # Packages (parsed syft JSON)
    packages_resp: APIResponse = http_post(
        ["imports", "images", op_uuid, "packages"],
        payload=json.loads(syft_json),
    )
    assert packages_resp.code == 200

    # Manifest (raw bytes)
    manifest_resp: APIResponse = http_post_bytes(
        ["imports", "images", op_uuid, "manifest"],
        payload=metadata["manifest"],
    )
    assert manifest_resp.code == 200

    # Image config (raw bytes)
    config_resp: APIResponse = http_post_bytes(
        ["imports", "images", op_uuid, "image_config"],
        payload=metadata["image_config"],
    )
    assert config_resp.code == 200

    # Finalize: create the image referencing each uploaded artifact's digest.
    image_payload = {
        "source": {
            "import": {
                "digest": metadata["digest"],
                "local_image_id": metadata["local_image_id"],
                "contents": {
                    "packages": packages_resp.body["digest"],
                    "dockerfile": dockerfile_resp.body["digest"],
                    "manifest": manifest_resp.body["digest"],
                    "image_config": config_resp.body["digest"],
                },
                "tags": metadata["tags"],
                "operation_uuid": op_uuid,
            }
        },
        "annotations": {
            "testkey1": "testvalue1",
            "testkey2": "testvalue2"
        },
    }
    image_upload_resp = http_post(["images"], payload=image_payload)
    assert image_upload_resp.code == 200

    # The image should now be retrievable by its digest.
    lookup_resp = http_get(["images", metadata["digest"]])
    assert lookup_resp.code == 200