def test_store_blob_on_first_time_download(self, proxy_manifest_response):
    proxy_mock = proxy_manifest_response(
        self.tag, HELLO_WORLD_SCHEMA2_MANIFEST_JSON, DOCKER_SCHEMA2_MANIFEST_CONTENT_TYPE
    )
    params = {
        "repository": self.repository,
        "digest": self.digest,
    }

    with patch(
        "data.registry_model.registry_proxy_model.Proxy", MagicMock(return_value=proxy_mock)
    ):
        with patch("endpoints.v2.blob.model_cache", NoopDataModelCache(TEST_CACHE_CONFIG)):
            conduct_call(
                self.client,
                "v2.download_blob",
                url_for,
                "GET",
                params,
                expected_code=200,
                headers=self.headers,
            )

    path = get_layer_path(self.blob)
    assert path is not None

    placements = ImageStoragePlacement.filter(ImageStoragePlacement.storage == self.blob)
    locations = [placements.get().location.name]
    assert storage.exists(locations, path), f"blob not found in storage at path {path}"

def test_create_blob_placement_on_first_time_download(self, proxy_manifest_response):
    proxy_mock = proxy_manifest_response(
        self.tag, HELLO_WORLD_SCHEMA2_MANIFEST_JSON, DOCKER_SCHEMA2_MANIFEST_CONTENT_TYPE
    )
    params = {
        "repository": self.repository,
        "digest": self.digest,
    }

    with patch(
        "data.registry_model.registry_proxy_model.Proxy", MagicMock(return_value=proxy_mock)
    ):
        with patch("endpoints.v2.blob.model_cache", NoopDataModelCache(TEST_CACHE_CONFIG)):
            conduct_call(
                self.client,
                "v2.download_blob",
                url_for,
                "GET",
                params,
                expected_code=200,
                headers=self.headers,
            )

    placements = ImageStoragePlacement.filter(ImageStoragePlacement.storage == self.blob)
    assert placements.count() == 1

def test_require_xhr_from_browser(user_agent, include_header, expected_code, app, client):
    # Create a public repo with a dot in its name.
    user = model.user.get_user("devtable")
    model.repository.create_repository("devtable", "somerepo.bat", user, "public")

    # Retrieve the repository and ensure we either allow it through or fail, depending on the
    # user agent and header.
    params = {"repository": "devtable/somerepo.bat"}

    headers = {
        "User-Agent": user_agent,
    }
    if include_header:
        headers["X-Requested-With"] = "XMLHttpRequest"

    conduct_call(
        client, Repository, api.url_for, "GET", params, headers=headers, expected_code=expected_code
    )

def test_creates_repo_on_first_pull(self, test_name, proxy_manifest_response):
    test_params = storage_test_cases[test_name]
    repo = f"{self.orgname}/{test_params['image_name']}"
    params = {
        "repository": repo,
        "manifest_ref": test_params["manifest_ref"],
    }
    proxy_mock = proxy_manifest_response(
        test_params["manifest_ref"],
        test_params["manifest_json"],
        test_params["manifest_type"],
    )

    with patch(
        "data.registry_model.registry_proxy_model.Proxy", MagicMock(return_value=proxy_mock)
    ):
        headers = _get_auth_headers(self.sub, self.ctx, repo)
        headers["Accept"] = ", ".join(
            DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(
                DOCKER_SCHEMA1_CONTENT_TYPES
            )
        )
        conduct_call(
            self.client,
            test_params["view_name"],
            url_for,
            "GET",
            params,
            expected_code=200,
            headers=headers,
        )

    repo = model.repository.get_repository(self.orgname, test_params["image_name"])
    assert repo is not None
    assert repo.visibility.name == "private"

def test_does_not_create_repo_when_upstream_repo_does_not_exist(
    self, test_name, proxy_manifest_response
):
    test_params = storage_test_cases[test_name]
    repo = f"{self.orgname}/{test_params['image_name']}"
    params = {
        "repository": repo,
        "manifest_ref": test_params["manifest_ref"],
    }
    proxy_mock = proxy_manifest_response("not-existing-ref", "", "")

    with patch(
        "data.registry_model.registry_proxy_model.Proxy", MagicMock(return_value=proxy_mock)
    ):
        headers = _get_auth_headers(self.sub, self.ctx, repo)
        headers["Accept"] = ", ".join(
            DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(
                DOCKER_SCHEMA1_CONTENT_TYPES
            )
        )
        conduct_call(
            self.client,
            test_params["view_name"],
            url_for,
            "GET",
            params,
            expected_code=404,
            headers=headers,
        )

    count = Repository.filter(
        Repository.name == test_params["image_name"],
        Repository.namespace_user == self.org.id,
    ).count()
    assert count == 0

def test_e2e_query_count_manifest_norewrite(client, app):
    repo_ref = registry_model.lookup_repository("devtable", "simple")
    tag = registry_model.get_repo_tag(repo_ref, "latest")
    manifest = registry_model.get_manifest_for_tag(tag)

    params = {
        "repository": "devtable/simple",
        "manifest_ref": manifest.digest,
    }

    user = model.user.get_user("devtable")
    access = [{
        "type": "repository",
        "name": "devtable/simple",
        "actions": ["pull", "push"],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
    }

    # Conduct a call to prime the instance key and other caches.
    conduct_call(
        client,
        "v2.write_manifest_by_digest",
        url_for,
        "PUT",
        params,
        expected_code=201,
        headers=headers,
        raw_body=manifest.internal_manifest_bytes.as_encoded_str(),
    )

    timecode = time.time()

    def get_time():
        return timecode + 10

    with patch("time.time", get_time):
        # Necessary in order to have the tag updates not occur in the same second, which is the
        # granularity supported currently.
        with count_queries() as counter:
            conduct_call(
                client,
                "v2.write_manifest_by_digest",
                url_for,
                "PUT",
                params,
                expected_code=201,
                headers=headers,
                raw_body=manifest.internal_manifest_bytes.as_encoded_str(),
            )

    assert counter.count <= 27

def test_creates_manifest_on_first_pull(self, test_name, proxy_manifest_response):
    test_params = storage_test_cases[test_name]
    repo = f"{self.orgname}/{test_params['image_name']}"
    params = {
        "repository": repo,
        "manifest_ref": test_params["manifest_ref"],
    }
    proxy_mock = proxy_manifest_response(
        test_params["manifest_ref"],
        test_params["manifest_json"],
        test_params["manifest_type"],
    )

    with patch(
        "data.registry_model.registry_proxy_model.Proxy", MagicMock(return_value=proxy_mock)
    ):
        headers = _get_auth_headers(self.sub, self.ctx, repo)
        headers["Accept"] = ", ".join(
            DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(
                DOCKER_SCHEMA1_CONTENT_TYPES
            )
        )
        conduct_call(
            self.client,
            test_params["view_name"],
            url_for,
            "GET",
            params,
            expected_code=200,
            headers=headers,
        )

    repository_ref = registry_model.lookup_repository(self.orgname, test_params["image_name"])
    assert repository_ref is not None
    tag = registry_model.get_repo_tag(repository_ref, test_params["manifest_ref"])

    # when testing the fetch_manifest_by_digest view the tag created is temporary,
    # and it does not refer to the manifest digest (manifest_ref), so we need to
    # fetch it by its link to the repository instead.
    if test_params["ref_type"] == "digest":
        tag = Tag.filter(Tag.repository_id == repository_ref.id).get()
        # get_manifest_for_tag returns a tag of datatypes.Tag, so we convert
        # the one we have to that type.
        tag = datatypes.Tag.for_tag(tag, SyntheticIDHandler())
    assert tag is not None

    manifest = registry_model.get_manifest_for_tag(tag)
    assert manifest is not None

    output_manifest = manifest.get_parsed_manifest()
    input_manifest = parse_manifest_from_bytes(
        Bytes.for_string_or_unicode(test_params["manifest_json"]),
        test_params["manifest_type"],
        sparse_manifest_support=True,
    )
    assert output_manifest.schema_version == input_manifest.schema_version
    assert output_manifest.media_type == input_manifest.media_type
    assert output_manifest.is_manifest_list == input_manifest.is_manifest_list
    assert output_manifest.digest == input_manifest.digest
    assert output_manifest.manifest_dict == input_manifest.manifest_dict

def test_blob_upload_offset(client, app):
    user = model.user.get_user("devtable")
    access = [{
        "type": "repository",
        "name": "devtable/simple",
        "actions": ["pull", "push"],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
    }

    # Create a blob upload request.
    params = {
        "repository": "devtable/simple",
    }
    response = conduct_call(
        client, "v2.start_blob_upload", url_for, "POST", params, expected_code=202, headers=headers
    )

    upload_uuid = response.headers["Docker-Upload-UUID"]

    # Attempt to start an upload past index zero.
    params = {
        "repository": "devtable/simple",
        "upload_uuid": upload_uuid,
    }

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
        "Content-Range": "13-50",
    }

    conduct_call(
        client,
        "v2.upload_chunk",
        url_for,
        "PATCH",
        params,
        expected_code=416,
        headers=headers,
        body="something",
    )

def test_check_blob_exists_from_dockerhub(self):
    params = {
        "repository": self.repository,
        "digest": self.blob_digest,
    }
    conduct_call(
        self.client,
        "v2.check_blob_exists",
        url_for,
        "HEAD",
        params,
        expected_code=200,
        headers=self.headers,
    )

def test_pull_from_dockerhub(self):
    params = {
        "repository": self.repository,
        "digest": self.blob_digest,
    }
    conduct_call(
        self.client,
        "v2.download_blob",
        url_for,
        "GET",
        params,
        expected_code=200,
        headers=self.headers,
    )

def test_does_not_pull_from_upstream_when_manifest_already_exists(
    self, test_name, proxy_manifest_response
):
    test_params = storage_test_cases[test_name]
    repo = f"{self.orgname}/{test_params['image_name']}"
    params = {
        "repository": repo,
        "manifest_ref": test_params["manifest_ref"],
    }

    r = model.repository.create_repository(self.orgname, test_params["image_name"], self.user)
    assert r is not None
    manifest = parse_manifest_from_bytes(
        Bytes.for_string_or_unicode(test_params["manifest_json"]),
        test_params["manifest_type"],
        sparse_manifest_support=True,
    )
    m = oci.manifest.create_manifest(r.id, manifest)
    assert m is not None

    if test_params["ref_type"] == "digest":
        oci.tag.create_temporary_tag_if_necessary(m, 300)
    else:
        oci.tag.retarget_tag(test_params["manifest_ref"], m.id)

    proxy_mock = proxy_manifest_response(
        test_params["manifest_ref"],
        test_params["manifest_json"],
        test_params["manifest_type"],
    )
    with patch(
        "data.registry_model.registry_proxy_model.Proxy", MagicMock(return_value=proxy_mock)
    ):
        headers = _get_auth_headers(self.sub, self.ctx, repo)
        headers["Accept"] = ", ".join(
            DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(
                DOCKER_SCHEMA1_CONTENT_TYPES
            )
        )
        conduct_call(
            self.client,
            test_params["view_name"],
            url_for,
            "GET",
            params,
            expected_code=200,
            headers=headers,
        )

    assert proxy_mock.manifest_exists.call_count == 1
    assert proxy_mock.get_manifest.call_count == 0

def test_pull_from_dockerhub_by_digest(self):
    digest = "sha256:f329d076a8806c0ce014ce5e554ca70f4ae9407a16bb03baa7fef287ee6371f1"
    params = {
        "repository": self.repository,
        "manifest_ref": digest,
    }
    conduct_call(
        self.client,
        "v2.fetch_manifest_by_digest",
        url_for,
        "GET",
        params,
        expected_code=200,
        headers=self._get_auth_headers(self.repository),
    )

def test_check_blob_exists_from_dockerhub_404(self):
    digest = "sha256:" + hashlib.sha256(b"a").hexdigest()
    params = {
        "repository": self.repository,
        "digest": digest,
    }
    conduct_call(
        self.client,
        "v2.check_blob_exists",
        url_for,
        "HEAD",
        params,
        expected_code=404,
        headers=self.headers,
    )

def test_pull_from_dockerhub_404(self):
    digest = "sha256:" + hashlib.sha256(b"a").hexdigest()
    params = {
        "repository": self.repository,
        "digest": digest,
    }
    conduct_call(
        self.client,
        "v2.download_blob",
        url_for,
        "GET",
        params,
        expected_code=404,
        headers=self.headers,
    )

def test_initialize_user(
    user_count, expected_code, feature_mailing, feature_user_initialize, metadata, client
):
    with patch("endpoints.web.has_users") as mock_user_count:
        with patch("features.MAILING", FeatureNameValue("MAILING", feature_mailing)):
            with patch(
                "features.USER_INITIALIZE",
                FeatureNameValue("USER_INITIALIZE", feature_user_initialize),
            ):
                mock_user_count.return_value = user_count
                user = conduct_call(
                    client,
                    "web.user_initialize",
                    url_for,
                    "POST",
                    {},
                    body=metadata,
                    expected_code=expected_code,
                    headers={"Content-Type": "application/json"},
                )

                if expected_code == 200:
                    assert user.json["username"] == metadata["username"]
                    if feature_mailing:
                        assert user.json["email"] == metadata["email"]
                    else:
                        assert user.json["email"] is None
                    assert user.json.get("encrypted_password", None)

                    if metadata.get("access_token"):
                        assert 40 == len(user.json.get("access_token", ""))
                    else:
                        assert not user.json.get("access_token")

def test_check_manifest_exists_by_tag_404(self):
    params = {
        "repository": self.repository,
        "manifest_ref": "666",
    }
    headers = _get_auth_headers(self.sub, self.ctx, self.repository)
    headers["Accept"] = ", ".join(
        DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(DOCKER_SCHEMA1_CONTENT_TYPES)
    )
    conduct_call(
        self.client,
        "v2.fetch_manifest_by_tagname",
        url_for,
        "HEAD",
        params,
        expected_code=404,
        headers=headers,
    )

def test_check_manifest_exists_from_dockerhub_by_tag(self):
    params = {
        "repository": self.repository,
        "manifest_ref": self.tag,
    }
    headers = self._get_auth_headers(self.repository)
    headers["Accept"] = ", ".join(
        DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(DOCKER_SCHEMA1_CONTENT_TYPES)
    )
    conduct_call(
        self.client,
        "v2.fetch_manifest_by_tagname",
        url_for,
        "HEAD",
        params,
        expected_code=200,
        headers=headers,
    )

def test_pull_proxy_single_namespace(self):
    params = {
        "repository": self.repository2,
        "manifest_ref": self.tag,
    }
    headers = _get_auth_headers(self.sub, self.ctx, self.repository2)
    headers["Accept"] = ", ".join(
        DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(DOCKER_SCHEMA1_CONTENT_TYPES)
    )
    conduct_call(
        self.client,
        "v2.fetch_manifest_by_tagname",
        url_for,
        "GET",
        params,
        expected_code=200,
        headers=headers,
    )

def test_pull_proxy_whole_dockerhub_404(self):
    params = {
        "repository": self.repository,
        "manifest_ref": "666",
    }
    headers = self._get_auth_headers(self.repository)
    headers["Accept"] = ", ".join(
        DOCKER_SCHEMA2_CONTENT_TYPES.union(OCI_CONTENT_TYPES).union(DOCKER_SCHEMA1_CONTENT_TYPES)
    )
    conduct_call(
        self.client,
        "v2.fetch_manifest_by_tagname",
        url_for,
        "GET",
        params,
        expected_code=404,
        headers=headers,
    )

def test_verbs_security(
    user, endpoint, method, repository, single_repo_path, params, expected_statuses, app, client
):
    headers = {}
    if user[1] is not None:
        headers["Authorization"] = gen_basic_auth(user[1], "password")

    if single_repo_path:
        params["repository"] = repository
    else:
        (namespace, repo_name) = repository.split("/")
        params["namespace"] = namespace
        params["repository"] = repo_name

    conduct_call(
        client,
        "verbs." + endpoint,
        url_for,
        method,
        params,
        expected_code=expected_statuses[user[0]],
        headers=headers,
    )

def test_blob_caching(method, endpoint, client, app):
    digest = "sha256:" + hashlib.sha256(b"a").hexdigest()
    location = ImageStorageLocation.get(name="local_us")
    model.blob.store_blob_record_and_temp_link("devtable", "simple", digest, location, 1, 10000000)

    params = {
        "repository": "devtable/simple",
        "digest": digest,
    }

    user = model.user.get_user("devtable")
    access = [{
        "type": "repository",
        "name": "devtable/simple",
        "actions": ["pull"],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
    }

    # Run without caching to make sure the request works. This also preloads some of
    # our global model caches.
    conduct_call(
        client, "v2." + endpoint, url_for, method, params, expected_code=200, headers=headers
    )

    with patch("endpoints.v2.blob.model_cache", InMemoryDataModelCache(TEST_CACHE_CONFIG)):
        # First request should make a DB query to retrieve the blob.
        conduct_call(
            client, "v2." + endpoint, url_for, method, params, expected_code=200, headers=headers
        )

        # Subsequent requests should use the cached blob.
        with assert_query_count(0):
            conduct_call(
                client,
                "v2." + endpoint,
                url_for,
                method,
                params,
                expected_code=200,
                headers=headers,
            )

def test_start_build_disabled_trigger(app, client):
    trigger = model.build.list_build_triggers("devtable", "building")[0]
    trigger.enabled = False
    trigger.save()

    params = {
        "trigger_uuid": trigger.uuid,
    }

    headers = {
        # b64encode requires bytes and returns bytes under Python 3, so encode/decode explicitly.
        "Authorization": "Basic " + base64.b64encode(b"devtable:password").decode("ascii"),
    }

    conduct_call(
        client,
        "webhooks.build_trigger_webhook",
        url_for,
        "POST",
        params,
        None,
        400,
        headers=headers,
    )

def test_generate_registry_jwt(
    scope, username, password, expected_code, expected_scopes, app, client
):
    params = {
        "service": original_app.config["SERVER_HOSTNAME"],
        "scope": scope,
    }

    if callable(password):
        password = password(username)

    headers = {}
    if username and password:
        # b64encode requires bytes and returns bytes under Python 3, so encode/decode explicitly.
        headers["Authorization"] = "Basic %s" % (
            base64.b64encode(("%s:%s" % (username, password)).encode("ascii")).decode("ascii")
        )

    resp = conduct_call(
        client,
        "v2.generate_registry_jwt",
        url_for,
        "GET",
        params,
        {},
        expected_code,
        headers=headers,
    )
    if expected_code != 200:
        return

    token = resp.json["token"]
    decoded = decode_bearer_token(token, instance_keys, original_app.config)
    assert decoded["iss"] == "quay"
    assert decoded["aud"] == original_app.config["SERVER_HOSTNAME"]
    # Parenthesize so the conditional applies to the expected value rather than the whole assert.
    assert decoded["sub"] == (username if username else "(anonymous)")

    expected_access = []
    for scope in expected_scopes:
        name, actions_str = scope.split(":")
        actions = actions_str.split(",") if actions_str else []

        expected_access.append({
            "type": "repository",
            "name": name,
            "actions": actions,
        })

    assert decoded["access"] == expected_access
    assert len(decoded["context"][CLAIM_TUF_ROOTS]) == len(expected_scopes)

def conduct_api_call(client, resource, method, params, body=None, expected_code=200, headers=None):
    """
    Conducts an API call to the given resource via the given client, and ensures its returned
    status matches the code given.

    Returns the response.
    """
    return conduct_call(
        client, resource, api.url_for, method, params, body, expected_code, headers=headers
    )

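# A minimal usage sketch (not part of the original suite) illustrating the helper above:
# conduct_api_call forwards to conduct_call with api.url_for, so a caller passes the API
# resource class plus its URL parameters. The Repository resource and params below mirror
# values used earlier in this file; the expected status code here is an assumption.
def _example_conduct_api_call_usage(client):
    params = {"repository": "devtable/somerepo.bat"}
    return conduct_api_call(client, Repository, "GET", params, expected_code=200)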