def _token_data(access=[], context=None, audience=TEST_AUDIENCE, user=TEST_USER, iat=None,
                exp=None, nbf=None, iss=None, subject=None):
    if subject is None:
        _, subject = build_context_and_subject(ValidatedAuthContext(user=user))

    return {
        'iss': iss or instance_keys.service_name,
        'aud': audience,
        'nbf': nbf if nbf is not None else int(time.time()),
        'iat': iat if iat is not None else int(time.time()),
        'exp': exp if exp is not None else int(time.time() + TOKEN_VALIDITY_LIFETIME_S),
        'sub': subject,
        'access': access,
        'context': context,
    }
def _token_data(
    access=[],
    context=None,
    audience=TEST_AUDIENCE,
    user=TEST_USER,
    iat=None,
    exp=None,
    nbf=None,
    iss=None,
    subject=None,
):
    if subject is None:
        _, subject = build_context_and_subject(ValidatedAuthContext(user=user))

    return {
        "iss": iss or instance_keys.service_name,
        "aud": audience,
        "nbf": nbf if nbf is not None else int(time.time()),
        "iat": iat if iat is not None else int(time.time()),
        "exp": exp if exp is not None else int(time.time() + TOKEN_VALIDITY_LIFETIME_S),
        "sub": subject,
        "access": access,
        "context": context,
    }
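# A minimal sketch of how claims like those returned by _token_data() could be signed into a
# bearer token with PyJWT. The throwaway RSA key, the PyJWT call, and the "kid" header are
# assumptions for illustration only; the tests above sign through the project's own
# generate_bearer_token() and instance_keys machinery instead.
import jwt  # PyJWT
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa


def _sign_token_data_sketch(token_data, kid="test-key"):
    # Generate a throwaway key for the sketch; a real service signs with its registered
    # instance key so the registry can look up the matching public key via "kid".
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    pem = private_key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption(),
    )
    return jwt.encode(token_data, pem, algorithm="RS256", headers={"kid": kid})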
def test_e2e_query_count_manifest_norewrite(client, app):
    repo_ref = registry_model.lookup_repository("devtable", "simple")
    tag = registry_model.get_repo_tag(repo_ref, "latest")
    manifest = registry_model.get_manifest_for_tag(tag)

    params = {
        "repository": "devtable/simple",
        "manifest_ref": manifest.digest,
    }

    user = model.user.get_user("devtable")
    access = [{
        "type": "repository",
        "name": "devtable/simple",
        "actions": ["pull", "push"],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
    }

    # Conduct a call to prime the instance key and other caches.
    conduct_call(
        client,
        "v2.write_manifest_by_digest",
        url_for,
        "PUT",
        params,
        expected_code=201,
        headers=headers,
        raw_body=manifest.internal_manifest_bytes.as_encoded_str(),
    )

    timecode = time.time()

    def get_time():
        return timecode + 10

    with patch("time.time", get_time):
        # Necessary in order to have the tag updates not occur in the same second, which is the
        # granularity currently supported.
        with count_queries() as counter:
            conduct_call(
                client,
                "v2.write_manifest_by_digest",
                url_for,
                "PUT",
                params,
                expected_code=201,
                headers=headers,
                raw_body=manifest.internal_manifest_bytes.as_encoded_str(),
            )

    assert counter.count <= 27
def test_blob_caching(method, endpoint, client, app): digest = "sha256:" + hashlib.sha256(b"a").hexdigest() location = ImageStorageLocation.get(name="local_us") model.blob.store_blob_record_and_temp_link("devtable", "simple", digest, location, 1, 10000000) params = { "repository": "devtable/simple", "digest": digest, } user = model.user.get_user("devtable") access = [{ "type": "repository", "name": "devtable/simple", "actions": ["pull"], }] context, subject = build_context_and_subject( ValidatedAuthContext(user=user)) token = generate_bearer_token(realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys) headers = { "Authorization": "Bearer %s" % token.decode("ascii"), } # Run without caching to make sure the request works. This also preloads some of # our global model caches. conduct_call(client, "v2." + endpoint, url_for, method, params, expected_code=200, headers=headers) with patch("endpoints.v2.blob.model_cache", InMemoryDataModelCache(TEST_CACHE_CONFIG)): # First request should make a DB query to retrieve the blob. conduct_call(client, "v2." + endpoint, url_for, method, params, expected_code=200, headers=headers) # Subsequent requests should use the cached blob. with assert_query_count(0): conduct_call( client, "v2." + endpoint, url_for, method, params, expected_code=200, headers=headers, )
def test_blob_caching(method, endpoint, client, app):
    digest = 'sha256:' + hashlib.sha256("a").hexdigest()
    location = ImageStorageLocation.get(name='local_us')
    model.blob.store_blob_record_and_temp_link('devtable', 'simple', digest, location, 1, 10000000)

    params = {
        'repository': 'devtable/simple',
        'digest': digest,
    }

    user = model.user.get_user('devtable')
    access = [{
        'type': 'repository',
        'name': 'devtable/simple',
        'actions': ['pull'],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(realapp.config['SERVER_HOSTNAME'], subject, context, access,
                                  600, instance_keys)

    headers = {
        'Authorization': 'Bearer %s' % token,
    }

    # Run without caching to make sure the request works. This also preloads some of
    # our global model caches.
    conduct_call(client, 'v2.' + endpoint, url_for, method, params, expected_code=200,
                 headers=headers)

    with patch('endpoints.v2.blob.model_cache', InMemoryDataModelCache()):
        # First request should make a DB query to retrieve the blob.
        conduct_call(client, 'v2.' + endpoint, url_for, method, params, expected_code=200,
                     headers=headers)

        # Subsequent requests should use the cached blob.
        with assert_query_count(0):
            conduct_call(client, 'v2.' + endpoint, url_for, method, params, expected_code=200,
                         headers=headers)
def test_e2e_query_count_manifest_norewrite(client, app):
    tag_manifest = model.tag.load_tag_manifest('devtable', 'simple', 'latest')

    params = {
        'repository': 'devtable/simple',
        'manifest_ref': tag_manifest.digest,
    }

    user = model.user.get_user('devtable')
    access = [{
        'type': 'repository',
        'name': 'devtable/simple',
        'actions': ['pull', 'push'],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(realapp.config['SERVER_HOSTNAME'], subject, context, access,
                                  600, instance_keys)

    headers = {
        'Authorization': 'Bearer %s' % token,
    }

    # Conduct a call to prime the instance key and other caches.
    conduct_call(client, 'v2.write_manifest_by_digest', url_for, 'PUT', params,
                 expected_code=202, headers=headers, raw_body=tag_manifest.json_data)

    timecode = time.time()

    def get_time():
        return timecode + 10

    with patch('time.time', get_time):
        # Necessary in order to have the tag updates not occur in the same second, which is the
        # granularity currently supported.
        with count_queries() as counter:
            conduct_call(client, 'v2.write_manifest_by_digest', url_for, 'PUT', params,
                         expected_code=202, headers=headers, raw_body=tag_manifest.json_data)

    assert counter.count <= 27
def test_blob_upload_offset(client, app):
    user = model.user.get_user("devtable")
    access = [{
        "type": "repository",
        "name": "devtable/simple",
        "actions": ["pull", "push"],
    }]

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
    }

    # Create a blob upload request.
    params = {
        "repository": "devtable/simple",
    }
    response = conduct_call(
        client, "v2.start_blob_upload", url_for, "POST", params, expected_code=202, headers=headers
    )

    upload_uuid = response.headers["Docker-Upload-UUID"]

    # Attempt to start an upload past index zero.
    params = {
        "repository": "devtable/simple",
        "upload_uuid": upload_uuid,
    }

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
        "Content-Range": "13-50",
    }

    conduct_call(
        client,
        "v2.upload_chunk",
        url_for,
        "PATCH",
        params,
        expected_code=416,
        headers=headers,
        body="something",
    )
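# An illustrative client-side sketch of the chunked upload flow exercised above, following
# the Docker Registry v2 API: POST starts a session, each PATCH must carry a Content-Range
# that continues exactly where the previous chunk ended (an out-of-order range is rejected
# with 416, as the test asserts), and a final PUT with the digest closes the upload. The
# registry_url/repo/bearer_token parameters are placeholders, and the sketch assumes the
# registry returns absolute Location URLs.
import hashlib

import requests


def push_blob_in_chunks_sketch(registry_url, repo, bearer_token, data, chunk_size=1024 * 1024):
    headers = {"Authorization": "Bearer %s" % bearer_token}

    # Start an upload session; the registry answers 202 with a Location to PATCH against.
    start = requests.post("%s/v2/%s/blobs/uploads/" % (registry_url, repo), headers=headers)
    start.raise_for_status()
    location = start.headers["Location"]

    # Send the chunks in order, starting at offset zero.
    offset = 0
    while offset < len(data):
        chunk = data[offset : offset + chunk_size]
        chunk_headers = dict(headers)
        chunk_headers["Content-Range"] = "%d-%d" % (offset, offset + len(chunk) - 1)
        chunk_headers["Content-Type"] = "application/octet-stream"
        resp = requests.patch(location, data=chunk, headers=chunk_headers)
        resp.raise_for_status()
        location = resp.headers.get("Location", location)
        offset += len(chunk)

    # Close the upload by PUTting the expected digest.
    digest = "sha256:" + hashlib.sha256(data).hexdigest()
    separator = "&" if "?" in location else "?"
    finish = requests.put("%s%sdigest=%s" % (location, separator, digest), headers=headers)
    finish.raise_for_status()
    return digest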
def _get_image_url_and_auth(self, image):
    """
    Returns a tuple of the url and the auth header value that must be used to fetch the layer
    data itself. If the image can't be addressed, we return None.
    """
    if self._instance_keys is None:
        raise Exception("No Instance keys provided to Security Scanner API")

    path = model.storage.get_layer_path(image.storage)
    locations = self._default_storage_locations

    if not self._storage.exists(locations, path):
        locations = get_storage_locations(image.storage.uuid)
        if not locations or not self._storage.exists(locations, path):
            logger.warning(
                "Could not find a valid location to download layer %s out of %s",
                compute_layer_id(image),
                locations,
            )
            return None, None

    uri = self._storage.get_direct_download_url(locations, path)
    auth_header = None
    if uri is None:
        # Use the registry API instead, with a signed JWT giving access.
        repo_name = image.repository.name
        namespace_name = image.repository.namespace_user.username
        repository_and_namespace = "/".join([namespace_name, repo_name])

        # Generate the JWT which will authorize this request.
        audience = self._server_hostname
        context, subject = build_context_and_subject()
        access = [{
            "type": "repository",
            "name": repository_and_namespace,
            "actions": ["pull"],
        }]

        auth_token = generate_bearer_token(
            audience, subject, context, access, TOKEN_VALIDITY_LIFETIME_S, self._instance_keys
        )
        auth_header = "Bearer " + auth_token.decode("ascii")

        uri = self._uri_creator(repository_and_namespace, image.storage.content_checksum)

    return uri, auth_header
def _auth_header(self, gun, actions): """ Generate a registry auth token for apostille""" access = [{ 'type': 'repository', 'name': gun, 'actions': actions, }] context, subject = build_context_and_subject( auth_context=None, tuf_roots={gun: SIGNER_TUF_ROOT}) token = generate_bearer_token(self._config["SERVER_HOSTNAME"], subject, context, access, TOKEN_VALIDITY_LIFETIME_S, self._instance_keys) return {'Authorization': 'Bearer %s' % token}
def _auth_header(self, gun, actions): """ Generate a registry auth token for apostille. """ access = [{"type": "repository", "name": gun, "actions": actions,}] context, subject = build_context_and_subject( auth_context=None, tuf_roots={gun: SIGNER_TUF_ROOT} ) token = generate_bearer_token( self._config["SERVER_HOSTNAME"], subject, context, access, TOKEN_VALIDITY_LIFETIME_S, self._instance_keys, ) return {"Authorization": "Bearer %s" % token}
def setup(self, client, app): self.client = client self.user = model.user.get_user("devtable") context, subject = build_context_and_subject( ValidatedAuthContext(user=self.user)) access = [{ "type": "repository", "name": self.repository, "actions": ["pull"], }] token = generate_bearer_token(realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys) self.headers = { "Authorization": f"Bearer {token.decode('ascii')}", } try: model.organization.get(self.org) except Exception: org = model.organization.create_organization( self.org, "*****@*****.**", self.user) org.save() if self.config is None: self.config = model.proxy_cache.create_proxy_cache_config( org_name=self.org, upstream_registry=self.registry, expiration_s=3600, ) if self.repo_ref is None: r = model.repository.create_repository(self.org, self.image_name, self.user) assert r is not None self.repo_ref = registry_model.lookup_repository( self.org, self.image_name) assert self.repo_ref is not None if self.blob_digest is None: proxy_model = ProxyModel(self.org, self.image_name, self.user) manifest = proxy_model.lookup_manifest_by_digest( self.repo_ref, self.manifest_digest) self.blob_digest = manifest.get_parsed_manifest().blob_digests[0]
def setup(self, client, app): self.client = client self.user = model.user.get_user("devtable") context, subject = build_context_and_subject( ValidatedAuthContext(user=self.user)) self.ctx = context self.sub = subject if self.org is None: self.org = model.organization.create_organization( self.orgname, "{self.orgname}@devtable.com", self.user) self.org.save() self.config = model.proxy_cache.create_proxy_cache_config( org_name=self.orgname, upstream_registry=self.registry, expiration_s=3600, )
def setup(self, client, app): self.client = client self.user = model.user.get_user("devtable") context, sub = build_context_and_subject( ValidatedAuthContext(user=self.user)) self.ctx = context self.sub = sub try: model.organization.get(self.org) except Exception: org = model.organization.create_organization( self.org, "*****@*****.**", self.user) org.save() try: model.organization.get(self.org2) except Exception: org = model.organization.create_organization( self.org2, "*****@*****.**", self.user, ) org.save() try: model.proxy_cache.get_proxy_cache_config_for_org(self.org) except Exception: model.proxy_cache.create_proxy_cache_config( org_name=self.org, upstream_registry=self.registry, expiration_s=3600, ) try: model.proxy_cache.get_proxy_cache_config_for_org(self.org2) except Exception: model.proxy_cache.create_proxy_cache_config( org_name=self.org2, upstream_registry=self.registry + "/library", expiration_s=3600, )
def headers_for_download(self, repository_ref, blob, timeout=60):
    """
    Returns the headers for downloading the given blob under the given repository.
    """
    uri = self._storage.get_direct_download_url(self._storage.locations, blob.storage_path)
    if uri is not None:
        return {}

    # Otherwise, we mint a JWT and place it into an Auth header.
    audience = self._app.config["SERVER_HOSTNAME"]
    context, subject = build_context_and_subject()
    access = [{
        "type": "repository",
        "name": _repository_and_namespace(repository_ref),
        "actions": ["pull"],
    }]

    assert set(act for acs in access for act in acs["actions"]) == {"pull"}

    auth_token = generate_bearer_token(
        audience, subject, context, access, timeout, self._instance_keys
    )
    return {"Authorization": ["Bearer " + auth_token.decode("ascii")]}
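# An illustrative sketch of consuming headers_for_download(). Note that the method returns
# header values as lists (e.g. {"Authorization": ["Bearer ..."]}), so they are flattened
# before being handed to an HTTP client; the retriever and url arguments are placeholders
# for whatever object exposes the method above and the blob URL being fetched.
import requests


def fetch_blob_sketch(retriever, repository_ref, blob, url):
    headers = retriever.headers_for_download(repository_ref, blob)
    flat_headers = {
        name: value[0] if isinstance(value, list) else value for name, value in headers.items()
    }
    resp = requests.get(url, headers=flat_headers, stream=True)
    resp.raise_for_status()
    return resp.raw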
def generate_registry_jwt(auth_result):
    """
    This endpoint will generate a JWT conforming to the Docker Registry v2 Auth Spec:

    https://docs.docker.com/registry/spec/auth/token/
    """
    audience_param = request.args.get("service")
    logger.debug("Request audience: %s", audience_param)

    scope_params = request.args.getlist("scope") or []
    logger.debug("Scope request: %s", scope_params)

    auth_header = request.headers.get("authorization", "")
    auth_credentials_sent = bool(auth_header)

    # Load the auth context and verify that we've directly received credentials.
    has_valid_auth_context = False
    if get_authenticated_context():
        has_valid_auth_context = not get_authenticated_context().is_anonymous

    if auth_credentials_sent and not has_valid_auth_context:
        # The auth credentials sent for the user are invalid.
        raise InvalidLogin(auth_result.error_message)

    if not has_valid_auth_context and len(scope_params) == 0:
        # In this case, we are doing an auth flow, and it's not an anonymous pull.
        logger.debug("No user and no token sent for empty scope list")
        raise Unauthorized()

    # Build the access list for the authenticated context.
    access = []
    scope_results = []
    for scope_param in scope_params:
        scope_result = _authorize_or_downscope_request(scope_param, has_valid_auth_context)
        if scope_result is None:
            continue

        scope_results.append(scope_result)
        access.append({
            "type": "repository",
            "name": scope_result.registry_and_repo,
            "actions": scope_result.actions,
        })

    # Issue user events.
    user_event_data = {
        "action": "login",
    }

    # Set the user event data for when authed.
    if len(scope_results) > 0:
        if "push" in scope_results[0].actions:
            user_action = "push_start"
        elif "pull" in scope_results[0].actions:
            user_action = "pull_start"
        else:
            user_action = "login"

        user_event_data = {
            "action": user_action,
            "namespace": scope_results[0].namespace,
            "repository": scope_results[0].repository,
        }

    # Send the user event.
    if get_authenticated_user() is not None:
        event = userevents.get_event(get_authenticated_user().username)
        event.publish_event_data("docker-cli", user_event_data)

    # Build the signed JWT.
    tuf_roots = {
        "%s/%s" % (scope_result.namespace, scope_result.repository): scope_result.tuf_root
        for scope_result in scope_results
    }
    context, subject = build_context_and_subject(get_authenticated_context(), tuf_roots=tuf_roots)
    token = generate_bearer_token(
        audience_param, subject, context, access, TOKEN_VALIDITY_LIFETIME_S, instance_keys
    )
    return jsonify({"token": token})
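# An illustrative client-side sketch of the token handshake this endpoint serves (Docker
# Registry v2 token auth): an unauthenticated request to the registry returns 401 with a
# WWW-Authenticate challenge naming the realm and service, the client then asks the token
# endpoint for a scoped token and retries with "Authorization: Bearer <token>". The
# username/password and the pull scope below are placeholders.
import requests


def fetch_registry_token_sketch(registry_url, repository, username, password):
    # The challenge looks like: Bearer realm="https://host/v2/auth",service="host"
    challenge = requests.get("%s/v2/" % registry_url).headers.get("WWW-Authenticate", "")
    fields = {}
    for item in challenge.replace("Bearer ", "", 1).split(","):
        key, _, value = item.partition("=")
        fields[key.strip()] = value.strip().strip('"')

    # Ask the token endpoint for a pull-scoped token using the sent credentials.
    response = requests.get(
        fields.get("realm", ""),
        params={"service": fields.get("service", ""), "scope": "repository:%s:pull" % repository},
        auth=(username, password),
    )
    response.raise_for_status()
    return response.json()["token"]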
def test_blob_mounting(
    mount_digest, source_repo, username, include_from_param, expected_code, client, app
):
    location = ImageStorageLocation.get(name="local_us")

    # Store and link some blobs.
    digest = "sha256:" + hashlib.sha256(b"a").hexdigest()
    model.blob.store_blob_record_and_temp_link("devtable", "simple", digest, location, 1, 10000000)

    digest = "sha256:" + hashlib.sha256(b"b").hexdigest()
    model.blob.store_blob_record_and_temp_link("devtable", "complex", digest, location, 1, 10000000)

    digest = "sha256:" + hashlib.sha256(b"c").hexdigest()
    model.blob.store_blob_record_and_temp_link(
        "public", "publicrepo", digest, location, 1, 10000000
    )

    params = {
        "repository": "devtable/building",
        "mount": mount_digest,
    }
    if include_from_param:
        params["from"] = source_repo

    user = model.user.get_user(username)
    access = [
        {
            "type": "repository",
            "name": "devtable/building",
            "actions": ["pull", "push"],
        }
    ]

    if source_repo.find(username) == 0:
        access.append(
            {
                "type": "repository",
                "name": source_repo,
                "actions": ["pull"],
            }
        )

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )

    headers = {
        "Authorization": "Bearer %s" % token.decode("ascii"),
    }

    conduct_call(
        client,
        "v2.start_blob_upload",
        url_for,
        "POST",
        params,
        expected_code=expected_code,
        headers=headers,
    )

    repository = model.repository.get_repository("devtable", "building")

    if expected_code == 201:
        # Ensure the blob now exists under the repo.
        assert model.oci.blob.get_repository_blob_by_digest(repository, mount_digest)
    else:
        assert model.oci.blob.get_repository_blob_by_digest(repository, mount_digest) is None
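# An illustrative client-side sketch of the cross-repository blob mount driven by the test
# above: a POST with mount= and from= parameters either links the existing blob into the
# target repository (201 Created) or falls back to a normal upload session (202 Accepted),
# for example when the caller lacks pull access to the source repository. The
# registry_url/bearer_token parameters are placeholders.
import requests


def mount_blob_sketch(registry_url, target_repo, source_repo, digest, bearer_token):
    resp = requests.post(
        "%s/v2/%s/blobs/uploads/" % (registry_url, target_repo),
        params={"mount": digest, "from": source_repo},
        headers={"Authorization": "Bearer %s" % bearer_token},
    )
    return resp.status_code == 201  # True when the blob was mounted directly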
def setup(self, client, app, proxy_manifest_response):
    self.client = client
    self.user = model.user.get_user("devtable")
    context, subject = build_context_and_subject(ValidatedAuthContext(user=self.user))
    access = [{
        "type": "repository",
        "name": self.repository,
        "actions": ["pull"],
    }]
    token = generate_bearer_token(
        realapp.config["SERVER_HOSTNAME"], subject, context, access, 600, instance_keys
    )
    self.headers = {
        "Authorization": f"Bearer {token.decode('ascii')}",
    }

    if self.org is None:
        self.org = model.organization.create_organization(
            self.orgname, f"{self.orgname}@devtable.com", self.user
        )
        self.org.save()
        self.config = model.proxy_cache.create_proxy_cache_config(
            org_name=self.orgname,
            upstream_registry=self.registry,
            expiration_s=3600,
        )

    if self.repo_ref is None:
        r = model.repository.create_repository(self.orgname, self.image_name, self.user)
        assert r is not None
        self.repo_ref = registry_model.lookup_repository(self.orgname, self.image_name)
        assert self.repo_ref is not None

    def get_blob(layer):
        content = Bytes.for_string_or_unicode(layer).as_encoded_str()
        digest = str(sha256_digest(content))
        blob = model.blob.store_blob_record_and_temp_link(
            self.orgname,
            self.image_name,
            digest,
            ImageStorageLocation.get(name="local_us"),
            len(content),
            120,
        )
        storage.put_content(["local_us"], get_layer_path(blob), content)
        return blob, digest

    if self.manifest is None:
        layer1 = json.dumps({
            "config": {},
            "rootfs": {"type": "layers", "diff_ids": []},
            "history": [{}],
        })
        _, config_digest = get_blob(layer1)

        layer2 = "test"
        _, blob_digest = get_blob(layer2)

        builder = DockerSchema2ManifestBuilder()
        builder.set_config_digest(config_digest, len(layer1.encode("utf-8")))
        builder.add_layer(blob_digest, len(layer2.encode("utf-8")))
        manifest = builder.build()

        created_manifest = model.oci.manifest.get_or_create_manifest(
            self.repo_ref.id, manifest, storage
        )
        self.manifest = created_manifest.manifest
        assert self.digest == blob_digest
        assert self.manifest is not None

    if self.blob is None:
        self.blob = ImageStorage.filter(ImageStorage.content_checksum == self.digest).get()
def test_blob_mounting(mount_digest, source_repo, username, expect_success, client, app):
    location = ImageStorageLocation.get(name='local_us')

    # Store and link some blobs.
    digest = 'sha256:' + hashlib.sha256("a").hexdigest()
    model.blob.store_blob_record_and_temp_link('devtable', 'simple', digest, location, 1, 10000000)

    digest = 'sha256:' + hashlib.sha256("b").hexdigest()
    model.blob.store_blob_record_and_temp_link('devtable', 'complex', digest, location, 1, 10000000)

    digest = 'sha256:' + hashlib.sha256("c").hexdigest()
    model.blob.store_blob_record_and_temp_link('public', 'publicrepo', digest, location, 1, 10000000)

    params = {
        'repository': 'devtable/building',
        'mount': mount_digest,
        'from': source_repo,
    }

    user = model.user.get_user(username)
    access = [{
        'type': 'repository',
        'name': 'devtable/building',
        'actions': ['pull', 'push'],
    }]

    if source_repo.find(username) == 0:
        access.append({
            'type': 'repository',
            'name': source_repo,
            'actions': ['pull'],
        })

    context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
    token = generate_bearer_token(realapp.config['SERVER_HOSTNAME'], subject, context, access,
                                  600, instance_keys)

    headers = {
        'Authorization': 'Bearer %s' % token,
    }

    expected_code = 201 if expect_success else 202
    conduct_call(client, 'v2.start_blob_upload', url_for, 'POST', params,
                 expected_code=expected_code, headers=headers)

    if expect_success:
        # Ensure the blob now exists under the repo.
        model.blob.get_repo_blob_by_digest('devtable', 'building', mount_digest)
    else:
        with pytest.raises(model.blob.BlobDoesNotExist):
            model.blob.get_repo_blob_by_digest('devtable', 'building', mount_digest)