def bucket_notification_list(bucket_name):
    """Implement the 'Notifications: list' API.

    Return the notification configurations attached to a bucket.
    """
    bucket = testbench_utils.lookup_bucket(bucket_name)
    bucket.check_preconditions(flask.request)
    payload = {
        'kind': 'storage#notifications',
        'items': bucket.list_notifications(),
    }
    return testbench_utils.filtered_response(flask.request, payload)
def projects_get(project_id):
    """Implement the `Projects.serviceAccount: get` API.

    Return the service account email associated with the project.
    """
    account_email = get_project(project_id).service_account_email()
    response = {
        'kind': 'storage#serviceAccount',
        'email_address': account_email,
    }
    return testbench_utils.filtered_response(flask.request, response)
def bucket_default_object_acl_list(bucket_name):
    """Implement the 'BucketAccessControls: list' API.

    List the default object ACL entries of a bucket.
    """
    bucket = testbench_utils.lookup_bucket(bucket_name)
    bucket.check_preconditions(flask.request)
    # An absent `defaultObjectAcl` is reported as an empty list.
    return testbench_utils.filtered_response(flask.request, {
        'items': bucket.metadata.get('defaultObjectAcl', []),
    })
def objects_acl_update(bucket_name, object_name, entity):
    """Implement the 'ObjectAccessControls: update' API.

    Replace the role of `entity` in the ACL of the requested object revision.
    """
    _, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    revision = blob.get_revision(flask.request)
    request_payload = json.loads(flask.request.data)
    new_role = request_payload.get('role', '')
    updated_acl = revision.update_acl(entity, new_role)
    return testbench_utils.filtered_response(flask.request, updated_acl)
def objects_delete(bucket_name, object_name):
    """Implement the 'Objects: delete' API.  Delete objects."""
    object_path, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    # When the last revision is deleted the object itself goes away.
    if blob.del_revision(flask.request):
        testbench_utils.delete_object(object_path)
    return testbench_utils.filtered_response(flask.request, {})
def bucket_acl_create(bucket_name):
    """Implement the 'BucketAccessControls: create' API.

    Insert a new ACL entry (entity/role pair) on a bucket.
    """
    bucket = testbench_utils.lookup_bucket(bucket_name)
    bucket.check_preconditions(flask.request)
    request_payload = json.loads(flask.request.data)
    entity = request_payload.get('entity', '')
    role = request_payload.get('role', '')
    new_acl = bucket.insert_acl(entity, role)
    return testbench_utils.filtered_response(flask.request, new_acl)
def hmac_keys_insert(project_id):
    """Implement the `HmacKeys: insert` API.

    Create a new HMAC key for the given service account.
    """
    project = get_project(project_id)
    service_account = flask.request.args.get('serviceAccount')
    # The service account is mandatory for key creation.
    if service_account is None:
        raise error_response.ErrorResponse(
            'serviceAccount is a required parameter', status_code=400)
    new_key = project.insert_hmac_key(service_account)
    return testbench_utils.filtered_response(flask.request, new_key)
def projects_get(project_id):
    """Implement the `Projects.serviceAccount: get` API.

    This variant returns a fixed placeholder address; the project id is
    accepted but not used to derive the email.
    """
    response = {
        'kind': 'storage#serviceAccount',
        'email_address': '*****@*****.**',
    }
    return testbench_utils.filtered_response(flask.request, response)
def objects_acl_list(bucket_name, object_name):
    """Implement the 'ObjectAccessControls: list' API.

    List the ACL entries of the requested object revision.
    """
    _, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    revision = blob.get_revision(flask.request)
    # A revision without an `acl` entry is reported as an empty list.
    return testbench_utils.filtered_response(flask.request, {
        'items': revision.metadata.get('acl', []),
    })
def objects_insert(bucket_name):
    """Implement the 'Objects: insert' API.  Insert a new GCS Object.

    Supports the `media`, `multipart` and `resumable` upload types.

    :param bucket_name:str the name of the bucket that will hold the object.
    :rtype: flask.Response
    """
    # Strip the /upload/ prefix so generated links refer to the regular
    # (non-upload) endpoint.
    gcs_url = flask.url_for(
        "objects_insert", bucket_name=bucket_name, _external=True
    ).replace("/upload/", "/")
    insert_magic_bucket(gcs_url)
    upload_type = flask.request.args.get("uploadType")
    if upload_type is None:
        raise error_response.ErrorResponse(
            "uploadType not set in Objects: insert", status_code=400
        )
    if upload_type not in {"multipart", "media", "resumable"}:
        raise error_response.ErrorResponse(
            "testbench does not support %s uploadType" % upload_type, status_code=400
        )
    if upload_type == "resumable":
        # Resumable uploads only create an upload session here; the media
        # arrives in later requests handled by the bucket.
        bucket = testbench_utils.lookup_bucket(bucket_name)
        upload_url = flask.url_for(
            "objects_insert", bucket_name=bucket_name, _external=True
        )
        return bucket.create_resumable_upload(upload_url, flask.request)
    object_path = None
    blob = None
    current_version = None
    if upload_type == "media":
        # Simple (media) uploads take the object name from the query string.
        object_name = flask.request.args.get("name", None)
        if object_name is None:
            raise error_response.ErrorResponse(
                "name not set in Objects: insert", status_code=412
            )
        object_path, blob = testbench_utils.get_object(
            bucket_name, object_name, gcs_object.GcsObject(bucket_name, object_name)
        )
        blob.check_preconditions(flask.request)
        current_version = blob.insert(gcs_url, flask.request)
    else:
        # Multipart uploads carry both the metadata (resource) and the media
        # in the body; the name may come from the query string or the resource.
        resource, media_headers, media_body = testbench_utils.parse_multi_part(
            flask.request
        )
        object_name = flask.request.args.get("name", resource.get("name", None))
        if object_name is None:
            raise error_response.ErrorResponse(
                "name not set in Objects: insert", status_code=412
            )
        object_path, blob = testbench_utils.get_object(
            bucket_name, object_name, gcs_object.GcsObject(bucket_name, object_name)
        )
        blob.check_preconditions(flask.request)
        current_version = blob.insert_multipart(
            gcs_url, flask.request, resource, media_headers, media_body
        )
    testbench_utils.insert_object(object_path, blob)
    return testbench_utils.filtered_response(flask.request, current_version.metadata)
def objects_acl_create(bucket_name, object_name):
    """Implement the 'ObjectAccessControls: create' API.

    Insert a new ACL entry on the requested object revision.
    """
    _, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    revision = blob.get_revision(flask.request)
    request_payload = json.loads(flask.request.data)
    entity = request_payload.get("entity", "")
    role = request_payload.get("role", "")
    new_acl = revision.insert_acl(entity, role)
    return testbench_utils.filtered_response(flask.request, new_acl)
def buckets_list():
    """Implement the 'Buckets: list' API: return the Buckets in a project."""
    base_url = flask.url_for("gcs_index", _external=True)
    project = flask.request.args.get("project")
    # Reject missing project ids and ids that end in a dash.
    if project is None or project.endswith("-"):
        raise error_response.ErrorResponse(
            "Invalid or missing project id in `Buckets: list`")
    insert_magic_bucket(base_url)
    items = [bucket.metadata for _, bucket in testbench_utils.all_buckets()]
    return testbench_utils.filtered_response(
        flask.request, {"next_page_token": "", "items": items})
def buckets_list():
    """Implement the 'Buckets: list' API: return the Buckets in a project."""
    base_url = flask.url_for('gcs_index', _external=True)
    project_id = flask.request.args.get('project')
    # Reject missing project ids and ids that end in a dash.
    if project_id is None or project_id.endswith('-'):
        raise error_response.ErrorResponse(
            'Invalid or missing project id in `Buckets: list`')
    insert_magic_bucket(base_url)
    result = {'next_page_token': '', 'items': []}
    for _, bucket in testbench_utils.all_buckets():
        result['items'].append(bucket.metadata)
    return testbench_utils.filtered_response(flask.request, result)
def objects_get(bucket_name, object_name):
    """Implement the 'Objects: get' API.  Read objects or their metadata.

    Metadata requests (alt=json or no alt) are answered directly; media
    downloads are delegated to `objects_get_common`.
    """
    _, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    revision = blob.get_revision(flask.request)
    alt = flask.request.args.get("alt", None)
    if alt not in (None, "json", "media"):
        raise error_response.ErrorResponse("Invalid alt=%s parameter" % alt)
    if alt != "media":
        # Default and alt=json requests return the metadata only.
        return testbench_utils.filtered_response(flask.request, revision.metadata)
    revision.validate_encryption_for_read(flask.request)
    return objects_get_common(bucket_name, object_name, revision)
def delete_resumable_upload(bucket_name):
    """Cancel a pending resumable upload session on a bucket."""
    if flask.request.args.get("uploadType") != "resumable":
        raise error_response.ErrorResponse(
            "testbench can delete resumable uploadType only", status_code=400)
    upload_id = flask.request.args.get("upload_id")
    if upload_id is None:
        raise error_response.ErrorResponse(
            "missing upload_id in delete_resumable_upload", status_code=400)
    bucket = testbench_utils.lookup_bucket(bucket_name)
    if upload_id not in bucket.resumable_uploads:
        raise error_response.ErrorResponse(
            "upload_id does not exist", status_code=404)
    # Drop the session; any buffered upload state goes with it.
    bucket.resumable_uploads.pop(upload_id)
    return testbench_utils.filtered_response(flask.request, {})
def objects_rewrite(source_bucket, source_object, destination_bucket,
                    destination_object):
    """Implement the 'Objects: rewrite' API.

    Perform one step of a (possibly multi-step) rewrite operation.
    """
    base_url = flask.url_for('gcs_index', _external=True)
    insert_magic_bucket(base_url)
    _, source_blob = testbench_utils.lookup_object(source_bucket, source_object)
    # Preconditions on the source object arrive under the `ifSource*` names.
    source_blob.check_preconditions(
        flask.request,
        if_generation_match='ifSourceGenerationMatch',
        if_generation_not_match='ifSourceGenerationNotMatch',
        if_metageneration_match='ifSourceMetagenerationMatch',
        if_metageneration_not_match='ifSourceMetagenerationNotMatch')
    rewrite_response = source_blob.rewrite_step(
        base_url, flask.request, destination_bucket, destination_object)
    return testbench_utils.filtered_response(flask.request, rewrite_response)
def buckets_insert():
    """Implement the 'Buckets: insert' API: create a new Bucket."""
    base_url = flask.url_for("gcs_index", _external=True)
    insert_magic_bucket(base_url)
    request_payload = json.loads(flask.request.data)
    bucket_name = request_payload.get("name")
    if bucket_name is None:
        raise error_response.ErrorResponse(
            "Missing bucket name in `Buckets: insert`", status_code=412)
    # Creating an existing bucket is an error.
    if testbench_utils.has_bucket(bucket_name):
        raise error_response.ErrorResponse(
            "Bucket %s already exists" % bucket_name, status_code=400)
    new_bucket = gcs_bucket.GcsBucket(base_url, bucket_name)
    testbench_utils.insert_bucket(bucket_name, new_bucket)
    return testbench_utils.filtered_response(flask.request, new_bucket.metadata)
def objects_compose(bucket_name, object_name):
    """Implement the 'Objects: compose' API: concatenate Objects.

    :param bucket_name:str the name of the bucket holding the components and
        the destination object.
    :param object_name:str the name of the composed object.
    :rtype: flask.Response
    """
    payload = json.loads(flask.request.data)
    source_objects = payload["sourceObjects"]
    if source_objects is None:
        raise error_response.ErrorResponse(
            "You must provide at least one source component.", status_code=400
        )
    # The GCS API limits compose requests to 32 components.
    if len(source_objects) > 32:
        raise error_response.ErrorResponse(
            "The number of source components provided"
            " (%d) exceeds the maximum (32)" % len(source_objects),
            status_code=400,
        )
    composed_media = b""
    for source_object in source_objects:
        source_object_name = source_object.get("name")
        if source_object_name is None:
            raise error_response.ErrorResponse("Required.", status_code=400)
        source_object_path, source_blob = testbench_utils.lookup_object(
            bucket_name, source_object_name
        )
        # Use the latest revision unless a specific generation is requested.
        source_revision = source_blob.get_latest()
        generation = source_object.get("generation")
        if generation is not None:
            source_revision = source_blob.get_revision_by_generation(generation)
        if source_revision is None:
            raise error_response.ErrorResponse(
                "No such object: %s" % source_object_path, status_code=404
            )
        # Only `ifGenerationMatch` is supported as a per-component precondition.
        object_preconditions = source_object.get("objectPreconditions")
        if object_preconditions is not None:
            if_generation_match = object_preconditions.get("ifGenerationMatch")
            source_blob.check_preconditions_by_value(
                if_generation_match, None, None, None
            )
        composed_media += source_revision.media
    composed_object_path, composed_object = testbench_utils.get_object(
        bucket_name, object_name, gcs_object.GcsObject(bucket_name, object_name)
    )
    composed_object.check_preconditions(flask.request)
    base_url = flask.url_for("gcs_index", _external=True)
    current_version = composed_object.compose_from(
        base_url, flask.request, composed_media
    )
    testbench_utils.insert_object(composed_object_path, composed_object)
    return testbench_utils.filtered_response(flask.request, current_version.metadata)
def buckets_update(bucket_name):
    """Implement the 'Buckets: update' API: update an existing Bucket."""
    base_url = flask.url_for('gcs_index', _external=True)
    insert_magic_bucket(base_url)
    request_payload = json.loads(flask.request.data)
    payload_name = request_payload.get('name')
    if payload_name is None:
        raise error_response.ErrorResponse(
            'Missing bucket name in `Buckets: update`', status_code=412)
    # The name in the payload must agree with the URL parameter.
    if payload_name != bucket_name:
        raise error_response.ErrorResponse(
            'Mismatched bucket name parameter in `Buckets: update`',
            status_code=400)
    bucket = testbench_utils.lookup_bucket(bucket_name)
    bucket.check_preconditions(flask.request)
    bucket.update_from_metadata(request_payload)
    return testbench_utils.filtered_response(flask.request, bucket.metadata)
def buckets_insert():
    """Implement the 'Buckets: insert' API: create a new Bucket."""
    base_url = flask.url_for('gcs_index', _external=True)
    insert_magic_bucket(base_url)
    request_payload = json.loads(flask.request.data)
    bucket_name = request_payload.get('name')
    # Reject requests without a name, with an invalid name, or naming an
    # already-existing bucket.
    if bucket_name is None:
        raise error_response.ErrorResponse(
            'Missing bucket name in `Buckets: insert`', status_code=412)
    if not testbench_utils.validate_bucket_name(bucket_name):
        raise error_response.ErrorResponse(
            'Invalid bucket name in `Buckets: insert`')
    if testbench_utils.has_bucket(bucket_name):
        raise error_response.ErrorResponse(
            'Bucket %s already exists' % bucket_name, status_code=400)
    new_bucket = gcs_bucket.GcsBucket(base_url, bucket_name)
    new_bucket.update_from_metadata(request_payload)
    testbench_utils.insert_bucket(bucket_name, new_bucket)
    return testbench_utils.filtered_response(flask.request, new_bucket.metadata)
def objects_list(bucket_name):
    """Implement the 'Objects: list' API: return the objects in a bucket."""
    # The lookup raises an error when the bucket does not exist.
    base_url = flask.url_for("gcs_index", _external=True)
    insert_magic_bucket(base_url)
    _ = testbench_utils.lookup_bucket(bucket_name)
    versions_arg = flask.request.args.get("versions")
    include_all_versions = versions_arg is not None and bool(versions_arg)
    items = []
    for name, obj in testbench_utils.all_objects():
        # Only objects that live in this bucket are listed.
        if not name.startswith(bucket_name + "/o"):
            continue
        latest = obj.get_latest()
        if latest is None:
            continue
        if include_all_versions:
            items.extend(v.metadata for v in obj.revisions.values())
        else:
            items.append(latest.metadata)
    return testbench_utils.filtered_response(
        flask.request, {"next_page_token": "", "items": items})
def objects_insert(bucket_name):
    """Implement the 'Objects: insert' API.  Insert a new GCS Object.

    Supports the `media`, `multipart` and `resumable` upload types.
    """
    # Strip /upload/ so generated links point at the non-upload endpoint.
    gcs_url = flask.url_for(
        'objects_insert', bucket_name=bucket_name,
        _external=True).replace('/upload/', '/')
    insert_magic_bucket(gcs_url)
    upload_kind = flask.request.args.get('uploadType')
    if upload_kind is None:
        raise error_response.ErrorResponse(
            'uploadType not set in Objects: insert', status_code=400)
    if upload_kind not in {'multipart', 'media', 'resumable'}:
        raise error_response.ErrorResponse(
            'testbench does not support %s uploadType' % upload_kind,
            status_code=400)
    if upload_kind == 'resumable':
        # Resumable uploads only create a session here; the media arrives
        # in later requests handled by the bucket.
        bucket = testbench_utils.lookup_bucket(bucket_name)
        upload_url = flask.url_for(
            'objects_insert', bucket_name=bucket_name, _external=True)
        return bucket.create_resumable_upload(upload_url, flask.request)
    object_name = flask.request.args.get('name', None)
    if object_name is None:
        raise error_response.ErrorResponse(
            'name not set in Objects: insert', status_code=412)
    object_path, blob = testbench_utils.get_object(
        bucket_name, object_name,
        gcs_object.GcsObject(bucket_name, object_name))
    blob.check_preconditions(flask.request)
    if upload_kind == 'media':
        current_version = blob.insert(gcs_url, flask.request)
    else:
        current_version = blob.insert_multipart(gcs_url, flask.request)
    testbench_utils.insert_object(object_path, blob)
    return testbench_utils.filtered_response(
        flask.request, current_version.metadata)
def objects_list(bucket_name):
    """Implement the 'Objects: list' API: return the objects in a bucket.

    Supports the `versions`, `prefix`, `delimiter`, `startOffset` and
    `endOffset` query parameters.

    :param bucket_name:str the name of the bucket.
    :rtype: flask.Response
    """
    # Lookup the bucket, if this fails the bucket does not exist, and this
    # function should return an error.
    base_url = flask.url_for("gcs_index", _external=True)
    insert_magic_bucket(base_url)
    _ = testbench_utils.lookup_bucket(bucket_name)
    # BUGFIX: the result used to be seeded with a misspelled "prefixes:" key
    # (note the trailing colon), which leaked into every response as a bogus,
    # always-empty field next to the real "prefixes" key set below.
    result = {"next_page_token": "", "items": [], "prefixes": []}
    versions_parameter = flask.request.args.get("versions")
    all_versions = versions_parameter is not None and bool(versions_parameter)
    prefixes = set()
    prefix = flask.request.args.get("prefix", "", type=str)
    delimiter = flask.request.args.get("delimiter", "", type=str)
    start_offset = flask.request.args.get("startOffset", "", type=str)
    end_offset = flask.request.args.get("endOffset", "", type=str)
    bucket_link = bucket_name + "/o/"
    for name, o in testbench_utils.all_objects():
        if name.find(bucket_link + prefix) != 0:
            continue
        if o.get_latest() is None:
            continue
        # Offsets compare against the object name without the bucket prefix.
        if name[len(bucket_link):] < start_offset:
            continue
        if end_offset != "" and name[len(bucket_link):] >= end_offset:
            continue
        # We assume `delimiter` has only one character.
        delimiter_index = name.find(delimiter, len(bucket_link + prefix))
        if delimiter != "" and delimiter_index > 0:
            # We don't want to include `bucket_link` in the returned prefix.
            prefixes.add(name[len(bucket_link):delimiter_index + 1])
            continue
        if all_versions:
            for object_version in o.revisions.values():
                result["items"].append(object_version.metadata)
        else:
            result["items"].append(o.get_latest().metadata)
    result["prefixes"] = list(prefixes)
    return testbench_utils.filtered_response(flask.request, result)
def hmac_keys_list(project_id):
    """Implement the 'HmacKeys: list' API: return the HMAC keys in a project."""
    project = get_project(project_id)
    # Soft-deleted keys are hidden unless `deleted=true` is requested.
    include_deleted = flask.request.args.get("deleted") == "true"
    sa_email = flask.request.args.get("serviceAccountEmail")
    if sa_email:
        # Restrict the listing to a single service account when requested.
        service_account = project.service_account(sa_email)
        items = service_account.key_items() if service_account else []
    else:
        items = []
        for account in project.service_accounts.values():
            items.extend(account.key_items())
    if not include_deleted:
        items = [i for i in items if i.get("state") != "DELETED"]
    result = {
        "kind": "storage#hmacKeysMetadata",
        "next_page_token": "",
        "items": items,
    }
    return testbench_utils.filtered_response(flask.request, result)
def objects_get(bucket_name, object_name):
    """Implement the 'Objects: get' API.  Read objects or their metadata."""
    _, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    revision = blob.get_revision(flask.request)
    alt = flask.request.args.get('alt', None)
    if alt is None or alt == 'json':
        return testbench_utils.filtered_response(
            flask.request, revision.metadata)
    if alt != 'media':
        raise error_response.ErrorResponse('Invalid alt=%s parameter' % alt)
    revision.validate_encryption_for_read(flask.request)
    # The testbench can be instructed to return corrupted data, so clients
    # can verify their download validation logic.
    instructions = flask.request.headers.get('x-goog-testbench-instructions')
    payload = revision.media
    if instructions == 'return-corrupted-data':
        payload = testbench_utils.corrupt_media(payload)
    response = flask.make_response(payload)
    size = len(payload)
    response.headers['Content-Range'] = 'bytes 0-%d/%d' % (size - 1, size)
    response.headers['x-goog-hash'] = revision.x_goog_hash_header()
    return response
def hmac_keys_list(project_id):
    """Implement the 'HmacKeys: list' API: return the HMAC keys in a project."""
    project = get_project(project_id)
    # Soft-deleted keys are hidden unless `deleted=true` is requested.
    include_deleted = flask.request.args.get('deleted') == 'true'
    sa_name = flask.request.args.get('serviceAccount')
    if sa_name:
        # Restrict the listing to a single service account when requested.
        service_account = project.service_account(sa_name)
        items = service_account.key_items() if service_account else []
    else:
        items = []
        for account in project.service_accounts.values():
            items.extend(account.key_items())
    if not include_deleted:
        items = [i for i in items if i.get('state') != 'DELETED']
    result = {
        'kind': 'storage#hmacKeys',
        'next_page_token': '',
        'items': items,
    }
    return testbench_utils.filtered_response(flask.request, result)
def objects_copy(source_bucket, source_object, destination_bucket,
                 destination_object):
    """Implement the 'Objects: copy' API, copy an object."""
    source_path, source_blob = testbench_utils.lookup_object(
        source_bucket, source_object)
    # Preconditions on the source object arrive under the `ifSource*` names.
    source_blob.check_preconditions(
        flask.request,
        if_generation_match='ifSourceGenerationMatch',
        if_generation_not_match='ifSourceGenerationNotMatch',
        if_metageneration_match='ifSourceMetagenerationMatch',
        if_metageneration_not_match='ifSourceMetagenerationNotMatch')
    source_revision = source_blob.get_revision(
        flask.request, 'sourceGeneration')
    if source_revision is None:
        raise error_response.ErrorResponse(
            'Revision not found %s' % source_path, status_code=404)
    destination_path, destination = testbench_utils.get_object(
        destination_bucket, destination_object,
        gcs_object.GcsObject(destination_bucket, destination_object))
    base_url = flask.url_for('gcs_index', _external=True)
    current_version = destination.copy_from(
        base_url, flask.request, source_revision)
    testbench_utils.insert_object(destination_path, destination)
    return testbench_utils.filtered_response(
        flask.request, current_version.metadata)
def objects_get(bucket_name, object_name):
    """Implement the 'Objects: get' API.  Read objects or their metadata.

    Honors the `Range:` request header for media downloads, and the
    `x-goog-testbench-instructions` header used to simulate error
    conditions.

    :param bucket_name:str the name of the bucket holding the object.
    :param object_name:str the name of the object.
    :rtype: flask.Response
    """
    _, blob = testbench_utils.lookup_object(bucket_name, object_name)
    blob.check_preconditions(flask.request)
    revision = blob.get_revision(flask.request)
    media = flask.request.args.get('alt', None)
    if media is None or media == 'json':
        return testbench_utils.filtered_response(flask.request, revision.metadata)
    if media != 'media':
        raise error_response.ErrorResponse('Invalid alt=%s parameter' % media)
    revision.validate_encryption_for_read(flask.request)
    # Respect the Range: header, if present.  Only simple `bytes=a-b`
    # (inclusive) ranges are supported.
    range_header = flask.request.headers.get('range')
    response_payload = revision.media
    total_size = len(response_payload)
    begin = 0
    end = total_size
    if range_header is not None:
        m = re.match('bytes=([0-9]+)-([0-9]+)', range_header)
        if m:
            begin = int(m.group(1))
            end = int(m.group(2))
            response_payload = response_payload[begin:end + 1]
    # Process custom headers to test error conditions.
    instructions = flask.request.headers.get('x-goog-testbench-instructions')
    if instructions == 'return-corrupted-data':
        response_payload = testbench_utils.corrupt_media(response_payload)
    response = flask.make_response(response_payload)
    length = len(response_payload)
    # BUGFIX: Content-Range reports the *inclusive* last byte and the full
    # object size (RFC 7233).  The previous code used `end - 1` and the
    # partial payload length, producing an invalid header for ranged reads.
    # It also left debug `print` statements in place; those are removed.
    content_range = 'bytes %d-%d/%d' % (begin, begin + length - 1, total_size)
    response.headers['Content-Range'] = content_range
    response.headers['x-goog-hash'] = revision.x_goog_hash_header()
    return response
def hmac_keys_update(project_id, access_id):
    """Implement the `HmacKeys: update` API.

    Note: the docstring previously said `HmacKeys: delete`; the body updates
    a key, so that appears to have been a copy/paste slip.

    :param project_id:str the project that owns the key.
    :param access_id:str the access id of the key to update.
    :rtype: flask.Response
    """
    project = get_project(project_id)
    payload = json.loads(flask.request.data)
    return testbench_utils.filtered_response(
        flask.request, project.update_hmac_key(access_id, payload))
def hmac_keys_get(project_id, access_id):
    """Implement the `HmacKeys: get` API.

    Note: the docstring previously said `HmacKeys: delete`; the body fetches
    a key, so that appears to have been a copy/paste slip.

    :param project_id:str the project that owns the key.
    :param access_id:str the access id of the key to return.
    :rtype: flask.Response
    """
    project = get_project(project_id)
    return testbench_utils.filtered_response(
        flask.request, project.get_hmac_key(access_id))