def process_request(self, req, project_id, table_name):
    """List backups for a table (placeholder: nothing is fetched yet).

    Validates the request and returns an empty backups listing in the
    same response shape as the real listing handlers.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)

    params = req.params.copy()

    # exclusive_start_backup_id = params.pop(
    #     parser.Props.EXCLUSIVE_START_BACKUP_ID, None)

    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    # NOTE(review): no storage call here — backups is always empty,
    # presumably until storage-side listing is wired in.
    backups = []

    response = {}
    # Unreachable while backups stays empty; kept for parity with the
    # real listing handlers (full page => expose pagination cursor).
    if backups and str(limit) == str(len(backups)):
        response[parser.Props.LAST_EVALUATED_BACKUP_ID] = backups[-1].id

    prefix = req.path_url
    response[parser.Props.BACKUPS] = [
        parser.Parser.format_backup(b, prefix) for b in backups
    ]
    return response
def process_request(self, req, project_id, table_name):
    """List restore jobs for a table (placeholder: nothing is fetched).

    Validates the request and returns an empty restore-job listing in
    the same response shape as the real listing handlers.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)

    params = req.params.copy()

    # exclusive_start_restore_job_id = params.pop(
    #     parser.Props.EXCLUSIVE_START_RESTORE_JOB_ID, None)

    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    # NOTE(review): no storage call here — restore_jobs is always empty,
    # presumably until storage-side listing is wired in.
    restore_jobs = []

    response = {}
    # Unreachable while restore_jobs stays empty; kept for parity with
    # the real listing handlers.
    if restore_jobs and str(limit) == str(len(restore_jobs)):
        response[parser.Props.LAST_EVALUATED_RESTORE_JOB_ID] = (
            restore_jobs[-1].id)

    prefix = req.path_url
    response[parser.Props.RESTORE_JOBS] = [
        parser.Parser.format_restore_job(job, prefix)
        for job in restore_jobs
    ]
    return response
def parse_batch_get_request_items(cls, request_items_json):
    """Parse BatchGetItem request items into GetItemRequest models.

    Each entry maps a table name to a request body containing KEYS and
    optional CONSISTENT_READ / ATTRIBUTES_TO_GET properties.
    """
    requests = []
    for table_name, body in request_items_json.iteritems():
        validation.validate_table_name(table_name)
        validation.validate_object(body, table_name)

        consistent = body.pop(Props.CONSISTENT_READ, False)
        validation.validate_boolean(consistent, Props.CONSISTENT_READ)

        attrs_to_get = body.pop(Props.ATTRIBUTES_TO_GET, None)
        if attrs_to_get is not None:
            attrs_to_get = validation.validate_set(
                attrs_to_get, Props.ATTRIBUTES_TO_GET)
            for attr_name in attrs_to_get:
                validation.validate_attr_name(attr_name)

        keys = body.pop(Props.KEYS, None)
        validation.validate_list(keys, Props.KEYS)

        # Everything recognized has been popped; anything left is invalid.
        validation.validate_unexpected_props(body, table_name)

        for key in keys:
            requests.append(
                models.GetItemRequest(
                    table_name, cls.parse_item_attributes(key),
                    attrs_to_get, consistent=consistent))
    return requests
def list_tables(self, req, project_id):
    """List table names for the project, with optional pagination.

    Reports ``LAST_EVALUATED_TABLE_NAME`` when the returned page is
    full, and a self link per table.
    """
    LOG.debug(req.path_url)
    # Fix: verify the caller may act on this project before touching the
    # context or storage. Every sibling handler performs this check; it
    # was missing here, allowing cross-tenant table listing.
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    params = req.params.copy()

    exclusive_start_table_name = params.pop(
        parser.Props.EXCLUSIVE_START_TABLE_NAME, None)
    if exclusive_start_table_name:
        validation.validate_table_name(exclusive_start_table_name)

    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    validation.validate_unexpected_props(params, "params")

    table_names = storage.list_tables(
        req.context,
        exclusive_start_table_name=exclusive_start_table_name,
        limit=limit)

    res = {}
    # A full page implies more results may exist — expose the cursor.
    if table_names and str(limit) == str(len(table_names)):
        res[parser.Props.LAST_EVALUATED_TABLE_NAME] = table_names[-1]

    res["tables"] = [{"rel": "self",
                      "href": "{url}/{name}".format(url=req.path_url,
                                                    name=name)}
                     for name in table_names]
    return res
def parse_batch_write_request_items(cls, request_items_json):
    """Parse BatchWriteItem request items into WriteItemRequest models.

    Returns a dict mapping table name to the list of put/delete write
    requests parsed for that table; unknown request types raise
    ValidationError.
    """
    request_map = {}
    for table_name, requests_json in request_items_json.iteritems():
        validation.validate_table_name(table_name)
        validation.validate_list_of_objects(requests_json, table_name)

        table_requests = []
        for request_json in requests_json:
            for request_type, request_body in request_json.iteritems():
                validation.validate_string(request_type, "request_type")
                if request_type == Props.REQUEST_PUT:
                    validation.validate_object(request_body, request_type)
                    item = request_body.pop(Props.ITEM, None)
                    validation.validate_object(item, Props.ITEM)
                    validation.validate_unexpected_props(
                        request_body, request_type)
                    table_requests.append(
                        models.WriteItemRequest.put(
                            cls.parse_item_attributes(item)))
                elif request_type == Props.REQUEST_DELETE:
                    validation.validate_object(request_body, request_type)
                    key = request_body.pop(Props.KEY, None)
                    validation.validate_object(key, Props.KEY)
                    validation.validate_unexpected_props(
                        request_body, request_type)
                    table_requests.append(
                        models.WriteItemRequest.delete(
                            cls.parse_item_attributes(key)))
                else:
                    raise exception.ValidationError(
                        _("Unsupported request type found: "
                          "%(request_type)s"),
                        request_type=request_type)
        request_map[table_name] = table_requests
    return request_map
def process_request(self, req, project_id, table_name):
    """Placeholder restore-job listing: always returns an empty listing.

    Validates the request; no restore jobs are read from storage.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)

    query = req.params.copy()

    # exclusive_start_restore_job_id = params.pop(
    #     parser.Props.EXCLUSIVE_START_RESTORE_JOB_ID, None)

    page_limit = query.pop(parser.Props.LIMIT, None)
    if page_limit:
        page_limit = validation.validate_integer(
            page_limit, parser.Props.LIMIT, min_val=0)

    # NOTE(review): stub — jobs list is never populated here.
    jobs = []

    result = {}
    # Dead while jobs stays empty; mirrors the real listing handlers.
    if jobs and str(page_limit) == str(len(jobs)):
        result[parser.Props.LAST_EVALUATED_RESTORE_JOB_ID] = jobs[-1].id

    link_prefix = req.path_url
    result[parser.Props.RESTORE_JOBS] = [
        parser.Parser.format_restore_job(j, link_prefix) for j in jobs
    ]
    return result
def list_tables(self, req, project_id):
    """List table names for the project, with optional pagination.

    Supports ``exclusive_start_table_name`` and ``limit`` query params;
    reports the last evaluated name when the page is full.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    query = req.params.copy()

    start_name = query.pop(parser.Props.EXCLUSIVE_START_TABLE_NAME, None)
    if start_name:
        validation.validate_table_name(start_name)

    limit = query.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    # Any remaining query parameter is unexpected.
    validation.validate_unexpected_props(query, "params")

    table_names = storage.list_tables(
        req.context,
        exclusive_start_table_name=start_name,
        limit=limit)

    result = {}
    # A full page implies more results may exist — expose the cursor.
    if table_names and str(limit) == str(len(table_names)):
        result[parser.Props.LAST_EVALUATED_TABLE_NAME] = table_names[-1]

    result["tables"] = [{"rel": "self", "href": name}
                        for name in table_names]
    return result
def list_tables(req, project_id):
    """Returns an array of table describing info associated with the
    current user in given tenant.
    """
    query = req.params.copy()

    start_name = query.pop(parser.Props.EXCLUSIVE_START_TABLE_NAME, None)
    if start_name:
        validation.validate_table_name(start_name)

    limit = query.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    # Any remaining query parameter is unexpected.
    validation.validate_unexpected_props(query, "params")

    table_names = storage.list_tables(
        project_id,
        exclusive_start_table_name=start_name,
        limit=limit)

    result = {}
    # A full page implies more results may exist — expose the cursor.
    if table_names and str(limit) == str(len(table_names)):
        result[parser.Props.LAST_EVALUATED_TABLE_NAME] = table_names[-1]

    result["tables"] = [
        {"rel": "self",
         "href": "{url}/{name}".format(url=req.path_url, name=name)}
        for name in table_names
    ]
    return result
def create_backup(req, project_id, table_name):
    """Creates a backup for a table."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        payload = req.json_body
        validation.validate_table_name(table_name)
        validation.validate_object(payload, "body")

        backup_name = payload.pop(parser.Props.BACKUP_NAME, None)
        strategy = payload.pop(parser.Props.STRATEGY, {})
        # Anything left in the body is unexpected.
        validation.validate_unexpected_props(payload, "body")

    backup = storage.create_backup(project_id, table_name, backup_name,
                                   strategy)
    return parser.Parser.format_backup(backup, req.path_url)
def create_restore_job(req, project_id, table_name):
    """Creates a restore job."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        payload = req.json_body
        validation.validate_object(payload, "body")

        backup_id = payload.pop(parser.Props.BACKUP_ID, None)
        if backup_id:
            # Raises ValueError on a malformed id string.
            backup_id = uuid.UUID(backup_id)
        source = payload.pop(parser.Props.SOURCE, None)
        validation.validate_unexpected_props(payload, "body")

    job = storage.create_restore_job(project_id, table_name, backup_id,
                                     source)
    return parser.Parser.format_restore_job(job, req.path_url)
def process_request(self, req, project_id, table_name, backup_id):
    """Describe a backup (placeholder: no storage lookup yet)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)

    # NOTE(review): the backup is never fetched — format_backup is
    # handed None, presumably until the storage call is wired in.
    backup = None
    return parser.Parser.format_backup(backup, req.path_url)
def describe_table(self, req, project_id, table_name):
    """Return the description document for *table_name*."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)
    table_meta = storage.describe_table(req.context, table_name)

    link = req.path_url

    table_def = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    # Secondary indexes are reported only when the schema defines any.
    if table_meta.schema.index_def_map:
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE: table_def}
def process_request(self, req, project_id, table_name, restore_job_id):
    """Describe a restore job (placeholder: no storage lookup yet)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)

    # NOTE(review): the job is never fetched — format_restore_job is
    # handed None, presumably until the storage call is wired in.
    restore_job = None
    return parser.Parser.format_restore_job(restore_job, req.path_url)
def describe_table(self, req, project_id, table_name):
    """Return the description document for *table_name*.

    NOTE(review): unlike other handlers, no project-id check is done
    here — confirm whether authorization happens upstream.
    """
    validation.validate_table_name(table_name)
    table_meta = storage.describe_table(req.context, table_name)

    link = req.path_url

    table_def = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    # Secondary indexes are reported only when the schema defines any.
    if table_meta.schema.index_def_map:
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE: table_def}
def process_request(self, req, project_id, table_name, restore_job_id):
    """Describe a single restore job for *table_name*."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)

    # uuid.UUID raises ValueError on a malformed job id string.
    job = storage.describe_restore_job(
        req.context, table_name, uuid.UUID(restore_job_id))
    return parser.Parser.format_restore_job(job, req.path_url)
def delete_table(req, project_id, table_name):
    """The DeleteTable operation deletes a table and all of its items."""
    validation.validate_table_name(table_name)
    table_meta = storage.delete_table(project_id, table_name)

    link = req.path_url

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        # NOTE(review): indexes are formatted unconditionally here,
        # unlike the describe handlers that guard on index_def_map.
        parser.Props.LOCAL_SECONDARY_INDEXES: (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }
    return {parser.Props.TABLE_DESCRIPTION: description}
def process_request(self, req, project_id, table_name, backup_id):
    """Describe a single table backup."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    # uuid.UUID raises ValueError on a malformed backup id string.
    backup = storage.describe_backup(req.context, table_name,
                                     uuid.UUID(backup_id))
    return parser.Parser.format_backup(backup, req.path_url)
def process_request(self, req, project_id, table_name, backup_id):
    """Delete a table backup and return its last-known description."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    # uuid.UUID raises ValueError on a malformed backup id string.
    backup = storage.delete_backup(req.context, table_name,
                                   uuid.UUID(backup_id))
    return parser.Parser.format_backup(backup, req.path_url)
def describe_restore_job(req, project_id, table_name, restore_job_id):
    """Describes a restore job."""
    utils.check_project_id(project_id)
    validation.validate_table_name(table_name)

    # uuid.UUID raises ValueError on a malformed job id string.
    job = storage.describe_restore_job(
        project_id, table_name, uuid.UUID(restore_job_id))
    return parser.Parser.format_restore_job(job, req.path_url)
def describe_backup(req, project_id, table_name, backup_id):
    """Describes a backup."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    # uuid.UUID raises ValueError on a malformed backup id string.
    backup = storage.describe_backup(project_id, table_name,
                                     uuid.UUID(backup_id))
    return parser.Parser.format_backup(backup, req.path_url)
def process_request(self, req, body, project_id, table_name):
    """Create a backup of *table_name* and return its description."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + ".validation"):
        validation.validate_table_name(table_name)
        validation.validate_object(body, "body")

        backup_name = body.pop(parser.Props.BACKUP_NAME, None)
        strategy = body.pop(parser.Props.STRATEGY, {})
        # Anything left in the body is unexpected.
        validation.validate_unexpected_props(body, "body")

    backup = storage.create_backup(req.context, table_name, backup_name,
                                   strategy)
    return parser.Parser.format_backup(backup, req.path_url)
def delete_table(self, req, project_id, table_name):
    """Delete *table_name* and return its last-known description."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    validation.validate_table_name(table_name)
    table_schema = storage.delete_table(req.context, table_name)

    link = req.path_url

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_schema.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: 0,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_schema.schema.key_attributes)),
        parser.Props.LOCAL_SECONDARY_INDEXES: (
            parser.Parser.format_local_secondary_indexes(
                table_schema.schema.key_attributes[0],
                table_schema.schema.index_def_map)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_schema.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }
    return {parser.Props.TABLE_DESCRIPTION: description}
def process_request(self, req, body, project_id, table_name):
    """Create a table backup from the request *body* and describe it."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        validation.validate_object(body, "body")

        backup_name = body.pop(parser.Props.BACKUP_NAME, None)
        strategy = body.pop(parser.Props.STRATEGY, {})
        # Anything left in the body is unexpected.
        validation.validate_unexpected_props(body, "body")

    new_backup = storage.create_backup(req.context, table_name,
                                       backup_name, strategy)
    return parser.Parser.format_backup(new_backup, req.path_url)
def list_backups(req, project_id, table_name):
    """List the backups."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    params = req.params.copy()

    start_id = params.pop(parser.Props.EXCLUSIVE_START_BACKUP_ID, None)
    if start_id:
        # Raises ValueError on a malformed id string.
        start_id = uuid.UUID(start_id)

    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    backups = storage.list_backups(project_id, table_name, start_id,
                                   limit)

    response = {}
    # A full page implies more results may exist — expose the cursor.
    if backups and limit == len(backups):
        response[parser.Props.LAST_EVALUATED_BACKUP_ID] = (
            backups[-1].id.hex)

    prefix = req.path_url
    response[parser.Props.BACKUPS] = [
        parser.Parser.format_backup(b, prefix) for b in backups
    ]
    return response
def parse_batch_write_request_items(cls, request_items_json):
    """Parse BatchWriteItem request items into WriteItemRequest models.

    Maps each table name to its parsed put/delete write requests;
    rejects unknown request types with ValidationError.
    """
    parsed = {}
    for table_name, raw_requests in request_items_json.iteritems():
        validation.validate_table_name(table_name)
        validation.validate_list_of_objects(raw_requests, table_name)

        writes = []
        for raw_request in raw_requests:
            for req_type, req_body in raw_request.iteritems():
                validation.validate_string(req_type, "request_type")
                if req_type == Props.REQUEST_PUT:
                    validation.validate_object(req_body, req_type)
                    item = req_body.pop(Props.ITEM, None)
                    validation.validate_object(item, Props.ITEM)
                    validation.validate_unexpected_props(req_body,
                                                         req_type)
                    writes.append(
                        models.WriteItemRequest.put(
                            cls.parse_item_attributes(item)))
                elif req_type == Props.REQUEST_DELETE:
                    validation.validate_object(req_body, req_type)
                    key = req_body.pop(Props.KEY, None)
                    validation.validate_object(key, Props.KEY)
                    validation.validate_unexpected_props(req_body,
                                                         req_type)
                    writes.append(
                        models.WriteItemRequest.delete(
                            cls.parse_item_attributes(key)))
                else:
                    raise exception.ValidationError(
                        _("Unsupported request type found: "
                          "%(request_type)s"),
                        request_type=req_type)
        parsed[table_name] = writes
    return parsed
def list_restore_jobs(req, project_id, table_name):
    """List restore jobs."""
    utils.check_project_id(project_id)
    validation.validate_table_name(table_name)

    params = req.params.copy()

    start_id = params.pop(parser.Props.EXCLUSIVE_START_RESTORE_JOB_ID,
                          None)
    if start_id:
        # Raises ValueError on a malformed id string.
        start_id = uuid.UUID(start_id)

    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    jobs = storage.list_restore_jobs(project_id, table_name, start_id,
                                     limit)

    response = {}
    # A full page implies more results may exist — expose the cursor.
    if jobs and limit == len(jobs):
        response[parser.Props.LAST_EVALUATED_RESTORE_JOB_ID] = (
            jobs[-1].id.hex)

    prefix = req.path_url
    response[parser.Props.RESTORE_JOBS] = [
        parser.Parser.format_restore_job(job, prefix) for job in jobs
    ]
    return response
def process_request(self, req, project_id, table_name):
    """List backups of *table_name*, with optional pagination."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    params = req.params.copy()

    start_id = params.pop(parser.Props.EXCLUSIVE_START_BACKUP_ID, None)
    if start_id:
        # Raises ValueError on a malformed id string.
        start_id = uuid.UUID(start_id)

    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    backups = storage.list_backups(req.context, table_name, start_id,
                                   limit)

    response = {}
    # A full page implies more results may exist — expose the cursor.
    if backups and limit == len(backups):
        response[parser.Props.LAST_EVALUATED_BACKUP_ID] = (
            backups[-1].id.hex)

    prefix = req.path_url
    response[parser.Props.BACKUPS] = [
        parser.Parser.format_backup(b, prefix) for b in backups
    ]
    return response
def process_request(self, req, body, project_id, table_name):
    """Create a restore job for *table_name* and describe it."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        validation.validate_object(body, "body")

        backup_id = body.pop(parser.Props.BACKUP_ID, None)
        if backup_id:
            # Raises ValueError on a malformed id string.
            backup_id = uuid.UUID(backup_id)
        source = body.pop(parser.Props.SOURCE, None)
        # Anything left in the body is unexpected.
        validation.validate_unexpected_props(body, "body")

    job = storage.create_restore_job(req.context, table_name, backup_id,
                                     source)
    return parser.Parser.format_restore_job(job, req.path_url)
def parse_batch_get_request_items(cls, request_items_json):
    """Parse BatchGetItem request items into GetItemRequest models.

    Validates each table entry's CONSISTENT_READ, ATTRIBUTES_TO_GET and
    KEYS properties and builds one GetItemRequest per key.
    """
    parsed = []
    for table_name, entry in request_items_json.iteritems():
        validation.validate_table_name(table_name)
        validation.validate_object(entry, table_name)

        consistent = entry.pop(Props.CONSISTENT_READ, False)
        validation.validate_boolean(consistent, Props.CONSISTENT_READ)

        projection = entry.pop(Props.ATTRIBUTES_TO_GET, None)
        if projection is not None:
            projection = validation.validate_set(
                projection, Props.ATTRIBUTES_TO_GET)
            for attr in projection:
                validation.validate_attr_name(attr)

        keys = entry.pop(Props.KEYS, None)
        validation.validate_list(keys, Props.KEYS)

        # Everything recognized was popped; leftovers are invalid.
        validation.validate_unexpected_props(entry, table_name)

        for key in keys:
            parsed.append(
                models.GetItemRequest(
                    table_name,
                    cls.parse_item_attributes(key),
                    projection,
                    consistent=consistent))
    return parsed
def process_request(self, req, body, project_id, table_name):
    """Create a restore job from the request *body* and describe it."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        validation.validate_object(body, "body")

        source_backup_id = body.pop(parser.Props.BACKUP_ID, None)
        if source_backup_id:
            # Raises ValueError on a malformed id string.
            source_backup_id = uuid.UUID(source_backup_id)
        source = body.pop(parser.Props.SOURCE, None)
        # Anything left in the body is unexpected.
        validation.validate_unexpected_props(body, "body")

    new_job = storage.create_restore_job(req.context, table_name,
                                         source_backup_id, source)
    return parser.Parser.format_restore_job(new_job, req.path_url)
def create_table(self, req, body, project_id):
    """Create a table from the parsed request *body* and describe it.

    Validates the body (name, attribute definitions, key schema,
    optional local secondary indexes, and range-key uniqueness across
    table + indexes), creates the table, and returns its description.
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Attribute definitions.
        attr_defs_json = body.pop(parser.Props.ATTRIBUTE_DEFINITIONS,
                                  None)
        validation.validate_list_of_objects(
            attr_defs_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attr_defs = parser.Parser.parse_attribute_definitions(
            attr_defs_json)

        # Primary key schema.
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # Optional local secondary indexes.
        lsi_defs_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES,
                                 None)
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json)
        else:
            index_def_map = {}

        # The table and each index must use distinct range keys.
        range_keys = []
        if len(key_attrs) > 1:
            range_keys.append(key_attrs[1])
        else:
            # Hash-only primary key: indexes are not allowed at all.
            if len(index_def_map) > 0:
                raise exception.ValidationError(
                    _("Table without range key in primary key schema "
                      "can not have indices"))
        for index in index_def_map.values():
            range_keys.append(index.alt_range_key_attr)
        try:
            validation.validate_set(range_keys, "key_schema")
        except exception.ValidationError:
            raise exception.ValidationError(
                _("Table and its indices must have unique key schema"))

        validation.validate_unexpected_props(body, "body")

    # Build the schema model and create the table.
    table_schema = models.TableSchema(attr_defs, key_attrs,
                                      index_def_map)
    table_meta = storage.create_table(req.context, table_name,
                                      table_schema)

    link = req.path_url + "/" + table_name

    table_def = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    # Secondary indexes are reported only when the schema defines any.
    if table_meta.schema.index_def_map:
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: table_def}
def create_table(self, req, body, project_id):
    """Create a table described by the request *body*.

    Validates name, attribute definitions, key schema, optional local
    secondary indexes, and range-key uniqueness; then creates the table
    and returns its description document.
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Attribute definitions.
        raw_attr_defs = body.pop(parser.Props.ATTRIBUTE_DEFINITIONS,
                                 None)
        validation.validate_list_of_objects(
            raw_attr_defs, parser.Props.ATTRIBUTE_DEFINITIONS)
        attribute_definitions = (
            parser.Parser.parse_attribute_definitions(raw_attr_defs))

        # Primary key schema.
        raw_key_schema = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(raw_key_schema, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(raw_key_schema)

        # Optional local secondary indexes.
        raw_lsi = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES, None)
        if raw_lsi:
            validation.validate_list_of_objects(
                raw_lsi, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                raw_lsi)
        else:
            index_def_map = {}

        # The table and each index must use distinct range keys.
        range_keys = []
        if len(key_attrs) > 1:
            range_keys.append(key_attrs[1])
        else:
            # Hash-only primary key: indexes are not allowed at all.
            if len(index_def_map) > 0:
                raise exception.ValidationError(
                    _("Table without range key in primary key schema "
                      "can not have indices"))
        for index in index_def_map.values():
            range_keys.append(index.alt_range_key_attr)
        try:
            validation.validate_set(range_keys, "key_schema")
        except exception.ValidationError:
            raise exception.ValidationError(
                _("Table and its indices must have unique key schema"))

        validation.validate_unexpected_props(body, "body")

    # Build the schema model and create the table.
    table_schema = models.TableSchema(
        attribute_definitions, key_attrs, index_def_map)
    table_meta = storage.create_table(
        req.context, table_name, table_schema)

    link = req.path_url + "/" + table_name

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    # Secondary indexes are reported only when the schema defines any.
    if table_meta.schema.index_def_map:
        description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: description}
def create_table(self, req, body, project_id):
    """Create a table from the parsed request *body* and describe it.

    NOTE(review): unlike other create_table variants in this codebase,
    this one performs no range-key uniqueness validation across the
    table and its indexes — confirm that is intentional.
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Attribute definitions.
        attr_defs_json = body.pop(parser.Props.ATTRIBUTE_DEFINITIONS,
                                  None)
        validation.validate_list_of_objects(
            attr_defs_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attr_defs = parser.Parser.parse_attribute_definitions(
            attr_defs_json)

        # Primary key schema.
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # Optional local secondary indexes.
        lsi_defs_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES,
                                 None)
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json)
        else:
            index_def_map = {}

        validation.validate_unexpected_props(body, "body")

    # Build the schema model and create the table.
    table_schema = models.TableSchema(attr_defs, key_attrs,
                                      index_def_map)
    table_meta = storage.create_table(req.context, table_name,
                                      table_schema)

    link = req.path_url + "/" + table_name

    table_def = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    # Secondary indexes are reported only when the schema defines any.
    if table_meta.schema.index_def_map:
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: table_def}
def create_table(self, req, body, project_id):
    """Create a table for the project and return its description.

    Checks project authorization, validates the body (name, attribute
    definitions, key schema, optional local secondary indexes), creates
    the table, and returns its description (no TABLE_ID in this
    variant's response).
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Attribute definitions.
        raw_attr_defs = body.pop(parser.Props.ATTRIBUTE_DEFINITIONS,
                                 None)
        validation.validate_list_of_objects(
            raw_attr_defs, parser.Props.ATTRIBUTE_DEFINITIONS)
        attribute_definitions = (
            parser.Parser.parse_attribute_definitions(raw_attr_defs))

        # Primary key schema.
        raw_key_schema = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(raw_key_schema, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(raw_key_schema)

        # Optional local secondary indexes.
        raw_lsi = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES, None)
        if raw_lsi:
            validation.validate_list_of_objects(
                raw_lsi, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                raw_lsi)
        else:
            index_def_map = {}

        validation.validate_unexpected_props(body, "body")

    # Build the schema model and create the table.
    table_schema = models.TableSchema(
        attribute_definitions, key_attrs, index_def_map)
    table_meta = storage.create_table(
        req.context, table_name, table_schema)

    link = req.path_url + "/" + table_name

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: link,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    # Secondary indexes are reported only when the schema defines any.
    if table_meta.schema.index_def_map:
        description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: description}