def list_tables(self, req, project_id):
    """Return the table listing for *project_id* (hrefs are bare names)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    query = req.params.copy()
    start_name = query.pop(parser.Props.EXCLUSIVE_START_TABLE_NAME, None)
    if start_name:
        validation.validate_table_name(start_name)

    limit = query.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    validation.validate_unexpected_props(query, "params")

    names = storage.list_tables(
        req.context,
        exclusive_start_table_name=start_name,
        limit=limit)

    res = {}
    # A page exactly `limit` long implies more tables may remain.
    if names and str(limit) == str(len(names)):
        res[parser.Props.LAST_EVALUATED_TABLE_NAME] = names[-1]
    res["tables"] = [{"rel": "self", "href": n} for n in names]
    return res
def create_backup(req, project_id, table_name):
    """Creates a backup for a table."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        payload = req.json_body
        validation.validate_table_name(table_name)
        validation.validate_object(payload, "body")
        backup_name = payload.pop(parser.Props.BACKUP_NAME, None)
        strategy = payload.pop(parser.Props.STRATEGY, {})
        validation.validate_unexpected_props(payload, "body")

    backup = storage.create_backup(project_id, table_name, backup_name,
                                   strategy)
    return parser.Parser.format_backup(backup, req.path_url)
def list_tables(self, req, project_id):
    """List tables; pagination params are passed through unvalidated."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    start_table = req.params.get(parser.Props.EXCLUSIVE_START_TABLE_NAME)
    limit = req.params.get(parser.Props.LIMIT)

    names = storage.list_tables(
        req.context,
        exclusive_start_table_name=start_table,
        limit=limit)

    result = {}
    # str() comparison because `limit` arrives as a query-string value.
    if names and str(limit) == str(len(names)):
        result[parser.Props.LAST_EVALUATED_TABLE_NAME] = names[-1]
    result["tables"] = [{"rel": "self", "href": n} for n in names]
    return result
def create_restore_job(req, project_id, table_name):
    """Creates a restore job."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        payload = req.json_body
        validation.validate_object(payload, "body")
        backup_id = payload.pop(parser.Props.BACKUP_ID, None)
        if backup_id:
            backup_id = uuid.UUID(backup_id)
        source = payload.pop(parser.Props.SOURCE, None)
        validation.validate_unexpected_props(payload, "body")

    restore_job = storage.create_restore_job(project_id, table_name,
                                             backup_id, source)
    return parser.Parser.format_restore_job(restore_job, req.path_url)
def list_tables(self, req, project_id):
    """List tables, emitting fully-qualified hrefs for each entry."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    query = req.params.copy()
    start_name = query.pop(parser.Props.EXCLUSIVE_START_TABLE_NAME, None)
    if start_name:
        validation.validate_table_name(start_name)

    limit = query.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    validation.validate_unexpected_props(query, "params")

    names = storage.list_tables(
        req.context,
        exclusive_start_table_name=start_name,
        limit=limit)

    res = {}
    # A full page implies more results may remain.
    if names and str(limit) == str(len(names)):
        res[parser.Props.LAST_EVALUATED_TABLE_NAME] = names[-1]
    res["tables"] = [
        {"rel": "self",
         "href": "{url}/{name}".format(url=req.path_url, name=name)}
        for name in names
    ]
    return res
def process_request(self, req, project_id, table_name):
    """List restore jobs for a table (listing is currently stubbed out)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    query = req.params.copy()
    # Pagination cursor is not wired up yet:
    # exclusive_start_restore_job_id = query.pop(
    #     parser.Props.EXCLUSIVE_START_RESTORE_JOB_ID, None)
    limit = query.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    restore_jobs = []

    response = {}
    if restore_jobs and str(limit) == str(len(restore_jobs)):
        response[
            parser.Props.LAST_EVALUATED_RESTORE_JOB_ID
        ] = restore_jobs[-1].id

    prefix = req.path_url
    response[parser.Props.RESTORE_JOBS] = [
        parser.Parser.format_restore_job(job, prefix)
        for job in restore_jobs
    ]
    return response
def process_request(self, req, project_id, table_name):
    """List backups for a table (listing is currently stubbed out)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    query = req.params.copy()
    # Pagination cursor is not wired up yet:
    # exclusive_start_backup_id = query.pop(
    #     parser.Props.EXCLUSIVE_START_BACKUP_ID, None)
    limit = query.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    backups = []

    response = {}
    if backups and str(limit) == str(len(backups)):
        response[parser.Props.LAST_EVALUATED_BACKUP_ID] = backups[-1].id

    prefix = req.path_url
    response[parser.Props.BACKUPS] = [
        parser.Parser.format_backup(b, prefix) for b in backups
    ]
    return response
def wrapper(self, *args, **kwargs):
    """Enforce project-id check and RBAC policy before calling *f*."""
    # args[0] is the request; its context carries the auth information.
    ctx = args[0].context
    utils.check_project_id(ctx, kwargs.get('project_id'))
    policy.enforce(ctx, rule, {})
    LOG.debug('RBAC: Authorization granted')
    return f(self, *args, **kwargs)
def process_request(self, req, body, project_id, table_name):
    """Put an item into *table_name*; echo attributes when requested."""
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    # Conditional-put preconditions, if any.
    expected_conditions = parser.Parser.parse_expected_attribute_conditions(
        body.get(parser.Props.EXPECTED, {}))

    # The item payload itself.
    attributes = parser.Parser.parse_item_attributes(
        body[parser.Props.ITEM])

    return_values = body.get(parser.Props.RETURN_VALUES,
                             parser.Values.RETURN_VALUES_NONE)

    req.context.tenant = project_id
    result = storage.put_item(
        req.context,
        models.PutItemRequest(table_name, attributes),
        if_not_exist=False,
        expected_condition_map=expected_conditions)
    if not result:
        raise exception.InternalFailure()

    response = {}
    if return_values != parser.Values.RETURN_VALUES_NONE:
        response[parser.Props.ATTRIBUTES] = (
            parser.Parser.format_item_attributes(attributes))
    return response
def process_request(self, req, body, project_id, table_name):
    """Unconditionally put an item; return attributes when asked to."""
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    expected_json = body.get(parser.Props.EXPECTED, {})
    condition_map = parser.Parser.parse_expected_attribute_conditions(
        expected_json)

    item_attrs = parser.Parser.parse_item_attributes(
        body[parser.Props.ITEM])

    rv = body.get(parser.Props.RETURN_VALUES,
                  parser.Values.RETURN_VALUES_NONE)

    req.context.tenant = project_id
    put_request = models.PutItemRequest(table_name, item_attrs)
    ok = storage.put_item(req.context, put_request,
                          if_not_exist=False,
                          expected_condition_map=condition_map)
    if not ok:
        raise exception.InternalFailure()

    out = {}
    if rv != parser.Values.RETURN_VALUES_NONE:
        out[parser.Props.ATTRIBUTES] = (
            parser.Parser.format_item_attributes(item_attrs))
    return out
def process_request(self, req, project_id, table_name):
    """Return the (currently empty) restore-job listing for a table."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    params = req.params.copy()
    # Cursor-based paging is not implemented yet:
    # exclusive_start_restore_job_id = params.pop(
    #     parser.Props.EXCLUSIVE_START_RESTORE_JOB_ID, None)
    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    jobs = []

    response = {}
    if jobs and str(limit) == str(len(jobs)):
        response[parser.Props.LAST_EVALUATED_RESTORE_JOB_ID] = jobs[-1].id

    response[parser.Props.RESTORE_JOBS] = [
        parser.Parser.format_restore_job(j, req.path_url) for j in jobs
    ]
    return response
def process_request(self, req, body, project_id, table_name):
    """Validate and execute a put-item request, returning the old item."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")

        expected_json = body.pop(parser.Props.EXPECTED, {})
        validation.validate_object(expected_json, parser.Props.EXPECTED)
        condition_map = parser.Parser.parse_expected_attribute_conditions(
            expected_json)

        item_json = body.pop(parser.Props.ITEM, None)
        validation.validate_object(item_json, parser.Props.ITEM)
        attribute_map = parser.Parser.parse_item_attributes(item_json)

        rv_json = body.pop(parser.Props.RETURN_VALUES,
                           parser.Values.RETURN_VALUES_NONE)
        validation.validate_string(rv_json, parser.Props.RETURN_VALUES)
        return_values = InsertReturnValuesType(rv_json)

        # Optional TTL; validated here but not forwarded to storage below.
        time_to_live = body.pop(parser.Props.TIME_TO_LIVE, None)
        if time_to_live is not None:
            time_to_live = validation.validate_integer(
                time_to_live, parser.Props.TIME_TO_LIVE, min_val=0)

        validation.validate_unexpected_props(body, "body")

    result, old_item = storage.put_item(
        req.context,
        table_name,
        attribute_map,
        return_values=return_values,
        if_not_exist=False,
        expected_condition_map=condition_map,
    )

    response = {}
    if old_item:
        response[parser.Props.ATTRIBUTES] = (
            parser.Parser.format_item_attributes(old_item))
    return response
def scan(self, req, body, project_id, table_name):
    """Scan a table (or index) and format the matching items.

    Raises ValueError when the requested segment is not smaller than
    the total number of segments.
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)
    req.context.tenant = project_id

    # TODO ikhudoshyn: table_name may be index name
    attrs_to_get = body.get(parser.Props.ATTRIBUTES_TO_GET)
    select = body.get(parser.Props.SELECT)
    select_type = parser.Parser.parse_select_type(select, attrs_to_get)

    limit = body.get(parser.Props.LIMIT)

    exclusive_start_key = body.get(parser.Props.EXCLUSIVE_START_KEY)
    exclusive_start_key = parser.Parser.parse_item_attributes(
        exclusive_start_key) if exclusive_start_key else None

    scan_filter = body.get(parser.Props.SCAN_FILTER, {})
    condition_map = parser.Parser.parse_attribute_conditions(
        scan_filter
    )

    segment = body.get(parser.Props.SEGMENT, 0)
    total_segments = body.get(parser.Props.TOTAL_SEGMENTS, 1)
    # Explicit check instead of `assert`: asserts are stripped when
    # Python runs with -O, which would silently accept bad input.
    if not segment < total_segments:
        raise ValueError("Segment must be less than TotalSegments")

    result = storage.scan(
        req.context, table_name, condition_map,
        attributes_to_get=attrs_to_get, limit=limit,
        exclusive_start_key=exclusive_start_key)

    response = {
        parser.Props.COUNT: result.count,
        parser.Props.SCANNED_COUNT: result.scanned_count
    }

    if not select_type.is_count:
        response[parser.Props.ITEMS] = [
            parser.Parser.format_item_attributes(row)
            for row in result.items]

    if result.last_evaluated_key:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key
            )
        )

    return response
def process_request(self, req, project_id, table_name, backup_id):
    """Describe a backup (the lookup is currently stubbed out)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    backup = None  # storage lookup not implemented yet
    return parser.Parser.format_backup(backup, req.path_url)
def create_table(self, req, body, project_id):
    """Create a table and return its description document."""
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    table_name = body.get(parser.Props.TABLE_NAME)

    # Attribute definitions, key schema and secondary indexes.
    attribute_definitions = parser.Parser.parse_attribute_definitions(
        body.get(parser.Props.ATTRIBUTE_DEFINITIONS, {}))
    key_attrs = parser.Parser.parse_key_schema(
        body.get(parser.Props.KEY_SCHEMA, []))
    indexed_attr_names = parser.Parser.parse_local_secondary_indexes(
        body.get(parser.Props.LOCAL_SECONDARY_INDEXES, []))

    table_schema = models.TableSchema(attribute_definitions, key_attrs,
                                      indexed_attr_names)

    req.context.tenant = project_id
    table_meta = storage.create_table(req.context, table_name,
                                      table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: 0,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: url,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: bookmark,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }
    result = {parser.Props.TABLE_DESCRIPTION: description}

    if table_meta.schema.index_def_map:
        description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return result
def process_request(self, req, body, project_id, table_name):
    """Delete an item, optionally echoing its key attributes back."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.jsonschema.validate'):
        validation.validate_object(body, "body")

        # Optional conditional-delete preconditions.
        expected_json = body.pop(parser.Props.EXPECTED, None)
        if expected_json:
            validation.validate_object(expected_json,
                                       parser.Props.EXPECTED)
            expected_conditions = (
                parser.Parser.parse_expected_attribute_conditions(
                    expected_json))
        else:
            expected_conditions = None

        key_json = body.pop(parser.Props.KEY, None)
        validation.validate_object(key_json, parser.Props.KEY)
        key_attributes = parser.Parser.parse_item_attributes(key_json)

        rv_json = body.pop(parser.Props.RETURN_VALUES,
                           parser.Values.RETURN_VALUES_NONE)
        validation.validate_string(rv_json, parser.Props.RETURN_VALUES)
        return_values = DeleteReturnValuesType(rv_json)

        validation.validate_unexpected_props(body, "body")

    storage.delete_item(req.context, table_name, key_attributes,
                        expected_condition_map=expected_conditions)

    response = {}
    if return_values.type != parser.Values.RETURN_VALUES_NONE:
        # TODO(cwang):
        # It is needed to return all deleted item attributes
        #
        response[parser.Props.ATTRIBUTES] = (
            parser.Parser.format_item_attributes(key_attributes))
    return response
def process_request(self, req, body, project_id, table_name):
    """Fetch a single item by key and format it for the response.

    Uses ``dict.items()`` instead of the Python-2-only
    ``dict.iteritems()`` so the handler also runs on Python 3;
    semantics are unchanged.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        # get attributes_to_get
        attributes_to_get = body.pop(parser.Props.ATTRIBUTES_TO_GET, None)
        if attributes_to_get:
            attributes_to_get = validation.validate_set(
                attributes_to_get, parser.Props.ATTRIBUTES_TO_GET
            )
            for attr_name in attributes_to_get:
                validation.validate_attr_name(attr_name)
            select_type = models.SelectType.specific_attributes(
                attributes_to_get
            )
        else:
            select_type = models.SelectType.all()

        key = body.pop(parser.Props.KEY, None)
        validation.validate_object(key, parser.Props.KEY)

        # parse consistent_read
        consistent_read = body.pop(parser.Props.CONSISTENT_READ, False)
        validation.validate_boolean(consistent_read,
                                    parser.Props.CONSISTENT_READ)

        validation.validate_unexpected_props(body, "body")

    # parse key_attributes
    key_attributes = parser.Parser.parse_item_attributes(key)

    # Express the primary key as equality conditions for select_item.
    indexed_condition_map = {
        name: [models.IndexedCondition.eq(value)]
        for name, value in key_attributes.items()
    }

    # get item
    result = storage.select_item(
        req.context, table_name, indexed_condition_map,
        select_type=select_type, limit=2, consistent=consistent_read)

    # format response
    if result.count == 0:
        return {}

    return {
        parser.Props.ITEM: parser.Parser.format_item_attributes(
            result.items[0])
    }
def process_request(self, req, project_id, table_name, restore_job_id):
    """Describe a restore job (the lookup is currently stubbed out)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    restore_job = None  # storage lookup not implemented yet
    return parser.Parser.format_restore_job(restore_job, req.path_url)
def scan(self, req, body, project_id, table_name):
    """Scan a table and format matching items for the response.

    Raises ValueError for an out-of-range segment instead of relying
    on ``assert``, which is stripped when Python runs with -O.
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)
    req.context.tenant = project_id

    # TODO ikhudoshyn: table_name may be index name
    attrs_to_get = body.get(parser.Props.ATTRIBUTES_TO_GET)
    select = body.get(parser.Props.SELECT)
    select_type = parser.Parser.parse_select_type(select, attrs_to_get)

    limit = body.get(parser.Props.LIMIT)

    exclusive_start_key = body.get(parser.Props.EXCLUSIVE_START_KEY)
    exclusive_start_key = parser.Parser.parse_item_attributes(
        exclusive_start_key) if exclusive_start_key else None

    scan_filter = body.get(parser.Props.SCAN_FILTER, {})
    condition_map = parser.Parser.parse_attribute_conditions(scan_filter)

    segment = body.get(parser.Props.SEGMENT, 0)
    total_segments = body.get(parser.Props.TOTAL_SEGMENTS, 1)
    if segment >= total_segments:
        # `assert` would vanish under -O; validate explicitly instead.
        raise ValueError("Segment must be less than TotalSegments")

    result = storage.scan(req.context, table_name, condition_map,
                          attributes_to_get=attrs_to_get, limit=limit,
                          exclusive_start_key=exclusive_start_key)

    response = {
        parser.Props.COUNT: result.count,
        parser.Props.SCANNED_COUNT: result.scanned_count
    }

    if not select_type.is_count:
        response[parser.Props.ITEMS] = [
            parser.Parser.format_item_attributes(row)
            for row in result.items
        ]

    if result.last_evaluated_key:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key))

    return response
def describe_table(self, req, project_id, table_name):
    """Describe a table, including its id and creation time."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    table_meta = storage.describe_table(req.context, table_name)

    url = req.path_url
    bookmark = req.path_url

    table = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: url,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: bookmark,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    if table_meta.schema.index_def_map:
        table[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE: table}
def process_request(self, req, body, project_id, table_name):
    """Put an item after full request validation; return the old image."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")

        expected = body.pop(parser.Props.EXPECTED, {})
        validation.validate_object(expected, parser.Props.EXPECTED)
        expected_conditions = (
            parser.Parser.parse_expected_attribute_conditions(expected))

        item = body.pop(parser.Props.ITEM, None)
        validation.validate_object(item, parser.Props.ITEM)
        item_attributes = parser.Parser.parse_item_attributes(item)

        return_values_json = body.pop(parser.Props.RETURN_VALUES,
                                      parser.Values.RETURN_VALUES_NONE)
        validation.validate_string(return_values_json,
                                   parser.Props.RETURN_VALUES)
        return_values = InsertReturnValuesType(return_values_json)

        # Optional time-to-live; validated here, not passed to storage.
        ttl = body.pop(parser.Props.TIME_TO_LIVE, None)
        if ttl is not None:
            ttl = validation.validate_integer(
                ttl, parser.Props.TIME_TO_LIVE, min_val=0)

        validation.validate_unexpected_props(body, "body")

    result, old_item = storage.put_item(
        req.context,
        table_name,
        item_attributes,
        return_values=return_values,
        if_not_exist=False,
        expected_condition_map=expected_conditions,
    )

    response = {}
    if old_item:
        response[parser.Props.ATTRIBUTES] = (
            parser.Parser.format_item_attributes(old_item))
    return response
def process_request(self, req, project_id, table_name, restore_job_id):
    """Describe a single restore job by id."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id
    validation.validate_table_name(table_name)

    job = storage.describe_restore_job(req.context, table_name,
                                       uuid.UUID(restore_job_id))
    return parser.Parser.format_restore_job(job, req.path_url)
def describe_table(self, req, project_id, table_name):
    """Describe a table (no id/creation-time in this variant)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    meta = storage.describe_table(req.context, table_name)

    url = req.path_url
    bookmark = req.path_url

    table = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: 0,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(meta.schema.key_attributes)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: url,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: bookmark,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    if meta.schema.index_def_map:
        table[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                meta.schema.key_attributes[0],
                meta.schema.index_def_map))

    return {parser.Props.TABLE: table}
def process_request(self, req, project_id, table_name, backup_id):
    """Describe a single backup by id."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    backup = storage.describe_backup(req.context, table_name,
                                     uuid.UUID(backup_id))
    return parser.Parser.format_backup(backup, req.path_url)
def describe_restore_job(req, project_id, table_name, restore_job_id):
    """Describes a restore job."""
    utils.check_project_id(project_id)
    validation.validate_table_name(table_name)

    job = storage.describe_restore_job(project_id, table_name,
                                       uuid.UUID(restore_job_id))
    return parser.Parser.format_restore_job(job, req.path_url)
def process_request(self, req, project_id, table_name, backup_id):
    """Delete a backup and return its final description."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    backup = storage.delete_backup(req.context, table_name,
                                   uuid.UUID(backup_id))
    return parser.Parser.format_backup(backup, req.path_url)
def delete_table(self, req, project_id, table_name):
    """Delete a table and return the description of what was removed."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    # Snapshot the schema first so the response can still describe it.
    table_schema = storage.describe_table(req.context, table_name)
    storage.delete_table(req.context, table_name)

    url = req.path_url
    bookmark = req.path_url

    schema = table_schema.schema
    return {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    schema.attribute_type_map)),
            parser.Props.CREATION_DATE_TIME: 0,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(schema.key_attributes)),
            parser.Props.LOCAL_SECONDARY_INDEXES: (
                parser.Parser.format_local_secondary_indexes(
                    schema.key_attributes[0], schema.index_def_map)),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Values.TABLE_STATUS_ACTIVE),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [
                {parser.Props.HREF: url,
                 parser.Props.REL: parser.Values.SELF},
                {parser.Props.HREF: bookmark,
                 parser.Props.REL: parser.Values.BOOKMARK},
            ],
        }
    }
def describe_backup(req, project_id, table_name, backup_id):
    """Describes a backup."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)

    backup = storage.describe_backup(project_id, table_name,
                                     uuid.UUID(backup_id))
    return parser.Parser.format_backup(backup, req.path_url)
def process_request(self, req, body, project_id, table_name):
    """Fetch one item by key (schema-validated request).

    Uses ``dict.items()`` instead of the Python-2-only
    ``dict.iteritems()`` so the handler also runs on Python 3;
    behavior is unchanged.
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)
    req.context.tenant = project_id

    # get attributes_to_get
    attributes_to_get = body.get(parser.Props.ATTRIBUTES_TO_GET)
    select_type = (
        models.SelectType.all() if attributes_to_get is None
        else models.SelectType.specified_attributes(attributes_to_get)
    )

    # parse key_attributes
    key_attributes = parser.Parser.parse_item_attributes(
        body[parser.Props.KEY]
    )

    # parse consistent_read
    consistent_read = body.get(
        parser.Props.CONSISTENT_READ, False
    )

    # Primary key expressed as equality conditions for select_item.
    indexed_condition_map = {
        name: [models.IndexedCondition.eq(value)]
        for name, value in key_attributes.items()
    }

    # get item
    result = storage.select_item(
        req.context, table_name, indexed_condition_map,
        select_type=select_type, limit=2, consistent=consistent_read)

    # format response
    if result.count == 0:
        return {}

    return {
        parser.Props.ITEM: parser.Parser.format_item_attributes(
            result.items[0])
    }
def process_request(self, req, body, project_id, table_name):
    """Create a restore job (the storage call is currently stubbed out)."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        # Not consumed yet by the stubbed implementation:
        # backup_id = body.pop(parser.Props.BACKUP_ID, None)
        # source = body.pop(parser.Props.SOURCE, None)
        validation.validate_unexpected_props(body, "body")

    restore_job = None
    return parser.Parser.format_restore_job(restore_job, req.path_url)
def process_request(self, req, body, project_id):
    """Execute a batch of write requests; report unprocessed items.

    Builds the deque directly from the parsed request items instead of
    appending one element at a time — same contents, same order.
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    # parse request_items
    request_items = parser.Parser.parse_request_items(
        body[parser.Props.REQUEST_ITEMS])

    req.context.tenant = project_id

    # deque(iterable) preserves iteration order, matching the old loop.
    request_list = collections.deque(request_items)

    unprocessed_items = storage.execute_write_batch(
        req.context, request_list)

    return {
        'unprocessed_items': parser.Parser.format_request_items(
            unprocessed_items)}
def process_request(self, req, body, project_id, table_name):
    """Create a backup of *table_name* and return its description."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + ".validation"):
        validation.validate_table_name(table_name)
        validation.validate_object(body, "body")
        backup_name = body.pop(parser.Props.BACKUP_NAME, None)
        strategy = body.pop(parser.Props.STRATEGY, {})
        validation.validate_unexpected_props(body, "body")

    backup = storage.create_backup(req.context, table_name, backup_name,
                                   strategy)
    return parser.Parser.format_backup(backup, req.path_url)
def list_tables(self, req, project_id):
    """List tables for a project; hrefs are bare table names."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    start = req.params.get(parser.Props.EXCLUSIVE_START_TABLE_NAME)
    limit = req.params.get(parser.Props.LIMIT)

    table_names = storage.list_tables(
        req.context,
        exclusive_start_table_name=start,
        limit=limit)

    res = {}
    # A page exactly `limit` long may have more results behind it.
    if table_names and str(limit) == str(len(table_names)):
        res[parser.Props.LAST_EVALUATED_TABLE_NAME] = table_names[-1]
    res["tables"] = [{"rel": "self", "href": t} for t in table_names]
    return res
def process_request(self, req, body, project_id, table_name):
    """Create a backup for a table and format the result."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        validation.validate_object(body, "body")
        name = body.pop(parser.Props.BACKUP_NAME, None)
        strategy = body.pop(parser.Props.STRATEGY, {})
        validation.validate_unexpected_props(body, "body")

    backup = storage.create_backup(req.context, table_name, name,
                                   strategy)
    return parser.Parser.format_backup(backup, req.path_url)
def list_backups(req, project_id, table_name):
    """List the backups."""
    utils.check_project_id(project_id)

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        params = req.params.copy()
        start_backup_id = params.pop(
            parser.Props.EXCLUSIVE_START_BACKUP_ID, None)
        if start_backup_id:
            start_backup_id = uuid.UUID(start_backup_id)
        limit = params.pop(parser.Props.LIMIT, None)
        if limit:
            limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                                min_val=0)

    backups = storage.list_backups(
        project_id, table_name, start_backup_id, limit)

    response = {}
    # A full page means more backups may remain.
    if backups and limit == len(backups):
        response[parser.Props.LAST_EVALUATED_BACKUP_ID] = (
            backups[-1].id.hex)

    response[parser.Props.BACKUPS] = [
        parser.Parser.format_backup(b, req.path_url) for b in backups
    ]
    return response
def process_request(self, req, project_id, table_name):
    """List backups for a table, with optional paging."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_table_name(table_name)
        params = req.params.copy()
        start_id = params.pop(parser.Props.EXCLUSIVE_START_BACKUP_ID,
                              None)
        if start_id:
            start_id = uuid.UUID(start_id)
        limit = params.pop(parser.Props.LIMIT, None)
        if limit:
            limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                                min_val=0)

    backups = storage.list_backups(req.context, table_name, start_id,
                                   limit)

    response = {}
    if backups and limit == len(backups):
        response[parser.Props.LAST_EVALUATED_BACKUP_ID] = (
            backups[-1].id.hex)

    response[parser.Props.BACKUPS] = [
        parser.Parser.format_backup(b, req.path_url) for b in backups
    ]
    return response
def list_restore_jobs(req, project_id, table_name):
    """List restore jobs."""
    utils.check_project_id(project_id)
    validation.validate_table_name(table_name)

    params = req.params.copy()
    start_job_id = params.pop(
        parser.Props.EXCLUSIVE_START_RESTORE_JOB_ID, None)
    if start_job_id:
        start_job_id = uuid.UUID(start_job_id)
    limit = params.pop(parser.Props.LIMIT, None)
    if limit:
        limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                            min_val=0)

    restore_jobs = storage.list_restore_jobs(
        project_id, table_name, start_job_id, limit)

    response = {}
    # A full page means more jobs may remain.
    if restore_jobs and limit == len(restore_jobs):
        response[parser.Props.LAST_EVALUATED_RESTORE_JOB_ID] = (
            restore_jobs[-1].id.hex)

    response[parser.Props.RESTORE_JOBS] = [
        parser.Parser.format_restore_job(job, req.path_url)
        for job in restore_jobs
    ]
    return response
def process_request(self, req, body, project_id):
    """Execute a batch-get; group per-table results for the response."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        request_items_json = body.pop(parser.Props.REQUEST_ITEMS, None)
        validation.validate_object(request_items_json,
                                   parser.Props.REQUEST_ITEMS)
        validation.validate_unexpected_props(body, "body")

    # parse request_items
    request_list = parser.Parser.parse_batch_get_request_items(
        request_items_json)

    result, unprocessed = storage.execute_get_batch(
        req.context, request_list)

    responses = {}
    for tname, res in result:
        if not res.items:
            continue
        # Group formatted items per table name.
        items = responses.setdefault(tname, [])
        items.append(parser.Parser.format_item_attributes(res.items[0]))

    return {
        'responses': responses,
        'unprocessed_keys': parser.Parser.format_batch_get_unprocessed(
            unprocessed, request_items_json)
    }
def process_request(self, req, body, project_id):
    """Handle a batch get request and build per-table responses."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        items_json = body.pop(parser.Props.REQUEST_ITEMS, None)
        validation.validate_object(items_json, parser.Props.REQUEST_ITEMS)
        validation.validate_unexpected_props(body, "body")

    # Parse the per-table get requests.
    get_requests = parser.Parser.parse_batch_get_request_items(items_json)

    result, unprocessed = storage.execute_get_batch(req.context,
                                                    get_requests)

    responses = {}
    for table, table_result in result:
        if not table_result.items:
            continue
        if table not in responses:
            responses[table] = []
        responses[table].append(
            parser.Parser.format_item_attributes(table_result.items[0]))

    return {
        'responses': responses,
        'unprocessed_keys': parser.Parser.format_batch_get_unprocessed(
            unprocessed, items_json)
    }
def process_request(self, req, body, project_id):
    """Handle a batch write request and report unprocessed items."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        items_json = body.pop(parser.Props.REQUEST_ITEMS, None)
        validation.validate_object(items_json, parser.Props.REQUEST_ITEMS)
        validation.validate_unexpected_props(body, "body")

    # Parse the per-table write requests.
    write_map = parser.Parser.parse_batch_write_request_items(items_json)

    leftovers = storage.execute_write_batch(req.context, write_map)

    return {
        'unprocessed_items': parser.Parser.format_request_items(leftovers)
    }
def process_request(self, req, body, project_id):
    """Execute a batch write; anything the backend could not apply is
    returned under 'unprocessed_items'."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        request_items = body.pop(parser.Props.REQUEST_ITEMS, None)
        validation.validate_object(request_items,
                                   parser.Props.REQUEST_ITEMS)
        validation.validate_unexpected_props(body, "body")

    # Turn the JSON payload into the storage-level write map.
    parsed_map = parser.Parser.parse_batch_write_request_items(
        request_items)

    pending = storage.execute_write_batch(req.context, parsed_map)

    formatted_pending = parser.Parser.format_request_items(pending)
    return {'unprocessed_items': formatted_pending}
def bulk_load_app(environ, start_response):
    """WSGI application that streams newline-delimited items from the
    request body into a table via asynchronous put_item calls.

    Yields a plain-text error for a malformed URL, otherwise a JSON
    summary with read/processed/unprocessed/failed counters, the last
    line read, and a map of failed items to their error reprs.

    On the first insertion error the remaining input is read but
    skipped (counted as unprocessed).
    """
    context = environ['webob.adhoc_attrs']['context']
    path = environ['PATH_INFO']
    LOG.debug('Request received: %s', path)
    # FIX: raw string — "\w" in a plain string is an invalid escape
    # sequence in Python 3 (behavior of the pattern is unchanged).
    if not re.match(r"^/v1/\w+/data/tables/\w+/bulk_load$", path):
        start_response('404 Not found', [('Content-Type', 'text/plain')])
        yield 'Incorrect url. Please check it and try again\n'
        notifier.notify(context, notifier.EVENT_TYPE_STREAMING_PATH_ERROR,
                        path, priority=notifier.PRIORITY_ERROR)
        return
    url_comp = path.split('/')
    project_id = url_comp[2]
    table_name = url_comp[5]

    LOG.debug('Tenant: %s, table name: %s', project_id, table_name)

    utils.check_project_id(context, project_id)
    notifier.notify(context, notifier.EVENT_TYPE_STREAMING_DATA_START, path)

    read_count = 0           # lines read from the request body
    processed_count = 0      # futures whose result() succeeded
    unprocessed_count = 0    # lines skipped after the first error
    failed_count = 0         # futures whose result() raised
    put_count = 0            # put_item_async calls issued
    done_count = [0]         # mutable cell updated by the done callback
    last_read = None
    failed_items = {}
    dont_process = False
    future_ready_event = Event()
    future_ready_queue = Queue.Queue()

    stream = environ['wsgi.input']
    while True:
        chunk = stream.readline()
        if not chunk:
            break
        read_count += 1
        if dont_process:
            LOG.debug('Skipping item #%d', read_count)
            unprocessed_count += 1
            continue
        last_read = chunk
        try:
            future = storage.put_item_async(context, table_name,
                                            make_put_item(chunk))
            put_count += 1
            future.add_done_callback(make_callback(
                future_ready_queue, future_ready_event, done_count, chunk))

            # try to get result of finished futures; note that `chunk`
            # is deliberately rebound to the finished future's chunk so
            # a failure is attributed to the item that actually failed
            try:
                while True:
                    finished_future, chunk = future_ready_queue.get_nowait()
                    finished_future.result()
                    processed_count += 1
            except Queue.Empty:
                pass
        except Exception as e:
            failed_items[chunk] = repr(e)
            dont_process = True
            LOG.debug('Error inserting item: %s, message: %s',
                      chunk, repr(e))
            # NOTE(review): e.message is Python-2-only; under Python 3
            # this should become str(e) — left as-is since the payload
            # is runtime data consumed by notification listeners.
            notifier.notify(context,
                            notifier.EVENT_TYPE_STREAMING_DATA_ERROR, {
                                'path': path,
                                'item': chunk,
                                'error': e.message
                            })

    LOG.debug('Request body has been read completely')

    # wait for all futures to be finished
    while done_count[0] < put_count:
        LOG.debug('Waiting for %d item(s) to be processed...',
                  put_count - done_count[0])
        future_ready_event.wait()
        future_ready_event.clear()

    LOG.debug('All items are processed. Getting results of item processing...')

    # get results of finished futures
    while done_count[0] > processed_count + failed_count:
        LOG.debug('Waiting for %d result(s)...',
                  done_count[0] - processed_count - failed_count)
        chunk = None
        try:
            finished_future, chunk = future_ready_queue.get_nowait()
            finished_future.result()
            processed_count += 1
        except Queue.Empty:
            break
        except Exception as e:
            failed_count += 1
            failed_items[chunk] = repr(e)
            LOG.debug('Error inserting item: %s, message: %s',
                      chunk, repr(e))
            notifier.notify(context,
                            notifier.EVENT_TYPE_STREAMING_DATA_ERROR, {
                                'path': path,
                                'item': chunk,
                                'error': e.message
                            })

    # Update count if error happened before put_item_async was invoked
    if dont_process:
        failed_count += 1

    start_response('200 OK', [('Content-Type', 'application/json')])

    resp = {
        'read': read_count,
        'processed': processed_count,
        'unprocessed': unprocessed_count,
        'failed': failed_count,
        'last_item': last_read,
        'failed_items': failed_items
    }

    notifier.notify(context, notifier.EVENT_TYPE_STREAMING_DATA_END, {
        'path': path,
        'response': resp
    })

    yield json.dumps(resp)
def scan(self, req, body, project_id, table_name):
    """Handle a Scan request: validate/parse the body, run the scan
    and format the result.

    :param req: request object providing the auth context
    :param body: parsed JSON body; keys are consumed via pop() so that
        validate_unexpected_props can reject leftovers
    :param project_id: tenant id from the URL
    :param table_name: table to scan
    :returns: dict with Count/ScannedCount, plus Items (unless a COUNT
        select) and LastEvaluatedKey when paging continues
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")

        # get attributes_to_get
        attributes_to_get = body.pop(parser.Props.ATTRIBUTES_TO_GET, None)
        if attributes_to_get:
            validation.validate_list(attributes_to_get,
                                     parser.Props.ATTRIBUTES_TO_GET)
            for attr_name in attributes_to_get:
                validation.validate_attr_name(attr_name)

        select = body.pop(parser.Props.SELECT, None)

        # Default select type depends on whether specific attributes
        # were requested.
        if select is None:
            if attributes_to_get:
                select = models.SelectType.SELECT_TYPE_SPECIFIC
            else:
                select = models.SelectType.SELECT_TYPE_ALL
        else:
            validation.validate_string(select, parser.Props.SELECT)
        select_type = models.SelectType(select, attributes_to_get)

        limit = body.pop(parser.Props.LIMIT, None)
        if limit is not None:
            limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                                min_val=0)

        # parse exclusive_start_key_attributes (pagination start key)
        exclusive_start_key_attributes_json = body.pop(
            parser.Props.EXCLUSIVE_START_KEY, None)
        if exclusive_start_key_attributes_json is not None:
            validation.validate_object(exclusive_start_key_attributes_json,
                                       parser.Props.EXCLUSIVE_START_KEY)

            exclusive_start_key_attributes = (
                parser.Parser.parse_item_attributes(
                    exclusive_start_key_attributes_json
                )
            )
        else:
            exclusive_start_key_attributes = None

        scan_filter_json = body.pop(parser.Props.SCAN_FILTER, None)
        if scan_filter_json:
            validation.validate_object(scan_filter_json,
                                       parser.Props.SCAN_FILTER)

            condition_map = parser.Parser.parse_attribute_conditions(
                scan_filter_json, condition_class=ScanCondition
            )
        else:
            condition_map = None

        total_segments = body.pop(parser.Props.TOTAL_SEGMENTS, 1)
        total_segments = validation.validate_integer(
            total_segments, parser.Props.TOTAL_SEGMENTS, min_val=1,
            max_val=4096
        )

        # NOTE(review): segment/total_segments are validated but never
        # forwarded to storage.scan below — parallel scan appears
        # unimplemented here. Also confirm whether segment should be
        # bounded by total_segments - 1 (max_val inclusivity unknown).
        segment = body.pop(parser.Props.SEGMENT, 0)
        segment = validation.validate_integer(
            segment, parser.Props.SEGMENT, min_val=0, max_val=total_segments
        )

        validation.validate_unexpected_props(body, "body")

    result = storage.scan(
        req.context, table_name, condition_map,
        attributes_to_get=attributes_to_get, limit=limit,
        exclusive_start_key=exclusive_start_key_attributes)

    response = {
        parser.Props.COUNT: result.count,
        parser.Props.SCANNED_COUNT: result.scanned_count
    }

    if not select_type.is_count:
        response[parser.Props.ITEMS] = [
            parser.Parser.format_item_attributes(row)
            for row in result.items]

    if result.last_evaluated_key:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key
            )
        )

    return response
def create_table(self, req, body, project_id):
    """Create a table from a jsonschema-validated body and return its
    description."""
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    table_name = body.get(parser.Props.TABLE_NAME)

    # Parse attribute definitions, key schema and indexed attributes.
    attr_defs = parser.Parser.parse_attribute_definitions(
        body.get(parser.Props.ATTRIBUTE_DEFINITIONS, {}))
    key_schema = parser.Parser.parse_key_schema(
        body.get(parser.Props.KEY_SCHEMA, []))
    indexed_attrs = parser.Parser.parse_local_secondary_indexes(
        body.get(parser.Props.LOCAL_SECONDARY_INDEXES, []))

    # Assemble the schema model and create the table for this tenant.
    schema = models.TableSchema(attr_defs, key_schema, indexed_attrs)
    req.context.tenant = project_id
    table_meta = storage.create_table(req.context, table_name, schema)

    # Self and bookmark links point at the same resource URL.
    self_href = req.path_url + "/" + table_name

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: 0,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: self_href,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: self_href,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    if table_meta.schema.index_def_map:
        description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: description}
def process_request(self, req, body, project_id, table_name):
    """Handle an UpdateItem request.

    Parses expected conditions, attribute updates, the item key and the
    return-values mode from the body (consumed via pop()), applies the
    update through storage, and optionally returns the old attributes.

    :raises exception.BackendInteractionException: if storage reports
        failure
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        # parse expected item conditions (optional)
        expected_item_conditions_json = body.pop(parser.Props.EXPECTED,
                                                 None)
        if expected_item_conditions_json is not None:
            validation.validate_object(expected_item_conditions_json,
                                       parser.Props.EXPECTED)
            expected_item_conditions = (
                parser.Parser.parse_expected_attribute_conditions(
                    expected_item_conditions_json))
        else:
            expected_item_conditions = None

        attribute_updates_json = body.pop(parser.Props.ATTRIBUTE_UPDATES,
                                          None)
        validation.validate_object(attribute_updates_json,
                                   parser.Props.ATTRIBUTE_UPDATES)
        # parse attribute updates
        attribute_updates = parser.Parser.parse_attribute_updates(
            attribute_updates_json)

        # parse key_attributes (mandatory item key)
        key_attributes_json = body.pop(parser.Props.KEY, None)
        validation.validate_object(key_attributes_json, parser.Props.KEY)
        key_attribute_map = parser.Parser.parse_item_attributes(
            key_attributes_json)

        # parse return_values param (defaults to NONE)
        return_values_json = body.pop(parser.Props.RETURN_VALUES,
                                      parser.Values.RETURN_VALUES_NONE)
        validation.validate_string(return_values_json,
                                   parser.Props.RETURN_VALUES)
        return_values = UpdateReturnValuesType(return_values_json)

        validation.validate_unexpected_props(body, "body")

    result, old_item = storage.update_item(
        req.context, table_name, key_attribute_map=key_attribute_map,
        attribute_action_map=attribute_updates,
        expected_condition_map=expected_item_conditions)

    if not result:
        raise exception.BackendInteractionException()

    # format response; old attributes are included only when the client
    # asked for a return-values mode other than NONE
    response = {}

    if return_values.type != parser.Values.RETURN_VALUES_NONE:
        response[parser.Props.ATTRIBUTES] = (
            parser.Parser.format_item_attributes(old_item))

    return response
def query(self, req, body, project_id, table_name):
    """Query items using a jsonschema-validated request body."""
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)
    req.context.tenant = project_id

    # Select type.
    requested_attrs = body.get(parser.Props.ATTRIBUTES_TO_GET)
    if requested_attrs is not None:
        requested_attrs = frozenset(requested_attrs)

    select = body.get(parser.Props.SELECT)
    index_name = body.get(parser.Props.INDEX_NAME)
    select_type = parser.Parser.parse_select_type(select, requested_attrs,
                                                  index_name)

    # Pagination start key.
    start_key = body.get(parser.Props.EXCLUSIVE_START_KEY)
    if start_key is not None:
        start_key = parser.Parser.parse_item_attributes(start_key)

    # Key conditions.
    condition_map = parser.Parser.parse_attribute_conditions(
        body.get(parser.Props.KEY_CONDITIONS))

    # TODO(dukhlov):
    # it would be nice to validate given table_name, key_attributes and
    # attributes_to_get to schema expectation
    consistent_read = body.get(parser.Props.CONSISTENT_READ, False)
    limit = body.get(parser.Props.LIMIT)

    order_asc = body.get(parser.Props.SCAN_INDEX_FORWARD)
    order_type = (
        None if order_asc is None else
        models.ORDER_TYPE_ASC if order_asc else models.ORDER_TYPE_DESC
    )

    # Run the query.
    result = storage.select_item(
        req.context, table_name, condition_map,
        select_type=select_type, index_name=index_name, limit=limit,
        consistent=consistent_read, order_type=order_type,
        exclusive_start_key=start_key)

    # Format the response.
    if select_type.type == models.SelectType.SELECT_TYPE_COUNT:
        response = {parser.Props.COUNT: result.count}
    else:
        response = {
            parser.Props.COUNT: result.count,
            parser.Props.ITEMS: [
                parser.Parser.format_item_attributes(row)
                for row in result.items
            ],
        }

    # A full page implies more data may remain.
    if limit == result.count:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key))

    return response
def create_table(self, req, body, project_id):
    """Create a table from a hand-validated JSON body.

    Pops and validates TableName, AttributeDefinitions, KeySchema and
    optional LocalSecondaryIndexes, creates the table via storage, and
    returns a TableDescription including self/bookmark links.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # parse table attributes
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None
        )
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS
        )
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json
        )

        # parse table key schema
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # parse table indexed field list (optional)
        lsi_defs_json = body.pop(
            parser.Props.LOCAL_SECONDARY_INDEXES, None
        )
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES
            )
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json
            )
        else:
            index_def_map = {}

        validation.validate_unexpected_props(body, "body")

    # prepare table_schema structure
    table_schema = models.TableSchema(
        attribute_definitions, key_attrs, index_def_map)

    table_meta = storage.create_table(
        req.context, table_name, table_schema)

    # self and bookmark links point at the same URL
    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map
                )
            ),
            parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes
                )
            ),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)
            ),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [
                {
                    parser.Props.HREF: url,
                    parser.Props.REL: parser.Values.SELF
                },
                {
                    parser.Props.HREF: bookmark,
                    parser.Props.REL: parser.Values.BOOKMARK
                }
            ]
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map
            )
        )

    return result
def query(self, req, body, project_id, table_name):
    """Handle a Query request with hand-rolled validation.

    Pops and validates each body property (so that leftovers are
    rejected), derives the select type, runs the indexed query and
    formats Count/Items plus LastEvaluatedKey when the page is full.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        # get attributes_to_get
        attributes_to_get = body.pop(parser.Props.ATTRIBUTES_TO_GET, None)
        if attributes_to_get:
            validation.validate_list(attributes_to_get,
                                     parser.Props.ATTRIBUTES_TO_GET)
            for attr_name in attributes_to_get:
                validation.validate_attr_name(attr_name)

        index_name = body.pop(parser.Props.INDEX_NAME, None)
        if index_name is not None:
            validation.validate_index_name(index_name)

        select = body.pop(parser.Props.SELECT, None)

        # Default select type: SPECIFIC when attributes were listed,
        # ALL_PROJECTED for an index query, otherwise ALL.
        if select is None:
            if attributes_to_get:
                select = models.SelectType.SELECT_TYPE_SPECIFIC
            else:
                if index_name is not None:
                    select = models.SelectType.SELECT_TYPE_ALL_PROJECTED
                else:
                    select = models.SelectType.SELECT_TYPE_ALL
        else:
            validation.validate_string(select, parser.Props.SELECT)
        select_type = models.SelectType(select, attributes_to_get)

        # parse exclusive_start_key_attributes (pagination start key)
        exclusive_start_key_attributes_json = body.pop(
            parser.Props.EXCLUSIVE_START_KEY, None)
        if exclusive_start_key_attributes_json is not None:
            validation.validate_object(exclusive_start_key_attributes_json,
                                       parser.Props.EXCLUSIVE_START_KEY)
            exclusive_start_key_attributes = (
                parser.Parser.parse_item_attributes(
                    exclusive_start_key_attributes_json))
        else:
            exclusive_start_key_attributes = None

        # parse indexed_condition_map (KeyConditions is mandatory)
        key_conditions = body.pop(parser.Props.KEY_CONDITIONS, None)
        validation.validate_object(key_conditions,
                                   parser.Props.KEY_CONDITIONS)
        indexed_condition_map = parser.Parser.parse_attribute_conditions(
            key_conditions, condition_class=IndexedCondition)

        # TODO(dukhlov):
        # it would be nice to validate given table_name, key_attributes and
        # attributes_to_get to schema expectation
        consistent_read = body.pop(parser.Props.CONSISTENT_READ, False)
        validation.validate_boolean(consistent_read,
                                    parser.Props.CONSISTENT_READ)
        limit = body.pop(parser.Props.LIMIT, None)
        if limit is not None:
            limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                                min_val=0)

        scan_forward = body.pop(parser.Props.SCAN_INDEX_FORWARD, None)
        if scan_forward is not None:
            validation.validate_boolean(scan_forward,
                                        parser.Props.SCAN_INDEX_FORWARD)
            order_type = (models.ORDER_TYPE_ASC if scan_forward
                          else models.ORDER_TYPE_DESC)
        else:
            order_type = None

        validation.validate_unexpected_props(body, "body")

    # select item
    result = storage.select_item(
        req.context, table_name, indexed_condition_map,
        select_type=select_type, index_name=index_name, limit=limit,
        consistent=consistent_read, order_type=order_type,
        exclusive_start_key=exclusive_start_key_attributes)

    # format response
    if select_type.type == models.SelectType.SELECT_TYPE_COUNT:
        response = {parser.Props.COUNT: result.count}
    else:
        response = {
            parser.Props.COUNT: result.count,
            parser.Props.ITEMS: [
                parser.Parser.format_item_attributes(row)
                for row in result.items
            ]
        }

    # A completely full page means more rows may remain.
    if limit == result.count:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key))

    return response
def create_table(self, req, body, project_id):
    """Create a new table from a hand-validated body and return its
    description."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Attribute definitions.
        attr_defs_json = body.pop(parser.Props.ATTRIBUTE_DEFINITIONS, None)
        validation.validate_list_of_objects(
            attr_defs_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attr_defs = parser.Parser.parse_attribute_definitions(
            attr_defs_json)

        # Key schema.
        key_schema_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_schema_json, parser.Props.KEY_SCHEMA)
        key_schema = parser.Parser.parse_key_schema(key_schema_json)

        # Optional local secondary indexes.
        lsi_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES, None)
        if lsi_json:
            validation.validate_list_of_objects(
                lsi_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            lsi_map = parser.Parser.parse_local_secondary_indexes(lsi_json)
        else:
            lsi_map = {}

        validation.validate_unexpected_props(body, "body")

    # Build the schema model and create the table.
    schema = models.TableSchema(attr_defs, key_schema, lsi_map)
    table_meta = storage.create_table(req.context, table_name, schema)

    # Self and bookmark links point at the same resource URL.
    self_href = req.path_url + "/" + table_name

    description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: 0,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: [
            {parser.Props.HREF: self_href,
             parser.Props.REL: parser.Values.SELF},
            {parser.Props.HREF: self_href,
             parser.Props.REL: parser.Values.BOOKMARK},
        ],
    }

    if table_meta.schema.index_def_map:
        description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: description}
def query(self, req, body, project_id, table_name):
    """Handle a Query request (variant with expanded parentheses).

    Consumes body properties via pop() so leftovers can be rejected,
    derives the select type, runs storage.select_item and formats the
    response, including LastEvaluatedKey when the page came back full.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")
        # get attributes_to_get
        attributes_to_get = body.pop(parser.Props.ATTRIBUTES_TO_GET, None)
        if attributes_to_get:
            validation.validate_list(attributes_to_get,
                                     parser.Props.ATTRIBUTES_TO_GET)
            for attr_name in attributes_to_get:
                validation.validate_attr_name(attr_name)

        index_name = body.pop(parser.Props.INDEX_NAME, None)
        if index_name is not None:
            validation.validate_index_name(index_name)

        select = body.pop(parser.Props.SELECT, None)

        # Default select type: SPECIFIC when attributes were listed,
        # ALL_PROJECTED for an index query, otherwise ALL.
        if select is None:
            if attributes_to_get:
                select = models.SelectType.SELECT_TYPE_SPECIFIC
            else:
                if index_name is not None:
                    select = models.SelectType.SELECT_TYPE_ALL_PROJECTED
                else:
                    select = models.SelectType.SELECT_TYPE_ALL
        else:
            validation.validate_string(select, parser.Props.SELECT)
        select_type = models.SelectType(select, attributes_to_get)

        # parse exclusive_start_key_attributes (pagination start key)
        exclusive_start_key_attributes_json = body.pop(
            parser.Props.EXCLUSIVE_START_KEY, None)
        if exclusive_start_key_attributes_json is not None:
            validation.validate_object(exclusive_start_key_attributes_json,
                                       parser.Props.EXCLUSIVE_START_KEY)
            exclusive_start_key_attributes = (
                parser.Parser.parse_item_attributes(
                    exclusive_start_key_attributes_json
                )
            )
        else:
            exclusive_start_key_attributes = None

        # parse indexed_condition_map (KeyConditions is mandatory)
        key_conditions = body.pop(parser.Props.KEY_CONDITIONS, None)
        validation.validate_object(key_conditions,
                                   parser.Props.KEY_CONDITIONS)
        indexed_condition_map = parser.Parser.parse_attribute_conditions(
            key_conditions, condition_class=IndexedCondition
        )

        # TODO(dukhlov):
        # it would be nice to validate given table_name, key_attributes and
        # attributes_to_get to schema expectation
        consistent_read = body.pop(parser.Props.CONSISTENT_READ, False)
        validation.validate_boolean(consistent_read,
                                    parser.Props.CONSISTENT_READ)
        limit = body.pop(parser.Props.LIMIT, None)
        if limit is not None:
            limit = validation.validate_integer(limit, parser.Props.LIMIT,
                                                min_val=0)

        scan_forward = body.pop(parser.Props.SCAN_INDEX_FORWARD, None)
        if scan_forward is not None:
            validation.validate_boolean(scan_forward,
                                        parser.Props.SCAN_INDEX_FORWARD)
            order_type = (
                models.ORDER_TYPE_ASC if scan_forward
                else models.ORDER_TYPE_DESC
            )
        else:
            order_type = None

        validation.validate_unexpected_props(body, "body")

    # select item
    result = storage.select_item(
        req.context, table_name, indexed_condition_map,
        select_type=select_type, index_name=index_name, limit=limit,
        consistent=consistent_read, order_type=order_type,
        exclusive_start_key=exclusive_start_key_attributes
    )

    # format response
    if select_type.type == models.SelectType.SELECT_TYPE_COUNT:
        response = {
            parser.Props.COUNT: result.count
        }
    else:
        response = {
            parser.Props.COUNT: result.count,
            parser.Props.ITEMS: [
                parser.Parser.format_item_attributes(row)
                for row in result.items
            ]
        }

    # A completely full page means more rows may remain.
    if limit == result.count:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key)
        )

    return response
def scan(self, req, body, project_id, table_name):
    """Scan a table with an optional filter and pagination support."""
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validation'):
        validation.validate_object(body, "body")

        # Requested attribute projection.
        requested_attrs = body.pop(parser.Props.ATTRIBUTES_TO_GET, None)
        if requested_attrs:
            validation.validate_list(requested_attrs,
                                     parser.Props.ATTRIBUTES_TO_GET)
            for name in requested_attrs:
                validation.validate_attr_name(name)

        select = body.pop(parser.Props.SELECT, None)
        if select is None:
            select = (models.SelectType.SELECT_TYPE_SPECIFIC
                      if requested_attrs
                      else models.SelectType.SELECT_TYPE_ALL)
        else:
            validation.validate_string(select, parser.Props.SELECT)
        select_type = models.SelectType(select, requested_attrs)

        page_limit = body.pop(parser.Props.LIMIT, None)
        if page_limit is not None:
            page_limit = validation.validate_integer(
                page_limit, parser.Props.LIMIT, min_val=0)

        # Pagination start key.
        start_key_json = body.pop(parser.Props.EXCLUSIVE_START_KEY, None)
        if start_key_json is not None:
            validation.validate_object(start_key_json,
                                       parser.Props.EXCLUSIVE_START_KEY)
            start_key = parser.Parser.parse_item_attributes(start_key_json)
        else:
            start_key = None

        # Optional scan filter.
        filter_json = body.pop(parser.Props.SCAN_FILTER, None)
        if filter_json:
            validation.validate_object(filter_json,
                                       parser.Props.SCAN_FILTER)
            condition_map = parser.Parser.parse_attribute_conditions(
                filter_json, condition_class=ScanCondition)
        else:
            condition_map = None

        # Parallel-scan parameters are validated but not forwarded to
        # storage.scan below.
        total_segments = validation.validate_integer(
            body.pop(parser.Props.TOTAL_SEGMENTS, 1),
            parser.Props.TOTAL_SEGMENTS, min_val=1, max_val=4096)
        segment = validation.validate_integer(
            body.pop(parser.Props.SEGMENT, 0),
            parser.Props.SEGMENT, min_val=0, max_val=total_segments)

        validation.validate_unexpected_props(body, "body")

    result = storage.scan(
        req.context, table_name, condition_map,
        attributes_to_get=requested_attrs, limit=page_limit,
        exclusive_start_key=start_key)

    response = {
        parser.Props.COUNT: result.count,
        parser.Props.SCANNED_COUNT: result.scanned_count,
    }

    if not select_type.is_count:
        response[parser.Props.ITEMS] = [
            parser.Parser.format_item_attributes(row)
            for row in result.items
        ]

    if result.last_evaluated_key:
        response[parser.Props.LAST_EVALUATED_KEY] = (
            parser.Parser.format_item_attributes(
                result.last_evaluated_key))

    return response