def test_create_table(self):
    """Table.create() should delegate to storage.create_table().

    The storage call is stubbed with mox; VerifyAll() asserts it was
    invoked exactly once with any arguments.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg())
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    self.storage_mocker.VerifyAll()
def test_create_table_no_range(self):
    """Table.create() with a hash-only primary key still succeeds.

    storage.create_table is stubbed to return a TableMeta describing a
    hash-key table with one local secondary index.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
        models.TableMeta(
            models.TableSchema(
                {
                    'hash': models.ATTRIBUTE_TYPE_NUMBER,
                    'indexed_field': models.ATTRIBUTE_TYPE_STRING
                },
                ['hash'],
                {"index_name": models.IndexDefinition('indexed_field')}
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE
        )
    )
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    self.storage_mocker.VerifyAll()
def test_create_table_duplicate(self):
    """A second create of the same table maps to ResourceInUseException.

    The first stubbed storage call succeeds; the second raises
    TableAlreadyExistsException, which the API layer must surface to the
    boto client as a JSONResponseError with error_code
    'ResourceInUseException'.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg())
    storage.create_table(
        IgnoreArg(), IgnoreArg()
    ).AndRaise(TableAlreadyExistsException)
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    try:
        Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )
        self.fail()
    except JSONResponseError as e:
        self.assertEqual('ResourceInUseException', e.error_code)
        self.storage_mocker.VerifyAll()
    except Exception as e:
        # Include the unexpected exception in the failure message so the
        # real cause is visible in the test report.
        self.fail(str(e))
def test_create_table(self):
    """ddb_table.Table.create() delegates to storage.create_table().

    The stub returns a fully-populated TableMeta (id, schema with a
    hash+range key and one LSI, ACTIVE status) so the client-side
    response formatting path is exercised.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()
    ).AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            models.TableSchema(
                {
                    'hash': models.AttributeType('N'),
                    'range': models.AttributeType('S'),
                    'indexed_field': models.AttributeType('S')
                },
                ['hash', 'range'],
                {
                    "index_name": models.IndexDefinition('hash',
                                                         'indexed_field')
                }
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )
    self.storage_mocker.ReplayAll()

    ddb_table.Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.HashKey('hash', data_type=schema_types.NUMBER),
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    self.storage_mocker.VerifyAll()
def test_create_table(self):
    """Table.create() delegates to storage.create_table().

    The stub returns a TableMeta with a hash+range primary key and one
    local secondary index; VerifyAll() checks the storage call happened.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            models.TableSchema(
                {
                    'hash': models.AttributeType('N'),
                    'range': models.AttributeType('S'),
                    'indexed_field': models.AttributeType('S')
                },
                ['hash', 'range'],
                {
                    "index_name": models.IndexDefinition('hash',
                                                         'indexed_field')
                }
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.HashKey('hash', data_type=schema_types.NUMBER),
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    self.storage_mocker.VerifyAll()
def __call__(self):
    """Handle a DynamoDB CreateTable action.

    Parses the request body into a TableSchema, creates the table via
    storage, and formats a TableDescription response.

    Raises:
        AWSValidationException: the request body failed to parse.
        AWSDuplicateTableError: a table with this name already exists.
        AWSErrorResponseException: any other storage-side failure.
    """
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse table attributes
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            self.action_params.get(parser.Props.ATTRIBUTE_DEFINITIONS, {})
        )

        # parse table key schema
        key_attrs = parser.Parser.parse_key_schema(
            self.action_params.get(parser.Props.KEY_SCHEMA, [])
        )

        # parse table indexed field list
        indexed_def_map = parser.Parser.parse_local_secondary_indexes(
            self.action_params.get(parser.Props.LOCAL_SECONDARY_INDEXES, [])
        )

        # prepare table_schema structure
        table_schema = models.TableSchema(
            attribute_definitions, key_attrs, indexed_def_map)
    except Exception:
        # Any parsing problem is reported to the client as a validation
        # error rather than an internal one.
        raise AWSValidationException()

    try:
        # creating table
        table_meta = storage.create_table(
            self.context, table_name, table_schema)

        result = {
            parser.Props.TABLE_DESCRIPTION: {
                parser.Props.ATTRIBUTE_DEFINITIONS: (
                    parser.Parser.format_attribute_definitions(
                        table_meta.schema.attribute_type_map
                    )
                ),
                parser.Props.CREATION_DATE_TIME: 0,
                parser.Props.ITEM_COUNT: 0,
                parser.Props.KEY_SCHEMA: (
                    parser.Parser.format_key_schema(
                        table_meta.schema.key_attributes
                    )
                ),
                parser.Props.PROVISIONED_THROUGHPUT: (
                    parser.Values.PROVISIONED_THROUGHPUT_DUMMY
                ),
                parser.Props.TABLE_NAME: table_name,
                parser.Props.TABLE_STATUS: (
                    parser.Parser.format_table_status(table_meta.status)
                ),
                parser.Props.TABLE_SIZE_BYTES: 0,
            }
        }

        if table_meta.schema.index_def_map:
            table_def = result[parser.Props.TABLE_DESCRIPTION]
            table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
                parser.Parser.format_local_secondary_indexes(
                    table_meta.schema.key_attributes[0],
                    table_meta.schema.index_def_map
                )
            )

        return result
    except TableAlreadyExistsException:
        raise AWSDuplicateTableError(table_name)
    except AWSErrorResponseException:
        # Bare re-raise preserves the original traceback (``raise e``
        # would truncate it).
        raise
    except Exception:
        raise AWSErrorResponseException()
def create_table(self, req, body, project_id):
    """Create a table for *project_id* from a JSON request body.

    Validates the body against ``self.schema``, parses it into a
    TableSchema, creates the table via storage, and returns a
    TableDescription dict including self/bookmark links.
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    table_name = body.get(parser.Props.TABLE_NAME)

    # parse table attributes
    attribute_definitions = parser.Parser.parse_attribute_definitions(
        body.get(parser.Props.ATTRIBUTE_DEFINITIONS, {})
    )

    # parse table key schema
    key_attrs = parser.Parser.parse_key_schema(
        body.get(parser.Props.KEY_SCHEMA, [])
    )

    # parse table indexed field list
    indexed_attr_names = parser.Parser.parse_local_secondary_indexes(
        body.get(parser.Props.LOCAL_SECONDARY_INDEXES, [])
    )

    # prepare table_schema structure
    table_schema = models.TableSchema(
        attribute_definitions, key_attrs, indexed_attr_names)

    # creating table
    req.context.tenant = project_id
    table_meta = storage.create_table(req.context, table_name, table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map
                )
            ),
            parser.Props.CREATION_DATE_TIME: 0,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes
                )
            ),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)
            ),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [
                {
                    parser.Props.HREF: url,
                    parser.Props.REL: parser.Values.SELF
                },
                {
                    parser.Props.HREF: bookmark,
                    parser.Props.REL: parser.Values.BOOKMARK
                },
            ],
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map
            )
        )

    return result
def create_table(self, req, body, project_id):
    """Create a table for *project_id* with field-by-field validation.

    Request-body validation is timed under a Probe; each property is
    popped from *body* so that validate_unexpected_props() can reject
    leftovers. Returns a TableDescription dict with self/bookmark links.
    """
    utils.check_project_id(req.context, project_id)
    req.context.tenant = project_id

    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # parse table attributes
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None
        )
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS
        )
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json
        )

        # parse table key schema
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # parse table indexed field list
        lsi_defs_json = body.pop(
            parser.Props.LOCAL_SECONDARY_INDEXES, None
        )
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES
            )
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json
            )
        else:
            index_def_map = {}

        # everything handled above has been popped; reject anything left
        validation.validate_unexpected_props(body, "body")

    # prepare table_schema structure
    table_schema = models.TableSchema(
        attribute_definitions, key_attrs, index_def_map)

    table_meta = storage.create_table(
        req.context, table_name, table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map
                )
            ),
            parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes
                )
            ),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)
            ),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [
                {
                    parser.Props.HREF: url,
                    parser.Props.REL: parser.Values.SELF
                },
                {
                    parser.Props.HREF: bookmark,
                    parser.Props.REL: parser.Values.BOOKMARK
                }
            ]
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map
            )
        )

    return result
def create_table(self, req, body, project_id):
    """Create a table for *project_id* from a JSON request body.

    Validates *body* against ``self.schema``, parses it into a
    TableSchema, creates the table via storage, and returns a
    TableDescription dict including self/bookmark links.
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    table_name = body.get(parser.Props.TABLE_NAME)

    # parse table attributes
    attribute_definitions = parser.Parser.parse_attribute_definitions(
        body.get(parser.Props.ATTRIBUTE_DEFINITIONS, {}))

    # parse table key schema
    key_attrs = parser.Parser.parse_key_schema(
        body.get(parser.Props.KEY_SCHEMA, []))

    # parse table indexed field list
    indexed_attr_names = parser.Parser.parse_local_secondary_indexes(
        body.get(parser.Props.LOCAL_SECONDARY_INDEXES, []))

    # prepare table_schema structure
    table_schema = models.TableSchema(attribute_definitions, key_attrs,
                                      indexed_attr_names)

    # creating table
    req.context.tenant = project_id
    table_meta = storage.create_table(req.context, table_name, table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map)),
            parser.Props.CREATION_DATE_TIME: 0,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes)),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [{
                parser.Props.HREF: url,
                parser.Props.REL: parser.Values.SELF
            }, {
                parser.Props.HREF: bookmark,
                parser.Props.REL: parser.Values.BOOKMARK
            }]
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return result
def __call__(self):
    """Handle a DynamoDB CreateTable action (legacy schema layout).

    Note: this variant passes *table_name* into TableSchema and formats
    the response from the parsed request rather than from storage output.

    Raises:
        exception.ValidationException: the request body failed to parse.
        exception.ResourceInUseException: the table already exists.
        exception.AWSErrorResponseException: any other storage failure.
    """
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse table attributes
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            self.action_params.get(parser.Props.ATTRIBUTE_DEFINITIONS, {})
        )

        # parse table key schema
        key_attrs = parser.Parser.parse_key_schema(
            self.action_params.get(parser.Props.KEY_SCHEMA, [])
        )

        # parse table indexed field list
        indexed_attr_names = parser.Parser.parse_local_secondary_indexes(
            self.action_params.get(
                parser.Props.LOCAL_SECONDARY_INDEXES, [])
        )

        # prepare table_schema structure
        table_schema = models.TableSchema(
            table_name, attribute_definitions, key_attrs,
            indexed_attr_names)
    except Exception:
        raise exception.ValidationException()

    try:
        # creating table
        storage.create_table(self.context, table_schema)

        return {
            parser.Props.TABLE_DESCRIPTION: {
                parser.Props.ATTRIBUTE_DEFINITIONS: (
                    parser.Parser.format_attribute_definitions(
                        attribute_definitions
                    )
                ),
                parser.Props.CREATION_DATE_TIME: 0,
                parser.Props.ITEM_COUNT: 0,
                parser.Props.KEY_SCHEMA: (
                    parser.Parser.format_key_schema(
                        key_attrs
                    )
                ),
                parser.Props.LOCAL_SECONDARY_INDEXES: (
                    parser.Parser.format_local_secondary_indexes(
                        key_attrs[0], indexed_attr_names
                    )
                ),
                parser.Props.PROVISIONED_THROUGHPUT: (
                    parser.Values.PROVISIONED_THROUGHPUT_DUMMY
                ),
                parser.Props.TABLE_NAME: table_name,
                parser.Props.TABLE_STATUS: (
                    parser.Values.TABLE_STATUS_ACTIVE
                ),
                parser.Props.TABLE_SIZE_BYTES: 0
            }
        }
    except exception.TableAlreadyExistsException:
        raise exception.ResourceInUseException()
    except exception.AWSErrorResponseException:
        # Bare re-raise preserves the original traceback (``raise e``
        # would truncate it).
        raise
    except Exception:
        raise exception.AWSErrorResponseException()
def create_table(self, req, body, project_id):
    """Create a table from a validated JSON request body.

    Field-by-field validation runs under a Probe; each property is
    popped from *body* so validate_unexpected_props() can reject
    leftovers. The response includes the new table's id and
    self/bookmark links.
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # parse table attributes
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None)
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json)

        # parse table key schema
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # parse table indexed field list
        lsi_defs_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES, None)
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json)
        else:
            index_def_map = {}

        # everything handled above has been popped; reject anything left
        validation.validate_unexpected_props(body, "body")

    # prepare table_schema structure
    table_schema = models.TableSchema(attribute_definitions, key_attrs,
                                      index_def_map)

    table_meta = storage.create_table(req.context, table_name, table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map)),
            parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes)),
            parser.Props.TABLE_ID: str(table_meta.id),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [{
                parser.Props.HREF: url,
                parser.Props.REL: parser.Values.SELF
            }, {
                parser.Props.HREF: bookmark,
                parser.Props.REL: parser.Values.BOOKMARK
            }]
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return result
def __call__(self):
    """Handle a DynamoDB CreateTable action.

    Parses the request body into a TableSchema, creates the table via
    storage, and formats a TableDescription from the returned metadata.

    Raises:
        AWSValidationException: the request body failed to parse.
        AWSDuplicateTableError: a table with this name already exists.
        AWSErrorResponseException: any other storage-side failure.
    """
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse table attributes
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            self.action_params.get(parser.Props.ATTRIBUTE_DEFINITIONS, {})
        )

        # parse table key schema
        key_attrs = parser.Parser.parse_key_schema(
            self.action_params.get(parser.Props.KEY_SCHEMA, [])
        )

        # parse table indexed field list
        indexed_def_map = parser.Parser.parse_local_secondary_indexes(
            self.action_params.get(
                parser.Props.LOCAL_SECONDARY_INDEXES, [])
        )

        # prepare table_schema structure
        table_schema = models.TableSchema(
            attribute_definitions, key_attrs, indexed_def_map
        )
    except Exception:
        raise AWSValidationException()

    try:
        # creating table
        table_meta = storage.create_table(
            self.context, table_name, table_schema
        )

        result = {
            parser.Props.TABLE_DESCRIPTION: {
                parser.Props.ATTRIBUTE_DEFINITIONS: (
                    parser.Parser.format_attribute_definitions(
                        table_meta.schema.attribute_type_map
                    )
                ),
                parser.Props.CREATION_DATE_TIME: 0,
                parser.Props.ITEM_COUNT: 0,
                parser.Props.KEY_SCHEMA: (
                    parser.Parser.format_key_schema(
                        table_meta.schema.key_attributes
                    )
                ),
                parser.Props.PROVISIONED_THROUGHPUT: (
                    parser.Values.PROVISIONED_THROUGHPUT_DUMMY
                ),
                parser.Props.TABLE_NAME: table_name,
                parser.Props.TABLE_STATUS: (
                    parser.Parser.format_table_status(table_meta.status)
                ),
                parser.Props.TABLE_SIZE_BYTES: 0
            }
        }

        if table_meta.schema.index_def_map:
            table_def = result[parser.Props.TABLE_DESCRIPTION]
            table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
                parser.Parser.format_local_secondary_indexes(
                    table_meta.schema.key_attributes[0],
                    table_meta.schema.index_def_map
                )
            )

        return result
    except TableAlreadyExistsException:
        raise AWSDuplicateTableError(table_name)
    except AWSErrorResponseException:
        # Bare re-raise preserves the original traceback (``raise e``
        # would truncate it).
        raise
    except Exception:
        raise AWSErrorResponseException()
def create_table(self, req, body, project_id):
    """Create a table for *project_id* (legacy schema-first variant).

    Note: this variant passes *table_name* into TableSchema and formats
    the response from the parsed request rather than from storage output.

    Raises:
        exception.ValidationException: the request body failed to parse.
        exception.ResourceInUseException: the table already exists.
        exception.AWSErrorResponseException: any other storage failure.
    """
    validation.validate_params(self.schema, body)

    try:
        table_name = body.get(parser.Props.TABLE_NAME, None)

        # parse table attributes
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            body.get(parser.Props.ATTRIBUTE_DEFINITIONS, {})
        )

        # parse table key schema
        key_attrs = parser.Parser.parse_key_schema(
            body.get(parser.Props.KEY_SCHEMA, [])
        )

        # parse table indexed field list
        indexed_attr_names = parser.Parser.parse_local_secondary_indexes(
            body.get(
                parser.Props.LOCAL_SECONDARY_INDEXES, [])
        )

        # prepare table_schema structure
        table_schema = models.TableSchema(
            table_name, attribute_definitions, key_attrs,
            indexed_attr_names)
    except Exception:
        raise exception.ValidationException()

    try:
        # creating table
        req.context.tenant = project_id
        storage.create_table(req.context, table_schema)

        url = req.path_url + "/" + table_name
        bookmark = req.path_url + "/" + table_name

        return {
            parser.Props.TABLE_DESCRIPTION: {
                parser.Props.ATTRIBUTE_DEFINITIONS: (
                    parser.Parser.format_attribute_definitions(
                        attribute_definitions
                    )
                ),
                parser.Props.CREATION_DATE_TIME: 0,
                parser.Props.ITEM_COUNT: 0,
                parser.Props.KEY_SCHEMA: (
                    parser.Parser.format_key_schema(
                        key_attrs
                    )
                ),
                parser.Props.LOCAL_SECONDARY_INDEXES: (
                    parser.Parser.format_local_secondary_indexes(
                        key_attrs[0], indexed_attr_names
                    )
                ),
                parser.Props.TABLE_NAME: table_name,
                parser.Props.TABLE_STATUS: (
                    parser.Values.TABLE_STATUS_ACTIVE
                ),
                parser.Props.TABLE_SIZE_BYTES: 0,
                parser.Props.LINKS: [
                    {
                        parser.Props.HREF: url,
                        parser.Props.REL: parser.Values.SELF
                    },
                    {
                        parser.Props.HREF: bookmark,
                        parser.Props.REL: parser.Values.BOOKMARK
                    }
                ]
            }
        }
    except exception.TableAlreadyExistsException:
        raise exception.ResourceInUseException()
    except exception.AWSErrorResponseException:
        # Bare re-raise preserves the original traceback (``raise e``
        # would truncate it).
        raise
    except Exception:
        raise exception.AWSErrorResponseException()
def test_create_table_duplicate(self):
    """A duplicate create maps to ResourceInUseException with a message.

    First stubbed storage call returns ACTIVE table metadata; the second
    raises TableAlreadyExistsException, which must surface to the boto
    client as a JSONResponseError carrying 'ResourceInUseException' and
    the 'Table already exists: test' message.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()
    ).AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            models.TableSchema(
                {
                    'hash': models.AttributeType('N'),
                    'range': models.AttributeType('S'),
                    'indexed_field': models.AttributeType('S')
                },
                ['hash', 'range'],
                {
                    "index_name": models.IndexDefinition('hash',
                                                         'indexed_field')
                }
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )
    storage.create_table(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()
    ).AndRaise(exception.TableAlreadyExistsException)
    self.storage_mocker.ReplayAll()

    ddb_table.Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.HashKey('hash', data_type=schema_types.NUMBER),
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    try:
        ddb_table.Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.HashKey('hash',
                                       data_type=schema_types.NUMBER),
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )
        self.fail()
    except boto_exc.JSONResponseError as e:
        self.assertEqual('ResourceInUseException', e.error_code)
        self.assertEqual('Table already exists: test', e.body['message'])
        self.storage_mocker.VerifyAll()
    except Exception as e:
        # Include the unexpected exception in the failure message so the
        # real cause is visible in the test report.
        self.fail(str(e))
def test_create_table_duplicate(self):
    """A duplicate create maps to ResourceInUseException.

    The first stubbed storage call returns ACTIVE table metadata; the
    second raises TableAlreadyExistsException, which must surface to the
    boto client as a JSONResponseError with 'ResourceInUseException'.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
        models.TableMeta(
            models.TableSchema(
                {
                    'hash': models.ATTRIBUTE_TYPE_NUMBER,
                    'range': models.ATTRIBUTE_TYPE_STRING,
                    'indexed_field': models.ATTRIBUTE_TYPE_STRING
                },
                ['hash', 'range'],
                {
                    "index_name": models.IndexDefinition('indexed_field')
                }
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE
        )
    )
    storage.create_table(
        IgnoreArg(), IgnoreArg(), IgnoreArg()
    ).AndRaise(TableAlreadyExistsException)
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    try:
        Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )
        self.fail()
    except JSONResponseError as e:
        self.assertEqual('ResourceInUseException', e.error_code)
        self.storage_mocker.VerifyAll()
    except Exception as e:
        # Include the unexpected exception in the failure message so the
        # real cause is visible in the test report.
        self.fail(str(e))
def test_create_table_duplicate(self):
    """A duplicate create maps to ResourceInUseException.

    The first stubbed storage call returns ACTIVE table metadata; the
    second raises TableAlreadyExistsException, which must surface to the
    boto client as a JSONResponseError with 'ResourceInUseException'.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
        models.TableMeta(
            models.TableSchema(
                {
                    'hash': models.ATTRIBUTE_TYPE_NUMBER,
                    'range': models.ATTRIBUTE_TYPE_STRING,
                    'indexed_field': models.ATTRIBUTE_TYPE_STRING
                },
                ['hash', 'range'],
                {"index_name": models.IndexDefinition('indexed_field')}
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE
        )
    )
    storage.create_table(
        IgnoreArg(), IgnoreArg(), IgnoreArg()
    ).AndRaise(TableAlreadyExistsException)
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={
            'read': 20,
            'write': 10,
        },
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    try:
        Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.RangeKey(
                            'indexed_field',
                            data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )
        self.fail()
    except JSONResponseError as e:
        self.assertEqual('ResourceInUseException', e.error_code)
        self.storage_mocker.VerifyAll()
    except Exception as e:
        # Include the unexpected exception in the failure message so the
        # real cause is visible in the test report.
        self.fail(str(e))
def create_table(self, req, body, project_id):
    """Create a table, enforcing unique key schema across table and LSIs.

    Field-by-field validation runs under a Probe; each property is
    popped from *body* so validate_unexpected_props() can reject
    leftovers. A hash-only table may not define indices, and no two of
    (table range key, index range keys) may coincide. The response
    includes the new table's id and self/bookmark links.
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # parse table attributes
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None
        )
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS
        )
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json
        )

        # parse table key schema
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # parse table indexed field list
        lsi_defs_json = body.pop(
            parser.Props.LOCAL_SECONDARY_INDEXES, None
        )
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES
            )
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json
            )
        else:
            index_def_map = {}

        # validate the uniqueness of table and its indices' key schema
        range_keys = []
        if len(key_attrs) > 1:
            range_keys.append(key_attrs[1])
        else:
            # table has hash type primary key
            if len(index_def_map) > 0:
                raise exception.ValidationError(
                    _("Table without range key in primary key schema "
                      "can not have indices"))
        for index in index_def_map.values():
            range_keys.append(index.alt_range_key_attr)
        try:
            validation.validate_set(range_keys, "key_schema")
        except exception.ValidationError:
            raise exception.ValidationError(
                _("Table and its indices must have unique key schema"))

        # everything handled above has been popped; reject anything left
        validation.validate_unexpected_props(body, "body")

    # prepare table_schema structure
    table_schema = models.TableSchema(
        attribute_definitions, key_attrs, index_def_map)

    table_meta = storage.create_table(
        req.context, table_name, table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map
                )
            ),
            parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes
                )
            ),
            parser.Props.TABLE_ID: str(table_meta.id),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)
            ),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [
                {
                    parser.Props.HREF: url,
                    parser.Props.REL: parser.Values.SELF
                },
                {
                    parser.Props.HREF: bookmark,
                    parser.Props.REL: parser.Values.BOOKMARK
                }
            ]
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map
            )
        )

    return result
def create_table(self, req, body, project_id):
    """Create a table, enforcing unique key schema across table and LSIs.

    Compact-format variant: validation runs under a Probe, each property
    is popped from *body* so validate_unexpected_props() can reject
    leftovers, a hash-only table may not define indices, and no two of
    (table range key, index range keys) may coincide.
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # parse table attributes
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None)
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json)

        # parse table key schema
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # parse table indexed field list
        lsi_defs_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES, None)
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json)
        else:
            index_def_map = {}

        # validate the uniqueness of table and its indices' key schema
        range_keys = []
        if len(key_attrs) > 1:
            range_keys.append(key_attrs[1])
        else:
            # table has hash type primary key
            if len(index_def_map) > 0:
                raise exception.ValidationError(
                    _("Table without range key in primary key schema "
                      "can not have indices"))
        for index in index_def_map.values():
            range_keys.append(index.alt_range_key_attr)
        try:
            validation.validate_set(range_keys, "key_schema")
        except exception.ValidationError:
            raise exception.ValidationError(
                _("Table and its indices must have unique key schema"))

        # everything handled above has been popped; reject anything left
        validation.validate_unexpected_props(body, "body")

    # prepare table_schema structure
    table_schema = models.TableSchema(attribute_definitions, key_attrs,
                                      index_def_map)

    table_meta = storage.create_table(req.context, table_name, table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    result = {
        parser.Props.TABLE_DESCRIPTION: {
            parser.Props.ATTRIBUTE_DEFINITIONS: (
                parser.Parser.format_attribute_definitions(
                    table_meta.schema.attribute_type_map)),
            parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
            parser.Props.ITEM_COUNT: 0,
            parser.Props.KEY_SCHEMA: (
                parser.Parser.format_key_schema(
                    table_meta.schema.key_attributes)),
            parser.Props.TABLE_ID: str(table_meta.id),
            parser.Props.TABLE_NAME: table_name,
            parser.Props.TABLE_STATUS: (
                parser.Parser.format_table_status(table_meta.status)),
            parser.Props.TABLE_SIZE_BYTES: 0,
            parser.Props.LINKS: [{
                parser.Props.HREF: url,
                parser.Props.REL: parser.Values.SELF
            }, {
                parser.Props.HREF: bookmark,
                parser.Props.REL: parser.Values.BOOKMARK
            }]
        }
    }

    if table_meta.schema.index_def_map:
        table_def = result[parser.Props.TABLE_DESCRIPTION]
        table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return result