def test_describe_table(self):
    """DescribeTable via boto Table: name and attribute defs are reported."""
    self.storage_mocker.StubOutWithMock(storage, 'describe_table')
    schema = models.TableSchema(
        {
            'city1': models.ATTRIBUTE_TYPE_STRING,
            'id': models.ATTRIBUTE_TYPE_STRING,
            'name': models.ATTRIBUTE_TYPE_STRING
        },
        ['id', 'name'],
        {'index_name': models.IndexDefinition('city1')})
    storage.describe_table(IgnoreArg(), 'test_table').AndReturn(
        models.TableMeta(schema, models.TableMeta.TABLE_STATUS_ACTIVE))
    self.storage_mocker.ReplayAll()

    described = Table(
        'test_table', connection=self.DYNAMODB_CON).describe()

    self.storage_mocker.VerifyAll()

    self.assertEquals('test_table', described['Table']['TableName'])
    expected_attr_defs = [
        {"AttributeName": "city1", "AttributeType": "S"},
        {"AttributeName": "id", "AttributeType": "S"},
        {"AttributeName": "name", "AttributeType": "S"},
    ]
    self.assertItemsEqual(expected_attr_defs,
                          described['Table']['AttributeDefinitions'])
def test_create_table_no_range(self):
    """CreateTable with a hash-only primary key plus one keys-only LSI."""
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    schema = models.TableSchema(
        {
            'hash': models.ATTRIBUTE_TYPE_NUMBER,
            'indexed_field': models.ATTRIBUTE_TYPE_STRING
        },
        ['hash'],
        {"index_name": models.IndexDefinition('indexed_field')})
    storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
        models.TableMeta(schema, models.TableMeta.TABLE_STATUS_ACTIVE))
    self.storage_mocker.ReplayAll()

    Table.create(
        "test",
        schema=[fields.HashKey('hash', data_type=schema_types.NUMBER)],
        throughput={'read': 20, 'write': 10},
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[fields.RangeKey('indexed_field',
                                       data_type=schema_types.STRING)])
        ],
        connection=self.DYNAMODB_CON)

    self.storage_mocker.VerifyAll()
def test_delete_table(self):
    """DeleteTable returns True; describe_table backs boto's table state."""
    self.storage_mocker.StubOutWithMock(storage, 'delete_table')
    self.storage_mocker.StubOutWithMock(storage, 'describe_table')
    storage.delete_table(mox.IgnoreArg(), 'test_table')
    schema = models.TableSchema(
        {
            'city1': models.AttributeType('S'),
            'id': models.AttributeType('S'),
            'name': models.AttributeType('S')
        },
        ['id', 'name'],
        {'index_name': models.IndexDefinition('id', 'city1')}
    )
    storage.describe_table(mox.IgnoreArg(), 'test_table').AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            schema,
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )
    self.storage_mocker.ReplayAll()

    table = ddb_table.Table('test_table', connection=self.DYNAMODB_CON)
    self.assertTrue(table.delete())

    self.storage_mocker.VerifyAll()
def test_update_item(self):
    """partial_save issues storage.update_item for the changed attribute."""
    self.storage_mocker.StubOutWithMock(storage, 'get_item')

    hash_key = "4.5621201231232132132132132132132142354E126"
    range_key = "range"

    stored_item = {
        "hash_key": models.AttributeValue('N', hash_key),
        "range_key": models.AttributeValue('S', range_key),
        "attr_value": models.AttributeValue('S', 'val')
    }
    storage.get_item(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
        select_type=mox.IgnoreArg(), consistent=mox.IgnoreArg()
    ).AndReturn(models.SelectResult(items=[stored_item]))

    self.storage_mocker.StubOutWithMock(storage, 'describe_table')
    storage.describe_table(mox.IgnoreArg(), 'test_table').AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            models.TableSchema(
                {
                    'hash_key': models.AttributeType('N'),
                    'range_key': models.AttributeType('S')
                },
                ['hash_key', 'range_key'],
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )

    self.storage_mocker.StubOutWithMock(storage, 'update_item')
    storage.update_item(
        mox.IgnoreArg(), mox.IgnoreArg(),
        key_attribute_map=mox.IgnoreArg(),
        attribute_action_map=mox.IgnoreArg(),
        expected_condition_map=mox.IgnoreArg()).AndReturn((True, None))
    self.storage_mocker.ReplayAll()

    table = ddb_table.Table('test_table', connection=self.DYNAMODB_CON)
    item = table.get_item(consistent=False, hash_key=1, range_key="range")
    item['attr_value'] = 'updated'
    item.partial_save()

    self.storage_mocker.VerifyAll()
def test_update_item(self):
    """partial_save triggers storage.update_item for the modified field."""
    self.storage_mocker.StubOutWithMock(storage, 'select_item')

    hash_key = "4.5621201231232132132132132132132142354E126"
    range_key = "range"

    stored_item = {
        "hash_key": models.AttributeValue.number(hash_key),
        "range_key": models.AttributeValue.str(range_key),
        "attr_value": models.AttributeValue.str('val')
    }
    storage.select_item(
        IgnoreArg(), IgnoreArg(), IgnoreArg(),
        select_type=IgnoreArg(), limit=IgnoreArg(),
        consistent=IgnoreArg()).AndReturn(
        models.SelectResult(items=[stored_item]))

    self.storage_mocker.StubOutWithMock(storage, 'describe_table')
    storage.describe_table(IgnoreArg(), 'test_table').AndReturn(
        models.TableMeta(
            models.TableSchema(
                {
                    'hash_key': models.ATTRIBUTE_TYPE_NUMBER,
                    'range_key': models.ATTRIBUTE_TYPE_STRING
                },
                ['hash_key', 'range_key'],
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE))

    self.storage_mocker.StubOutWithMock(storage, 'update_item')
    storage.update_item(
        IgnoreArg(), IgnoreArg(),
        key_attribute_map=IgnoreArg(),
        attribute_action_map=IgnoreArg(),
        expected_condition_map=IgnoreArg()).AndReturn(True)
    self.storage_mocker.ReplayAll()

    table = Table('test_table', connection=self.DYNAMODB_CON)
    item = table.get_item(consistent=False, hash_key=1, range_key="range")
    item['attr_value'] = 'updated'
    item.partial_save()

    self.storage_mocker.VerifyAll()
def test_create_table(self):
    """CreateTable with hash+range primary key and one keys-only LSI."""
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    schema = models.TableSchema(
        {
            'hash': models.AttributeType('N'),
            'range': models.AttributeType('S'),
            'indexed_field': models.AttributeType('S')
        },
        ['hash', 'range'],
        {"index_name": models.IndexDefinition('hash', 'indexed_field')}
    )
    storage.create_table(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()
    ).AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            schema,
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )
    self.storage_mocker.ReplayAll()

    ddb_table.Table.create(
        "test",
        schema=[
            fields.HashKey('hash', data_type=schema_types.NUMBER),
            fields.RangeKey('range', data_type=schema_types.STRING)
        ],
        throughput={'read': 20, 'write': 10},
        indexes=[
            fields.KeysOnlyIndex(
                'index_name',
                parts=[
                    fields.HashKey('hash', data_type=schema_types.NUMBER),
                    fields.RangeKey('indexed_field',
                                    data_type=schema_types.STRING)
                ]
            )
        ],
        connection=self.DYNAMODB_CON
    )

    self.storage_mocker.VerifyAll()
def test_update_item_add_number(self, mock_select_item):
    """ADD on a numeric attribute issues conditional (IF) UPDATE queries.

    Each ADD re-reads the current value, so ten sequential ADD(1) actions
    must produce ten CAS-style UPDATE statements: 0->1, 1->2, ..., 9->10.
    """
    mock_execute_query = mock.Mock(return_value=[{'[applied]': True}])
    table_schema = models.TableSchema(
        key_attributes=['hash_key', 'range_key'],
        attribute_type_map={'hash_key': None, 'range_key': None,
                            'ViewsCount': None},
        index_def_map=None
    )
    driver = self.get_connection(mock_execute_query, table_schema)

    # One select result per iteration: ViewsCount = 0, 1, ..., 9.
    mock_select_item.side_effect = [
        mock.Mock(items=[{'ViewsCount': models.AttributeValue('N', n)}])
        for n in range(10)
    ]

    context = mock.Mock(tenant='fake_tenant')
    table_info = mock.Mock(
        schema=table_schema,
        internal_name='"u_fake_tenant"."u_fake_table"'
    )
    key_attrs = {
        'hash_key': models.AttributeValue('N', 1),
        'range_key': models.AttributeValue('S', 'two')
    }
    attr_actions = {
        'ViewsCount': models.UpdateItemAction(
            models.UpdateItemAction.UPDATE_ACTION_ADD,
            models.AttributeValue('N', 1)
        )
    }

    for _ in range(10):
        driver.update_item(context, table_info, key_attrs, attr_actions)

    expected_calls = [
        mock.call('UPDATE "u_fake_tenant"."u_fake_table" SET '
                  '"u_ViewsCount"=%d WHERE "u_hash_key"=1 AND '
                  '"u_range_key"=\'two\' '
                  'IF "u_ViewsCount"=%d' % (i, i - 1), consistent=True)
        for i in range(1, 11)
    ]
    self.assertEqual(expected_calls, mock_execute_query.mock_calls)
def test_update_item_delete_set(self, mock_select_item):
    """DELETE on a set attribute removes members via a conditional UPDATE."""
    mock_execute_query = mock.Mock(return_value=[{'[applied]': True}])
    table_schema = models.TableSchema(
        key_attributes=['hash_key', 'range_key'],
        attribute_type_map={'hash_key': None, 'range_key': None,
                            'Tags': None},
        index_def_map=None
    )
    driver = self.get_connection(mock_execute_query, table_schema)

    current_tags = models.AttributeValue('SS', {"Update", "Help"})
    mock_select_item.return_value = mock.Mock(
        items=[{'Tags': current_tags}])

    context = mock.Mock(tenant='fake_tenant')
    table_info = mock.Mock(
        schema=table_schema,
        internal_name='"u_fake_tenant"."u_fake_table"'
    )
    key_attrs = {
        'hash_key': models.AttributeValue('N', 1),
        'range_key': models.AttributeValue('S', 'two')
    }
    attr_actions = {
        'Tags': models.UpdateItemAction(
            models.UpdateItemAction.UPDATE_ACTION_DELETE,
            models.AttributeValue('SS', {"Update"})
        )
    }

    driver.update_item(context, table_info, key_attrs, attr_actions)

    expected_calls = [
        mock.call('UPDATE "u_fake_tenant"."u_fake_table" SET '
                  '"u_Tags"={\'Help\'} WHERE "u_hash_key"=1 AND '
                  '"u_range_key"=\'two\' '
                  'IF "u_Tags"={\'Help\',\'Update\'}', consistent=True)
    ]
    self.assertEqual(expected_calls, mock_execute_query.mock_calls)
def test_delete_table(self):
    """table.delete() returns True when storage.delete_table succeeds."""
    self.storage_mocker.StubOutWithMock(storage, 'delete_table')
    self.storage_mocker.StubOutWithMock(storage, 'describe_table')
    storage.delete_table(IgnoreArg(), 'test_table')
    schema = models.TableSchema(
        {
            'city1': models.ATTRIBUTE_TYPE_STRING,
            'id': models.ATTRIBUTE_TYPE_STRING,
            'name': models.ATTRIBUTE_TYPE_STRING
        },
        ['id', 'name'],
        {'index_name': models.IndexDefinition('city1')})
    storage.describe_table(IgnoreArg(), 'test_table').AndReturn(
        models.TableMeta(schema, models.TableMeta.TABLE_STATUS_ACTIVE))
    self.storage_mocker.ReplayAll()

    table = Table('test_table', connection=self.DYNAMODB_CON)
    self.assertTrue(table.delete())

    self.storage_mocker.VerifyAll()
def test_create_table_duplicate(self):
    """Second CreateTable for the same name raises ResourceInUseException.

    Bug fix: ``self.fail()`` previously sat inside the ``try`` body, so
    the AssertionError it raised (when no exception occurred) was caught
    by the trailing broad ``except Exception`` clause and re-failed with
    its message lost. The success path now lives in ``else:``, and an
    unexpected exception type propagates with its real traceback instead
    of being masked by a bare ``self.fail()``.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(IgnoreArg(), IgnoreArg(), IgnoreArg()).AndReturn(
        models.TableMeta(
            models.TableSchema(
                {
                    'hash': models.ATTRIBUTE_TYPE_NUMBER,
                    'range': models.ATTRIBUTE_TYPE_STRING,
                    'indexed_field': models.ATTRIBUTE_TYPE_STRING
                },
                ['hash', 'range'],
                {"index_name": models.IndexDefinition('indexed_field')}),
            models.TableMeta.TABLE_STATUS_ACTIVE))
    storage.create_table(
        IgnoreArg(), IgnoreArg(), IgnoreArg()
    ).AndRaise(TableAlreadyExistsException)
    self.storage_mocker.ReplayAll()

    def create_test_table():
        # Issue an identical CreateTable request each time (deduplicates
        # the two copy-pasted call sites of the original test).
        return Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ])
            ],
            connection=self.DYNAMODB_CON)

    create_test_table()

    try:
        create_test_table()
    except JSONResponseError as e:
        self.assertEqual('ResourceInUseException', e.error_code)
    else:
        self.fail()
    self.storage_mocker.VerifyAll()
def create_table(self, req, body, project_id):
    """Create a table described by the JSON request body.

    :param req: request object; ``req.context`` carries the auth context
    :param body: parsed JSON payload of the CreateTable request
    :param project_id: tenant/project the table belongs to
    :returns: dict with the ``table_description`` response section
    """
    utils.check_project_id(req.context, project_id)
    jsonschema.validate(body, self.schema)

    table_name = body.get(parser.Props.TABLE_NAME)

    # Parse table attributes.
    attribute_definitions = parser.Parser.parse_attribute_definitions(
        body.get(parser.Props.ATTRIBUTE_DEFINITIONS, {}))

    # Parse table key schema.
    key_attrs = parser.Parser.parse_key_schema(
        body.get(parser.Props.KEY_SCHEMA, []))

    # Parse table indexed field list.
    indexed_attr_names = parser.Parser.parse_local_secondary_indexes(
        body.get(parser.Props.LOCAL_SECONDARY_INDEXES, []))

    # Prepare table_schema structure.
    table_schema = models.TableSchema(attribute_definitions, key_attrs,
                                      indexed_attr_names)

    # Create the table on behalf of the requested project.
    req.context.tenant = project_id
    table_meta = storage.create_table(req.context, table_name,
                                      table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    links = [
        {parser.Props.HREF: url, parser.Props.REL: parser.Values.SELF},
        {parser.Props.HREF: bookmark,
         parser.Props.REL: parser.Values.BOOKMARK}
    ]
    table_description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: 0,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: links
    }

    if table_meta.schema.index_def_map:
        table_description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: table_description}
def create_table(self, req, body, project_id):
    """Validate a CreateTable request body, create the table, and build
    the ``table_description`` response.

    :param req: request object; ``req.context`` carries the auth context
    :param body: parsed JSON payload; consumed (``pop``) during parsing
    :param project_id: tenant/project the table belongs to
    :returns: dict with the ``table_description`` response section
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Parse table attributes.
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None)
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json)

        # Parse table key schema.
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # Parse table indexed field list (optional).
        lsi_defs_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES,
                                 None)
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json)
        else:
            index_def_map = {}

        # Anything left in the body was not consumed above -> reject.
        validation.validate_unexpected_props(body, "body")

        # Prepare table_schema structure.
        table_schema = models.TableSchema(attribute_definitions,
                                          key_attrs, index_def_map)

    table_meta = storage.create_table(req.context, table_name,
                                      table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    links = [
        {parser.Props.HREF: url, parser.Props.REL: parser.Values.SELF},
        {parser.Props.HREF: bookmark,
         parser.Props.REL: parser.Values.BOOKMARK}
    ]
    table_description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: links
    }

    if table_meta.schema.index_def_map:
        table_description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: table_description}
def __call__(self):
    """Handle a DynamoDB-API CreateTable action.

    Parsing/validation errors surface as ``AWSValidationException``; a
    duplicate table maps to ``AWSDuplicateTableError``; any other storage
    failure becomes a generic ``AWSErrorResponseException``.

    Bug fix: the ``except AWSErrorResponseException as e: raise e``
    clause rewrote the exception's traceback; a bare ``raise`` re-raises
    the original exception with its traceback intact.
    """
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse table attributes
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            self.action_params.get(parser.Props.ATTRIBUTE_DEFINITIONS, {})
        )

        # parse table key schema
        key_attrs = parser.Parser.parse_key_schema(
            self.action_params.get(parser.Props.KEY_SCHEMA, [])
        )

        # parse table indexed field list
        indexed_def_map = parser.Parser.parse_local_secondary_indexes(
            self.action_params.get(
                parser.Props.LOCAL_SECONDARY_INDEXES, [])
        )

        # prepare table_schema structure
        table_schema = models.TableSchema(
            attribute_definitions, key_attrs, indexed_def_map
        )
    except Exception:
        # Any malformed input is reported as a validation failure.
        raise AWSValidationException()

    try:
        # creating table
        table_meta = storage.create_table(
            self.context, table_name, table_schema
        )

        result = {
            parser.Props.TABLE_DESCRIPTION: {
                parser.Props.ATTRIBUTE_DEFINITIONS: (
                    parser.Parser.format_attribute_definitions(
                        table_meta.schema.attribute_type_map
                    )
                ),
                parser.Props.CREATION_DATE_TIME: 0,
                parser.Props.ITEM_COUNT: 0,
                parser.Props.KEY_SCHEMA: (
                    parser.Parser.format_key_schema(
                        table_meta.schema.key_attributes
                    )
                ),
                parser.Props.PROVISIONED_THROUGHPUT: (
                    parser.Values.PROVISIONED_THROUGHPUT_DUMMY
                ),
                parser.Props.TABLE_NAME: table_name,
                parser.Props.TABLE_STATUS: (
                    parser.Parser.format_table_status(table_meta.status)
                ),
                parser.Props.TABLE_SIZE_BYTES: 0
            }
        }

        if table_meta.schema.index_def_map:
            table_def = result[parser.Props.TABLE_DESCRIPTION]
            table_def[parser.Props.LOCAL_SECONDARY_INDEXES] = (
                parser.Parser.format_local_secondary_indexes(
                    table_meta.schema.key_attributes[0],
                    table_meta.schema.index_def_map
                )
            )
        return result
    except TableAlreadyExistsException:
        raise AWSDuplicateTableError(table_name)
    except AWSErrorResponseException:
        # Re-raise as-is; bare `raise` keeps the original traceback.
        raise
    except Exception:
        raise AWSErrorResponseException()
def test_create_table_no_sec_indexes(self, mock_create_table):
    """POST create-table without LSIs: response JSON matches expectation."""
    mock_create_table.return_value = models.TableMeta(
        models.TableSchema(
            attribute_type_map={
                "ForumName": models.ATTRIBUTE_TYPE_STRING,
                "Subject": models.ATTRIBUTE_TYPE_STRING,
                "LastPostDateTime": models.ATTRIBUTE_TYPE_STRING
            },
            key_attributes=["ForumName", "Subject"]),
        models.TableMeta.TABLE_STATUS_ACTIVE)

    body = """
        {
            "attribute_definitions": [
                {"attribute_name": "ForumName", "attribute_type": "S"},
                {"attribute_name": "Subject", "attribute_type": "S"},
                {"attribute_name": "LastPostDateTime",
                 "attribute_type": "S"}
            ],
            "table_name": "Thread",
            "key_schema": [
                {"attribute_name": "ForumName", "key_type": "HASH"},
                {"attribute_name": "Subject", "key_type": "RANGE"}
            ]
        }
    """

    expected_response = {
        'table_description': {
            'attribute_definitions': [
                {'attribute_name': 'Subject', 'attribute_type': 'S'},
                {'attribute_name': 'LastPostDateTime',
                 'attribute_type': 'S'},
                {'attribute_name': 'ForumName', 'attribute_type': 'S'}
            ],
            'creation_date_time': 0,
            'item_count': 0,
            'key_schema': [
                {'attribute_name': 'ForumName', 'key_type': 'HASH'},
                {'attribute_name': 'Subject', 'key_type': 'RANGE'}
            ],
            'table_name': 'Thread',
            'table_size_bytes': 0,
            'table_status': 'ACTIVE',
            'links': [
                {'href': self.table_url, 'rel': 'self'},
                {'href': self.table_url, 'rel': 'bookmark'}
            ]
        }
    }

    conn = httplib.HTTPConnection('localhost:8080')
    conn.request("POST", self.url, headers=self.headers, body=body)
    response = conn.getresponse()

    self.assertTrue(mock_create_table.called)

    response_payload = json.loads(response.read())
    self.assertEqual(expected_response, response_payload)
def test_create_table_duplicate(self):
    """Second CreateTable for the same name raises ResourceInUseException.

    Bug fix: ``self.fail()`` previously sat inside the ``try`` body, so
    the AssertionError it raised (when no exception occurred) was caught
    by the trailing broad ``except Exception`` clause and re-failed with
    its message lost. The success path now lives in ``else:``, and an
    unexpected exception type propagates with its real traceback instead
    of being masked by a bare ``self.fail()``.
    """
    self.storage_mocker.StubOutWithMock(storage, 'create_table')
    storage.create_table(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()
    ).AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            models.TableSchema(
                {
                    'hash': models.AttributeType('N'),
                    'range': models.AttributeType('S'),
                    'indexed_field': models.AttributeType('S')
                },
                ['hash', 'range'],
                {
                    "index_name": models.IndexDefinition('hash',
                                                         'indexed_field')
                }
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )
    storage.create_table(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()
    ).AndRaise(exception.TableAlreadyExistsException)
    self.storage_mocker.ReplayAll()

    def create_test_table():
        # Issue an identical CreateTable request each time (deduplicates
        # the two copy-pasted call sites of the original test).
        return ddb_table.Table.create(
            "test",
            schema=[
                fields.HashKey('hash', data_type=schema_types.NUMBER),
                fields.RangeKey('range', data_type=schema_types.STRING)
            ],
            throughput={
                'read': 20,
                'write': 10,
            },
            indexes=[
                fields.KeysOnlyIndex(
                    'index_name',
                    parts=[
                        fields.HashKey('hash',
                                       data_type=schema_types.NUMBER),
                        fields.RangeKey('indexed_field',
                                        data_type=schema_types.STRING)
                    ]
                )
            ],
            connection=self.DYNAMODB_CON
        )

    create_test_table()

    try:
        create_test_table()
    except boto_exc.JSONResponseError as e:
        self.assertEqual('ResourceInUseException', e.error_code)
        self.assertEqual('Table already exists: test', e.body['message'])
    else:
        self.fail()
    self.storage_mocker.VerifyAll()
def test_create_table(self, mock_create_table):
    """POST create-table with one LSI: full response JSON matches."""
    mock_create_table.return_value = models.TableMeta(
        '00000000-0000-0000-0000-000000000000',
        models.TableSchema(
            attribute_type_map={
                "ForumName": models.AttributeType('S'),
                "Subject": models.AttributeType('S'),
                "LastPostDateTime": models.AttributeType('S')
            },
            key_attributes=["ForumName", "Subject"],
            index_def_map={
                "LastPostIndex": models.IndexDefinition(
                    "ForumName", "LastPostDateTime")
            }),
        models.TableMeta.TABLE_STATUS_ACTIVE,
        123)

    body = """
        {
            "attribute_definitions": [
                {"attribute_name": "ForumName", "attribute_type": "S"},
                {"attribute_name": "Subject", "attribute_type": "S"},
                {"attribute_name": "LastPostDateTime",
                 "attribute_type": "S"}
            ],
            "table_name": "Thread",
            "key_schema": [
                {"attribute_name": "ForumName", "key_type": "HASH"},
                {"attribute_name": "Subject", "key_type": "RANGE"}
            ],
            "local_secondary_indexes": [
                {
                    "index_name": "LastPostIndex",
                    "key_schema": [
                        {"attribute_name": "ForumName",
                         "key_type": "HASH"},
                        {"attribute_name": "LastPostDateTime",
                         "key_type": "RANGE"}
                    ],
                    "projection": {"projection_type": "KEYS_ONLY"}
                }
            ]
        }
    """

    expected_response = {
        'table_description': {
            'attribute_definitions': [
                {'attribute_name': 'Subject', 'attribute_type': 'S'},
                {'attribute_name': 'LastPostDateTime',
                 'attribute_type': 'S'},
                {'attribute_name': 'ForumName', 'attribute_type': 'S'}
            ],
            'creation_date_time': 123,
            'item_count': 0,
            'key_schema': [
                {'attribute_name': 'ForumName', 'key_type': 'HASH'},
                {'attribute_name': 'Subject', 'key_type': 'RANGE'}
            ],
            'local_secondary_indexes': [{
                'index_name': 'LastPostIndex',
                'index_size_bytes': 0,
                'item_count': 0,
                'key_schema': [
                    {'attribute_name': 'ForumName', 'key_type': 'HASH'},
                    {'attribute_name': 'LastPostDateTime',
                     'key_type': 'RANGE'}
                ],
                'projection': {'projection_type': 'ALL'}
            }],
            'table_id': '00000000-0000-0000-0000-000000000000',
            'table_name': 'Thread',
            'table_size_bytes': 0,
            'table_status': 'ACTIVE',
            'links': [
                {'href': self.table_url, 'rel': 'self'},
                {'href': self.table_url, 'rel': 'bookmark'}
            ]
        }
    }

    conn = httplib.HTTPConnection('localhost:8080')
    conn.request("POST", self.url, headers=self.headers, body=body)
    response = conn.getresponse()

    self.assertTrue(mock_create_table.called)

    response_payload = json.loads(response.read())
    self.assertEqual(expected_response, response_payload)
def test_describe_table(self, mock_describe_table):
    """GET table description: response JSON matches the mocked meta."""
    attr_map = {
        'ForumName': models.AttributeType('S'),
        'Subject': models.AttributeType('S'),
        'LastPostDateTime': models.AttributeType('S')
    }
    index_map = {
        'LastPostIndex': models.IndexDefinition('ForumName',
                                                'LastPostDateTime')
    }
    mock_describe_table.return_value = models.TableMeta(
        '00000000-0000-0000-0000-000000000000',
        models.TableSchema(attr_map, ['ForumName', 'Subject'], index_map),
        models.TableMeta.TABLE_STATUS_ACTIVE,
        123)

    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    url = '/v1/data/default_tenant/tables/Thread'
    table_url = ('http://localhost:8080/v1/data/default_tenant'
                 '/tables/Thread')

    expected_response = {
        'table': {
            'attribute_definitions': [
                {'attribute_name': 'Subject', 'attribute_type': 'S'},
                {'attribute_name': 'LastPostDateTime',
                 'attribute_type': 'S'},
                {'attribute_name': 'ForumName', 'attribute_type': 'S'}
            ],
            'creation_date_time': 123,
            'item_count': 0,
            'key_schema': [
                {'attribute_name': 'ForumName', 'key_type': 'HASH'},
                {'attribute_name': 'Subject', 'key_type': 'RANGE'}
            ],
            'local_secondary_indexes': [{
                'index_name': 'LastPostIndex',
                'index_size_bytes': 0,
                'item_count': 0,
                'key_schema': [
                    {'attribute_name': 'ForumName', 'key_type': 'HASH'},
                    {'attribute_name': 'LastPostDateTime',
                     'key_type': 'RANGE'}
                ],
                'projection': {'projection_type': 'ALL'}
            }],
            'table_id': '00000000-0000-0000-0000-000000000000',
            'table_name': 'Thread',
            'table_size_bytes': 0,
            'table_status': 'ACTIVE',
            'links': [
                {'href': table_url, 'rel': 'self'},
                {'href': table_url, 'rel': 'bookmark'}
            ]
        }
    }

    conn = httplib.HTTPConnection('localhost:8080')
    conn.request("GET", url, headers=headers)
    response = conn.getresponse()

    self.assertTrue(mock_describe_table.called)

    response_payload = json.loads(response.read())
    self.assertEqual(expected_response, response_payload)
def create_table(self, req, body, project_id):
    """Validate a CreateTable body (including key-schema uniqueness
    across the table and its local secondary indices), create the table,
    and build the ``table_description`` response.

    :param req: request object; ``req.context`` carries the auth context
    :param body: parsed JSON payload; consumed (``pop``) during parsing
    :param project_id: tenant/project the table belongs to
    :returns: dict with the ``table_description`` response section
    :raises exception.ValidationError: on index/key-schema conflicts
    """
    with probe.Probe(__name__ + '.validate'):
        validation.validate_object(body, "body")

        table_name = body.pop(parser.Props.TABLE_NAME, None)
        validation.validate_table_name(table_name)

        # Parse table attributes.
        attribute_definitions_json = body.pop(
            parser.Props.ATTRIBUTE_DEFINITIONS, None)
        validation.validate_list_of_objects(
            attribute_definitions_json, parser.Props.ATTRIBUTE_DEFINITIONS)
        attribute_definitions = parser.Parser.parse_attribute_definitions(
            attribute_definitions_json)

        # Parse table key schema.
        key_attrs_json = body.pop(parser.Props.KEY_SCHEMA, None)
        validation.validate_list(key_attrs_json, parser.Props.KEY_SCHEMA)
        key_attrs = parser.Parser.parse_key_schema(key_attrs_json)

        # Parse table indexed field list (optional).
        lsi_defs_json = body.pop(parser.Props.LOCAL_SECONDARY_INDEXES,
                                 None)
        if lsi_defs_json:
            validation.validate_list_of_objects(
                lsi_defs_json, parser.Props.LOCAL_SECONDARY_INDEXES)
            index_def_map = parser.Parser.parse_local_secondary_indexes(
                lsi_defs_json)
        else:
            index_def_map = {}

        # Validate the uniqueness of the table's and its indices' key
        # schema: the table range key plus every index's alternative
        # range key must all be distinct.
        range_keys = [key_attrs[1]] if len(key_attrs) > 1 else []
        if not range_keys and index_def_map:
            # Table has a hash-only primary key; indices are not allowed.
            raise exception.ValidationError(
                _("Table without range key in primary key schema "
                  "can not have indices"))
        range_keys.extend(index.alt_range_key_attr
                          for index in index_def_map.values())
        try:
            validation.validate_set(range_keys, "key_schema")
        except exception.ValidationError:
            raise exception.ValidationError(
                _("Table and its indices must have unique key schema"))

        # Anything left in the body was not consumed above -> reject.
        validation.validate_unexpected_props(body, "body")

        # Prepare table_schema structure.
        table_schema = models.TableSchema(attribute_definitions,
                                          key_attrs, index_def_map)

    table_meta = storage.create_table(req.context, table_name,
                                      table_schema)

    url = req.path_url + "/" + table_name
    bookmark = req.path_url + "/" + table_name

    links = [
        {parser.Props.HREF: url, parser.Props.REL: parser.Values.SELF},
        {parser.Props.HREF: bookmark,
         parser.Props.REL: parser.Values.BOOKMARK}
    ]
    table_description = {
        parser.Props.ATTRIBUTE_DEFINITIONS: (
            parser.Parser.format_attribute_definitions(
                table_meta.schema.attribute_type_map)),
        parser.Props.CREATION_DATE_TIME: table_meta.creation_date_time,
        parser.Props.ITEM_COUNT: 0,
        parser.Props.KEY_SCHEMA: (
            parser.Parser.format_key_schema(
                table_meta.schema.key_attributes)),
        parser.Props.TABLE_ID: str(table_meta.id),
        parser.Props.TABLE_NAME: table_name,
        parser.Props.TABLE_STATUS: (
            parser.Parser.format_table_status(table_meta.status)),
        parser.Props.TABLE_SIZE_BYTES: 0,
        parser.Props.LINKS: links
    }

    if table_meta.schema.index_def_map:
        table_description[parser.Props.LOCAL_SECONDARY_INDEXES] = (
            parser.Parser.format_local_secondary_indexes(
                table_meta.schema.key_attributes[0],
                table_meta.schema.index_def_map))

    return {parser.Props.TABLE_DESCRIPTION: table_description}