def test_update_item(self):
    """partial_save() on a fetched boto Item must call storage.update_item."""
    # Stub get_item so table.get_item() below returns a known row.
    self.storage_mocker.StubOutWithMock(storage, 'get_item')
    hash_key = "4.5621201231232132132132132132132142354E126"
    range_key = "range"
    storage.get_item(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
        select_type=mox.IgnoreArg(), consistent=mox.IgnoreArg()
    ).AndReturn(
        models.SelectResult(
            items=[
                {
                    "hash_key": models.AttributeValue('N', hash_key),
                    "range_key": models.AttributeValue('S', range_key),
                    "attr_value": models.AttributeValue('S', 'val')
                }
            ]
        )
    )

    # partial_save() reads the table schema to build the key map.
    self.storage_mocker.StubOutWithMock(storage, 'describe_table')
    storage.describe_table(mox.IgnoreArg(), 'test_table').AndReturn(
        models.TableMeta(
            '00000000-0000-0000-0000-000000000000',
            models.TableSchema(
                {
                    'hash_key': models.AttributeType('N'),
                    'range_key': models.AttributeType('S')
                },
                ['hash_key', 'range_key'],
            ),
            models.TableMeta.TABLE_STATUS_ACTIVE,
            None
        )
    )

    # The save itself must be routed to storage.update_item and report
    # success via the (True, None) tuple.
    self.storage_mocker.StubOutWithMock(storage, 'update_item')
    storage.update_item(
        mox.IgnoreArg(), mox.IgnoreArg(),
        key_attribute_map=mox.IgnoreArg(),
        attribute_action_map=mox.IgnoreArg(),
        expected_condition_map=mox.IgnoreArg()).AndReturn((True, None))
    self.storage_mocker.ReplayAll()

    table = ddb_table.Table(
        'test_table',
        connection=self.DYNAMODB_CON
    )

    item = table.get_item(consistent=False, hash_key=1, range_key="range")

    item['attr_value'] = 'updated'

    item.partial_save()

    self.storage_mocker.VerifyAll()
def test_select_item(self):
    """table.query() must surface numbers, strings, blobs and blob sets."""
    self.storage_mocker.StubOutWithMock(storage, 'select_item')
    blob_data1 = bytes(bytearray([1, 2, 3, 4, 5]))
    blob_data2 = bytes(bytearray([5, 4, 3, 2, 1]))

    hash_key = "4.5621201231232132132132132132132142354E126"
    range_key = "range"

    storage.select_item(
        IgnoreArg(), IgnoreArg(), IgnoreArg(),
        select_type=IgnoreArg(), index_name=IgnoreArg(), limit=IgnoreArg(),
        exclusive_start_key=IgnoreArg(), consistent=IgnoreArg(),
        order_type=IgnoreArg(),
    ).AndReturn(
        models.SelectResult(items=[{
            "hash_key": models.AttributeValue(models.ATTRIBUTE_TYPE_NUMBER,
                                              decimal.Decimal(hash_key)),
            "range_key": models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                               range_key),
            "value_blob": models.AttributeValue(models.ATTRIBUTE_TYPE_BLOB,
                                                blob_data1),
            "value_blob_set": models.AttributeValue(
                models.ATTRIBUTE_TYPE_BLOB_SET,
                set([blob_data1, blob_data2]))
        }]))

    self.storage_mocker.ReplayAll()

    table = Table('test_table', connection=self.DYNAMODB_CON)

    items = list(table.query(consistent=False, hash_key__eq=1))

    # Blob values must come back wrapped in boto's Binary type.
    expected_item = {
        "hash_key": decimal.Decimal(hash_key),
        "range_key": range_key,
        "value_blob": types.Binary(blob_data1),
        "value_blob_set": set([types.Binary(blob_data1),
                               types.Binary(blob_data2)])
    }

    self.assertEqual(len(items), 1)
    self.assertDictEqual(expected_item, dict(items[0].items()))
    self.storage_mocker.VerifyAll()
def test_get_item(self):
    """table.get_item() must decode storage values into boto item types."""
    self.storage_mocker.StubOutWithMock(storage, 'get_item')
    blob_data1 = bytes(bytearray([1, 2, 3, 4, 5]))
    blob_data2 = bytes(bytearray([5, 4, 3, 2, 1]))

    hash_key = "4.5621201231232132132132132132132142354E126"
    range_key = "range"

    storage.get_item(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
        select_type=mox.IgnoreArg(), consistent=mox.IgnoreArg()
    ).AndReturn(
        models.SelectResult(
            items=[
                {
                    "hash_key": models.AttributeValue('N', hash_key),
                    "range_key": models.AttributeValue('S', range_key),
                    "value_blob": models.AttributeValue(
                        'B', decoded_value=blob_data1
                    ),
                    "value_blob_set": models.AttributeValue(
                        'BS', decoded_value={blob_data1, blob_data2}
                    )
                }
            ]
        )
    )
    self.storage_mocker.ReplayAll()

    table = ddb_table.Table(
        'test_table',
        connection=self.DYNAMODB_CON
    )

    item = table.get_item(consistent=False, hash_key=1, range_key="range")

    # Blob values must be wrapped in boto's Binary type on the way out.
    expected_item = {
        "hash_key": decimal.Decimal(hash_key),
        "range_key": range_key,
        "value_blob": types.Binary(blob_data1),
        "value_blob_set": set([types.Binary(blob_data1),
                               types.Binary(blob_data2)])
    }

    self.assertDictEqual(expected_item, dict(item.items()))
    self.storage_mocker.VerifyAll()
def test_update_item_add_number(self, mock_select_item):
    """ADD on a numeric attribute must be issued as compare-and-set CQL.

    Every update re-reads the current ViewsCount (mocked to 0..9) and
    must produce an UPDATE ... IF "u_ViewsCount"=<old value> query
    that writes <old value + 1>.
    """
    mock_execute_query = mock.Mock(return_value=[{'[applied]': True}])
    mock_table_schema = models.TableSchema(
        key_attributes=['hash_key', 'range_key'],
        attribute_type_map={'hash_key': None, 'range_key': None,
                            'ViewsCount': None},
        index_def_map=None
    )
    driver = self.get_connection(mock_execute_query, mock_table_schema)

    def make_select_result(i):
        # Fake select result whose single item has ViewsCount == i.
        value = models.AttributeValue('N', i)
        return mock.Mock(items=[{'ViewsCount': value}])

    values = [make_select_result(i) for i in range(10)]
    mock_select_item.side_effect = values

    context = mock.Mock(tenant='fake_tenant')
    table_info = mock.Mock(
        schema=mock_table_schema,
        internal_name='"u_fake_tenant"."u_fake_table"'
    )
    key_attrs = {
        'hash_key': models.AttributeValue('N', 1),
        'range_key': models.AttributeValue('S', 'two')
    }
    attr_actions = {
        'ViewsCount': models.UpdateItemAction(
            models.UpdateItemAction.UPDATE_ACTION_ADD,
            models.AttributeValue('N', 1)
        )
    }

    for i in range(10):
        driver.update_item(context, table_info, key_attrs, attr_actions)

    # i-th call writes i and expects the previous value i-1 in the IF.
    expected_calls = [
        mock.call('UPDATE "u_fake_tenant"."u_fake_table" SET '
                  '"u_ViewsCount"=%d WHERE "u_hash_key"=1 AND '
                  '"u_range_key"=\'two\' '
                  'IF "u_ViewsCount"=%d' % (i, i - 1), consistent=True)
        for i in range(1, 11)
    ]
    self.assertEqual(expected_calls, mock_execute_query.mock_calls)
def parse_typed_attr_value(cls, typed_attr_value_json):
    """Parse a single-entry {type: value} JSON mapping into an AttributeValue.

    :param typed_attr_value_json: dict with exactly one item, e.g. {"S": "x"}
    :returns: models.AttributeValue built from the (type, value) pair
    :raises: ValidationError when the mapping does not have exactly one entry
    """
    if len(typed_attr_value_json) != 1:
        raise exception.ValidationError(
            _("Can't recognize attribute typed value format: '%(attr)s'"),
            attr=json.dumps(typed_attr_value_json))

    # BUGFIX: the original used popitem(), which destructively removed
    # the entry from the caller's dict. Read the single pair instead.
    (attr_type_json, attr_value_json) = next(
        iter(typed_attr_value_json.items()))

    return models.AttributeValue(attr_type_json, attr_value_json)
def test_strset_to_json(self):
    """A string-set AttributeValue serializes to canonical, sorted JSON."""
    expected_json = (
        '{"__model__": "AttributeValue", '
        '"attr_type": {"__model__": "AttributeType", "type": "SS"}, '
        '"value": ["Help", "Update"]}'
    )
    attr_value = models.AttributeValue('SS', ['Update', 'Help'])
    self.assertEqual(expected_json, attr_value.to_json())
def parse_typed_attr_value(cls, typed_attr_value_json):
    """Parse a single-entry {type: value} JSON mapping into an AttributeValue.

    :param typed_attr_value_json: dict with exactly one item, e.g. {"S": "x"}
    :returns: models.AttributeValue built from the (type, value) pair
    :raises: AWSValidationException when there is not exactly one entry
    """
    if len(typed_attr_value_json) != 1:
        raise exception.AWSValidationException(
            "Can't recognize attribute format ['{}']".format(
                json.dumps(typed_attr_value_json)
            )
        )
    # FIX: dict.items()[0] only works on Python 2 (items() is a view on
    # Python 3); next(iter(...)) works on both and avoids building a list.
    (attr_type_json, attr_value_json) = next(
        iter(typed_attr_value_json.items())
    )
    return models.AttributeValue(attr_type_json, attr_value_json)
def test_update_item_delete_set(self, mock_select_item):
    """DELETE of a subset from a string set must CAS against the old set."""
    mock_execute_query = mock.Mock(return_value=None)
    mock_table_schema = TableSchema(
        key_attributes=['hash_key', 'range_key'],
        attribute_type_map={
            'hash_key': None, 'range_key': None, 'Tags': None
        },
        index_def_map=None)
    driver = self.get_connection(mock_execute_query, mock_table_schema)

    # Current stored value of Tags before the delete action.
    value = models.AttributeValue('SS', {"Update", "Help"})
    mock_select_item.return_value = mock.Mock(items=[{'Tags': value}])

    context = mock.Mock(tenant='fake_tenant')
    table_info = mock.Mock(schema=mock_table_schema,
                           internal_name='"u_fake_tenant"."u_fake_table"')
    key_attrs = {
        'hash_key': models.AttributeValue('N', 1),
        'range_key': models.AttributeValue('S', 'two')
    }
    attr_actions = {
        'Tags': models.UpdateItemAction(
            models.UpdateItemAction.UPDATE_ACTION_DELETE,
            models.AttributeValue('SS', {"Update"}))
    }
    driver.update_item(context, table_info, key_attrs, attr_actions)

    # The new set omits 'Update'; the IF clause pins the old full set so
    # concurrent modifications are detected.
    expected_calls = [
        mock.call(
            'UPDATE "u_fake_tenant"."u_fake_table" SET '
            '"u_Tags"={\'Help\'} WHERE "u_hash_key"=1 AND '
            '"u_range_key"=\'two\' '
            'IF "u_Tags"={\'Help\',\'Update\'}', consistent=True)
    ]
    self.assertEqual(expected_calls, mock_execute_query.mock_calls)
def decode_attr_value(cls, dynamodb_attr_type, dynamodb_attr_value):
    """Map a DynamoDB-typed value onto a storage-level AttributeValue.

    Collection types are decoded element-wise into a set; scalars are
    decoded directly.
    """
    storage_type = cls.DYNAMODB_TO_STORAGE_TYPE_MAP[dynamodb_attr_type]
    element_type = storage_type.element_type
    if storage_type.collection_type is None:
        decoded = cls.decode_single_value(element_type, dynamodb_attr_value)
    else:
        # Decode each member of the incoming collection individually.
        decoded = {
            cls.decode_single_value(element_type, item)
            for item in dynamodb_attr_value
        }
    return models.AttributeValue(storage_type, decoded)
def test_execute_write_batch_put_delete_same_item(
        self, mock_repo_get, mock_validate_table_is_active,
        mock_validate_table_schema):
    """A batch holding put and delete for the same key must be rejected."""
    table_info = mock.Mock()
    table_info.schema.key_attributes = ['id', 'range']
    mock_repo_get.return_value = table_info

    context = mock.Mock(tenant='fake_tenant')

    table_name = 'fake_table'
    # Both requests target key (id=1, range='1') — an invalid batch.
    request_map = {
        table_name: [
            models.WriteItemRequest.put({
                'id': models.AttributeValue('N', 1),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1'),
            }),
            models.WriteItemRequest.delete({
                'id': models.AttributeValue('N', 1),
                'range': models.AttributeValue('S', '1')
            })
        ]
    }

    storage_manager = simple_impl.SimpleStorageManager(
        storage_driver.StorageDriver(),
        table_info_repo.TableInfoRepository())

    with self.assertRaises(exception.ValidationError) as raises_cm:
        storage_manager.execute_write_batch(context, request_map)
    exc = raises_cm.exception
    # NOTE(review): asserts against the exception's private
    # _error_string attribute — confirm a public accessor exists.
    self.assertIn("More than one", exc._error_string)
def test_update_item(self, mock_update_item, mock_select_item):
    """POST update_item without RETURN_VALUES must yield an empty body."""
    value = models.AttributeValue('S', '*****@*****.**')
    mock_select_item.return_value = mock.Mock(items=[{
        'LastPostedBy': value
    }])
    mock_update_item.return_value = (True, None)

    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }

    # Issue the request against the locally running test server.
    conn = httplib.HTTPConnection('localhost:8080')
    url = '/v1/default_tenant/data/tables/the_table/update_item'
    body = """
        {
            "key": {
                "ForumName": {
                    "S": "MagnetoDB"
                },
                "Subject": {
                    "S": "How do I delete an item?"
                }
            },
            "attribute_updates": {
                "LastPostedBy": {
                    "value": {
                        "S": "*****@*****.**"
                    },
                    "action": "PUT"
                }
            },
            "expected": {
                "LastPostedBy": {
                    "value": { "S": "*****@*****.**"}
                }
            }
        }
    """
    conn.request("POST", url, headers=headers, body=body)

    response = conn.getresponse()

    self.assertTrue(mock_update_item.called)

    self.assertEqual(200, response.status)

    json_response = response.read()
    response_payload = json.loads(json_response)

    # No RETURN_VALUES requested, so the response payload is empty.
    expected = {}
    self.assertEqual(expected, response_payload)
def parse_attribute_condition(cls, condition_type, condition_args,
                              condition_class=models.IndexedCondition):
    """Translate one DynamoDB comparison into storage-level conditions.

    BETWEEN expands to [ge(low), le(high)]; BEGINS_WITH expands to
    [ge(prefix), le(prefix with its last character incremented)]; any
    other operator passes through as a single condition.

    :raises: ValidationError on a malformed argument list or a
        BEGINS_WITH applied to a numeric argument
    """
    actual_args_count = (len(condition_args)
                        if condition_args is not None else 0)
    if condition_type == Values.BETWEEN:
        if actual_args_count != 2:
            raise exception.ValidationError(
                _("%(type)s condition type requires exactly 2 arguments, "
                  "but %(actual_args_count)s given"),
                type=condition_type,
                actual_args_count=actual_args_count)
        if condition_args[0].attr_type != condition_args[1].attr_type:
            raise exception.ValidationError(
                _("%(type)s condition type requires arguments of the "
                  "same type, but different types given"),
                type=condition_type,
            )
        return [
            condition_class.ge(condition_args[0]),
            condition_class.le(condition_args[1])
        ]
    if condition_type == Values.BEGINS_WITH:
        first_condition = condition_class(
            condition_class.CONDITION_TYPE_GREATER_OR_EQUAL,
            condition_args)
        condition_arg = first_condition.arg
        if condition_arg.is_number:
            # BUGFIX: the two adjacent string fragments previously
            # concatenated to "...not allowed forargument..." — a space
            # was missing between "for" and "argument".
            raise exception.ValidationError(
                _("%(condition_type)s condition type is not allowed for "
                  "argument of the %(argument_type)s type"),
                condition_type=condition_type,
                argument_type=condition_arg.attr_type.type)
        first_value = condition_arg.decoded_value
        # Build the exclusive upper bound: same prefix with the final
        # character incremented by one code point.
        chr_fun = unichr if isinstance(first_value, unicode) else chr
        second_value = first_value[:-1] + chr_fun(ord(first_value[-1]) + 1)
        second_condition = condition_class.le(
            models.AttributeValue(condition_arg.attr_type,
                                  decoded_value=second_value))
        return [first_condition, second_condition]
    return [condition_class(condition_type, condition_args)]
def test_execute_get_batch(self, mock_get_item, mock_repo_get,
                           mock_validate_table_is_active,
                           mock_validate_table_schema):
    """execute_get_batch fans out one get_item call per request."""
    future = futures.Future()
    future.set_result(True)
    mock_get_item.return_value = future

    context = mock.Mock(tenant='fake_tenant')

    table_name = 'fake_table'
    request_list = [
        models.GetItemRequest(
            table_name,
            {
                'id': models.AttributeValue('N', 1),
                'str': models.AttributeValue('S', 'str1'),
            }, None, True),
        models.GetItemRequest(
            table_name,
            {
                'id': models.AttributeValue('N', 1),
                'str': models.AttributeValue('S', 'str2'),
            }, None, True)
    ]

    expected_get = [
        mock.call(context, req.table_name, req.key_attribute_map,
                  req.attributes_to_get, req.consistent)
        for req in request_list
    ]

    storage_manager = simple_impl.SimpleStorageManager(
        None, table_info_repo.TableInfoRepository())

    result, unprocessed_items = storage_manager.execute_get_batch(
        context, request_list)

    # BUGFIX: the original called mock_get_item.has_calls(...), which is
    # an auto-created Mock attribute and verifies nothing. The real
    # verification method is assert_has_calls.
    mock_get_item.assert_has_calls(expected_get)
    self.assertEqual(unprocessed_items, [])
def test_execute_write_batch(self, mock_connect, mock_execute_query,
                             mock_put_item, mock_delete_item):
    """Cassandra write batch dispatches puts/deletes per request type."""
    mock_execute_query.return_value = None
    mock_put_item.return_value = None
    mock_delete_item.return_value = None
    conn = cassandra_impl.CassandraStorageImpl()

    context = mock.Mock(tenant='fake_tenant')

    table_name = 'fake_table'
    request_list = [
        models.PutItemRequest(
            table_name,
            {
                'id': models.AttributeValue(models.ATTRIBUTE_TYPE_NUMBER,
                                            1),
                'range': models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                               '1'),
                'str': models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                             'str1'),
            }),
        models.PutItemRequest(
            table_name,
            {
                'id': models.AttributeValue(models.ATTRIBUTE_TYPE_NUMBER,
                                            2),
                'range': models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                               '1'),
                'str': models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                             'str1'),
            }),
        models.DeleteItemRequest(
            table_name,
            {
                'id': models.AttributeValue.number(3),
                'range': models.AttributeValue.str('3')
            })
    ]

    # Puts must reach put_item in order; the delete must reach delete_item.
    expected_put = [
        mock.call(context, request_list[0]),
        mock.call(context, request_list[1]),
    ]
    expected_delete = [mock.call(context, request_list[2])]

    unprocessed_items = conn.execute_write_batch(context, request_list)

    self.assertEqual(expected_put, mock_put_item.call_args_list)
    self.assertEqual(expected_delete, mock_delete_item.call_args_list)
    self.assertEqual(unprocessed_items, [])
def parse_attribute_condition(
        cls, condition_type, condition_args,
        condition_class=models.IndexedCondition):
    """Translate one DynamoDB comparison into storage-level conditions.

    BETWEEN expands to [ge(low), le(high)]; BEGINS_WITH expands to
    [ge(prefix), le(prefix with its last character incremented)]; any
    other operator passes through as a single condition.

    :raises: AWSValidationException on a malformed BETWEEN argument list
    """
    actual_args_count = (
        len(condition_args) if condition_args is not None else 0
    )
    if condition_type == Values.BETWEEN:
        if actual_args_count != 2:
            raise exception.AWSValidationException(
                "{} condition type requires exactly 2 arguments, "
                "but {} given".format(condition_type, actual_args_count),
            )
        if condition_args[0].attr_type != condition_args[1].attr_type:
            raise exception.AWSValidationException(
                "{} condition type requires arguments of the "
                "same type, but different types given".format(
                    condition_type
                ),
            )
        return [
            condition_class.ge(condition_args[0]),
            condition_class.le(condition_args[1])
        ]
    if condition_type == Values.BEGINS_WITH:
        first = condition_class(
            condition_class.CONDITION_TYPE_GREATER_OR_EQUAL,
            condition_args
        )
        condition_arg = first.arg
        # Upper bound: same prefix with the final character incremented.
        # NOTE(review): assumes a string-typed argument — unlike the
        # sibling parser there is no explicit guard rejecting numbers;
        # confirm upstream validation covers that case.
        second = condition_class.le(
            models.AttributeValue(
                condition_arg.attr_type,
                decoded_value=(
                    condition_arg.decoded_value[:-1] +
                    chr(ord(condition_arg.decoded_value[-1]) + 1)
                )
            )
        )
        return [first, second]
    return [condition_class(condition_type, condition_args)]
def test_execute_write_batch(self, mock_put_item, mock_delete_item,
                             mock_repo_get, mock_validate_table_is_active,
                             mock_validate_table_schema, mock_batch_write):
    """When native batch_write is unavailable, fall back to item ops."""
    future = Future()
    future.set_result(True)
    mock_put_item.return_value = future
    mock_delete_item.return_value = future
    # Force the per-item fallback path in the storage manager.
    mock_batch_write.side_effect = NotImplementedError()

    table_info = mock.Mock()
    table_info.schema.key_attributes = ['id', 'range']
    mock_repo_get.return_value = table_info

    context = mock.Mock(tenant='fake_tenant')

    table_name = 'fake_table'
    request_map = {
        table_name: [
            WriteItemRequest.put({
                'id': models.AttributeValue('N', 1),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1'),
            }),
            WriteItemRequest.put({
                'id': models.AttributeValue('N', 2),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1')
            }),
            WriteItemRequest.delete({
                'id': models.AttributeValue('N', 3),
                'range': models.AttributeValue('S', '3')
            })
        ]
    }

    # Each put/delete must be forwarded with its full attribute map.
    expected_put = [
        mock.call(
            context, table_info,
            {
                'id': models.AttributeValue('N', 1),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1')
            }),
        mock.call(
            context, table_info,
            {
                'id': models.AttributeValue('N', 2),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1')
            }),
    ]
    expected_delete = [
        mock.call(
            context, table_info,
            {
                'id': models.AttributeValue('N', 3),
                'range': models.AttributeValue('S', '3')
            })
    ]

    storage_manager = SimpleStorageManager(StorageDriver(),
                                           TableInfoRepository())

    unprocessed_items = storage_manager.execute_write_batch(
        context, request_map)

    self.assertEqual(expected_put, mock_put_item.call_args_list)
    self.assertEqual(expected_delete, mock_delete_item.call_args_list)
    self.assertEqual(unprocessed_items, {})
def make_select_result(i):
    """Build a fake select result whose single item has ViewsCount == i."""
    views_count = models.AttributeValue('N', i)
    fake_result = mock.Mock(items=[{'ViewsCount': views_count}])
    return fake_result
def __call__(self):
    """Handle a DynamoDB UpdateItem request and format its response.

    Returned attributes for the *_OLD / *_NEW RETURN_VALUES variants are
    obtained via an extra select before / after the update.
    """
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse expected item conditions
        expected_item_conditions = (
            parser.Parser.parse_expected_attribute_conditions(
                self.action_params.get(parser.Props.EXPECTED, {})))

        # parse attribute updates
        attribute_updates = parser.Parser.parse_attribute_updates(
            self.action_params.get(parser.Props.ATTRIBUTE_UPDATES, {}))

        # parse key
        key_attributes = parser.Parser.parse_item_attributes(
            self.action_params[parser.Props.KEY])

        # parse return_values param
        return_values = self.action_params.get(
            parser.Props.RETURN_VALUES, parser.Values.RETURN_VALUES_NONE)

        # parse return_item_collection_metrics
        return_item_collection_metrics = self.action_params.get(
            parser.Props.RETURN_ITEM_COLLECTION_METRICS,
            parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE)

        return_consumed_capacity = self.action_params.get(
            parser.Props.RETURN_CONSUMED_CAPACITY,
            parser.Values.RETURN_CONSUMED_CAPACITY_NONE)

        select_result = None

        # Exact-match conditions used to (re)read the item for the
        # RETURN_VALUES variants.
        indexed_condition_map_for_select = {
            name: models.IndexedCondition.eq(value)
            for name, value in key_attributes.iteritems()
        }
    except Exception:
        # Any parsing failure is reported as a validation error.
        raise exception.ValidationException()

    try:
        if return_values in (parser.Values.RETURN_VALUES_UPDATED_OLD,
                             parser.Values.RETURN_VALUES_ALL_OLD):
            # Snapshot the item before applying the update.
            select_result = storage.select_item(
                self.context, table_name, indexed_condition_map_for_select)

        # update item
        result = storage.update_item(
            self.context, table_name,
            key_attribute_map=key_attributes,
            attribute_action_map=attribute_updates,
            expected_condition_map=expected_item_conditions)

        if not result:
            raise exception.AWSErrorResponseException()

        if return_values in (parser.Values.RETURN_VALUES_UPDATED_NEW,
                             parser.Values.RETURN_VALUES_ALL_NEW):
            # Re-read the item after the update.
            select_result = storage.select_item(
                self.context, table_name, indexed_condition_map_for_select)

        # format response
        response = {}

        if return_values != parser.Values.RETURN_VALUES_NONE:
            response[parser.Props.ATTRIBUTES] = (
                parser.Parser.format_item_attributes(
                    select_result.items[0]))

        if (return_item_collection_metrics !=
                parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE):
            # NOTE(review): metrics are stubbed with a placeholder key
            # and a zero size estimate.
            response[parser.Props.ITEM_COLLECTION_METRICS] = {
                parser.Props.ITEM_COLLECTION_KEY: {
                    parser.Parser.format_item_attributes(
                        models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                              "key"))
                },
                parser.Props.SIZE_ESTIMATED_RANGE_GB: [0]
            }

        if (return_consumed_capacity !=
                parser.Values.RETURN_CONSUMED_CAPACITY_NONE):
            response[parser.Props.CONSUMED_CAPACITY] = (
                parser.Parser.format_consumed_capacity(
                    return_consumed_capacity, None))

        return response
    except exception.AWSErrorResponseException as e:
        raise e
    except Exception:
        raise exception.AWSErrorResponseException()
def parse_attribute_conditions(
        cls, attribute_conditions_json):
    """Parse DynamoDB comparison-operator JSON into condition lists.

    Each attribute name maps to a list of storage-level conditions;
    range-style operators (BEGINS_WITH, BETWEEN) expand to two bounds.
    """
    attribute_conditions = {}
    attribute_conditions_json = attribute_conditions_json or {}
    for (attr_name, dynamodb_condition) in (
            attribute_conditions_json.iteritems()):
        dynamodb_condition_type = (
            dynamodb_condition[Props.COMPARISON_OPERATOR]
        )
        # Decode each {"TYPE": value} argument into an AttributeValue.
        condition_args = map(
            lambda attr_value: cls.decode_attr_value(
                *attr_value.items()[0]),
            dynamodb_condition.get(Props.ATTRIBUTE_VALUE_LIST, {})
        )
        if dynamodb_condition_type == Values.EQ:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.IndexedCondition.eq(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.GT:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.IndexedCondition.gt(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.LT:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.IndexedCondition.lt(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.GE:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.IndexedCondition.ge(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.LE:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.IndexedCondition.le(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.BEGINS_WITH:
            assert len(condition_args) == 1
            first = condition_args[0]
            # Upper bound: prefix with its last character incremented.
            # NOTE(review): uses first.type / first.value while sibling
            # parsers use attr_type / decoded_value — confirm the model
            # exposes both attribute names.
            second = models.AttributeValue(
                first.type,
                first.value[:-1] + chr(ord(first.value[-1]) + 1)
            )
            attribute_conditions[attr_name] = [
                models.IndexedCondition.ge(first),
                models.IndexedCondition.lt(second)
            ]
        elif dynamodb_condition_type == Values.BETWEEN:
            assert len(condition_args) == 2
            assert condition_args[0].type == condition_args[1].type
            attribute_conditions[attr_name] = [
                models.IndexedCondition.ge(condition_args[0]),
                models.IndexedCondition.le(condition_args[1])
            ]
        elif dynamodb_condition_type == Values.NE:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.ScanCondition.neq(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.CONTAINS:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.ScanCondition.contains(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.NOT_CONTAINS:
            assert len(condition_args) == 1
            attribute_conditions[attr_name] = [
                models.ScanCondition.not_contains(condition_args[0])
            ]
        elif dynamodb_condition_type == Values.IN:
            # IN takes the whole argument list as a membership set.
            attribute_conditions[attr_name] = [
                models.ScanCondition.in_set(condition_args)
            ]
        elif dynamodb_condition_type == Values.NULL:
            attribute_conditions[attr_name] = [
                models.ExpectedCondition.not_exists()
            ]
        elif dynamodb_condition_type == Values.NOT_NULL:
            attribute_conditions[attr_name] = [
                models.ExpectedCondition.exists()
            ]
    return attribute_conditions
def __call__(self):
    """Handle a DynamoDB PutItem request and format its response."""
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)
        # parse expected item conditions
        expected_item_conditions = (
            parser.Parser.parse_expected_attribute_conditions(
                self.action_params.get(parser.Props.EXPECTED, {})
            )
        )
        # parse item
        item_attributes = parser.Parser.parse_item_attributes(
            self.action_params[parser.Props.ITEM]
        )
        # parse return_values param
        return_values_json = self.action_params.get(
            parser.Props.RETURN_VALUES, parser.Values.RETURN_VALUES_NONE
        )
        return_values = models.InsertReturnValuesType(return_values_json)
        # parse return_item_collection_metrics
        return_item_collection_metrics = self.action_params.get(
            parser.Props.RETURN_ITEM_COLLECTION_METRICS,
            parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE
        )
        return_consumed_capacity = self.action_params.get(
            parser.Props.RETURN_CONSUMED_CAPACITY,
            parser.Values.RETURN_CONSUMED_CAPACITY_NONE
        )
    except Exception:
        # Any parsing failure is reported as a validation error.
        raise AWSValidationException()
    try:
        # put item
        result, old_item = storage.put_item(
            self.context, table_name, item_attributes, return_values,
            if_not_exist=False,
            expected_condition_map=expected_item_conditions
        )
        if not result:
            raise AWSErrorResponseException()

        # format response
        response = {}

        if old_item:
            # NOTE(review): echoes the *new* item_attributes rather than
            # old_item — confirm whether the overwritten attributes were
            # intended here.
            response[parser.Props.ATTRIBUTES] = (
                parser.Parser.format_item_attributes(item_attributes)
            )

        if (return_item_collection_metrics !=
                parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE):
            # NOTE(review): metrics are stubbed with a placeholder key
            # and a zero size estimate.
            response[parser.Props.ITEM_COLLECTION_METRICS] = {
                parser.Props.ITEM_COLLECTION_KEY: {
                    parser.Parser.format_item_attributes(
                        models.AttributeValue("S", "key")
                    )
                },
                parser.Props.SIZE_ESTIMATED_RANGE_GB: [0]
            }

        if (return_consumed_capacity !=
                parser.Values.RETURN_CONSUMED_CAPACITY_NONE):
            response[parser.Props.CONSUMED_CAPACITY] = (
                parser.Parser.format_consumed_capacity(
                    return_consumed_capacity, None
                )
            )

        return response
    except AWSErrorResponseException as e:
        raise e
    except Exception:
        raise AWSErrorResponseException()
def test_scan(self, mock_scan):
    """POST scan must serialize items, last key and counts to JSON."""
    items = [
        {
            'ForumName': models.AttributeValue('S', 'Gerrit workflow'),
            'LastPostDateTime': models.AttributeValue('S', '3/19/14'),
            'Posts': models.AttributeValue('SS', ['Hi', 'Hello'])
        },
        {
            'ForumName': models.AttributeValue('S', 'Testing OS API'),
            'LastPostDateTime': models.AttributeValue('S', '3/18/14'),
            'Posts': models.AttributeValue('SS', ['Opening post'])
        },
    ]

    last_evaluated_key = {
        'ForumName': models.AttributeValue('S', 'Testing OS API'),
        'Subject': models.AttributeValue('S', 'Some subject'),
    }

    mock_scan.return_value = models.ScanResult(
        items=items,
        last_evaluated_key=last_evaluated_key,
        count=2, scanned_count=10)

    headers = {'Content-Type': 'application/json',
               'Accept': 'application/json'}

    conn = httplib.HTTPConnection('localhost:8080')
    url = '/v1/default_tenant/data/tables/Threads/scan'
    body = """
        {
            "attributes_to_get": [
                "ForumName", "LastPostDateTime", "Posts"
            ],
            "exclusive_start_key": {
                "ForumName": {
                    "S": "Another forum"
                }
            },
            "limit": 2,
            "scan_filter": {
                "LastPostDateTime": {
                    "attribute_value_list": [
                        {
                            "S": "3/10/14"
                        }
                    ],
                    "comparison_operator": "GT"
                }
            },
            "segment": 0,
            "select": "SPECIFIC_ATTRIBUTES",
            "total_segments": 1
        }
    """

    # String sets come back sorted in the JSON representation.
    expected_response = {
        "count": 2,
        "items": [
            {
                'ForumName': {'S': 'Gerrit workflow'},
                'LastPostDateTime': {'S': '3/19/14'},
                'Posts': {'SS': ['Hello', 'Hi']}
            },
            {
                'ForumName': {'S': 'Testing OS API'},
                'LastPostDateTime': {'S': '3/18/14'},
                'Posts': {'SS': ['Opening post']}
            }
        ],
        "last_evaluated_key": {
            'ForumName': {'S': 'Testing OS API'},
            'Subject': {'S': 'Some subject'}
        },
        "scanned_count": 10
    }

    conn.request("POST", url, headers=headers, body=body)

    response = conn.getresponse()

    self.assertTrue(mock_scan.called)

    json_response = response.read()
    response_payload = json.loads(json_response)

    self.assertEqual(expected_response, response_payload)
def __call__(self):
    """Handle a DynamoDB DeleteItem request and format its response."""
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse expected item conditions
        expected_item_conditions = (
            parser.Parser.parse_expected_attribute_conditions(
                self.action_params.get(parser.Props.EXPECTED, {})))

        # parse item
        key_attributes = parser.Parser.parse_item_attributes(
            self.action_params[parser.Props.KEY])

        # parse return_values param
        return_values = self.action_params.get(
            parser.Props.RETURN_VALUES, parser.Values.RETURN_VALUES_NONE)

        # parse return_item_collection_metrics
        return_item_collection_metrics = self.action_params.get(
            parser.Props.RETURN_ITEM_COLLECTION_METRICS,
            parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE)

        return_consumed_capacity = self.action_params.get(
            parser.Props.RETURN_CONSUMED_CAPACITY,
            parser.Values.RETURN_CONSUMED_CAPACITY_NONE)
    except Exception:
        # Any parsing failure is reported as a validation error.
        raise AWSValidationException()

    try:
        # put item
        result = storage.delete_item(
            self.context, table_name, key_attributes,
            expected_condition_map=expected_item_conditions)
    except AWSErrorResponseException as e:
        raise e
    except Exception:
        raise AWSErrorResponseException()

    if not result:
        raise AWSErrorResponseException()

    # format response
    response = {}

    try:
        if return_values != parser.Values.RETURN_VALUES_NONE:
            # TODO(dukhlov):
            # It is needed to return all deleted item attributes
            # (currently only the key attributes are echoed back).
            response[parser.Props.ATTRIBUTES] = (
                parser.Parser.format_item_attributes(key_attributes))

        if (return_item_collection_metrics !=
                parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE):
            # NOTE(review): metrics are stubbed with a placeholder key
            # and a zero size estimate.
            response[parser.Props.ITEM_COLLECTION_METRICS] = {
                parser.Props.ITEM_COLLECTION_KEY: {
                    parser.Parser.format_item_attributes(
                        models.AttributeValue(models.ATTRIBUTE_TYPE_STRING,
                                              "key"))
                },
                parser.Props.SIZE_ESTIMATED_RANGE_GB: [0]
            }

        if (return_consumed_capacity !=
                parser.Values.RETURN_CONSUMED_CAPACITY_NONE):
            response[parser.Props.CONSUMED_CAPACITY] = (
                parser.Parser.format_consumed_capacity(
                    return_consumed_capacity, None))

        return response
    except Exception:
        raise exception.AWSErrorResponseException()
def __call__(self):
    """Handle a DynamoDB UpdateItem request and format its response.

    storage.update_item returns (result, old_item); old_item feeds the
    RETURN_VALUES portion of the response.
    """
    try:
        table_name = self.action_params.get(parser.Props.TABLE_NAME, None)

        # parse expected item conditions
        expected_item_conditions = (
            parser.Parser.parse_expected_attribute_conditions(
                self.action_params.get(parser.Props.EXPECTED, {})))

        # parse attribute updates
        attribute_updates = parser.Parser.parse_attribute_updates(
            self.action_params.get(parser.Props.ATTRIBUTE_UPDATES, {}))

        # parse key
        key_attributes = parser.Parser.parse_item_attributes(
            self.action_params[parser.Props.KEY])

        # parse return_values param
        return_values = self.action_params.get(
            parser.Props.RETURN_VALUES, parser.Values.RETURN_VALUES_NONE)

        # parse return_item_collection_metrics
        return_item_collection_metrics = self.action_params.get(
            parser.Props.RETURN_ITEM_COLLECTION_METRICS,
            parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE)

        return_consumed_capacity = self.action_params.get(
            parser.Props.RETURN_CONSUMED_CAPACITY,
            parser.Values.RETURN_CONSUMED_CAPACITY_NONE)
    except Exception:
        # Any parsing failure is reported as a validation error.
        raise exception.AWSValidationException()

    try:
        # update item
        result, old_item = storage.update_item(
            self.context, table_name,
            key_attribute_map=key_attributes,
            attribute_action_map=attribute_updates,
            expected_condition_map=expected_item_conditions)

        if not result:
            raise exception.AWSErrorResponseException()

        # format response
        response = {}

        if return_values != parser.Values.RETURN_VALUES_NONE:
            response[parser.Props.ATTRIBUTES] = (
                parser.Parser.format_item_attributes(old_item))

        if (return_item_collection_metrics !=
                parser.Values.RETURN_ITEM_COLLECTION_METRICS_NONE):
            # NOTE(review): metrics are stubbed with a placeholder key
            # and a zero size estimate.
            response[parser.Props.ITEM_COLLECTION_METRICS] = {
                parser.Props.ITEM_COLLECTION_KEY: {
                    parser.Parser.format_item_attributes(
                        models.AttributeValue("S", "key"))
                },
                parser.Props.SIZE_ESTIMATED_RANGE_GB: [0]
            }

        if (return_consumed_capacity !=
                parser.Values.RETURN_CONSUMED_CAPACITY_NONE):
            response[parser.Props.CONSUMED_CAPACITY] = (
                parser.Parser.format_consumed_capacity(
                    return_consumed_capacity, None))

        return response
    except exception.AWSErrorResponseException as e:
        raise e
    except Exception:
        raise exception.AWSErrorResponseException()
def test_notify_batch_write(self, mock_put_item, mock_delete_item,
                            mock_repo_get, mock_validate_table_is_active,
                            mock_validate_table_schema, mock_batch_write):
    """Batch write emits BATCHWRITE start/end notifications in order."""
    self.cleanup_test_notifier()
    future = Future()
    future.set_result(True)
    mock_put_item.return_value = future
    mock_delete_item.return_value = future

    table_info = mock.Mock()
    table_info.schema.key_attributes = ['id', 'range']
    mock_repo_get.return_value = table_info

    # Force the per-item fallback path in the storage manager.
    mock_batch_write.side_effect = NotImplementedError()

    context = mock.Mock(tenant='fake_tenant')

    table_name = 'fake_table'
    request_map = {
        table_name: [
            WriteItemRequest.put({
                'id': models.AttributeValue('N', 1),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1'),
            }),
            WriteItemRequest.put({
                'id': models.AttributeValue('N', 2),
                'range': models.AttributeValue('S', '1'),
                'str': models.AttributeValue('S', 'str1')
            }),
            WriteItemRequest.delete({
                'id': models.AttributeValue('N', 3),
                'range': models.AttributeValue('S', '3')
            })
        ]
    }

    storage_manager = SimpleStorageManager(StorageDriver(),
                                           TableInfoRepository())

    storage_manager.execute_write_batch(context, request_map)

    # check notification queue
    self.assertEqual(len(self.get_notifications()), 2)
    start_event = self.get_notifications()[0]
    end_event = self.get_notifications()[1]

    self.assertEqual(start_event['priority'], 'INFO')
    self.assertEqual(start_event['event_type'],
                     notifier.EVENT_TYPE_DATA_BATCHWRITE_START)
    self.assertEqual(len(start_event['payload']), len(request_map))

    self.assertEqual(end_event['priority'], 'INFO')
    self.assertEqual(end_event['event_type'],
                     notifier.EVENT_TYPE_DATA_BATCHWRITE_END)
    self.assertEqual(len(end_event['payload']['write_request_map']),
                     len(request_map))
    self.assertEqual(len(end_event['payload']['unprocessed_items']), 0)

    # The start notification must be timestamped before the end one.
    time_start = datetime.datetime.strptime(start_event['timestamp'],
                                            DATETIMEFORMAT)
    time_end = datetime.datetime.strptime(end_event['timestamp'],
                                          DATETIMEFORMAT)
    self.assertTrue(time_start < time_end,
                    "start event is later than end event")
def test_query(self, mock_query):
    """POST query must serialize items and last evaluated key to JSON."""
    items = [
        {
            'ForumName': models.AttributeValue('S', 'Testing OS API'),
            'LastPostDateTime': models.AttributeValue('S', '3/18/14'),
            'Posts': models.AttributeValue('SS', ['Opening post'])
        },
        {
            'ForumName': models.AttributeValue('S', 'Testing OS API'),
            'LastPostDateTime': models.AttributeValue('S', '3/19/14'),
            'Posts': models.AttributeValue('SS', ['Hi', 'Hello'])
        },
    ]

    last_evaluated_key = {
        'ForumName': models.AttributeValue('S', 'Testing OS API'),
        'LastPostDateTime': models.AttributeValue('S', '3/19/14'),
    }

    mock_query.return_value = models.SelectResult(
        items=items,
        last_evaluated_key=last_evaluated_key,
        count=2)

    headers = {'Content-Type': 'application/json',
               'Accept': 'application/json'}

    conn = httplib.HTTPConnection('localhost:8080')
    url = '/v1/data/default_tenant/tables/Threads/query'
    body = """
        {
            "attributes_to_get": [
                "ForumName", "LastPostDateTime", "Posts"
            ],
            "exclusive_start_key": {
                "ForumName": {
                    "S": "Testing OS API"
                },
                "LastPostDayTime": {
                    "S": "3/1/14"
                }
            },
            "index_name": "LastPostIndex",
            "limit": 2,
            "consistent_read": true,
            "scan_index_forward": true,
            "key_conditions": {
                "ForumName": {
                    "attribute_value_list": [
                        {
                            "S": "Testing OS API"
                        }
                    ],
                    "comparison_operator": "EQ"
                },
                "LastPostDateTime": {
                    "attribute_value_list": [
                        {
                            "S": "3/10/14"
                        }
                    ],
                    "comparison_operator": "GT"
                }
            },
            "select": "SPECIFIC_ATTRIBUTES"
        }
    """

    # String sets come back sorted in the JSON representation.
    expected_response = {
        "count": 2,
        "items": [
            {
                'ForumName': {'S': 'Testing OS API'},
                'LastPostDateTime': {'S': '3/18/14'},
                'Posts': {'SS': ['Opening post']}
            },
            {
                'ForumName': {'S': 'Testing OS API'},
                'LastPostDateTime': {'S': '3/19/14'},
                'Posts': {'SS': ['Hello', 'Hi']}
            }
        ],
        "last_evaluated_key": {
            'ForumName': {'S': 'Testing OS API'},
            'LastPostDateTime': {'S': '3/19/14'}
        }
    }

    conn.request("POST", url, headers=headers, body=body)

    response = conn.getresponse()

    self.assertTrue(mock_query.called)

    json_response = response.read()
    response_payload = json.loads(json_response)

    self.assertEqual(expected_response, response_payload)
def decode_attr_value(cls, dynamodb_attr_type, dynamodb_attr_value):
    """Wrap a raw DynamoDB (type, value) pair in an AttributeValue model."""
    attr_value = models.AttributeValue(dynamodb_attr_type,
                                       dynamodb_attr_value)
    return attr_value