def test(self):
    """format_item wraps strings as {'S': ...}, numbers as {'N': '...'},
    and drops empty-string attributes entirely.

    BUG FIX: the input previously used 'user': '******' (apparently a
    scrubbed literal) while the assertion expected {'S': 'Rhett'}, so the
    test could never pass.  Restored the matching input value.
    """
    item = {'user': 'Rhett', 'value': 123.123, 'empty': ''}
    fmt_item = utils.format_item(item)
    assert_equal(fmt_item['user'], {'S': 'Rhett'})
    assert_equal(fmt_item['value'], {'N': '123.123'})
    # DynamoDB disallows empty string attributes; format_item omits them.
    assert 'empty' not in fmt_item
def do_query(self, args):
    """Emulate DynamoDB's Query operation against the backing SQL table.

    Builds a SELECT constrained by HashKeyValue plus an optional
    RangeKeyCondition / ExclusiveStartKey, pages by ``Limit`` (capped at
    DEFAULT_LIMIT), records consumed read capacity, and returns a dict
    with 'Items', 'Count' and — when another page exists — 'LastEvaluatedKey'.

    :param args: Parsed DynamoDB Query request body.
    :raises NotImplementedError: for request keys or comparison operators
        this emulation does not support.
    """
    table_spec = self.tables[args['TableName']]
    sql_table = table_spec['sql_table']
    key_spec = table_spec['key_spec']

    unsupported_keys = set(args.keys()) - set([
        'Limit', 'TableName', 'HashKeyValue', 'ScanIndexForward',
        'ConsistentRead', 'RangeKeyCondition', 'AttributesToGet',
        'ExclusiveStartKey'
    ])
    if unsupported_keys:
        raise NotImplementedError(unsupported_keys)

    scan_forward = args.get('ScanIndexForward', True)

    expr = sql_table.c.hash_key == utils.parse_value(args['HashKeyValue'])

    if 'RangeKeyCondition' in args:
        if len(key_spec) < 2:
            # Range conditions only make sense for hash+range tables.
            raise NotImplementedError

        operator = args['RangeKeyCondition']['ComparisonOperator']
        if operator == 'BETWEEN':
            start = utils.parse_value(
                args['RangeKeyCondition']['AttributeValueList'][0])
            end = utils.parse_value(
                args['RangeKeyCondition']['AttributeValueList'][1])
            expr = sql.and_(expr, sql_table.c.range_key.between(start, end))
        else:
            range_value = utils.parse_value(
                args['RangeKeyCondition']['AttributeValueList'][0])
            if operator == 'GT':
                expr = sql.and_(expr, sql_table.c.range_key > range_value)
            elif operator == 'LT':
                expr = sql.and_(expr, sql_table.c.range_key < range_value)
            elif operator == 'GE':
                expr = sql.and_(expr, sql_table.c.range_key >= range_value)
            elif operator == 'LE':
                expr = sql.and_(expr, sql_table.c.range_key <= range_value)
            elif operator == 'EQ':
                expr = sql.and_(expr, sql_table.c.range_key == range_value)
            elif operator == 'BEGINS_WITH':
                expr = sql.and_(
                    expr, sql_table.c.range_key.like('%s%%' % range_value))
            else:
                raise NotImplementedError

    if 'ExclusiveStartKey' in args:
        range_key = utils.parse_value(
            args['ExclusiveStartKey']['RangeKeyElement'])
        if scan_forward:
            expr = sql.and_(expr, sql_table.c.range_key > range_key)
        else:
            # BUG FIX: previously referenced self.table.c.range_key, which
            # is not the table being queried (and may not exist at all).
            expr = sql.and_(expr, sql_table.c.range_key < range_key)

    q = sql.select([sql_table], expr)

    if 'Limit' in args:
        limit = min(DEFAULT_LIMIT, args['Limit'])
    else:
        limit = DEFAULT_LIMIT
    # Fetch one extra row: if it exists we stop at `limit` via break,
    # leaving LastEvaluatedKey set so the caller knows to page again.
    q = q.limit(limit + 1)

    if len(key_spec) > 1:
        if scan_forward:
            q = q.order_by(sql_table.c.range_key.asc())
        else:
            q = q.order_by(sql_table.c.range_key.desc())

    capacity_size = 0
    out = {'Items': [], 'Count': 0}
    for res in self.engine.execute(q):
        if out['Count'] == limit:
            break
        # 1 capacity unit per started KB of stored content
        # (// keeps integer division semantics on both py2 and py3).
        capacity_size += (len(res[sql_table.c.content]) // 1024) + 1
        item_data = json.loads(res[sql_table.c.content])
        if 'AttributesToGet' in args:
            out_item = dict((col_name, item_data[col_name])
                            for col_name in args['AttributesToGet'])
            out['Items'].append(out_item)
        else:
            out['Items'].append(item_data)
        out['Count'] += 1
        if len(key_spec) > 1:
            out['LastEvaluatedKey'] = utils.format_item({
                'HashKeyElement': res[sql_table.c.hash_key],
                'RangeKeyElement': res[sql_table.c.range_key]
            })
    else:
        # If we didn't break out of our loop, that means we're on the last
        # page of our result set and should not return a key.
        # The docs say the last evaluated key should be 'null', but I'm
        # suspicious that isn't the case.
        if 'LastEvaluatedKey' in out:
            del out['LastEvaluatedKey']

    self.table_read_op_capacity[args['TableName']] += capacity_size
    self.tables[args['TableName']]['read_counter'].record(capacity_size)
    out['Count'] = len(out['Items'])
    return out
def do_updateitem(self, args):
    """Emulate DynamoDB's UpdateItem operation.

    Reads the existing item (if any) inside a transaction, applies the
    requested AttributeUpdates (ADD / PUT / DELETE), writes the result
    back, and honors ReturnValues of NONE, ALL_OLD or ALL_NEW.

    :param args: Parsed DynamoDB UpdateItem request body.
    :raises NotImplementedError: for 'Expected' conditions or unsupported
        ReturnValues modes.
    :raises ValueError: for an unknown AttributeUpdates Action.
    """
    table_spec = self.tables[args['TableName']]
    sql_table = table_spec['sql_table']
    key_spec = table_spec['key_spec']
    table_def = table_spec['table_def']

    if 'Expected' in args:
        raise NotImplementedError

    if args.get('ReturnValues') not in (None, 'ALL_OLD', 'ALL_NEW'):
        raise NotImplementedError

    if table_def.range_key:
        key = (utils.parse_value(args['Key']['HashKeyElement']),
               utils.parse_value(args['Key']['RangeKeyElement']))
        expr = sql.and_(sql_table.c.hash_key == key[0],
                        sql_table.c.range_key == key[1])
    else:
        key = (utils.parse_value(args['Key']['HashKeyElement']),)
        expr = sql_table.c.hash_key == key[0]

    # Update is one of our few transactionally important operations. By
    # setting self.connection, our callees should use that rather than the
    # connectionless self.engine method, allowing us to control the
    # transaction directly.
    self.connection = self.engine.connect()
    txn = self.connection.begin()

    q = sql.select([sql_table], expr)
    res = list(self.connection.execute(q))
    if res:
        item = json.loads(res[0][sql_table.c.content])
    else:
        item = {}
    # Ensure the key attributes are present (no-op for an existing item).
    item.update(utils.format_key(key_spec, key))

    real_item = utils.parse_item(item)

    # Apply our updates
    for attribute, value_update in args['AttributeUpdates'].items():
        if value_update['Action'] == "ADD":
            if attribute in real_item:
                # BUG FIX: the original tested (int, float, list) here,
                # which made the list-specific elif below unreachable and
                # broke ADD-to-list for non-list operands.
                if isinstance(real_item[attribute], (int, float)):
                    real_item[attribute] += utils.parse_value(
                        value_update['Value'])
                elif isinstance(real_item[attribute], list):
                    if hasattr(value_update['Value'], '__iter__'):
                        real_item[attribute] += [
                            utils.parse_value(v)
                            for v in value_update['Value']
                        ]
                    else:
                        real_item[attribute].append(
                            utils.parse_value(value_update['Value']))
                else:
                    # NOTE(review): for other types (e.g. str) this will
                    # raise AttributeError — presumably ADD is only valid
                    # on numbers and sets, matching DynamoDB. Confirm.
                    real_item[attribute].append(
                        utils.parse_value(value_update['Value']))
            else:
                # ADD to a missing attribute behaves like PUT.
                real_item[attribute] = utils.parse_value(
                    value_update['Value'])
        elif value_update['Action'] == "PUT":
            real_item[attribute] = utils.parse_value(value_update['Value'])
        elif value_update['Action'] == "DELETE":
            if attribute in real_item:
                del real_item[attribute]
        else:
            raise ValueError(value_update['Action'])

    # write to the db
    self._put_item(args['TableName'], utils.format_item(real_item))
    txn.commit()
    self.connection = None

    if args.get('ReturnValues', 'NONE') == 'NONE':
        return {}
    elif args['ReturnValues'] == 'ALL_NEW':
        return {'Attributes': utils.format_item(real_item)}
    elif args['ReturnValues'] == 'ALL_OLD':
        return {'Attributes': item}
def do_query(self, args):
    """Emulate DynamoDB's Query operation against the backing SQL table.

    Builds a SELECT constrained by HashKeyValue plus an optional
    RangeKeyCondition / ExclusiveStartKey, pages by ``Limit`` (capped at
    DEFAULT_LIMIT), records consumed read capacity, and returns a dict
    with 'Items', 'Count' and — when another page exists — 'LastEvaluatedKey'.

    :param args: Parsed DynamoDB Query request body.
    :raises NotImplementedError: for request keys or comparison operators
        this emulation does not support.
    """
    table_spec = self.tables[args['TableName']]
    sql_table = table_spec['sql_table']
    key_spec = table_spec['key_spec']

    unsupported_keys = set(args.keys()) - set([
        'Limit', 'TableName', 'HashKeyValue', 'ScanIndexForward',
        'ConsistentRead', 'RangeKeyCondition', 'AttributesToGet',
        'ExclusiveStartKey'
    ])
    if unsupported_keys:
        raise NotImplementedError(unsupported_keys)

    scan_forward = args.get('ScanIndexForward', True)

    expr = sql_table.c.hash_key == utils.parse_value(args['HashKeyValue'])

    if 'RangeKeyCondition' in args:
        if len(key_spec) < 2:
            # Range conditions only make sense for hash+range tables.
            raise NotImplementedError

        operator = args['RangeKeyCondition']['ComparisonOperator']
        if operator == 'BETWEEN':
            start = utils.parse_value(
                args['RangeKeyCondition']['AttributeValueList'][0])
            end = utils.parse_value(
                args['RangeKeyCondition']['AttributeValueList'][1])
            expr = sql.and_(expr, sql_table.c.range_key.between(start, end))
        else:
            range_value = utils.parse_value(
                args['RangeKeyCondition']['AttributeValueList'][0])
            if operator == 'GT':
                expr = sql.and_(expr, sql_table.c.range_key > range_value)
            elif operator == 'LT':
                expr = sql.and_(expr, sql_table.c.range_key < range_value)
            elif operator == 'GE':
                expr = sql.and_(expr, sql_table.c.range_key >= range_value)
            elif operator == 'LE':
                expr = sql.and_(expr, sql_table.c.range_key <= range_value)
            elif operator == 'EQ':
                expr = sql.and_(expr, sql_table.c.range_key == range_value)
            elif operator == 'BEGINS_WITH':
                expr = sql.and_(
                    expr, sql_table.c.range_key.like('%s%%' % range_value))
            else:
                raise NotImplementedError

    if 'ExclusiveStartKey' in args:
        range_key = utils.parse_value(
            args['ExclusiveStartKey']['RangeKeyElement'])
        if scan_forward:
            expr = sql.and_(expr, sql_table.c.range_key > range_key)
        else:
            # BUG FIX: previously referenced self.table.c.range_key, which
            # is not the table being queried (and may not exist at all).
            expr = sql.and_(expr, sql_table.c.range_key < range_key)

    q = sql.select([sql_table], expr)

    if 'Limit' in args:
        limit = min(DEFAULT_LIMIT, args['Limit'])
    else:
        limit = DEFAULT_LIMIT
    # Fetch one extra row: if it exists we stop at `limit` via break,
    # leaving LastEvaluatedKey set so the caller knows to page again.
    q = q.limit(limit + 1)

    if len(key_spec) > 1:
        if scan_forward:
            q = q.order_by(sql_table.c.range_key.asc())
        else:
            q = q.order_by(sql_table.c.range_key.desc())

    capacity_size = 0
    out = {'Items': [], 'Count': 0}
    for res in self.engine.execute(q):
        if out['Count'] == limit:
            break
        # 1 capacity unit per started KB of stored content
        # (// keeps integer division semantics on both py2 and py3).
        capacity_size += (len(res[sql_table.c.content]) // 1024) + 1
        item_data = json.loads(res[sql_table.c.content])
        if 'AttributesToGet' in args:
            out_item = dict((col_name, item_data[col_name])
                            for col_name in args['AttributesToGet'])
            out['Items'].append(out_item)
        else:
            out['Items'].append(item_data)
        out['Count'] += 1
        if len(key_spec) > 1:
            out['LastEvaluatedKey'] = utils.format_item({
                'HashKeyElement': res[sql_table.c.hash_key],
                'RangeKeyElement': res[sql_table.c.range_key]
            })
    else:
        # If we didn't break out of our loop, that means we're on the last
        # page of our result set and should not return a key.
        # The docs say the last evaluated key should be 'null', but I'm
        # suspicious that isn't the case.
        if 'LastEvaluatedKey' in out:
            del out['LastEvaluatedKey']

    self.table_read_op_capacity[args['TableName']] += capacity_size
    self.tables[args['TableName']]['read_counter'].record(capacity_size)
    out['Count'] = len(out['Items'])
    return out
def do_updateitem(self, args):
    """Emulate DynamoDB's UpdateItem operation.

    Reads the existing item (if any) inside a transaction, applies the
    requested AttributeUpdates (ADD / PUT / DELETE), writes the result
    back, and honors ReturnValues of NONE, ALL_OLD or ALL_NEW.

    :param args: Parsed DynamoDB UpdateItem request body.
    :raises NotImplementedError: for 'Expected' conditions or unsupported
        ReturnValues modes.
    :raises ValueError: for an unknown AttributeUpdates Action.
    """
    table_spec = self.tables[args['TableName']]
    sql_table = table_spec['sql_table']
    key_spec = table_spec['key_spec']
    table_def = table_spec['table_def']

    if 'Expected' in args:
        raise NotImplementedError

    if args.get('ReturnValues') not in (None, 'ALL_OLD', 'ALL_NEW'):
        raise NotImplementedError

    if table_def.range_key:
        key = (utils.parse_value(args['Key']['HashKeyElement']),
               utils.parse_value(args['Key']['RangeKeyElement']))
        expr = sql.and_(sql_table.c.hash_key == key[0],
                        sql_table.c.range_key == key[1])
    else:
        key = (utils.parse_value(args['Key']['HashKeyElement']),)
        expr = sql_table.c.hash_key == key[0]

    # Update is one of our few transactionally important operations. By
    # setting self.connection, our callees should use that rather than the
    # connectionless self.engine method, allowing us to control the
    # transaction directly.
    self.connection = self.engine.connect()
    txn = self.connection.begin()

    q = sql.select([sql_table], expr)
    res = list(self.connection.execute(q))
    if res:
        item = json.loads(res[0][sql_table.c.content])
    else:
        item = {}
    # Ensure the key attributes are present (no-op for an existing item).
    item.update(utils.format_key(key_spec, key))

    real_item = utils.parse_item(item)

    # Apply our updates
    for attribute, value_update in args['AttributeUpdates'].items():
        if value_update['Action'] == "ADD":
            if attribute in real_item:
                # BUG FIX: the original tested (int, float, list) here,
                # which made the list-specific elif below unreachable and
                # broke ADD-to-list for non-list operands.
                if isinstance(real_item[attribute], (int, float)):
                    real_item[attribute] += utils.parse_value(
                        value_update['Value'])
                elif isinstance(real_item[attribute], list):
                    if hasattr(value_update['Value'], '__iter__'):
                        real_item[attribute] += [
                            utils.parse_value(v)
                            for v in value_update['Value']
                        ]
                    else:
                        real_item[attribute].append(
                            utils.parse_value(value_update['Value']))
                else:
                    # NOTE(review): for other types (e.g. str) this will
                    # raise AttributeError — presumably ADD is only valid
                    # on numbers and sets, matching DynamoDB. Confirm.
                    real_item[attribute].append(
                        utils.parse_value(value_update['Value']))
            else:
                # ADD to a missing attribute behaves like PUT.
                real_item[attribute] = utils.parse_value(
                    value_update['Value'])
        elif value_update['Action'] == "PUT":
            real_item[attribute] = utils.parse_value(value_update['Value'])
        elif value_update['Action'] == "DELETE":
            if attribute in real_item:
                del real_item[attribute]
        else:
            raise ValueError(value_update['Action'])

    # write to the db
    self._put_item(args['TableName'], utils.format_item(real_item))
    txn.commit()
    self.connection = None

    if args.get('ReturnValues', 'NONE') == 'NONE':
        return {}
    elif args['ReturnValues'] == 'ALL_NEW':
        return {'Attributes': utils.format_item(real_item)}
    elif args['ReturnValues'] == 'ALL_OLD':
        return {'Attributes': item}