Example #1
    def do_scan(self, args):
        table_spec = self.tables[args['TableName']]
        sql_table = table_spec['sql_table']

        q = sql.select([sql_table])
        if 'Limit' in args:
            q = q.limit(args['Limit'])

        unsupported_keys = set(args.keys()) - set(
            ['Limit', 'TableName', 'ScanFilter'])
        if unsupported_keys:
            raise NotImplementedError(unsupported_keys)

        capacity_size = 0
        out = {'Items': []}
        for res in self.engine.execute(q):
            capacity_size += (len(res[sql_table.c.content]) / 1024) + 1
            item_data = json.loads(res[sql_table.c.content])
            item = utils.parse_item(item_data)
            if 'ScanFilter' in args:
                for attribute, filter_spec in args['ScanFilter'].iteritems():
                    if attribute not in item:
                        continue
                    for value_spec in filter_spec['AttributeValueList']:
                        value = utils.parse_value(value_spec)
                        if compare(filter_spec['ComparisonOperator'], item[attribute], value):
                            out['Items'].append(item_data)
                            break
            else:
                out['Items'].append(item_data)

        self.table_read_op_capacity[args['TableName']] += capacity_size
        self.tables[args['TableName']]['read_counter'].record(capacity_size)

        return out
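
For context, a do_scan call receives a DynamoDB-style request dict. The snippet below is a hypothetical invocation, not from the source: the `db` handle, table name, and filter values are illustrative, and it only uses the keys do_scan actually reads (TableName, Limit, ScanFilter).

# Hypothetical usage sketch -- `db` stands in for whatever object defines do_scan.
result = db.do_scan({
    'TableName': 'users',                       # assumed table name
    'Limit': 25,
    'ScanFilter': {
        'value': {
            'AttributeValueList': [{'N': '10'}],
            'ComparisonOperator': 'GT',         # assumed to be an operator compare() supports
        },
    },
})
matching_items = result['Items']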
Example #2
 def test(self):
     item = utils.parse_item({
         'id': {
             'S': 'Rhett'
         },
         'value': {
             'N': '10.125'
         }
     })
     assert_equal(item['id'], 'Rhett')
     assert_equal(item['value'], 10.125)
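
The test implies the shape of the conversion: parse_item takes DynamoDB wire-format attributes ({'S': ...} for strings, {'N': ...} for numbers sent as strings) and returns plain Python values. A minimal sketch of that behaviour, assuming only the two types the test exercises, might look like this; the real utils module presumably handles more types.

# Minimal sketch of the conversion the test relies on (assumption: only S and N
# types, as exercised above; the real utils module likely supports more).
def parse_value(value_spec):
    ((type_code, raw),) = value_spec.items()
    if type_code == 'S':
        return raw
    if type_code == 'N':
        # DynamoDB numbers arrive as strings: '10.125' -> 10.125, '10' -> 10
        return float(raw) if '.' in raw else int(raw)
    raise NotImplementedError(type_code)

def parse_item(item_data):
    return dict((name, parse_value(spec)) for name, spec in item_data.items())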
Example #3
    def _put_item(self, table_name, item_data):
        table_spec = self.tables[table_name]
        sql_table = table_spec['sql_table']
        table_def = table_spec['table_def']
        key_spec = table_spec['key_spec']

        item = utils.parse_item(item_data)
        hash_key = item[key_spec[0]]

        item_json = json.dumps(item_data)

        capacity_size = (len(item_json) / 1024) + 1
        self.table_write_op_capacity[table_name] += capacity_size
        self.tables[table_name]['write_counter'].record(capacity_size)

        if table_def.range_key:
            range_key = item[key_spec[1]]
            del_q = sql_table.delete().where(
                sql.and_(sql_table.c.hash_key == hash_key,
                         sql_table.c.range_key == range_key))
            ins = sql_table.insert().values(hash_key=hash_key,
                                            range_key=range_key,
                                            content=item_json)
        else:
            del_q = sql_table.delete().where(sql_table.c.hash_key == hash_key)
            ins = sql_table.insert().values(hash_key=hash_key,
                                            content=item_json)

        cnxn = self.connection or self.engine

        try:
            cnxn.execute(ins)
        except sqlalchemy.exc.IntegrityError:
            # The key already exists: delete the old row and re-insert,
            # wrapping both statements in a transaction if we are not already
            # inside one.
            txn = None
            if cnxn == self.engine:
                cnxn = self.engine.connect()
                txn = cnxn.begin()

            cnxn.execute(del_q)
            cnxn.execute(ins)

            if txn:
                txn.commit()

        return capacity_size
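
_put_item takes the raw wire-format item_data (the same format parse_item consumes), keys the row by the parsed hash key (and range key, if the table has one), and returns the write capacity consumed. A hypothetical call, assuming a hash-only table whose key attribute is 'id':

# Hypothetical usage sketch; the table name and attribute names are illustrative.
consumed_units = db._put_item('users', {
    'id': {'S': 'Rhett'},
    'value': {'N': '10.125'},
})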
Example #4
    def _put_item(self, table_name, item_data):
        table_spec = self.tables[table_name]
        sql_table = table_spec['sql_table']
        table_def = table_spec['table_def']
        key_spec = table_spec['key_spec']

        item = utils.parse_item(item_data)
        hash_key = item[key_spec[0]]

        item_json = json.dumps(item_data)

        capacity_size = (len(item_json) / 1024) + 1
        self.table_write_op_capacity[table_name] += capacity_size
        self.tables[table_name]['write_counter'].record(capacity_size)

        if table_def.range_key:
            range_key = item[key_spec[1]]
            del_q = sql_table.delete().where(sql.and_(sql_table.c.hash_key==hash_key, sql_table.c.range_key==range_key))
            ins = sql_table.insert().values(hash_key=hash_key, range_key=range_key, content=item_json)
        else:
            del_q = sql_table.delete().where(sql_table.c.hash_key==hash_key)
            ins = sql_table.insert().values(hash_key=hash_key, content=item_json)

        cnxn = self.connection or self.engine

        try:
            cnxn.execute(ins)
        except sqlalchemy.exc.IntegrityError:
            # The key already exists: delete the old row and re-insert,
            # wrapping both statements in a transaction if we are not already
            # inside one.
            txn = None
            if cnxn == self.engine:
                cnxn = self.engine.connect()
                txn = cnxn.begin()

            cnxn.execute(del_q)
            cnxn.execute(ins)

            if txn:
                txn.commit()
        
        return capacity_size
Example #5
    def do_scan(self, args):
        table_spec = self.tables[args['TableName']]
        sql_table = table_spec['sql_table']

        q = sql.select([sql_table])
        if 'Limit' in args:
            q = q.limit(args['Limit'])

        unsupported_keys = set(args.keys()) - set(
            ['Limit', 'TableName', 'ScanFilter'])
        if unsupported_keys:
            raise NotImplementedError(unsupported_keys)

        capacity_size = 0
        out = {'Items': []}
        for res in self.engine.execute(q):
            capacity_size += (len(res[sql_table.c.content]) / 1024) + 1
            item_data = json.loads(res[sql_table.c.content])
            item = utils.parse_item(item_data)
            if 'ScanFilter' in args:
                for attribute, filter_spec in args['ScanFilter'].iteritems():
                    if attribute not in item:
                        continue
                    for value_spec in filter_spec['AttributeValueList']:
                        value = utils.parse_value(value_spec)
                        if compare(filter_spec['ComparisonOperator'],
                                   item[attribute], value):
                            out['Items'].append(item_data)
                            break
            else:
                out['Items'].append(item_data)

        self.table_read_op_capacity[args['TableName']] += capacity_size
        self.tables[args['TableName']]['read_counter'].record(capacity_size)

        return out
Example #6
    def do_query(self, args):
        table_spec = self.tables[args['TableName']]
        sql_table = table_spec['sql_table']
        key_spec = table_spec['key_spec']
        table_def = table_spec['table_def']

        unsupported_keys = set(args.keys()) - set([
            'Limit', 'TableName', 'HashKeyValue', 'ScanIndexForward',
            'ConsistentRead', 'RangeKeyCondition', 'AttributesToGet',
            'ExclusiveStartKey'
        ])
        if unsupported_keys:
            raise NotImplementedError(unsupported_keys)

        scan_forward = args.get('ScanIndexForward', True)

        expr = sql_table.c.hash_key == utils.parse_value(args['HashKeyValue'])

        if 'RangeKeyCondition' in args:
            if len(key_spec) < 2:
                raise NotImplementedError

            operator = args['RangeKeyCondition']['ComparisonOperator']
            if operator == 'BETWEEN':
                start = utils.parse_value(
                    args['RangeKeyCondition']['AttributeValueList'][0])
                end = utils.parse_value(
                    args['RangeKeyCondition']['AttributeValueList'][1])
                expr = sql.and_(expr,
                                sql_table.c.range_key.between(start, end))

            else:
                range_value = utils.parse_value(
                    args['RangeKeyCondition']['AttributeValueList'][0])
                if operator == 'GT':
                    expr = sql.and_(expr, sql_table.c.range_key > range_value)
                elif operator == 'LT':
                    expr = sql.and_(expr, sql_table.c.range_key < range_value)
                elif operator == 'GE':
                    expr = sql.and_(expr, sql_table.c.range_key >= range_value)
                elif operator == 'LE':
                    expr = sql.and_(expr, sql_table.c.range_key <= range_value)
                elif operator == 'EQ':
                    expr = sql.and_(expr, sql_table.c.range_key == range_value)
                elif operator == 'BEGINS_WITH':
                    expr = sql.and_(
                        expr, sql_table.c.range_key.like('%s%%' % range_value))
                else:
                    raise NotImplementedError

        if 'ExclusiveStartKey' in args:
            range_key = utils.parse_value(
                args['ExclusiveStartKey']['RangeKeyElement'])
            if scan_forward:
                expr = sql.and_(expr, sql_table.c.range_key > range_key)
            else:
                expr = sql.and_(expr, sql_table.c.range_key < range_key)

        q = sql.select([sql_table], expr)

        if 'Limit' in args:
            limit = min(DEFAULT_LIMIT, args['Limit'])
        else:
            limit = DEFAULT_LIMIT

        q = q.limit(limit + 1)

        if len(key_spec) > 1:
            if scan_forward:
                q = q.order_by(sql_table.c.range_key.asc())
            else:
                q = q.order_by(sql_table.c.range_key.desc())

        capacity_size = 0
        out = {'Items': [], 'Count': 0}
        for res in self.engine.execute(q):
            if out['Count'] == limit:
                break

            capacity_size += (len(res[sql_table.c.content]) / 1024) + 1

            item_data = json.loads(res[sql_table.c.content])
            item = utils.parse_item(item_data)
            if 'AttributesToGet' in args:
                out_item = dict((col_name, item_data[col_name])
                                for col_name in args['AttributesToGet'])
                out['Items'].append(out_item)
            else:
                out['Items'].append(item_data)

            out['Count'] += 1
            if len(key_spec) > 1:
                out['LastEvaluatedKey'] = utils.format_item({
                    'HashKeyElement':
                    res[sql_table.c.hash_key],
                    'RangeKeyElement':
                    res[sql_table.c.range_key]
                })
        else:
            # If we didn't break out of the loop, we're on the last page of the
            # result set, so there is no key to resume from.

            # The docs say the last evaluated key should be 'null', but I'm
            # suspicious that isn't the case, so just drop it.
            if 'LastEvaluatedKey' in out:
                #out['LastEvaluatedKey'] = None
                del out['LastEvaluatedKey']

        self.table_read_op_capacity[args['TableName']] += capacity_size
        self.tables[args['TableName']]['read_counter'].record(capacity_size)

        out['Count'] = len(out['Items'])

        return out
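
A hypothetical do_query request, showing the payload keys the method inspects (HashKeyValue, RangeKeyCondition, Limit, ScanIndexForward); the table, key values, and operator are illustrative only.

# Hypothetical usage sketch -- `db` stands in for the object defining do_query.
result = db.do_query({
    'TableName': 'events',
    'HashKeyValue': {'S': 'Rhett'},
    'RangeKeyCondition': {
        'ComparisonOperator': 'BETWEEN',
        'AttributeValueList': [{'N': '100'}, {'N': '200'}],
    },
    'Limit': 50,
    'ScanIndexForward': False,
})
# result['Items'] holds up to 50 items; result.get('LastEvaluatedKey') is the
# cursor to pass back as ExclusiveStartKey when paging.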
Example #7
    def do_updateitem(self, args):
        table_spec = self.tables[args['TableName']]
        sql_table = table_spec['sql_table']
        key_spec = table_spec['key_spec']
        table_def = table_spec['table_def']

        if 'Expected' in args:
            raise NotImplementedError

        if args.get('ReturnValues') not in (None, 'NONE', 'ALL_OLD', 'ALL_NEW'):
            raise NotImplementedError

        if table_def.range_key:
            key = (utils.parse_value(args['Key']['HashKeyElement']),
                   utils.parse_value(args['Key']['RangeKeyElement']))
            expr = sql.and_(sql_table.c.hash_key == key[0],
                            sql_table.c.range_key == key[1])
        else:
            key = (utils.parse_value(args['Key']['HashKeyElement']), )
            expr = sql_table.c.hash_key == key[0]

        # Update is one of our few transactionally important operations.  By
        # setting self.connection, our callees should use that rather than the
        # connectionless self.engine method, allowing us to control the
        # transaction directly.
        self.connection = self.engine.connect()
        txn = self.connection.begin()

        q = sql.select([sql_table], expr)
        res = list(self.connection.execute(q))
        if res:
            item = json.loads(res[0][sql_table.c.content])
        else:
            item = {}
            item.update(utils.format_key(key_spec, key))

        real_item = utils.parse_item(item)

        # Apply our updates
        for attribute, value_update in args['AttributeUpdates'].iteritems():
            if value_update['Action'] == "ADD":
                if attribute in real_item:
                    if isinstance(real_item[attribute], (int, float)):
                        real_item[attribute] += utils.parse_value(
                            value_update['Value'])
                    elif isinstance(real_item[attribute], list):
                        if hasattr(value_update['Value'], '__iter__'):
                            real_item[attribute] += [
                                utils.parse_value(v)
                                for v in value_update['Value']
                            ]
                        else:
                            real_item[attribute].append(
                                utils.parse_value(value_update['Value']))
                    else:
                        real_item[attribute].append(
                            utils.parse_value(value_update['Value']))
                else:
                    real_item[attribute] = utils.parse_value(
                        value_update['Value'])

            elif value_update['Action'] == "PUT":
                real_item[attribute] = utils.parse_value(value_update['Value'])
            elif value_update['Action'] == "DELETE":
                if attribute in real_item:
                    del real_item[attribute]
            else:
                raise ValueError(value_update['Action'])

        # write to the db
        self._put_item(args['TableName'], utils.format_item(real_item))

        txn.commit()
        self.connection = None

        if args.get('ReturnValues', 'NONE') == 'NONE':
            return {}
        elif args['ReturnValues'] == 'ALL_NEW':
            return {'Attributes': utils.format_item(real_item)}
        elif args['ReturnValues'] == 'ALL_OLD':
            return {'Attributes': item}
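
A hypothetical do_updateitem request against a hash-only table, exercising the ADD and PUT actions the method handles; the names and values are illustrative only.

# Hypothetical usage sketch; for a table with a range key the Key dict would
# also need a 'RangeKeyElement' entry.
result = db.do_updateitem({
    'TableName': 'users',
    'Key': {'HashKeyElement': {'S': 'Rhett'}},
    'AttributeUpdates': {
        'value': {'Action': 'ADD', 'Value': {'N': '1'}},
        'nickname': {'Action': 'PUT', 'Value': {'S': 'R'}},
    },
    'ReturnValues': 'ALL_NEW',
})
new_attributes = result['Attributes']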
Example #8
    def do_query(self, args):
        table_spec = self.tables[args['TableName']]
        sql_table = table_spec['sql_table']
        key_spec = table_spec['key_spec']
        table_def = table_spec['table_def']

        unsupported_keys = set(args.keys()) - set([
            'Limit', 'TableName', 'HashKeyValue', 'ScanIndexForward',
            'ConsistentRead', 'RangeKeyCondition', 'AttributesToGet',
            'ExclusiveStartKey'
        ])
        if unsupported_keys:
            raise NotImplementedError(unsupported_keys)

        scan_forward = args.get('ScanIndexForward', True)

        expr = sql_table.c.hash_key == utils.parse_value(args['HashKeyValue'])

        if 'RangeKeyCondition' in args:
            if len(key_spec) < 2:
                raise NotImplementedError

            operator = args['RangeKeyCondition']['ComparisonOperator']
            if operator == 'BETWEEN':
                start = utils.parse_value(args['RangeKeyCondition']['AttributeValueList'][0])
                end = utils.parse_value(args['RangeKeyCondition']['AttributeValueList'][1])
                expr = sql.and_(expr, sql_table.c.range_key.between(start, end))

            else:
                range_value = utils.parse_value(args['RangeKeyCondition']['AttributeValueList'][0])
                if operator == 'GT':
                    expr = sql.and_(expr, sql_table.c.range_key > range_value)
                elif operator == 'LT':
                    expr = sql.and_(expr, sql_table.c.range_key < range_value)
                elif operator == 'GE':
                    expr = sql.and_(expr, sql_table.c.range_key >= range_value)
                elif operator == 'LE':
                    expr = sql.and_(expr, sql_table.c.range_key <= range_value)
                elif operator == 'EQ':
                    expr = sql.and_(expr, sql_table.c.range_key == range_value)
                elif operator == 'BEGINS_WITH':
                    expr = sql.and_(expr, sql_table.c.range_key.like('%s%%' % range_value))
                else:
                    raise NotImplementedError

        if 'ExclusiveStartKey' in args:
            range_key = utils.parse_value(args['ExclusiveStartKey']['RangeKeyElement'])
            if scan_forward:
                expr = sql.and_(expr, sql_table.c.range_key > range_key)
            else:
                expr = sql.and_(expr, sql_table.c.range_key < range_key)

        q = sql.select([sql_table], expr)

        if 'Limit' in args:
            limit = min(DEFAULT_LIMIT, args['Limit'])
        else:
            limit = DEFAULT_LIMIT

        q = q.limit(limit + 1)

        if len(key_spec) > 1:
            if scan_forward:
                q = q.order_by(sql_table.c.range_key.asc())
            else:
                q = q.order_by(sql_table.c.range_key.desc())

        capacity_size = 0
        out = {'Items': [], 'Count': 0}
        for res in self.engine.execute(q):
            if out['Count'] == limit:
                break

            capacity_size += (len(res[sql_table.c.content]) / 1024) + 1

            item_data = json.loads(res[sql_table.c.content])
            item = utils.parse_item(item_data)
            if 'AttributesToGet' in args:
                out_item = dict((col_name, item_data[col_name]) for col_name in args['AttributesToGet'])
                out['Items'].append(out_item)
            else:
                out['Items'].append(item_data)

            out['Count'] += 1
            if len(key_spec) > 1:
                out['LastEvaluatedKey'] = utils.format_item({
                    'HashKeyElement': res[sql_table.c.hash_key],
                    'RangeKeyElement': res[sql_table.c.range_key]})
        else:
            # If we didn't break out of the loop, we're on the last page of the
            # result set, so there is no key to resume from.

            # The docs say the last evaluated key should be 'null', but I'm
            # suspicious that isn't the case, so just drop it.
            if 'LastEvaluatedKey' in out:
                #out['LastEvaluatedKey'] = None
                del out['LastEvaluatedKey']
            

        self.table_read_op_capacity[args['TableName']] += capacity_size
        self.tables[args['TableName']]['read_counter'].record(capacity_size)

        out['Count'] = len(out['Items'])

        return out
Example #9
    def do_updateitem(self, args):
        table_spec = self.tables[args['TableName']]
        sql_table = table_spec['sql_table']
        key_spec = table_spec['key_spec']
        table_def = table_spec['table_def']

        if 'Expected' in args:
            raise NotImplementedError

        if args.get('ReturnValues') not in (None, 'NONE', 'ALL_OLD', 'ALL_NEW'):
            raise NotImplementedError

        if table_def.range_key:
            key = (utils.parse_value(args['Key']['HashKeyElement']), 
                   utils.parse_value(args['Key']['RangeKeyElement']))
            expr = sql.and_(sql_table.c.hash_key == key[0],
                              sql_table.c.range_key == key[1])
        else:
            key = (utils.parse_value(args['Key']['HashKeyElement']),)
            expr = sql_table.c.hash_key == key[0]

        # Update is one of our few transactionally important operations.  By
        # setting self.connection, our callees should use that rather than the
        # connectionless self.engine method, allowing us to control the
        # transaction directly.
        self.connection = self.engine.connect()
        txn = self.connection.begin()

        q = sql.select([sql_table], expr)
        res = list(self.connection.execute(q))
        if res:
            item = json.loads(res[0][sql_table.c.content])
        else:
            item = {}
            item.update(utils.format_key(key_spec, key))

        real_item = utils.parse_item(item)

        # Apply our updates
        for attribute, value_update in args['AttributeUpdates'].iteritems():
            if value_update['Action'] == "ADD":
                if attribute in real_item:
                    if isinstance(real_item[attribute], (int, float)):
                        real_item[attribute] += utils.parse_value(value_update['Value'])
                    elif isinstance(real_item[attribute], list):
                        if hasattr(value_update['Value'], '__iter__'):
                            real_item[attribute] += [utils.parse_value(v) for v in value_update['Value']]
                        else:
                            real_item[attribute].append(utils.parse_value(value_update['Value']))
                    else:
                        real_item[attribute].append(utils.parse_value(value_update['Value']))
                else:
                    real_item[attribute] = utils.parse_value(value_update['Value'])

            elif value_update['Action'] == "PUT":
                real_item[attribute] = utils.parse_value(value_update['Value'])
            elif value_update['Action'] == "DELETE":
                if attribute in real_item:
                    del real_item[attribute]
            else:
                raise ValueError(value_update['Action'])
        
        # write to the db
        self._put_item(args['TableName'], utils.format_item(real_item))

        txn.commit()
        self.connection = None

        if args.get('ReturnValues', 'NONE') == 'NONE':
            return {}
        elif args['ReturnValues'] == 'ALL_NEW':
            return {'Attributes': utils.format_item(real_item)}
        elif args['ReturnValues'] == 'ALL_OLD':
            return {'Attributes': item}
Example #10
 def test(self):
     item = utils.parse_item({'id': {'S': 'Rhett'}, 'value': {'N': '10.125'}})
     assert_equal(item['id'], 'Rhett')
     assert_equal(item['value'], 10.125)