Example #1
0
File: utils.py  Project: screwnet/moto
 def __init__(
     self,
     max_results=None,
     max_results_default=None,
     starting_token=None,
     page_ending_range_keys=None,
     param_values_to_check=None,
 ):
     """Set up pagination state and eagerly validate the starting token.

     Falls back to ``max_results_default`` when no explicit (truthy)
     ``max_results`` is supplied.
     """
     self._starting_token = starting_token
     self._page_ending_range_keys = page_ending_range_keys
     self._param_values_to_check = param_values_to_check
     self._max_results = max_results or max_results_default
     # Codecs must exist before the starting token can be decoded.
     self._token_encoder = TokenEncoder()
     self._token_decoder = TokenDecoder()
     # The checksum is embedded in tokens, so compute it before parsing.
     self._param_checksum = self._calculate_parameter_checksum()
     self._parsed_token = self._parse_starting_token()
Example #2
0
 def __init__(
     self,
     max_results=None,
     max_results_default=None,
     starting_token=None,
     unique_attribute=None,
     param_values_to_check=None,
     fail_on_invalid_token=True,
 ):
     """Set up pagination state and eagerly validate the starting token.

     ``unique_attribute`` may be a single attribute name or a list; it is
     normalized to a list here so downstream code can always iterate.
     """
     self._starting_token = starting_token
     self._param_values_to_check = param_values_to_check
     self._fail_on_invalid_token = fail_on_invalid_token
     self._max_results = max_results or max_results_default
     if isinstance(unique_attribute, list):
         self._unique_attributes = unique_attribute
     else:
         self._unique_attributes = [unique_attribute]
     # Codecs must exist before the starting token can be decoded.
     self._token_encoder = TokenEncoder()
     self._token_decoder = TokenDecoder()
     # The checksum is embedded in tokens, so compute it before parsing.
     self._param_checksum = self._calculate_parameter_checksum()
     self._parsed_token = self._parse_starting_token()
Example #3
0
class TestTokenDecoder(unittest.TestCase):
    """Exercise TokenDecoder against hand-built base64 tokens."""

    def setUp(self):
        # One fresh decoder per test method.
        self.decoder = TokenDecoder()

    def test_decode(self):
        observed = self.decoder.decode('eyJmb28iOiAiYmFyIn0=')
        self.assertEqual(observed, {'foo': 'bar'})

    def test_decode_with_bytes(self):
        token = (
            'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyJdXSwgImZvbyI6ICJZbUZ5In0='
        )
        observed = self.decoder.decode(token)
        self.assertEqual(observed, {'foo': b'bar'})

    def test_decode_with_nested_bytes(self):
        token = (
            'eyJmb28iOiB7ImJhciI6ICJZbUY2In0sICJib3RvX2VuY29kZWRfa2V5cyI6'
            'IFtbImZvbyIsICJiYXIiXV19'
        )
        observed = self.decoder.decode(token)
        self.assertEqual(observed, {'foo': {'bar': b'baz'}})

    def test_decode_with_listed_bytes(self):
        token = (
            'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyIsICJiYXIiLCAxXV0sICJmb28i'
            'OiB7ImJhciI6IFsiYmF6IiwgIlltbHUiXX19'
        )
        observed = self.decoder.decode(token)
        self.assertEqual(observed, {'foo': {'bar': ['baz', b'bin']}})

    def test_decode_with_multiple_bytes_values(self):
        token = (
            'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyIsICJiaW4iXSwgWyJmb28iLCAi'
            'YmFyIl1dLCAiZm9vIjogeyJiaW4iOiAiWW1GdCIsICJiYXIiOiAiWW1GNiJ9fQ=='
        )
        observed = self.decoder.decode(token)
        self.assertEqual(observed, {'foo': {'bar': b'baz', 'bin': b'bam'}})
Example #4
0
class Paginator(object):
    """Generic in-memory paginator for list-style API responses.

    Slices a full result list into pages of ``max_results`` items and
    issues an opaque base64 continuation token that records (a) the
    stringified unique attributes of the first item of the next page and
    (b) a checksum of the caller-supplied parameters, so a token cannot
    be replayed against a request with different filters.
    """

    def __init__(
        self,
        max_results=None,
        max_results_default=None,
        starting_token=None,
        unique_attribute=None,
        param_values_to_check=None,
        fail_on_invalid_token=True,
    ):
        self._max_results = max_results if max_results else max_results_default
        self._starting_token = starting_token
        # Normalize to a list so the predicate/token builders can iterate.
        self._unique_attributes = unique_attribute
        if not isinstance(unique_attribute, list):
            self._unique_attributes = [unique_attribute]
        self._param_values_to_check = param_values_to_check
        # May be a bool, or an exception class to raise on bad tokens.
        self._fail_on_invalid_token = fail_on_invalid_token
        self._token_encoder = TokenEncoder()
        self._token_decoder = TokenDecoder()
        # Checksum must exist before the starting token is validated.
        self._param_checksum = self._calculate_parameter_checksum()
        self._parsed_token = self._parse_starting_token()

    def _parse_starting_token(self):
        """Decode and validate the incoming continuation token.

        Returns the decoded dict, or None when no token was supplied (or
        an undecodable one is tolerated). Raises InvalidToken when the
        embedded checksum does not match the current parameters.
        """
        if self._starting_token is None:
            return None
        # The starting token is a dict passed as a base64 encoded string.
        next_token = self._starting_token
        try:
            next_token = self._token_decoder.decode(next_token)
        except (ValueError, TypeError, UnicodeDecodeError):
            self._raise_exception_if_required(next_token)
            return None
        if next_token.get("parameterChecksum") != self._param_checksum:
            raise InvalidToken("Input inconsistent with page token: {}".format(
                str(next_token)))
        return next_token

    def _raise_exception_if_required(self, token):
        """Raise for an invalid token, honoring a custom exception class."""
        if self._fail_on_invalid_token:
            if isinstance(self._fail_on_invalid_token, type):
                # we need to raise a custom exception
                func_info = inspect.getfullargspec(self._fail_on_invalid_token)
                arg_names, _, _, _, _, _, _ = func_info
                # arg_names == [self] or [self, token_argument_that_can_have_any_name]
                requires_token_arg = len(arg_names) > 1
                if requires_token_arg:
                    raise self._fail_on_invalid_token(token)
                else:
                    raise self._fail_on_invalid_token()
            raise InvalidToken("Invalid token")

    def _calculate_parameter_checksum(self):
        """Hash the caller-visible parameters into a single int."""
        def freeze(o):
            # Recursively convert containers to hashable equivalents.
            # NOTE(review): all falsy values ("", 0, [], None) freeze to
            # None and therefore hash identically -- confirm acceptable.
            if not o:
                return None
            if isinstance(o, dict):
                return frozenset({k: freeze(v) for k, v in o.items()}.items())

            if isinstance(o, (list, tuple, set)):
                return tuple([freeze(v) for v in o])

            return o

        return hash(freeze(self._param_values_to_check))

    def _check_predicate(self, item):
        """Return True when *item* matches the token's unique attributes."""
        if self._parsed_token is None:
            return False
        unique_attributes = self._parsed_token["uniqueAttributes"]
        predicate_values = unique_attributes.split("|")
        for (index, attr) in enumerate(self._unique_attributes):
            # Results may be plain dicts or objects; support both.
            # isinstance (not type ==) also accepts dict subclasses.
            curr_val = item[attr] if isinstance(item, dict) else getattr(
                item, attr, None)
            # Compare as strings: values were stringified on encode.
            if str(curr_val) != predicate_values[index]:
                return False
        return True

    def _build_next_token(self, next_item):
        """Encode a continuation token pointing at *next_item*."""
        token_dict = {}
        if self._param_checksum:
            token_dict["parameterChecksum"] = self._param_checksum
        range_keys = []
        for attr in self._unique_attributes:
            if isinstance(next_item, dict):
                range_keys.append(str(next_item[attr]))
            else:
                range_keys.append(str(getattr(next_item, attr)))
        token_dict["uniqueAttributes"] = "|".join(range_keys)
        return self._token_encoder.encode(token_dict)

    def paginate(self, results):
        """Return ``(page, next_token)`` for the full *results* list.

        The page starts at the item matching the starting token (or at
        index 0 when no token was given); ``next_token`` is None on the
        last page. Raises InvalidToken when the token points at a
        resource no longer present and failures are enabled.
        """
        index_start = 0
        if self._starting_token:
            try:
                index_start = next(index
                                   for (index, result) in enumerate(results)
                                   if self._check_predicate(result))
            except StopIteration:
                if self._fail_on_invalid_token:
                    raise InvalidToken("Resource not found!")
                else:
                    return [], None

        index_end = index_start + self._max_results
        if index_end > len(results):
            index_end = len(results)

        results_page = results[index_start:index_end]

        next_token = None
        if results_page and index_end < len(results):
            last_resource_on_this_page = results[index_end]
            next_token = self._build_next_token(last_resource_on_this_page)
        return results_page, next_token
Example #5
0
def test_token_encoding(token_dict):
    """Round-trip *token_dict* through the token codec and verify it
    survives unchanged."""
    token = TokenEncoder().encode(token_dict)
    # Encoded tokens are plain strings (six keeps this py2/py3 safe).
    assert isinstance(token, six.string_types)
    round_tripped = TokenDecoder().decode(token)
    assert round_tripped == token_dict
Example #6
0
def assert_token_encodes_and_decodes(token_dict):
    """Assert that encoding then decoding *token_dict* is lossless."""
    token = TokenEncoder().encode(token_dict)
    # Encoded tokens are plain strings (six keeps this py2/py3 safe).
    assert isinstance(token, six.string_types)
    assert_equal(TokenDecoder().decode(token), token_dict)
Example #7
0
import os
import boto3
import uuid

from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
from botocore.paginate import TokenEncoder, TokenDecoder

# DynamoDB table that stores the todo items.
TABLE_NAME = "Todos"

# Module-level singletons shared by all handler calls: the botocore
# pagination-token codecs and the DynamoDB attribute-value converters.
encoder = TokenEncoder()
decoder = TokenDecoder()
deserializer = TypeDeserializer()
serializer = TypeSerializer()


def get_todos(page_count: int):
    """Scan the Todos table for items whose ``IdRange`` lies in the
    half-open window ``(page_count, page_count + 10]``.

    NOTE(review): no return statement is visible -- this example looks
    truncated by the scrape; the complete version presumably returns the
    scan response and/or an encoded pagination token. Verify against the
    original source.
    """
    # Window bounds used in the filter expression below.
    low_value = page_count
    max_value = low_value + 10

    # NOTE(review): DynamoDB's Limit bounds items *examined* per scan
    # page, not items matched by FilterExpression -- confirm intended.
    response = get_dynamo_client().scan(
        TableName=TABLE_NAME,
        Limit=20,
        ExpressionAttributeValues={
            ':min': {
                "N": str(low_value)
            },
            ':max': {
                "N": str(max_value)
            }
        },
        FilterExpression="IdRange > :min AND IdRange <= :max")
Example #8
0
class Paginator(object):
    """Generic in-memory paginator for list-style API responses.

    Slices a full result list into pages of ``max_results`` items and
    issues an opaque base64 continuation token carrying the page-ending
    range keys plus a checksum of the caller-supplied parameters.
    """

    def __init__(
        self,
        max_results=None,
        max_results_default=None,
        starting_token=None,
        page_ending_range_keys=None,
        param_values_to_check=None,
        fail_on_invalid_token=True,
    ):
        self._max_results = max_results if max_results else max_results_default
        self._starting_token = starting_token
        self._page_ending_range_keys = page_ending_range_keys
        self._param_values_to_check = param_values_to_check
        self._fail_on_invalid_token = fail_on_invalid_token
        self._token_encoder = TokenEncoder()
        self._token_decoder = TokenDecoder()
        # Checksum must exist before the starting token is validated.
        self._param_checksum = self._calculate_parameter_checksum()
        self._parsed_token = self._parse_starting_token()

    def _parse_starting_token(self):
        """Decode and validate the incoming continuation token.

        Returns the decoded dict, or None when no token was supplied (or
        an undecodable one is tolerated). Raises InvalidToken when the
        embedded checksum does not match the current parameters.
        """
        if self._starting_token is None:
            return None
        # The starting token is a dict passed as a base64 encoded string.
        next_token = self._starting_token
        try:
            next_token = self._token_decoder.decode(next_token)
        except (ValueError, TypeError, UnicodeDecodeError):
            if self._fail_on_invalid_token:
                raise InvalidToken("Invalid token")
            return None
        if next_token.get("parameterChecksum") != self._param_checksum:
            raise InvalidToken("Input inconsistent with page token: {}".format(
                str(next_token)))
        return next_token

    def _calculate_parameter_checksum(self):
        """XOR-fold the hashes of the parameter items into one int.

        Returns None when there are no parameters to check.
        """
        if not self._param_values_to_check:
            return None
        return reduce(
            lambda x, y: x ^ y,
            [hash(item) for item in self._param_values_to_check.items()],
        )

    def _check_predicate(self, item):
        """Return True when *item* matches the token's range keys."""
        if self._parsed_token is None:
            return False
        page_ending_range_key = self._parsed_token["pageEndingRangeKey"]
        predicate_values = page_ending_range_key.split("|")
        for (index, attr) in enumerate(self._page_ending_range_keys):
            # Results may be plain dicts or objects; support both.
            # isinstance (not type ==) also accepts dict subclasses.
            curr_val = item[attr] if isinstance(item, dict) else getattr(
                item, attr, None)
            # BUG FIX: the token stores stringified values ("|".join),
            # so non-string attributes (e.g. int ids) never matched the
            # raw comparison. Compare string representations instead.
            if str(curr_val) != predicate_values[index]:
                return False
        return True

    def _build_next_token(self, next_item):
        """Encode a continuation token pointing at *next_item*."""
        token_dict = {}
        if self._param_checksum:
            token_dict["parameterChecksum"] = self._param_checksum
        range_keys = []
        for attr in self._page_ending_range_keys:
            # BUG FIX: stringify each key -- "|".join raises TypeError
            # for non-string attributes (e.g. int ids).
            if isinstance(next_item, dict):
                range_keys.append(str(next_item[attr]))
            else:
                range_keys.append(str(getattr(next_item, attr)))
        token_dict["pageEndingRangeKey"] = "|".join(range_keys)
        return self._token_encoder.encode(token_dict)

    def paginate(self, results):
        """Return ``(page, next_token)`` for the full *results* list.

        The page starts at the item matching the starting token (or at
        index 0 when no token was given); ``next_token`` is None on the
        last page. Raises InvalidToken when the token points at a
        resource no longer present and failures are enabled.
        """
        index_start = 0
        if self._starting_token:
            try:
                index_start = next(index
                                   for (index, result) in enumerate(results)
                                   if self._check_predicate(result))
            except StopIteration:
                if self._fail_on_invalid_token:
                    raise InvalidToken("Resource not found!")
                else:
                    return [], None

        index_end = index_start + self._max_results
        if index_end > len(results):
            index_end = len(results)

        results_page = results[index_start:index_end]

        next_token = None
        if results_page and index_end < len(results):
            page_ending_result = results[index_end]
            next_token = self._build_next_token(page_ending_result)
        return results_page, next_token
Example #9
0
 def setUp(self):
     # Build a fresh TokenDecoder before each test method runs.
     self.decoder = TokenDecoder()