def __init__(
    self,
    max_results=None,
    max_results_default=None,
    starting_token=None,
    page_ending_range_keys=None,
    param_values_to_check=None,
    fail_on_invalid_token=True,
):
    """Capture pagination settings and eagerly validate the starting token.

    The parameter checksum is computed before the token is parsed because
    the token parser compares the token against that checksum.
    """
    # An explicit page size wins over the service default.
    self._max_results = max_results or max_results_default
    self._starting_token = starting_token
    self._page_ending_range_keys = page_ending_range_keys
    self._param_values_to_check = param_values_to_check
    # May be a bool or an exception type (raised on bad tokens).
    self._fail_on_invalid_token = fail_on_invalid_token
    # One codec pair per paginator, reused for every token.
    self._token_encoder, self._token_decoder = TokenEncoder(), TokenDecoder()
    self._param_checksum = self._calculate_parameter_checksum()
    self._parsed_token = self._parse_starting_token()
def _build_next_token(self, next_item): token_dict = {} if self._param_checksum: token_dict["parameterChecksum"] = self._param_checksum range_keys = [] for (index, attr) in enumerate(self._page_ending_range_keys): range_keys.append(getattr(next_item, attr)) token_dict["pageEndingRangeKey"] = "|".join(range_keys) return TokenEncoder().encode(token_dict)
def __init__(
    self,
    max_results=None,
    max_results_default=None,
    starting_token=None,
    unique_attribute=None,
    param_values_to_check=None,
    fail_on_invalid_token=True,
):
    """Capture pagination settings and eagerly validate the starting token.

    The parameter checksum is computed before the token is parsed because
    the token parser compares the token against that checksum.
    """
    # An explicit page size wins over the service default.
    self._max_results = max_results or max_results_default
    self._starting_token = starting_token
    # Normalise to a list so the rest of the class iterates uniformly.
    self._unique_attributes = (
        unique_attribute if isinstance(unique_attribute, list) else [unique_attribute]
    )
    self._param_values_to_check = param_values_to_check
    # May be a bool or an exception type (raised on bad tokens).
    self._fail_on_invalid_token = fail_on_invalid_token
    # One codec pair per paginator, reused for every token.
    self._token_encoder, self._token_decoder = TokenEncoder(), TokenDecoder()
    self._param_checksum = self._calculate_parameter_checksum()
    self._parsed_token = self._parse_starting_token()
class Paginator(object):
    """Opaque-token paginator over an in-memory list of results.

    A page token is a base64-encoded dict holding a checksum of the request
    parameters plus the ``|``-joined "unique attribute" values of the first
    item of the next page; ``paginate`` slices the results accordingly.
    """

    def __init__(
        self,
        max_results=None,
        max_results_default=None,
        starting_token=None,
        unique_attribute=None,
        param_values_to_check=None,
        fail_on_invalid_token=True,
    ):
        # An explicit page size wins over the service default.
        self._max_results = max_results if max_results else max_results_default
        self._starting_token = starting_token
        # Normalise to a list so the rest of the class iterates uniformly.
        self._unique_attributes = unique_attribute
        if not isinstance(unique_attribute, list):
            self._unique_attributes = [unique_attribute]
        self._param_values_to_check = param_values_to_check
        # May be a bool or an exception type; see _raise_exception_if_required.
        self._fail_on_invalid_token = fail_on_invalid_token
        self._token_encoder = TokenEncoder()
        self._token_decoder = TokenDecoder()
        self._param_checksum = self._calculate_parameter_checksum()
        self._parsed_token = self._parse_starting_token()

    def _parse_starting_token(self):
        """Decode the starting token into a dict, or None when absent.

        :raises InvalidToken: when the token was minted for a request with
            different parameters, or (per _raise_exception_if_required) when
            it cannot be decoded at all.
        """
        if self._starting_token is None:
            return None
        # The starting token is a dict passed as a base64 encoded string.
        next_token = self._starting_token
        try:
            next_token = self._token_decoder.decode(next_token)
        except (ValueError, TypeError, UnicodeDecodeError):
            self._raise_exception_if_required(next_token)
            return None
        if next_token.get("parameterChecksum") != self._param_checksum:
            raise InvalidToken(
                "Input inconsistent with page token: {}".format(str(next_token))
            )
        return next_token

    def _raise_exception_if_required(self, token):
        """Raise for an undecodable token, honouring the configured policy.

        ``_fail_on_invalid_token`` may be falsy (no exception), truthy
        (generic InvalidToken), or an exception class to raise instead.
        """
        if self._fail_on_invalid_token:
            if isinstance(self._fail_on_invalid_token, type):
                # we need to raise a custom exception
                func_info = inspect.getfullargspec(self._fail_on_invalid_token)
                arg_names, _, _, _, _, _, _ = func_info
                # arg_names == [self] or [self, token_argument_that_can_have_any_name]
                requires_token_arg = len(arg_names) > 1
                if requires_token_arg:
                    raise self._fail_on_invalid_token(token)
                else:
                    raise self._fail_on_invalid_token()
            raise InvalidToken("Invalid token")

    def _calculate_parameter_checksum(self):
        """Hash the request parameters into a token-stable checksum."""

        def freeze(o):
            # NOTE(review): every falsy value (None, 0, "", empty containers)
            # freezes to None, so they all checksum identically — assumed
            # acceptable for parameter comparison; confirm if 0 vs None must
            # be distinguished.
            if not o:
                return None
            if isinstance(o, dict):
                return frozenset({k: freeze(v) for k, v in o.items()}.items())
            if isinstance(o, (list, tuple, set)):
                return tuple([freeze(v) for v in o])
            return o

        return hash(freeze(self._param_values_to_check))

    def _check_predicate(self, item):
        """Return True when *item* is the one the parsed token points at."""
        if self._parsed_token is None:
            return False
        unique_attributes = self._parsed_token["uniqueAttributes"]
        predicate_values = unique_attributes.split("|")
        for (index, attr) in enumerate(self._unique_attributes):
            # isinstance (not type ==) so dict subclasses are indexed too.
            curr_val = (
                item[attr] if isinstance(item, dict) else getattr(item, attr, None)
            )
            # str(): the token stores stringified values (_build_next_token).
            if not str(curr_val) == predicate_values[index]:
                return False
        return True

    def _build_next_token(self, next_item):
        """Encode a continuation token that points at *next_item*."""
        token_dict = {}
        if self._param_checksum:
            token_dict["parameterChecksum"] = self._param_checksum
        range_keys = []
        for attr in self._unique_attributes:
            # isinstance (not type ==) so dict subclasses are indexed too.
            if isinstance(next_item, dict):
                range_keys.append(str(next_item[attr]))
            else:
                range_keys.append(str(getattr(next_item, attr)))
        token_dict["uniqueAttributes"] = "|".join(range_keys)
        return self._token_encoder.encode(token_dict)

    def paginate(self, results):
        """Slice *results* into one page.

        :param results: full list of items (dicts or objects).
        :return: (page, next_token) — next_token is None on the last page.
        :raises InvalidToken: when a starting token points at an item that
            is no longer present (unless failures are disabled).
        """
        index_start = 0
        if self._starting_token:
            try:
                index_start = next(
                    index
                    for (index, result) in enumerate(results)
                    if self._check_predicate(result)
                )
            except StopIteration:
                if self._fail_on_invalid_token:
                    raise InvalidToken("Resource not found!")
                else:
                    return [], None
        index_end = index_start + self._max_results
        if index_end > len(results):
            index_end = len(results)
        results_page = results[index_start:index_end]
        next_token = None
        # Only mint a token when at least one more result follows this page.
        if results_page and index_end < len(results):
            last_resource_on_this_page = results[index_end]
            next_token = self._build_next_token(last_resource_on_this_page)
        return results_page, next_token
def test_token_encoding(token_dict):
    """Round-trip *token_dict* through the token encoder and decoder."""
    token = TokenEncoder().encode(token_dict)
    # The encoded token must be a (unicode or byte) string.
    assert isinstance(token, six.string_types)
    round_tripped = TokenDecoder().decode(token)
    assert round_tripped == token_dict
def assert_token_encodes_and_decodes(token_dict):
    """Assert that *token_dict* survives an encode/decode round trip."""
    token = TokenEncoder().encode(token_dict)
    # The encoded token must be a (unicode or byte) string.
    assert isinstance(token, six.string_types)
    assert_equal(TokenDecoder().decode(token), token_dict)
import os
import boto3
import uuid
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
from botocore.paginate import TokenEncoder, TokenDecoder

# DynamoDB table this module reads from.
TABLE_NAME = "Todos"

# Module-level singletons shared by all calls: the pagination-token codec
# and the DynamoDB attribute-value (de)serializers.
encoder = TokenEncoder()
decoder = TokenDecoder()
deserializer = TypeDeserializer()
serializer = TypeSerializer()


def get_todos(page_count: int):
    """Scan the Todos table for items whose IdRange lies in the 10-wide
    window (page_count, page_count + 10].

    NOTE(review): ``get_dynamo_client`` is defined elsewhere in this module.
    The function body appears to continue beyond this excerpt — ``response``
    is assigned but not yet used here.
    """
    low_value = page_count
    max_value = low_value + 10
    # NOTE(review): DynamoDB evaluates FilterExpression after the Limit is
    # applied, so a scan page may hold fewer than 20 matches — confirm this
    # is the intended paging behaviour.
    response = get_dynamo_client().scan(
        TableName=TABLE_NAME,
        Limit=20,
        ExpressionAttributeValues={
            ':min': {"N": str(low_value)},
            ':max': {"N": str(max_value)}
        },
        FilterExpression="IdRange > :min AND IdRange <= :max")
class Paginator(object):
    """Opaque-token paginator keyed on "page ending" range-key attributes.

    A page token is a base64-encoded dict holding a checksum of the request
    parameters plus the ``|``-joined range-key values of the first item of
    the next page; ``paginate`` slices the results accordingly.
    """

    def __init__(
        self,
        max_results=None,
        max_results_default=None,
        starting_token=None,
        page_ending_range_keys=None,
        param_values_to_check=None,
        fail_on_invalid_token=True,
    ):
        # An explicit page size wins over the service default.
        self._max_results = max_results if max_results else max_results_default
        self._starting_token = starting_token
        self._page_ending_range_keys = page_ending_range_keys
        self._param_values_to_check = param_values_to_check
        self._fail_on_invalid_token = fail_on_invalid_token
        self._token_encoder = TokenEncoder()
        self._token_decoder = TokenDecoder()
        self._param_checksum = self._calculate_parameter_checksum()
        self._parsed_token = self._parse_starting_token()

    def _parse_starting_token(self):
        """Decode the starting token into a dict, or None when absent.

        :raises InvalidToken: when the token cannot be decoded (if failures
            are enabled) or was minted for different request parameters.
        """
        if self._starting_token is None:
            return None
        # The starting token is a dict passed as a base64 encoded string.
        next_token = self._starting_token
        try:
            next_token = self._token_decoder.decode(next_token)
        except (ValueError, TypeError, UnicodeDecodeError):
            if self._fail_on_invalid_token:
                raise InvalidToken("Invalid token")
            return None
        if next_token.get("parameterChecksum") != self._param_checksum:
            raise InvalidToken(
                "Input inconsistent with page token: {}".format(str(next_token))
            )
        return next_token

    def _calculate_parameter_checksum(self):
        """XOR-fold the hashes of the (key, value) pairs; None when empty."""
        if not self._param_values_to_check:
            return None
        return reduce(
            lambda x, y: x ^ y,
            [hash(item) for item in self._param_values_to_check.items()],
        )

    def _check_predicate(self, item):
        """Return True when *item* is the one the parsed token points at."""
        if self._parsed_token is None:
            return False
        page_ending_range_key = self._parsed_token["pageEndingRangeKey"]
        predicate_values = page_ending_range_key.split("|")
        for (index, attr) in enumerate(self._page_ending_range_keys):
            # isinstance (not type ==) so dict subclasses are indexed too.
            curr_val = (
                item[attr] if isinstance(item, dict) else getattr(item, attr, None)
            )
            # str(): the token stores stringified values (_build_next_token),
            # so non-string keys (e.g. ints) must be stringified to match.
            if not str(curr_val) == predicate_values[index]:
                return False
        return True

    def _build_next_token(self, next_item):
        """Encode a continuation token that points at *next_item*."""
        token_dict = {}
        if self._param_checksum:
            token_dict["parameterChecksum"] = self._param_checksum
        range_keys = []
        for attr in self._page_ending_range_keys:
            # str(): "|".join requires strings; raw ints would raise TypeError.
            if isinstance(next_item, dict):
                range_keys.append(str(next_item[attr]))
            else:
                range_keys.append(str(getattr(next_item, attr)))
        token_dict["pageEndingRangeKey"] = "|".join(range_keys)
        return self._token_encoder.encode(token_dict)

    def paginate(self, results):
        """Slice *results* into one page.

        :param results: full list of items (dicts or objects).
        :return: (page, next_token) — next_token is None on the last page.
        :raises InvalidToken: when a starting token points at an item that
            is no longer present (unless failures are disabled).
        """
        index_start = 0
        if self._starting_token:
            try:
                index_start = next(
                    index
                    for (index, result) in enumerate(results)
                    if self._check_predicate(result)
                )
            except StopIteration:
                if self._fail_on_invalid_token:
                    raise InvalidToken("Resource not found!")
                else:
                    return [], None
        index_end = index_start + self._max_results
        if index_end > len(results):
            index_end = len(results)
        results_page = results[index_start:index_end]
        next_token = None
        # Only mint a token when at least one more result follows this page.
        if results_page and index_end < len(results):
            page_ending_result = results[index_end]
            next_token = self._build_next_token(page_ending_result)
        return results_page, next_token