def get_operation_kwargs_from_instance(
    self,
    key: str = KEY,
    actions: Optional[Sequence[Action]] = None,
    condition: Optional[Condition] = None,
    return_values_on_condition_failure: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Builds the keyword arguments for a low-level operation on this instance.

    An update is signalled by a non-None ``actions`` sequence; a delete is
    signalled by no actions together with the default ``key`` sentinel.
    """
    is_update = actions is not None
    is_delete = actions is None and key is KEY
    args, save_kwargs = self._get_save_args(null_check=not is_update)

    # A delete must not write the version attribute, so hand the version
    # handler an empty mapping instead of the serialized attributes.
    attributes_for_version = {} if is_delete else save_kwargs
    version_condition = self._handle_version_attribute(
        serialized_attributes=attributes_for_version, actions=actions
    )
    if version_condition is not None:
        condition &= version_condition

    kwargs: Dict[str, Any] = {
        'key': key,
        'actions': actions,
        'condition': condition,
        'return_values_on_condition_failure': return_values_on_condition_failure,
    }
    range_key_name = snake_to_camel_case(RANGE_KEY)
    if not is_update:
        # Full item payload (puts/deletes use everything serialized).
        kwargs.update(save_kwargs)
    elif range_key_name in save_kwargs:
        # Updates only need the range key out of the serialized kwargs.
        kwargs[range_key_name] = save_kwargs[range_key_name]
    return self._get_connection().get_operation_kwargs(*args, **kwargs)
def _handle_version_attribute(self, serialized_attributes, actions=None):
    """
    Handles modifying the request to set or increment the version attribute.

    :param serialized_attributes: A dictionary mapping attribute names to serialized values.
    :param actions: A non-empty list when performing an update, otherwise None.
    :return: a Condition guarding the version, or None when no version attribute is configured.
    """
    if self._version_attribute_name is None:
        return

    version_attribute = self.get_attributes()[self._version_attribute_name]
    current_version = getattr(self, self._version_attribute_name)
    attributes_key = snake_to_camel_case(ATTRIBUTES)

    if current_version:
        # Existing item: require the stored version to match, then bump it by one.
        version_condition = version_attribute == current_version
        if actions:
            actions.append(version_attribute.add(1))
        elif attributes_key in serialized_attributes:
            serialized_attributes[attributes_key][version_attribute.attr_name] = self._serialize_value(
                version_attribute, current_version + 1
            )
    else:
        # New item: require that no version exists yet, and write version 1.
        version_condition = version_attribute.does_not_exist()
        if actions:
            actions.append(version_attribute.set(1))
        elif attributes_key in serialized_attributes:
            serialized_attributes[attributes_key][version_attribute.attr_name] = self._serialize_value(
                version_attribute, 1
            )
    return version_condition
def update(self, actions: List[Action], condition: Optional[Condition] = None) -> Any:
    """
    Updates an item using the UpdateItem operation.

    :param actions: a non-empty list of Action updates to apply. A concrete
        ``list`` is required (not just any Sequence) because the list is
        mutated in place when a version attribute is configured: a version
        increment action is appended to it.
    :param condition: an optional Condition on which to update
    :raises ModelInstance.DoesNotExist: if the object to be updated does not exist
    :raises pynamodb.exceptions.UpdateError: if the `condition` is not met
    :return: the raw response data from the UpdateItem call (ALL_NEW values)
    """
    # Enforce the list contract eagerly with a clear error; the previous
    # `Sequence[Action]` annotation contradicted this runtime requirement.
    if not isinstance(actions, list) or len(actions) == 0:
        raise TypeError("the value of `actions` is expected to be a non-empty list")

    args, save_kwargs = self._get_save_args(null_check=False)
    version_condition = self._handle_version_attribute(save_kwargs, actions=actions)
    if version_condition is not None:
        condition &= version_condition
    kwargs: Dict[str, Any] = {
        snake_to_camel_case(RETURN_VALUES): ALL_NEW,
    }

    if snake_to_camel_case(RANGE_KEY) in save_kwargs:
        kwargs[snake_to_camel_case(RANGE_KEY)] = save_kwargs[snake_to_camel_case(RANGE_KEY)]

    kwargs.update(condition=condition)
    kwargs.update(actions=actions)

    data = self._get_connection().update_item(*args, **kwargs)
    # Refresh this instance's attributes from the ALL_NEW values returned.
    self._deserialize(data[ATTRIBUTES])
    return data
def _get_schema(cls) -> Dict:
    """
    Returns the schema for this index
    """
    attr_definitions = []
    key_schema = []
    for attr_name, attr_cls in cls._get_attributes().items():
        attr_definitions.append({
            snake_to_camel_case(ATTR_NAME): attr_cls.attr_name,
            snake_to_camel_case(ATTR_TYPE): ATTR_TYPE_MAP[attr_cls.attr_type],
        })
        # NOTE(review): the key-schema entries use the raw ATTR_NAME/KEY_TYPE
        # constants while the definitions above use the camel-cased form —
        # presumably the consumer normalizes them; confirm against create_table.
        if attr_cls.is_hash_key:
            key_schema.append({ATTR_NAME: attr_cls.attr_name, KEY_TYPE: HASH})
        elif attr_cls.is_range_key:
            key_schema.append({ATTR_NAME: attr_cls.attr_name, KEY_TYPE: RANGE})
    return {
        snake_to_camel_case(KEY_SCHEMA): key_schema,
        snake_to_camel_case(ATTR_DEFINITIONS): attr_definitions,
    }
def _get_json(self):
    """
    Returns a Python object suitable for serialization

    :return: a ``(hash_key, kwargs)`` tuple; ``kwargs`` holds the serialized
        attributes plus the range key when one is present.
    """
    serialized = self._serialize(null_check=False)
    hash_key = serialized.get(HASH)
    range_key = serialized.get(RANGE)

    kwargs = {}
    if range_key is not None:
        kwargs[snake_to_camel_case(RANGE_KEY)] = range_key
    attributes_key = snake_to_camel_case(ATTRIBUTES)
    kwargs[attributes_key] = serialized[attributes_key]
    return hash_key, kwargs
def _serialize(self, attr_map=False, null_check=True) -> Dict[str, Any]:
    """
    Serializes all model attributes for use with DynamoDB

    :param attr_map: If True, then attributes are returned
    :param null_check: If True, then attributes are checked for null
    """
    attributes = snake_to_camel_case(ATTRIBUTES)
    attrs: Dict[str, Dict] = {attributes: {}}
    for name, attr in self.get_attributes().items():
        value = getattr(self, name)
        if isinstance(value, MapAttribute) and not value.validate():
            raise ValueError("Attribute '{}' is not correctly typed".format(attr.attr_name))

        serialized = self._serialize_value(attr, value, null_check)
        if NULL in serialized:
            # Null values are omitted from the serialized item entirely.
            continue

        if not attr_map and attr.is_hash_key:
            # Key attributes are lifted out into dedicated HASH/RANGE slots
            # unless the caller asked for a flat attribute map.
            attrs[HASH] = serialized[ATTR_TYPE_MAP[attr.attr_type]]
        elif not attr_map and attr.is_range_key:
            attrs[RANGE] = serialized[ATTR_TYPE_MAP[attr.attr_type]]
        else:
            attrs[attributes][attr.attr_name] = serialized
    return attrs
def _get_save_args(self, attributes=True, null_check=True):
    """
    Gets the proper *args, **kwargs for saving and retrieving this object

    This is used for serializing items to be saved, or for serializing just the keys.

    :param attributes: If True, then attributes are included.
    :param null_check: If True, then attributes are checked for null.
    :return: an ``(args, kwargs)`` pair — ``args`` is ``(hash_key,)`` and
        ``kwargs`` optionally carries the range key and the attribute map.
    """
    serialized = self._serialize(null_check=null_check)
    args = (serialized.get(HASH),)

    kwargs = {}
    range_key = serialized.get(RANGE)
    if range_key is not None:
        kwargs[snake_to_camel_case(RANGE_KEY)] = range_key
    if attributes:
        attributes_key = snake_to_camel_case(ATTRIBUTES)
        kwargs[attributes_key] = serialized[attributes_key]
    return args, kwargs
def commit(self) -> None:
    """
    Writes all of the changes that are pending.

    Flushes ``self.pending_operations`` into a single BatchWriteItem call,
    then retries any UnprocessedItems with randomized exponential backoff
    until the batch is fully written or ``Meta.max_retry_attempts`` is hit.

    :raises PutError: if unprocessed items remain after the retry budget
        is exhausted (the leftovers are stashed on ``self.failed_operations``).
    """
    log.debug("%s committing batch operation", self.model)
    put_items = []
    delete_items = []
    attrs_name = snake_to_camel_case(ATTRIBUTES)
    # Split the pending queue into PUT payloads (full attribute maps) and
    # DELETE payloads (keys only), as BatchWriteItem expects.
    for item in self.pending_operations:
        if item['action'] == PUT:
            put_items.append(item['item']._serialize(attr_map=True)[attrs_name])
        elif item['action'] == DELETE:
            delete_items.append(item['item']._get_keys())
    # Clear the queue before issuing the request so a raised error does not
    # leave already-attempted operations queued for a second commit.
    self.pending_operations = []
    if not len(put_items) and not len(delete_items):
        return  # nothing to write
    data = self.model._get_connection().batch_write_item(
        put_items=put_items,
        delete_items=delete_items
    )
    if data is None:
        return
    retries = 0
    unprocessed_items = data.get(UNPROCESSED_ITEMS, {}).get(self.model.Meta.table_name)
    while unprocessed_items:
        # Randomized exponential backoff: 0..base_backoff_ms * 2^retries, in seconds.
        sleep_time = random.randint(0, self.model.Meta.base_backoff_ms * (2 ** retries)) / 1000
        time.sleep(sleep_time)
        retries += 1
        if retries >= self.model.Meta.max_retry_attempts:
            # Expose what could not be written so callers can inspect/retry.
            self.failed_operations = unprocessed_items
            raise PutError("Failed to batch write items: max_retry_attempts exceeded")
        # Rebuild the request lists from the items DynamoDB reported back.
        put_items = []
        delete_items = []
        for item in unprocessed_items:
            if PUT_REQUEST in item:
                put_items.append(item.get(PUT_REQUEST).get(ITEM))  # type: ignore
            elif DELETE_REQUEST in item:
                delete_items.append(item.get(DELETE_REQUEST).get(KEY))  # type: ignore
        log.info("Resending %d unprocessed keys for batch operation after %d seconds sleep", len(unprocessed_items), sleep_time)
        data = self.model._get_connection().batch_write_item(
            put_items=put_items,
            delete_items=delete_items
        )
        # NOTE(review): unlike the first call above, a None response here would
        # raise AttributeError — presumably the connection never returns None
        # on retries; confirm against the connection layer.
        unprocessed_items = data.get(UNPROCESSED_ITEMS, {}).get(self.model.Meta.table_name)
def _serialize(self, null_check=True, attr_map=False) -> Dict[str, Dict[str, Any]]:
    """
    Serializes all model attributes for use with DynamoDB

    :param null_check: If True, then attributes are checked for null
    :param attr_map: If True, then attributes are returned
    """
    attributes = snake_to_camel_case(ATTRIBUTES)
    attrs: Dict[str, Dict] = {attributes: super()._serialize(null_check)}
    if attr_map:
        # Caller wants the flat attribute map; keys stay inside it.
        return attrs

    # Lift the key attributes out of the attribute map into HASH/RANGE slots.
    hash_attr = self._hash_key_attribute()
    hash_value = attrs[attributes].pop(hash_attr.attr_name, None)
    if hash_value is not None:
        attrs[HASH] = hash_value[hash_attr.attr_type]

    range_attr = self._range_key_attribute()
    if range_attr:
        range_value = attrs[attributes].pop(range_attr.attr_name, None)
        if range_value is not None:
            attrs[RANGE] = range_value[range_attr.attr_type]
    return attrs
def _from_data(cls, data):
    """
    Reconstructs a model object from JSON.

    :param data: a ``(hash_key, attrs)`` pair as produced by ``_get_json``.
    :return: a deserialized model instance.
    """
    hash_key, attrs = data
    # Consistency fix: read the range key under the same computed key that
    # _get_json writes it with, instead of a hard-coded 'range_key' literal.
    range_key = attrs.pop(snake_to_camel_case(RANGE_KEY), None)
    attributes = attrs.pop(snake_to_camel_case(ATTRIBUTES))

    # Re-inject the hash key into the attribute map in DynamoDB wire format.
    hash_key_attribute = cls._hash_key_attribute()
    hash_keyname = hash_key_attribute.attr_name
    hash_keytype = hash_key_attribute.attr_type
    attributes[hash_keyname] = {
        hash_keytype: hash_key
    }
    if range_key is not None:
        range_key_attribute = cls._range_key_attribute()
        range_keyname = range_key_attribute.attr_name
        range_keytype = range_key_attribute.attr_type
        attributes[range_keyname] = {
            range_keytype: range_key
        }
    # _user_instantiated=False marks this as framework-constructed.
    item = cls(_user_instantiated=False)
    item._deserialize(attributes)
    return item
def _get_indexes(cls):
    """
    Returns a list of the secondary indexes.

    The result is computed once per class and cached on ``cls._indexes``
    (with the index objects cached on ``cls._index_classes``); subsequent
    calls return the cached mapping of global/local secondary index
    descriptions plus their attribute definitions.
    """
    if cls._indexes is None:
        cls._indexes = {
            snake_to_camel_case(GLOBAL_SECONDARY_INDEXES): [],
            snake_to_camel_case(LOCAL_SECONDARY_INDEXES): [],
            snake_to_camel_case(ATTR_DEFINITIONS): []
        }
        cls._index_classes = {}
        # Discover every Index instance declared on the class.
        for name, index in getmembers(cls, lambda o: isinstance(o, Index)):
            cls._index_classes[index.Meta.index_name] = index
            schema = index._get_schema()
            idx = {
                snake_to_camel_case(INDEX_NAME): index.Meta.index_name,
                snake_to_camel_case(KEY_SCHEMA): schema.get(snake_to_camel_case(KEY_SCHEMA)),
                snake_to_camel_case(PROJECTION): {
                    PROJECTION_TYPE: index.Meta.projection.projection_type,
                },
            }
            if isinstance(index, GlobalSecondaryIndex):
                # Only provisioned-billing tables carry per-GSI throughput;
                # PAY_PER_REQUEST tables must omit it.
                if getattr(cls.Meta, 'billing_mode', None) != PAY_PER_REQUEST_BILLING_MODE:
                    idx[snake_to_camel_case(PROVISIONED_THROUGHPUT)] = {
                        READ_CAPACITY_UNITS: index.Meta.read_capacity_units,
                        WRITE_CAPACITY_UNITS: index.Meta.write_capacity_units
                    }
            # Accumulate the index's attribute definitions alongside the table's.
            cls._indexes[snake_to_camel_case(ATTR_DEFINITIONS)].extend(schema.get(snake_to_camel_case(ATTR_DEFINITIONS)))
            if index.Meta.projection.non_key_attributes:
                idx[snake_to_camel_case(PROJECTION)][NON_KEY_ATTRIBUTES] = index.Meta.projection.non_key_attributes
            if isinstance(index, GlobalSecondaryIndex):
                cls._indexes[snake_to_camel_case(GLOBAL_SECONDARY_INDEXES)].append(idx)
            else:
                cls._indexes[snake_to_camel_case(LOCAL_SECONDARY_INDEXES)].append(idx)
    return cls._indexes
def _get_schema(cls):
    """
    Returns the schema for this table
    """
    attr_definitions_key = snake_to_camel_case(ATTR_DEFINITIONS)
    key_schema_key = snake_to_camel_case(KEY_SCHEMA)
    schema: Dict[str, List] = {
        attr_definitions_key: [],
        key_schema_key: []
    }
    for attr_name, attr_cls in cls.get_attributes().items():
        # Only key attributes appear in a table schema.
        if not (attr_cls.is_hash_key or attr_cls.is_range_key):
            continue
        schema[attr_definitions_key].append({
            snake_to_camel_case(ATTR_NAME): attr_cls.attr_name,
            snake_to_camel_case(ATTR_TYPE): attr_cls.attr_type,
        })
        key_type = HASH if attr_cls.is_hash_key else RANGE
        schema[key_schema_key].append({
            snake_to_camel_case(KEY_TYPE): key_type,
            snake_to_camel_case(ATTR_NAME): attr_cls.attr_name,
        })
    return schema
def create_table(
    cls,
    wait: bool = False,
    read_capacity_units: Optional[int] = None,
    write_capacity_units: Optional[int] = None,
    billing_mode: Optional[str] = None,
    ignore_update_ttl_errors: bool = False,
) -> Any:
    """
    Create the table for this model

    :param wait: If set, then this call will block until the table is ready for use
    :param read_capacity_units: Sets the read capacity units for this table
    :param write_capacity_units: Sets the write capacity units for this table
    :param billing_mode: Sets the billing mode provisioned (default) or on_demand for this table
    :param ignore_update_ttl_errors: If set, errors raised while updating the TTL setting are ignored
    :raises TableError: if no TableStatus is returned while waiting for the table
    """
    if not cls.exists():
        schema = cls._build_create_table_schema(read_capacity_units, write_capacity_units, billing_mode)
        cls._get_connection().create_table(
            **schema
        )
    if wait:
        cls._wait_until_table_active()
    cls.update_ttl(ignore_update_ttl_errors)

@classmethod
def _build_create_table_schema(cls, read_capacity_units, write_capacity_units, billing_mode):
    """Assemble the CreateTable kwargs from Meta defaults, explicit overrides and index schemas."""
    schema = cls._get_schema()
    # Meta-level defaults (only applied when declared on Meta).
    if hasattr(cls.Meta, snake_to_camel_case(READ_CAPACITY_UNITS)):
        schema[snake_to_camel_case(READ_CAPACITY_UNITS)] = cls.Meta.read_capacity_units
    if hasattr(cls.Meta, snake_to_camel_case(WRITE_CAPACITY_UNITS)):
        schema[snake_to_camel_case(WRITE_CAPACITY_UNITS)] = cls.Meta.write_capacity_units
    if hasattr(cls.Meta, snake_to_camel_case(STREAM_VIEW_TYPE)):
        schema[snake_to_camel_case(STREAM_SPECIFICATION)] = {
            snake_to_camel_case(STREAM_ENABLED): True,
            snake_to_camel_case(STREAM_VIEW_TYPE): cls.Meta.stream_view_type
        }
    if hasattr(cls.Meta, snake_to_camel_case(BILLING_MODE)):
        schema[snake_to_camel_case(BILLING_MODE)] = cls.Meta.billing_mode
    # Explicit arguments override the Meta defaults.
    if read_capacity_units is not None:
        schema[snake_to_camel_case(READ_CAPACITY_UNITS)] = read_capacity_units
    if write_capacity_units is not None:
        schema[snake_to_camel_case(WRITE_CAPACITY_UNITS)] = write_capacity_units
    if billing_mode is not None:
        schema[snake_to_camel_case(BILLING_MODE)] = billing_mode
    # Merge the secondary-index schemas, de-duplicating attribute definitions
    # already present in the table schema.
    index_data = cls._get_indexes()
    schema[snake_to_camel_case(GLOBAL_SECONDARY_INDEXES)] = index_data.get(snake_to_camel_case(GLOBAL_SECONDARY_INDEXES))
    schema[snake_to_camel_case(LOCAL_SECONDARY_INDEXES)] = index_data.get(snake_to_camel_case(LOCAL_SECONDARY_INDEXES))
    attr_keys = [
        attr.get(snake_to_camel_case(ATTR_NAME))
        for attr in schema.get(snake_to_camel_case(ATTR_DEFINITIONS))
    ]
    for attr in index_data.get(snake_to_camel_case(ATTR_DEFINITIONS)):
        attr_name = attr.get(snake_to_camel_case(ATTR_NAME))
        if attr_name not in attr_keys:
            schema[snake_to_camel_case(ATTR_DEFINITIONS)].append(attr)
            attr_keys.append(attr_name)
    return schema

@classmethod
def _wait_until_table_active(cls):
    """Poll describe_table every 2 seconds until the table reports ACTIVE.

    :raises TableError: if describe_table returns no status.
    """
    while True:
        status = cls._get_connection().describe_table()
        if not status:
            raise TableError("No TableStatus returned for table")
        if status.get(TABLE_STATUS) == ACTIVE:
            return
        time.sleep(2)