def _serialize(self, attr_map=False, null_check=True):
    """
    Serializes this object's attribute values for use with DynamoDB.

    :param attr_map: If True, every attribute (keys included) is returned in
        the attribute map; if False, hash/range key values are hoisted into
        dedicated HASH/RANGE entries.
    :param null_check: If True, a non-nullable attribute whose value is None
        raises ValueError.
    """
    attr_key = pythonic(ATTRIBUTES)
    result = OrderedDict({attr_key: OrderedDict()})
    for name, attr in self._get_attributes().aliased_attrs():
        value = getattr(self, name)
        if value is None:
            if attr.null:
                # Nullable and unset: simply omit the attribute.
                continue
            if null_check:
                raise ValueError("Attribute '{0}' cannot be None".format(
                    attr.attr_name))
        serialized = attr.serialize(value)
        if serialized is None:
            continue
        typed_value = {ATTR_TYPE_MAP[attr.attr_type]: serialized}
        if attr_map:
            result[attr_key][attr.attr_name] = typed_value
        elif attr.is_hash_key:
            result[HASH] = serialized
        elif attr.is_range_key:
            result[RANGE] = serialized
        else:
            result[attr_key][attr.attr_name] = typed_value
    return result
def _build_filters(cls, key_operator_map, non_key_operator_map=None,
                   key_attribute_classes=None, non_key_attribute_classes=None,
                   filters=None):
    """
    Builds an appropriate condition map for a request.

    :param key_operator_map: The mapping of operators used for key attributes
    :param non_key_operator_map: The mapping of operators used for non key attributes
    :param key_attribute_classes: Mapping of attribute name to attribute class
        for key attributes
    :param non_key_attribute_classes: Mapping of attribute name to attribute
        class for non key attributes
    :param filters: A list of item filters
    :returns: A (key_conditions, query_conditions) tuple of OrderedDicts
    :raises ValueError: For an unknown attribute, an unknown operator, or an
        operator applied to the wrong kind of attribute
    """
    key_conditions = OrderedDict()
    query_conditions = OrderedDict()
    non_key_operator_map = non_key_operator_map or {}
    key_attribute_classes = key_attribute_classes or {}
    # BUG FIX: this mapping was dereferenced below without a None guard,
    # raising AttributeError whenever the caller omitted it.
    non_key_attribute_classes = non_key_attribute_classes or {}
    for attr_name, operator, value in cls._tokenize_filters(filters):
        attribute_class = key_attribute_classes.get(attr_name, None)
        if attribute_class is None:
            attribute_class = non_key_attribute_classes.get(attr_name, None)
        if attribute_class is None:
            raise ValueError("Attribute {0} specified for filter does not exist.".format(attr_name))
        attribute_name = attribute_class.attr_name
        if operator not in key_operator_map and operator not in non_key_operator_map:
            # BUG FIX: this message previously contained a literal newline
            # splitting the string constant across source lines (a syntax
            # error); it is now a single-line string.
            raise ValueError(
                "{0} is not a valid filter. Must be one of {1} {2}".format(
                    operator,
                    key_operator_map.keys(),
                    non_key_operator_map.keys()
                )
            )
        if key_operator_map.get(operator, '') == NULL or non_key_operator_map.get(operator, '') == NULL:
            # Null comparisons carry no value list: the operator itself
            # encodes whether the attribute must be absent or present.
            if value:
                operator = pythonic(NULL)
            else:
                operator = pythonic(NOT_NULL)
            condition = {}
        else:
            if not isinstance(value, list):
                value = [value]
            value = [
                {ATTR_TYPE_MAP[attribute_class.attr_type]: attribute_class.serialize(val)}
                for val in value
            ]
            condition = {
                ATTR_VALUE_LIST: value
            }
        if operator in key_operator_map and (attribute_class.is_hash_key or attribute_class.is_range_key):
            condition.update({COMPARISON_OPERATOR: key_operator_map.get(operator)})
            key_conditions[attribute_name] = condition
        elif operator in non_key_operator_map and not (attribute_class.is_hash_key or attribute_class.is_range_key):
            condition.update({COMPARISON_OPERATOR: non_key_operator_map.get(operator)})
            query_conditions[attribute_name] = condition
        else:
            raise ValueError("Invalid filter specified: {0} {1} {2}".format(attribute_name, operator, value))
    return key_conditions, query_conditions
# BUG FIX: collections.MutableMapping was removed in Python 3.10; the ABC
# lives in collections.abc since Python 3.3.
try:
    from collections.abc import MutableMapping
except ImportError:  # Python 2 fallback
    from collections import MutableMapping


class AttributeDict(MutableMapping):
    """
    A dictionary that stores each attribute under two keys: the alias used to
    insert it, and (when set) the attribute's own ``attr_name``.
    """
    def __init__(self, *args, **kwargs):
        # _alt_values holds every attribute keyed by alias; _values holds
        # only those whose attr_name is not None, keyed by attr_name.
        self._values = OrderedDict()
        self._alt_values = OrderedDict()
        self.update(dict(*args, **kwargs))

    def __getitem__(self, key):
        # Alias lookups take precedence over attr_name lookups.
        if key in self._alt_values:
            return self._alt_values[key]
        return self._values[key]

    def __setitem__(self, key, value):
        if value.attr_name is not None:
            self._values[value.attr_name] = value
        self._alt_values[key] = value

    def __delitem__(self, key):
        # BUG FIX: deletion previously removed only the _values entry,
        # leaving a stale alias behind (and raising KeyError when the alias
        # itself was passed). Remove the attribute from both mappings.
        attr = self._alt_values.pop(key, None)
        if attr is None:
            attr = self._values.pop(key)
        if attr.attr_name is not None:
            self._values.pop(attr.attr_name, None)
        for alias, value in list(self._alt_values.items()):
            if value is attr:
                del self._alt_values[alias]

    def __iter__(self):
        return iter(self._alt_values)

    def __len__(self):
        # BUG FIX: length previously counted _values, which disagrees with
        # __iter__ whenever an attribute has attr_name set to None.
        return len(self._alt_values)

    def aliased_attrs(self):
        """Return (alias, attribute) pairs for all stored attributes."""
        return self._alt_values.items()
def from_raw_data(cls, data):
    """
    Constructs an instance of this class from a raw DynamoDB item.

    :param data: A serialized DynamoDB object
    """
    if data is None:
        raise ValueError("Received no mutable_data to construct object")
    mutable_data = copy.copy(data)
    meta = cls._get_meta_data()
    attributes = cls._get_attributes()
    hash_keyname = meta.hash_keyname
    range_keyname = meta.range_keyname
    # Pop the key attributes out so the remaining items are plain attributes.
    raw_hash_key = mutable_data.pop(hash_keyname).get(
        meta.get_attribute_type(hash_keyname))
    args = (attributes.get(hash_keyname).deserialize(raw_hash_key), )
    kwargs = OrderedDict()
    if range_keyname:
        raw_range_key = mutable_data.pop(range_keyname).get(
            meta.get_attribute_type(range_keyname))
        kwargs['range_key'] = attributes.get(range_keyname).deserialize(raw_range_key)
    for name, value in mutable_data.items():
        attr = attributes.get(name, None)
        if attr:
            kwargs[name] = attr.deserialize(
                value.get(ATTR_TYPE_MAP[attr.attr_type]))
    return cls(*args, **kwargs)
def _build_expected_values(cls, expected_values, operator_map=None):
    """
    Builds an appropriate expected value map for conditional writes.

    :param expected_values: A mapping of expected attribute names to values;
        names may carry ``__exists`` or ``__<operator>`` suffixes
    :param operator_map: Mapping of operator tokens to DynamoDB comparison
        operators
    :raises ValueError: When a referenced attribute or operator is unknown
    """
    # BUG FIX: operator_map defaults to None but was used with `in` below,
    # raising TypeError whenever a suffixed condition was supplied.
    operator_map = operator_map or {}
    expected_values_result = OrderedDict()
    attributes = cls._get_attributes()
    filters = {}
    for attr_name, attr_value in expected_values.items():
        attr_cond = VALUE
        if attr_name.endswith("__exists"):
            attr_cond = EXISTS
            attr_name = attr_name[:-8]  # strip the "__exists" suffix
        attr_cls = attributes.get(attr_name, None)
        if attr_cls is None:
            # Not a plain attribute name: defer to operator parsing below.
            filters[attr_name] = attr_value
        else:
            if attr_cond == VALUE:
                attr_value = attr_cls.serialize(attr_value)
            expected_values_result[attr_cls.attr_name] = {
                attr_cond: attr_value
            }
    for cond, value in filters.items():
        attribute = None
        attribute_class = None
        for token in cond.split('__'):
            if attribute is None:
                attribute = token
                attribute_class = attributes.get(attribute)
                if attribute_class is None:
                    raise ValueError("Attribute {0} specified for expected value does not exist".format(attribute))
            elif token in operator_map:
                if operator_map.get(token) == NULL:
                    if value:
                        value = NULL
                    else:
                        value = NOT_NULL
                    condition = {
                        COMPARISON_OPERATOR: value,
                    }
                else:
                    if not isinstance(value, list):
                        value = [value]
                    # BUG FIX: the value list was previously built as a single
                    # dict keyed by the attribute type, which collapsed
                    # multi-value operators (e.g. between) to the last value.
                    # Build one single-entry dict per value, matching
                    # _build_filters.
                    condition = {
                        COMPARISON_OPERATOR: operator_map.get(token),
                        ATTR_VALUE_LIST: [
                            {ATTR_TYPE_MAP[attribute_class.attr_type]: attribute_class.serialize(val)}
                            for val in value
                        ]
                    }
                expected_values_result[attributes.get(attribute).attr_name] = condition
            else:
                raise ValueError("Could not parse expected condition: {0}".format(cond))
    return expected_values_result
def _get_json(self):
    """
    Returns a (hash_key, kwargs) pair suitable for JSON serialization.
    """
    serialized = self._serialize(null_check=False)
    kwargs = OrderedDict()
    range_key = serialized.get(RANGE, None)
    if range_key:
        kwargs[pythonic(RANGE_KEY)] = range_key
    kwargs[pythonic(ATTRIBUTES)] = serialized[pythonic(ATTRIBUTES)]
    return serialized.get(HASH), kwargs
def _get_indexes(cls):
    """
    Returns the secondary index definitions for this model, building and
    caching them on first access.
    """
    if cls._indexes is None:
        cls._indexes = {
            pythonic(GLOBAL_SECONDARY_INDEXES): [],
            pythonic(LOCAL_SECONDARY_INDEXES): [],
            pythonic(ATTR_DEFINITIONS): []
        }
        cls._index_classes = OrderedDict()
        for attr_name in dir(cls):
            candidate_cls = getattr(getattr(cls, attr_name), "__class__", None)
            if candidate_cls is None or not issubclass(candidate_cls, (Index, )):
                continue
            index = getattr(cls, attr_name)
            cls._index_classes[index.Meta.index_name] = index
            schema = index._get_schema()
            is_global = issubclass(index.__class__, GlobalSecondaryIndex)
            idx = {
                pythonic(INDEX_NAME): index.Meta.index_name,
                pythonic(KEY_SCHEMA): schema.get(pythonic(KEY_SCHEMA)),
                pythonic(PROJECTION): {
                    PROJECTION_TYPE: index.Meta.projection.projection_type,
                },
            }
            if is_global:
                # Only global secondary indexes carry their own throughput.
                idx[pythonic(PROVISIONED_THROUGHPUT)] = {
                    READ_CAPACITY_UNITS: index.Meta.read_capacity_units,
                    WRITE_CAPACITY_UNITS: index.Meta.write_capacity_units
                }
            cls._indexes[pythonic(ATTR_DEFINITIONS)].extend(
                schema.get(pythonic(ATTR_DEFINITIONS)))
            if index.Meta.projection.non_key_attributes:
                idx[pythonic(PROJECTION)][NON_KEY_ATTRIBUTES] = \
                    index.Meta.projection.non_key_attributes
            if is_global:
                cls._indexes[pythonic(GLOBAL_SECONDARY_INDEXES)].append(idx)
            else:
                cls._indexes[pythonic(LOCAL_SECONDARY_INDEXES)].append(idx)
    return cls._indexes
def __init__(self, hash_key=None, range_key=None, **attrs):
    """
    :param hash_key: Required. The hash key for this object.
    :param range_key: Only required if the table has a range key attribute.
    :param attrs: A dictionary of attributes to set on this object.
    """
    self.attribute_values = OrderedDict()
    self._set_defaults()
    # Fold the positional key values into the attribute dict so everything
    # is applied through a single _set_attributes call.
    if hash_key:
        attrs[self._get_meta_data().hash_keyname] = hash_key
    if range_key:
        keyname = self._get_meta_data().range_keyname
        if keyname is None:
            raise ValueError(
                "This table has no range key, but a range key value was provided: {0}".format(range_key))
        attrs[keyname] = range_key
    self._set_attributes(**attrs)
def _get_save_args(self, attributes=True, null_check=True):
    """
    Gets the proper *args, **kwargs for saving and retrieving this object.

    This is used for serializing items to be saved, or for serializing just
    the keys.

    :param attributes: If True, then attributes are included.
    :param null_check: If True, then attributes are checked for null.
    """
    serialized = self._serialize(null_check=null_check)
    args = (serialized.get(HASH), )
    kwargs = OrderedDict()
    range_key = serialized.get(RANGE, None)
    if range_key:
        kwargs[pythonic(RANGE_KEY)] = range_key
    if attributes:
        kwargs[pythonic(ATTRIBUTES)] = serialized[pythonic(ATTRIBUTES)]
    return args, kwargs
def __init__(self, *args, **kwargs):
    """Initialize both internal mappings and load any initial items."""
    self._values = OrderedDict()
    self._alt_values = OrderedDict()
    initial = dict(*args, **kwargs)
    self.update(initial)