Example #1
 def update_table(self,
                  table_name,
                  read_capacity_units=None,
                  write_capacity_units=None,
                  global_secondary_index_updates=None):
     """
     Performs the UpdateTable operation
     """
     operation_kwargs = {
         TABLE_NAME: table_name
     }
     if (read_capacity_units and not write_capacity_units) or (write_capacity_units and not read_capacity_units):
         raise ValueError("read_capacity_units and write_capacity_units are required together")
     if read_capacity_units and write_capacity_units:
         operation_kwargs[PROVISIONED_THROUGHPUT] = {
             READ_CAPACITY_UNITS: read_capacity_units,
             WRITE_CAPACITY_UNITS: write_capacity_units
         }
     if global_secondary_index_updates:
         global_secondary_indexes_list = []
         for index in global_secondary_index_updates:
             global_secondary_indexes_list.append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: {
                         READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)),
                         WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))
                     }
                 }
             })
         operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list
     try:
         return self.dispatch(UPDATE_TABLE, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError("Failed to update table: {0}".format(e))
 def _get_schema(cls):
     """
     Returns the schema for this index
     """
     attr_definitions = []
     schema = []
     for attr_name, attr_cls in cls._get_attributes().items():
         attr_definitions.append({
             pythonic(ATTR_NAME): attr_cls.attr_name,
             pythonic(ATTR_TYPE): ATTR_TYPE_MAP[attr_cls.attr_type]
         })
         if attr_cls.is_hash_key:
             schema.append({
                 ATTR_NAME: attr_cls.attr_name,
                 KEY_TYPE: HASH
             })
         elif attr_cls.is_range_key:
             schema.append({
                 ATTR_NAME: attr_cls.attr_name,
                 KEY_TYPE: RANGE
             })
     return {
         pythonic(KEY_SCHEMA): schema,
         pythonic(ATTR_DEFINITIONS): attr_definitions
     }
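
For illustration, an index whose only attribute is a number hash key named 'view' (a hypothetical name) would yield a schema shaped like the following, assuming the usual PynamoDB constant values (e.g. ATTR_NAME == 'AttributeName'):

    {
        'key_schema': [{'AttributeName': 'view', 'KeyType': 'HASH'}],
        'attribute_definitions': [{'attribute_name': 'view', 'attribute_type': 'N'}]
    }
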
Example #3
    def _build_filters(cls,
                       key_operator_map,
                       non_key_operator_map=None,
                       key_attribute_classes=None,
                       non_key_attribute_classes=None,
                       filters=None):
        """
        Builds an appropriate condition map

        :param key_operator_map: The mapping of operators used for key attributes
        :param non_key_operator_map: The mapping of operators used for non key attributes
        :param filters: A list of item filters
        """
        key_conditions = {}
        query_conditions = {}
        non_key_operator_map = non_key_operator_map or {}
        key_attribute_classes = key_attribute_classes or {}
        non_key_attribute_classes = non_key_attribute_classes or {}
        for attr_name, operator, value in cls._tokenize_filters(filters):
            attribute_class = key_attribute_classes.get(attr_name, None)
            if attribute_class is None:
                attribute_class = non_key_attribute_classes.get(attr_name, None)
            if attribute_class is None:
                raise ValueError("Attribute {0} specified for filter does not exist.".format(attr_name))
            attribute_name = attribute_class.attr_name
            if operator not in key_operator_map and operator not in non_key_operator_map:
                raise ValueError(
                    "{0} is not a valid filter. Must be one of {1} {2}".format(
                        operator,
                        key_operator_map.keys(), non_key_operator_map.keys()
                    )
                )
            if key_operator_map.get(operator, '') == NULL or non_key_operator_map.get(operator, '') == NULL:
                if value:
                    operator = pythonic(NULL)
                else:
                    operator = pythonic(NOT_NULL)
                condition = {}
            else:
                if not isinstance(value, list):
                    value = [value]
                value = [
                    {ATTR_TYPE_MAP[attribute_class.attr_type]: attribute_class.serialize(val)} for val in value
                ]
                condition = {
                    ATTR_VALUE_LIST: value
                }
            if operator in key_operator_map and (attribute_class.is_hash_key or attribute_class.is_range_key):
                condition.update({COMPARISON_OPERATOR: key_operator_map.get(operator)})
                key_conditions[attribute_name] = condition
            elif operator in non_key_operator_map and not (attribute_class.is_hash_key or attribute_class.is_range_key):
                condition.update({COMPARISON_OPERATOR: non_key_operator_map.get(operator)})
                query_conditions[attribute_name] = condition
            else:
                raise ValueError("Invalid filter specified: {0} {1} {2}".format(attribute_name, operator, value))
        return key_conditions, query_conditions
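
A hypothetical call illustrating the two returned maps, assuming the legacy operator maps from pynamodb.constants (where 'eq' maps to 'EQ'), a filters dict keyed as attr__operator, and a model whose hash key attribute is named user_name:

    key_conditions, query_conditions = UserModel._build_filters(
        QUERY_OPERATOR_MAP,
        non_key_operator_map=QUERY_FILTER_OPERATOR_MAP,
        key_attribute_classes={'user_name': UserModel.user_name},
        filters={'user_name__eq': 'John'}
    )
    # key_conditions == {'user_name': {'AttributeValueList': [{'S': 'John'}],
    #                                  'ComparisonOperator': 'EQ'}}
    # query_conditions == {}
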
Example #4
 def fake_unprocessed_keys(*args, **kwargs):
     if pythonic(REQUEST_ITEMS) in kwargs:
         batch_items = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name)[1:]
         unprocessed = {
             UNPROCESSED_KEYS: {
                 UserModel.Meta.table_name: batch_items
             }
         }
         return HttpOK(unprocessed), unprocessed
     return HttpOK({}), {}
 def _get_json(self):
     """
     Returns a Python object suitable for serialization
     """
     kwargs = {}
     serialized = self._serialize(null_check=False)
     hash_key = serialized.get(HASH)
     range_key = serialized.get(RANGE, None)
     if range_key is not None:
         kwargs[pythonic(RANGE_KEY)] = range_key
     kwargs[pythonic(ATTRIBUTES)] = serialized[pythonic(ATTRIBUTES)]
     return hash_key, kwargs
Example #6
 def update_table(self,
                  table_name,
                  read_capacity_units=None,
                  write_capacity_units=None,
                  global_secondary_index_updates=None,
                  fields=None):
     """
     Performs the UpdateTable operation
     This operation has limitations:
         - It cannot create or delete an index.
         - It may fail if too many updates are attempted at once.
     TODO@rohan - The update operations here do not account for the fact
         that DynamoDB allows only one update per UpdateTable operation.
         https://botocore.readthedocs.org/en/latest/reference/services/dynamodb.html#DynamoDB.Client.update_table
         http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTable.html
     """
     operation_kwargs = {
         TABLE_NAME: table_name
     }
     if (read_capacity_units and not write_capacity_units) or (write_capacity_units and not read_capacity_units):
         raise ValueError("read_capacity_units and write_capacity_units are required together")
     if read_capacity_units and write_capacity_units:
         operation_kwargs[PROVISIONED_THROUGHPUT] = {
             READ_CAPACITY_UNITS: read_capacity_units,
             WRITE_CAPACITY_UNITS: write_capacity_units
         }
     if global_secondary_index_updates:
         global_secondary_indexes_list = []
         for index in global_secondary_index_updates:
             global_secondary_indexes_list.append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: {
                         READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)),
                         WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))
                     }
                 }
             })
         operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list
     if fields:
         attr_list = []
         for field in fields:
             attr_list.append({
                 ATTR_NAME: field.get(pythonic(ATTR_NAME)),
                 ATTR_TYPE: field.get(pythonic(ATTR_TYPE))
             })
         operation_kwargs[ATTR_DEFINITIONS] = attr_list
     try:
         return self.dispatch(UPDATE_TABLE, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError("Failed to update table: {0}".format(e))
Example #7
 def fake_batch_get(*batch_args, **kwargs):
     if pythonic(REQUEST_ITEMS) in kwargs:
         batch_item = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name).get(KEYS)[0]
         batch_items = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name).get(KEYS)[1:]
         response = {
             UNPROCESSED_KEYS: {
                 UserModel.Meta.table_name: {
                     KEYS: batch_items
                 }
             },
             RESPONSES: {
                 UserModel.Meta.table_name: [batch_item]
             }
         }
         return HttpOK(response), response
     return HttpOK({}), {}
    def _serialize(self, attr_map=False, null_check=True):
        """
        Serializes a value for use with DynamoDB

        :param attr_map: If True, then all attributes (including keys) are serialized into the attribute map;
            otherwise the hash and range key values are returned separately
        :param null_check: If True, then non-nullable attributes are checked for null values
        """
        attributes = pythonic(ATTRIBUTES)
        attrs = {attributes: {}}
        for name, attr in self._get_attributes().aliased_attrs():
            value = getattr(self, name)
            if value is None:
                if attr.null:
                    continue
                elif null_check:
                    raise ValueError("Attribute '{0}' cannot be None".format(attr.attr_name))
            serialized = attr.serialize(value)
            if serialized is None:
                continue
            if attr_map:
                attrs[attributes][attr.attr_name] = {
                    ATTR_TYPE_MAP[attr.attr_type]: serialized
                }
            else:
                if attr.is_hash_key:
                    attrs[HASH] = serialized
                elif attr.is_range_key:
                    attrs[RANGE] = serialized
                else:
                    attrs[attributes][attr.attr_name] = {
                        ATTR_TYPE_MAP[attr.attr_type]: serialized
                    }
        return attrs
    def _get_save_args(self, attributes=True, null_check=True):
        """
        Gets the proper *args, **kwargs for saving and retrieving this object

        This is used for serializing items to be saved, or for serializing just the keys.

        :param attributes: If True, then attributes are included.
        :param null_check: If True, then attributes are checked for null.
        """
        kwargs = {}
        serialized = self._serialize(null_check=null_check)
        hash_key = serialized.get(HASH)
        range_key = serialized.get(RANGE, None)
        args = (hash_key, )
        if range_key is not None:
            kwargs[pythonic(RANGE_KEY)] = range_key
        if attributes:
            kwargs[pythonic(ATTRIBUTES)] = serialized[pythonic(ATTRIBUTES)]
        return args, kwargs
Example #10
 def update_indexes(self, 
     table_name,
     global_secondary_indexes=None):
     """
     Updates the provisioned throughput for global secondary indexes
     TODO@rohan
     """
     if global_secondary_indexes:
         operation_kwargs = {
             TABLE_NAME: table_name,
             GLOBAL_SECONDARY_INDEX_UPDATES: []
         }
         for index in global_secondary_indexes:
             operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES].append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                 }
             })
         try:
             return self.dispatch(UPDATE_TABLE, operation_kwargs)
         except BOTOCORE_EXCEPTIONS as e:
             raise TableError("Failed to update table: {0}".format(e))
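
A sketch of invoking this helper (names and throughput numbers are made up; note the provisioned_throughput dict is passed through to DynamoDB verbatim):

    # PatchedConnection is a hypothetical Connection subclass carrying update_indexes
    conn = PatchedConnection(region='us-east-1')
    conn.update_indexes(
        'Thread',
        global_secondary_indexes=[{
            'index_name': 'view-index',
            'provisioned_throughput': {'ReadCapacityUnits': 2, 'WriteCapacityUnits': 1}
        }]
    )
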
Example #11
 def fake_scan(*args, **kwargs):
     start_key = kwargs.get(pythonic(EXCLUSIVE_START_KEY), None)
     if start_key:
         item_idx = 0
         for scan_item in BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name):
             item_idx += 1
             if scan_item == start_key:
                 break
         scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[item_idx:item_idx+1]
     else:
         scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[:1]
     data = {
         ITEMS: scan_items,
         LAST_EVALUATED_KEY: scan_items[-1] if len(scan_items) else None
     }
     return HttpOK(data), data
 def _from_data(cls, data):
     """
     Reconstructs a model object from JSON.
     """
     hash_key, attrs = data
     range_key = attrs.pop('range_key', None)
     attributes = attrs.pop(pythonic(ATTRIBUTES))
     if range_key is not None:
         range_keyname = cls._get_meta_data().range_keyname
         range_keytype = cls._get_meta_data().get_attribute_type(range_keyname)
         attributes[range_keyname] = {
             range_keytype: range_key
         }
     item = cls(hash_key)
     item._deserialize(attributes)
     return item
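
Together with _get_json above, this gives a simple serialize/deserialize round trip (a sketch using the UserModel from these snippets; both helpers are private):

    user = UserModel('john')            # hash key only
    data = user._get_json()             # -> (hash_key, {'attributes': {...}})
    clone = UserModel._from_data(data)  # reconstructs an equivalent item
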
 def commit(self):
     """
     Writes all of the changes that are pending
     """
     log.debug("%s committing batch operation", self.model)
     put_items = []
     delete_items = []
     attrs_name = pythonic(ATTRIBUTES)
     for item in self.pending_operations:
         if item['action'] == PUT:
             put_items.append(item['item']._serialize(attr_map=True)[attrs_name])
         elif item['action'] == DELETE:
             delete_items.append(item['item']._get_keys())
     self.pending_operations = []
     if not len(put_items) and not len(delete_items):
         return
     self.model.get_throttle().throttle()
     data = self.model._get_connection().batch_write_item(
         put_items=put_items,
         delete_items=delete_items
     )
     if data is None:
         return
     self.model.add_throttle_record(data.get(CONSUMED_CAPACITY, None))
     unprocessed_items = data.get(UNPROCESSED_ITEMS, {}).get(self.model.Meta.table_name)
     while unprocessed_items:
         put_items = []
         delete_items = []
         for item in unprocessed_items:
             if PUT_REQUEST in item:
                 put_items.append(item.get(PUT_REQUEST).get(ITEM))
             elif DELETE_REQUEST in item:
                 delete_items.append(item.get(DELETE_REQUEST).get(KEY))
         self.model.get_throttle().throttle()
         log.debug("Resending %s unprocessed keys for batch operation", len(unprocessed_items))
         data = self.model._get_connection().batch_write_item(
             put_items=put_items,
             delete_items=delete_items
         )
         self.model.add_throttle_record(data.get(CONSUMED_CAPACITY))
         unprocessed_items = data.get(UNPROCESSED_ITEMS, {}).get(self.model.Meta.table_name)
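
In PynamoDB this commit logic backs the batch-write context manager, so callers normally never invoke it directly:

    with UserModel.batch_write() as batch:
        batch.save(UserModel('user-1'))
        batch.delete(UserModel('user-2'))
    # commit() runs on exit and resends any unprocessed items as shown above
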
    def update_item(self, attribute, value=None, action=None, conditional_operator=None, **expected_values):
        """
        Updates an item using the UpdateItem operation.

        This should be used for updating a single attribute of an item.

        :param attribute: The name of the attribute to be updated
        :param value: The new value for the attribute.
        :param action: The action to take if this item already exists.
            See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html#DDB-UpdateItem-request-AttributeUpdate
        """
        args, save_kwargs = self._get_save_args(null_check=False)
        attribute_cls = None
        for attr_name, attr_cls in self._get_attributes().items():
            if attr_name == attribute:
                value = attr_cls.serialize(value)
                attribute_cls = attr_cls
                break
        if attribute_cls is None:
            # Guard against an unknown attribute name before it is dereferenced below
            raise ValueError("Attribute {0} specified does not exist".format(attribute))
        if save_kwargs.get(pythonic(RANGE_KEY)):
            kwargs = {pythonic(RANGE_KEY): save_kwargs.get(pythonic(RANGE_KEY))}
        else:
            kwargs = {}
        if len(expected_values):
            kwargs.update(expected=self._build_expected_values(expected_values, UPDATE_FILTER_OPERATOR_MAP))
        kwargs[pythonic(ATTR_UPDATES)] = {
            attribute: {
                ACTION: action.upper() if action else None,
            }
        }
        if action is not None and action.upper() != DELETE:
            kwargs[pythonic(ATTR_UPDATES)][attribute][VALUE] = {ATTR_TYPE_MAP[attribute_cls.attr_type]: value}
        kwargs[pythonic(RETURN_VALUES)] = ALL_NEW
        kwargs.update(conditional_operator=conditional_operator)
        data = self._get_connection().update_item(
            *args,
            **kwargs
        )
        self._throttle.add_record(data.get(CONSUMED_CAPACITY))
        for name, value in data.get(ATTRIBUTES).items():
            attr = self._get_attributes().get(name, None)
            if attr:
                setattr(self, name, attr.deserialize(value.get(ATTR_TYPE_MAP[attr.attr_type])))
        return data
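
Typical single-attribute usage of this legacy API (the attribute names and values are illustrative):

    item = UserModel.get('john')
    item.update_item('views', 10, action='add')    # ADD 10 to the 'views' attribute
    item.update_item('nickname', action='delete')  # remove the attribute entirely
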
Example #15
    def add_field_index(self,
                  table_name,
                  fields=None,
                  global_secondary_indexes=None):
        """
        Adds fields and indexes that did not previously exist on the table.
        Only available for global secondary indexes; at least one index is
        required, and new fields may be added alongside it.
        """
        if not global_secondary_indexes:
            return
        operation_kwargs = {
            TABLE_NAME: table_name
        }

        if fields:
            attr_list = []
            for field in fields:
                attr_list.append({
                    ATTR_NAME: field.get(pythonic(ATTR_NAME)),
                    ATTR_TYPE: field.get(pythonic(ATTR_TYPE))
                })
            operation_kwargs[ATTR_DEFINITIONS] = attr_list

        global_secondary_indexes_list = []
        for index in global_secondary_indexes:
            global_secondary_indexes_list.append({
                CREATE: {
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                    PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                }
            })
        operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list

        try:
            return self.dispatch(UPDATE_TABLE, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise TableError("Failed to update table: {0}".format(e))
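
A sketch of calling this helper (names are made up; the key_schema, projection, and throughput entries are passed through to DynamoDB, so they use DynamoDB-style keys):

    # PatchedConnection is a hypothetical Connection subclass carrying add_field_index
    conn = PatchedConnection(region='us-east-1')
    conn.add_field_index(
        'Thread',
        fields=[{'attribute_name': 'view', 'attribute_type': 'N'}],
        global_secondary_indexes=[{
            'index_name': 'view-index',
            'key_schema': [{'AttributeName': 'view', 'KeyType': 'HASH'}],
            'projection': {'ProjectionType': 'ALL'},
            'provisioned_throughput': {'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}
        }]
    )
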
    def create_table(cls, wait=False, read_capacity_units=None, write_capacity_units=None):
        """
        Create the table for this model

        :param wait: If set, then this call will block until the table is ready for use
        :param read_capacity_units: Sets the read capacity units for this table
        :param write_capacity_units: Sets the write capacity units for this table
        """
        if not cls.exists():
            schema = cls._get_schema()
            if hasattr(cls.Meta, pythonic(READ_CAPACITY_UNITS)):
                schema[pythonic(READ_CAPACITY_UNITS)] = cls.Meta.read_capacity_units
            if hasattr(cls.Meta, pythonic(WRITE_CAPACITY_UNITS)):
                schema[pythonic(WRITE_CAPACITY_UNITS)] = cls.Meta.write_capacity_units
            if hasattr(cls.Meta, pythonic(STREAM_VIEW_TYPE)):
                schema[pythonic(STREAM_SPECIFICATION)] = {
                    pythonic(STREAM_ENABLED): True,
                    pythonic(STREAM_VIEW_TYPE): cls.Meta.stream_view_type
                }
            if read_capacity_units is not None:
                schema[pythonic(READ_CAPACITY_UNITS)] = read_capacity_units
            if write_capacity_units is not None:
                schema[pythonic(WRITE_CAPACITY_UNITS)] = write_capacity_units
            index_data = cls._get_indexes()
            schema[pythonic(GLOBAL_SECONDARY_INDEXES)] = index_data.get(pythonic(GLOBAL_SECONDARY_INDEXES))
            schema[pythonic(LOCAL_SECONDARY_INDEXES)] = index_data.get(pythonic(LOCAL_SECONDARY_INDEXES))
            index_attrs = index_data.get(pythonic(ATTR_DEFINITIONS))
            attr_keys = [attr.get(pythonic(ATTR_NAME)) for attr in schema.get(pythonic(ATTR_DEFINITIONS))]
            for attr in index_attrs:
                attr_name = attr.get(pythonic(ATTR_NAME))
                if attr_name not in attr_keys:
                    schema[pythonic(ATTR_DEFINITIONS)].append(attr)
                    attr_keys.append(attr_name)
            cls._get_connection().create_table(
                **schema
            )
        if wait:
            while True:
                status = cls._get_connection().describe_table()
                if status:
                    data = status.get(TABLE_STATUS)
                    if data == ACTIVE:
                        return
                    else:
                        time.sleep(2)
                else:
                    raise TableError("No TableStatus returned for table")
Example #17
 def _get_schema(cls):
     """
     Returns the schema for this table
     """
     schema = {pythonic(ATTR_DEFINITIONS): [], pythonic(KEY_SCHEMA): []}
     for attr_name, attr_cls in cls.get_attributes().items():
         if attr_cls.is_hash_key or attr_cls.is_range_key:
             schema[pythonic(ATTR_DEFINITIONS)].append({
                 pythonic(ATTR_NAME): attr_cls.attr_name,
                 pythonic(ATTR_TYPE): ATTR_TYPE_MAP[attr_cls.attr_type]
             })
         if attr_cls.is_hash_key:
             schema[pythonic(KEY_SCHEMA)].append({
                 pythonic(KEY_TYPE): HASH,
                 pythonic(ATTR_NAME): attr_cls.attr_name
             })
         elif attr_cls.is_range_key:
             schema[pythonic(KEY_SCHEMA)].append({
                 pythonic(KEY_TYPE): RANGE,
                 pythonic(ATTR_NAME): attr_cls.attr_name
             })
     return schema
Example #18
 def _get_indexes(cls):
     """
     Returns a list of the secondary indexes
     """
     if cls._indexes is None:
         cls._indexes = {
             pythonic(GLOBAL_SECONDARY_INDEXES): [],
             pythonic(LOCAL_SECONDARY_INDEXES): [],
             pythonic(ATTR_DEFINITIONS): []
         }
         cls._index_classes = {}
         for name, index in getmembers_issubclass(cls, Index):
             cls._index_classes[index.Meta.index_name] = index
             schema = index._get_schema()
             idx = {
                 pythonic(INDEX_NAME): index.Meta.index_name,
                 pythonic(KEY_SCHEMA): schema.get(pythonic(KEY_SCHEMA)),
                 pythonic(PROJECTION): {
                     PROJECTION_TYPE: index.Meta.projection.projection_type,
                 },
             }
             if issubclass(index.__class__, GlobalSecondaryIndex):
                 idx[pythonic(PROVISIONED_THROUGHPUT)] = {
                     READ_CAPACITY_UNITS: index.Meta.read_capacity_units,
                     WRITE_CAPACITY_UNITS: index.Meta.write_capacity_units
                 }
             cls._indexes[pythonic(ATTR_DEFINITIONS)].extend(schema.get(pythonic(ATTR_DEFINITIONS)))
             if index.Meta.projection.non_key_attributes:
                 idx[pythonic(PROJECTION)][NON_KEY_ATTRIBUTES] = index.Meta.projection.non_key_attributes
             if issubclass(index.__class__, GlobalSecondaryIndex):
                 cls._indexes[pythonic(GLOBAL_SECONDARY_INDEXES)].append(idx)
             else:
                 cls._indexes[pythonic(LOCAL_SECONDARY_INDEXES)].append(idx)
     return cls._indexes
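
The Index subclasses this method discovers are declared on the model itself; a representative definition in the usual PynamoDB style (names and capacities are illustrative):

    from pynamodb.attributes import NumberAttribute
    from pynamodb.indexes import GlobalSecondaryIndex, AllProjection

    class ViewIndex(GlobalSecondaryIndex):
        class Meta:
            index_name = 'view-index'
            read_capacity_units = 2
            write_capacity_units = 1
            projection = AllProjection()
        view = NumberAttribute(default=0, hash_key=True)
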
Example #19
    def create_table(cls,
                     wait=False,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     billing_mode=None):
        """
        Create the table for this model

        :param wait: If set, then this call will block until the table is ready for use
        :param read_capacity_units: Sets the read capacity units for this table
        :param write_capacity_units: Sets the write capacity units for this table
        :param billing_mode: Sets the billing mode provisioned (default) or on_demand for this table
        """
        if not cls.exists():
            schema = cls._get_schema()
            if hasattr(cls.Meta, pythonic(READ_CAPACITY_UNITS)):
                schema[pythonic(READ_CAPACITY_UNITS)] = cls.Meta.read_capacity_units
            if hasattr(cls.Meta, pythonic(WRITE_CAPACITY_UNITS)):
                schema[pythonic(WRITE_CAPACITY_UNITS)] = cls.Meta.write_capacity_units
            if hasattr(cls.Meta, pythonic(STREAM_VIEW_TYPE)):
                schema[pythonic(STREAM_SPECIFICATION)] = {
                    pythonic(STREAM_ENABLED): True,
                    pythonic(STREAM_VIEW_TYPE): cls.Meta.stream_view_type
                }
            if hasattr(cls.Meta, pythonic(BILLING_MODE)):
                schema[pythonic(BILLING_MODE)] = cls.Meta.billing_mode
            if read_capacity_units is not None:
                schema[pythonic(READ_CAPACITY_UNITS)] = read_capacity_units
            if write_capacity_units is not None:
                schema[pythonic(WRITE_CAPACITY_UNITS)] = write_capacity_units
            if billing_mode is not None:
                schema[pythonic(BILLING_MODE)] = billing_mode
            index_data = cls._get_indexes()
            schema[pythonic(GLOBAL_SECONDARY_INDEXES)] = index_data.get(pythonic(GLOBAL_SECONDARY_INDEXES))
            schema[pythonic(LOCAL_SECONDARY_INDEXES)] = index_data.get(pythonic(LOCAL_SECONDARY_INDEXES))
            index_attrs = index_data.get(pythonic(ATTR_DEFINITIONS))
            attr_keys = [
                attr.get(pythonic(ATTR_NAME))
                for attr in schema.get(pythonic(ATTR_DEFINITIONS))
            ]
            for attr in index_attrs:
                attr_name = attr.get(pythonic(ATTR_NAME))
                if attr_name not in attr_keys:
                    schema[pythonic(ATTR_DEFINITIONS)].append(attr)
                    attr_keys.append(attr_name)
            cls._get_connection().create_table(**schema)
        if wait:
            while True:
                status = cls._get_connection().describe_table()
                if status:
                    data = status.get(TABLE_STATUS)
                    if data == ACTIVE:
                        return
                    else:
                        time.sleep(2)
                else:
                    raise TableError("No TableStatus returned for table")
    def _get_indexes(cls):
        """
        Returns a list of the secondary indexes
        """
        if cls._indexes is None:
            cls._indexes = {
                pythonic(GLOBAL_SECONDARY_INDEXES): [],
                pythonic(LOCAL_SECONDARY_INDEXES): [],
                pythonic(ATTR_DEFINITIONS): []
            }
            cls._index_classes = {}
            for item in dir(cls):
                item_cls = getattr(getattr(cls, item), "__class__", None)
                if item_cls is None:
                    continue
                if issubclass(item_cls, (Index, )):
                    item_cls = getattr(cls, item)
                    cls._index_classes[item_cls.Meta.index_name] = item_cls
                    schema = item_cls._get_schema()
                    idx = {
                        pythonic(INDEX_NAME): item_cls.Meta.index_name,
                        pythonic(KEY_SCHEMA): schema.get(pythonic(KEY_SCHEMA)),
                        pythonic(PROJECTION): {
                            PROJECTION_TYPE: item_cls.Meta.projection.projection_type,
                        },
                    }
                    if issubclass(item_cls.__class__, GlobalSecondaryIndex):
                        idx[pythonic(PROVISIONED_THROUGHPUT)] = {
                            READ_CAPACITY_UNITS: item_cls.Meta.read_capacity_units,
                            WRITE_CAPACITY_UNITS: item_cls.Meta.write_capacity_units
                        }
                    cls._indexes[pythonic(ATTR_DEFINITIONS)].extend(schema.get(pythonic(ATTR_DEFINITIONS)))
                    if item_cls.Meta.projection.non_key_attributes:
                        idx[pythonic(PROJECTION)][NON_KEY_ATTRIBUTES] = item_cls.Meta.projection.non_key_attributes
                    if issubclass(item_cls.__class__, GlobalSecondaryIndex):
                        cls._indexes[pythonic(GLOBAL_SECONDARY_INDEXES)].append(idx)
                    else:
                        cls._indexes[pythonic(LOCAL_SECONDARY_INDEXES)].append(idx)
        return cls._indexes
 def _get_schema(cls):
     """
     Returns the schema for this table
     """
     schema = {
         pythonic(ATTR_DEFINITIONS): [],
         pythonic(KEY_SCHEMA): []
     }
     for attr_name, attr_cls in cls._get_attributes().items():
         if attr_cls.is_hash_key or attr_cls.is_range_key:
             schema[pythonic(ATTR_DEFINITIONS)].append({
                 pythonic(ATTR_NAME): attr_cls.attr_name,
                 pythonic(ATTR_TYPE): ATTR_TYPE_MAP[attr_cls.attr_type]
             })
         if attr_cls.is_hash_key:
             schema[pythonic(KEY_SCHEMA)].append({
                 pythonic(KEY_TYPE): HASH,
                 pythonic(ATTR_NAME): attr_cls.attr_name
             })
         elif attr_cls.is_range_key:
             schema[pythonic(KEY_SCHEMA)].append({
                 pythonic(KEY_TYPE): RANGE,
                 pythonic(ATTR_NAME): attr_cls.attr_name
             })
     return schema
    def _build_filters(cls,
                       key_operator_map,
                       non_key_operator_map=None,
                       key_attribute_classes=None,
                       non_key_attribute_classes=None,
                       filters=None):
        """
        Builds an appropriate condition map

        :param key_operator_map: The mapping of operators used for key attributes
        :param non_key_operator_map: The mapping of operators used for non key attributes
        :param filters: A list of item filters
        """
        key_conditions = {}
        query_conditions = {}
        non_key_operator_map = non_key_operator_map or {}
        key_attribute_classes = key_attribute_classes or {}
        non_key_attribute_classes = non_key_attribute_classes or {}
        for attr_name, operator, value in cls._tokenize_filters(filters):
            attribute_class = key_attribute_classes.get(attr_name, None)
            if attribute_class is None:
                attribute_class = non_key_attribute_classes.get(attr_name, None)
            if attribute_class is None:
                raise ValueError("Attribute {0} specified for filter does not exist.".format(attr_name))
            attribute_name = attribute_class.attr_name
            if operator not in key_operator_map and operator not in non_key_operator_map:
                raise ValueError(
                    "{0} is not a valid filter. Must be one of {1} {2}".format(
                        operator,
                        key_operator_map.keys(), non_key_operator_map.keys()
                    )
                )
            if attribute_name in key_conditions or attribute_name in query_conditions:
                # Before this validation logic, PynamoDB would stomp on multiple values and use only the last provided.
                # This leads to unexpected behavior. In some cases, the DynamoDB API does not allow multiple values
                # even when using the newer API (e.g. KeyConditions and KeyConditionExpression only allow a single
                # value for each member of the primary key). In other cases, moving PynamoDB to the newer API
                # (e.g. FilterExpression over ScanFilter) would allow support for multiple conditions.
                raise ValueError(
                    "Multiple values not supported for attributes in KeyConditions, QueryFilter, or ScanFilter, "
                    "multiple values provided for attribute {0}".format(attribute_name)
                )

            if key_operator_map.get(operator, '') == NULL or non_key_operator_map.get(operator, '') == NULL:
                if value:
                    operator = pythonic(NULL)
                else:
                    operator = pythonic(NOT_NULL)
                condition = {}
            else:
                if not isinstance(value, list):
                    value = [value]
                value = [
                    {ATTR_TYPE_MAP[attribute_class.attr_type]: attribute_class.serialize(val)} for val in value
                ]
                condition = {
                    ATTR_VALUE_LIST: value
                }
            if operator in key_operator_map and (attribute_class.is_hash_key or attribute_class.is_range_key):
                condition.update({COMPARISON_OPERATOR: key_operator_map.get(operator)})
                key_conditions[attribute_name] = condition
            elif operator in non_key_operator_map and not (attribute_class.is_hash_key or attribute_class.is_range_key):
                condition.update({COMPARISON_OPERATOR: non_key_operator_map.get(operator)})
                query_conditions[attribute_name] = condition
            else:
                raise ValueError("Invalid filter specified: {0} {1} {2}".format(attribute_name, operator, value))
        return key_conditions, query_conditions
Example #23
    async def update(self,
                     attributes=None,
                     actions=None,
                     condition=None,
                     conditional_operator=None,
                     **expected_values):
        """
        Updates an item using the UpdateItem operation.

        :param attributes: A dictionary of attributes to update in the following format
                            {
                                attr_name: {'value': 10, 'action': 'ADD'},
                                next_attr: {'value': True, 'action': 'PUT'},
                            }
        """
        if attributes is not None and not isinstance(attributes, dict):
            raise TypeError(
                "the value of `attributes` is expected to be a dictionary")
        if actions is not None and not isinstance(actions, list):
            raise TypeError("the value of `actions` is expected to be a list")

        self._conditional_operator_check(conditional_operator)
        args, save_kwargs = self._get_save_args(null_check=False)
        kwargs = {
            pythonic(RETURN_VALUES): ALL_NEW,
            'conditional_operator': conditional_operator,
        }

        if attributes:
            kwargs[pythonic(ATTR_UPDATES)] = {}

        if pythonic(RANGE_KEY) in save_kwargs:
            kwargs[pythonic(RANGE_KEY)] = save_kwargs[pythonic(RANGE_KEY)]

        if expected_values:
            kwargs['expected'] = self._build_expected_values(
                expected_values, UPDATE_FILTER_OPERATOR_MAP)

        attrs = self.get_attributes()
        attributes = attributes or {}
        for attr, params in attributes.items():
            attribute_cls = attrs[attr]
            action = params['action'] and params['action'].upper()
            attr_values = {ACTION: action}
            if 'value' in params:
                attr_values[VALUE] = self._serialize_value(
                    attribute_cls, params['value'])

            kwargs[pythonic(ATTR_UPDATES)][
                attribute_cls.attr_name] = attr_values

        kwargs.update(condition=condition)
        kwargs.update(actions=actions)
        data = await self._get_connection().update_item(*args, **kwargs)
        for name, value in data[ATTRIBUTES].items():
            attr_name = self._dynamo_to_python_attr(name)
            attr = self.get_attributes().get(attr_name)
            if attr:
                setattr(self, attr_name,
                        attr.deserialize(attr.get_value(value)))

        return data
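
A usage sketch against this async variant (assumes an async-capable model with a matching async get; the attributes dict follows the format documented in the docstring above):

    item = await UserModel.get('john')
    await item.update(attributes={
        'views': {'value': 1, 'action': 'ADD'},
        'verified': {'value': True, 'action': 'PUT'},
    })
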
Example #24
    def create_table(self,
                     table_name,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None):
        """
        Performs the CreateTable operation
        """
        operation_kwargs = {
            TABLE_NAME: table_name,
            PROVISIONED_THROUGHPUT: {
                READ_CAPACITY_UNITS: read_capacity_units,
                WRITE_CAPACITY_UNITS: write_capacity_units
            }
        }
        attrs_list = []
        if attribute_definitions is None:
            raise ValueError("attribute_definitions argument is required")
        for attr in attribute_definitions:
            attrs_list.append({
                ATTR_NAME: attr.get(pythonic(ATTR_NAME)),
                ATTR_TYPE: attr.get(pythonic(ATTR_TYPE))
            })
        operation_kwargs[ATTR_DEFINITIONS] = attrs_list

        if global_secondary_indexes:
            global_secondary_indexes_list = []
            for index in global_secondary_indexes:
                global_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                    PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                })
            operation_kwargs[GLOBAL_SECONDARY_INDEXES] = global_secondary_indexes_list

        if key_schema is None:
            raise ValueError("key_schema is required")
        key_schema_list = []
        for item in key_schema:
            key_schema_list.append({
                ATTR_NAME: item.get(pythonic(ATTR_NAME)),
                KEY_TYPE: str(item.get(pythonic(KEY_TYPE))).upper()
            })
        operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=lambda x: x.get(KEY_TYPE))

        local_secondary_indexes_list = []
        if local_secondary_indexes:
            for index in local_secondary_indexes:
                local_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                })
            operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list

        if stream_specification:
            operation_kwargs[STREAM_SPECIFICATION] = {
                STREAM_ENABLED: stream_specification[pythonic(STREAM_ENABLED)],
                STREAM_VIEW_TYPE: stream_specification[pythonic(STREAM_VIEW_TYPE)]
            }

        try:
            data = self.dispatch(CREATE_TABLE, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise TableError("Failed to create table: {0}".format(e))
        return data
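
A minimal invocation sketch (the pythonic key names below match what the method reads out of each dict; the table and attribute names are illustrative):

    from pynamodb.connection import Connection

    conn = Connection()
    conn.create_table(
        'Thread',
        read_capacity_units=1,
        write_capacity_units=1,
        attribute_definitions=[{'attribute_name': 'forum_name', 'attribute_type': 'S'}],
        key_schema=[{'attribute_name': 'forum_name', 'key_type': 'HASH'}]
    )
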
Example #25
    def _build_filters(cls,
                       key_operator_map,
                       non_key_operator_map=None,
                       key_attribute_classes=None,
                       non_key_attribute_classes=None,
                       filters=None):
        """
        Builds an appropriate condition map

        :param key_operator_map: The mapping of operators used for key attributes
        :param non_key_operator_map: The mapping of operators used for non key attributes
        :param filters: A list of item filters
        """
        key_conditions = {}
        query_conditions = {}
        non_key_operator_map = non_key_operator_map or {}
        key_attribute_classes = key_attribute_classes or {}
        non_key_attribute_classes = non_key_attribute_classes or {}
        for attr_name, operator, value in cls._tokenize_filters(filters):
            attribute_class = key_attribute_classes.get(attr_name, None)
            if attribute_class is None:
                attribute_class = non_key_attribute_classes.get(attr_name, None)
            if attribute_class is None:
                raise ValueError("Attribute {0} specified for filter does not exist.".format(attr_name))
            attribute_name = attribute_class.attr_name
            if operator not in key_operator_map and operator not in non_key_operator_map:
                raise ValueError(
                    "{0} is not a valid filter. Must be one of {1} {2}".format(
                        operator, key_operator_map.keys(),
                        non_key_operator_map.keys()))
            if key_operator_map.get(operator, '') == NULL or non_key_operator_map.get(operator, '') == NULL:
                if value:
                    operator = pythonic(NULL)
                else:
                    operator = pythonic(NOT_NULL)
                condition = {}
            else:
                if not isinstance(value, list):
                    value = [value]
                value = [
                    {ATTR_TYPE_MAP[attribute_class.attr_type]: attribute_class.serialize(val)} for val in value
                ]
                condition = {ATTR_VALUE_LIST: value}
            if operator in key_operator_map and (attribute_class.is_hash_key or attribute_class.is_range_key):
                condition.update({COMPARISON_OPERATOR: key_operator_map.get(operator)})
                key_conditions[attribute_name] = condition
            elif operator in non_key_operator_map and not (attribute_class.is_hash_key or attribute_class.is_range_key):
                condition.update({COMPARISON_OPERATOR: non_key_operator_map.get(operator)})
                query_conditions[attribute_name] = condition
            else:
                raise ValueError("Invalid filter specified: {0} {1} {2}".format(attribute_name, operator, value))
        return key_conditions, query_conditions
Example #26
    async def update_item(self,
                          attribute,
                          value=None,
                          action=None,
                          condition=None,
                          conditional_operator=None,
                          **expected_values):
        """
        Updates an item using the UpdateItem operation.

        This should be used for updating a single attribute of an item.

        :param attribute: The name of the attribute to be updated
        :param value: The new value for the attribute.
        :param action: The action to take if this item already exists.
            See: http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html#DDB-UpdateItem-request-AttributeUpdate
        """
        warnings.warn(
            "`Model.update_item` is deprecated in favour of `Model.update` now"
        )

        self._conditional_operator_check(conditional_operator)
        args, save_kwargs = self._get_save_args(null_check=False)
        attribute_cls = None
        for attr_name, attr_cls in self.get_attributes().items():
            if attr_name == attribute:
                attribute_cls = attr_cls
                break
        if not attribute_cls:
            raise ValueError(f"Attribute {attr_name} specified does not exist")
        if save_kwargs.get(pythonic(RANGE_KEY)):
            kwargs = {
                pythonic(RANGE_KEY): save_kwargs.get(pythonic(RANGE_KEY))
            }
        else:
            kwargs = {}
        if len(expected_values):
            kwargs.update(expected=self._build_expected_values(
                expected_values, UPDATE_FILTER_OPERATOR_MAP))
        kwargs[pythonic(ATTR_UPDATES)] = {
            attribute_cls.attr_name: {
                ACTION: action.upper() if action else None,
            }
        }
        if value is not None:
            kwargs[pythonic(ATTR_UPDATES)][attribute_cls.attr_name][VALUE] = {
                ATTR_TYPE_MAP[attribute_cls.attr_type]:
                attribute_cls.serialize(value)
            }
        kwargs[pythonic(RETURN_VALUES)] = ALL_NEW
        kwargs.update(conditional_operator=conditional_operator)
        kwargs.update(condition=condition)
        data = await self._get_connection().update_item(*args, **kwargs)

        for name, value in data.get(ATTRIBUTES).items():
            attr_name = self._dynamo_to_python_attr(name)
            attr = self.get_attributes().get(attr_name)
            if attr:
                setattr(self, attr_name,
                        attr.deserialize(attr.get_value(value)))
        return data
Example #27
    def create_table(self,
                     table_name,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None):
        """
        Performs the CreateTable operation
        """
        operation_kwargs = {
            TABLE_NAME: table_name,
            PROVISIONED_THROUGHPUT: {
                READ_CAPACITY_UNITS: read_capacity_units,
                WRITE_CAPACITY_UNITS: write_capacity_units
            }
        }
        attrs_list = []
        if attribute_definitions is None:
            raise ValueError("attribute_definitions argument is required")
        for attr in attribute_definitions:
            attrs_list.append({
                ATTR_NAME: attr.get(pythonic(ATTR_NAME)),
                ATTR_TYPE: attr.get(pythonic(ATTR_TYPE))
            })
        operation_kwargs[ATTR_DEFINITIONS] = attrs_list

        if global_secondary_indexes:
            global_secondary_indexes_list = []
            for index in global_secondary_indexes:
                global_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                    PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                })
            operation_kwargs[GLOBAL_SECONDARY_INDEXES] = global_secondary_indexes_list

        if key_schema is None:
            raise ValueError("key_schema is required")
        key_schema_list = []
        for item in key_schema:
            key_schema_list.append({
                ATTR_NAME: item.get(pythonic(ATTR_NAME)),
                KEY_TYPE: str(item.get(pythonic(KEY_TYPE))).upper()
            })
        operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=lambda x: x.get(KEY_TYPE))

        local_secondary_indexes_list = []
        if local_secondary_indexes:
            for index in local_secondary_indexes:
                local_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                })
            operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list

        if stream_specification:
            operation_kwargs[STREAM_SPECIFICATION] = {
                STREAM_ENABLED: stream_specification[pythonic(STREAM_ENABLED)],
                STREAM_VIEW_TYPE: stream_specification[pythonic(STREAM_VIEW_TYPE)]
            }

        try:
            data = self.dispatch(CREATE_TABLE, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise TableError("Failed to create table: {0}".format(e))
        return data