Example #1
    def create_table(
        cls,
        wait=False,
        read_capacity_units=None,
        write_capacity_units=None,
        billing_mode=None,
        ignore_update_ttl_errors=False):
        """
        Create the table for this model

        :param wait: If set, then this call will block until the table is ready for use
        :param read_capacity_units: Sets the read capacity units for this table
        :param write_capacity_units: Sets the write capacity units for this table
        :param billing_mode: Sets the billing mode, either provisioned (default) or on_demand, for this table
        :param ignore_update_ttl_errors: If set, errors raised while updating the table's TTL setting are ignored
        """
        if not cls.exists():
            schema = cls._get_schema()
            if hasattr(cls.Meta, pythonic(READ_CAPACITY_UNITS)):
                schema[pythonic(READ_CAPACITY_UNITS)] = cls.Meta.read_capacity_units
            if hasattr(cls.Meta, pythonic(WRITE_CAPACITY_UNITS)):
                schema[pythonic(WRITE_CAPACITY_UNITS)] = cls.Meta.write_capacity_units
            if hasattr(cls.Meta, pythonic(STREAM_VIEW_TYPE)):
                schema[pythonic(STREAM_SPECIFICATION)] = {
                    pythonic(STREAM_ENABLED): True,
                    pythonic(STREAM_VIEW_TYPE): cls.Meta.stream_view_type
                }
            if hasattr(cls.Meta, pythonic(BILLING_MODE)):
                schema[pythonic(BILLING_MODE)] = cls.Meta.billing_mode
            if read_capacity_units is not None:
                schema[pythonic(READ_CAPACITY_UNITS)] = read_capacity_units
            if write_capacity_units is not None:
                schema[pythonic(WRITE_CAPACITY_UNITS)] = write_capacity_units
            if billing_mode is not None:
                schema[pythonic(BILLING_MODE)] = billing_mode
            index_data = cls._get_indexes()
            schema[pythonic(GLOBAL_SECONDARY_INDEXES)] = index_data.get(pythonic(GLOBAL_SECONDARY_INDEXES))
            schema[pythonic(LOCAL_SECONDARY_INDEXES)] = index_data.get(pythonic(LOCAL_SECONDARY_INDEXES))
            index_attrs = index_data.get(pythonic(ATTR_DEFINITIONS))
            attr_keys = [attr.get(pythonic(ATTR_NAME)) for attr in schema.get(pythonic(ATTR_DEFINITIONS))]
            for attr in index_attrs:
                attr_name = attr.get(pythonic(ATTR_NAME))
                if attr_name not in attr_keys:
                    schema[pythonic(ATTR_DEFINITIONS)].append(attr)
                    attr_keys.append(attr_name)
            cls._get_connection().create_table(
                **schema
            )
        if wait:
            while True:
                status = cls._get_connection().describe_table()
                if status:
                    data = status.get(TABLE_STATUS)
                    if data == ACTIVE:
                        break
                    else:
                        time.sleep(2)
                else:
                    raise TableError("No TableStatus returned for table")

        cls.update_ttl(ignore_update_ttl_errors)
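
These snippets appear to come from PynamoDB; assuming that, a minimal usage sketch for the model-level create_table above might look like the following (the Thread model, its attribute, and the capacity values are illustrative, not from the source):

    from pynamodb.models import Model
    from pynamodb.attributes import UnicodeAttribute

    class Thread(Model):
        class Meta:
            table_name = 'Thread'          # hypothetical table name
            read_capacity_units = 1
            write_capacity_units = 1
        forum_name = UnicodeAttribute(hash_key=True)

    # Creates the table if it does not exist, blocks until it is ACTIVE,
    # then applies the model's TTL setting.
    Thread.create_table(wait=True)
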
Example #2
 def update_table(self,
                  table_name,
                  read_capacity_units=None,
                  write_capacity_units=None,
                  global_secondary_index_updates=None):
     """
     Performs the UpdateTable operation
     """
     operation_kwargs = {
         TABLE_NAME: table_name
     }
     if (read_capacity_units and not write_capacity_units) or (write_capacity_units and not read_capacity_units):
         raise ValueError("read_capacity_units and write_capacity_units are required together")
     if read_capacity_units and write_capacity_units:
         operation_kwargs[PROVISIONED_THROUGHPUT] = {
             READ_CAPACITY_UNITS: read_capacity_units,
             WRITE_CAPACITY_UNITS: write_capacity_units
         }
     if global_secondary_index_updates:
         global_secondary_indexes_list = []
         for index in global_secondary_index_updates:
             global_secondary_indexes_list.append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: {
                         READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)),
                         WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))
                     }
                 }
             })
         operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list
     try:
         return self.dispatch(UPDATE_TABLE, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError("Failed to update table: {}".format(e), e)
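
A hedged usage sketch for the connection-level update_table above (assuming PynamoDB's Connection class; the region, table, and index names are illustrative). As the ValueError above enforces, both capacity values must be supplied together:

    from pynamodb.connection import Connection

    connection = Connection(region='us-east-1')
    connection.update_table(
        'Thread',
        read_capacity_units=10,
        write_capacity_units=10,
        global_secondary_index_updates=[{
            'index_name': 'LastPostIndex',       # keys are the pythonic (snake_case) forms
            'read_capacity_units': 5,
            'write_capacity_units': 5,
        }],
    )
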
Example #3
 def update_table(self,
                  table_name,
                  read_capacity_units=None,
                  write_capacity_units=None,
                  global_secondary_index_updates=None):
     """
     Performs the UpdateTable operation
     """
     operation_kwargs = {
         pythonic(TABLE_NAME): table_name
     }
     if (read_capacity_units and not write_capacity_units) or (write_capacity_units and not read_capacity_units):
         raise ValueError("read_capacity_units and write_capacity_units are required together")
     if read_capacity_units and write_capacity_units:
         operation_kwargs[pythonic(PROVISIONED_THROUGHPUT)] = {
             READ_CAPACITY_UNITS: read_capacity_units,
             WRITE_CAPACITY_UNITS: write_capacity_units
         }
     if global_secondary_index_updates:
         global_secondary_indexes_list = []
         for index in global_secondary_index_updates:
             global_secondary_indexes_list.append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: {
                         READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)),
                         WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))
                     }
                 }
             })
         operation_kwargs[pythonic(GLOBAL_SECONDARY_INDEX_UPDATES)] = global_secondary_indexes_list
     response, data = self.dispatch(UPDATE_TABLE, operation_kwargs)
     if not response.ok:
         raise TableError("Failed to update table: {0}".format(response.content))
Example #4
 def get_identifier_map(self, table_name, hash_key, range_key=None, key=KEY):
     """
     Builds the identifier map that is common to several operations
     """
     tbl = self.get_meta_table(table_name)
     if tbl is None:
         raise TableError("No such table {0}".format(table_name))
     return tbl.get_identifier_map(hash_key, range_key=range_key, key=key)
Example #5
 def get_exclusive_start_key_map(self, table_name, exclusive_start_key):
     """
     Builds the exclusive start key attribute map
     """
     tbl = self.get_meta_table(table_name)
     if tbl is None:
         raise TableError("No such table {0}".format(table_name))
     return tbl.get_exclusive_start_key_map(exclusive_start_key)
Example #6
 def get_attribute_type(self, table_name, attribute_name, value=None):
     """
     Returns the proper attribute type for a given attribute name
     :param value: The attribute value can be supplied in case the type is already included
     """
     tbl = self.get_meta_table(table_name)
     if tbl is None:
         raise TableError("No such table {0}".format(table_name))
     return tbl.get_attribute_type(attribute_name, value=value)
Example #7
 def delete_table(self, table_name):
     """
     Performs the DeleteTable operation
     """
     operation_kwargs = {TABLE_NAME: table_name}
     try:
         data = self.dispatch(DELETE_TABLE, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError("Failed to delete table: {0}".format(e))
     return data
Example #8
 def get_item_attribute_map(self, table_name, attributes, item_key=ITEM, pythonic_key=True):
     """
     Builds up a dynamodb compatible AttributeValue map
     """
     tbl = self.get_meta_table(table_name)
     if tbl is None:
         raise TableError("No such table {0}".format(table_name))
     return tbl.get_item_attribute_map(
         attributes,
         item_key=item_key,
         pythonic_key=pythonic_key)
Example #9
 def delete_table(self, table_name):
     """
     Performs the DeleteTable operation
     """
     operation_kwargs = {
         pythonic(TABLE_NAME): table_name
     }
     response, data = self.dispatch(DELETE_TABLE, operation_kwargs)
     if response.status_code != HTTP_OK:
         raise TableError("Failed to delete table: {0}".format(response.content))
     return data
Example #10
 def update_table(self,
                  table_name,
                  read_capacity_units=None,
                  write_capacity_units=None,
                  global_secondary_index_updates=None,
                  fields=None):
     """
     Performs the UpdateTable operation
     Has limits with update. 
         - Will not be able to create or delete an index. 
         - May fail if too many operations are tried at the same time. 
     TODO@rohan - Here the update operations do not account for the fact
         that dynamodb allows only one update per update operation. 
         https://botocore.readthedocs.org/en/latest/reference/services/dynamodb.html#DynamoDB.Client.update_table
         http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTable.html
     """
     operation_kwargs = {
         TABLE_NAME: table_name
     }
     if (read_capacity_units and not write_capacity_units) or (write_capacity_units and not read_capacity_units):
         raise ValueError("read_capacity_units and write_capacity_units are required together")
     if read_capacity_units and write_capacity_units:
         operation_kwargs[PROVISIONED_THROUGHPUT] = {
             READ_CAPACITY_UNITS: read_capacity_units,
             WRITE_CAPACITY_UNITS: write_capacity_units
         }
     if global_secondary_index_updates:
         global_secondary_indexes_list = []
         for index in global_secondary_index_updates:
             global_secondary_indexes_list.append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: {
                         READ_CAPACITY_UNITS: index.get(pythonic(READ_CAPACITY_UNITS)),
                         WRITE_CAPACITY_UNITS: index.get(pythonic(WRITE_CAPACITY_UNITS))
                     }
                 }
             })
         operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list
     if fields:
         attr_list = []
         for field in fields:
             attr_list.append({
                 ATTR_NAME: field.get(pythonic(ATTR_NAME)),
                 ATTR_TYPE: field.get(pythonic(ATTR_TYPE))
             })
         operation_kwargs[ATTR_DEFINITIONS] = attr_list
     try:
         return self.dispatch(UPDATE_TABLE, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError("Failed to update table: {0}".format(e))
Example #11
 def list_tables(self, exclusive_start_table_name=None, limit=None):
     """
     Performs the ListTables operation
     """
     operation_kwargs = {}
     if exclusive_start_table_name:
         operation_kwargs.update(
             {EXCLUSIVE_START_TABLE_NAME: exclusive_start_table_name})
     if limit is not None:
         operation_kwargs.update({LIMIT: limit})
     try:
         return self.dispatch(LIST_TABLES, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError("Unable to list tables: {0}".format(e))
Example #12
 def get_meta_table(self, table_name, refresh=False):
     """
     Returns a MetaTable
     """
     if table_name not in self._tables or refresh:
         operation_kwargs = {TABLE_NAME: table_name}
         try:
             data = self.dispatch(DESCRIBE_TABLE, operation_kwargs)
             self._tables[table_name] = MetaTable(data.get(TABLE_KEY))
         except BotoCoreError as e:
             raise TableError("Unable to describe table: {0}".format(e))
         except ClientError as e:
             if e.response['Error']['Code'] == 'ResourceNotFoundException':
                 raise TableDoesNotExist(e.response['Error']['Message'])
             else:
                 raise
     return self._tables[table_name]
Example #13
 def get_meta_table(self, table_name, refresh=False):
     """
     Returns a MetaTable
     """
     if table_name not in self._tables or refresh:
         operation_kwargs = {
             pythonic(TABLE_NAME): table_name
         }
         response, data = self.dispatch(DESCRIBE_TABLE, operation_kwargs)
         if not response.ok:
             if response.status_code == HTTP_BAD_REQUEST:
                 return None
             else:
                 raise TableError("Unable to describe table: {0}".format(response.content))
         self._tables[table_name] = MetaTable(data.get(TABLE_KEY))
     return self._tables[table_name]
Example #14
 async def update_time_to_live(self, table_name, ttl_attribute_name):
     """
     Performs the UpdateTimeToLive operation
     """
     operation_kwargs = {
         TABLE_NAME: table_name,
         TIME_TO_LIVE_SPECIFICATION: {
             ATTR_NAME: ttl_attribute_name,
             ENABLED: True,
         }
     }
     try:
         return await self.dispatch(UPDATE_TIME_TO_LIVE, operation_kwargs)
     except BOTOCORE_EXCEPTIONS as e:
         raise TableError(
             "Failed to update TTL on table: {}".format(e)) from e
Example #15
 def list_tables(self, exclusive_start_table_name=None, limit=None):
     """
     Performs the ListTables operation
     """
     operation_kwargs = {}
     if exclusive_start_table_name:
         operation_kwargs.update({
             pythonic(EXCLUSIVE_START_TABLE_NAME): exclusive_start_table_name
         })
     if limit is not None:
         operation_kwargs.update({
             pythonic(LIMIT): limit
         })
     response, data = self.dispatch(LIST_TABLES, operation_kwargs)
     if not response.ok:
         raise TableError("Unable to list tables: {0}".format(response.content))
     return data
Example #16
 def delete_indexes(self,
                    table_name,
                    global_secondary_indexes=None):
     """
     TODO@rohan
     """
     if global_secondary_indexes:
         operation_kwargs = {
             TABLE_NAME: table_name,
             GLOBAL_SECONDARY_INDEX_UPDATES: []
         }
         for index in global_secondary_indexes:
             operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES].append({
                 DYNAMO_DELETE: {
                     INDEX_NAME: index
                 }
             })
         try:
             return self.dispatch(UPDATE_TABLE, operation_kwargs)
         except BOTOCORE_EXCEPTIONS as e:
             raise TableError("Failed to update table: {0}".format(e))
Example #17
 def update_indexes(self,
                    table_name,
                    global_secondary_indexes=None):
     """
     TODO@rohan
     """
     if global_secondary_indexes:
         operation_kwargs = {
             TABLE_NAME: table_name,
             GLOBAL_SECONDARY_INDEX_UPDATES: []
         }
         for index in global_secondary_indexes:
             operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES].append({
                 UPDATE: {
                     INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                     PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                 }
             })
         try:
             return self.dispatch(UPDATE_TABLE, operation_kwargs)
         except BOTOCORE_EXCEPTIONS as e:
             raise TableError("Failed to update table: {0}".format(e))
Example #18
    def add_field_index(self,
                        table_name,
                        fields=None,
                        global_secondary_indexes=None):
        """
        Will add fields and indexes that did not previously exist on the table.
        Only available for global secondary indexes.
        Fields or indexes or both can be added at the same time.
        """
        if not global_secondary_indexes:
            return
        operation_kwargs = {
            TABLE_NAME: table_name
        }

        if fields:
            attr_list = []
            for field in fields:
                attr_list.append({
                    ATTR_NAME: field.get(pythonic(ATTR_NAME)),
                    ATTR_TYPE: field.get(pythonic(ATTR_TYPE))
                })
            operation_kwargs[ATTR_DEFINITIONS] = attr_list

        global_secondary_indexes_list = []
        for index in global_secondary_indexes:
            global_secondary_indexes_list.append({
                CREATE: {
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                    PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                }
            })
        operation_kwargs[GLOBAL_SECONDARY_INDEX_UPDATES] = global_secondary_indexes_list

        try:
            return self.dispatch(UPDATE_TABLE, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise TableError("Failed to update table: {0}".format(e))
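
This method looks fork-specific (note the TODO above), so the argument shapes below are read off the code rather than taken from a documented API; the names and values are illustrative:

    connection.add_field_index(
        'Thread',
        fields=[
            # keys in pythonic form, attribute types as DynamoDB's short codes
            {'attribute_name': 'last_post_date', 'attribute_type': 'S'},
        ],
        global_secondary_indexes=[{
            'index_name': 'LastPostIndex',
            # the inner dicts are passed through unchanged, so they use DynamoDB's own key names
            'key_schema': [{'AttributeName': 'last_post_date', 'KeyType': 'HASH'}],
            'projection': {'ProjectionType': 'ALL'},
            'provisioned_throughput': {'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1},
        }],
    )
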
Example #19
    def query(self,
              table_name,
              hash_key,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              key_conditions=None,
              query_filters=None,
              conditional_operator=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              select=None):
        """
        Performs the Query operation and returns the result
        """
        operation_kwargs = {TABLE_NAME: table_name}
        if attributes_to_get:
            operation_kwargs[ATTRS_TO_GET] = attributes_to_get
        if consistent_read:
            operation_kwargs[CONSISTENT_READ] = True
        if exclusive_start_key:
            operation_kwargs.update(
                self.get_exclusive_start_key_map(table_name,
                                                 exclusive_start_key))
        if index_name:
            operation_kwargs[INDEX_NAME] = index_name
        if limit is not None:
            operation_kwargs[LIMIT] = limit
        if return_consumed_capacity:
            operation_kwargs.update(
                self.get_consumed_capacity_map(return_consumed_capacity))
        if query_filters:
            operation_kwargs.update(
                self.get_query_filter_map(table_name, query_filters))
        if conditional_operator:
            operation_kwargs.update(
                self.get_conditional_operator(conditional_operator))
        if select:
            if select.upper() not in SELECT_VALUES:
                raise ValueError("{0} must be one of {1}".format(
                    SELECT, SELECT_VALUES))
            operation_kwargs[SELECT] = str(select).upper()
        if scan_index_forward is not None:
            operation_kwargs[SCAN_INDEX_FORWARD] = scan_index_forward
        tbl = self.get_meta_table(table_name)
        if tbl is None:
            raise TableError("No such table: {0}".format(table_name))
        if index_name:
            hash_keyname = tbl.get_index_hash_keyname(index_name)
            if not hash_keyname:
                raise ValueError(
                    "No hash key attribute for index: {0}".format(index_name))
        else:
            hash_keyname = tbl.hash_keyname
        operation_kwargs[KEY_CONDITIONS] = {
            hash_keyname: {
                ATTR_VALUE_LIST: [
                    {self.get_attribute_type(table_name, hash_keyname): hash_key}
                ],
                COMPARISON_OPERATOR: EQ
            },
        }
        if key_conditions is not None:
            for key, condition in key_conditions.items():
                attr_type = self.get_attribute_type(table_name, key)
                operator = condition.get(COMPARISON_OPERATOR)
                if operator not in COMPARISON_OPERATOR_VALUES:
                    raise ValueError("{0} must be one of {1}".format(
                        COMPARISON_OPERATOR, COMPARISON_OPERATOR_VALUES))
                operation_kwargs[KEY_CONDITIONS][key] = {
                    ATTR_VALUE_LIST: [
                        {attr_type: self.parse_attribute(value)}
                        for value in condition.get(ATTR_VALUE_LIST)
                    ],
                    COMPARISON_OPERATOR: operator
                }

        try:
            return self.dispatch(QUERY, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise QueryError("Failed to query items: {0}".format(e))
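
For reference, a hypothetical call such as the one below would be translated into DynamoDB's legacy KeyConditions structure; the table, attribute names, and values are illustrative:

    connection.query(
        'Thread',
        'Amazon DynamoDB',        # hash key value, always compared with EQ
        key_conditions={
            'subject': {
                'ComparisonOperator': 'BEGINS_WITH',
                'AttributeValueList': ['Tagging'],
            },
        },
        limit=10,
    )
    # Roughly the request body that gets dispatched:
    # {'TableName': 'Thread',
    #  'Limit': 10,
    #  'KeyConditions': {
    #      'forum_name': {'AttributeValueList': [{'S': 'Amazon DynamoDB'}],
    #                     'ComparisonOperator': 'EQ'},
    #      'subject': {'AttributeValueList': [{'S': 'Tagging'}],
    #                  'ComparisonOperator': 'BEGINS_WITH'}}}
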
Example #20
    def create_table(
        cls,
        wait: bool = False,
        read_capacity_units: Optional[int] = None,
        write_capacity_units: Optional[int] = None,
        billing_mode: Optional[str] = None,
        ignore_update_ttl_errors: bool = False,
    ) -> Any:
        """
        Create the table for this model

        :param wait: If set, then this call will block until the table is ready for use
        :param read_capacity_units: Sets the read capacity units for this table
        :param write_capacity_units: Sets the write capacity units for this table
        :param billing_mode: Sets the billing mode, either provisioned (default) or on_demand, for this table
        :param ignore_update_ttl_errors: If set, errors raised while updating the table's TTL setting are ignored
        """
        if not cls.exists():
            schema = cls._get_schema()
            if hasattr(cls.Meta, 'read_capacity_units'):
                schema['read_capacity_units'] = cls.Meta.read_capacity_units
            if hasattr(cls.Meta, 'write_capacity_units'):
                schema['write_capacity_units'] = cls.Meta.write_capacity_units
            if hasattr(cls.Meta, 'stream_view_type'):
                schema['stream_specification'] = {
                    'stream_enabled': True,
                    'stream_view_type': cls.Meta.stream_view_type
                }
            if hasattr(cls.Meta, 'billing_mode'):
                schema['billing_mode'] = cls.Meta.billing_mode
            if read_capacity_units is not None:
                schema['read_capacity_units'] = read_capacity_units
            if write_capacity_units is not None:
                schema['write_capacity_units'] = write_capacity_units
            if billing_mode is not None:
                schema['billing_mode'] = billing_mode
            index_data = cls._get_indexes()
            schema['global_secondary_indexes'] = index_data.get(
                'global_secondary_indexes')
            schema['local_secondary_indexes'] = index_data.get(
                'local_secondary_indexes')
            index_attrs = index_data.get('attribute_definitions')
            attr_keys = [
                attr.get('attribute_name')
                for attr in schema.get('attribute_definitions')
            ]
            for attr in index_attrs:
                attr_name = attr.get('attribute_name')
                if attr_name not in attr_keys:
                    schema['attribute_definitions'].append(attr)
                    attr_keys.append(attr_name)
            cls._get_connection().create_table(**schema)
        if wait:
            while True:
                status = cls._get_connection().describe_table()
                if status:
                    data = status.get(TABLE_STATUS)
                    if data == ACTIVE:
                        break
                    else:
                        time.sleep(2)
                else:
                    raise TableError("No TableStatus returned for table")

        cls.update_ttl(ignore_update_ttl_errors)
Example #21
    def create_table(self,
                     table_name,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None):
        """
        Performs the CreateTable operation
        """
        operation_kwargs = {
            TABLE_NAME: table_name,
            PROVISIONED_THROUGHPUT: {
                READ_CAPACITY_UNITS: read_capacity_units,
                WRITE_CAPACITY_UNITS: write_capacity_units
            }
        }
        attrs_list = []
        if attribute_definitions is None:
            raise ValueError("attribute_definitions argument is required")
        for attr in attribute_definitions:
            attrs_list.append({
                ATTR_NAME: attr.get(pythonic(ATTR_NAME)),
                ATTR_TYPE: attr.get(pythonic(ATTR_TYPE))
            })
        operation_kwargs[ATTR_DEFINITIONS] = attrs_list

        if global_secondary_indexes:
            global_secondary_indexes_list = []
            for index in global_secondary_indexes:
                global_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                    PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                })
            operation_kwargs[GLOBAL_SECONDARY_INDEXES] = global_secondary_indexes_list

        if key_schema is None:
            raise ValueError("key_schema is required")
        key_schema_list = []
        for item in key_schema:
            key_schema_list.append({
                ATTR_NAME: item.get(pythonic(ATTR_NAME)),
                KEY_TYPE: str(item.get(pythonic(KEY_TYPE))).upper()
            })
        operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=lambda x: x.get(KEY_TYPE))

        local_secondary_indexes_list = []
        if local_secondary_indexes:
            for index in local_secondary_indexes:
                local_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                })
            operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list
        try:
            data = self.dispatch(CREATE_TABLE, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise TableError("Failed to create table: {0}".format(e))
        return data
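
A sketch of calling this connection-level create_table (the table and attribute names are illustrative); the argument dicts use the pythonic (snake_case) key names that the method reads back via pythonic(...):

    connection.create_table(
        'Thread',
        attribute_definitions=[
            {'attribute_name': 'forum_name', 'attribute_type': 'S'},
            {'attribute_name': 'subject', 'attribute_type': 'S'},
        ],
        key_schema=[
            {'attribute_name': 'forum_name', 'key_type': 'HASH'},
            {'attribute_name': 'subject', 'key_type': 'RANGE'},
        ],
        read_capacity_units=1,
        write_capacity_units=1,
    )
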
Example #22
    def query(self,
              table_name,
              hash_key,
              range_key_condition=None,
              filter_condition=None,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              select=None):
        """
        Performs the Query operation and returns the result
        """
        self._check_condition('range_key_condition', range_key_condition)
        self._check_condition('filter_condition', filter_condition)

        operation_kwargs = {TABLE_NAME: table_name}
        name_placeholders = {}
        expression_attribute_values = {}

        tbl = self.get_meta_table(table_name)
        if tbl is None:
            raise TableError("No such table: {}".format(table_name))
        if index_name:
            if not tbl.has_index_name(index_name):
                raise ValueError("Table {} has no index: {}".format(table_name, index_name))
            hash_keyname = tbl.get_index_hash_keyname(index_name)
            if not hash_keyname:
                raise ValueError("No hash key attribute for index: {}".format(index_name))
            range_keyname = tbl.get_index_range_keyname(index_name)
        else:
            hash_keyname = tbl.hash_keyname
            range_keyname = tbl.range_keyname

        hash_condition_value = {self.get_attribute_type(table_name, hash_keyname, hash_key): self.parse_attribute(hash_key)}
        key_condition = getattr(Path([hash_keyname]), '__eq__')(hash_condition_value)

        if range_key_condition is not None:
            if range_key_condition.is_valid_range_key_condition(range_keyname):
                key_condition = key_condition & range_key_condition
            elif filter_condition is None:
                # Try to gracefully handle the case where a user passed in a filter as a range key condition
                (filter_condition, range_key_condition) = (range_key_condition, None)
            else:
                raise ValueError("{} is not a valid range key condition".format(range_key_condition))

        operation_kwargs[KEY_CONDITION_EXPRESSION] = key_condition.serialize(
            name_placeholders, expression_attribute_values)
        if filter_condition is not None:
            filter_expression = filter_condition.serialize(name_placeholders, expression_attribute_values)
            # FilterExpression does not allow key attributes. Check for hash and range key name placeholders
            hash_key_placeholder = name_placeholders.get(hash_keyname)
            range_key_placeholder = range_keyname and name_placeholders.get(range_keyname)
            if (
                hash_key_placeholder in filter_expression or
                (range_key_placeholder and range_key_placeholder in filter_expression)
            ):
                raise ValueError("'filter_condition' cannot contain key attributes")
            operation_kwargs[FILTER_EXPRESSION] = filter_expression
        if attributes_to_get:
            projection_expression = create_projection_expression(attributes_to_get, name_placeholders)
            operation_kwargs[PROJECTION_EXPRESSION] = projection_expression
        if consistent_read:
            operation_kwargs[CONSISTENT_READ] = True
        if exclusive_start_key:
            operation_kwargs.update(self.get_exclusive_start_key_map(table_name, exclusive_start_key))
        if index_name:
            operation_kwargs[INDEX_NAME] = index_name
        if limit is not None:
            operation_kwargs[LIMIT] = limit
        if return_consumed_capacity:
            operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity))
        if select:
            if select.upper() not in SELECT_VALUES:
                raise ValueError("{} must be one of {}".format(SELECT, SELECT_VALUES))
            operation_kwargs[SELECT] = str(select).upper()
        if scan_index_forward is not None:
            operation_kwargs[SCAN_INDEX_FORWARD] = scan_index_forward
        if name_placeholders:
            operation_kwargs[EXPRESSION_ATTRIBUTE_NAMES] = self._reverse_dict(name_placeholders)
        if expression_attribute_values:
            operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values

        try:
            return self.dispatch(QUERY, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise QueryError("Failed to query items: {}".format(e), e)
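
A hedged sketch of calling the expression-based variant above; the Path import path and the attribute names are assumptions. Equality on the hash key is implied by the hash_key argument, while range and filter conditions are passed as condition objects:

    from pynamodb.expressions.operand import Path

    connection.query(
        'Thread',
        'Amazon DynamoDB',
        range_key_condition=(Path(['subject']) == 'Tagging tools'),
        limit=10,
    )
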
Example #23
    def create_table(self,
                     table_name,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None,
                     billing_mode=DEFAULT_BILLING_MODE):
        """
        Performs the CreateTable operation
        """
        operation_kwargs = {
            TABLE_NAME: table_name,
            BILLING_MODE: billing_mode,
            PROVISIONED_THROUGHPUT: {
                READ_CAPACITY_UNITS: read_capacity_units,
                WRITE_CAPACITY_UNITS: write_capacity_units,
            }
        }
        attrs_list = []
        if attribute_definitions is None:
            raise ValueError("attribute_definitions argument is required")
        for attr in attribute_definitions:
            attrs_list.append({
                ATTR_NAME: attr.get(pythonic(ATTR_NAME)),
                ATTR_TYPE: attr.get(pythonic(ATTR_TYPE))
            })
        operation_kwargs[ATTR_DEFINITIONS] = attrs_list

        if billing_mode not in AVAILABLE_BILLING_MODES:
            raise ValueError("incorrect value for billing_mode, available modes: {}".format(AVAILABLE_BILLING_MODES))
        if billing_mode == PAY_PER_REQUEST_BILLING_MODE:
            del operation_kwargs[PROVISIONED_THROUGHPUT]

        if global_secondary_indexes:
            global_secondary_indexes_list = []
            for index in global_secondary_indexes:
                global_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                    PROVISIONED_THROUGHPUT: index.get(pythonic(PROVISIONED_THROUGHPUT))
                })
            operation_kwargs[GLOBAL_SECONDARY_INDEXES] = global_secondary_indexes_list

        if key_schema is None:
            raise ValueError("key_schema is required")
        key_schema_list = []
        for item in key_schema:
            key_schema_list.append({
                ATTR_NAME: item.get(pythonic(ATTR_NAME)),
                KEY_TYPE: str(item.get(pythonic(KEY_TYPE))).upper()
            })
        operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=lambda x: x.get(KEY_TYPE))

        local_secondary_indexes_list = []
        if local_secondary_indexes:
            for index in local_secondary_indexes:
                local_secondary_indexes_list.append({
                    INDEX_NAME: index.get(pythonic(INDEX_NAME)),
                    KEY_SCHEMA: sorted(index.get(pythonic(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
                    PROJECTION: index.get(pythonic(PROJECTION)),
                })
            operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list

        if stream_specification:
            operation_kwargs[STREAM_SPECIFICATION] = {
                STREAM_ENABLED: stream_specification[pythonic(STREAM_ENABLED)],
                STREAM_VIEW_TYPE: stream_specification[pythonic(STREAM_VIEW_TYPE)]
            }

        try:
            data = self.dispatch(CREATE_TABLE, operation_kwargs)
        except BOTOCORE_EXCEPTIONS as e:
            raise TableError("Failed to create table: {}".format(e), e)
        return data
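
A sketch of creating a table in on-demand mode with a stream enabled (all values illustrative); when billing_mode is PAY_PER_REQUEST the ProvisionedThroughput block is removed, as the code above shows:

    connection.create_table(
        'Thread',
        attribute_definitions=[{'attribute_name': 'forum_name', 'attribute_type': 'S'}],
        key_schema=[{'attribute_name': 'forum_name', 'key_type': 'HASH'}],
        billing_mode='PAY_PER_REQUEST',
        stream_specification={
            'stream_enabled': True,
            'stream_view_type': 'NEW_IMAGE',
        },
    )
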
Example #24
    def query(self,
              table_name,
              hash_key,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              key_conditions=None,
              query_filters=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              select=None):
        """
        Performs the Query operation and returns the result
        """
        operation_kwargs = {pythonic(TABLE_NAME): table_name}
        if attributes_to_get:
            operation_kwargs[pythonic(ATTRS_TO_GET)] = attributes_to_get
        if consistent_read:
            operation_kwargs[pythonic(CONSISTENT_READ)] = True
        if exclusive_start_key:
            operation_kwargs.update(self.get_exclusive_start_key_map(table_name, exclusive_start_key))
        if index_name:
            operation_kwargs[pythonic(INDEX_NAME)] = index_name
        if limit is not None:
            operation_kwargs[pythonic(LIMIT)] = limit
        if return_consumed_capacity:
            operation_kwargs.update(self.get_consumed_capacity_map(return_consumed_capacity))
        if query_filters:
            operation_kwargs.update(self.get_query_filter_map(table_name, query_filters))
        if select:
            if select.upper() not in SELECT_VALUES:
                raise ValueError("{0} must be one of {1}".format(SELECT, SELECT_VALUES))
            operation_kwargs[pythonic(SELECT)] = str(select).upper()
        if scan_index_forward is not None:
            operation_kwargs[pythonic(SCAN_INDEX_FORWARD)] = scan_index_forward
        tbl = self.get_meta_table(table_name)
        if tbl is None:
            raise TableError("No such table: {0}".format(table_name))
        if index_name:
            hash_keyname = tbl.get_index_hash_keyname(index_name)
            if not hash_keyname:
                raise ValueError("No hash key attribute for index: {0}".format(index_name))
        else:
            hash_keyname = tbl.hash_keyname
        operation_kwargs[pythonic(KEY_CONDITIONS)] = {
            hash_keyname: {
                ATTR_VALUE_LIST: [
                    {
                        self.get_attribute_type(table_name, hash_keyname): hash_key,
                    }
                ],
                COMPARISON_OPERATOR: EQ
            },
        }
        # key_conditions = {'key': {'ComparisonOperator': 'EQ', 'AttributeValueList': ['value']}}
        if key_conditions:
            for key, condition in key_conditions.items():
                attr_type = self.get_attribute_type(table_name, key)
                operator = condition.get(COMPARISON_OPERATOR)
                if operator not in COMPARISON_OPERATOR_VALUES:
                    raise ValueError("{0} must be one of {1}".format(COMPARISON_OPERATOR, COMPARISON_OPERATOR_VALUES))
                operation_kwargs[pythonic(KEY_CONDITIONS)][key] = {
                    ATTR_VALUE_LIST: [
                        {
                            attr_type: self.parse_attribute(value)
                        } for value in condition.get(ATTR_VALUE_LIST)
                    ],
                    COMPARISON_OPERATOR: operator
                }

        response, data = self.dispatch(QUERY, operation_kwargs)
        if not response.ok:
            raise QueryError("Failed to query items: {0}".format(response.content))
        return data