class TableConnection(object):
    """
    A higher level abstraction over botocore
    """

    def __init__(self,
                 table_name,
                 region=None,
                 host=None,
                 session_cls=None,
                 request_timeout_seconds=None,
                 max_retry_attempts=None,
                 base_backoff_ms=None,
                 aws_access_key_id=None,
                 aws_secret_access_key=None,
                 dax_write_endpoints=None,
                 dax_read_endpoints=None):
        """
        :param table_name: name of the DynamoDB table every operation targets
        :param dax_write_endpoints: optional list of DAX write endpoints
            (a fresh empty list is used when omitted)
        :param dax_read_endpoints: optional list of DAX read endpoints
            (a fresh empty list is used when omitted)

        The remaining parameters are forwarded unchanged to ``Connection``.
        """
        # BUG FIX: the DAX endpoint parameters previously used a mutable
        # default argument (``[]``).  Default argument values are evaluated
        # once at function definition time, so that single list would be
        # shared by every instance created without explicit endpoints.
        # Default to None and create a fresh list per call instead.
        if dax_write_endpoints is None:
            dax_write_endpoints = []
        if dax_read_endpoints is None:
            dax_read_endpoints = []
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        self.connection = Connection(
            region=region,
            host=host,
            session_cls=session_cls,
            request_timeout_seconds=request_timeout_seconds,
            max_retry_attempts=max_retry_attempts,
            base_backoff_ms=base_backoff_ms,
            dax_write_endpoints=dax_write_endpoints,
            dax_read_endpoints=dax_read_endpoints)
        # Explicit credentials (only when both parts are supplied) override
        # whatever the session would otherwise resolve.
        if aws_access_key_id and aws_secret_access_key:
            self.connection.session.set_credentials(aws_access_key_id,
                                                    aws_secret_access_key)

    def get_meta_table(self, refresh=False):
        """
        Returns a MetaTable
        """
        return self.connection.get_meta_table(self.table_name, refresh=refresh)

    def delete_item(self, hash_key,
                    range_key=None,
                    condition=None,
                    expected=None,
                    conditional_operator=None,
                    return_values=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None):
        """
        Performs the DeleteItem operation and returns the result
        """
        return self.connection.delete_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            condition=condition,
            expected=expected,
            conditional_operator=conditional_operator,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def update_item(self,
                    hash_key,
                    range_key=None,
                    actions=None,
                    attribute_updates=None,
                    condition=None,
                    expected=None,
                    conditional_operator=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None,
                    return_values=None):
        """
        Performs the UpdateItem operation
        """
        return self.connection.update_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            actions=actions,
            attribute_updates=attribute_updates,
            condition=condition,
            expected=expected,
            conditional_operator=conditional_operator,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics,
            return_values=return_values)

    def put_item(self, hash_key,
                 range_key=None,
                 attributes=None,
                 condition=None,
                 expected=None,
                 conditional_operator=None,
                 return_values=None,
                 return_consumed_capacity=None,
                 return_item_collection_metrics=None):
        """
        Performs the PutItem operation and returns the result
        """
        return self.connection.put_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            attributes=attributes,
            condition=condition,
            expected=expected,
            conditional_operator=conditional_operator,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_write_item(self,
                         put_items=None,
                         delete_items=None,
                         return_consumed_capacity=None,
                         return_item_collection_metrics=None):
        """
        Performs the batch_write_item operation
        """
        return self.connection.batch_write_item(
            self.table_name,
            put_items=put_items,
            delete_items=delete_items,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_get_item(self, keys, consistent_read=None,
                       return_consumed_capacity=None,
                       attributes_to_get=None):
        """
        Performs the batch get item operation
        """
        return self.connection.batch_get_item(
            self.table_name,
            keys,
            consistent_read=consistent_read,
            return_consumed_capacity=return_consumed_capacity,
            attributes_to_get=attributes_to_get)

    def get_item(self, hash_key, range_key=None, consistent_read=False,
                 attributes_to_get=None):
        """
        Performs the GetItem operation and returns the result
        """
        return self.connection.get_item(self.table_name,
                                        hash_key,
                                        range_key=range_key,
                                        consistent_read=consistent_read,
                                        attributes_to_get=attributes_to_get)

    def rate_limited_scan(
            self,
            filter_condition=None,
            attributes_to_get=None,
            page_size=None,
            limit=None,
            conditional_operator=None,
            scan_filter=None,
            segment=None,
            total_segments=None,
            exclusive_start_key=None,
            timeout_seconds=None,
            read_capacity_to_consume_per_second=None,
            allow_rate_limited_scan_without_consumed_capacity=None,
            max_sleep_between_retry=None,
            max_consecutive_exceptions=None,
            consistent_read=None,
            index_name=None):
        """
        Performs the scan operation with rate limited
        """
        return self.connection.rate_limited_scan(
            self.table_name,
            filter_condition=filter_condition,
            attributes_to_get=attributes_to_get,
            page_size=page_size,
            limit=limit,
            conditional_operator=conditional_operator,
            scan_filter=scan_filter,
            segment=segment,
            total_segments=total_segments,
            exclusive_start_key=exclusive_start_key,
            timeout_seconds=timeout_seconds,
            read_capacity_to_consume_per_second=read_capacity_to_consume_per_second,
            allow_rate_limited_scan_without_consumed_capacity=allow_rate_limited_scan_without_consumed_capacity,
            max_sleep_between_retry=max_sleep_between_retry,
            max_consecutive_exceptions=max_consecutive_exceptions,
            consistent_read=consistent_read,
            index_name=index_name)

    def scan(self,
             filter_condition=None,
             attributes_to_get=None,
             limit=None,
             conditional_operator=None,
             scan_filter=None,
             return_consumed_capacity=None,
             segment=None,
             total_segments=None,
             exclusive_start_key=None,
             consistent_read=None,
             index_name=None):
        """
        Performs the scan operation
        """
        return self.connection.scan(
            self.table_name,
            filter_condition=filter_condition,
            attributes_to_get=attributes_to_get,
            limit=limit,
            conditional_operator=conditional_operator,
            scan_filter=scan_filter,
            return_consumed_capacity=return_consumed_capacity,
            segment=segment,
            total_segments=total_segments,
            exclusive_start_key=exclusive_start_key,
            consistent_read=consistent_read,
            index_name=index_name)

    def query(self,
              hash_key,
              range_key_condition=None,
              filter_condition=None,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              key_conditions=None,
              query_filters=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              conditional_operator=None,
              select=None):
        """
        Performs the Query operation and returns the result
        """
        return self.connection.query(
            self.table_name,
            hash_key,
            range_key_condition=range_key_condition,
            filter_condition=filter_condition,
            attributes_to_get=attributes_to_get,
            consistent_read=consistent_read,
            exclusive_start_key=exclusive_start_key,
            index_name=index_name,
            key_conditions=key_conditions,
            query_filters=query_filters,
            limit=limit,
            return_consumed_capacity=return_consumed_capacity,
            scan_index_forward=scan_index_forward,
            conditional_operator=conditional_operator,
            select=select)

    def describe_table(self):
        """
        Performs the DescribeTable operation and returns the result
        """
        return self.connection.describe_table(self.table_name)

    def delete_table(self):
        """
        Performs the DeleteTable operation and returns the result
        """
        return self.connection.delete_table(self.table_name)

    def update_table(self,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_index_updates=None):
        """
        Performs the UpdateTable operation and returns the result
        """
        return self.connection.update_table(
            self.table_name,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_index_updates=global_secondary_index_updates)

    def create_table(self,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None):
        """
        Performs the CreateTable operation and returns the result
        """
        return self.connection.create_table(
            self.table_name,
            attribute_definitions=attribute_definitions,
            key_schema=key_schema,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_indexes=global_secondary_indexes,
            local_secondary_indexes=local_secondary_indexes,
            stream_specification=stream_specification)
class TableConnection(object):
    """
    Per-table facade over botocore: every method forwards to the
    underlying Connection with this instance's table name prepended.
    """

    def __init__(self, table_name, region=None, host=None, session_cls=None,
                 request_timeout_seconds=None, max_retry_attempts=None,
                 base_backoff_ms=None):
        # Key attribute names are discovered lazily elsewhere; unknown here.
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        conn_kwargs = {
            'region': region,
            'host': host,
            'session_cls': session_cls,
            'request_timeout_seconds': request_timeout_seconds,
            'max_retry_attempts': max_retry_attempts,
            'base_backoff_ms': base_backoff_ms,
        }
        self.connection = Connection(**conn_kwargs)

    def delete_item(self, hash_key, range_key=None, condition=None,
                    expected=None, conditional_operator=None,
                    return_values=None, return_consumed_capacity=None,
                    return_item_collection_metrics=None):
        """Perform the DeleteItem operation and return the result."""
        params = {
            'range_key': range_key,
            'condition': condition,
            'expected': expected,
            'conditional_operator': conditional_operator,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.delete_item(self.table_name, hash_key, **params)

    def update_item(self, hash_key, range_key=None, attribute_updates=None,
                    condition=None, expected=None, conditional_operator=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None, return_values=None):
        """Perform the UpdateItem operation."""
        params = {
            'range_key': range_key,
            'attribute_updates': attribute_updates,
            'condition': condition,
            'expected': expected,
            'conditional_operator': conditional_operator,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
            'return_values': return_values,
        }
        return self.connection.update_item(self.table_name, hash_key, **params)

    def put_item(self, hash_key, range_key=None, attributes=None,
                 condition=None, expected=None, conditional_operator=None,
                 return_values=None, return_consumed_capacity=None,
                 return_item_collection_metrics=None):
        """Perform the PutItem operation and return the result."""
        params = {
            'range_key': range_key,
            'attributes': attributes,
            'condition': condition,
            'expected': expected,
            'conditional_operator': conditional_operator,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.put_item(self.table_name, hash_key, **params)

    def batch_write_item(self, put_items=None, delete_items=None,
                         return_consumed_capacity=None,
                         return_item_collection_metrics=None):
        """Perform the batch_write_item operation."""
        params = {
            'put_items': put_items,
            'delete_items': delete_items,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.batch_write_item(self.table_name, **params)

    def batch_get_item(self, keys, consistent_read=None,
                       return_consumed_capacity=None, attributes_to_get=None):
        """Perform the batch get item operation."""
        params = {
            'consistent_read': consistent_read,
            'return_consumed_capacity': return_consumed_capacity,
            'attributes_to_get': attributes_to_get,
        }
        return self.connection.batch_get_item(self.table_name, keys, **params)

    def get_item(self, hash_key, range_key=None, consistent_read=False,
                 attributes_to_get=None):
        """Perform the GetItem operation and return the result."""
        params = {
            'range_key': range_key,
            'consistent_read': consistent_read,
            'attributes_to_get': attributes_to_get,
        }
        return self.connection.get_item(self.table_name, hash_key, **params)

    def rate_limited_scan(self, filter_condition=None, attributes_to_get=None,
                          page_size=None, limit=None,
                          conditional_operator=None, scan_filter=None,
                          segment=None, total_segments=None,
                          exclusive_start_key=None, timeout_seconds=None,
                          read_capacity_to_consume_per_second=None,
                          allow_rate_limited_scan_without_consumed_capacity=None,
                          max_sleep_between_retry=None,
                          max_consecutive_exceptions=None,
                          consistent_read=None):
        """Perform the scan operation with rate limiting."""
        params = {
            'filter_condition': filter_condition,
            'attributes_to_get': attributes_to_get,
            'page_size': page_size,
            'limit': limit,
            'conditional_operator': conditional_operator,
            'scan_filter': scan_filter,
            'segment': segment,
            'total_segments': total_segments,
            'exclusive_start_key': exclusive_start_key,
            'timeout_seconds': timeout_seconds,
            'read_capacity_to_consume_per_second': read_capacity_to_consume_per_second,
            'allow_rate_limited_scan_without_consumed_capacity': allow_rate_limited_scan_without_consumed_capacity,
            'max_sleep_between_retry': max_sleep_between_retry,
            'max_consecutive_exceptions': max_consecutive_exceptions,
            'consistent_read': consistent_read,
        }
        return self.connection.rate_limited_scan(self.table_name, **params)

    def scan(self, filter_condition=None, attributes_to_get=None, limit=None,
             conditional_operator=None, scan_filter=None,
             return_consumed_capacity=None, segment=None, total_segments=None,
             exclusive_start_key=None, consistent_read=None):
        """Perform the scan operation."""
        params = {
            'filter_condition': filter_condition,
            'attributes_to_get': attributes_to_get,
            'limit': limit,
            'conditional_operator': conditional_operator,
            'scan_filter': scan_filter,
            'return_consumed_capacity': return_consumed_capacity,
            'segment': segment,
            'total_segments': total_segments,
            'exclusive_start_key': exclusive_start_key,
            'consistent_read': consistent_read,
        }
        return self.connection.scan(self.table_name, **params)

    def query(self, hash_key, range_key_condition=None, filter_condition=None,
              attributes_to_get=None, consistent_read=False,
              exclusive_start_key=None, index_name=None, key_conditions=None,
              query_filters=None, limit=None, return_consumed_capacity=None,
              scan_index_forward=None, conditional_operator=None,
              select=None):
        """Perform the Query operation and return the result."""
        params = {
            'range_key_condition': range_key_condition,
            'filter_condition': filter_condition,
            'attributes_to_get': attributes_to_get,
            'consistent_read': consistent_read,
            'exclusive_start_key': exclusive_start_key,
            'index_name': index_name,
            'key_conditions': key_conditions,
            'query_filters': query_filters,
            'limit': limit,
            'return_consumed_capacity': return_consumed_capacity,
            'scan_index_forward': scan_index_forward,
            'conditional_operator': conditional_operator,
            'select': select,
        }
        return self.connection.query(self.table_name, hash_key, **params)

    def describe_table(self):
        """Perform the DescribeTable operation and return the result."""
        return self.connection.describe_table(self.table_name)

    def delete_table(self):
        """Perform the DeleteTable operation and return the result."""
        return self.connection.delete_table(self.table_name)

    def update_table(self, read_capacity_units=None, write_capacity_units=None,
                     global_secondary_index_updates=None):
        """Perform the UpdateTable operation and return the result."""
        params = {
            'read_capacity_units': read_capacity_units,
            'write_capacity_units': write_capacity_units,
            'global_secondary_index_updates': global_secondary_index_updates,
        }
        return self.connection.update_table(self.table_name, **params)

    def create_table(self, attribute_definitions=None, key_schema=None,
                     read_capacity_units=None, write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None, stream_specification=None):
        """Perform the CreateTable operation and return the result."""
        params = {
            'attribute_definitions': attribute_definitions,
            'key_schema': key_schema,
            'read_capacity_units': read_capacity_units,
            'write_capacity_units': write_capacity_units,
            'global_secondary_indexes': global_secondary_indexes,
            'local_secondary_indexes': local_secondary_indexes,
            'stream_specification': stream_specification,
        }
        return self.connection.create_table(self.table_name, **params)
class TableConnection:
    """
    A higher level abstraction over botocore, bound to a single table.

    Each method simply delegates to the shared ``Connection``, supplying
    ``self.table_name`` as the first argument.
    """

    def __init__(
        self,
        table_name: str,
        region: Optional[str] = None,
        host: Optional[str] = None,
        connect_timeout_seconds: Optional[float] = None,
        read_timeout_seconds: Optional[float] = None,
        max_retry_attempts: Optional[int] = None,
        base_backoff_ms: Optional[int] = None,
        max_pool_connections: Optional[int] = None,
        extra_headers: Optional[Mapping[str, str]] = None,
        aws_access_key_id: Optional[str] = None,
        aws_secret_access_key: Optional[str] = None,
        aws_session_token: Optional[str] = None,
        dax_write_endpoints: Optional[List[str]] = None,
        dax_read_endpoints: Optional[List[str]] = None,
    ) -> None:
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        self.connection = Connection(
            region=region,
            host=host,
            connect_timeout_seconds=connect_timeout_seconds,
            read_timeout_seconds=read_timeout_seconds,
            max_retry_attempts=max_retry_attempts,
            base_backoff_ms=base_backoff_ms,
            max_pool_connections=max_pool_connections,
            extra_headers=extra_headers,
            # Falsy endpoint values (None or empty) become fresh empty lists.
            dax_write_endpoints=dax_write_endpoints or [],
            dax_read_endpoints=dax_read_endpoints or [])
        # Explicit credentials override the session's default chain.
        if aws_access_key_id and aws_secret_access_key:
            self.connection.session.set_credentials(
                aws_access_key_id, aws_secret_access_key, aws_session_token)

    def get_meta_table(self, refresh: bool = False) -> MetaTable:
        """Return the (optionally refreshed) MetaTable for this table."""
        return self.connection.get_meta_table(self.table_name, refresh=refresh)

    def get_operation_kwargs(
        self,
        hash_key: str,
        range_key: Optional[str] = None,
        key: str = KEY,
        attributes: Optional[Any] = None,
        attributes_to_get: Optional[Any] = None,
        actions: Optional[Sequence[Action]] = None,
        condition: Optional[Condition] = None,
        consistent_read: Optional[bool] = None,
        return_values: Optional[str] = None,
        return_consumed_capacity: Optional[str] = None,
        return_item_collection_metrics: Optional[str] = None,
        return_values_on_condition_failure: Optional[str] = None,
    ) -> Dict:
        """Build the low-level operation kwargs for this table."""
        params = {
            'range_key': range_key,
            'key': key,
            'attributes': attributes,
            'attributes_to_get': attributes_to_get,
            'actions': actions,
            'condition': condition,
            'consistent_read': consistent_read,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
            'return_values_on_condition_failure': return_values_on_condition_failure,
        }
        return self.connection.get_operation_kwargs(
            self.table_name, hash_key, **params)

    def delete_item(
        self,
        hash_key: str,
        range_key: Optional[str] = None,
        condition: Optional[Condition] = None,
        return_values: Optional[str] = None,
        return_consumed_capacity: Optional[str] = None,
        return_item_collection_metrics: Optional[str] = None,
    ) -> Dict:
        """Perform the DeleteItem operation and return the result."""
        params = {
            'range_key': range_key,
            'condition': condition,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.delete_item(self.table_name, hash_key, **params)

    def update_item(
        self,
        hash_key: str,
        range_key: Optional[str] = None,
        actions: Optional[Sequence[Action]] = None,
        condition: Optional[Condition] = None,
        return_consumed_capacity: Optional[str] = None,
        return_item_collection_metrics: Optional[str] = None,
        return_values: Optional[str] = None,
    ) -> Dict:
        """Perform the UpdateItem operation."""
        params = {
            'range_key': range_key,
            'actions': actions,
            'condition': condition,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
            'return_values': return_values,
        }
        return self.connection.update_item(self.table_name, hash_key, **params)

    def put_item(
        self,
        hash_key: str,
        range_key: Optional[str] = None,
        attributes: Optional[Any] = None,
        condition: Optional[Condition] = None,
        return_values: Optional[str] = None,
        return_consumed_capacity: Optional[str] = None,
        return_item_collection_metrics: Optional[str] = None,
    ) -> Dict:
        """Perform the PutItem operation and return the result."""
        params = {
            'range_key': range_key,
            'attributes': attributes,
            'condition': condition,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.put_item(self.table_name, hash_key, **params)

    def batch_write_item(
        self,
        put_items: Optional[Any] = None,
        delete_items: Optional[Any] = None,
        return_consumed_capacity: Optional[str] = None,
        return_item_collection_metrics: Optional[str] = None,
    ) -> Dict:
        """Perform the batch_write_item operation."""
        params = {
            'put_items': put_items,
            'delete_items': delete_items,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.batch_write_item(self.table_name, **params)

    def batch_get_item(
        self,
        keys: Sequence[str],
        consistent_read: Optional[bool] = None,
        return_consumed_capacity: Optional[str] = None,
        attributes_to_get: Optional[Any] = None,
    ) -> Dict:
        """Perform the batch get item operation."""
        params = {
            'consistent_read': consistent_read,
            'return_consumed_capacity': return_consumed_capacity,
            'attributes_to_get': attributes_to_get,
        }
        return self.connection.batch_get_item(self.table_name, keys, **params)

    def get_item(
        self,
        hash_key: str,
        range_key: Optional[str] = None,
        consistent_read: bool = False,
        attributes_to_get: Optional[Any] = None,
    ) -> Dict:
        """Perform the GetItem operation and return the result."""
        params = {
            'range_key': range_key,
            'consistent_read': consistent_read,
            'attributes_to_get': attributes_to_get,
        }
        return self.connection.get_item(self.table_name, hash_key, **params)

    def scan(
        self,
        filter_condition: Optional[Any] = None,
        attributes_to_get: Optional[Any] = None,
        limit: Optional[int] = None,
        return_consumed_capacity: Optional[str] = None,
        segment: Optional[int] = None,
        total_segments: Optional[int] = None,
        exclusive_start_key: Optional[str] = None,
        consistent_read: Optional[bool] = None,
        index_name: Optional[str] = None,
    ) -> Dict:
        """Perform the scan operation."""
        params = {
            'filter_condition': filter_condition,
            'attributes_to_get': attributes_to_get,
            'limit': limit,
            'return_consumed_capacity': return_consumed_capacity,
            'segment': segment,
            'total_segments': total_segments,
            'exclusive_start_key': exclusive_start_key,
            'consistent_read': consistent_read,
            'index_name': index_name,
        }
        return self.connection.scan(self.table_name, **params)

    def query(
        self,
        hash_key: str,
        range_key_condition: Optional[Condition] = None,
        filter_condition: Optional[Any] = None,
        attributes_to_get: Optional[Any] = None,
        consistent_read: bool = False,
        exclusive_start_key: Optional[Any] = None,
        index_name: Optional[str] = None,
        limit: Optional[int] = None,
        return_consumed_capacity: Optional[str] = None,
        scan_index_forward: Optional[bool] = None,
        select: Optional[str] = None,
    ) -> Dict:
        """Perform the Query operation and return the result."""
        params = {
            'range_key_condition': range_key_condition,
            'filter_condition': filter_condition,
            'attributes_to_get': attributes_to_get,
            'consistent_read': consistent_read,
            'exclusive_start_key': exclusive_start_key,
            'index_name': index_name,
            'limit': limit,
            'return_consumed_capacity': return_consumed_capacity,
            'scan_index_forward': scan_index_forward,
            'select': select,
        }
        return self.connection.query(self.table_name, hash_key, **params)

    def describe_table(self) -> Dict:
        """Perform the DescribeTable operation and return the result."""
        return self.connection.describe_table(self.table_name)

    def delete_table(self) -> Dict:
        """Perform the DeleteTable operation and return the result."""
        return self.connection.delete_table(self.table_name)

    def update_time_to_live(self, ttl_attr_name: str) -> Dict:
        """Perform the UpdateTimeToLive operation and return the result."""
        return self.connection.update_time_to_live(
            self.table_name, ttl_attr_name)

    def update_table(
        self,
        read_capacity_units: Optional[int] = None,
        write_capacity_units: Optional[int] = None,
        global_secondary_index_updates: Optional[Any] = None,
    ) -> Dict:
        """Perform the UpdateTable operation and return the result."""
        params = {
            'read_capacity_units': read_capacity_units,
            'write_capacity_units': write_capacity_units,
            'global_secondary_index_updates': global_secondary_index_updates,
        }
        return self.connection.update_table(self.table_name, **params)

    def create_table(
        self,
        attribute_definitions: Optional[Any] = None,
        key_schema: Optional[Any] = None,
        read_capacity_units: Optional[int] = None,
        write_capacity_units: Optional[int] = None,
        global_secondary_indexes: Optional[Any] = None,
        local_secondary_indexes: Optional[Any] = None,
        stream_specification: Optional[Dict] = None,
        billing_mode: str = DEFAULT_BILLING_MODE,
    ) -> Dict:
        """Perform the CreateTable operation and return the result."""
        params = {
            'attribute_definitions': attribute_definitions,
            'key_schema': key_schema,
            'read_capacity_units': read_capacity_units,
            'write_capacity_units': write_capacity_units,
            'global_secondary_indexes': global_secondary_indexes,
            'local_secondary_indexes': local_secondary_indexes,
            'stream_specification': stream_specification,
            'billing_mode': billing_mode,
        }
        return self.connection.create_table(self.table_name, **params)
class TableConnection(object):
    """
    Per-table facade over botocore.  Each method forwards its arguments
    to the shared Connection, prepending this instance's table name.
    """

    def __init__(self, table_name, region=None, host=None,
                 aws_access_key_id=None, aws_secret_access_key=None):
        # Key attribute names are discovered lazily elsewhere; unknown here.
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        self.connection = Connection(
            region=region,
            host=host,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key)

    def delete_item(self, hash_key, range_key=None, expected=None,
                    conditional_operator=None, return_values=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None):
        """Perform the DeleteItem operation and return the result."""
        params = {
            'range_key': range_key,
            'expected': expected,
            'conditional_operator': conditional_operator,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.delete_item(self.table_name, hash_key, **params)

    def update_item(self, hash_key, range_key=None, attribute_updates=None,
                    expected=None, conditional_operator=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None, return_values=None):
        """Perform the UpdateItem operation."""
        params = {
            'range_key': range_key,
            'attribute_updates': attribute_updates,
            'expected': expected,
            'conditional_operator': conditional_operator,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
            'return_values': return_values,
        }
        return self.connection.update_item(self.table_name, hash_key, **params)

    def put_item(self, hash_key, range_key=None, attributes=None,
                 expected=None, conditional_operator=None, return_values=None,
                 return_consumed_capacity=None,
                 return_item_collection_metrics=None):
        """Perform the PutItem operation and return the result."""
        params = {
            'range_key': range_key,
            'attributes': attributes,
            'expected': expected,
            'conditional_operator': conditional_operator,
            'return_values': return_values,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.put_item(self.table_name, hash_key, **params)

    def batch_write_item(self, put_items=None, delete_items=None,
                         return_consumed_capacity=None,
                         return_item_collection_metrics=None):
        """Perform the batch_write_item operation."""
        params = {
            'put_items': put_items,
            'delete_items': delete_items,
            'return_consumed_capacity': return_consumed_capacity,
            'return_item_collection_metrics': return_item_collection_metrics,
        }
        return self.connection.batch_write_item(self.table_name, **params)

    def batch_get_item(self, keys, consistent_read=None,
                       return_consumed_capacity=None, attributes_to_get=None):
        """Perform the batch get item operation."""
        params = {
            'consistent_read': consistent_read,
            'return_consumed_capacity': return_consumed_capacity,
            'attributes_to_get': attributes_to_get,
        }
        return self.connection.batch_get_item(self.table_name, keys, **params)

    def get_item(self, hash_key, range_key=None, consistent_read=False,
                 attributes_to_get=None):
        """Perform the GetItem operation and return the result."""
        params = {
            'range_key': range_key,
            'consistent_read': consistent_read,
            'attributes_to_get': attributes_to_get,
        }
        return self.connection.get_item(self.table_name, hash_key, **params)

    def scan(self, attributes_to_get=None, limit=None,
             conditional_operator=None, scan_filter=None,
             return_consumed_capacity=None, segment=None, total_segments=None,
             exclusive_start_key=None):
        """Perform the scan operation."""
        params = {
            'attributes_to_get': attributes_to_get,
            'limit': limit,
            'conditional_operator': conditional_operator,
            'scan_filter': scan_filter,
            'return_consumed_capacity': return_consumed_capacity,
            'segment': segment,
            'total_segments': total_segments,
            'exclusive_start_key': exclusive_start_key,
        }
        return self.connection.scan(self.table_name, **params)

    def query(self, hash_key, attributes_to_get=None, consistent_read=False,
              exclusive_start_key=None, index_name=None, key_conditions=None,
              query_filters=None, limit=None, return_consumed_capacity=None,
              scan_index_forward=None, conditional_operator=None,
              select=None):
        """Perform the Query operation and return the result."""
        params = {
            'attributes_to_get': attributes_to_get,
            'consistent_read': consistent_read,
            'exclusive_start_key': exclusive_start_key,
            'index_name': index_name,
            'key_conditions': key_conditions,
            'query_filters': query_filters,
            'limit': limit,
            'return_consumed_capacity': return_consumed_capacity,
            'scan_index_forward': scan_index_forward,
            'conditional_operator': conditional_operator,
            'select': select,
        }
        return self.connection.query(self.table_name, hash_key, **params)

    def describe_table(self):
        """Perform the DescribeTable operation and return the result."""
        return self.connection.describe_table(self.table_name)

    def delete_table(self):
        """Perform the DeleteTable operation and return the result."""
        return self.connection.delete_table(self.table_name)

    def update_table(self, read_capacity_units=None, write_capacity_units=None,
                     global_secondary_index_updates=None):
        """
        Perform the UpdateTable operation and return the result.

        Has limits with update:
        - Will not be able to create or delete an index.
        - May fail if too many operations are tried at the same time.

        TODO@rohan - The update operations here do not account for the fact
        that DynamoDB allows only one update per update operation.
        https://botocore.readthedocs.org/en/latest/reference/services/dynamodb.html#DynamoDB.Client.update_table
        http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTable.html
        """
        params = {
            'read_capacity_units': read_capacity_units,
            'write_capacity_units': write_capacity_units,
            'global_secondary_index_updates': global_secondary_index_updates,
        }
        return self.connection.update_table(self.table_name, **params)

    def add_field_index(self, fields=None, global_secondary_indexes=None):
        """
        Add fields and indexes that did not previously exist on the table.

        Only available for global secondary indexes.  Fields or indexes or
        both can be added at the same time.
        """
        return self.connection.add_field_index(
            self.table_name,
            fields=fields,
            global_secondary_indexes=global_secondary_indexes)

    def update_indexes(self, global_secondary_indexes=None):
        """TODO@rohan"""
        return self.connection.update_indexes(
            self.table_name,
            global_secondary_indexes=global_secondary_indexes)

    def delete_indexes(self, global_secondary_indexes=None):
        """TODO@rohan"""
        return self.connection.delete_indexes(
            self.table_name,
            global_secondary_indexes=global_secondary_indexes)

    def create_table(self, attribute_definitions=None, key_schema=None,
                     read_capacity_units=None, write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None, stream_specification=None):
        """Perform the CreateTable operation and return the result."""
        params = {
            'attribute_definitions': attribute_definitions,
            'key_schema': key_schema,
            'read_capacity_units': read_capacity_units,
            'write_capacity_units': write_capacity_units,
            'global_secondary_indexes': global_secondary_indexes,
            'local_secondary_indexes': local_secondary_indexes,
            'stream_specification': stream_specification,
        }
        return self.connection.create_table(self.table_name, **params)
class TableConnection(object):
    """A per-table facade over botocore.

    Every operation forwards to the shared ``Connection`` with
    ``self.table_name`` bound as the first argument.
    """

    def __init__(self,
                 table_name,
                 region=None,
                 host=None,
                 connect_timeout_seconds=None,
                 read_timeout_seconds=None,
                 max_retry_attempts=None,
                 base_backoff_ms=None,
                 max_pool_connections=None,
                 extra_headers=None,
                 aws_access_key_id=None,
                 aws_secret_access_key=None):
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        self.connection = Connection(
            region=region,
            host=host,
            connect_timeout_seconds=connect_timeout_seconds,
            read_timeout_seconds=read_timeout_seconds,
            max_retry_attempts=max_retry_attempts,
            base_backoff_ms=base_backoff_ms,
            max_pool_connections=max_pool_connections,
            extra_headers=extra_headers)
        # Install static credentials only when both halves were supplied.
        if aws_access_key_id and aws_secret_access_key:
            self.connection.session.set_credentials(
                aws_access_key_id, aws_secret_access_key)

    def get_meta_table(self, refresh=False):
        """Return a MetaTable describing this table."""
        return self.connection.get_meta_table(self.table_name, refresh=refresh)

    def delete_item(self,
                    hash_key,
                    range_key=None,
                    condition=None,
                    return_values=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None):
        """Run the DeleteItem operation and return the result."""
        return self.connection.delete_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            condition=condition,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def update_item(self,
                    hash_key,
                    range_key=None,
                    actions=None,
                    condition=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None,
                    return_values=None):
        """Run the UpdateItem operation and return the result."""
        return self.connection.update_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            actions=actions,
            condition=condition,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics,
            return_values=return_values)

    def put_item(self,
                 hash_key,
                 range_key=None,
                 attributes=None,
                 condition=None,
                 return_values=None,
                 return_consumed_capacity=None,
                 return_item_collection_metrics=None):
        """Run the PutItem operation and return the result."""
        return self.connection.put_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            attributes=attributes,
            condition=condition,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_write_item(self,
                         put_items=None,
                         delete_items=None,
                         return_consumed_capacity=None,
                         return_item_collection_metrics=None):
        """Run the BatchWriteItem operation and return the result."""
        return self.connection.batch_write_item(
            self.table_name,
            put_items=put_items,
            delete_items=delete_items,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_get_item(self,
                       keys,
                       consistent_read=None,
                       return_consumed_capacity=None,
                       attributes_to_get=None):
        """Run the BatchGetItem operation and return the result."""
        return self.connection.batch_get_item(
            self.table_name,
            keys,
            consistent_read=consistent_read,
            return_consumed_capacity=return_consumed_capacity,
            attributes_to_get=attributes_to_get)

    def get_item(self,
                 hash_key,
                 range_key=None,
                 consistent_read=False,
                 attributes_to_get=None):
        """Run the GetItem operation and return the result."""
        return self.connection.get_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            consistent_read=consistent_read,
            attributes_to_get=attributes_to_get)

    def scan(self,
             filter_condition=None,
             attributes_to_get=None,
             limit=None,
             return_consumed_capacity=None,
             segment=None,
             total_segments=None,
             exclusive_start_key=None,
             consistent_read=None,
             index_name=None):
        """Run the Scan operation and return the result."""
        return self.connection.scan(
            self.table_name,
            filter_condition=filter_condition,
            attributes_to_get=attributes_to_get,
            limit=limit,
            return_consumed_capacity=return_consumed_capacity,
            segment=segment,
            total_segments=total_segments,
            exclusive_start_key=exclusive_start_key,
            consistent_read=consistent_read,
            index_name=index_name)

    def query(self,
              hash_key,
              range_key_condition=None,
              filter_condition=None,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              select=None):
        """Run the Query operation and return the result."""
        return self.connection.query(
            self.table_name,
            hash_key,
            range_key_condition=range_key_condition,
            filter_condition=filter_condition,
            attributes_to_get=attributes_to_get,
            consistent_read=consistent_read,
            exclusive_start_key=exclusive_start_key,
            index_name=index_name,
            limit=limit,
            return_consumed_capacity=return_consumed_capacity,
            scan_index_forward=scan_index_forward,
            select=select)

    def describe_table(self):
        """Run the DescribeTable operation and return the result."""
        return self.connection.describe_table(self.table_name)

    def delete_table(self):
        """Run the DeleteTable operation and return the result."""
        return self.connection.delete_table(self.table_name)

    def update_table(self,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_index_updates=None):
        """Run the UpdateTable operation and return the result."""
        return self.connection.update_table(
            self.table_name,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_index_updates=global_secondary_index_updates)

    def create_table(self,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None,
                     billing_mode=DEFAULT_BILLING_MODE):
        """Run the CreateTable operation and return the result."""
        return self.connection.create_table(
            self.table_name,
            attribute_definitions=attribute_definitions,
            key_schema=key_schema,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_indexes=global_secondary_indexes,
            local_secondary_indexes=local_secondary_indexes,
            stream_specification=stream_specification,
            billing_mode=billing_mode)
class PynamodbTest(TracerTestCase):
    """Integration tests for the pynamodb tracing patch."""

    TEST_SERVICE = "pynamodb"

    def setUp(self):
        patch()
        self.conn = Connection(region="us-east-1")
        self.conn.session.set_credentials(
            "aws-access-key", "aws-secret-access-key", "session-token")
        super(PynamodbTest, self).setUp()
        Pin.override(self.conn, tracer=self.tracer)

    def tearDown(self):
        super(PynamodbTest, self).tearDown()
        unpatch()

    def _check_command_span(self, spans, resource, operation, error=0):
        """Shared assertions for a single traced pynamodb command span."""
        assert spans
        span = spans[0]
        assert span.name == "pynamodb.command"
        assert span.service == "pynamodb"
        assert span.resource == resource
        assert len(spans) == 1
        assert_is_measured(span)
        assert span.span_type == "http"
        assert span.get_tag("aws.operation") == operation
        assert span.get_tag("aws.region") == "us-east-1"
        assert span.get_tag("aws.agent") == "pynamodb"
        assert span.duration >= 0
        assert span.error == error
        return span

    @mock_dynamodb
    def test_list_tables(self):
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        list_result = self.conn.list_tables()
        self._check_command_span(self.get_spans(), "ListTables", "ListTables")
        assert len(list_result["TableNames"]) == 1
        assert list_result["TableNames"][0] == "Test"

    @mock_dynamodb
    def test_delete_table(self):
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        delete_result = self.conn.delete_table("Test")
        self._check_command_span(
            self.get_spans(), "DeleteTable Test", "DeleteTable")
        assert delete_result["Table"]["TableName"] == "Test"
        assert len(self.conn.list_tables()["TableNames"]) == 0

    @mock_dynamodb
    def test_scan(self):
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        scan_result = self.conn.scan("Test")
        self._check_command_span(self.get_spans(), "Scan Test", "Scan")
        assert scan_result["ScannedCount"] == 0
        assert len(scan_result["Items"]) == 0

    @mock_dynamodb
    def test_scan_on_error(self):
        with pytest.raises(pynamodb.exceptions.ScanError):
            self.conn.scan("OtherTable")
        span = self._check_command_span(
            self.get_spans(), "Scan OtherTable", "Scan", error=1)
        assert span.meta["error.type"] != ""

    @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc"))
    @mock_dynamodb
    def test_user_specified_service(self):
        from ddtrace import config

        assert config.service == "mysvc"
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        list_result = self.conn.list_tables()
        span = self.get_spans()[0]
        # The integration keeps its own default service name regardless of
        # the application-level DD_SERVICE.
        assert span.service == "pynamodb"
        assert len(list_result["TableNames"]) == 1
        assert list_result["TableNames"][0] == "Test"

    @TracerTestCase.run_in_subprocess(
        env_overrides=dict(DD_PYNAMODB_SERVICE="mypynamodb"))
    @mock_dynamodb
    def test_env_user_specified_pynamodb_service(self):
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        list_result = self.conn.list_tables()
        span = self.get_spans()[0]
        assert span.service == "mypynamodb", span.service
        assert len(list_result["TableNames"]) == 1
        assert list_result["TableNames"][0] == "Test"
        self.reset()

        # Global config takes effect when set.
        with self.override_config("pynamodb", dict(service="cfg-pynamodb")):
            dynamodb_backend.create_table(
                "Test", hash_key_attr="content", hash_key_type="S")
            list_result = self.conn.list_tables()
            span = self.get_spans()[0]
            assert span.service == "cfg-pynamodb", span.service
            assert len(list_result["TableNames"]) == 1
            assert list_result["TableNames"][0] == "Test"
        self.reset()

        # Manual Pin override wins over the environment.
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        Pin.override(self.conn, service="mypynamodb", tracer=self.tracer)
        list_result = self.conn.list_tables()
        span = self.get_spans()[0]
        assert span.service == "mypynamodb", span.service
        assert len(list_result["TableNames"]) == 1
        assert list_result["TableNames"][0] == "Test"

    @TracerTestCase.run_in_subprocess(env_overrides=dict(
        DD_SERVICE="app-svc", DD_PYNAMODB_SERVICE="env-pynamodb"))
    @mock_dynamodb
    def test_service_precedence(self):
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        list_result = self.conn.list_tables()
        span = self.get_spans()[0]
        assert span.service == "env-pynamodb", span.service
        assert len(list_result["TableNames"]) == 1
        assert list_result["TableNames"][0] == "Test"
        self.reset()

        # Manual override beats both environment variables.
        dynamodb_backend.create_table(
            "Test", hash_key_attr="content", hash_key_type="S")
        Pin.override(self.conn, service="override-pynamodb", tracer=self.tracer)
        list_result = self.conn.list_tables()
        span = self.get_spans()[0]
        assert span.service == "override-pynamodb", span.service
        assert len(list_result["TableNames"]) == 1
        assert list_result["TableNames"][0] == "Test"
class TableConnection(object):
    """A per-table facade over botocore.

    Each method delegates to the shared ``Connection``, supplying
    ``self.table_name`` as the first argument.
    """

    def __init__(self, table_name, region=None, host=None):
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        self.connection = Connection(region=region, host=host)

    def delete_item(self,
                    hash_key,
                    range_key=None,
                    expected=None,
                    conditional_operator=None,
                    return_values=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None):
        """Run the DeleteItem operation and return the result."""
        return self.connection.delete_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            expected=expected,
            conditional_operator=conditional_operator,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def update_item(self,
                    hash_key,
                    range_key=None,
                    attribute_updates=None,
                    expected=None,
                    conditional_operator=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None,
                    return_values=None):
        """Run the UpdateItem operation and return the result."""
        return self.connection.update_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            attribute_updates=attribute_updates,
            expected=expected,
            conditional_operator=conditional_operator,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics,
            return_values=return_values)

    def put_item(self,
                 hash_key,
                 range_key=None,
                 attributes=None,
                 expected=None,
                 conditional_operator=None,
                 return_values=None,
                 return_consumed_capacity=None,
                 return_item_collection_metrics=None):
        """Run the PutItem operation and return the result."""
        return self.connection.put_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            attributes=attributes,
            expected=expected,
            conditional_operator=conditional_operator,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_write_item(self,
                         put_items=None,
                         delete_items=None,
                         return_consumed_capacity=None,
                         return_item_collection_metrics=None):
        """Run the BatchWriteItem operation and return the result."""
        return self.connection.batch_write_item(
            self.table_name,
            put_items=put_items,
            delete_items=delete_items,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_get_item(self,
                       keys,
                       consistent_read=None,
                       return_consumed_capacity=None,
                       attributes_to_get=None):
        """Run the BatchGetItem operation and return the result."""
        return self.connection.batch_get_item(
            self.table_name,
            keys,
            consistent_read=consistent_read,
            return_consumed_capacity=return_consumed_capacity,
            attributes_to_get=attributes_to_get)

    def get_item(self,
                 hash_key,
                 range_key=None,
                 consistent_read=False,
                 attributes_to_get=None):
        """Run the GetItem operation and return the result."""
        return self.connection.get_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            consistent_read=consistent_read,
            attributes_to_get=attributes_to_get)

    def scan(self,
             attributes_to_get=None,
             limit=None,
             conditional_operator=None,
             scan_filter=None,
             return_consumed_capacity=None,
             segment=None,
             total_segments=None,
             exclusive_start_key=None):
        """Run the Scan operation and return the result."""
        return self.connection.scan(
            self.table_name,
            attributes_to_get=attributes_to_get,
            limit=limit,
            conditional_operator=conditional_operator,
            scan_filter=scan_filter,
            return_consumed_capacity=return_consumed_capacity,
            segment=segment,
            total_segments=total_segments,
            exclusive_start_key=exclusive_start_key)

    def query(self,
              hash_key,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              key_conditions=None,
              query_filters=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              conditional_operator=None,
              select=None):
        """Run the Query operation and return the result."""
        return self.connection.query(
            self.table_name,
            hash_key,
            attributes_to_get=attributes_to_get,
            consistent_read=consistent_read,
            exclusive_start_key=exclusive_start_key,
            index_name=index_name,
            key_conditions=key_conditions,
            query_filters=query_filters,
            limit=limit,
            return_consumed_capacity=return_consumed_capacity,
            scan_index_forward=scan_index_forward,
            conditional_operator=conditional_operator,
            select=select)

    def describe_table(self):
        """Run the DescribeTable operation and return the result."""
        return self.connection.describe_table(self.table_name)

    def delete_table(self):
        """Run the DeleteTable operation and return the result."""
        return self.connection.delete_table(self.table_name)

    def update_table(self,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_index_updates=None):
        """Run the UpdateTable operation and return the result."""
        return self.connection.update_table(
            self.table_name,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_index_updates=global_secondary_index_updates)

    def create_table(self,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None):
        """Run the CreateTable operation and return the result."""
        return self.connection.create_table(
            self.table_name,
            attribute_definitions=attribute_definitions,
            key_schema=key_schema,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_indexes=global_secondary_indexes,
            local_secondary_indexes=local_secondary_indexes,
            stream_specification=stream_specification)
class TableConnection(object):
    """A per-table facade over botocore.

    All operations delegate to the shared ``Connection`` with
    ``self.table_name`` bound as the first argument; optional static AWS
    credentials are forwarded to the underlying connection.
    """

    def __init__(self,
                 table_name,
                 region=None,
                 host=None,
                 aws_access_key_id=None,
                 aws_secret_access_key=None):
        self._hash_keyname = None
        self._range_keyname = None
        self.table_name = table_name
        self.connection = Connection(
            region=region,
            host=host,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key)

    def delete_item(self,
                    hash_key,
                    range_key=None,
                    expected=None,
                    conditional_operator=None,
                    return_values=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None):
        """Run the DeleteItem operation and return the result."""
        return self.connection.delete_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            expected=expected,
            conditional_operator=conditional_operator,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def update_item(self,
                    hash_key,
                    range_key=None,
                    attribute_updates=None,
                    expected=None,
                    conditional_operator=None,
                    return_consumed_capacity=None,
                    return_item_collection_metrics=None,
                    return_values=None):
        """Run the UpdateItem operation and return the result."""
        return self.connection.update_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            attribute_updates=attribute_updates,
            expected=expected,
            conditional_operator=conditional_operator,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics,
            return_values=return_values)

    def put_item(self,
                 hash_key,
                 range_key=None,
                 attributes=None,
                 expected=None,
                 conditional_operator=None,
                 return_values=None,
                 return_consumed_capacity=None,
                 return_item_collection_metrics=None):
        """Run the PutItem operation and return the result."""
        return self.connection.put_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            attributes=attributes,
            expected=expected,
            conditional_operator=conditional_operator,
            return_values=return_values,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_write_item(self,
                         put_items=None,
                         delete_items=None,
                         return_consumed_capacity=None,
                         return_item_collection_metrics=None):
        """Run the BatchWriteItem operation and return the result."""
        return self.connection.batch_write_item(
            self.table_name,
            put_items=put_items,
            delete_items=delete_items,
            return_consumed_capacity=return_consumed_capacity,
            return_item_collection_metrics=return_item_collection_metrics)

    def batch_get_item(self,
                       keys,
                       consistent_read=None,
                       return_consumed_capacity=None,
                       attributes_to_get=None):
        """Run the BatchGetItem operation and return the result."""
        return self.connection.batch_get_item(
            self.table_name,
            keys,
            consistent_read=consistent_read,
            return_consumed_capacity=return_consumed_capacity,
            attributes_to_get=attributes_to_get)

    def get_item(self,
                 hash_key,
                 range_key=None,
                 consistent_read=False,
                 attributes_to_get=None):
        """Run the GetItem operation and return the result."""
        return self.connection.get_item(
            self.table_name,
            hash_key,
            range_key=range_key,
            consistent_read=consistent_read,
            attributes_to_get=attributes_to_get)

    def scan(self,
             attributes_to_get=None,
             limit=None,
             conditional_operator=None,
             scan_filter=None,
             return_consumed_capacity=None,
             segment=None,
             total_segments=None,
             exclusive_start_key=None):
        """Run the Scan operation and return the result."""
        return self.connection.scan(
            self.table_name,
            attributes_to_get=attributes_to_get,
            limit=limit,
            conditional_operator=conditional_operator,
            scan_filter=scan_filter,
            return_consumed_capacity=return_consumed_capacity,
            segment=segment,
            total_segments=total_segments,
            exclusive_start_key=exclusive_start_key)

    def query(self,
              hash_key,
              attributes_to_get=None,
              consistent_read=False,
              exclusive_start_key=None,
              index_name=None,
              key_conditions=None,
              query_filters=None,
              limit=None,
              return_consumed_capacity=None,
              scan_index_forward=None,
              conditional_operator=None,
              select=None):
        """Run the Query operation and return the result."""
        return self.connection.query(
            self.table_name,
            hash_key,
            attributes_to_get=attributes_to_get,
            consistent_read=consistent_read,
            exclusive_start_key=exclusive_start_key,
            index_name=index_name,
            key_conditions=key_conditions,
            query_filters=query_filters,
            limit=limit,
            return_consumed_capacity=return_consumed_capacity,
            scan_index_forward=scan_index_forward,
            conditional_operator=conditional_operator,
            select=select)

    def describe_table(self):
        """Run the DescribeTable operation and return the result."""
        return self.connection.describe_table(self.table_name)

    def delete_table(self):
        """Run the DeleteTable operation and return the result."""
        return self.connection.delete_table(self.table_name)

    def update_table(self,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_index_updates=None):
        """Run the UpdateTable operation and return the result.

        Has limits with update:
        - Will not be able to create or delete an index.
        - May fail if too many operations are tried at the same time.

        TODO@rohan - Here the update operations do not account for the fact
        that dynamodb allows only one update per update operation.
        https://botocore.readthedocs.org/en/latest/reference/services/dynamodb.html#DynamoDB.Client.update_table
        http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTable.html
        """
        return self.connection.update_table(
            self.table_name,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_index_updates=global_secondary_index_updates)

    def add_field_index(self, fields=None, global_secondary_indexes=None):
        """Add fields and indexes that did not previously exist on the table.

        Only available for global secondary indexes. Fields or indexes or
        both can be added at the same time.
        """
        return self.connection.add_field_index(
            self.table_name,
            fields=fields,
            global_secondary_indexes=global_secondary_indexes)

    def update_indexes(self, global_secondary_indexes=None):
        """TODO@rohan"""
        return self.connection.update_indexes(
            self.table_name,
            global_secondary_indexes=global_secondary_indexes)

    def delete_indexes(self, global_secondary_indexes=None):
        """TODO@rohan"""
        return self.connection.delete_indexes(
            self.table_name,
            global_secondary_indexes=global_secondary_indexes)

    def create_table(self,
                     attribute_definitions=None,
                     key_schema=None,
                     read_capacity_units=None,
                     write_capacity_units=None,
                     global_secondary_indexes=None,
                     local_secondary_indexes=None,
                     stream_specification=None):
        """Run the CreateTable operation and return the result."""
        return self.connection.create_table(
            self.table_name,
            attribute_definitions=attribute_definitions,
            key_schema=key_schema,
            read_capacity_units=read_capacity_units,
            write_capacity_units=write_capacity_units,
            global_secondary_indexes=global_secondary_indexes,
            local_secondary_indexes=local_secondary_indexes,
            stream_specification=stream_specification)