def __init__(self, *args, **kwargs):
    """Create a new DynamoDB model

    This supports both:

    >>> DynamoModel(table, "hash_key", "range_key", attrs)
    >>> DynamoModel("hash_key", "range_key")

    as well as using keyword args:

    >>> DynamoModel(table=table, hash_key="hash_key", range_key="range_key")
    >>> DynamoModel(hash_key="hash_key", range_key="range_key")
    >>> DynamoModel(table, hash_key="hash_key", range_key="range_key")

    This could be coming from Layer2, or it could be just called
    directly by us.
    """
    # The table may arrive as the first positional argument or as the
    # "table" keyword; look in both places.
    first_arg = args[0] if args else kwargs.get("table")
    if first_arg and isinstance(first_arg, Table):
        # A Table instance means this call came from Layer2, so pass
        # everything straight up the chain untouched.
        return Item.__init__(self, *args, **kwargs)
    # Called directly without a table: resolve the table automatically.
    return Item.__init__(self, self.get_table(), *args, **kwargs)
def __setitem__(self, key, value):
    """Overwrite the setter to automatically convert types to
    DynamoDB supported types"""
    from datetime import datetime

    if isinstance(value, datetime):
        # DynamoDB has no native datetime type; store an ISO-8601-style
        # string instead.
        value = value.strftime("%Y-%m-%dT%H:%M:%S")
    elif isinstance(value, list):
        # Multi-valued DynamoDB attributes are sets, not lists.
        value = set(value)
    return Item.__setitem__(self, key, value)
def delete(self, *args, **kwargs):
    """Intercept the delete function to also remove this record
    from CloudSearch if it is indexed"""
    if self._cs_document_endpoint:
        from boto.cloudsearch.document import DocumentServiceConnection
        conn = DocumentServiceConnection(
            endpoint=self._cs_document_endpoint)
        # CloudSearch document ids have a restricted alphabet, so the
        # raw id is base32-encoded, lowercased, and '=' padding is
        # swapped for '_'.
        # NOTE(review): assumes self.id is acceptable input to
        # b32encode (bytes on Python 3) -- confirm.
        doc_id = b32encode(self.id).lower().replace('=', '_')
        conn.delete(doc_id, int(time.time()))
        conn.commit()
    return Item.delete(self, *args, **kwargs)
def get_many(cls, keys, attributes_to_get=None):
    """
    Returns a list of :class:`PersistentObject` identical in length
    to the list of keys provided. If a key could not be found, it's
    slot will be `None`

    This operation performs `BatchGetItem` on the DynamoDB store.
    This method is typically limited to 100 items. Depending on your
    configured capacity, this can easily outstrip it. This method will
    retry in a loop until all the keys you asked for are satisfied.
    `keys` is not limited to 100 items.

    :type keys: list
    :param keys: A list of keys

    :type attributes_to_get: list
    :param attributes_to_get: Currently unused by this implementation.
    """
    cls._load_meta()
    # Materialize the prepared keys as a list: on Python 3, ``map``
    # returns a one-shot iterator that would be exhausted by
    # _get_batch_queue() below, leaving the reordering loop at the end
    # with nothing to iterate (every slot would come back None).
    keys = [cls.prepare_key(key) for key in keys]
    t1 = time.time()

    # First pass: fetch everything we can in one batch queue.
    items, unprocessed, consumed_capacity = cls._fetch_batch_queue(
        cls._get_batch_queue(keys))

    # If there are unprocessed items, create a batch from them ...
    unprocessed_queue = (
        cls._get_batch_queue(unprocessed) if unprocessed else [])

    # ... and continue fetching unprocessed items until there are no more.
    while unprocessed_queue:
        new_items, new_unprocessed, new_consumed = cls._fetch_batch_queue(
            unprocessed_queue)
        consumed_capacity += new_consumed
        items.extend(new_items)
        unprocessed_queue = (
            cls._get_batch_queue(new_unprocessed) if new_unprocessed else [])

    # Map each returned item's hash-key value to its index so results
    # can be reordered to match the caller's key order.
    h = dict(
        (item[cls._hash_key_name], idx) for idx, item in enumerate(items))

    ret = []
    for key in keys:
        if key in h:
            ret.append(cls(Item(cls._table, key, None, items[h[key]])))
        else:
            # Key was not found; preserve its slot as None.
            ret.append(None)

    logger.info(
        'Got %i of %s in %s ConsumedCapacityUnits=%f' % (
            len(items), cls.__name__, time.time() - t1, consumed_capacity))
    return ret
def get_item(self, table, hash_key, range_key=None,
             attributes_to_get=None, consistent_read=False):
    """
    Retrieve an existing item from the table.

    :type table: :class:`boto.dynamodb.table.Table`
    :param table: The Table object from which the item is retrieved.

    :type hash_key: int|long|float|str|unicode
    :param hash_key: The HashKey of the requested item.  The
        type of the value must match the type defined in the
        schema for the table.

    :type range_key: int|long|float|str|unicode
    :param range_key: The optional RangeKey of the requested item.
        The type of the value must match the type defined in the
        schema for the table.

    :type attributes_to_get: list
    :param attributes_to_get: A list of attribute names.
        If supplied, only the specified attribute names will
        be returned.  Otherwise, all attributes will be returned.

    :type consistent_read: bool
    :param consistent_read: If True, a consistent read
        request is issued.  Otherwise, an eventually consistent
        request is issued.
    """
    # Build the wire-format key from the table's schema, then hand the
    # request down to Layer1.
    key = self.build_key_from_values(table.schema, hash_key, range_key)
    response = self.layer1.get_item(
        table.name, key, attributes_to_get, consistent_read,
        object_hook=item_object_hook)
    item = Item(table, hash_key, range_key, response['Item'])
    # Surface the capacity cost on the item when the service reports it.
    if 'ConsumedCapacityUnits' in response:
        item.consumed_units = response['ConsumedCapacityUnits']
    return item
def new_item(self, hash_key, range_key=None, attrs=None):
    """
    Return a new, unsaved Item which can later be PUT
    to Amazon DynamoDB.
    """
    return Item(self, hash_key, range_key, attrs)
def save(self, *args, **kwargs):
    """Persist the item, wrapping the write in the save/update
    lifecycle hooks."""
    self.on_save_or_update()
    Item.save(self, *args, **kwargs)
    self.after_save_or_update()
def __setitem__(self, key, value):
    """Overwrite the setter to automatically convert types to
    DynamoDB supported types"""
    # Delegate the per-key type conversion to convert(), then store.
    converted = self.convert(key, value)
    return Item.__setitem__(self, key, converted)
def save(self, *args, **kwargs):
    """Persist the item, wrapping the write in the save/update
    lifecycle hooks and pushing the document to CloudSearch first
    when an index endpoint is configured."""
    self.on_save_or_update()
    # CloudSearch indexing happens before the DynamoDB write,
    # preserving the original call ordering.
    if self._cs_document_endpoint:
        self.save_to_cloudsearch()
    Item.save(self, *args, **kwargs)
    self.after_save_or_update()
def __setattr__(self, name, val):
    """Route declared properties through item storage; anything else
    falls back to ordinary attribute assignment."""
    if self.find_property(name):
        # Known property: store it as an item value so it is persisted.
        self[name] = val
    else:
        Item.__setattr__(self, name, val)