def get_many(cls, keys, attributes_to_get=None):
    """
    Return a list of :class:`PersistentObject` identical in length to
    the list of keys provided, in the same order. If a key could not be
    found, its slot will be `None`.

    This operation performs `BatchGetItem` on the DynamoDB store. A
    single batch is typically limited to 100 items and, depending on
    your configured capacity, can easily outstrip it. This method
    retries in a loop until all the keys you asked for are satisfied,
    so `keys` is not limited to 100 items.

    :type keys: list
    :param keys: A list of keys
    :param attributes_to_get: currently unused -- TODO(review): either
        thread through to the batch fetch or remove from the signature.
    """
    cls._load_meta()
    # Materialize the prepared keys into a list. Under Python 3,
    # ``map`` returns a one-shot iterator which would be exhausted by
    # ``cls._get_batch_queue(keys)`` below, leaving the reordering loop
    # at the end with nothing to iterate (every slot would be None).
    keys = [cls.prepare_key(key) for key in keys]
    t1 = time.time()
    # First batch fetch for all requested keys.
    items, unprocessed, consumed_capacity = cls._fetch_batch_queue(
        cls._get_batch_queue(keys))
    # If DynamoDB reported unprocessed items, re-queue them.
    if unprocessed:
        unprocessed_queue = cls._get_batch_queue(unprocessed)
    else:
        unprocessed_queue = []
    # Keep fetching until every unprocessed key has been retried.
    while unprocessed_queue:
        new_items, new_unprocessed, new_consumed = cls._fetch_batch_queue(
            unprocessed_queue)
        consumed_capacity += new_consumed
        items.extend(new_items)
        if new_unprocessed:
            unprocessed_queue = cls._get_batch_queue(new_unprocessed)
        else:
            unprocessed_queue = []
    # Map each returned item's hash-key value to its index so results
    # can be emitted in the caller's original key order.
    h = dict(
        (item[cls._hash_key_name], idx) for idx, item in enumerate(items))
    ret = []
    for key in keys:
        if key in h:
            ret.append(cls(Item(cls._table, key, None, items[h[key]])))
        else:
            # Key was not found in the store.
            ret.append(None)
    # Lazy %-style args: formatting is skipped if INFO is disabled.
    logger.info(
        'Got %i of %s in %s ConsumedCapacityUnits=%f',
        len(items), cls.__name__, time.time() - t1, consumed_capacity)
    return ret
def new_item(self, hash_key, range_key=None, attrs=None):
    """
    Build a fresh, unsaved :class:`Item` bound to this table.

    The item exists only in memory; it is not persisted to Amazon
    DynamoDB until it is explicitly PUT.
    """
    item = Item(self, hash_key, range_key, attrs)
    return item