Example #1
@classmethod
def _fetch_batch_queue(cls, batch_queue, attributes_to_get=None):
    results = []
    unprocessed = []
    consumed_capacity = 0.0
    while batch_queue:
        batch_keys = batch_queue.pop()
        if not batch_keys:
            continue
        # Build and submit one BatchGetItem request for this chunk of keys.
        batch = BatchList(Configure.get_connection())
        batch.add_batch(cls._table,
                        [cls._hash_key_proto(k) for k in batch_keys],
                        attributes_to_get=attributes_to_get)
        try:
            batch_ret = batch.submit()
        except DynamoDBKeyNotFoundError:
            continue
        # Remember keys DynamoDB did not process so the caller can retry them.
        if ('UnprocessedKeys' in batch_ret
                and cls._full_table_name in batch_ret['UnprocessedKeys']):
            u = batch_ret['UnprocessedKeys'][cls._full_table_name]
            unprocessed.extend(k['HashKeyElement'] for k in u['Keys'])
        # Accumulate the returned items and the capacity consumed by this call.
        if ('Responses' in batch_ret
                and cls._full_table_name in batch_ret['Responses']):
            tbl = batch_ret['Responses'][cls._full_table_name]
            results.extend(tbl['Items'])
            consumed_capacity += tbl['ConsumedCapacityUnits']
    return results, unprocessed, consumed_capacity
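
A plausible way to drive _fetch_batch_queue is to chunk the requested hash keys into groups of at most 100 (the BatchGetItem limit) and resubmit whatever comes back as unprocessed. The batch_get wrapper and BATCH_GET_LIMIT constant below are illustrative assumptions; only _fetch_batch_queue and its (results, unprocessed, consumed_capacity) return value come from the example above.

BATCH_GET_LIMIT = 100  # DynamoDB BatchGetItem accepts at most 100 keys per call

def batch_get(model_cls, hash_keys, attributes_to_get=None):
    """Illustrative driver: fetch every item named in hash_keys,
    resubmitting keys reported back as unprocessed."""
    items = []
    total_capacity = 0.0
    pending = list(hash_keys)
    while pending:
        # Split the pending keys into chunks small enough for one request.
        batch_queue = [pending[i:i + BATCH_GET_LIMIT]
                       for i in range(0, len(pending), BATCH_GET_LIMIT)]
        results, unprocessed, capacity = model_cls._fetch_batch_queue(
            batch_queue, attributes_to_get=attributes_to_get)
        items.extend(results)
        total_capacity += capacity
        pending = unprocessed  # throttled keys are retried on the next pass
    return items, total_capacity

In practice a short pause between passes would keep this retry loop from hammering a throttled table, but that is left out to keep the sketch small.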
Example #2
    def __iter__(self):
        while self.keys:
            # Build the next batch (BatchGetItem accepts at most 100 keys)
            batch = BatchList(self.table.layer2)
            batch.add_batch(self.table, self.keys[:100],
                            self.attributes_to_get)
            res = batch.submit()

            # Consume this chunk of keys and re-queue any unprocessed ones
            self.keys = self.keys[100:]
            self._queue_unprocessed(res)

            # Parse the results
            if self.table.name not in res['Responses']:
                continue
            table_res = res['Responses'][self.table.name]
            self.consumed_units += table_res['ConsumedCapacityUnits']
            for elem in table_res['Items']:
                yield elem
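
The __iter__ above assumes surrounding state: a list of remaining keys, a boto Table with its layer2 connection, an attributes_to_get list, a consumed_units counter, and a _queue_unprocessed helper. A minimal sketch of that wrapper follows; the class name BatchFetchIterator and the body of _queue_unprocessed are assumptions modelled on the response shape used in Example #1, not code from the original project.

class BatchFetchIterator(object):
    """Hypothetical owner of the __iter__ method shown above."""

    def __init__(self, table, keys, attributes_to_get=None):
        self.table = table                    # boto.dynamodb Table instance
        self.keys = list(keys)                # hash keys still to fetch
        self.attributes_to_get = attributes_to_get
        self.consumed_units = 0.0             # running read-capacity total

    def _queue_unprocessed(self, res):
        # Push keys DynamoDB could not serve back onto the queue so a
        # later batch retries them.
        unprocessed = res.get('UnprocessedKeys', {})
        if self.table.name not in unprocessed:
            return
        keys = unprocessed[self.table.name]['Keys']
        self.keys.extend(k['HashKeyElement'] for k in keys)

With the __iter__ method from the example attached to this class, iterating over BatchFetchIterator(table, keys) streams raw item dicts while throttled keys are transparently retried, and consumed_units ends up holding the total read capacity spent.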