def iterate_batch_get_item(connection, table, *keys):
    """
    Make as many :class:`.BatchGetItem` actions as needed to iterate over all specified items.
    Including processing :attr:`.BatchGetItemResponse.unprocessed_keys`.

    .. Warning, this is NOT doctest. Because doctests aren't stable because items order changes.

    ::

        >>> for item in iterate_batch_get_item(connection, table, {"h": 0}, {"h": 1}, {"h": 2}):
        ...   print(item)
        {u'h': 1, u'gr': 0, u'gh': 0}
        {u'h': 2, u'gr': 0, u'gh': 0}
        {u'h': 0, u'gr': 0, u'gh': 0}

    Note that items are returned in an unspecified order.

    :param connection: callable that executes a low-level action and returns its response.
    :param table: name of the table to read from.
    :param keys: key dicts identifying the items to fetch.
    """
    keys = list(keys)
    unprocessed_keys = []

    # First pass: submit the caller's keys in chunks of 100
    # (BatchGetItem accepts at most 100 keys per request).
    while keys:
        r = connection(_lv.BatchGetItem().table(table).keys(keys[:100]))
        keys = keys[100:]
        unprocessed_keys.extend(_get_unprocessed_keys(r, table))
        for item in r.responses.get(table, []):
            yield item

    # Second pass: retry keys the service declined to process. A retry can
    # itself leave keys unprocessed, so they are appended back to the queue.
    while unprocessed_keys:
        r = connection(_lv.BatchGetItem().previous_unprocessed_keys({table: {"Keys": unprocessed_keys[:100]}}))
        unprocessed_keys = unprocessed_keys[100:]
        unprocessed_keys.extend(_get_unprocessed_keys(r, table))
        for item in r.responses.get(table, []):
            yield item


def _get_unprocessed_keys(r, table):
    """Return the keys reported unprocessed for ``table`` in response ``r`` ([] if none)."""
    if isinstance(r.unprocessed_keys, dict) and table in r.unprocessed_keys and "Keys" in r.unprocessed_keys[table]:
        return r.unprocessed_keys[table]["Keys"]
    return []
def test_simple_batch_get(self):
    """Write three items, then batch-get all three back by key."""
    put_request = _lv.BatchWriteItem().table("Aaa").put(
        {"h": "1", "a": "xxx"},
        {"h": "2", "a": "yyy"},
        {"h": "3", "a": "zzz"},
    )
    self.connection(put_request)

    get_request = _lv.BatchGetItem().table("Aaa").keys({"h": "1"}, {"h": "2"}, {"h": "3"})
    response = self.connection(get_request)

    self.assertEqual(list(response.responses.keys()), ["Aaa"])
    # Sort by hash key: DynamoDB returns items in an unspecified order.
    received = sorted(response.responses["Aaa"], key=lambda item: item["h"])
    self.assertEqual(received, [
        {"h": "1", "a": "xxx"},
        {"h": "2", "a": "yyy"},
        {"h": "3", "a": "zzz"},
    ])
def test_get_unexisting_keys(self):
    """A key with no matching item is omitted from responses, not left unprocessed."""
    put_request = _lv.BatchWriteItem().table("Aaa").put(
        {"h": "1", "a": "xxx"},
        {"h": "2", "a": "yyy"},
    )
    self.connection(put_request)

    # Ask for three keys; only two items exist.
    response = self.connection(_lv.BatchGetItem().table("Aaa").keys({"h": "1"}, {"h": "2"}, {"h": "3"}))

    received = sorted(response.responses["Aaa"], key=lambda item: item["h"])
    self.assertEqual(received, [
        {"h": "1", "a": "xxx"},
        {"h": "2", "a": "yyy"},
    ])
    self.assertEqual(response.unprocessed_keys, {})
def test_return_consumed_capacity_total(self):
    """With TOTAL capacity reporting, only capacity_units and table_name are populated."""
    request = _lv.BatchGetItem().table(self.table).keys(self.tab_key).return_consumed_capacity_total()
    response = self.connection(request)

    capacity = response.consumed_capacity[0]
    self.assertEqual(capacity.capacity_units, 0.5)
    self.assertEqual(capacity.global_secondary_indexes, None)
    self.assertEqual(capacity.local_secondary_indexes, None)
    self.assertEqual(capacity.table, None)
    self.assertEqual(capacity.table_name, self.table)
def test_get_without_unprocessed_keys(self):
    """100 small items all come back in one BatchGetItem; nothing is left unprocessed."""
    small_items = [{"h": str(i)} for i in range(100)]
    _lv.batch_put_item(self.connection, "Aaa", small_items)

    response = self.connection(
        _lv.BatchGetItem().table("Aaa").keys({"h": str(i)} for i in range(100))
    )

    self.assertEqual(response.unprocessed_keys, {})
    self.assertEqual(len(response.responses["Aaa"]), 100)
def test_get_with_unprocessed_keys(self):
    """Oversized items make the service return some keys as unprocessed."""
    # 300kB items ensure a single BatchGetItem will return at most 55 items
    big_items = [{"h": str(i), "xs": "x" * 300000} for i in range(100)]
    _lv.batch_put_item(self.connection, "Aaa", big_items)

    first_response = self.connection(
        _lv.BatchGetItem().table("Aaa").keys({"h": str(i)} for i in range(100))
    )

    self.assertEqual(len(first_response.unprocessed_keys["Aaa"]["Keys"]), 45)
    self.assertEqual(len(first_response.responses["Aaa"]), 55)
def test_batch_get_with_projections(self):
    """Projection expressions restrict returned attributes ("c" is projected out)."""
    put_request = _lv.BatchWriteItem().table("Aaa").put(
        {"h": "1", "a": "a1", "b": "b1", "c": "c1"},
        {"h": "2", "a": "a2", "b": "b2", "c": "c2"},
        {"h": "3", "a": "a3", "b": "b3", "c": "c3"},
    )
    self.connection(put_request)

    # "#p" is an expression attribute name aliasing attribute "b".
    get_request = (
        _lv.BatchGetItem()
        .table("Aaa")
        .keys({"h": "1"}, {"h": "2"}, {"h": "3"})
        .expression_attribute_name("p", "b")
        .project("h")
        .project("a", ["#p"])
    )
    response = self.connection(get_request)

    received = sorted(response.responses["Aaa"], key=lambda item: item["h"])
    self.assertEqual(received, [
        {"h": "1", "a": "a1", "b": "b1"},
        {"h": "2", "a": "a2", "b": "b2"},
        {"h": "3", "a": "a3", "b": "b3"},
    ])