async def test_result_key_iters(s3_client, bucket_name, create_object):
    """Paginate a delimited listing and collect values for every result key.

    Objects are created both directly under ``key/`` and one level deeper,
    so the delimited listing yields both ``Contents`` and
    ``CommonPrefixes`` result keys.
    """
    for i in range(5):
        # one object under a sub-prefix, one directly under 'key/'
        await create_object('key/%s/%s' % (i, i))
        await create_object('key/%s' % i)
    paginator = s3_client.get_paginator('list_objects')
    generator = paginator.paginate(
        MaxKeys=2, Prefix='key/', Delimiter='/', Bucket=bucket_name
    )
    iterators = generator.result_key_iters()
    response = defaultdict(list)
    key_names = [it.result_key for it in iterators]
    # aioitertools.zip_longest wants async iterators, not the raw
    # ResultKeyIterator objects.
    iterators = [it.__aiter__() for it in iterators]
    async for vals in aioitertools.zip_longest(*iterators):
        # Accumulate inside the loop — the previous version's loop body was
        # a bare ``pass``, so only the final ``vals`` tuple was recorded.
        for k, val in zip(key_names, vals):
            # defaultdict(list) creates the list on first access; the
            # redundant setdefault call is dropped.
            response[k.expression].append(val)
    assert 'Contents' in response
    assert 'CommonPrefixes' in response
async def test_zip_longest_range(self):
    """The shorter of two ranges is padded with the default fill (None)."""
    short = range(3)
    long = range(5)
    zipped = ait.zip_longest(short, long)
    expected = [(0, 0), (1, 1), (2, 2), (None, 3), (None, 4)]
    for pair in expected:
        self.assertEqual(await ait.next(zipped), pair)
    # Once both inputs are exhausted the iterator signals exhaustion.
    with self.assertRaises(StopAsyncIteration):
        await ait.next(zipped)
async def chunker(iterable, chunk_size: int):
    """Asynchronously yield tuples of up to ``chunk_size`` consecutive items.

    Works for both sync and async iterables (whatever
    ``aioitertools.enumerate`` accepts). The final chunk may be shorter
    than ``chunk_size``.
    """
    indexed = aioitertools.enumerate(iterable)
    # Repeating the SAME iterator chunk_size times makes zip_longest pull
    # chunk_size consecutive (index, value) pairs per round.
    slots = [indexed] * chunk_size
    async for group in aioitertools.zip_longest(*slots, fillvalue=None):
        # (index, value) tuples are always truthy, so only the None
        # fill values are discarded here.
        present = tuple(item for item in group if item)
        if not present:
            continue
        ordered = sorted(present, key=operator.itemgetter(0))
        yield tuple(value for _, value in ordered)
async def test_zip_longest_exception(self):
    """An exception raised mid-stream propagates out of zip_longest."""
    async def failing():
        yield 1
        yield 2
        raise Exception("fake error")

    fives = ait.repeat(5)
    zipped = ait.zip_longest(failing(), fives)
    # The pairs produced before the failure come through untouched.
    for expected in [(1, 5), (2, 5)]:
        self.assertEqual(await ait.next(zipped), expected)
    with self.assertRaisesRegex(Exception, "fake error"):
        await ait.next(zipped)
async def test_zip_longest_fillvalue(self):
    """A custom fillvalue pads the shorter (async) iterator."""
    async def squares():
        for base in (1, 2, 3, 4):
            yield base * base

    zipped = ait.zip_longest(squares(), range(5), fillvalue=42)
    expected = [(1, 0), (4, 1), (9, 2), (16, 3), (42, 4)]
    for pair in expected:
        self.assertEqual(await ait.next(zipped), pair)
    with self.assertRaises(StopAsyncIteration):
        await ait.next(zipped)