def test_timeout_limit_context():
    """TimeoutLimit used as a context manager must abort a slow coroutine
    and surface asyncio.TimeoutError after roughly the configured budget."""
    started = time.time()

    @asyncio.coroutine
    def slow_task():
        # The wrapped coroutine sleeps far longer than the 0.2s limit.
        with TimeoutLimit(timeout=0.2):
            yield from coro(delay=1)

    with pytest.raises(asyncio.TimeoutError):
        run(slow_task())

    # At least the timeout interval must have elapsed before the error fired.
    assert time.time() - started >= 0.2
def test_timeout_coroutine_object():
    """timeout() applied to a coroutine object must cancel it and raise
    asyncio.TimeoutError once the 0.2s budget is exhausted."""
    started = time.time()

    with pytest.raises(asyncio.TimeoutError):
        @asyncio.coroutine
        def _run():
            wrapped = timeout(coro(delay=1), timeout=0.2)
            return (yield from wrapped)

        run(_run())

    # The failure must not occur before the timeout interval has passed.
    assert time.time() - started >= 0.2
def test_pipe_async_generator():
    """An object implementing the async-iterator protocol can feed a paco
    pipe chain (map then reduce)."""

    class AsyncGenerator(object):
        def __init__(self, values=None):
            # Cursor into the backing list; reset on every __aiter__ call.
            self.pos = 0
            self.values = values or [1, 2, 3]

        @asyncio.coroutine
        def __aiter__(self):
            self.pos = 0
            return self

        @asyncio.coroutine
        def __anext__(self):
            # Exhausted: signal the end of async iteration.
            if self.pos == len(self.values):
                raise StopAsyncIteration  # noqa
            item = self.values[self.pos]
            self.pos += 1
            return item

    @asyncio.coroutine
    def task(numbers):
        pipeline = (AsyncGenerator(numbers)
                    | paco.map(mapper)
                    | paco.reduce(reducer, initializer=0))
        return (yield from pipeline)

    # sum of each value doubled: 2 + 4 + 6 + 8 + 10 == 30
    result = paco.run(task([1, 2, 3, 4, 5]))
    assert result == 30
def test_pipe_operator_overload():
    """A plain tuple piped through filter/map/dropwhile/reduce must yield
    the expected aggregate."""

    @asyncio.coroutine
    def task(numbers):
        pipeline = (numbers
                    | paco.filter(filterer)
                    | paco.map(mapper)
                    | paco.dropwhile(drop)
                    | paco.reduce(reducer, initializer=0))
        return (yield from pipeline)

    result = paco.run(task((1, 2, 3, 4, 5, 6, 7, 8, 9, 10)))
    assert result == 36
# NOTE(review): this chunk begins inside an exception handler — the `try:`
# and the enclosing `__request_post` definition sit above this excerpt.
except Exception as e:
    # Best-effort failure: report status 0 plus the stringified error.
    return 0, str(e)


async def request_post(url, data, timeout=1, num_retry=3, **kwargs):
    # Retry wrapper around __request_post: attempt the POST up to
    # `num_retry` times. Returns (1, response) on the first success, or
    # (0, [error messages]) once every attempt has failed.
    i = num_retry
    err_msg = []
    while i > 0:
        st, res = await __request_post(url, data, timeout=timeout, **kwargs)
        if not st:
            # Failed attempt: record the error and consume one retry.
            i -= 1
            err_msg.append(res)
            continue
        return 1, res
    else:
        # while/else: runs only when the loop exhausts without returning,
        # i.e. all retries failed.
        return 0, err_msg


async def main():
    # Smoke check of the GET path.
    # assumes request_get is defined earlier in this file — TODO confirm
    st, ct = await request_get('http://www.baidu.com/')
    print(st)
    print(ct)


if __name__ == '__main__':
    import paco
    paco.run(main())
import paco
import aiohttp


async def fetch(url):
    """GET *url* and return the aiohttp response object.

    NOTE(review): the response is returned after both `async with` blocks
    exit, so only already-populated attributes (e.g. `.status`) are safe
    to read afterwards — the body stream is closed by then.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return response


async def fetch_urls():
    """Fetch a fixed list of sites with at most 3 requests in flight."""
    urls = [
        'https://www.google.com',
        'https://www.yahoo.com',
        'https://www.bing.com',
        'https://www.baidu.com',
        'https://duckduckgo.com',
    ]

    # paco.map fans `fetch` out over the URLs, capped at 3 concurrent.
    responses = await paco.map(fetch, urls, limit=3)

    for response in responses:
        print('Status:', response.status)


# Drive the coroutine from synchronous code.
paco.run(fetch_urls())
# HTTP mocks: the first four requests fail with 503, the fifth succeeds.
pook.get('server.com').times(4).reply(503)
pook.get('server.com').times(1).reply(200).json({'hello': 'world'})


async def evaluator(status):
    """Decide whether the operation failed: return an Exception to trigger
    a retry, or False to accept the result."""
    return False if status == 200 else Exception(
        'failed request with status {}'.format(status))


async def on_retry(err, next_try):
    """Retry-event subscriber: report the error and the delay before the
    next attempt."""
    print('Operation error: {}'.format(err))
    print('Next try in {}ms'.format(next_try))


# Retriable operation wired up with the custom evaluator and subscriber.
@retry(evaluator=evaluator, on_retry=on_retry)
async def fetch(url):
    """GET *url* and return only the HTTP status code."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return response.status


status = paco.run(fetch('http://server.com'))
print('Response status:', status)
# NOTE(review): this excerpt begins mid-dict — the item-building code and
# its enclosing parse routine start above the visible chunk.
        box.user.id,
        'profile_url': author_url_tmpl.format(box.user.id),
        'profile_avatar': box.user.image,
        'profile_name': box.user.nickname,
        "post_time": maya.parse(box.time, timezone="Asia/Shanghai").epoch,
    }
    task.parse_result = item
    # Persist only posts with exactly 1 or 2 comments — business rule;
    # rationale not visible from this excerpt.
    if item['comments'] == 2 or item['comments'] == 1:
        write_data(item)
    logger.info(f'{item=}')
    return item


def write_data(item: dict) -> None:
    # Append one record per line as JSON (JSON Lines) to data.json.
    with open('data.json', 'a+') as f:
        f.write(json.dumps(item) + '\n')


if __name__ == '__main__':
    # assumes Task and Fetcher are imported earlier in this file — TODO confirm
    task = Task()
    fetch = Fetcher()
    paco.run(fetch.run(task))
def test_run_loop():
    """run() must accept an explicit event loop via the `loop` keyword."""
    event_loop = asyncio.get_event_loop()
    result = run(coro(2), loop=event_loop)
    assert result == 4
def test_run():
    """run() executes a coroutine to completion and returns its result."""
    result = run(coro(2))
    assert result == 4
def fetch_urls(urls):
    """Synchronous facade: run the async _fetch_urls pipeline to
    completion and return its result."""
    pending = _fetch_urls(urls)
    return paco.run(pending)
import paco


async def filterer(x):
    """Keep values strictly below 8."""
    return x < 8


async def mapper(x):
    """Double each value."""
    return x * 2


async def drop(x):
    """dropwhile predicate: skip leading values under 10."""
    return x < 10


async def reducer(acc, x):
    """Accumulate a running sum."""
    return acc + x


async def task(numbers):
    """Pipe the numbers through filter -> map -> dropwhile -> reduce."""
    pipeline = (numbers
                | paco.filter(filterer)
                | paco.map(mapper)
                | paco.dropwhile(drop)
                | paco.reduce(reducer, initializer=0))  # noqa
    return await pipeline


# Drive the pipeline from synchronous code.
number = paco.run(task((1, 2, 3, 4, 5, 6, 7, 8, 9, 10)))

print('Number:', number)  # => 36
from crawler_tmpl.task import Task
from crawler_tmpl.douyin_fetcher import DouyinFetcher

import paco

profile_url = 'https://www.iesdouyin.com/share/user/3496035207873751'


if __name__ == "__main__":
    # Build a crawl task for the profile page and drive the fetcher
    # through paco's event-loop runner.
    crawl_task = Task(profile_url)
    fetcher = DouyinFetcher()
    paco.run(fetcher.run(crawl_task))
from crawler_tmpl.task import Task
from crawler_tmpl.weibo_fetcher import WeiboFetcher

import paco

# URL template kept for reference:
# profile_url = 'https://m.weibo.cn/api/container/getIndex?jumpfrom=weibocom&type=uid&value={uid}'
profile_url = 'https://m.weibo.cn/api/container/getIndex?jumpfrom=weibocom&type=uid&value=7283431349'


if __name__ == "__main__":
    # Build a crawl task for the profile endpoint and drive the fetcher
    # through paco's event-loop runner.
    crawl_task = Task(profile_url)
    fetcher = WeiboFetcher()
    paco.run(fetcher.run(crawl_task))