async def _download_file(uri: str, out_path: str) -> None:
    """Download *uri* and write the body to *out_path*.

    Streams the response in fixed-size chunks so arbitrarily large files
    never have to fit in memory (resolves the old TODO, which buffered the
    whole body with ``response.read()``).

    Raises whatever ``check_http_status`` raises on a non-success status.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(uri) as response:
            check_http_status(response.status, uri)
            # "wb" truncates/creates the file; the original "wb+" also
            # opened it for reading, which was never used.
            with open(out_path, "wb") as out:
                async for chunk in response.content.iter_chunked(64 * 1024):
                    out.write(chunk)
async def _collect_posts_thread(board: str, thread: str):
    """Yield each post of *thread*, tidying its fields in place first."""
    uri = _thread_url(thread)
    async with aiohttp.ClientSession() as session:
        async with session.get(uri) as response:
            check_http_status(response.status, uri)
            payload = await response.json()
            for raw_post in payload:
                _tidy_post_fields(raw_post)
                yield raw_post
async def _collect_threads(board: str):
    """Yield every thread listed on *board*'s catalog pages."""
    uri = _catalog_url(board)
    async with aiohttp.ClientSession() as session:
        async with session.get(uri) as response:
            check_http_status(response.status, uri)
            pages = await response.json()
            for page in pages:
                # Pages without a "threads" key carry nothing to yield.
                if "threads" in page:
                    for item in _threads_from_page(page):
                        yield item
async def collect_posts(target: str):
    """Yield every post matching the *target* tag query.

    Walks the ``/posts.json`` endpoint page by page and stops at the first
    empty page, which the API uses to signal the end of the result set.
    Each post has its fields tidied via ``_tidy_post_fields`` before being
    yielded.
    """
    uri = API_BASE + "/posts.json"
    params = {"tags": target}
    async with aiohttp.ClientSession() as session:
        # NOTE(review): itertools.count() starts paging at 0; confirm the
        # API does not expect 1-based page numbers.
        for page_number in itertools.count():
            params["page"] = page_number
            async with session.get(uri, params=params) as response:
                check_http_status(response.status, uri)
                posts = await response.json()
            # Empty page - stop searching.
            if not posts:
                break
            for post in posts:
                _tidy_post_fields(post)
                yield post