import logging
import os

# ComicBook, SessionMgr, ConfigKey, const, SiteNotSupport and
# get_cookies_path are project-level helpers; current_app is Flask's
# application proxy.
logger = logging.getLogger(__name__)


def get_comicbook_from_cache(site, comicid=None):
    # Reject sites that are explicitly unsupported before doing any work.
    if site in const.NOT_SUPPORT_SITES:
        raise SiteNotSupport()
    comicbook = ComicBook(site=site, comicid=comicid)
    # Apply a per-site proxy if one is configured for the crawler.
    proxy_config = current_app.config.get(ConfigKey.CRAWLER_PROXY, {})
    proxy = proxy_config.get(site)
    if proxy:
        SessionMgr.set_proxy(site=site, proxy=proxy)
    # Reuse previously saved cookies for this site, if any exist on disk.
    cookies_path = get_cookies_path(site=site)
    if os.path.exists(cookies_path):
        SessionMgr.load_cookies(site=site, path=cookies_path)
    return comicbook
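
# A minimal usage sketch of the helper above; the site name "qq" and the
# comicid "505430" are placeholder values, not taken from this module.
def _example_get_comicbook():
    comicbook = get_comicbook_from_cache(site="qq", comicid="505430")
    comicbook.start_crawler()
    return comicbook.to_dict()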

def _test_crawl_comicbook(site, comicid=None, chapter_number=1, proxy=None,
                          test_search=True):
    comicbook = ComicBook(site=site, comicid=comicid)
    if proxy:
        SessionMgr.set_proxy(site=site, proxy=proxy)
    comicbook.start_crawler()
    # Fetch one chapter and verify the crawler extracted image URLs.
    chapter = comicbook.Chapter(chapter_number=chapter_number)
    assert len(chapter.image_urls) > 0
    logger.info(chapter.to_dict())
    logger.info(comicbook.to_dict())
    if test_search:
        # Exercise the site's search API with the crawler's default query.
        result = comicbook.search(name=comicbook.crawler.DEFAULT_SEARCH_NAME)
        assert len(result.to_dict()) > 0
    return comicbook, chapter
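
# Hedged example of driving the test helper from a concrete test case; the
# site name "qq" and the proxy URL are placeholder assumptions.
def test_qq_crawler():
    _test_crawl_comicbook(site="qq", proxy="socks5://127.0.0.1:1080")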