def _get_detail(self, detail_page_url: str) -> AnimeDetailInfo: """引擎管理器负责调用, 捕获异常""" try: return self.get_detail(detail_page_url) except Exception as e: logger.exception(e) return AnimeDetailInfo()
async def _parse(self, play_url: str) -> DanmakuDetail: try: await self.init_session() return await self.parse(play_url) except Exception as e: logger.exception(e) return DanmakuDetail()
def _get_detail(self, play_page_url: str) -> DanmakuCollection: """引擎管理器负责调用, 捕获异常""" try: return self.get_detail(play_page_url) except Exception as e: logger.exception(e) return DanmakuCollection()
def _get_danmaku(self, cid: str) -> Dict: """引擎管理器负责调用, 捕获异常""" try: return self.get_danmaku(cid) except Exception as e: logger.exception(e) return {}
def _search(self, keyword: str) -> Iterator[DanmakuMetaInfo]: """引擎管理器负责调用, 捕获异常""" try: yield from self.search(keyword) except Exception as e: logger.exception(e) return
def post(self) -> Json:
    """Fetch articles matching the validated request options.

    Returns the article list as JSON, or an error message payload
    when anything goes wrong (the error is logged).
    """
    try:
        opts = self.get_opts()
        articles = fetch_articles(**self.schema.dump(opts))
        return jsonify(articles)
    except Exception as err:
        logger.exception(err)
        return self._get_message(err)
async def _parse(self, cid: str) -> DanmakuData: """引擎管理器负责调用, 捕获异常""" try: await self.init_session() return await self.parse(cid) except Exception as e: logger.exception(e) return DanmakuData()
def translate(txt, src, target):
    """Translate *txt* from language *src* to language *target*.

    Best-effort wrapper around the module-level ``translator``: returns
    the translated text, or None when the translator raises (the error
    is logged rather than propagated).
    """
    try:
        return translator.translate(txt, source=src, target=target, return_all=False)
    except Exception as err:
        logger.exception(err)
        # Explicit None: the original fell off the end after swallowing
        # the error, leaving the failure contract implicit for callers.
        return None
def _make_request(self, params):
    """POST *params* as JSON to ``self.ENDPOINT`` and decode the JSON reply.

    On a body that is not valid JSON, logs the exception plus the raw
    response text (debug level) and re-raises the original error.
    """
    resp = requests.post(self.ENDPOINT, json=params)
    try:
        return resp.json()
    except Exception as err:
        logger.exception(err)
        logger.debug(resp.text)
        # Bare raise re-raises the active exception with its original
        # traceback; `raise err` needlessly re-raised it as a new statement.
        raise
async def _search(self, keyword: str) -> AsyncIterator[DanmakuMeta]: """引擎管理器负责调用, 捕获异常""" try: await self.init_session() async for item in self.search(keyword): yield item except Exception as e: logger.exception(e) return
def xml_xpath(xml_text: Any, xpath: str) -> Optional[etree.Element]:
    """Evaluate an XPath expression against raw XML text.

    Returns None for empty input or when parsing/evaluation fails
    (the error is logged).
    """
    if not xml_text:
        return None
    try:
        document = etree.XML(xml_text)
        return document.xpath(xpath)
    except Exception as err:
        logger.exception(err)
    return None
async def _parse(self, detail_url: str) -> AnimeDetail: """本方法由引擎管理器负责调用, 创建 session, 捕获异常并记录""" try: await self._before_init() await self.init_session() return await self.parse(detail_url) except Exception as e: logger.exception(e) return AnimeDetail()
def xpath(html: str, xpath: str) -> Optional[etree.Element]:
    """Evaluate an XPath expression against an HTML document string.

    Returns None for empty input or when parsing/evaluation fails
    (the error is logged).
    """
    if not html:
        return None
    try:
        tree = etree.HTML(html)
        return tree.xpath(xpath)
    except Exception as err:
        logger.exception(err)
    return None
def get_opts(self):
    """Load and validate the request's JSON payload via ``self.schema``.

    Raises EmptyJSONException when no body was sent, and
    MalformedJSONException (after logging) when validation fails.
    """
    payload = request.get_json(force=True)
    if not payload:
        raise EmptyJSONException("No data provided")
    try:
        options = self.schema.load(payload)
    except Exception as err:
        logger.exception(err)
        raise MalformedJSONException("Wrong parameters passed")
    return options
async def _search(self, keyword: str) -> AsyncIterator[AnimeMeta]: """本方法由引擎管理器负责调用, 创建 session, 捕获异常并记录""" try: await self._before_init() await self.init_session() async for item in self.search(keyword): yield item except Exception as e: logger.exception(e) return
def post(self) -> Json:
    """Run text analysis on the validated request payload.

    Returns the analysis result as JSON, or an error message payload
    when anything goes wrong (the error is logged).
    """
    try:
        opts = self.schema.dump(self.get_opts())
        text = opts.pop("input_text")
        analysis = TextAnalyser(**opts).fit(text)
        return jsonify(analysis.to_dict())
    except Exception as err:
        logger.exception(err)
        return self._get_message(err)
def post(url: str, data=None, html_encoding="utf-8", **kwargs) -> requests.Response:
    """Wrapper around ``requests.post``; response encoding defaults to utf-8.

    Returns an empty Response object when the request fails
    (the error is logged).
    """
    try:
        logger.debug(f"url: {url}, data: {data}")
        kwargs.setdefault("timeout", 5)
        kwargs.setdefault("headers", HtmlParseHelper._headers)
        # NOTE: TLS certificate verification is disabled here (verify=False)
        resp = requests.post(url, data, verify=False, **kwargs)
        resp.encoding = html_encoding
        return resp
    except requests.RequestException as err:
        logger.exception(err)
        return requests.Response()
def get(url: str, params=None, html_encoding="utf-8", **kwargs) -> requests.Response:
    """Wrapper around ``requests.get``; response encoding defaults to utf-8.

    Returns an empty Response object when the request fails
    (the error is logged).
    """
    try:
        logger.debug(f"url: {url}, params: {params}")
        kwargs.setdefault("timeout", 5)
        kwargs.setdefault("headers", HtmlParseHelper._headers)
        # NOTE: TLS certificate verification is disabled here (verify=False)
        resp = requests.get(url, params, verify=False, **kwargs)
        # Some sites still serve gb2312/gb18030 pages, so the encoding
        # must be set explicitly rather than trusted from headers.
        resp.encoding = html_encoding
        return resp
    except requests.RequestException as err:
        logger.exception(err)
        return requests.Response()
async def _parse(self, raw_url: str) -> AnimeInfo:
    """Resolve the direct video link, catching anything the engine
    module itself failed to handle.

    Returns an empty AnimeInfo when resolution fails or the resolved
    link is not usable.
    """
    try:
        await self._before_init()
        await self.init_session()
        info = await self.parse(raw_url)
        if not isinstance(info, AnimeInfo):
            # allow parse() to return a bare string link
            info = AnimeInfo(info)
        await info.detect_more_info()
        if not info.is_available():
            logger.error(f"Parse failed: {info}")
            return AnimeInfo()
        # resolved successfully
        logger.info(f"Parse success: {info}")
        logger.info(f"Real url: {info.real_url}")
        return info
    except Exception as err:
        logger.exception(err)
        return AnimeInfo()
def head(url: str, params=None, allow_redirects=True, **kwargs) -> requests.Response:
    """Wrapper around ``requests.head``; follows 302 redirects by default
    so the final direct link can be discovered.

    Returns an empty Response object when the request fails
    (the error is logged).
    """
    try:
        logger.debug(
            f"url: {url}, params: {params}, allow_redirects: {allow_redirects}"
        )
        kwargs.setdefault("timeout", 5)
        kwargs.setdefault("headers", HtmlParseHelper._headers)
        # NOTE: TLS certificate verification is disabled here (verify=False)
        return requests.head(url, params=params, verify=False,
                             allow_redirects=allow_redirects, **kwargs)
    except requests.RequestException as err:
        logger.exception(err)
        return requests.Response()
def wrapper(*args, **kwargs) -> Any:
    """Delegate to the wrapped ``func``, converting any failure into the
    decorator-supplied exception type ``exc`` (after logging it).
    """
    try:
        result = func(*args, **kwargs)
    except Exception as error:
        logger.exception(error)
        raise exc(str(error))
    return result