def __refresh_new_target(self, resp):
    """Follow meta/JS refresh targets in *resp* until a final page is reached.

    Recursively chases the "next target" URL extracted by ``Parser`` from the
    response, updating session state (``sid``, ``autim``, current domain) along
    the way.

    :param resp: a ``requests``-style response object (has ``.url``).
    :return: the final response once no further refresh target is found, or
             the response of a fresh GET on ``self.__main_url`` when the
             refresh switched to a new domain.
    """
    warning("Refresh Checking")
    # NOTE(fix): the original had a stray no-op expression statement
    # ``Parser.get_next_target`` (bare attribute access, result discarded)
    # before the real call below — removed.
    next_path = Parser.get_next_target(resp)
    self.__sid = Parser.get_sid(resp, self.__sid)
    if next_path:
        parse_res = urlparse(next_path)
        domain = parse_res.netloc
        query = parse_res.query
        if domain and self.__domain != domain:
            # Refresh points at a different host: remember it, record it,
            # and restart from the main URL on the new domain.
            self.__domain = domain
            info(f"Find New Domain: {self.__domain}")
            Cursor.create_new_domain(self.__domain)
            return self.session.get(self.__main_url)
        if query:
            # Parse "k=v&k2=v2" into a dict; assumes each item contains
            # exactly one "=" — TODO confirm upstream guarantees this.
            query = dict(item.split("=") for item in query.split("&"))
            if "autim" in query:
                self.__autim = int(query["autim"])
                info(f"autim: {self.__autim}")
        # Rebuild derived URLs with the refreshed session state.
        self.__make_links()
        next_url = urljoin(resp.url, next_path)
        warning(f"Refresh To: {next_url}")
        # Recurse: the next page may itself carry another refresh target.
        return self.__refresh_new_target(self.session.get(next_url))
    return resp
def refresh_new_target(self, resp):
    """Chase refresh redirects starting from *resp* until none remain.

    Keeps ``self.sid`` current, switches ``self.domain`` (and its cookies /
    DB record) when the refresh target lives on another host, then follows
    the target URL recursively.

    :param resp: a ``requests``-style response object (has ``.url``).
    :return: the final response with no further refresh target.
    """
    warning("refresh checking")
    next_path = Parser.get_next_target(resp)
    self.sid = Parser.get_sid(resp, self.sid)
    # Guard clause: nothing left to follow — this response is final.
    if not next_path:
        return resp
    parsed = urlparse(next_path)
    target_host = parsed.netloc
    raw_query = parsed.query
    if target_host and self.domain != target_host:
        # The refresh crossed onto a new host: adopt it everywhere.
        self.domain = target_host
        success(f"new domain: {self.domain}")
        self.update_cookie_domain()
        Cursor.create_new_domain(self.domain)
    if raw_query:
        # Parse "k=v&k2=v2" pairs; NOTE(review): the resulting dict is not
        # read afterwards — presumably kept for its validation side effect,
        # verify against callers before removing.
        dict(pair.split("=") for pair in raw_query.split("&"))
    self.make_links()
    next_url = urljoin(resp.url, next_path)
    warning(f"refresh to: {next_url}")
    # The followed page may itself redirect again — recurse.
    return self.refresh_new_target(self.session.get(next_url))