def out_of_traffic(self):
    """
    Return True when the account cannot cover the download.

    Re-logs in once if the credit pattern is missing from the page,
    then compares the remaining credit against the file size.
    """
    #: Verify the session is still valid; retry once after a relogin
    match = re.search(self.USER_CREDIT_PATTERN, self.data)
    if match is None:
        self.account.relogin()
        self.data = self.load(self.pyfile.url)
        match = re.search(self.USER_CREDIT_PATTERN, self.data)
        if match is None:
            return True

    #: Compare remaining credit against the file size
    try:
        credit = parse.bytesize(match.group(1).replace(" ", ""), match.group(2))

        size_kib = self.pyfile.size >> 10
        credit_kib = credit >> 10
        self.log_info(
            self._("Premium download for {} KiB of Credit").format(size_kib))
        self.log_info(
            self._("User {} has {} KiB left").format(
                self.account.user, credit_kib))

        if credit < self.pyfile.size:
            self.log_info(
                self._("Not enough credit to download file: {}").format(
                    self.pyfile.name))
            return True

    except Exception as exc:
        #: Best effort: log the parsing failure and attempt the download anyway
        self.log_error(exc,
                       exc_info=self.pyload.debug > 1,
                       stack_info=self.pyload.debug > 2)

    return False
def handle_premium(self, pyfile):
    """
    Request a premium download link for *pyfile* from the leech360 API.
    """
    token = self.account.info["data"]["token"]
    response = json.loads(
        self.load(
            "https://leech360.com/generate",
            get={"token": token, "link": pyfile.url},
        )
    )

    if response["error"]:
        self.fail(response["error_message"])

    #: Keep the current name when the API returns an empty filename
    pyfile.name = response.get("filename", "") or pyfile.name
    pyfile.size = parse.bytesize(response.get("message", "0"))
    self.link = response["download_url"]
def _get_info(self, url):
    """
    Post *url* to the link-checker page and parse one result tuple per row.

    Each tuple is (name, size in bytes, status code, original link);
    status is 2 when the row starts with "Available", else 1.
    """
    html = get_url(self.URLS[1], post={"urls": url})

    results = []
    for row in re.finditer(self.LINKCHECK_TR, html, re.S):
        try:
            cells = re.findall(self.LINKCHECK_TD, row.group(1))
            if not cells:
                continue
            #: "--" marks a missing name/size cell
            name = cells[0] if cells[1] == "--" else cells[1]
            size = 0 if cells[2] == "--" else parse.bytesize(cells[2])
            status = 2 if cells[3].startswith("Available") else 1
            results.append((name, size, status, cells[0]))
        except Exception:
            #: Malformed rows are skipped silently
            continue

    return results
def handle_premium(self, pyfile):
    """
    Unlock *pyfile* through the account API and record file metadata.
    """
    res = self.api_response(
        "link/unlock",
        link=pyfile.url,
        token=self.account.info["data"]["token"],
    )

    if not res.get("error", False):
        infos = res["infos"]
        pyfile.name = infos["filename"]
        pyfile.size = parse.bytesize(infos["filesize"])
        self.link = infos["link"]
    elif res.get("errorCode", 0) in (12, 31):
        #: Codes 12/31 are handled as a dead link (permanent offline)
        self.offline()
    else:
        #: Any other error is logged and treated as temporarily offline
        self.log_warning(res["error"])
        self.temp_offline()
def handle_premium(self, pyfile):
    """
    Fetch a premium download link for *pyfile* from the Over-Load API.
    """
    account_data = self.account.get_data()
    api_data = json.loads(
        self.load(
            "https://api.over-load.me/getdownload.php",
            get={"auth": account_data["password"], "link": pyfile.url},
        )
    )
    self.log_debug(api_data)

    if api_data["error"] == 1:
        self.log_warning(api_data["msg"])
        self.temp_offline()
        return

    self.link = api_data["downloadlink"]

    #: Replace placeholder ".tmp" names with the real file name and size
    if pyfile.name and pyfile.name.endswith(".tmp") and api_data["filename"]:
        pyfile.name = api_data["filename"]
        pyfile.size = parse.bytesize(api_data["filesize"])
def wait_for_server_dl(self, torrent_id):
    """
    Show progress while the server does the download
    """
    #: Initial status query; an API-level or torrent-level error aborts the job
    api_data = self.api_response("torrents/STATUS", tid=torrent_id)
    if api_data['status'] != "OK":
        self.fail(api_data['error'])

    if api_data['return']['status'] == "ERROR":
        self.fail(api_data['return']['error'])

    self.pyfile.name = api_data['return']['name']
    self.pyfile.set_custom_status("torrent")
    self.pyfile.set_progress(0)

    if api_data['return']['status'] != "FINISHED":
        #: Ask the server to start the download
        api_data = self.api_response("torrents/START", tid=torrent_id)
        if api_data['status'] != "OK":
            if api_data['error'] == "Magnet URI processing in progress. Please wait.":
                #: Magnet metadata is still being resolved — retry START up to
                #: 8 times, 3 s apart
                for _i in range(8):
                    self.sleep(3)
                    api_data = self.api_response("torrents/START", tid=torrent_id)
                    if api_data['status'] == "OK":
                        break
                else:
                    #: All retries exhausted without a successful START
                    self.fail(api_data['error'])
            elif api_data['error'] != "Already started.":
                #: "Already started." is harmless; any other error is fatal
                self.fail(api_data['error'])

        #: Poll the server every 2 s until the torrent reaches FINISHED,
        #: mirroring its percentage into the pyLoad progress bar
        while True:
            api_data = self.api_response("torrents/STATUS", tid=torrent_id)
            if api_data['status'] != "OK":
                self.fail(api_data['error'])

            if api_data['return']['status'] == "ERROR":
                self.fail(api_data['return']['error'])

            #: Adopt the server-reported size once, if we don't have one yet
            torrent_size = api_data['return'].get('getSize')
            if torrent_size is not None and self.pyfile.size == 0:
                self.pyfile.size = parse.bytesize(torrent_size)

            progress = int(api_data['return']['percentDone'])
            self.pyfile.set_progress(progress)

            if api_data['return']['status'] == "FINISHED":
                break

            self.sleep(2)

    self.pyfile.set_progress(100)
    self.sleep(1)

    #: Second phase: have the server pack the torrent into a zip archive;
    #: poll while the job is PENDING
    self.pyfile.set_custom_status("makezip")
    self.pyfile.set_progress(0)
    while True:
        #: NOTE(review): GENZIP uses the "torrentid" parameter while the other
        #: calls use "tid" — presumably matches the remote API; verify
        api_data = self.api_response("torrents/GENZIP", torrentid=torrent_id)
        if api_data['status'] == "ERROR":
            self.fail(api_data['error'])
        elif api_data['status'] == "PENDING":
            self.sleep(2)
        else:
            break

    self.pyfile.set_progress(100)
    return api_data['return']
def get_info(cls, url="", html=""):
    """
    Build the info dict for *url*, optionally using pre-fetched *html*.

    Starts from the parent class info merged with cls.api_info(url),
    then (when needed) downloads the page and applies the class's
    OFFLINE/TEMP_OFFLINE and INFO/NAME/SIZE/HASHSUM patterns.
    """
    info = super(SimpleDownloader, cls).get_info(url)
    info.update(cls.api_info(url))

    #: Only fetch the page ourselves when no html was supplied and the
    #: status is not already 2 (online)
    if not html and info["status"] != 2:
        if not url:
            info["error"] = "missing url"
            info["status"] = 1
        elif info["status"] in (3, 7):
            try:
                html = get_url(url, cookies=cls.COOKIES, decode=cls.TEXT_ENCODING)
            except BadHeader as exc:
                info["error"] = "{}: {}".format(exc.code, exc.content)
            except Exception:
                #: Best effort: fetch failures other than BadHeader are ignored
                pass

    if html:
        #: Offline / temporarily-offline checks take precedence over parsing
        if cls.OFFLINE_PATTERN and re.search(cls.OFFLINE_PATTERN, html) is not None:
            info["status"] = 1
        elif (cls.TEMP_OFFLINE_PATTERN and re.search(cls.TEMP_OFFLINE_PATTERN, html) is not None):
            info["status"] = 6
        else:
            #: Try each declared pattern; missing attributes or non-matching
            #: regexes are simply skipped
            for pattern in (
                "INFO_PATTERN",
                "NAME_PATTERN",
                "SIZE_PATTERN",
                "HASHSUM_PATTERN",
            ):
                try:
                    attr = getattr(cls, pattern)
                    pdict = re.search(attr, html).groupdict()
                    #: NOTE(review): this condition is always True — the
                    #: generator yields only True values (or nothing), so
                    #: all() cannot be False; presumably the intent was
                    #: all(k not in info["pattern"] for k in pdict) — confirm
                    #: upstream before changing, since every pdict currently
                    #: gets merged
                    if all(True for k in pdict if k not in info["pattern"]):
                        info["pattern"].update(pdict)
                except Exception:
                    continue
            else:
                #: for-else with no break in the loop body: this branch always
                #: runs, so reaching the pattern stage marks the file online
                info["status"] = 2

    #: Derive the display name from the captured "N" group
    if "N" in info["pattern"]:
        name = replace_patterns(info["pattern"]["N"], cls.NAME_REPLACEMENTS)
        info["name"] = parse_name(name)

    #: Size from captured groups ("S" value, optional "U" unit suffix), or
    #: fall back to parsing a pre-existing string size
    if "S" in info["pattern"]:
        size = replace_patterns(
            info["pattern"]["S"] + info["pattern"]["U"] if "U" in info["pattern"] else info["pattern"]["S"],
            cls.SIZE_REPLACEMENTS,
        )
        info["size"] = parse.bytesize(size)
    elif isinstance(info["size"], str):
        unit = info["units"] if "units" in info else ""
        info["size"] = parse.bytesize(info["size"], unit)

    #: Hash: "H" names the algorithm (e.g. "MD5"), "D" carries the digest
    if "H" in info["pattern"]:
        hash_type = info["pattern"]["H"].strip("-").upper()
        info["hash"][hash_type] = info["pattern"]["D"]

    return info
def parse_traffic(self, size, unit=None):
    """
    Convert a size/unit pair into a byte count.

    :param size: size value understood by parse.bytesize
    :param unit: optional unit name; "byte" when omitted
    :return: traffic in bytes
    """
    resolved_unit = unit or "byte"
    self.log_debug(f"Size: {size}", f"Unit: {unit or 'N/D'}")
    return parse.bytesize(size, resolved_unit)
def parse_traffic(self, size, unit=None):
    """
    Parse a size/unit pair into a traffic amount.

    NOTE: Returns kilobytes only in 0.5.0 (hence the ``>> 10``).

    :param size: size value understood by parse.bytesize
    :param unit: optional unit name; "byte" when omitted
    :return: traffic in KiB
    """
    #: BUGFIX: the second argument was a plain string missing the f-prefix,
    #: so the literal text "{unit or 'N/D'}" was logged instead of the unit
    self.log_debug(f"Size: {size}", f"Unit: {unit or 'N/D'}")
    # TODO: Remove `>> 10` in 0.6.x
    return parse.bytesize(size, unit or "byte") >> 10