def notify(self, event, msg="", key=None):
    """Push a notification to NotifyMyAndroid.

    :param event: short event title sent to the NMA API
    :param msg: longer description text
    :param key: NMA API key; defaults to the configured 'apikey'

    NOTE: the original default ``key=self.getConfig('apikey')`` is evaluated
    at definition time, where ``self`` does not exist (NameError); a ``None``
    sentinel resolved at call time fixes that.
    """
    if key is None:
        key = self.getConfig('apikey')
    if not key:
        return

    # Skip while a client GUI is attached, unless configured to ignore it.
    if self.core.isClientConnected() and not self.getConfig('ignoreclient'):
        return

    elapsed_time = time.time() - self.last_notify

    # Rate limiting: minimum wait between notifications...
    if elapsed_time < self.getConf("sendtimewait"):
        return

    # ...and a per-minute cap (counter resets after a quiet minute).
    if elapsed_time > 60:
        self.notifications = 0
    elif self.notifications >= self.getConf("sendpermin"):
        return

    getURL("http://www.notifymyandroid.com/publicapi/notify",
           get={'apikey'     : key,
                'application': "pyLoad",
                'event'      : event,
                'description': msg})

    self.last_notify = time.time()
    self.notifications += 1
def notify(self, event, msg="", key=None):
    """Send a NotifyMyAndroid push notification for *event*.

    :param event: event title for the NMA API
    :param msg: description body
    :param key: API key; falls back to the 'apikey' config value

    NOTE: the original used ``key=self.getConfig('apikey')`` as a default
    argument — defaults are evaluated when the ``def`` runs, where ``self``
    is undefined (NameError). Fixed with a ``None`` sentinel.
    """
    if key is None:
        key = self.getConfig('apikey')
    if not key:
        return

    # Do not notify while a client is connected, unless overridden.
    if self.core.isClientConnected() and not self.getConfig('ignoreclient'):
        return

    elapsed_time = time.time() - self.last_notify

    # Enforce the configured pause between notifications.
    if elapsed_time < self.getConf("sendtimewait"):
        return

    # Reset the per-minute counter after a quiet minute, otherwise cap it.
    if elapsed_time > 60:
        self.notifications = 0
    elif self.notifications >= self.getConf("sendpermin"):
        return

    getURL("http://www.notifymyandroid.com/publicapi/notify",
           get={'apikey': key,
                'application': "pyLoad",
                'event': event,
                'description': msg})

    self.last_notify = time.time()
    self.notifications += 1
def getInfo(urls):
    """Yield pyLoad file-info tuples for filefactory.com urls.

    A url whose header redirects to a location that does NOT match the
    plugin pattern is a direct download link and is reported as-is with
    status 3; otherwise the html page is fetched and parsed.

    NOTE: the original called ``re.match(m.group(1), FilefactoryCom.__pattern)``
    with the pattern and subject string swapped; fixed here.
    """
    for url in urls:
        h = getURL(url, just_header=True)
        m = re.search(r'Location: (.+)\r\n', h)

        if m and not re.match(FilefactoryCom.__pattern, m.group(1)):  #: It's a direct link! Skipping
            yield (url, 0, 3, url)
        else:  #: It's a standard html page
            yield parseFileInfo(FilefactoryCom, url, getURL(url))
def respond(ticket, value): conf = join(expanduser("~"), "ct.conf") f = open(conf, "rb") try: getURL("http://captchatrader.com/api/respond", post={"is_correct": value, "username": f.readline().strip(), "password": f.readline().strip(), "ticket": ticket}) except Exception, e : print "CT Exception:", e log(DEBUG, str(e))
def respond(ticket, value): conf = join(expanduser("~"), "ct.conf") f = open(conf, "rb") try: getURL("http://captchatrader.com/api/respond", post={ "is_correct": value, "username": f.readline().strip(), "password": f.readline().strip(), "ticket": ticket }) except Exception, e: print "CT Exception:", e log(DEBUG, str(e))
def getInfo(cls, url="", html=""):
    """Build the pyLoad info dict for a share-online.biz link via its
    linkcheck API (adds 'fileid' and 'md5' when the file is online).
    """
    info = {'name'  : urlparse(unquote(url)).path.split('/')[-1] or _("Unknown"),
            'size'  : 0,
            'status': 3 if url else 1,
            'url'   : url}

    if not url:
        return info

    info['pattern'] = re.match(cls.__pattern, url).groupdict()

    # Single semicolon-separated record: fileid;status;name;size;md5
    res = getURL("http://api.share-online.biz/linkcheck.php",
                 get={'md5': "1"},
                 post={'links': info['pattern']['ID']},
                 decode=True)
    field = res.split(";")

    if field[1] == "OK":
        info['fileid'] = field[0]
        info['status'] = 2
        info['name']   = field[2]
        info['size']   = field[3]  #: in bytes
        info['md5']    = field[4].strip().lower().replace("\n\n", "")  #: md5
    elif field[1] in ("DELETED", "NOT FOUND"):
        info['status'] = 1

    return info
def respond(self, ticket, success): try: res = getURL( self.RESPOND_URL, post={"task_id": ticket, "key": self.getConfig("passkey"), "cv": 1 if success else 0} ) except BadHeader, e: self.logError(_("Could not send response"), e)
def getInfo(urls):
    """Resolve dailymotion video urls to (name, size, status, url) tuples
    using the public dailymotion API.
    """
    result  = []
    id_re   = re.compile(DailymotionCom.__pattern)
    apiurl  = "https://api.dailymotion.com/video/%s"
    request = {"fields": "access_error,status,title"}

    for url in urls:
        id = id_re.match(url).group('ID')
        info = json_loads(getURL(apiurl % id, get=request))

        # Prefer the API title; fall back to the raw url.
        name = info['title'] + ".mp4" if "title" in info else url

        if "error" in info or info['access_error']:
            status = "offline"
        else:
            status = info['status']
            if status in ("ready", "published"):
                status = "online"
            elif status in ("waiting", "processing"):
                status = "temp. offline"
            else:
                status = "offline"

        result.append((name, 0, statusMap[status], url))

    return result
def getInfo(cls, url="", html=""):
    """Query the share-online.biz linkcheck API and return the info dict."""
    info = {
        "name": urlparse.urlparse(urllib.unquote(url)).path.split("/")[-1] or _("Unknown"),
        "size": 0,
        "status": 3 if url else 1,
        "url": url,
    }

    if not url:
        return info

    info["pattern"] = re.match(cls.__pattern, url).groupdict()

    # One semicolon-separated record: fileid;status;name;size;md5
    field = getURL(
        "http://api.share-online.biz/linkcheck.php",
        get={"md5": "1"},
        post={"links": info["pattern"]["ID"]},
        decode=True,
    ).split(";")

    if field[1] == "OK":
        info["fileid"] = field[0]
        info["status"] = 2
        info["name"] = field[2]
        info["size"] = field[3]  #: in bytes
        info["md5"] = field[4].strip().lower().replace("\n\n", "")  #: md5
    elif field[1] in ("DELETED", "NOT FOUND"):
        info["status"] = 1

    return info
def getInfo(urls):
    """Yield parsed file info for fshare.vn links via its check_link form."""
    for url in urls:
        html = getURL("http://www.fshare.vn/check_link.php",
                      post={'action'  : "check_link",
                            'arrlinks': url},
                      decode=True)
        yield parseFileInfo(FshareVn, url, html)
def getInfo(cls, url="", html=""): info = cls.apiInfo(url) online = info['status'] == 2 try: info['pattern'] = re.match(cls.__pattern, url).groupdict() #: pattern groups will be saved here except Exception: info['pattern'] = {} if not html and not online: if not url: info['error'] = "missing url" info['status'] = 1 elif info['status'] is 3 and not getFileURL(None, url): try: html = getURL(url, cookies=cls.COOKIES, decode=not cls.TEXT_ENCODING) if isinstance(cls.TEXT_ENCODING, basestring): html = unicode(html, cls.TEXT_ENCODING) except BadHeader, e: info['error'] = "%d: %s" % (e.code, e.content) if e.code is 404: info['status'] = 1 elif e.code is 503: info['status'] = 6
def _captchaResponse(self, task, correct):
    """Send positive ('correct') or negative ('refund') captcha feedback
    to the 9kw API, retrying up to three times.
    """
    type = "correct" if correct else "refund"

    if 'ticket' not in task.data:
        self.logDebug("No CaptchaID for %s request (task: %s)" % (type, task))
        return

    passkey = self.getConfig('passkey')

    res = None
    attempt = 0
    while attempt < 3:
        res = getURL(self.API_URL,
                     get={'action' : "usercaptchacorrectback",
                          'apikey' : passkey,
                          'api_key': passkey,
                          'correct': "1" if correct else "2",
                          'pyload' : "1",
                          'source' : "pyload",
                          'id'     : task.data['ticket']})

        self.logDebug("Request %s: %s" % (type, res))

        if res == "OK":
            return

        time.sleep(5)
        attempt += 1

    self.logDebug("Could not send %s request: %s" % (type, res))
def apiInfo(cls, url="", get={}, post={}):
    """Fetch file status from the uploaded.net 'filemultiple' API,
    retrying up to five times (3s apart) while the API cannot find
    the request.
    """
    info = super(UploadedTo, cls).apiInfo(url)

    file_id = re.match(cls.__pattern, url).group('ID')

    for _attempt in xrange(5):
        html = getURL("http://uploaded.net/api/filemultiple",
                      get={"apikey": cls.API_KEY,
                           'id_0'  : file_id},
                      decode=True)

        if html == "can't find request":
            time.sleep(3)
            continue

        # api[0]=status, api[2]=size, api[4]=name (fields 1 and 3 unused here)
        api = html.split(",", 4)
        if api[0] == "online":
            info.update({'name'  : api[4].strip(),
                         'size'  : api[2],
                         'status': 2})
        else:
            info['status'] = 1
        break

    return info
def _captchaResponse(self, task, correct):
    """Report a captcha verdict ('correct' / 'refund') to 9kw, with up to
    three attempts five seconds apart.
    """
    type = "correct" if correct else "refund"

    if 'ticket' not in task.data:
        self.logDebug("No CaptchaID for %s request (task: %s)" % (type, task))
        return

    passkey = self.getConfig('passkey')
    params = {'action' : "usercaptchacorrectback",
              'apikey' : passkey,
              'api_key': passkey,
              'correct': "1" if correct else "2",
              'pyload' : "1",
              'source' : "pyload",
              'id'     : task.data['ticket']}

    res = None
    for _try in xrange(3):
        res = getURL(self.API_URL, get=params)
        self.logDebug("Request %s: %s" % (type, res))

        if res == "OK":
            return

        time.sleep(5)

    self.logDebug("Could not send %s request: %s" % (type, res))
def getInfo(cls, url="", html=""): info = cls.apiInfo(url) online = info['status'] == 2 try: info['pattern'] = re.match( cls.__pattern, url).groupdict() #: pattern groups will be saved here except Exception: info['pattern'] = {} if not html and not online: if not url: info['error'] = "missing url" info['status'] = 1 elif info['status'] is 3 and not getFileURL(None, url): try: html = getURL(url, cookies=cls.COOKIES, decode=not cls.TEXT_ENCODING) if isinstance(cls.TEXT_ENCODING, basestring): html = unicode(html, cls.TEXT_ENCODING) except BadHeader, e: info['error'] = "%d: %s" % (e.code, e.content) if e.code is 404: info['status'] = 1 elif e.code is 503: info['status'] = 6
def api_response(self, api, ticket):
    """Call a CaptchaBrotherhood *api* endpoint for *ticket* and return
    the raw answer; raises unless it starts with "OK".
    """
    endpoint = "%s%s.aspx" % (self.API_URL, api)
    res = getURL(endpoint,
                 get={"username" : self.getConfig('username'),
                      "password" : self.getConfig('passkey'),
                      "captchaID": ticket})

    if not res.startswith("OK"):
        raise CaptchaBrotherhoodException("Unknown response: %s" % res)

    return res
def server_response(self):
    """Return the update server's answer split into lines, or None
    (after a warning) when the request fails.
    """
    try:
        html = getURL(self.SERVER_URL,
                      get={'v': self.core.api.getServerVersion()})
        return html.splitlines()
    except Exception:
        self.logWarning(_("Unable to retrieve server to get updates"))
def getCredits(self):
    """Query remaining service credits; store and return them (0 on error)."""
    res = getURL(self.API_URL,
                 post={"key"   : self.getConfig('passkey'),
                       "action": "balance"})

    if not res.isdigit():
        self.logError(res)
        return 0

    self.logInfo(_("%s credits left") % res)
    self.info['credits'] = credits = int(res)
    return credits
def getCredits(self):
    """Fetch the CaptchaBrotherhood account balance.

    :raises CaptchaBrotherhoodException: when the API does not answer "OK..."
    """
    res = getURL(self.API_URL + "askCredits.aspx",
                 get={"username": self.getConfig('username'),
                      "password": self.getConfig('passkey')})

    if not res.startswith("OK"):
        raise CaptchaBrotherhoodException(res)

    # The first three characters are the OK marker; the rest is the number.
    credits = int(res[3:])

    self.logInfo(_("%d credits left") % credits)
    self.info['credits'] = credits
    return credits
def captchaInvalid(self, task): if "ticket" in task.data: try: res = getURL(self.API_URL, post={'action': "refund", 'key': self.getConfig('passkey'), 'gen_task_id': task.data['ticket']}) self.logInfo(_("Request refund"), res) except BadHeader, e: self.logError(_("Could not send refund request"), e)
def getInfo(urls):
    """Generator: check each fshare.vn url via the site's link-check
    endpoint and yield the parsed file info.
    """
    for url in urls:
        page = getURL("http://www.fshare.vn/check_link.php",
                      post={'action': "check_link", 'arrlinks': url},
                      decode=True)
        yield parseFileInfo(FshareVn, url, page)
def respond(self, ticket, success): try: res = getURL(self.RESPOND_URL, post={ "task_id": ticket, "key": self.getConfig('passkey'), "cv": 1 if success else 0 }) except BadHeader, e: self.logError(_("Could not send response"), e)
def captchaTask(self, task):
    """Hand a textual captcha to the chat: upload the image to a public
    host and announce its url so a user can answer it.
    """
    if not (self.getConfig('captcha') and task.isTextual()):
        return

    task.handler.append(self)
    task.setWaiting(60)

    # Host the captcha image publicly so the user can view it remotely.
    html = getURL("http://www.freeimagehosting.net/upload.php",
                  post={"attached": (pycurl.FORM_FILE, task.captchaFile)},
                  multipart=True)

    url = re.search(r"\[img\]([^\[]+)\[/img\]\[/url\]", html).group(1)
    self.response(_("New Captcha Request: %s") % url)
    self.response(_("Answer with 'c %s text on the captcha'") % task.id)
def getInfo(urls):
    # Checks each url's page for the offline marker and yields one combined
    # list of (name, size, status, url) tuples for all given urls.
    # Status codes: 1 = offline, 2 = online.
    result = []

    for url in urls:
        html = getURL(url)

        if re.search(StreamCz.OFFLINE_PATTERN, html):
            # File offline
            result.append((url, 0, 1, url))
        else:
            result.append((url, 0, 2, url))

    # NOTE(review): yielded once after the loop — callers receive the whole
    # batch as a single list.
    yield result
def captchaInvalid(self, task):
    """Report a bad captcha image back to the service and ask for a refund."""
    if task.data['service'] != self.__class__.__name__ or "ticket" not in task.data:
        return

    res = getURL(self.RESPOND_URL,
                 post={'action'  : "SETBADIMAGE",
                       'username': self.getConfig('username'),
                       'password': self.getConfig('passkey'),
                       'imageid' : task.data['ticket']})

    if res == "SUCCESS":
        self.logInfo(_("Bad captcha solution received, requested refund"))
    else:
        self.logError(_("Bad captcha solution received, refund request failed"), res)
def captchaTask(self, task):
    """Queue a captcha with 9kw.eu if the account and config allow it.

    Fixes over the original:
      * the server queue length is converted with ``int()`` before the
        comparison (the original compared an int to a str, which in
        Python 2 is always True regardless of the values);
      * ``hoster_options`` is iterated as the split list itself (the
        original wrapped the list in ``str()`` and iterated the
        characters of its repr).
    """
    if not task.isTextual() and not task.isPositional():
        return

    if not self.getConfig('passkey'):
        return

    if self.core.isClientConnected() and not self.getConfig('force'):
        return

    credits = self.getCredits()
    if not credits:
        self.logError(_("Your captcha 9kw.eu account has not enough credits"))
        return

    queue      = min(self.getConfig('queue'), 999)
    timeout    = min(max(self.getConfig('timeout'), 300), 3999)
    pluginname = re.search(r'_([^_]*)_\d+.\w+', task.captchaFile).group(1)

    # Poll the service queue (max. 5 tries, 10s apart).
    for _i in xrange(5):
        servercheck = getURL("http://www.9kw.eu/grafik/servercheck.txt")
        if queue < int(re.search(r'queue=(\d+)', servercheck).group(1)):
            break
        time.sleep(10)
    else:
        self.fail(_("Too many captchas in queue"))

    # Per-hoster overrides, e.g. "hoster:timeout=600|other:timeout=120".
    for opt in self.getConfig('hoster_options').split('|'):
        details = map(str.strip, opt.split(':'))

        if not details or details[0].lower() != pluginname.lower():
            continue

        for d in details:
            hosteroption = d.split("=")

            if len(hosteroption) > 1 \
               and hosteroption[0].lower() == 'timeout' \
               and hosteroption[1].isdigit():
                timeout = int(hosteroption[1])

        break

    task.handler.append(self)
    task.setWaiting(timeout)

    self._processCaptcha(task)
def parseURLs(self, html=None, url=None):
    """Scan html text and/or the page behind *url* for links and pass the
    de-duplicated set to `checkURLs`.

    :param html: html source to scan (optional)
    :param url: additionally fetch and scan this url (optional)
    :return: result of `self.checkURLs`
    """
    urls = []

    if html:
        urls.extend(match[0] for match in urlmatcher.findall(html))

    if url:
        page = getURL(url)
        urls.extend(match[0] for match in urlmatcher.findall(page))

    # remove duplicates
    return self.checkURLs(set(urls))
def checkHTML(self, html, url):
    """Scan html text and/or the page behind *url* for links and hand the
    unique matches on to `checkLinks`.

    :param html: html source to scan
    :param url: additionally fetch and scan this url
    :return: result of `self.checkLinks`
    """
    found = []

    if html:
        found.extend(match[0] for match in urlmatcher.findall(html))

    if url:
        page = getURL(url)
        found.extend(match[0] for match in urlmatcher.findall(page))

    return self.checkLinks(uniqify(found))
def checkFile(plugin, urls):
    """Batch-check urls against the hoster's online-check form and return
    a list of (name, size, status, url) tuples parsed from the result
    table.

    NOTE: the original built ``file_info`` but never returned it; the
    missing ``return`` is added (and the unused exception binding dropped).
    """
    html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)}, decode=True)

    file_info = []
    for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
        try:
            cols = re.findall(plugin.LINKCHECK_TD, li.group(1))
            if cols:
                file_info.append((
                    cols[1] if cols[1] != '--' else cols[0],           #: name
                    parseFileSize(cols[2]) if cols[2] != '--' else 0,  #: size
                    2 if cols[3].startswith('Available') else 1,       #: status
                    cols[0]))                                          #: url
        except Exception:
            continue

    return file_info
def handle_free(self, pyfile):
    """Ask the webshare.cz api for the direct download link."""
    if self.account and 'wst' in self.account.infos:
        wst = self.account.infos['wst']
    else:
        wst = ""

    api_data = getURL('https://webshare.cz/api/file_link/',
                      post={'ident': self.info['pattern']['ID'],
                            'wst'  : wst},
                      decode=True)

    self.logDebug("API data: " + api_data)

    m = re.search('<link>(.+)</link>', api_data)
    if m is None:
        self.error(_("Unable to detect direct link"))

    self.link = m.group(1)
def getCredits(self):
    """Retrieve remaining 9kw credits; logs and returns 0 on a non-numeric
    answer.
    """
    res = getURL(self.API_URL,
                 get={'apikey': self.getConfig('passkey'),
                      'pyload': "1",
                      'source': "pyload",
                      'action': "usercaptchaguthaben"})

    if not res.isdigit():
        self.logError(res)
        return 0

    self.logInfo(_("%s credits left") % res)
    credits = self.info['credits'] = int(res)
    return credits
def captchaTask(self, task):
    """Submit a captcha task to 9kw.eu when credits and config permit.

    Fixes over the original:
      * ``int()`` around the parsed server queue length — the original
        compared an int with a str (in Python 2 that comparison is
        type-based and always True here);
      * the ``hoster_options`` list is iterated directly — the original
        iterated ``str(list)``, i.e. the characters of the list's repr.
    """
    if not task.isTextual() and not task.isPositional():
        return

    if not self.getConfig('passkey'):
        return

    if self.core.isClientConnected() and not self.getConfig('force'):
        return

    credits = self.getCredits()
    if not credits:
        self.logError(_("Your captcha 9kw.eu account has not enough credits"))
        return

    queue      = min(self.getConfig('queue'), 999)
    timeout    = min(max(self.getConfig('timeout'), 300), 3999)
    pluginname = re.search(r'_([^_]*)_\d+.\w+', task.captchaFile).group(1)

    # Wait (up to 5 * 10s) for the service queue to be acceptable.
    for _i in xrange(5):
        servercheck = getURL("http://www.9kw.eu/grafik/servercheck.txt")
        if queue < int(re.search(r'queue=(\d+)', servercheck).group(1)):
            break
        time.sleep(10)
    else:
        self.fail(_("Too many captchas in queue"))

    # Apply a per-hoster timeout override when configured.
    for opt in self.getConfig('hoster_options').split('|'):
        details = map(str.strip, opt.split(':'))

        if not details or details[0].lower() != pluginname.lower():
            continue

        for d in details:
            hosteroption = d.split("=")

            if len(hosteroption) > 1 \
               and hosteroption[0].lower() == 'timeout' \
               and hosteroption[1].isdigit():
                timeout = int(hosteroption[1])

        break

    task.handler.append(self)
    task.setWaiting(timeout)

    self._processCaptcha(task)
def handleFree(self, pyfile):
    """Request the direct download link from the webshare.cz api."""
    if self.account and 'wst' in self.account.infos:
        wst = self.account.infos['wst']
    else:
        wst = ""

    api_data = getURL('https://webshare.cz/api/file_link/',
                      post={'ident': self.info['pattern']['ID'],
                            'wst'  : wst},
                      decode=True)

    self.logDebug("API data: " + api_data)

    m = re.search('<link>(.+)</link>', api_data)
    if m is None:
        self.error(_("Unable to detect direct link"))

    self.link = m.group(1)
def checkFile(plugin, urls):
    """Run the hoster's bulk link-check on *urls* and return the parsed
    (name, size, status, url) tuples.

    NOTE: the original collected ``file_info`` but fell off the end
    without returning it; ``return file_info`` is added and the unused
    exception variable removed.
    """
    html = getURL(plugin.URLS[1], post={"urls": "\n".join(urls)}, decode=True)

    file_info = []
    for li in re.finditer(plugin.LINKCHECK_TR, html, re.S):
        try:
            cols = re.findall(plugin.LINKCHECK_TD, li.group(1))
            if cols:
                file_info.append((
                    cols[1] if cols[1] != '--' else cols[0],           #: name
                    parseFileSize(cols[2]) if cols[2] != '--' else 0,  #: size
                    2 if cols[3].startswith('Available') else 1,       #: status
                    cols[0]))                                          #: url
        except Exception:
            continue

    return file_info
def getCredits(self):
    """Return the ImageTyperz account balance (in dollars).

    :raises ImageTyperzException: on an API error or unparsable answer
    """
    response = getURL(self.GETCREDITS_URL,
                      post={'action'  : "REQUESTBALANCE",
                            'username': self.getConfig('username'),
                            'password': self.getConfig('passkey')})

    if response.startswith('ERROR'):
        raise ImageTyperzException(response)

    try:
        balance = float(response)
    except Exception:
        raise ImageTyperzException("Invalid response")

    self.logInfo(_("Account balance: $%s left") % response)
    return balance
def getCredits(self):
    """Query the 9kw credit balance; store it in self.info and return it,
    or log the raw answer and return 0 when it is not a number.
    """
    answer = getURL(self.API_URL,
                    get={'apikey': self.getConfig('passkey'),
                         'pyload': "1",
                         'source': "pyload",
                         'action': "usercaptchaguthaben"})

    if answer.isdigit():
        self.logInfo(_("%s credits left") % answer)
        credits = self.info['credits'] = int(answer)
        return credits

    self.logError(answer)
    return 0
def getIP(self):
    """retrieve current ip

    Tries up to ten times against a randomly chosen ip-echo service and
    returns the ip as a string, or "" when every attempt failed.

    NOTE: the original used a bare ``except:`` which also swallows
    KeyboardInterrupt/SystemExit; narrowed to ``except Exception``.
    """
    services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
                ("http://checkip.dyndns.org/", ".*Current IP Address: (\S+)</body>.*")]

    ip = ""
    for i in range(10):
        try:
            sv = choice(services)
            ip = getURL(sv[0])
            ip = re.match(sv[1], ip).group(1)
            break
        except Exception:
            ip = ""
            sleep(1)

    return ip
def getIP(self):
    """Retrieve the current external ip; empty string when all ten
    attempts against the (randomly picked) echo services fail.
    """
    services = [("http://automation.whatismyip.com/n09230945.asp", "(\S+)"),
                ("http://checkip.dyndns.org/", ".*Current IP Address: (\S+)</body>.*")]

    ip = ""
    attempts = 0
    while attempts < 10:
        host, pattern = choice(services)
        try:
            ip = getURL(host)
            ip = re.match(pattern, ip).group(1)
            break
        except Exception:
            ip = ""
            sleep(1)
        attempts += 1

    return ip
def getInfo(urls):
    ## returns list of tupels (name, size (in bytes), status (see database.File), url)
    """Yield result lists for netload.in links, querying the info API with
    up to 80 file ids per request.

    NOTE: the original called ``self.logDebug`` inside this module-level
    function — ``self`` is undefined there, so the error paths raised
    NameError instead of logging; those calls were removed.
    """
    apiurl = "http://api.netload.in/info.php"
    id_regex = re.compile(NetloadIn.__pattern)
    urls_per_query = 80

    for chunk in chunks(urls, urls_per_query):
        # Build the ";"-separated id list for this chunk.
        ids = ""
        for url in chunk:
            match = id_regex.search(url)
            if match:
                ids = ids + match.group('ID') + ";"

        api = getURL(apiurl,
                     get={'auth'   : "Zf9SnQh9WiReEsb18akjvQGqT0I830e8",
                          'bz'     : 1,
                          'md5'    : 1,
                          'file_id': ids},
                     decode=True)

        if api is None or len(api) < 10:
            # Prefetch failed -- nothing sensible to yield.
            return

        if api.find("unknown_auth") >= 0:
            # Outdated auth code -- abort.
            return

        result = []
        for i, r in enumerate(api.splitlines()):
            try:
                tmp = r.split(";")
                try:
                    size = int(tmp[2])
                except Exception:
                    size = 0
                result.append((tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i]))
            except Exception:
                # Malformed response line -- skip it.
                continue

        yield result
def captchaInvalid(self, task):
    """Flag a captcha image as bad at the service and request a refund."""
    if task.data['service'] != self.__class__.__name__ or "ticket" not in task.data:
        return

    res = getURL(self.RESPOND_URL,
                 post={'action'  : "SETBADIMAGE",
                       'username': self.getConfig('username'),
                       'password': self.getConfig('passkey'),
                       'imageid' : task.data['ticket']})

    if res == "SUCCESS":
        self.logInfo(_("Bad captcha solution received, requested refund"))
    else:
        self.logError(_("Bad captcha solution received, refund request failed"), res)
def getInfo(cls, url="", html=""):
    """Extend the generic info dict with name/size/status from the
    webshare.cz file_info api.
    """
    info = super(WebshareCz, cls).getInfo(url, html)

    if not url:
        return info

    info['pattern'] = re.match(cls.__pattern, url).groupdict()

    api_data = getURL("https://webshare.cz/api/file_info/",
                      post={'ident': info['pattern']['ID']},
                      decode=True)

    if 'File not found' in api_data:
        info['status'] = 1
    else:
        info['status'] = 2
        info['name'] = re.search('<name>(.+)</name>', api_data).group(1) or info['name']
        info['size'] = re.search('<size>(.+)</size>', api_data).group(1) or info['size']

    return info
def getCredits(self):
    """Ask ImageTyperz for the account balance and return it as a float.

    :raises ImageTyperzException: when the API reports an error or the
        answer is not a number
    """
    answer = getURL(self.GETCREDITS_URL,
                    post={'action'  : "REQUESTBALANCE",
                          'username': self.getConfig('username'),
                          'password': self.getConfig('passkey')})

    if answer.startswith('ERROR'):
        raise ImageTyperzException(answer)

    try:
        balance = float(answer)
    except Exception:
        raise ImageTyperzException("Invalid response")

    self.logInfo(_("Account balance: $%s left") % answer)
    return balance
def getInfo(cls, url="", html=""):
    """Augment the base info dict with webshare.cz file_info api data."""
    info = super(WebshareCz, cls).getInfo(url, html)

    if not url:
        return info

    info['pattern'] = re.match(cls.__pattern, url).groupdict()

    api_data = getURL("https://webshare.cz/api/file_info/",
                      post={'ident': info['pattern']['ID']},
                      decode=True)

    if 'File not found' in api_data:
        info['status'] = 1
    else:
        info["status"] = 2
        info['name'] = re.search('<name>(.+)</name>', api_data).group(1) or info['name']
        info['size'] = re.search('<size>(.+)</size>', api_data).group(1) or info['size']

    return info
def process(self, pyfile):
    """Resolve a flyfiles.net session link into a direct download url,
    retrying after a 10-minute wait when the server refuses.
    """
    name = re.search(self.NAME_PATTERN, pyfile.url).group(1)
    pyfile.name = urllib.unquote_plus(name)

    session = re.search(self.SESSION_PATTERN, pyfile.url).group(1)

    # get download URL
    parsed_url = getURL("http://flyfiles.net", post={"getDownLink": session})
    self.logDebug("Parsed URL: %s" % parsed_url)

    if parsed_url in ('#downlink|', "#downlink|#"):
        self.logWarning(_("Could not get the download URL. Please wait 10 minutes"))
        self.wait(10 * 60, True)
        self.retry()

    self.link = parsed_url.replace('#downlink|', '')
def process(self, pyfile):
    """Turn a flyfiles.net session link into a direct download url
    (waits 10 minutes and retries when the server refuses).
    """
    name = re.search(self.NAME_PATTERN, pyfile.url).group(1)
    pyfile.name = unquote_plus(name)

    session = re.search(self.SESSION_PATTERN, pyfile.url).group(1)

    # get download URL
    parsed_url = getURL("http://flyfiles.net", post={"getDownLink": session})
    self.logDebug("Parsed URL: %s" % parsed_url)

    if parsed_url in ('#downlink|', "#downlink|#"):
        self.logWarning(_("Could not get the download URL. Please wait 10 minutes"))
        self.wait(10 * 60, True)
        self.retry()

    self.link = parsed_url.replace('#downlink|', '')
def apiInfo(cls, url="", get={}, post={}):
    """Look up file status via the uploaded.net 'filemultiple' API;
    retries five times while the API answers "can't find request".
    """
    info = super(UploadedTo, cls).apiInfo(url)

    for _i in xrange(5):
        html = getURL("http://uploaded.net/api/filemultiple",
                      get={"apikey": cls.API_KEY,
                           'id_0'  : re.match(cls.__pattern, url).group('ID')},
                      decode=True)

        if html == "can't find request":
            time.sleep(3)
            continue

        # api[0]=status, api[2]=size, api[4]=name (fields 1 and 3 unused)
        api = html.split(",", 4)
        if api[0] == "online":
            info.update({'name': api[4].strip(), 'size': api[2], 'status': 2})
        else:
            info['status'] = 1
        break

    return info
def getInfo(urls):
    # Checks urls in batches of 10 and yields the result list after each
    # batch. NOTE(review): `result` is created once and never reset, so
    # each yield repeats the entries of earlier batches — confirm callers
    # expect the accumulated list.
    result = []

    for chunk in chunks(urls, 10):
        for url in chunk:
            html = getURL(url)

            if r'<div class="errorMessage mb10">' in html:
                # Error page -> offline.
                result.append((url, 0, 1, url))
            elif r'Page cannot be displayed' in html:
                # Unreachable page -> offline.
                result.append((url, 0, 1, url))
            else:
                try:
                    # Extract the display name from the download anchor.
                    url_pattern = '<a href="(.+?)" onclick="return Act\(this\, \'dlink\'\, event\)">(.+?)</a>'
                    file_name = re.search(url_pattern, html).group(0).split(', event)">')[1].split('</a>')[0]
                    result.append((file_name, 0, 2, url))
                except Exception:
                    # Anchor not found -> url silently skipped.
                    pass

        # status 1=OFFLINE, 2=OK, 3=UNKNOWN
        # result.append((#name,#size,#status,#url))
        yield result
def getRtUpdate(self):
    """Return the cached realtime-update javascript, refreshing it from
    jdownloader's tbupdate.js when the plugin version changed or the
    cached copy is older than 24 hours.
    """
    rtUpdate = self.getStorage("rtUpdate")
    if not rtUpdate:
        # 86400000: 24h in milliseconds, matching the stored timestamp unit
        # -- TODO confirm against `timestamp()`.
        if self.getStorage("version") != self.__version \
           or int(self.getStorage("timestamp", 0)) + 86400000 < timestamp():
            # that's right, we are even using jdownloader updates
            rtUpdate = getURL("http://update0.jdownloader.org/pluginstuff/tbupdate.js")
            rtUpdate = self.decrypt(rtUpdate.splitlines()[1])
            # but we still need to fix the syntax to work with other engines than rhino
            # Rewrite JS 'for each(var x in [...])' into a plain indexed loop.
            rtUpdate = re.sub(r'for each\(var (\w+) in(\[[^\]]+\])\)\{',
                              r'zza=\2;for(var zzi=0;zzi<zza.length;zzi++){\1=zza[zzi];',
                              rtUpdate)
            # Ensure loop variables are declared with 'var'.
            rtUpdate = re.sub(r"for\((\w+)=", r"for(var \1=", rtUpdate)

            self.setStorage("rtUpdate", rtUpdate)
            self.setStorage("timestamp", timestamp())
            self.setStorage("version", self.__version)
        else:
            self.logError(_("Unable to download, wait for update..."))
            self.tempOffline()

    return rtUpdate