def getInfo(urls):
    """Check Hotfile links through getAPIData.

    Yields, per batch of up to 80 URLs, a list of
    (name, size, status, url) tuples — status 2 for online files,
    1 for offline ones. Entries in any other state are dropped.
    """
    for batch in chunks(urls, 80):
        info = []
        api = getAPIData(batch)
        for entry in api.itervalues():
            state = entry[0]
            if state == "online":
                # entry: (state, size, escaped name, ..., url)
                info.append((html_unescape(entry[2]), entry[1], 2, entry[4]))
            elif state == "offline":
                # no name available for offline files; reuse the url
                info.append((entry[4], 0, 1, entry[4]))
        yield info
def getInfo(urls):
    """Query the Hotfile ``checklinks`` API for the given URLs.

    Yields, per chunk of up to 90 URLs, a list of
    (name, size in bytes, status, url) tuples. API state "1" or "2"
    maps to status 2 (online), anything else to 1 (offline).
    """
    api_url_base = "http://api.hotfile.com/"

    for chunk in chunks(urls, 90):
        # api only supports old style links
        api_param_file = {"action": "checklinks",
                          "links": ",".join(chunk),
                          "fields": "id,status,name,size"}
        # decode=True keeps this variant consistent with the newer copy of
        # this helper in the file, so the response is decoded text.
        src = getURL(api_url_base, post=api_param_file, decode=True)
        result = []
        for i, res in enumerate(src.split("\n")):
            if not res:
                # skip the trailing empty element after the final newline
                continue
            fields = res.split(",")
            # API reports per-line: id,status,name,size
            status = 2 if fields[1] in ("1", "2") else 1
            result.append((fields[2], int(fields[3]), status, chunk[i]))
        yield result
def getInfo(urls):
    """Resolve Hotfile links via the checklinks API.

    For every batch of up to 90 URLs, yields a list of
    (name, size in bytes, status, url) tuples; status is 2 when the
    API reports state "1" or "2", otherwise 1.
    """
    API_BASE = "http://api.hotfile.com/"

    for batch in chunks(urls, 90):
        #api only supports old style links
        post_data = {
            "action": "checklinks",
            "links": ",".join(batch),
            "fields": "id,status,name,size",
        }
        response = getURL(API_BASE, post=post_data, decode=True)

        entries = []
        for idx, line in enumerate(response.split("\n")):
            if not line:
                # trailing newline produces an empty final element
                continue
            parts = line.split(",")
            state = 2 if parts[1] in ("1", "2") else 1
            entries.append((parts[2], int(parts[3]), state, batch[idx]))
        yield entries
def getInfo(urls): ## returns list of tuples (name, size (in bytes), status (see FileDatabase), url) apiurl = "http://api.netload.in/info.php?auth=Zf9SnQh9WiReEsb18akjvQGqT0I830e8&bz=1&md5=1&file_id=" id_regex = re.compile(NetloadIn.__pattern__) urls_per_query = 80 for chunk in chunks(urls, urls_per_query): ids = "" for url in chunk: match = id_regex.search(url) if match: ids = ids + match.group(1) +";" api = getURL(apiurl+ids, decode = True) if api is None or len(api) < 10: print "Netload prefetch: failed " return if api.find("unknown_auth") >= 0: print "Netload prefetch: Outdated auth code " return result = [] for i, r in enumerate(api.splitlines()): try: tmp = r.split(";") try: size = int(tmp[2]) except: size = 0 result.append( (tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i] ) ) except: print "Netload prefetch: Error while processing response: " print r yield result
def getInfo(urls): ## returns list of tupels (name, size (in bytes), status (see FileDatabase), url) apiurl = "http://api.netload.in/info.php?auth=Zf9SnQh9WiReEsb18akjvQGqT0I830e8&bz=1&md5=1&file_id=" id_regex = re.compile(NetloadIn.__pattern__) urls_per_query = 80 for chunk in chunks(urls, urls_per_query): ids = "" for url in chunk: match = id_regex.search(url) if match: ids = ids + match.group(1) + ";" api = getURL(apiurl + ids, decode=True) if api is None or len(api) < 10: print "Netload prefetch: failed " return if api.find("unknown_auth") >= 0: print "Netload prefetch: Outdated auth code " return result = [] for i, r in enumerate(api.splitlines()): try: tmp = r.split(";") try: size = int(tmp[2]) except: size = 0 result.append( (tmp[1], size, 2 if tmp[3] == "online" else 1, chunk[i])) except: print "Netload prefetch: Error while processing response: " print r yield result