def extract_torrents(data):
    """Parse a torrent-API JSON payload into Quasar result dicts.

    data: raw JSON text returned by the provider API.
    Returns a list of dicts (name, uri, language, provider, icon),
    capped by the ``max_magnets`` setting.
    """
    filters.information()  # print filters settings
    results = []
    cont = 0
    links = provider.parse_json(data)
    # `dict.has_key()` is deprecated (removed in Python 3); a membership
    # test is equivalent and works on both interpreters.
    if "error" not in links:
        for link in links["torrent_results"]:
            name = link["filename"]  # name
            magnet = link["download"]  # magnet
            if filters.verify(name, None):
                cont += 1
                results.append(
                    {
                        "name": name.strip(),
                        "uri": magnet,
                        "language": settings.value.get("language", "en"),
                        "provider": settings.name,
                        "icon": settings.icon,
                    }
                )  # return the torrent
                if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    break
            else:
                provider.log.warning(filters.reason)
    provider.log.info(">>>>>>" + str(cont) + " torrents sent to Quasar<<<<<<<")
    return results
def extract_torrents(data):
    """Scrape a 1337x-style listing page and return Quasar result dicts.

    data: raw HTML of the search-results page, or None.
    The magnet link is resolved from each row's detail page only after
    the name/size pass the configured filters.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        soup = BeautifulSoup(data, 'html5lib')
        for row in soup.select('ul.clearfix li'):
            anchor = row.div.select('a')[1]
            name = anchor.text  # name
            page = settings.value["url_address"] + anchor["href"]  # detail page
            size = row.find('div', class_="coll-4").text  # size
            seeds = row.find('div', class_="coll-2").text  # seeds
            peers = row.find('div', class_="coll-3").text  # peers
            # info_magnet = common.Magnet(magnet)
            if not filters.verify(name, size):
                provider.log.warning(filters.reason)
                continue
            magnet = common.getlinks(page)  # magnet
            cont += 1
            results.append({
                "name": name.strip(),
                "uri": magnet,
                # "info_hash": info_magnet.hash,
                "size": size.strip(),
                "seeds": sint(seeds),
                "peers": sint(peers),
                "language": settings.value.get("language", "en"),
                "provider": settings.name,
                "icon": settings.icon,
            })  # return the torrent
            if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                break
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Scrape 'ligne0'/'ligne1' result rows into Quasar result dicts.

    data: raw HTML of the listing page, or None.
    The .torrent URL is rebuilt from the detail-page href.
    """
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        filters.information()  # print filters settings
        soup = BeautifulSoup(data, 'html5lib')
        for row in soup.findAll('div', {'class': ['ligne0', 'ligne1']}):
            name = row.a.text  # name
            torrent_file = row.a["href"].rpartition('/')[2].replace(".html", ".torrent")
            magnet = "%s/telechargement/%s" % (settings.value["url_address"], torrent_file)  # magnet
            size = row.find('div', class_="poid").text  # size
            seeds = row.find('div', class_="up").span.text  # seeds
            peers = row.find('div', class_="down").text  # peers
            # info_magnet = common.Magnet(magnet)
            if not filters.verify(name, size):
                provider.log.warning(filters.reason)
                continue
            cont += 1
            results.append({
                "name": name.strip(),
                "uri": magnet,
                # "info_hash": info_magnet.hash,
                "size": size.strip(),
                "seeds": sint(seeds),
                "peers": sint(peers),
                "language": settings.value.get("language", "fr"),
                "provider": settings.name,
                "icon": settings.icon,
            })  # return the torrent
            if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                break
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Parse a torrent-API JSON payload into Quasar result dicts.

    data: raw JSON text returned by the provider API.
    Returns a list of dicts (name, uri, language, provider, icon),
    capped by the ``max_magnets`` setting.
    """
    filters.information()  # print filters settings
    results = []
    cont = 0
    links = provider.parse_json(data)
    # `dict.has_key()` is deprecated (removed in Python 3); membership
    # testing is the portable equivalent.
    if "error" not in links:
        for link in links["torrent_results"]:
            name = link["filename"]  # name
            magnet = link["download"]  # magnet
            if filters.verify(name, None):
                cont += 1
                results.append({
                    "name": name.strip(),
                    "uri": magnet,
                    "language": settings.value.get("language", "en"),
                    "provider": settings.name,
                    "icon": settings.icon,
                })  # return the torrent
                if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    break
            else:
                provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Scrape the 'fichas-listado' results table into Quasar result dicts.

    data: raw HTML of the listing page, or None.
    Rows must have exactly 5 cells; name and download link are taken from
    the first cell (last matching anchor wins, as in the original markup).
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        soup = BeautifulSoup(data, 'html5lib')
        table = soup.find("table", class_="fichas-listado")
        if table is not None:
            for row in table.tbody.findAll('tr'):
                columns = row.findAll('td')
                if len(columns) != 5:
                    continue
                name = ""
                for item in columns[0].findAll('a', class_='nombre'):
                    name = item.text.strip()  # name (last match wins)
                magnet = ""
                for item in columns[0].findAll('a', class_='icono-bajar'):
                    magnet = settings.value["url_address"] + item['href']
                size = None  # listing page exposes no size
                seeds = columns[2].text  # seeds
                peers = columns[3].text  # peers
                # info_magnet = common.Magnet(magnet)
                if not filters.verify(name, size):
                    provider.log.warning(filters.reason)
                    continue
                cont += 1
                # magnet = common.getlinks(magnet)
                results.append({
                    "name": name,
                    "uri": magnet,
                    # "info_hash": info_magnet.hash,
                    # "size": size,
                    "seeds": sint(seeds),
                    "peers": sint(peers),
                    "language": settings.value.get("language", "es"),
                    "provider": settings.name,
                    "icon": settings.icon,
                })  # return the torrent
                if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    break
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Extract torrents from a 4-column results table into Quasar dicts.

    data: raw HTML of the results page, or None.
    The size is embedded in the second cell's <font> text after a comma.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        soup = BeautifulSoup(data, 'html5lib')
        try:
            rows = soup.table.tbody.findAll('tr')
        except:
            rows = []  # no results table on the page
        for row in rows:
            cells = row.findAll('td')
            if len(cells) != 4:
                continue
            name = cells[1].div.text.strip()  # name
            magnet = cells[1].select('div + a')[0]["href"]  # magnet
            raw_size = cells[1].font.text.split(',')[1].replace('Size', '').replace(' ', ' ')  # size
            size = common.Filtering.normalize(raw_size).strip()
            seeds = cells[2].text  # seeds
            peers = cells[3].text  # peers
            # info_magnet = common.Magnet(magnet)
            if not filters.verify(name, size):
                provider.log.warning(filters.reason)
                continue
            cont += 1
            # magnet = common.getlinks(magnet)
            results.append({
                "name": name,
                "uri": magnet,
                # "info_hash": info_magnet.hash,
                "size": size,
                "seeds": sint(seeds),
                "peers": sint(peers),
                "language": settings.value.get("language", "en"),
                "provider": settings.name,
                "icon": settings.icon,
            })  # return the torrent
            if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                break
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Extract torrents from a KAT-style table (rows classed odd/even).

    data: raw HTML of the results page, or None.
    Results are hard-capped at 100 (the settings-based cap is kept
    commented out below, as in the original).
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    # Removed debug leftover `provider.log.info(data)`: it dumped the
    # entire raw HTML page into the log on every invocation.
    if data is not None:
        soup = BeautifulSoup(data, 'html5lib')
        links = soup.table.tbody.find_all('tr', {'class': ['odd', 'even']})
        for link in links:
            columns = link.select('td')
            if len(columns) == 6:
                name = columns[0].find(
                    'a', class_='cellMainLink').text.strip()  # name
                magnet = columns[0].find(
                    'a', {'title': 'Torrent magnet link'})['href']  # magnet
                size = columns[1].text.strip()  # size
                seeds = columns[4].text  # seeds
                peers = columns[5].text  # peers
                # info_magnet = common.Magnet(magnet)
                if filters.verify(name, size):
                    cont += 1
                    # magnet = common.getlinks(page)  # magnet
                    results.append({
                        "name": name,
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        "size": size,
                        "seeds": sint(seeds),
                        "peers": sint(peers),
                        "language": settings.value.get("language", "en"),
                        "provider": settings.name,
                        "icon": settings.icon,
                    })  # return the torrent
                    # if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    if cont >= 100:  # limit magnets
                        break
                else:
                    provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Build Quasar results from the provider's pre-parsed 'torrents' list.

    data: a mapping with a 'torrents' key (already parsed JSON), or None.
    Each entry's .torrent download URL is converted to a magnet on a
    worker thread (torrent2magnet) and the finished results are collected
    through a shared queue after all threads join.
    """
    sint = common.ignore_exception(ValueError)(int)  # int() that swallows ValueError
    results = []
    threads = []
    q = Queue.Queue()  # worker threads push converted results here
    cont = 0
    if data is not None:
        filters.information()  # print filters settings
        links = data['torrents']
        for link in links:
            name = link['name']  # name
            magnet = '%s/torrents/download/%s' % (
                settings.value["url_address"], link['id'])  # magnet
            fsize = int(link['size']) / 1000000  # size in MB (integer division)
            if fsize > 1000:
                size = "%0.2f Go" % (fsize / 1000.00)  # French units: gigaoctets
            else:
                size = "%0.2f Mo" % fsize  # megaoctets
            seeds = link['seeders']  # seeds
            peers = link['leechers']  # peers
            if filters.verify(name, size):
                cont += 1
                result = {
                    "name": name.strip(),
                    "uri": magnet,
                    "size": size.strip(),
                    "seeds": sint(seeds),
                    "peers": sint(peers),
                    "language": settings.value.get("language", "fr"),
                    "provider": settings.name,
                    "icon": settings.icon,
                }
                # Resolve the .torrent URL to a magnet concurrently; the
                # worker mutates/forwards `result` into the queue.
                thread = Thread(target=torrent2magnet, args=(result, q))
                thread.start()
                threads.append(thread)
                if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    break
            else:
                provider.log.warning(filters.reason)
    # Wait for every conversion to finish before draining the queue.
    for thread in threads:
        thread.join()
    while not q.empty():
        results.append(q.get())
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Scrape search results, fetch each detail page, and collect magnets.

    data: raw HTML of the 'buscar-list' search page, or None.
    Every candidate requires a second HTTP fetch (browser.open) of its
    detail page to locate the magnet button and the size.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        listing = BeautifulSoup(data, 'html5lib')
        for link in listing.select("ul.buscar-list li a"):
            if link.h2 is None:
                continue
            name = link.h2.text.strip()  # Name
            url = link["href"]
            pos = url.find('/', len(settings.value["url_address"]))
            url = settings.value["url_address"] + "/descarga-torrent" + url[pos:]
            browser.open(url)  # fetch the detail page
            detail = BeautifulSoup(browser.content)
            links_magnets = detail.select("div#tab1 a.btn-torrent")
            if len(links_magnets) > 0:
                magnet = links_magnets[0].get("href", "")  # magnet
                links_size = detail.select("span.imp")
                if len(links_size) > 1:
                    size = links_size[1].text.replace("Size: ", "")  # size
                else:
                    size = None
                seeds = 0  # seeds
                peers = 0  # peers
                # info_magnet = common.Magnet(magnet)
                if filters.verify(name, size):
                    cont += 1
                    # magnet = common.getlinks(magnet)
                    results.append({
                        "name": name,
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        "size": size,
                        # "seeds": sint(seeds),
                        # "peers": sint(peers),
                        "language": settings.value.get("language", "es"),
                        "provider": settings.name,
                        "icon": settings.icon,
                    })  # return the torrent
                    if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                        break
                else:
                    provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def new_extract_torrents(data):
    """Parse the API's JSON 'list' payload into Quasar result dicts.

    data: raw JSON text, or None. Results are hard-capped at 100 (the
    settings-based cap is kept commented out, as in the original).
    NOTE(review): filters.verify is deliberately bypassed here (commented
    out), so the else branch logging filters.reason is unreachable.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        jsondata = json.loads(data)
        # provider.log.info(jsondata)
        for link in jsondata['list']:
            if link is not None:
                name = link['title'].encode('utf-8').strip()
                # Removed debug leftover `provider.log.info(name)`: it
                # logged every torrent title at info level.
                magnet = 'magnet:?xt=urn:btih:' + link['hash']
                size = str(link['size'])
                seeds = sint(link['seeds'])   # computed but unused (entry commented below)
                peers = sint(link['leechs'])  # computed but unused (entry commented below)
                # info_magnet = common.Magnet(magnet)
                # if filters.verify(name, size):
                if name is not None:
                    cont += 1
                    # magnet = common.getlinks(page)  # magnet
                    results.append({
                        "name": name,
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        "size": size,
                        # "seeds": sint(seeds),
                        # "peers": sint(peers),
                        "language": settings.value.get("language", "en"),
                        "provider": settings.name,
                        "icon": settings.icon,
                    })  # return the torrent
                    # if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    if cont >= 100:  # limit magnets
                        break
                else:
                    provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Extract torrents from a 4-column results table into Quasar dicts.

    data: raw HTML of the results page, or None.
    The size is embedded in the second cell's <font> text after a comma.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        soup = BeautifulSoup(data, 'html5lib')
        try:
            links = soup.table.tbody.findAll('tr')
        except AttributeError:
            # Page has no results table (soup.table or .tbody is None).
            # The previous bare `except:` also swallowed KeyboardInterrupt
            # and SystemExit; only AttributeError can occur here.
            links = []
        for link in links:
            columns = link.findAll('td')
            if len(columns) == 4:
                name = columns[1].div.text.strip()  # name
                magnet = columns[1].select('div + a')[0]["href"]  # magnet
                size = columns[1].font.text.split(',')[1].replace('Size', '').replace(' ', ' ')  # size
                size = common.Filtering.normalize(size).strip()
                seeds = columns[2].text  # seeds
                peers = columns[3].text  # peers
                # info_magnet = common.Magnet(magnet)
                if filters.verify(name, size):
                    cont += 1
                    # magnet = common.getlinks(magnet)
                    results.append({
                        "name": name,
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        "size": size,
                        "seeds": sint(seeds),
                        "peers": sint(peers),
                        "language": settings.value.get("language", "en"),
                        "provider": settings.name,
                        "icon": settings.icon,
                    })  # return the torrent
                    if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                        break
                else:
                    provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Scrape 'ligne0'/'ligne1' result rows into Quasar result dicts.

    data: raw HTML of the listing page, or None.
    The .torrent URL is rebuilt from each row's detail-page href.
    """
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        filters.information()  # print filters settings
        page = BeautifulSoup(data, 'html5lib')
        rows = page.findAll('div', {'class': ['ligne0', 'ligne1']})
        for row in rows:
            name = row.a.text  # name
            href_tail = row.a["href"].rpartition('/')[2]
            magnet = "%s/telechargement/%s" % (
                settings.value["url_address"],
                href_tail.replace(".html", ".torrent"))  # magnet
            size = row.find('div', class_="poid").text  # size
            seeds = row.find('div', class_="up").span.text  # seeds
            peers = row.find('div', class_="down").text  # peers
            # info_magnet = common.Magnet(magnet)
            if filters.verify(name, size):
                cont += 1
                results.append({
                    "name": name.strip(),
                    "uri": magnet,
                    # "info_hash": info_magnet.hash,
                    "size": size.strip(),
                    "seeds": sint(seeds),
                    "peers": sint(peers),
                    "language": settings.value.get("language", "fr"),
                    "provider": settings.name,
                    "icon": settings.icon,
                })  # return the torrent
                if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                    break
            else:
                provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Extract torrents from the 7-column 'tl' table into Quasar dicts.

    data: raw HTML of the results page, or None.
    The final download link is resolved via common.getlinks only after
    the row passes the configured filters.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        page = BeautifulSoup(data, 'html5lib')
        for row in page.find('table', class_="tl").findAll('tr'):
            cells = row.findAll('td')
            if len(cells) != 7:
                continue
            name = cells[2].text.strip()  # name
            magnet = settings.value["url_address"] + cells[2].a["href"]  # magnet
            size = cells[3].text.strip()  # size
            seeds = cells[4].text  # seeds
            peers = cells[5].text  # peers
            size = common.Filtering.normalize(size)
            # info_magnet = common.Magnet(magnet)
            if not filters.verify(name, size):
                provider.log.warning(filters.reason)
                continue
            cont += 1
            magnet = common.getlinks(magnet)
            results.append({
                "name": name,
                "uri": magnet,
                # "info_hash": info_magnet.hash,
                "size": size,
                "seeds": sint(seeds),
                "peers": sint(peers),
                "language": settings.value.get("language", "en"),
                "provider": settings.name,
                "icon": settings.icon,
            })  # return the torrent
            if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                break
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Scrape the 'peliculas-box' list into Quasar result dicts.

    data: raw HTML of the listing page, or None. The listing exposes no
    size/seeds/peers, so only name and uri are reported.
    """
    # Removed debug leftover `print data`: it dumped the raw page to stdout.
    filters.information()  # print filters settings
    results = []
    cont = 0
    if data is not None:
        soup = BeautifulSoup(data, 'html5lib')
        links = soup.find("ul", class_="peliculas-box").findAll('li')
        for link in links:
            if link.a is not None:
                # Collapse whitespace and re-split the fused language suffix.
                name = ' '.join(link.a.text.split()).replace('Espa', ' Espa').strip()
                magnet = link.a['href'].replace('descargar/', 'torrent/')
                size = None
                # info_magnet = common.Magnet(magnet)
                if filters.verify(name, size):
                    # magnet = common.getlinks(magnet)
                    cont += 1
                    results.append({
                        "name": name,
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        # "size": size,
                        "language": settings.value.get("language", "es"),
                        "provider": settings.name,
                        "icon": settings.icon,
                    })  # return the torrent
                    if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                        break
                else:
                    provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Extract torrents from a KAT-style table (rows classed odd/even).

    data: raw HTML of the results page, or None.
    Respects the ``max_magnets`` setting as the result cap.
    """
    filters.information()  # print filters settings
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        page = BeautifulSoup(data, 'html5lib')
        rows = page.table.tbody.find_all('tr', {'class': ['odd', 'even']})
        for row in rows:
            cells = row.select('td')
            if len(cells) != 6:
                continue
            name = cells[0].find('a', class_='cellMainLink').text.strip()  # name
            magnet = cells[0].find('a', {'title': 'Torrent magnet link'})['href']  # magnet
            size = cells[1].text.strip()  # size
            seeds = cells[4].text  # seeds
            peers = cells[5].text  # peers
            # info_magnet = common.Magnet(magnet)
            if not filters.verify(name, size):
                provider.log.warning(filters.reason)
                continue
            cont += 1
            # magnet = common.getlinks(page)  # magnet
            results.append({
                "name": name,
                "uri": magnet,
                # "info_hash": info_magnet.hash,
                "size": size,
                "seeds": sint(seeds),
                "peers": sint(peers),
                "language": settings.value.get("language", "en"),
                "provider": settings.name,
                "icon": settings.icon,
            })  # return the torrent
            if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                break
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results
def extract_torrents(data):
    """Extract torrents from 'odd'/'odd2' table rows into Quasar dicts.

    data: raw HTML of the results page, or None. The torrent URI is
    rebuilt from the info-hash via torcache.
    """
    sint = common.ignore_exception(ValueError)(int)  # tolerant int()
    results = []
    cont = 0
    if data is not None:
        filters.information()  # print filters settings
        soup = BeautifulSoup(data, 'html5lib')
        links = soup.findAll('tr', class_=['odd', 'odd2'])
        for link in links:
            columns = link.findAll('td')
            if len(columns) == 7:
                name = columns[1].text  # name
                info_hash = columns[3].input['value']
                magnet = 'http://torcache.net/torrent/%s.torrent' % info_hash  # magnet
                size = columns[2].text  # size
                seeds = columns[5].text  # seeds
                # BUG FIX: peers previously re-read columns[5] (the seeds
                # cell); the adjacent column holds the leechers count and
                # was otherwise unused — presumably the intended source.
                # TODO(review): confirm against the site's row markup.
                peers = columns[6].text  # peers
                # info_magnet = common.Magnet(magnet)
                if filters.verify(name, size):
                    cont += 1
                    results.append({
                        "name": name.strip(),
                        "uri": magnet,
                        "info_hash": info_hash,
                        "size": size.strip(),
                        "seeds": sint(seeds),
                        "peers": sint(peers),
                        "language": settings.value.get("language", "it"),
                        "provider": settings.cleanName,
                        "icon": settings.icon,
                    })  # return the torrent
                    if cont >= int(settings.value.get("max_magnets", 10)):  # limit magnets
                        break
                else:
                    provider.log.warning(filters.reason)
    provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
    return results