def extract_torrents(data):
    """Parse a search-results HTML page into a list of torrent dicts.

    Scrapes name/size/seed/peer table cells and magnet links out of *data*,
    filters each candidate through ``filters.verify`` and stops once
    ``settings.max_magnets`` results are collected.

    :param data: raw HTML of the provider's search results page.
    :returns: list of result dicts (name, uri, info_hash, size, seeds,
        peers, language); ``None`` if parsing raised (error is logged
        and the user is notified).
    """
    try:
        filters.information()  # print filters settings
        # Drop the inline magnet attribute so the td-href regexes below
        # only match the /torrent/... links.
        data = common.clean_html(data).replace('<td data-href="magnet:?', '')
        lname = re.findall('<td data-href="/torrent/(.*?)/(.*?)"', data)  # list of (id, name)
        size = re.findall('<td class="size_td">(.*?)</td>', data)  # list the size
        seeds = re.findall('<td class="seed_td">(.*?)</td>', data)  # list the seeds
        peers = re.findall('<td class="leech_td">(.*?)</td>', data)  # list the peers
        cont = 0
        results = []
        # Magnets appear in the same row order as the cells scraped above,
        # so index cm lines them up.
        for cm, magnet in enumerate(re.findall(r'magnet:\?[^\'"\s<>\[\]]+', data)):
            info_magnet = common.Magnet(magnet)
            name = size[cm] + ' - ' + lname[cm][1].replace('-', ' ') + ' - ' + settings.name_provider  # find name in the torrent
            if filters.verify(name, size[cm]):
                results.append({
                    "name": name,
                    "uri": magnet,
                    "info_hash": info_magnet.hash,
                    "size": common.size_int(size[cm]),
                    "seeds": int(seeds[cm]),
                    "peers": int(peers[cm]),
                    "language": settings.language
                })  # return le torrent
                cont += 1
            else:
                provider.log.warning(filters.reason)
            if cont == settings.max_magnets:  # limit magnets
                break
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
        return results
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any parse error is still reported.
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None, time=5000, image=settings.icon)
def extract_torrents(data):
    """Parse a search-results HTML page into a list of torrent dicts.

    This variant scrapes only the size cell (seed/peer scraping is
    commented out for this site) and takes the display name from the
    magnet link itself via ``common.Magnet``.

    :param data: raw HTML of the provider's search results page.
    :returns: list of result dicts (name, uri, info_hash); ``None`` if
        parsing raised (error is logged and the user is notified).
    """
    try:
        filters.information()  # print filters settings
        data = common.clean_html(data)
        size = re.findall('class="nobr center">(.*?)B', data)  # list the size
        # seeds = re.findall('green center">(.*?)<', data)  # list the seeds
        # peers = re.findall('red lasttd center">(.*?)<', data)  # list the peers
        cont = 0
        results = []
        for cm, magnet in enumerate(re.findall(r'magnet:\?[^\'"\s<>\[\]]+', data)):
            info_magnet = common.Magnet(magnet)
            # Strip the stray <span> tag from the size cell; use a local
            # instead of mutating the scraped list in place.
            sz = size[cm].replace('<span>', '')
            name = sz + 'B' + ' - ' + info_magnet.name + ' - ' + settings.name_provider
            if filters.verify(name, sz):
                results.append({
                    "name": name,
                    "uri": magnet,
                    "info_hash": info_magnet.hash
                })  # return le torrent
                cont += 1
            else:
                provider.log.warning(filters.reason)
            if cont == settings.max_magnets:  # limit magnets
                break
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
        return results
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any parse error is still reported.
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None, time=5000, image=settings.icon)
def search_episode(info):
    """Search the provider's JSON API for one TV episode.

    Builds a ``/show/<imdb_id>`` URL, fetches it, then walks the returned
    episode list looking for the requested season/episode and collects one
    result per allowed resolution.

    NOTE(review): this is Python-2 style code — ``.encode('utf-8')``
    returns a ``str`` that is concatenated directly with other strings.

    :param info: query dict; reads 'season', 'episode', 'title', 'imdb_id'.
    :returns: list of result dicts ({'name', 'uri'}); empty when nothing
        matched or the response body was empty.
    """
    title = ' S%02dE%02d' % (info['season'], info['episode'])
    if settings.time_noti > 0:
        # Optional on-screen toast while searching.
        provider.notify(message='Searching: ' + info['title'].encode("utf-8").title() + title + '...', header=None, time=settings.time_noti, image=settings.icon)
    url_search = "%s/show/%s" % (settings.url, info['imdb_id'])
    provider.log.info(url_search)
    response = provider.GET(url_search)
    results = []
    if str(response.data) != '':
        filters.use_TV()  # switch filters to TV mode
        filters.information()  # print filters settings
        items = provider.parse_json(response.data)
        for episode in items['episodes']:
            if (episode['episode'] == info['episode'] and episode['season'] == info['season']):
                # One torrent entry per available resolution (e.g. '480p').
                for resolution in episode['torrents']:
                    resASCII = resolution.encode('utf-8')
                    name = resASCII + ' - ' + items['title'] + ' - ' + episode['title'].encode('utf-8') + ' - ' + 'S%02dE%02d' % (info['season'], info['episode'])
                    if filters.included(resASCII, filters.quality_allow) and not filters.included(resASCII, filters.quality_deny):
                        # NOTE(review): res_val is never read afterwards; the
                        # lookup may only serve to raise KeyError on unknown
                        # resolutions — confirm before removing.
                        res_val = values3[resASCII]
                        magnet = episode['torrents'][resolution]['url']
                        if magnet[:4].lower() == 'http':
                            # Plain .torrent URL: convert to a magnet link.
                            magnet = common.TorrentToMagnet(magnet, items['title'], info['season'], info['episode'])
                        if magnet is not None:
                            # NOTE(review): info_magnet is unused here — the
                            # appended dict carries no info_hash; verify intent.
                            info_magnet = common.Magnet(magnet)
                            results.append({'name': name + ' - ' + settings.name_provider, 'uri': magnet})
                    else:
                        provider.log.warning(name + ' ***Blocked File by Keyword, Name or Size***')
    return results
def extract_torrents(data):
    """Parse a search-results HTML page into a list of torrent dicts.

    This variant scrapes the size from a ``Size:</span>…`` description
    block (DOTALL match, second capture group holds the value) and takes
    the display name from the magnet link via ``common.Magnet``.

    :param data: raw HTML of the provider's search results page.
    :returns: list of result dicts (name, uri, info_hash); ``None`` if
        parsing raised (error is logged and the user is notified).
    """
    try:
        filters.information()  # print filters settings
        data = common.clean_html(data)
        # Each match is a (prefix, size-text) tuple; [1] is the size.
        size = re.findall('Size:</span>(.*?)<span class="opt-desc">(.*?)<', data, re.S)  # list the size
        cont = 0
        results = []
        for cm, magnet in enumerate(re.findall(r'magnet:\?[^\'"\s<>\[\]]+', data)):
            info = common.Magnet(magnet)
            name = size[cm][1] + ' - ' + info.name + ' - ' + settings.name_provider
            if filters.verify(name, size[cm][1]):
                results.append({
                    "name": name,
                    "uri": magnet,
                    "info_hash": info.hash
                })  # return le torrent
                cont += 1
            else:
                provider.log.warning(filters.reason)
            if cont == settings.max_magnets:  # limit magnets
                break
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
        return results
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any parse error is still reported.
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None, time=5000, image=settings.icon)
def extract_torrents(data):
    """Parse a search-results HTML page into a list of torrent dicts.

    Seeds and peers share one right-aligned cell class on this site, so
    the scraped list is de-interleaved: even indices are seeds, odd
    indices are peers.

    :param data: raw HTML of the provider's search results page.
    :returns: list of result dicts (name, uri, info_hash, size, seeds,
        peers, language); ``None`` if parsing raised (error is logged
        and the user is notified).
    """
    try:
        filters.information()  # print filters settings
        data = common.clean_html(data)
        size = re.findall('Size (.*?)B', data)  # list the size
        seedsPeers = re.findall('<td align="right">(.*?)</td>', data)  # interleaved seeds/peers
        seeds = seedsPeers[0:][::2]
        peers = seedsPeers[1:][::2]
        cont = 0
        results = []
        for cm, magnet in enumerate(re.findall(r'magnet:\?[^\'"\s<>\[\]]+', data)):
            info = common.Magnet(magnet)
            # Normalize the non-breaking space once instead of three times.
            sz = size[cm].replace(' ', ' ')
            name = sz + 'B' + ' - ' + info.name + ' - ' + settings.name_provider
            if filters.verify(name, sz):
                results.append({
                    "name": name,
                    "uri": magnet,
                    "info_hash": info.hash,
                    "size": common.size_int(sz),
                    "seeds": int(seeds[cm]),
                    "peers": int(peers[cm]),
                    "language": settings.language
                })  # return le torrent
                cont += 1
            else:
                provider.log.warning(filters.reason)
            if cont == settings.max_magnets:  # limit magnets
                break
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
        return results
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any parse error is still reported.
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None, time=5000, image=settings.icon)
def extract_torrents(data):
    """Parse a results table into a list of torrent dicts.

    Walks every ``<tr>`` row, reads title/magnet from the anchors in the
    second column and size/seeds/peers from columns 3-5, then filters
    each candidate through ``filters.verify``.

    :param data: raw HTML of the provider's search results page.
    :returns: list of result dicts (name, uri, info_hash); ``None`` if
        parsing raised (error is logged and the user is notified).
    """
    try:
        filters.information()  # print filters settings
        data = common.clean_html(data)
        title = []
        lmagnet = []
        size = []
        seeds = []
        peers = []
        for row in re.findall('<tr(.*?)>(.*?)</tr>', data, re.S):  # get each row in the table
            columns = re.findall('<td(.*?)>(.*?)</td>', row[1], re.S)  # get each column for the row
            if len(columns) > 0:  # skip header rows with no <td> cells
                size.append(columns[3][1])
                seeds.append(columns[4][1])
                peers.append(columns[5][1])
                aref = re.findall('<a(.*?)href="(.*?)"(.*?)>(.*?)<', columns[1][1])  # get the aref
                title.append(aref[0][3])  # first anchor's text is the title
                lmagnet.append(aref[2][1])  # third anchor's href is the magnet
        cont = 0
        results = []
        for cm, magnet in enumerate(lmagnet):
            info_magnet = common.Magnet(magnet)
            name = size[cm] + ' - ' + title[cm] + ' - ' + settings.name_provider
            if filters.verify(name, size[cm]):
                results.append({
                    "name": name,
                    "uri": magnet,
                    "info_hash": info_magnet.hash
                })  # return le torrent
                cont += 1
            else:
                provider.log.warning(filters.reason)
            if cont == settings.max_magnets:  # limit magnets
                break
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
        return results
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any parse error is still reported.
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None, time=5000, image=settings.icon)
def extract_torrents(data):
    """Parse a search-results HTML page into a list of torrent dicts.

    Rows are located via their ``fa fa-download`` icon; the third
    captured cell holds the size. Names come from the
    ``torrent-filename`` spans and are URL-decoded and title-cased.

    :param data: raw HTML of the provider's search results page.
    :returns: list of result dicts (name, uri, info_hash); ``None`` if
        parsing raised (error is logged and the user is notified).
    """
    try:
        filters.information()  # print filters settings
        data = common.clean_html(data)
        rows = re.findall(
            'fa fa-download(.*?)</td>(.*?)</td>(.*?)</td>(.*?)</td>(.*?)</td>(.*?)</td>(.*?)</td>(.*?)</tr>',
            data, re.S)
        size = [s[2].replace('\n                                    <td>', '') for s in rows]
        # seeds = [s[5].replace('\n                                    <td>', '') for s in rows]
        # peers = [s[6].replace('\n                                    <td>', '') for s in rows]
        lname = re.findall('torrent-filename">(.*?)>(.*?)<', data, re.S)  # list the name
        cont = 0
        results = []
        for cm, magnet in enumerate(re.findall(r'magnet:\?[^\'"\s<>\[\]]+', data)):
            info_magnet = common.Magnet(magnet)
            name = size[cm] + 'B - ' + unquote_plus(lname[cm][1]).replace('.', ' ').title() + ' - ' + settings.name_provider
            if filters.verify(name, size[cm]):
                results.append({
                    "name": name,
                    "uri": magnet,
                    "info_hash": info_magnet.hash
                })  # return le torrent
                cont += 1
            else:
                provider.log.warning(filters.reason)
            if cont == settings.max_magnets:  # limit magnets
                break
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
        return results
    # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed; any parse error is still reported.
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None, time=5000, image=settings.icon)