def call(method='', params=None):
    """Query the T411 API and return the parsed response.

    ``/auth`` requests are sent as a POST with the credentials
    form-encoded; every other endpoint is a GET carrying the stored
    auth token.  When the API answers with error code 201/202 (stale
    or invalid token) the function re-authenticates and replays the
    call, at most ``USER_CREDENTIALS_RETRY`` times.

    :param method: API path, e.g. ``'/torrents/search/foo'``.
    :param params: form fields for the ``/auth`` POST (ignored for GETs).
    :return: the ``'torrents'`` list when present, otherwise the raw
             response dict, or ``[]`` on any failure.
    """
    global USER_CREDENTIALS, USER_CREDENTIALS_RETRY
    provider.log.info("Call T411 API: %s%s" % (_API_, method))
    if method != '/auth':
        req = provider.GET(
            '%s%s' % (_API_, method),
            headers={'Authorization': USER_CREDENTIALS['token']})
    else:
        req = provider.POST('%s%s' % (_API_, method),
                            data=provider.urlencode(params))
    try:
        if req.getcode() == 200:
            resp = req.json()
            # provider.log.debug('Resp T411 API %s' % resp)
            if 'error' in resp:
                provider.notify(
                    message=resp['error'].encode('utf-8', 'ignore'),
                    header="Quasar [COLOR FF18F6F3]t411[/COLOR] Provider",
                    time=3000,
                    image=_ICON_)
                # Codes 201/202 mean the token expired: force a re-auth
                # and replay the original call.  Use .get() so an error
                # payload lacking 'code' cannot raise KeyError here.
                if (resp.get('code') in (201, 202)
                        and method != '/auth'
                        and USER_CREDENTIALS_RETRY > 0):
                    USER_CREDENTIALS_RETRY -= 1
                    provider.log.info('Force re auth T411 API')
                    if _auth(_USERNAME_, _PASSWORD_):
                        return call(method, params)
            if 'torrents' in resp:
                return resp['torrents']
            return resp
        else:
            provider.log.error(req)
    except Exception as e:
        provider.log.error('Resp ERROR API %s' % e)
    return []
def search_movie(info):
    """Search the site for the movie named in ``info['title']``.

    Fetches the landing page first to pick up the hidden CSRF token the
    search form requires, then POSTs the query and hands the response
    HTML to ``extract_magnets``.
    """
    title_utf8 = info['title'].encode('utf-8')
    filters.title = title_utf8
    filters.use_movie()
    if settings.time_noti > 0:
        provider.notify(
            message='Searching: ' + info['title'].title().encode("utf-8") + '...',
            header=None,
            time=settings.time_noti,
            image=settings.icon)
    search_url = settings.url
    provider.log.info(search_url)
    # The search form embeds a hidden token that must accompany the POST.
    landing = browser.get(search_url)
    page = bs4.BeautifulSoup(landing.text)
    token = page.select("div#mobile-search-input input")[0]["value"]
    provider.log.info(search_url)
    form = {
        "keyword": title_utf8,
        "_token": token,
        "quality": "all",
        "genre": "all",
        "rating": "0",
        "order_by": "seeds",
    }
    provider.log.info(form)
    reply = browser.post(search_url + "/search-movies", data=form)
    return extract_magnets(reply.text)
def extract_torrents(data):
    """Parse the ``#torrents`` HTML table into a list of Quasar results.

    :param data: raw HTML of a search-results page.
    :return: list of result dicts (capped at the ``max_magnets``
             setting), or ``[]`` when the page cannot be parsed.
    """
    try:
        filters.information()  # print filters settings
        soup = BeautifulSoup(data, 'html5lib')
        links = soup.find("table", id="torrents")
        cont = 0
        results = []
        if links is not None:
            links = links.tbody.findAll('tr')
            for link in links:
                try:
                    columns = link.findAll('td')
                    if len(columns) == 6:
                        name = columns[1].text      # name
                        magnet = columns[0].a["href"]  # torrent
                        size = columns[3].text      # size
                        seeds = columns[4].text     # seeds
                        peers = columns[5].text     # peers
                        # info_magnet = common.Magnet(magnet)
                        if filters.verify(filters.title, size):
                            cont += 1
                            results.append({
                                "name": name.strip(),
                                "uri": magnet,
                                # "info_hash": info_magnet.hash,
                                "size": size.strip(),
                                "seeds": int(seeds),
                                "peers": int(peers),
                                "language": settings.value.get("language", "en"),
                                "provider": settings.name
                            })  # return the torrent
                            if cont >= int(
                                    settings.value.get("max_magnets", 10)):
                                break  # limit magnets
                        else:
                            provider.log.warning(filters.reason)
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed; a malformed row is
                # simply skipped.
                except Exception:
                    continue
        provider.log.info('>>>>>>' + str(cont) + ' torrents sent to Quasar<<<<<<<')
        return results
    except Exception:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None,
                        time=5000, image=settings.icon)
        return []
def search_general(info):
    """Run a general search for ``info`` and return the parsed torrents."""
    info["extra"] = settings.value.get("extra", "")  # add the extra information
    # type_filtering also sets up filters.title as a side effect.
    query = filters.type_filtering(info, '+')
    url_search = "%s/search/%s/0/99/200" % (settings.value["url_address"], query)
    provider.log.info(url_search)
    if not browser.open(url_search):
        provider.log.error('>>>>>>>%s<<<<<<<' % browser.status)
        provider.notify(message=browser.status, header=None, time=5000,
                        image=settings.icon)
        return []
    return extract_torrents(browser.content)
def search(query, cat_id=CAT_MOVIE, terms=None, episode=False, season=False):
    """Search t411 for ``query`` and build the Quasar result list.

    Torrent pages are fetched in parallel threads (the t411 API needs
    POST requests while Quasar issues GETs, hence ``torrent2magnet``).

    :param query: '+'-separated search words.
    :param cat_id: t411 category id (defaults to movies).
    :param terms: extra term filters appended to the search URL.
    :param episode: True when searching a single episode.
    :param season: True when searching a full season.
    :return: list of result dicts for Quasar.
    """
    provider.notify(message=str(query).replace('+', ' ').title(),
                    header="Quasar AlexP's [COLOR FF18F6F3]t411[/COLOR] Provider",
                    time=3000, image=icon)
    result = []
    threads = []
    q = Queue.Queue()
    provider.log.debug("QUERY : %s" % query)
    query = query.replace('+', '%20')
    response = call('/torrents/search/%s&?limit=15&cid=%s%s'
                    % (query, cat_id, terms))
    if episode or season:
        # search for animation series too
        # BUG FIX: this used to read `response_tv_anim = response = call(...)`,
        # which clobbered the primary results and concatenated the
        # animation list with itself.
        response_tv_anim = call('/torrents/search/%s&?limit=15&cid=%s%s'
                                % (query, CAT_TV_ANIME, terms))
        response['torrents'] = response['torrents'] + response_tv_anim['torrents']
    if episode and serie_en_plus == 'true':
        # Also look for full-season packs (sub-category 936).
        terms2 = terms[:-3] + '936'
        response2 = call('/torrents/search/%s&?limit=15&cid=%s%s'
                         % (query, cat_id, terms2))
        response3 = call('/torrents/search/%s&?limit=15&cid=%s%s'
                         % (query, CAT_TV_ANIME, terms2))
        response['torrents'] = (response['torrents'] + response2['torrents']
                                + response3['torrents'])
    provider.log.debug("Search results : %s" % response)
    # quasar send GET requests & t411 api needs POST
    # Must use the bencode tool :(
    for t in response['torrents']:
        # Call each individual page in parallel
        thread = Thread(target=torrent2magnet,
                        args=(t, q, user_credentials['token']))
        thread.start()
        threads.append(thread)
    # And get all the results
    for t in threads:
        t.join()
    while not q.empty():
        item = q.get()
        result.append({
            "size": sizeof_fmt(item["size"]),
            "seeds": item["seeds"],
            "peers": item["peers"],
            "name": item["name"],
            "trackers": item["trackers"],
            "info_hash": item["info_hash"],
            "is_private": True,
            "provider": "[COLOR FF18F6F3]t411[/COLOR]",
            "icon": icon
        })
    return result
def extract_torrents(data):
    """Scrape the ``#torrents`` table from an HTML page into Quasar results."""
    try:
        filters.information()  # dump the active filter settings to the log
        page = BeautifulSoup(data, 'html5lib')
        table = page.find("table", id="torrents")
        count = 0
        results = []
        if table is not None:
            for row in table.tbody.findAll('tr'):
                try:
                    cells = row.findAll('td')
                    if len(cells) != 6:
                        continue
                    name = cells[1].text       # name
                    magnet = cells[0].a["href"]  # torrent link
                    size = cells[3].text       # size
                    seeds = cells[4].text      # seeds
                    peers = cells[5].text      # peers
                    # info_magnet = common.Magnet(magnet)
                    if not filters.verify(filters.title, size):
                        provider.log.warning(filters.reason)
                        continue
                    count += 1
                    results.append({
                        "name": name.strip(),
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        "size": size.strip(),
                        "seeds": int(seeds),
                        "peers": int(peers),
                        "language": settings.value.get("language", "en"),
                        "provider": settings.name
                    })
                    if count >= int(settings.value.get("max_magnets", 10)):
                        break  # limit magnets
                except:
                    continue  # skip rows we cannot parse
        provider.log.info('>>>>>>' + str(count) + ' torrents sent to Quasar<<<<<<<')
        return results
    except:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None,
                        time=5000, image=settings.icon)
        return []
def extract_torrents(data):
    """Parse the rows of the page's first table into Quasar result dicts."""
    try:
        filters.information()  # print filters settings
        rows = BeautifulSoup(data, 'html5lib').table.tbody.findAll('tr')
        found = 0
        results = []
        for row in rows:
            try:
                cells = row.findAll('td')
                name = cells[1].div.text                       # name
                magnet = cells[1].select('div + a')[0]["href"]  # magnet
                size = cells[1].font.text.split(',')[1].replace('Size', '').replace(' ', ' ')  # size
                seeds = cells[2].text                          # seeds
                peers = cells[3].text                          # peers
                size = common.Filtering.normalize(size)
                # info_magnet = common.Magnet(magnet)
                if filters.verify(name, size):
                    found += 1
                    results.append({
                        "name": name.strip(),
                        "uri": magnet,
                        # "info_hash": info_magnet.hash,
                        "size": size.strip(),
                        "seeds": int(seeds),
                        "peers": int(peers),
                        "language": settings.value.get("language", "en"),
                        "provider": settings.name
                    })
                    if found >= int(settings.value.get("max_magnets", 10)):
                        break  # limit magnets
                else:
                    provider.log.warning(filters.reason)
            except:
                continue  # skip rows we cannot parse
        provider.log.info('>>>>>>' + str(found) + ' torrents sent to Quasar<<<<<<<')
        return results
    except:
        provider.log.error('>>>>>>>ERROR parsing data<<<<<<<')
        provider.notify(message='ERROR parsing data', header=None,
                        time=5000, image=settings.icon)
        return []
def search(query, cat_id=CAT_VIDEO, terms=None, episode=False, season=False):
    """Search t411 for ``query`` with category-aware fallbacks.

    Queries the primary category first; when it returns fewer than
    ``_FILTER_LIMIT_ - 1`` hits, widens the search to animated movies
    (for movie searches) or to TV-emission / animated-series categories
    and full-season packs (for episode/season searches).  Each raw
    torrent is converted via ``torrent2magnet`` and tagged with a
    colored category label for display in Quasar.

    :param query: search string (spaces are URL-encoded here).
    :param cat_id: primary t411 category id.
    :param terms: extra term filters appended to the search URL.
    :param episode: True when searching a single episode.
    :param season: True when searching a full season.
    :return: list of result dicts for Quasar.
    """
    global USER_CREDENTIALS
    provider.notify(message=str(query).title(),
                    header="Quasar [COLOR FF18F6F3]t411[/COLOR] Provider",
                    time=3000, image=_ICON_)
    result = []
    search_url = '/torrents/search/%s?limit=%s&cid=%s%s'
    provider.log.debug("QUERY : %s" % query)
    query = query.replace(' ', '%20')
    # Per-category accumulators; membership is checked later (via
    # in_category) to decide which label each torrent gets.
    torrents_anim = []
    torrents_saison = []
    torrents_serie_tv = []
    torrents_serie_anim = []
    torrents_saison_tv = []
    torrents_saison_anim = []
    torrents = call(search_url % (query, _FILTER_LIMIT_, cat_id, terms))
    # Widen the search only when the primary category came back sparse.
    if len(torrents) < (_FILTER_LIMIT_ - 1):
        if not episode and not season:
            # add animated movie
            torrents_anim = call(
                search_url % (query, _FILTER_LIMIT_, CAT_MOVIE_ANIM, terms))
            torrents = sum([torrents, torrents_anim], [])
        else:
            if episode and _FILTER_SERIES_FULL_ == 'true':
                # Full-season packs: terms2 swaps the last 3 chars of
                # `terms` for sub-category 936 — assumes `terms` ends
                # with a 3-char category id (TODO confirm with caller).
                terms2 = terms[:-3] + '936'
                torrents_saison = call(search_url % (query, _FILTER_LIMIT_,
                                                     cat_id, terms2))
                # Fall back TV-emission -> animated-series, keeping only
                # the first non-empty category.
                if not len(torrents_saison):
                    torrents_saison_tv = call(
                        search_url % (query, _FILTER_LIMIT_,
                                      CAT_SERIES_EMISSION, terms2))
                    if not len(torrents_saison_tv):
                        torrents_saison_anim = call(
                            search_url % (query, _FILTER_LIMIT_,
                                          CAT_SERIES_ANIMATED, terms2))
                        torrents = sum([torrents, torrents_saison_anim], [])
                    else:
                        torrents = sum([torrents, torrents_saison_tv], [])
                else:
                    torrents = sum([torrents, torrents_saison], [])
            if (episode or season) and len(torrents) < int(
                    ceil(_FILTER_LIMIT_ / 2)):
                # search for animation and emission series too
                torrents_serie_tv = call(
                    search_url % (query, _FILTER_LIMIT_,
                                  CAT_SERIES_EMISSION, terms))
                if not len(torrents_serie_tv):
                    torrents_serie_anim = call(
                        search_url % (query, _FILTER_LIMIT_,
                                      CAT_SERIES_ANIMATED, terms))
                    torrents = sum([torrents, torrents_serie_anim], [])
                else:
                    torrents = sum([torrents, torrents_serie_tv], [])
    for torrent in torrents:
        if 'id' in torrent:
            # Resolve the torrent page to magnet-style metadata.
            torrent = torrent2magnet(torrent, USER_CREDENTIALS['token'])
        if 'id' in torrent:
            # Work out which category label to prefix to the name.
            category = ""
            if (episode or season) and _FILTER_SERIES_FULL_ == 'true':
                if in_category(torrents_saison, torrent['id']):
                    category = "[FULL]"
            elif (episode or season) and in_category(
                    torrents_saison_tv, torrent['id']):
                category = "[FULL][TV]"
            elif (episode or season) and in_category(
                    torrents_saison_anim, torrent['id']):
                category = "[FULL][ANIM]"
            elif episode or season:
                if in_category(torrents_serie_tv, torrent['id']):
                    category = "[TV]"
                elif in_category(torrents_serie_anim, torrent['id']):
                    category = "[ANIM]"
            elif (not episode and not season) and in_category(
                    torrents_anim, torrent['id']):
                category = "[ANIM]"
            if 'uri' in torrent:
                result.append({
                    "name": "[COLOR FFF05129]%s[/COLOR] [COLOR FF18F6F3]%s[/COLOR][COLOR FFe19623]%s[/COLOR] [COLOR FFa39e9d]%s[/COLOR]"
                            % (torrent["added"], torrent["languages"],
                               category, torrent["name"]),
                    "provider": "t411",
                    "icon": _ICON_,
                    "uri": torrent["uri"],
                    "size": sizeof_fmt(int(torrent["size"])),
                    "seeds": torrent["seeds"],
                    "peers": torrent["peers"],
                    "trackers": torrent["trackers"],
                    "info_hash": torrent["info_hash"],
                    "resolution": torrent["resolution"],
                    "languages": "",
                    "is_private": True
                })
    # provider.log.debug("==> RESULT <==")
    # provider.log.debug(result)
    return result
filters = common.Filtering() # login username = provider.ADDON.getSetting('username') # username password = provider.ADDON.getSetting('password') # passsword resp_login = provider.POST('%s/auth' % settings.value["url_address"], params={}, headers={}, data='username='******'&password='******'token'] #provider.log.info('token : %s' % token) except: provider.notify(message=resp_login.json()['error'], header=None, time=5000, image='') def extract_torrents(data): sint = common.ignore_exception(ValueError)(int) results = [] threads = [] q = Queue.Queue() cont = 0 if data is not None: filters.information() # print filters settings links = data['torrents'] for link in links: name = link['name'] # name magnet = '%s/torrents/download/%s' % (