def login(self, root_url, url, data, headers, fails_with):
    """ Login wrapper around ``open``

    Args:
        root_url   (str): Provider's root URL, prepended when ``url`` is relative
        url        (str): The URL to open
        data      (dict): POST login data
        headers   (dict): Extra headers to send with the login request
        fails_with (str): String that must **not** be included in the response's content

    Returns:
        bool: Whether or not login was successful
    """
    if not url.startswith('http'):
        url = root_url + url
    if self.open(url.encode('utf-8'), post_data=encode_dict(data, self.request_charset), headers=headers):
        try:
            if fails_with in self.content:
                self.status = 'Wrong username or password'
                return False
        except Exception as e:
            log.debug("Login failed with: %s" % e)
            try:
                if fails_with in self.content.decode('utf-8'):
                    self.status = 'Wrong username or password'
                    return False
            except Exception:
                return False
        return True
    return False
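# Hypothetical usage sketch for ``login`` (not part of the module): the
# definition keys mirror those used by ``process`` below, and the credential
# values are placeholders.
def _example_login(client, definition):
    credentials = {'username': 'user', 'password': 'secret'}  # placeholders
    return client.login(definition['root_url'],    # e.g. 'https://tracker.example'
                        definition['login_path'],  # e.g. '/takelogin.php'
                        credentials,
                        headers=None,
                        fails_with=definition['login_failed'])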
def _save_cookies(self):
    self._cookies_filename = self._locate_cookies(self.url)
    try:
        self._cookies.save(self._cookies_filename)
    except Exception as e:
        log.debug("Saving cookies error: %s" % repr(e))
def _read_cookies(self, url=''):
    self._cookies_filename = self._locate_cookies(url)
    if os.path.exists(self._cookies_filename):
        try:
            self._cookies.load(self._cookies_filename)
        except Exception as e:
            log.debug("Reading cookies error: %s" % repr(e))
def ResolveOpennic(host):
    try:
        log.debug("Custom DNS resolving with OpenNIC DNS for: %s" % host)
        resolver = dns.resolver.Resolver()
        resolver.nameservers = dns_opennic_list
        answer = resolver.query(host, 'A')
        return answer.rrset.items[0].address
    except Exception:
        return None
def patched_create_connection(address, *args, **kwargs):
    """ Wrap urllib3's create_connection to resolve the name elsewhere """
    # Resolve the hostname to an IP address with our own resolver,
    # as otherwise the system resolver would be used.
    host, port = address
    log.debug("Custom resolver: %s --- %s --- %s" % (host, port, repr(address)))
    hostname = MyResolver(host)
    return _orig_create_connection((hostname, port), *args, **kwargs)
def _locate_cookies(self, url=''):
    cookies_path = os.path.join(PATH_TEMP, 'burst')
    if not os.path.exists(cookies_path):
        try:
            os.makedirs(cookies_path)
        except Exception as e:
            log.debug("Error creating cookies directory: %s" % repr(e))
    # return os.path.join(cookies_path, urlparse(url).netloc + '_cookies.jar')
    # Do we really need to split cookies for each domain?
    return os.path.join(cookies_path, 'common_cookies.jar')
def generate_payload(provider, generator, filtering, verify_name=True, verify_size=True):
    """ Payload formatter to format results the way projectx expects them

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes

    Returns:
        list: Formatted results
    """
    filtering.information(provider)
    results = []

    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    for name, info_hash, uri, size, seeds, peers in generator:
        size = clean_size(size)
        # uri, info_hash = clean_magnet(uri, info_hash)
        v_name = name if verify_name else filtering.title
        v_size = size if verify_size else None
        if filtering.verify(provider, v_name, v_size):
            sort_seeds = get_int(seeds)
            sort_resolution = filtering.determine_resolution(v_name)[1] + 1
            sort_balance = sort_seeds * 3 * sort_resolution
            results.append({
                "name": name,
                "uri": uri,
                "info_hash": info_hash,
                "size": size,
                "seeds": sort_seeds,
                "peers": get_int(peers),
                "language": definition["language"] if 'language' in definition else 'en',
                "provider": '[COLOR %s]%s[/COLOR]' % (definition['color'], definition['name']),
                "icon": os.path.join(ADDON_PATH, 'burst', 'providers', 'icons', '%s.png' % provider),
                "sort_resolution": sort_resolution,
                "sort_balance": sort_balance
            })
        else:
            log.debug(filtering.reason.encode('utf-8'))

    log.debug('[%s] >>>>>> %s would send %d torrents to projectx <<<<<<<' % (provider, provider, len(results)))

    return results
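# For reference, a minimal sketch of one formatted result as appended above;
# all values are illustrative placeholders, not real data:
_example_result = {
    "name": "Some.Movie.2019.1080p.WEB-DL.x264",
    "uri": "magnet:?xt=urn:btih:" + "0" * 40,
    "info_hash": "0" * 40,
    "size": "2.1 GB",
    "seeds": 142,
    "peers": 17,
    "language": "en",
    "provider": "[COLOR FFFFFFFF]SomeProvider[/COLOR]",
    "icon": "someprovider.png",
    "sort_resolution": 3,         # determine_resolution() rank + 1
    "sort_balance": 142 * 3 * 3,  # seeds * 3 * sort_resolution
}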
def MyResolver(host):
    if '.' not in host:
        return host

    try:
        return dns_cache[host]
    except KeyError:
        pass

    ip = ResolvePublic(host)
    if not ip:
        ip = ResolveOpennic(host)

    if ip:
        log.debug("Host %s resolved to %s" % (host, ip))
        dns_cache[host] = ip
        return ip
    else:
        return host
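# A minimal sketch of how the resolver pieces are assumed to be wired up:
# urllib3's low-level helper is saved as ``_orig_create_connection`` before
# the patch replaces it (the actual swap happens in Client.__init__ below,
# guarded by the "use_public_dns" setting).
from urllib3.util import connection
_orig_create_connection = connection.create_connection
# connection.create_connection = patched_create_connection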
def load_overrides(path, custom=False):
    """ Overrides loader for Python files

    Note:
        Overrides must be in an ``overrides`` dictionary.

    Args:
        path   (str): Path to the directory containing the overrides file
        custom (bool): Boolean flag to specify if this is a custom overrides file
    """
    try:
        if custom:
            sys.path.append(path)
        from overrides import overrides
        log.debug("Imported overrides: %s", repr(overrides))
        for provider in overrides:
            update_definitions(provider, overrides[provider])
        log.info("Successfully loaded overrides from %s", os.path.join(path, "overrides.py"))
    except Exception as e:
        import traceback
        log.error("Failed importing %soverrides: %s", "custom " if custom else "", repr(e))
        map(log.error, traceback.format_exc().split("\n"))
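# Hypothetical sketch of a custom overrides.py: the loader above expects a
# module-level dict literally named ``overrides``, keyed by provider ID. The
# provider ID and the overridden keys below are illustrative only.
overrides = {
    'someprovider': {
        'name': 'SomeProvider (mirror)',
        'root_url': 'https://someprovider-mirror.example.com',
    },
}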
def run_provider(provider, payload, method):
    """ Provider thread entrypoint

    Args:
        provider (str): Provider ID
        payload (dict): Search payload from projectx
        method   (str): Type of search, can be ``general``, ``movie``, ``season``, ``episode`` or ``anime``
    """
    log.debug("[%s] Processing %s with %s method" % (provider, provider, method))

    filterInstance = Filtering()

    if method == 'movie':
        filterInstance.use_movie(provider, payload)
    elif method == 'season':
        filterInstance.use_season(provider, payload)
    elif method == 'episode':
        filterInstance.use_episode(provider, payload)
    elif method == 'anime':
        filterInstance.use_anime(provider, payload)
    else:
        filterInstance.use_general(provider, payload)

    if 'is_api' in definitions[provider]:
        results = process(provider=provider,
                          generator=extract_from_api,
                          filtering=filterInstance,
                          has_special=payload['has_special'],
                          skip_auth=payload['skip_auth'])
    else:
        results = process(provider=provider,
                          generator=extract_torrents,
                          filtering=filterInstance,
                          has_special=payload['has_special'],
                          skip_auth=payload['skip_auth'])

    got_results(provider, results)
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content  (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    try:
        matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
        if matches:
            result = matches[0]
            log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'http(.*?).torrent["\']', content)
        if matches:
            result = 'http' + matches[0] + '.torrent'
            result = result.replace('torcache.net', 'itorrents.org')
            log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'/download\?token=[A-Za-z0-9%]+', content)
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link with token: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'"(/download/[A-Za-z0-9]+)"', content)
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'/torrents/download/\?id=[a-z0-9-_.]+', content)  # t411
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link with an ID: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'\: ([A-Fa-f0-9]{40})', content)
        if matches:
            result = "magnet:?xt=urn:btih:" + matches[0]
            log.debug('[%s] Matched magnet info_hash search: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'/download.php\?id=([A-Za-z0-9]{40})\W', content)
        if matches:
            result = "magnet:?xt=urn:btih:" + matches[0]
            log.debug('[%s] Matched download link: %s' % (provider, repr(result)))
            return result

        matches = re.findall(r'(/download.php\?id=[A-Za-z0-9]+[^\s\'"]*)', content)
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link: %s' % (provider, repr(result)))
            return result
    except Exception:
        pass

    return None
def extract_from_api(provider, client):
    """ Main API parsing generator for API-based providers

    An almost clever API parser, mostly just for YTS, RARBG and T411

    Args:
        provider (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    try:
        data = json.loads(client.content)
    except Exception:
        data = []
    log.debug("[%s] JSON response from API: %s" % (unquote(provider), repr(data)))

    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))
    api_format = definition['api_format']

    results = []
    # If 'results' is empty, take all the data as an array of results.
    # Usable when the API returns results without any other data.
    if not api_format['results']:
        results = data
    else:
        result_keys = api_format['results'].split('.')
        log.debug("[%s] result_keys: %s" % (provider, repr(result_keys)))
        for key in result_keys:
            if key in data:
                data = data[key]
            else:
                data = []
        results = data
    log.debug("[%s] results: %s" % (provider, repr(results)))

    if 'subresults' in api_format:
        from copy import deepcopy
        for result in results:  # A little too specific to YTS but who cares...
            result['name'] = result[api_format['name']]
        subresults = []
        subresults_keys = api_format['subresults'].split('.')
        for key in subresults_keys:
            for result in results:
                if key in result:
                    for subresult in result[key]:
                        sub = deepcopy(result)
                        sub.update(subresult)
                        subresults.append(sub)
        results = subresults
        log.debug("[%s] with subresults: %s" % (provider, repr(results)))

    for result in results:
        if not result or not isinstance(result, dict):
            continue
        name = ''
        info_hash = ''
        torrent = ''
        size = ''
        seeds = ''
        peers = ''
        if 'name' in api_format:
            name = result[api_format['name']]
        if 'description' in api_format:
            if name:
                name += ' '
            name += result[api_format['description']]
        if 'torrent' in api_format:
            torrent = result[api_format['torrent']]
            if 'download_path' in definition:
                torrent = definition['base_url'] + definition['download_path'] + torrent
            if client.token:
                user_agent = USER_AGENT
                headers = {
                    'Authorization': client.token,
                    'User-Agent': user_agent
                }
                log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
        if 'info_hash' in api_format:
            info_hash = result[api_format['info_hash']]
        if 'quality' in api_format:  # Again quite specific to YTS...
            name = "%s - %s" % (name, result[api_format['quality']])
        if 'size' in api_format:
            size = result[api_format['size']]
            if type(size) in (long, int):
                size = sizeof(size)
            elif type(size) in (str, unicode) and size.isdigit():
                size = sizeof(int(size))
        if 'seeds' in api_format:
            seeds = result[api_format['seeds']]
            if type(seeds) in (str, unicode) and seeds.isdigit():
                seeds = int(seeds)
        if 'peers' in api_format:
            peers = result[api_format['peers']]
            if type(peers) in (str, unicode) and peers.isdigit():
                peers = int(peers)
        yield (name, info_hash, torrent, size, seeds, peers)
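# Hypothetical sketch of an ``api_format`` mapping as consumed above; the
# right-hand key names are illustrative only. 'results' is a dotted path
# walked into the JSON, and 'subresults' expands a nested list per result:
_example_api_format = {
    'results': 'data.movies',   # i.e. data['data']['movies']
    'subresults': 'torrents',   # each movie carries its own torrent list
    'name': 'title',
    'torrent': 'url',
    'info_hash': 'hash',
    'quality': 'quality',
    'size': 'size_bytes',
    'seeds': 'seeds',
    'peers': 'peers',
}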
def search(payload, method="general"):
    """ Main search entrypoint

    Args:
        payload (dict): Search payload from projectx.
        method   (str): Type of search, can be ``general``, ``movie``, ``season``, ``episode`` or ``anime``

    Returns:
        list: All filtered results in the format projectx expects
    """
    log.debug("Searching with payload (%s): %s" % (method, repr(payload)))

    if method == 'general':
        if 'query' in payload:
            payload['title'] = payload['query']
            payload['titles'] = {'source': payload['query']}
        else:
            payload = {
                'title': payload,
                'titles': {
                    'source': payload
                },
            }

    payload['titles'] = dict((k.lower(), v) for k, v in payload['titles'].iteritems())

    # If titles[] exists in payload and there are special chars in titles[source],
    # set a flag to possibly modify the search query.
    payload['has_special'] = 'titles' in payload and \
                             bool(payload['titles']) and \
                             'source' in payload['titles'] and \
                             any(c in payload['titles']['source'] for c in special_chars)
    if payload['has_special']:
        log.debug("Query title contains special chars, so removing any quotes in the search query")

    if 'proxy_url' not in payload:
        payload['proxy_url'] = ''
    if 'internal_proxy_url' not in payload:
        payload['internal_proxy_url'] = ''
    if 'projectx_url' not in payload:
        payload['projectx_url'] = ''
    if 'silent' not in payload:
        payload['silent'] = False
    if 'skip_auth' not in payload:
        payload['skip_auth'] = False

    global request_time
    global provider_names
    global provider_results
    global available_providers

    provider_names = []
    provider_results = []
    available_providers = 0
    request_time = time.time()

    providers = get_enabled_providers(method)

    if len(providers) == 0:
        if not payload['silent']:
            notify(translation(32060), image=get_icon_path())
        log.error("No providers enabled")
        return []

    log.info("Burstin' with %s" % ", ".join([definitions[provider]['name'] for provider in providers]))

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if not kodi_language:
            log.warning("Kodi returned empty language code...")
        elif 'titles' not in payload or not payload['titles']:
            log.info("No translations available...")
        elif payload['titles'] and kodi_language not in payload['titles']:
            log.info("No '%s' translation available..." % kodi_language)

    p_dialog = xbmcgui.DialogProgressBG()
    if not payload['silent']:
        p_dialog.create('projectx [COLOR FFFF6B00]Burst[/COLOR]', translation(32061))

    for provider in providers:
        available_providers += 1
        provider_names.append(definitions[provider]['name'])
        task = Thread(target=run_provider, args=(provider, payload, method))
        task.start()

    providers_time = time.time()
    total = float(available_providers)

    # Exit if all providers have returned results or the timeout is reached; check every 250ms.
    while time.time() - providers_time < timeout and available_providers > 0:
        timer = time.time() - providers_time
        log.debug("Timer: %ds / %ds" % (timer, timeout))
        if timer > timeout:
            break
        message = translation(32062) % available_providers if available_providers > 1 else translation(32063)
        if not payload['silent']:
            p_dialog.update(int((total - available_providers) / total * 100), message=message)
        time.sleep(0.25)

    if not payload['silent']:
        p_dialog.close()
    del p_dialog

    if available_providers > 0:
        message = u', '.join(provider_names)
        message = message + translation(32064)
        log.warning(message.encode('utf-8'))
        if not payload['silent']:
            notify(message, ADDON_ICON)

    log.debug("all provider_results: %s" % repr(provider_results))

    filtered_results = apply_filters(provider_results)

    log.debug("all filtered_results: %s" % repr(filtered_results))

    log.info("Providers returned %d results in %s seconds" % (len(filtered_results), round(time.time() - request_time, 2)))

    return filtered_results
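# Hypothetical sketch of a minimal payload accepted by search(); only keys
# referenced above are shown, and all values are illustrative:
_example_payload = {
    'title': 'some movie',
    'titles': {'source': 'Some Movie', 'en': 'Some Movie'},
    'silent': False,
    'skip_auth': False,
}
# search(_example_payload, method='movie') would fan the query out to all
# enabled movie providers and return the filtered, formatted results.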
def extract_torrents(provider, client):
    """ Main torrent extraction generator for non-API based providers

    Args:
        provider (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))
    log.debug("[%s] Extracting torrents from %s using definitions: %s" % (provider, provider, repr(definition)))

    if not client.content:
        if get_setting("use_debug_parser", bool):
            log.debug("[%s] Parser debug | Page content is empty" % provider)
        return

    dom = Html().feed(client.content)

    key_search = get_search_query(definition, "key")
    row_search = get_search_query(definition, "row")
    name_search = get_search_query(definition, "name")
    torrent_search = get_search_query(definition, "torrent")
    info_hash_search = get_search_query(definition, "infohash")
    size_search = get_search_query(definition, "size")
    seeds_search = get_search_query(definition, "seeds")
    peers_search = get_search_query(definition, "peers")
    referer_search = get_search_query(definition, "referer")

    log.debug("[%s] Parser: %s" % (provider, repr(definition['parser'])))

    q = Queue()
    threads = []
    needs_subpage = 'subpage' in definition and definition['subpage']

    if needs_subpage:
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash, referer):
            try:
                log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" % (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey
            headers = {}

            if "subpage_mode" in definition:
                if definition["subpage_mode"] == "xhr":
                    headers['X-Requested-With'] = 'XMLHttpRequest'
                    headers['Content-Language'] = ''

            if referer:
                headers['Referer'] = referer

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'), headers=headers)

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' % (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error("[%s] Subpage extraction for %s failed with: %s" % (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            log.debug("[%s] Subpage torrent for %s: %s" % (provider, repr(uri[0]), torrent))
            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)

    if not dom:
        if get_setting("use_debug_parser", bool):
            log.debug("[%s] Parser debug | Could not parse DOM from page content" % provider)
        return

    if get_setting("use_debug_parser", bool):
        log.debug("[%s] Parser debug | Page content: %s" % (provider, client.content.replace('\r', '').replace('\n', '')))

    key = eval(key_search) if key_search else ""
    if key_search and get_setting("use_debug_parser", bool):
        key_str = key.__str__()
        log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'key', key_search, key_str.replace('\r', '').replace('\n', '')))

    items = eval(row_search)
    if get_setting("use_debug_parser", bool):
        log.debug("[%s] Parser debug | Matched %d items for '%s' query '%s'" % (provider, len(items), 'row', row_search))

    for item in items:
        if get_setting("use_debug_parser", bool):
            item_str = item.__str__()
            log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'row', row_search, item_str.replace('\r', '').replace('\n', '')))

        if not item:
            continue

        try:
            name = eval(name_search) if name_search else ""
            torrent = eval(torrent_search) if torrent_search else ""
            size = eval(size_search) if size_search else ""
            seeds = eval(seeds_search) if seeds_search else ""
            peers = eval(peers_search) if peers_search else ""
            info_hash = eval(info_hash_search) if info_hash_search else ""
            referer = eval(referer_search) if referer_search else ""

            if 'magnet:?' in torrent:
                torrent = torrent[torrent.find('magnet:?'):]

            if get_setting("use_debug_parser", bool):
                log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'name', name_search, name))
                log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'torrent', torrent_search, torrent))
                log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'size', size_search, size))
                log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'seeds', seeds_search, seeds))
                log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'peers', peers_search, peers))
                if info_hash_search:
                    log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'info_hash', info_hash_search, info_hash))
                if referer_search:
                    log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'referer', referer_search, referer))

            # Pass client cookies with torrent if private
            if not torrent.startswith('magnet'):
                user_agent = USER_AGENT

                if client.passkey:
                    torrent = torrent.replace('PASSKEY', client.passkey)
                elif client.token:
                    headers = {
                        'Authorization': client.token,
                        'User-Agent': user_agent
                    }
                    log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                    torrent = append_headers(torrent, headers)
                    log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
                else:
                    parsed_url = urlparse(torrent.split('|')[0])
                    cookie_domain = '{uri.netloc}'.format(uri=parsed_url)
                    cookie_domain = re.sub(r'www\d*\.', '', cookie_domain)
                    cookies = []
                    for cookie in client._cookies:
                        if cookie_domain in cookie.domain:
                            cookies.append(cookie)
                    headers = {}
                    if cookies:
                        headers = {'User-Agent': user_agent}
                        log.debug("[%s] Cookies res: %s / %s" % (provider, repr(headers), repr(client.request_headers)))
                        if client.request_headers:
                            headers.update(client.request_headers)
                        if client.url:
                            headers['Referer'] = client.url
                            headers['Origin'] = client.url
                        # Need to set Cookie afterwards to avoid rewriting it with session Cookies
                        headers['Cookie'] = ";".join(["%s=%s" % (c.name, c.value) for c in cookies])
                    else:
                        headers = {'User-Agent': user_agent}
                    torrent = append_headers(torrent, headers)

            if name and torrent and needs_subpage and not torrent.startswith('magnet'):
                if not torrent.startswith('http'):
                    torrent = definition['root_url'] + torrent.encode('utf-8')
                t = Thread(target=extract_subpage, args=(q, name, torrent, size, seeds, peers, info_hash, referer))
                threads.append(t)
            else:
                yield (name, info_hash, torrent, size, seeds, peers)
        except Exception as e:
            log.error("[%s] Got an exception while parsing results: %s" % (provider, repr(e)))

    if needs_subpage:
        log.debug("[%s] Starting subpage threads..." % provider)
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        for i in range(q.qsize()):
            ret = q.get_nowait()
            log.debug("[%s] Queue %d got: %s" % (provider, i, repr(ret)))
            yield ret
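# Hypothetical sketch of the per-provider parser queries eval'd above: each
# value is a Python expression string evaluated against ``dom`` (or ``item``
# for per-row fields). The method names on ``dom``/``item`` stand in for the
# real HTML parser API, and the selectors are illustrative only.
_example_parser_definition = {
    'subpage': False,
    'parser': {
        'row': "dom.find_all('tr', ('class', 'torrent-row'))",
        'name': "item.find('a', ('class', 'title')).text()",
        'torrent': "item.find('a', ('href', re.compile('magnet:'))).attr('href')",
        'size': "item.find_all('td')[3].text()",
        'seeds': "item.find_all('td')[4].text()",
        'peers': "item.find_all('td')[5].text()",
    },
}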
available_providers = 0
request_time = time.time()
auto_timeout = get_setting("auto_timeout", bool)
timeout = get_setting("timeout", int)
special_chars = "()\"':.[]<>/\\?"

if auto_timeout:
    projectx_addon = xbmcaddon.Addon(id='plugin.video.projectx')
    if projectx_addon:
        if projectx_addon.getSetting('custom_provider_timeout_enabled') == "true":
            timeout = int(projectx_addon.getSetting('custom_provider_timeout')) - 2
        else:
            timeout = 28
        log.debug("Using timeout from projectx: %d seconds" % (timeout))
def __init__(self, info=None, request_charset='utf-8', response_charset=None):
    self._counter = 0
    self._cookies_filename = ''
    self._cookies = LWPCookieJar()
    self.url = None
    self.user_agent = USER_AGENT
    self.content = None
    self.status = None
    self.username = None
    self.token = None
    self.passkey = None
    self.info = info
    self.proxy_url = None
    self.request_charset = request_charset
    self.response_charset = response_charset

    self.needs_proxylock = False

    self.headers = dict()
    self.request_headers = None

    self.session = requests.session()
    self.session.verify = False

    # Enable retrying on failed requests
    retries = Retry(
        total=2,
        read=2,
        connect=2,
        redirect=3,
        backoff_factor=0.1,
        status_forcelist=[429, 500, 502, 503, 504])
    self.session.mount('http://', HTTPAdapter(max_retries=retries))
    self.session.mount('https://', HTTPAdapter(max_retries=retries))
    # self.session = cfscrape.create_scraper()
    # self.scraper = cfscrape.create_scraper()
    # self.session = self.scraper.session()

    global dns_public_list
    global dns_opennic_list
    dns_public_list = get_setting("public_dns_list", unicode).replace(" ", "").split(",")
    dns_opennic_list = get_setting("opennic_dns_list", unicode).replace(" ", "").split(",")
    # socket.setdefaulttimeout(60)

    # Parse proxy information
    proxy = {
        'enabled': get_setting("proxy_enabled", bool),
        'use_type': get_setting("proxy_use_type", int),
        'type': proxy_types[0],
        'host': get_setting("proxy_host", unicode),
        'port': get_setting("proxy_port", int),
        'login': get_setting("proxy_login", unicode),
        'password': get_setting("proxy_password", unicode),
    }

    try:
        proxy['type'] = proxy_types[get_setting("proxy_type", int)]
    except Exception:
        pass

    if get_setting("use_public_dns", bool):
        connection.create_connection = patched_create_connection

    if get_setting("use_projectx_proxy", bool):
        projectx_addon = xbmcaddon.Addon(id='plugin.video.projectx')
        if projectx_addon and projectx_addon.getSetting('internal_proxy_enabled') == "true":
            self.proxy_url = "{0}://{1}:{2}".format("http", "127.0.0.1", "65222")
            if info and "internal_proxy_url" in info:
                self.proxy_url = info["internal_proxy_url"]
            self.session.proxies = {
                'http': self.proxy_url,
                'https': self.proxy_url,
            }
    elif proxy['enabled']:
        if proxy['use_type'] == 0 and info and "proxy_url" in info:
            log.debug("Setting proxy from projectx: %s" % (info["proxy_url"]))
            # Assumed assignment implied by the log line above, so that the
            # proxy handed over by projectx is actually applied below.
            self.proxy_url = info["proxy_url"]
        elif proxy['use_type'] == 1:
            log.debug("Setting proxy with custom settings: %s" % (repr(proxy)))
            if proxy['login'] or proxy['password']:
                self.proxy_url = "{0}://{1}:{2}@{3}:{4}".format(proxy['type'], proxy['login'], proxy['password'], proxy['host'], proxy['port'])
            else:
                self.proxy_url = "{0}://{1}:{2}".format(proxy['type'], proxy['host'], proxy['port'])

        if self.proxy_url:
            self.session.proxies = {
                'http': self.proxy_url,
                'https': self.proxy_url,
            }
def process(provider, generator, filtering, has_special, verify_name=True, verify_size=True, skip_auth=False):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        has_special    (bool): Whether title contains special chars
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes
        skip_auth      (bool): Whether to skip login / token authorization
    """
    log.debug("[%s] execute_process for %s with %s" % (provider, provider, repr(generator)))

    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    client = Client(info=filtering.info, request_charset=definition['charset'], response_charset=definition['response_charset'])
    token = None
    logged_in = False
    token_auth = False

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
        language_exceptions = get_setting('language_exceptions')
        if language_exceptions.strip().lower():
            filtering.language_exceptions = re.split(r',\s?', language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras: %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if has_special:
            # Remove quotes surrounding {title*} keywords when the title contains special chars
            query = re.sub("[\"']({title.*?})[\"']", '\\1', query)
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        if extra == '-' and filtering.results:
            continue

        try:
            if 'charset' in definition and definition['charset'] and 'utf' not in definition['charset'].lower():
                query = urllib.quote(query.encode(definition['charset']))
                extra = urllib.quote(extra.encode(definition['charset']))
            else:
                query = urllib.quote(query.encode('utf-8'))
                extra = urllib.quote(extra.encode('utf-8'))
        except Exception as e:
            log.debug("[%s] Could not quote the query (%s): %s" % (provider, query, e))

        log.debug("[%s] After keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra and extra != '-':
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')

        url_search = url_search.replace(' ', definition['separator'])
        if definition['separator'] != '%20':
            url_search = url_search.replace('%20', definition['separator'])

        # MagnetDL fix...
        url_search = url_search.replace('FIRSTLETTER', query[:1])

        # Create the payload for the POST method
        if 'post_data' in definition and not filtering.post_data:
            filtering.post_data = eval(definition['post_data'])
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]
            payload[key] = urllib.unquote(payload[key])

        # Create the payload for the GET method
        headers = None
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("- %s query: %s" % (provider, repr(query)))
        log.debug("-- %s url_search before token: %s" % (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("---- %s filtering with post_data: %s" % (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if 'initial_url' in definition and definition['initial_url']:
            url = definition['initial_url']
            if not url.startswith('http'):
                url = definition['root_url'] + url
            client.open(url)

        if token:
            log.info('[%s] Reusing existing token' % provider)
            url_search = url_search.replace('TOKEN', token)
        elif 'token' in definition:
            token_url = definition['base_url'] + definition['token']
            log.debug("[%s] Getting token for %s at %s" % (provider, provider, repr(token_url)))
            client.open(token_url.encode('utf-8'))
            try:
                token_data = json.loads(client.content)
            except Exception:
                log.error('%s: Failed to get token for %s' % (provider, repr(url_search)))
                return filtering.results
            log.debug("[%s] Token response for %s: %s" % (provider, provider, repr(token_data)))
            if 'token' in token_data:
                token = token_data['token']
                log.debug("[%s] Got token for %s: %s" % (provider, provider, repr(token)))
                url_search = url_search.replace('TOKEN', token)
            else:
                log.warning('%s: Unable to get token for %s' % (provider, repr(url_search)))

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif token_auth:
            log.info("[%s] Reusing previous token authorization" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider, unicode)
            password = get_setting('%s_password' % provider, unicode)
            passkey = get_setting('%s_passkey' % provider, unicode)
            if not username and not password and not passkey:
                for addon_name in ('script.magnetic.%s' % provider, 'script.magnetic.%s-mc' % provider):
                    for setting in ('username', 'password'):
                        try:
                            value = xbmcaddon.Addon(addon_name).getSetting(setting)
                            set_setting('%s_%s' % (provider, setting), value)
                            if setting == 'username':
                                username = value
                            if setting == 'password':
                                password = value
                        except Exception:
                            pass

            if username:
                client.username = username
                url_search = url_search.replace('USERNAME', username)

            if passkey:
                logged_in = True
                client.passkey = passkey
                url_search = url_search.replace('PASSKEY', passkey)
            elif 'login_object' in definition and definition['login_object']:
                login_object = None
                login_headers = None
                logged_in = skip_auth

                try:
                    login_object = definition['login_object'].replace('USERNAME', 'u"%s"' % username).replace('PASSWORD', 'u"%s"' % password)
                except Exception as e:
                    log.error("Could not make login object for %s: %s" % (provider, e))
                try:
                    if 'login_headers' in definition and definition['login_headers']:
                        login_headers = eval(definition['login_headers'])
                except Exception as e:
                    log.error("Could not make login headers for %s: %s" % (provider, e))

                # TODO generic flags in definitions for those...
                if 'csrf_token' in definition and definition['csrf_token']:
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_token = re.search(r'name=\"_?csrf_token\" value=\"(.*?)\"', client.content)
                        if csrf_token:
                            login_object = login_object.replace('CSRF_TOKEN', '"%s"' % csrf_token.group(1))
                        else:
                            logged_in = True

                if 'token_auth' in definition:
                    # log.debug("[%s] logging in with: %s" % (provider, login_object))
                    if client.open(definition['root_url'] + definition['token_auth'], post_data=eval(login_object)):
                        try:
                            token_data = json.loads(client.content)
                        except Exception:
                            log.error('%s: Failed to get token from %s' % (provider, definition['token_auth']))
                            return filtering.results
                        log.debug("[%s] Token response for %s: %s" % (provider, provider, repr(token_data)))
                        if 'token' in token_data:
                            client.token = token_data['token']
                            log.debug("[%s] Auth token for %s: %s" % (provider, provider, repr(client.token)))
                        else:
                            log.error('[%s] Unable to get auth token for %s' % (provider, repr(url_search)))
                            return filtering.results
                        log.info('[%s] Token auth successful' % provider)
                        token_auth = True
                    else:
                        log.error("[%s] Token auth failed with response: %s" % (provider, repr(client.content)))
                        return filtering.results
                elif not logged_in and client.login(definition['root_url'], definition['login_path'], eval(login_object), login_headers, definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider, repr(client.content))
                    return filtering.results

                if logged_in:
                    if provider == 'hd-torrents':
                        client.open(definition['root_url'] + '/torrents.php')
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        url_search = url_search.replace("CSRF_TOKEN", csrf_token.group(1))

        log.info("[%s] > %s search URL: %s" % (provider, definition['name'].rjust(longest), url_search))

        if 'headers' in definition and definition['headers']:
            headers = eval(definition['headers'])
            log.info("[%s] > %s headers: %s" % (provider, definition['name'].rjust(longest), headers))

        client.open(url_search.encode('utf-8'), post_data=payload, get_data=data, headers=headers)
        filtering.results.extend(
            generate_payload(provider, generator(provider, client), filtering, verify_name, verify_size))

    return filtering.results
def open(self, url, language='en', post_data=None, get_data=None, headers=None):
    """ Opens a connection to a webpage and saves its HTML content in ``self.content``

    Args:
        url       (str): The URL to open
        language  (str): The language code for the ``Content-Language`` header
        post_data (dict): POST data for the request
        get_data  (dict): GET data for the request
        headers   (dict): Extra headers, merged over the defaults below

    Returns:
        bool: Whether the request returned status 200
    """
    if get_data:
        url += '?' + urlencode(get_data)

    log.debug("Opening URL: %s" % repr(url))
    if self.session.proxies:
        log.debug("Proxies: %s" % (repr(self.session.proxies)))

    self._read_cookies(url)
    self.session.cookies = self._cookies
    # log.debug("Cookies for %s: %s" % (repr(url), repr(self._cookies)))

    # Default headers for any request. Pretend like we are the usual browser.
    req_headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'Accept-Language': 'en-EN,en;q=0.9,en-US;q=0.8,en;q=0.7,uk;q=0.6,pl;q=0.5',
        'Cache-Control': 'no-cache',
        'Content-Language': language,
        'Origin': url,
        'Referer': url,
        'User-Agent': self.user_agent
    }

    # If headers are passed to the open() call - they overwrite the defaults.
    if headers:
        for key, value in headers.iteritems():
            if key == ':path':
                u = urlparse(url)
                value = u.path
            if value:
                req_headers[key] = value
            elif key.capitalize() in req_headers:
                del req_headers[key.capitalize()]

    if self.token:
        req_headers["Authorization"] = self.token

    req = None
    if post_data:
        req = requests.Request('POST', url, data=post_data, headers=req_headers)
    else:
        req = requests.Request('GET', url, headers=req_headers)
    prepped = self.session.prepare_request(req)
    self.request_headers = prepped.headers

    try:
        self._good_spider()
        with self.session.send(prepped) as response:
            self.headers = response.headers
            self.status = response.status_code
            self.url = response.url
            self._save_cookies()
            if self.response_charset:
                self.content = response.content.decode(self.response_charset, 'ignore')
            else:
                self.content = response.text
    except requests.exceptions.InvalidSchema as e:
        # If the link points to a magnet: URI, it can be used directly as the content
        matches = re.findall('No connection adapters were found for \'(.*?)\'', str(e))
        if matches:
            self.content = matches[0]
            return True
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))
    except Exception as e:
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))

    log.debug("Status for %s : %s" % (repr(url), str(self.status)))
    return self.status == 200
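# Hypothetical end-to-end usage sketch for Client (the URL is illustrative):
# open() returns True on HTTP 200, with the body available via ``content``.
def _example_fetch():
    client = Client()
    if client.open('https://tracker.example/search?q=test'.encode('utf-8')):
        return client.content
    return None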