def get_cloudhole_clearance(cloudhole_key):
    """ CloudHole clearance fetcher

    Args:
        cloudhole_key (str): The CloudHole API key saved in settings or from ``get_cloudhole_key`` directly

    Returns:
        tuple: A CloudHole clearance cookie and user-agent string
    """
    user_agent = USER_AGENT
    clearance = None
    if cloudhole_key:
        try:
            r = urllib2.Request("https://cloudhole.herokuapp.com/clearances")
            r.add_header('Content-type', 'application/json')
            r.add_header('Authorization', cloudhole_key)
            with closing(urllib2.urlopen(r)) as response:
                content = response.read()
            log.debug("CloudHole returned: %s" % content)
            data = json.loads(content)
            user_agent = data[0]['userAgent']
            clearance = data[0]['cookies']
            log.info("New UA and clearance: %s / %s" % (user_agent, clearance))
        except Exception as e:
            log.error("CloudHole error: %s" % repr(e))
    return clearance, user_agent
def extract_subpage(q, name, torrent, size, seeds, peers, info_hash):
    try:
        log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
    except Exception as e:
        import traceback
        log.error("[%s] Subpage logging failed with: %s" % (provider, repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))

    # New client instance, otherwise it's race conditions all over the place
    subclient = Client()
    subclient.passkey = client.passkey

    if get_setting("use_cloudhole", bool):
        subclient.clearance = get_setting('clearance')
        subclient.user_agent = get_setting('user_agent')

    uri = torrent.split('|')  # Split cookies for private trackers
    subclient.open(uri[0].encode('utf-8'))

    if 'bittorrent' in subclient.headers.get('content-type', ''):
        log.debug('[%s] bittorrent content-type for %s' % (provider, repr(torrent)))
        if len(uri) > 1:  # Stick back cookies if needed
            torrent = '%s|%s' % (torrent, uri[1])
    else:
        try:
            torrent = extract_from_page(provider, subclient.content)
            if torrent and not torrent.startswith('magnet') and len(uri) > 1:  # Stick back cookies if needed
                torrent = '%s|%s' % (torrent, uri[1])
        except Exception as e:
            import traceback
            log.error("[%s] Subpage extraction for %s failed with: %s" % (provider, repr(uri[0]), repr(e)))
            map(log.debug, traceback.format_exc().split("\n"))

    ret = (name, info_hash, torrent, size, seeds, peers)
    q.put_nowait(ret)
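# Example of the '|' cookie convention handled above (illustrative values):
#   'https://tracker.example/download/123.torrent|uid=1;pass=abc'
# splits into the URL to fetch and a cookie string, which gets appended back to
# whatever link extract_from_page() returns so private trackers stay logged in.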
def load_overrides(path, custom=False):
    """ Overrides loader for Python files

    Note:
        Overrides must be in an ``overrides`` dictionary.

    Args:
        path (str): Path to Python file to be loaded
        custom (bool): Boolean flag to specify if this is a custom overrides file
    """
    try:
        if custom:
            sys.path.append(path)
            from overrides import overrides
        else:
            from burst_overrides import overrides

        if custom:
            log.debug("Imported overrides: %s", repr(overrides))

        for provider in overrides:
            update_definitions(provider, overrides[provider])

        if custom:
            log.info("Successfully loaded overrides from %s", os.path.join(path, "overrides.py"))
    except Exception as e:
        import traceback
        log.error("Failed importing %soverrides: %s", "custom " if custom else "", repr(e))
        map(log.error, traceback.format_exc().split("\n"))
def get_cloudhole_key():
    """ CloudHole API key fetcher

    Returns:
        str: A CloudHole API key
    """
    cloudhole_key = None
    try:
        r = urllib2.Request("https://cloudhole.herokuapp.com/key")
        r.add_header('Content-type', 'application/json')
        with closing(urllib2.urlopen(r)) as response:
            content = response.read()
        log.info("CloudHole key: %s" % content)
        data = json.loads(content)
        cloudhole_key = data['key']
    except Exception as e:
        log.error("Getting CloudHole key error: %s" % repr(e))
    return cloudhole_key
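# A minimal usage sketch (not part of the original module): fetch a CloudHole
# API key, exchange it for a clearance cookie plus user-agent, and persist both
# the way ``search`` below does. ``refresh_cloudhole_settings`` is a
# hypothetical helper name; ``set_setting`` is the settings helper this add-on
# already uses.
def refresh_cloudhole_settings():
    clearance, user_agent = get_cloudhole_clearance(get_cloudhole_key())
    set_setting('clearance', clearance)
    set_setting('user_agent', user_agent)
    return clearance, user_agent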
def load_providers(path, custom=False, fix_seasons=False):
    """ Definitions loader for json files

    Args:
        path (str): Path to json file to be loaded
        custom (bool): Boolean flag to specify if this is a custom provider
        fix_seasons (bool): Boolean flag to apply default fix to seasons keywords
    """
    try:
        with open(path) as file:
            providers = json.load(file)
        for provider in providers:
            update_definitions(provider, providers[provider], custom, fix_seasons)
    except Exception as e:
        import traceback
        log.error("Failed importing providers from %s: %s", path, repr(e))
        map(log.error, traceback.format_exc().split("\n"))
def load_providers(path, custom=False):
    """ Definitions loader for json files

    Args:
        path (str): Path to json file to be loaded
        custom (bool): Boolean flag to specify if this is a custom provider
    """
    if not os.path.exists(path):
        return

    try:
        with open(path) as file:
            providers = json.load(file)
        for provider in providers:
            update_definitions(provider, providers[provider], custom)
    except Exception as e:
        import traceback
        log.error("Failed importing providers from %s: %s", path, repr(e))
        map(log.error, traceback.format_exc().split("\n"))
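# Illustrative only: a guessed-at fragment of a custom provider definition,
# limited to keys this module demonstrably reads (``name``, ``language``,
# ``base_url``, ``separator``, ``private``). The authoritative schema is
# whatever providers.json ships with; every value below is a placeholder.
example_provider_definition = {
    "myprovider": {
        "name": "MyProvider",
        "language": "en",
        "base_url": "https://example.com",
        "separator": "%20",
        "private": False
    }
}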
def open(self, url, language='en', post_data=None, get_data=None):
    """ Opens a connection to a webpage and saves its HTML content in ``self.content``

    Args:
        url (str): The URL to open
        language (str): The language code for the ``Content-Language`` header
        post_data (dict): POST data for the request
        get_data (dict): GET data for the request
    """
    if not post_data:
        post_data = {}
    if get_data:
        url += '?' + urlencode(get_data)

    log.debug("Opening URL: %s" % repr(url))
    result = False

    data = urlencode(post_data) if len(post_data) > 0 else None
    req = urllib2.Request(url, data)

    self._read_cookies(url)
    log.debug("Cookies for %s: %s" % (repr(url), repr(self._cookies)))

    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cookies))
    req.add_header('User-Agent', self.user_agent)
    req.add_header('Content-Language', language)
    req.add_header("Accept-Encoding", "gzip")
    if self.token:
        req.add_header("Authorization", self.token)

    try:
        self._good_spider()
        with closing(opener.open(req)) as response:
            self.headers = response.headers
            self._save_cookies()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                self.content = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(response.read())
            else:
                self.content = response.read()

            charset = response.headers.getparam('charset')
            if not charset:
                match = re.search("""<meta(?!\s*(?:name|value)\s*=)[^>]*?charset\s*=[\s"']*([^\s"'/>]*)""", self.content)
                if match:
                    charset = match.group(1)
            if charset and charset.lower() == 'utf-8':
                charset = 'utf-8-sig'  # Changing to utf-8-sig to remove BOM if found on decode from utf-8
            if charset:
                log.debug('Decoding charset from %s for %s' % (charset, repr(url)))
                self.content = self.content.decode(charset, 'replace')

            self.status = response.getcode()
            result = True

    except urllib2.HTTPError as e:
        self.status = e.code
        log.warning("Status for %s : %s" % (repr(url), str(self.status)))
        if e.code == 403 or e.code == 503:
            log.warning("CloudFlared at %s, try enabling CloudHole" % url)

    except urllib2.URLError as e:
        self.status = repr(e.reason)
        log.warning("Status for %s : %s" % (repr(url), self.status))

    except Exception as e:
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))

    log.debug("Status for %s : %s" % (repr(url), str(self.status)))

    return result
def open(self, url, language='en', post_data=None, get_data=None):
    """ Opens a connection to a webpage and saves its HTML content in ``self.content``

    Args:
        url (str): The URL to open
        language (str): The language code for the ``Content-Language`` header
        post_data (dict): POST data for the request
        get_data (dict): GET data for the request
    """
    if not post_data:
        post_data = {}
    if get_data:
        url += '?' + urlencode(get_data)

    log.debug("Opening URL: %s" % repr(url))
    result = False

    data = urlencode(post_data) if len(post_data) > 0 else None
    req = urllib2.Request(url, data)

    self._read_cookies(url)
    log.debug("Cookies for %s: %s" % (repr(url), repr(self._cookies)))

    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cookies))
    req.add_header('User-Agent', self.user_agent)
    req.add_header('Content-Language', language)
    req.add_header("Accept-Encoding", "gzip")
    req.add_header("Origin", url)
    req.add_header("Referer", url)

    try:
        self._good_spider()
        with closing(opener.open(req)) as response:
            self.headers = response.headers
            self._save_cookies()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                self.content = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(response.read())
            else:
                self.content = response.read()

            charset = response.headers.getparam('charset')
            if not charset:
                match = re.search("""<meta(?!\s*(?:name|value)\s*=)[^>]*?charset\s*=[\s"']*([^\s"'/>]*)""", self.content)
                if match:
                    charset = match.group(1)
            if charset and charset.lower() == 'utf-8':
                charset = 'utf-8-sig'  # Changing to utf-8-sig to remove BOM if found on decode from utf-8
            if charset:
                log.debug('Decoding charset from %s for %s' % (charset, repr(url)))
                self.content = self.content.decode(charset, 'replace')

            self.status = response.getcode()
            result = True

    except urllib2.HTTPError as e:
        self.status = e.code
        log.warning("Status for %s : %s" % (repr(url), str(self.status)))

    except urllib2.URLError as e:
        self.status = repr(e.reason)
        log.warning("Status for %s : %s" % (repr(url), self.status))

    except Exception as e:
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))

    log.debug("Status for %s : %s" % (repr(url), str(self.status)))

    return result
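# A minimal sketch of how ``open`` is driven by the rest of this module: the
# boolean return only signals success, while the decoded body lands in
# ``client.content``, the response headers in ``client.headers`` and the HTTP
# code in ``client.status``. URL and query values are placeholders.
client = Client()
if client.open("https://example.com/search", get_data={'q': 'ubuntu'}):
    log.debug("Got %d bytes (HTTP %s)" % (len(client.content), client.status))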
def process_keywords(self, provider, text):
    """ Processes the query payload from a provider's keyword definitions

    Args:
        provider (str): Provider ID
        text (str): Keyword placeholders from definitions, ie. {title}

    Returns:
        str: Processed query keywords
    """
    keywords = self.read_keywords(text)

    for keyword in keywords:
        keyword = keyword.lower()
        if 'title' in keyword:
            title = self.info["title"]
            language = definitions[provider]['language']
            use_language = None
            if ':' in keyword:
                use_language = keyword.split(':')[1]
            if provider not in self.language_exceptions and \
               (use_language or self.kodi_language) and \
               'titles' in self.info and self.info['titles']:
                try:
                    if self.kodi_language and self.kodi_language in self.info['titles']:
                        use_language = self.kodi_language
                    if use_language not in self.info['titles']:
                        use_language = language
                    if use_language in self.info['titles'] and self.info['titles'][use_language]:
                        title = self.info['titles'][use_language]
                        title = self.normalize_name(title)
                        log.info("[%s] Using translated '%s' title %s" % (provider, use_language, repr(title)))
                        log.debug("[%s] Translated titles from Quasar: %s" % (provider, repr(self.info['titles'])))
                except Exception as e:
                    import traceback
                    log.error("%s failed with: %s" % (provider, repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))
            text = text.replace('{%s}' % keyword, title)

        if 'year' in keyword:
            text = text.replace('{%s}' % keyword, str(self.info["year"]))

        if 'season' in keyword:
            if '+' in keyword:
                keys = keyword.split('+')
                if keys[1] == "t411season":
                    season = str(t411season(self.info['season']))
                else:
                    season = str(self.info["season"] + get_int(keys[1]))
            elif ':' in keyword:
                keys = keyword.split(':')
                season = ('%%.%sd' % keys[1]) % self.info["season"]
            else:
                season = '%s' % self.info["season"]
            text = text.replace('{%s}' % keyword, season)

        if 'episode' in keyword:
            if '+' in keyword:
                keys = keyword.split('+')
                if keys[1] == "t411episode":
                    episode = str(t411episode(self.info['episode']))
                else:
                    episode = str(self.info["episode"] + get_int(keys[1]))
            elif ':' in keyword:
                keys = keyword.split(':')
                episode = ('%%.%sd' % keys[1]) % self.info["episode"]
            else:
                episode = '%s' % self.info["episode"]
            text = text.replace('{%s}' % keyword, episode)

    return text
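# Worked example with hypothetical values: for info = {'title': 'some show',
# 'season': 1, 'episode': 2}, the template
#   '{title} s{season:2}e{episode:2}'
# expands to 'some show s01e02', since '{season:2}' builds the format string
# '%.2d' from the digits after ':'. A '{season+1}' keyword would instead add 1
# to the season number (giving '2'), and '{season+t411season}' routes through
# the t411season() helper.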
               fix_seasons=True)

# Load built-in providers
load_providers(os.path.join(ADDON_PATH, 'burst', 'providers', 'providers.json'))

# Load providers overrides
load_overrides(os.path.join(ADDON_PATH, 'burst', 'providers'))

# Load user's custom providers
custom_providers = os.path.join(xbmc.translatePath(ADDON_PROFILE), "providers")
if not os.path.exists(custom_providers):
    try:
        os.makedirs(custom_providers)
    except Exception as e:
        log.error("Unable to create custom providers folder: %s", repr(e))
        pass

for provider_file in glob(os.path.join(custom_providers, "*.json")):
    log.info("Importing and enabling %s" % provider_file)
    load_providers(provider_file, custom=True)

# Load user's custom overrides
custom_overrides = xbmc.translatePath(ADDON_PROFILE)
if os.path.exists(os.path.join(custom_overrides, 'overrides.py')):
    load_overrides(custom_overrides, custom=True)

longest = 10
if len(definitions) > 0:
    longest = len(definitions[sorted(definitions, key=lambda p: len(definitions[p]['name']), reverse=True)[0]]['name'])
def search(payload, method="general"):
    """ Main search entrypoint

    Args:
        payload (dict): Search payload from Quasar.
        method (str): Type of search, can be ``general``, ``movie``, ``show``, ``season`` or ``anime``

    Returns:
        list: All filtered results in the format Quasar expects
    """
    log.debug("Searching with payload (%s): %s" % (method, repr(payload)))

    if method == 'general':
        payload = {
            'title': payload
        }

    global request_time
    global provider_names
    global provider_results
    global available_providers

    provider_names = []
    provider_results = []
    available_providers = 0
    request_time = time.time()

    providers = get_enabled_providers()

    if len(providers) == 0:
        notify(translation(32060), image=get_icon_path())
        log.error("No providers enabled")
        return []

    log.info("Burstin' with %s" % ", ".join([definitions[provider]['name'] for provider in providers]))

    if get_setting("use_cloudhole", bool):
        clearance, user_agent = get_cloudhole_clearance(get_cloudhole_key())
        set_setting('clearance', clearance)
        set_setting('user_agent', user_agent)

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if not kodi_language:
            log.warning("Kodi returned empty language code...")
        elif 'titles' not in payload or not payload['titles']:
            log.info("No translations available...")
        elif payload['titles'] and kodi_language not in payload['titles']:
            log.info("No '%s' translation available..." % kodi_language)

    p_dialog = xbmcgui.DialogProgressBG()
    p_dialog.create('Quasar [COLOR FFFF6B00]Burst[/COLOR]', translation(32061))

    for provider in providers:
        available_providers += 1
        provider_names.append(definitions[provider]['name'])
        task = Thread(target=run_provider, args=(provider, payload, method))
        task.start()

    providers_time = time.time()
    total = float(available_providers)

    # Exit if all providers have returned results or the timeout is reached, checking every 250ms
    while time.time() - providers_time < timeout and available_providers > 0:
        timer = time.time() - providers_time
        log.debug("Timer: %ds / %ds" % (timer, timeout))
        if timer > timeout:
            break
        message = translation(32062) % available_providers if available_providers > 1 else translation(32063)
        p_dialog.update(int((total - available_providers) / total * 100), message=message)
        time.sleep(0.25)

    p_dialog.close()
    del p_dialog

    if available_providers > 0:
        message = u', '.join(provider_names)
        message = message + translation(32064)
        log.warning(message.encode('utf-8'))
        notify(message, ADDON_ICON)

    log.debug("all provider_results: %s" % repr(provider_results))

    filtered_results = apply_filters(provider_results)

    log.debug("all filtered_results: %s" % repr(filtered_results))

    log.info("Providers returned %d results in %s seconds" % (len(filtered_results), round(time.time() - request_time, 2)))

    return filtered_results
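# A hedged sketch of what a ``show`` search payload can carry, based only on
# keys this module reads (``title``, ``titles``, ``year``, ``season``,
# ``episode``); real payloads come from Quasar and may hold more fields.
example_payload = {
    'title': 'some show',
    'titles': {'en': 'some show', 'fr': 'une serie'},
    'year': 2016,
    'season': 1,
    'episode': 2,
}
# results = search(example_payload, method='show')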
def process(provider, generator, filtering, verify_name=True, verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider (str): Provider ID
        generator (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name (bool): Whether to double-check the results' names match the query or not
        verify_size (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]

    client = Client()
    token = None
    logged_in = False
    token_auth = False

    if get_setting("use_cloudhole", bool):
        client.clearance = get_setting('clearance')
        client.user_agent = get_setting('user_agent')

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
    language_exceptions = get_setting('language_exceptions')
    if language_exceptions.strip().lower():
        filtering.language_exceptions = re.split(r',\s?', language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras: %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        log.debug("[%s] After keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # MagnetDL fix...
        url_search = url_search.replace('FIRSTLETTER', query[:1])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("- %s query: %s" % (provider, repr(query)))
        log.debug("-- %s url_search before token: %s" % (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("----%s filtering with post_data: %s" % (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if token:
            log.info('[%s] Reusing existing token' % provider)
            url_search = url_search.replace('TOKEN', token)
        elif 'token' in definition:
            token_url = definition['base_url'] + definition['token']
            log.debug("Getting token for %s at %s" % (provider, repr(token_url)))
            client.open(token_url.encode('utf-8'))
            try:
                token_data = json.loads(client.content)
            except:
                log.error('%s: Failed to get token for %s' % (provider, repr(url_search)))
                return filtering.results
            log.debug("Token response for %s: %s" % (provider, repr(token_data)))
            if 'token' in token_data:
                token = token_data['token']
                log.debug("Got token for %s: %s" % (provider, repr(token)))
                url_search = url_search.replace('TOKEN', token)
                time.sleep(2)
            else:
                log.warning('%s: Unable to get token for %s' % (provider, repr(url_search)))

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif token_auth:
            log.info("[%s] Reusing previous token authorization" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)
            if not username and not password and not passkey:
                for addon_name in ('script.magnetic.%s' % provider, 'script.magnetic.%s-mc' % provider):
                    for setting in ('username', 'password'):
                        try:
                            value = xbmcaddon.Addon(addon_name).getSetting(setting)
                            set_setting('%s_%s' % (provider, setting), value)
                            if setting == 'username':
                                username = value
                            if setting == 'password':
                                password = value
                        except:
                            pass

            if passkey:
                logged_in = True
                client.passkey = passkey
                url_search = url_search.replace('PASSKEY', passkey)

            elif 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace('USERNAME', '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'alphareign':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_name = re.search(r'name="csrf_name" value="(.*?)"', client.content)
                        csrf_value = re.search(r'name="csrf_value" value="(.*?)"', client.content)
                        if csrf_name and csrf_value:
                            login_object = login_object.replace("CSRF_NAME", '"%s"' % csrf_name.group(1))
                            login_object = login_object.replace("CSRF_VALUE", '"%s"' % csrf_value.group(1))
                        else:
                            logged_in = True
                if provider == 'hd-torrents':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        if csrf_token:
                            login_object = login_object.replace('CSRF_TOKEN', '"%s"' % csrf_token.group(1))
                        else:
                            logged_in = True

                if 'token_auth' in definition:
                    # log.debug("[%s] logging in with: %s" % (provider, login_object))
                    if client.open(definition['root_url'] + definition['token_auth'], post_data=eval(login_object)):
                        try:
                            token_data = json.loads(client.content)
                        except:
                            log.error('%s: Failed to get token from %s' % (provider, definition['token_auth']))
                            return filtering.results
                        log.debug("Token response for %s: %s" % (provider, repr(token_data)))
                        if 'token' in token_data:
                            client.token = token_data['token']
                            log.debug("Auth token for %s: %s" % (provider, repr(client.token)))
                        else:
                            log.error('%s: Unable to get auth token for %s' % (provider, repr(url_search)))
                            return filtering.results
                        log.info('[%s] Token auth successful' % provider)
                        token_auth = True
                    else:
                        log.error("[%s] Token auth failed with response: %s" % (provider, repr(client.content)))
                        return filtering.results
                elif not logged_in and client.login(definition['root_url'] + definition['login_path'],
                                                    eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider, repr(client.content))
                    return filtering.results

        if logged_in:
            if provider == 'hd-torrents':
                client.open(definition['root_url'] + '/torrents.php')
                csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                url_search = url_search.replace("CSRF_TOKEN", csrf_token.group(1))

        log.info("> %s search URL: %s" % (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'), post_data=payload, get_data=data)
        filtering.results.extend(
            generate_payload(provider,
                             generator(provider, client),
                             filtering,
                             verify_name,
                             verify_size))

    return filtering.results
def process(provider, generator, filtering, has_special, verify_name=True, verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider (str): Provider ID
        generator (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        has_special (bool): Whether title contains special chars
        verify_name (bool): Whether to double-check the results' names match the query or not
        verify_size (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    client = Client()
    logged_in = False

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
    language_exceptions = get_setting('language_exceptions')
    if language_exceptions.strip().lower():
        filtering.language_exceptions = re.split(r',\s?', language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras: %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if has_special:
            # Removing quotes, surrounding {title*} keywords, when title contains special chars
            query = re.sub("[\"']({title.*?})[\"']", '\\1', query)
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        if 'charset' in definition and 'utf' not in definition['charset'].lower():
            try:
                query = urllib.quote(query.encode(definition['charset']))
                extra = urllib.quote(extra.encode(definition['charset']))
            except:
                pass
        log.debug("[%s] After keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("- %s query: %s" % (provider, repr(query)))
        log.debug("-- %s url_search before token: %s" % (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("----%s filtering with post_data: %s" % (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)

            if 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace('USERNAME', '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'lostfilm':
                    client.open(definition['root_url'] + '/v_search.php?c=110&s=1&e=1')
                    if client.content == 'log in first':
                        pass
                    else:
                        log.info('[%s] Login successful' % provider)
                        logged_in = True

                if not logged_in and client.login(definition['root_url'] + definition['login_path'],
                                                  eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider, repr(client.content))
                    notify(translation(32089) % provider, image=get_icon_path())
                    return filtering.results

        if logged_in:
            if provider == 'lostfilm':
                log.info('[%s] Search lostfilm serial ID...', provider)
                url_search = fix_lf(url_search)
                client.open(url_search.encode('utf-8'), post_data=payload, get_data=data)
                search_info = re.search(r'rel="(.*?)">', client.content)
                if search_info:
                    series_details = re.search('(\d+),(\d+),(\d+)', search_info.group(1))
                    client.open(definition['root_url'] + '/v_search.php?c=%s&s=%s&e=%s' % (series_details.group(1), series_details.group(2), series_details.group(3)))
                    redirect_url = re.search(ur'url=(.*?)">', client.content)
                    if redirect_url is not None:
                        url_search = redirect_url.group(1)
                else:
                    log.info('[%s] Not found ID in %s' % (provider, url_search))
                    return filtering.results

        log.info("> %s search URL: %s" % (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'), post_data=payload, get_data=data)
        filtering.results.extend(
            generate_payload(provider,
                             generator(provider, client),
                             filtering,
                             verify_name,
                             verify_size))

    return filtering.results
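# Orientation sketch, all wiring hypothetical: ``search`` above starts one
# run_provider() thread per enabled provider, and run_provider is expected to
# build a Filtering instance and hand it to ``process`` together with one of
# the generators named in the docstring, e.g.:
#
#   process('myprovider', extract_torrents, filtering, has_special=False)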