def get_cloudhole_clearance(cloudhole_key):
    """ CloudHole clearance fetcher

    Args:
        cloudhole_key (str): The CloudHole API key saved in settings or from ``get_cloudhole_key`` directly

    Returns:
        tuple: A CloudHole clearance cookie and user-agent string
    """
    user_agent = USER_AGENT
    clearance = None
    if cloudhole_key:
        try:
            r = urllib2.Request("https://cloudhole.herokuapp.com/clearances")
            r.add_header('Content-type', 'application/json')
            r.add_header('Authorization', cloudhole_key)
            with closing(urllib2.urlopen(r)) as response:
                content = response.read()
            log.debug("CloudHole returned: %s" % content)
            data = json.loads(content)
            user_agent = data[0]['userAgent']
            clearance = data[0]['cookies']
            log.info("New UA and clearance: %s / %s" % (user_agent, clearance))
        except Exception as e:
            log.error("CloudHole error: %s" % repr(e))
    return clearance, user_agent
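# Hedged usage sketch: refresh the stored clearance the way search() does
# when the "use_cloudhole" setting is enabled. get_cloudhole_key() and
# set_setting() are helpers from this same codebase; refresh_clearance
# itself is a hypothetical wrapper, not part of the original module.
def refresh_clearance():
    clearance, user_agent = get_cloudhole_clearance(get_cloudhole_key())
    if clearance:
        set_setting('clearance', clearance)
        set_setting('user_agent', user_agent)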
def use_anime(self, provider, info):
    """ Setup method to define anime search parameters

    Args:
        provider (str): Provider ID
        info (dict): Quasar search payload
    """
    definition = definitions[provider]
    anime_query = definition['anime_query'] if definition['anime_query'] else ''
    log.debug("Anime URL: %s%s" % (definition['base_url'], anime_query))
    if get_setting('separate_sizes', bool):
        self.min_size = get_float(get_setting('min_size_episodes'))
        self.max_size = get_float(get_setting('max_size_episodes'))
        self.check_sizes()
    self.info = info
    self.url = u"%s%s" % (definition['base_url'], anime_query)
    if self.info['absolute_number']:
        self.info['episode'] = self.info['absolute_number']
    if definition['anime_keywords']:
        self.queries = ["%s" % definition['anime_keywords']]
        self.extras = ["%s" % definition['anime_extra'] if definition['anime_extra'] else '']
def _read_cookies(self, url=''):
    cookies_path = os.path.join(PATH_TEMP, 'burst')
    if not os.path.exists(cookies_path):
        try:
            os.makedirs(cookies_path)
        except Exception as e:
            log.debug("Error creating cookies directory: %s" % repr(e))
    self._cookies_filename = os.path.join(cookies_path, urlparse(url).netloc + '_cookies.jar')
    if os.path.exists(self._cookies_filename):
        try:
            self._cookies.load(self._cookies_filename)
        except Exception as e:
            log.debug("Reading cookies error: %s" % repr(e))

    # Check for cf_clearance cookie
    # https://github.com/scakemyer/cloudhole-api
    if self.clearance and not any(cookie.name == 'cf_clearance' for cookie in self._cookies):
        c = Cookie(version=None,
                   name='cf_clearance',
                   value=self.clearance[13:],
                   port=None,
                   port_specified=False,
                   domain='.{uri.netloc}'.format(uri=urlparse(url)),
                   domain_specified=True,
                   domain_initial_dot=True,
                   path='/',
                   path_specified=True,
                   secure=False,
                   expires=None,
                   discard=False,
                   comment=None,
                   comment_url=None,
                   rest=None,
                   rfc2109=False)
        self._cookies.set_cookie(c)
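# Hedged illustration of the value slicing above: the clearance string from
# CloudHole appears to be the full cookie pair, so dropping the first 13
# characters (len("cf_clearance=")) keeps only the cookie value. The sample
# string below is invented.
sample_clearance = "cf_clearance=abc123def456-1482954105-3600"
assert len("cf_clearance=") == 13
sample_value = sample_clearance[13:]  # -> "abc123def456-1482954105-3600"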
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    max_results = get_setting('max_results', int)
    if get_setting('sort_by_resolution', bool):
        log.debug("[%s][EXPERIMENTAL] Sorting by resolution before cutoff max_results" % provider)
        sorted_results = sorted(results, key=lambda r: (r['resolution']), reverse=True)
    else:
        sorted_results = sorted(results, key=lambda r: (r['seeds']), reverse=True)

    if get_setting('disable_max', bool):
        log.debug('[%s] Not applying the "max_results" setting' % provider)
        max_results = 999
    elif len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info(">> %s returned %2d results in %.1f seconds%s" % (
        definition['name'].rjust(longest), len(results), round(time.time() - request_time, 2),
        (", sending %d best ones" % max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
def load_overrides(path, custom=False):
    """ Overrides loader for Python files

    Note:
        Overrides must be in an ``overrides`` dictionary.

    Args:
        path (str): Path to Python file to be loaded
        custom (bool): Boolean flag to specify if this is a custom overrides file
    """
    try:
        if custom:
            sys.path.append(path)
            from overrides import overrides
        else:
            from burst_overrides import overrides
        if custom:
            log.debug("Imported overrides: %s", repr(overrides))
        for provider in overrides:
            update_definitions(provider, overrides[provider])
        if custom:
            log.info("Successfully loaded overrides from %s", os.path.join(path, "overrides.py"))
    except Exception as e:
        import traceback
        log.error("Failed importing %soverrides: %s", "custom " if custom else "", repr(e))
        map(log.error, traceback.format_exc().split("\n"))
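# Hedged sketch of a custom overrides.py as load_overrides() expects it: a
# module-level ``overrides`` dict keyed by provider ID, whose values are
# handed to update_definitions(). The provider ID and field values below are
# illustrative only.
overrides = {
    'some_provider': {
        'base_url': 'https://mirror.example.com',
        'general_keywords': '{title}',
    },
}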
def use_episode(self, provider, payload):
    """ Setup method to define episode search parameters

    Args:
        provider (str): Provider ID
        payload (dict): Quasar search payload
    """
    definition = definitions[provider]
    show_query = definition['show_query'] if definition['show_query'] else ''
    log.debug("Episode URL: %s%s" % (definition['base_url'], show_query))
    if get_setting('separate_sizes', bool):
        self.min_size = get_float(get_setting('min_size_episodes'))
        self.max_size = get_float(get_setting('max_size_episodes'))
        self.check_sizes()
    self.info = payload
    self.url = u"%s%s" % (definition['base_url'], show_query)
    if definition['tv_keywords']:
        self.queries = ["%s" % definition['tv_keywords']]
        self.extras = ["%s" % definition['tv_extra'] if definition['tv_extra'] else '']

    # TODO this sucks, tv_keywords should be a list from the start..
    if definition['tv_keywords2']:
        self.queries.append(definition['tv_keywords2'])
        self.extras.append(definition['tv_extra2'] if definition['tv_extra2'] else '')
def use_season(self, provider, info):
    """ Setup method to define season search parameters

    Args:
        provider (str): Provider ID
        info (dict): Quasar search payload
    """
    definition = definitions[provider]
    season_query = definition['season_query'] if definition['season_query'] else ''
    log.debug("Season URL: %s%s" % (definition['base_url'], season_query))
    if get_setting('separate_sizes', bool):
        self.min_size = get_float(get_setting('min_size_seasons'))
        self.max_size = get_float(get_setting('max_size_seasons'))
        self.check_sizes()
    self.info = info
    self.url = u"%s%s" % (definition['base_url'], season_query)
    if definition['season_keywords']:
        self.queries = ["%s" % definition['season_keywords']]
        self.extras = ["%s" % definition['season_extra'] if definition['season_extra'] else '']
    if definition['season_keywords2']:
        self.queries.append("%s" % definition['season_keywords2'])
        self.extras.append("%s" % definition['season_extra2'] if definition['season_extra2'] else '')
def run_provider(provider, payload, method):
    """ Provider thread entrypoint

    Args:
        provider (str): Provider ID
        payload (dict): Search payload from Quasar
        method (str): Type of search, can be ``general``, ``movie``, ``episode``, ``season`` or ``anime``
    """
    log.debug("Processing %s with %s method" % (provider, method))

    filterInstance = Filtering()
    if method == 'movie':
        filterInstance.use_movie(provider, payload)
    elif method == 'season':
        filterInstance.use_season(provider, payload)
    elif method == 'episode':
        filterInstance.use_episode(provider, payload)
    elif method == 'anime':
        filterInstance.use_anime(provider, payload)
    else:
        filterInstance.use_general(provider, payload)

    if 'is_api' in definitions[provider]:
        results = process(provider=provider, generator=extract_from_api, filtering=filterInstance)
    else:
        results = process(provider=provider, generator=extract_torrents, filtering=filterInstance)

    got_results(provider, results)
def information(self, provider):
    """ Debugging method to print keywords and file sizes
    """
    log.debug('[%s] Accepted resolutions: %s' % (provider, self.resolutions_allow))
    log.debug('[%s] Accepted release types: %s' % (provider, self.releases_allow))
    log.debug('[%s] Blocked release types: %s' % (provider, self.releases_deny))
    log.debug('[%s] Minimum size: %s' % (provider, str(self.min_size) + ' GB'))
    log.debug('[%s] Maximum size: %s' % (provider, str(self.max_size) + ' GB'))
def generate_payload(provider, generator, filtering, verify_name=True, verify_size=True):
    """ Payload formatter to format results the way Quasar expects them

    Args:
        provider (str): Provider ID
        generator (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name (bool): Whether to double-check the results' names match the query or not
        verify_size (bool): Whether to check the results' file sizes

    Returns:
        list: Formatted results
    """
    filtering.information(provider)
    results = []
    definition = definitions[provider]

    for name, info_hash, uri, size, seeds, peers in generator:
        size = clean_size(size)
        # uri, info_hash = clean_magnet(uri, info_hash)
        v_name = name if verify_name else filtering.title
        v_size = size if verify_size else None
        if filtering.verify(provider, v_name, v_size):
            results.append({
                "name": name,
                "uri": uri,
                "info_hash": info_hash,
                "size": size,
                "seeds": get_int(seeds),
                "peers": get_int(peers),
                "language": definition["language"] if 'language' in definition else 'en',
                "provider": '[COLOR %s]%s[/COLOR]' % (definition['color'], definition['name']),
                "icon": os.path.join(ADDON_PATH, 'burst', 'providers', 'icons', '%s.png' % provider),
            })
        else:
            log.debug(filtering.reason.encode('utf-8'))

    log.debug('>>>>>> %s would send %d torrents to Quasar <<<<<<<' % (provider, len(results)))

    return results
def cleanup_results(results_list):
    """ Remove duplicate results, hash results without an info_hash, and sort by seeders

    Args:
        results_list (list): Results to clean-up

    Returns:
        list: De-duplicated, hashed and sorted results
    """
    if len(results_list) == 0:
        return []

    hashes = []
    filtered_list = []
    for result in results_list:
        if not result['seeds']:
            continue

        if not result['uri']:
            if not result['name']:
                continue
            try:
                log.warning('[%s] No URI for %s' % (result['provider'][16:-8], repr(result['name'])))
            except Exception as e:
                import traceback
                log.warning("%s logging failed with: %s" % (result['provider'], repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))
            continue

        hash_ = result['info_hash'].upper()

        if not hash_:
            if result['uri'] and result['uri'].startswith('magnet'):
                hash_ = Magnet(result['uri']).info_hash.upper()
            else:
                hash_ = hashlib.md5(result['uri']).hexdigest()

        try:
            log.debug("[%s] Hash for %s: %s" % (result['provider'][16:-8], repr(result['name']), hash_))
        except Exception as e:
            import traceback
            log.warning("%s logging failed with: %s" % (result['provider'], repr(e)))
            map(log.debug, traceback.format_exc().split("\n"))

        if not any(existing == hash_ for existing in hashes):
            filtered_list.append(result)
            hashes.append(hash_)

    return sorted(filtered_list, key=lambda r: (get_int(r['seeds'])), reverse=True)
def apply_filters(results_list):
    """ Applies final result de-duplicating, hashing and sorting

    Args:
        results_list (list): Formatted results in any order

    Returns:
        list: Filtered and sorted results
    """
    results_list = cleanup_results(results_list)
    log.debug("Filtered results: %s" % repr(results_list))

    return results_list
def use_general(self, provider, payload):
    """ Setup method to define general search parameters

    Args:
        provider (str): Provider ID
        payload (dict): Quasar search payload
    """
    definition = definitions[provider]
    general_query = definition['general_query'] if definition['general_query'] else ''
    log.debug("General URL: %s%s" % (definition['base_url'], general_query))
    self.info = payload
    self.url = u"%s%s" % (definition['base_url'], general_query)
    if definition['general_keywords']:
        self.queries = [definition['general_keywords']]
        self.extras = [definition['general_extra']]
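# Hedged sketch of the definition fields this setup method (and process())
# reads: base_url/general_query build self.url, general_keywords/general_extra
# feed process_keywords(), and separator/root_url/private are used later when
# searching. All values below are invented for illustration.
definitions['example'] = {
    'name': 'Example',
    'color': 'FFFFFFFF',
    'language': 'en',
    'base_url': 'https://example.com/search/QUERY',
    'root_url': 'https://example.com',
    'separator': '+',
    'private': False,
    'general_query': '',
    'general_keywords': '{title}',
    'general_extra': '',
}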
def _read_cookies(self, url=''):
    cookies_path = os.path.join(PATH_TEMP, 'nova')
    if not os.path.exists(cookies_path):
        try:
            os.makedirs(cookies_path)
        except Exception as e:
            log.debug("Error creating cookies directory: %s" % repr(e))
    self._cookies_filename = os.path.join(cookies_path, urlparse(url).netloc + '_cookies.jar')
    if os.path.exists(self._cookies_filename):
        try:
            self._cookies.load(self._cookies_filename)
        except Exception as e:
            log.debug("Reading cookies error: %s" % repr(e))
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]

    matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
    if matches:
        result = matches[0]
        log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
        return result

    matches = re.findall('http(.*?).torrent["\']', content)
    if matches:
        result = 'http' + matches[0] + '.torrent'
        result = result.replace('torcache.net', 'itorrents.org')
        log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
        return result

    matches = re.findall('/download\?token=[A-Za-z0-9%]+', content)
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with token: %s' % (provider, repr(result)))
        return result

    matches = re.findall('/telechargement/[a-z0-9-_.]+', content)  # cpasbien
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched some french link: %s' % (provider, repr(result)))
        return result

    matches = re.findall('/torrents/download/\?id=[a-z0-9-_.]+', content)  # t411
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with an ID: %s' % (provider, repr(result)))
        return result

    return None
def use_movie(self, provider, payload):
    """ Setup method to define movie search parameters

    Args:
        provider (str): Provider ID
        payload (dict): Quasar search payload
    """
    definition = definitions[provider]
    movie_query = definition['movie_query'] if definition['movie_query'] else ''
    log.debug("Movies URL: %s%s" % (definition['base_url'], movie_query))
    if get_setting('separate_sizes', bool):
        self.min_size = get_float(get_setting('min_size_movies'))
        self.max_size = get_float(get_setting('max_size_movies'))
        self.check_sizes()
    self.info = payload
    self.url = u"%s%s" % (definition['base_url'], movie_query)
    if definition['movie_keywords']:
        self.queries = ["%s" % definition['movie_keywords']]
        self.extras = ["%s" % definition['movie_extra']]
def use_general(self, provider, payload):
    """ Setup method to define general search parameters

    Args:
        provider (str): Provider ID
        payload (dict): Elementum search payload
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))
    if get_setting("use_opennic_dns", bool) and "opennic_dns_alias" in definition:
        definition = get_alias(definition, definition["opennic_dns_alias"])
    general_query = definition['general_query'] if definition['general_query'] else ''
    log.debug("General URL: %s%s" % (definition['base_url'], general_query))
    self.info = payload
    self.url = u"%s%s" % (definition['base_url'], general_query)
    if definition['general_keywords']:
        self.queries = [definition['general_keywords']]
        self.extras = [definition['general_extra']]
def use_movie(self, provider, payload):
    """ Setup method to define movie search parameters

    Args:
        provider (str): Provider ID
        payload (dict): Elementum search payload
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))
    if get_setting("use_opennic_dns", bool) and "opennic_dns_alias" in definition:
        definition = get_alias(definition, definition["opennic_dns_alias"])
    movie_query = definition['movie_query'] if definition['movie_query'] else ''
    log.debug("Movies URL: %s%s" % (definition['base_url'], movie_query))
    if get_setting('separate_sizes', bool):
        self.min_size = get_float(get_setting('min_size_movies'))
        self.max_size = get_float(get_setting('max_size_movies'))
        self.check_sizes()
    self.info = payload
    self.url = u"%s%s" % (definition['base_url'], movie_query)
    if definition['movie_keywords']:
        self.queries = ["%s" % definition['movie_keywords']]
        self.extras = ["%s" % definition['movie_extra']]
def process(provider, generator, filtering, verify_name=True, verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider (str): Provider ID
        generator (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name (bool): Whether to double-check the results' names match the query or not
        verify_size (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]

    client = Client()
    token = None
    logged_in = False
    token_auth = False

    if get_setting("use_cloudhole", bool):
        client.clearance = get_setting('clearance')
        client.user_agent = get_setting('user_agent')

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
        language_exceptions = get_setting('language_exceptions')
        if language_exceptions.strip().lower():
            filtering.language_exceptions = re.split(r',\s?', language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras: %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        log.debug("[%s] After keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # MagnetDL fix...
        url_search = url_search.replace('FIRSTLETTER', query[:1])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("- %s query: %s" % (provider, repr(query)))
        log.debug("-- %s url_search before token: %s" % (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("---- %s filtering with post_data: %s" % (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if token:
            log.info('[%s] Reusing existing token' % provider)
            url_search = url_search.replace('TOKEN', token)
        elif 'token' in definition:
            token_url = definition['base_url'] + definition['token']
            log.debug("Getting token for %s at %s" % (provider, repr(token_url)))
            client.open(token_url.encode('utf-8'))
            try:
                token_data = json.loads(client.content)
            except:
                log.error('%s: Failed to get token for %s' % (provider, repr(url_search)))
                return filtering.results
            log.debug("Token response for %s: %s" % (provider, repr(token_data)))
            if 'token' in token_data:
                token = token_data['token']
                log.debug("Got token for %s: %s" % (provider, repr(token)))
                url_search = url_search.replace('TOKEN', token)
            else:
                log.warning('%s: Unable to get token for %s' % (provider, repr(url_search)))

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif token_auth:
            log.info("[%s] Reusing previous token authorization" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)
            if not username and not password and not passkey:
                for addon_name in ('script.magnetic.%s' % provider, 'script.magnetic.%s-mc' % provider):
                    for setting in ('username', 'password'):
                        try:
                            value = xbmcaddon.Addon(addon_name).getSetting(setting)
                            set_setting('%s_%s' % (provider, setting), value)
                            if setting == 'username':
                                username = value
                            if setting == 'password':
                                password = value
                        except:
                            pass

            if passkey:
                logged_in = True
                client.passkey = passkey
                url_search = url_search.replace('PASSKEY', passkey)

            elif 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace('USERNAME', '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'alphareign':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_name = re.search(r'name="csrf_name" value="(.*?)"', client.content)
                        csrf_value = re.search(r'name="csrf_value" value="(.*?)"', client.content)
                        if csrf_name and csrf_value:
                            login_object = login_object.replace("CSRF_NAME", '"%s"' % csrf_name.group(1))
                            login_object = login_object.replace("CSRF_VALUE", '"%s"' % csrf_value.group(1))
                        else:
                            logged_in = True
                if provider == 'hd-torrents':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        if csrf_token:
                            login_object = login_object.replace('CSRF_TOKEN', '"%s"' % csrf_token.group(1))
                        else:
                            logged_in = True

                if 'token_auth' in definition:
                    # log.debug("[%s] logging in with: %s" % (provider, login_object))
                    if client.open(definition['root_url'] + definition['token_auth'], post_data=eval(login_object)):
                        try:
                            token_data = json.loads(client.content)
                        except:
                            log.error('%s: Failed to get token from %s' % (provider, definition['token_auth']))
                            return filtering.results
                        log.debug("Token response for %s: %s" % (provider, repr(token_data)))
                        if 'token' in token_data:
                            client.token = token_data['token']
                            log.debug("Auth token for %s: %s" % (provider, repr(client.token)))
                        else:
                            log.error('%s: Unable to get auth token for %s' % (provider, repr(url_search)))
                            return filtering.results
                        log.info('[%s] Token auth successful' % provider)
                        token_auth = True
                    else:
                        log.error("[%s] Token auth failed with response: %s" % (provider, repr(client.content)))
                        return filtering.results
                elif not logged_in and client.login(definition['root_url'] + definition['login_path'],
                                                    eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider, repr(client.content))
                    return filtering.results

                if logged_in:
                    if provider == 'hd-torrents':
                        client.open(definition['root_url'] + '/torrents.php')
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        url_search = url_search.replace("CSRF_TOKEN", csrf_token.group(1))

        log.info("> %s search URL: %s" % (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'), post_data=payload, get_data=data)
        filtering.results.extend(
            generate_payload(provider,
                             generator(provider, client),
                             filtering,
                             verify_name,
                             verify_size))

    return filtering.results
def extract_torrents(provider, client):
    """ Main torrent extraction generator for non-API based providers

    Args:
        provider (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    definition = definitions[provider]
    log.debug("Extracting torrents from %s using definitions: %s" % (provider, repr(definition)))

    if not client.content:
        raise StopIteration

    dom = Html().feed(client.content)

    row_search = "dom." + definition['parser']['row']
    name_search = definition['parser']['name']
    torrent_search = definition['parser']['torrent']
    info_hash_search = definition['parser']['infohash']
    size_search = definition['parser']['size']
    seeds_search = definition['parser']['seeds']
    peers_search = definition['parser']['peers']

    log.debug("[%s] Parser: %s" % (provider, repr(definition['parser'])))

    q = Queue()
    threads = []
    needs_subpage = 'subpage' in definition and definition['subpage']

    if needs_subpage:
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash):
            try:
                log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" % (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey

            if get_setting("use_cloudhole", bool):
                subclient.clearance = get_setting('clearance')
                subclient.user_agent = get_setting('user_agent')

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'))

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' % (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error("[%s] Subpage extraction for %s failed with: %s" % (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)

    if not dom:
        raise StopIteration

    for item in eval(row_search):
        if not item:
            continue
        name = eval(name_search)
        torrent = eval(torrent_search) if torrent_search else ""
        size = eval(size_search) if size_search else ""
        seeds = eval(seeds_search) if seeds_search else ""
        peers = eval(peers_search) if peers_search else ""
        info_hash = eval(info_hash_search) if info_hash_search else ""

        # Pass client cookies with torrent if private
        if (definition['private'] or get_setting("use_cloudhole", bool)) and not torrent.startswith('magnet'):
            user_agent = USER_AGENT
            if get_setting("use_cloudhole", bool):
                user_agent = get_setting("user_agent")

            if client.passkey:
                torrent = torrent.replace('PASSKEY', client.passkey)
            elif client.token:
                headers = {'Authorization': client.token, 'User-Agent': user_agent}
                log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
            else:
                log.debug("[%s] Cookies: %s" % (provider, repr(client.cookies())))
                parsed_url = urlparse(definition['root_url'])
                cookie_domain = '{uri.netloc}'.format(uri=parsed_url).replace('www.', '')
                cookies = []
                log.debug("[%s] cookie_domain: %s" % (provider, cookie_domain))
                for cookie in client._cookies:
                    log.debug("[%s] cookie for domain: %s (%s=%s)" % (provider, cookie.domain, cookie.name, cookie.value))
                    if cookie_domain in cookie.domain:
                        cookies.append(cookie)
                if cookies:
                    headers = {'Cookie': ";".join(["%s=%s" % (c.name, c.value) for c in cookies]),
                               'User-Agent': user_agent}
                    log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                    torrent = append_headers(torrent, headers)
                    log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))

        if name and torrent and needs_subpage:
            if not torrent.startswith('http'):
                torrent = definition['root_url'] + torrent.encode('utf-8')
            t = Thread(target=extract_subpage, args=(q, name, torrent, size, seeds, peers, info_hash))
            threads.append(t)
        else:
            yield (name, info_hash, torrent, size, seeds, peers)

    if needs_subpage:
        log.debug("[%s] Starting subpage threads..." % provider)
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        log.debug("[%s] Threads returned: %s" % (provider, repr(threads)))

        for i in range(q.qsize()):
            ret = q.get_nowait()
            log.debug("[%s] Queue %d got: %s" % (provider, i, repr(ret)))
            yield ret
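# Hedged sketch of the eval-based 'parser' block consumed above: 'row' is
# prefixed with "dom." and evaluated once against the page DOM, while the
# other expressions are evaluated per row with ``item`` in scope. The
# provider ID and every selector expression below are invented placeholders;
# the real DOM API is whatever this repo's Html() class exposes.
definitions['example']['parser'] = {
    'row': "find_all('tr')",  # eval'd as dom.find_all('tr')
    'name': "item.find_all('a')[0].text()",
    'torrent': "item.find_all('a')[0].attr('href')",
    'infohash': "",  # empty expression -> field defaults to ""
    'size': "item.find_all('td')[2].text()",
    'seeds': "item.find_all('td')[3].text()",
    'peers': "item.find_all('td')[4].text()",
}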
def open(self, url, language='en', post_data=None, get_data=None):
    """ Opens a connection to a webpage and saves its HTML content in ``self.content``

    Args:
        url (str): The URL to open
        language (str): The language code for the ``Content-Language`` header
        post_data (dict): POST data for the request
        get_data (dict): GET data for the request
    """
    if not post_data:
        post_data = {}
    if get_data:
        url += '?' + urlencode(get_data)

    log.debug("Opening URL: %s" % repr(url))
    result = False

    data = urlencode(post_data) if len(post_data) > 0 else None
    req = urllib2.Request(url, data)

    self._read_cookies(url)
    log.debug("Cookies for %s: %s" % (repr(url), repr(self._cookies)))

    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cookies))
    req.add_header('User-Agent', self.user_agent)
    req.add_header('Content-Language', language)
    req.add_header("Accept-Encoding", "gzip")
    req.add_header("Origin", url)
    req.add_header("Referer", url)

    try:
        self._good_spider()
        with closing(opener.open(req)) as response:
            self.headers = response.headers
            self._save_cookies()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                self.content = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(response.read())
            else:
                self.content = response.read()

            charset = response.headers.getparam('charset')
            if not charset:
                match = re.search("""<meta(?!\s*(?:name|value)\s*=)[^>]*?charset\s*=[\s"']*([^\s"'/>]*)""", self.content)
                if match:
                    charset = match.group(1)
            if charset and charset.lower() == 'utf-8':
                charset = 'utf-8-sig'  # Changing to utf-8-sig to remove BOM if found on decode from utf-8
            if charset:
                log.debug('Decoding charset from %s for %s' % (charset, repr(url)))
                self.content = self.content.decode(charset, 'replace')

            self.status = response.getcode()
        result = True

    except urllib2.HTTPError as e:
        self.status = e.code
        log.warning("Status for %s : %s" % (repr(url), str(self.status)))
    except urllib2.URLError as e:
        self.status = repr(e.reason)
        log.warning("Status for %s : %s" % (repr(url), self.status))
    except Exception as e:
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))
        log.debug("Status for %s : %s" % (repr(url), str(self.status)))

    return result
provider_names = []
provider_results = []
available_providers = 0
request_time = time.time()

timeout = get_setting("timeout", int)
auto_timeout = get_setting("auto_timeout", bool)
if auto_timeout:
    quasar_addon = xbmcaddon.Addon(id='plugin.video.quasar')
    if quasar_addon:
        if quasar_addon.getSetting('custom_provider_timeout_enabled') == "true":
            timeout = int(quasar_addon.getSetting('custom_provider_timeout')) - 2
            log.debug("Using timeout from Quasar: %d seconds" % timeout)
        else:
            timeout = 9
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]

    try:
        matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
        if matches:
            result = matches[0]
            log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
            return result

        matches = re.findall('http(.*?).torrent["\']', content)
        if matches:
            result = 'http' + matches[0] + '.torrent'
            result = result.replace('torcache.net', 'itorrents.org')
            log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
            return result

        matches = re.findall('/download\?token=[A-Za-z0-9%]+', content)
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link with token: %s' % (provider, repr(result)))
            return result

        matches = re.findall('"(/download/[A-Za-z0-9]+)"', content)
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link: %s' % (provider, repr(result)))
            return result

        matches = re.findall('/torrents/download/\?id=[a-z0-9-_.]+', content)  # t411
        if matches:
            result = definition['root_url'] + matches[0]
            log.debug('[%s] Matched download link with an ID: %s' % (provider, repr(result)))
            return result

        matches = re.findall('\: ([A-Fa-f0-9]{40})', content)
        if matches:
            result = "magnet:?xt=urn:btih:" + matches[0]
            log.debug('[%s] Matched magnet info_hash search: %s' % (provider, repr(result)))
            return result
    except:
        pass

    return None
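# Hedged illustration of the final fallback above: a bare 40-character hex
# info hash after a colon gets rewritten into a magnet link. The page text
# below is invented.
sample = "Info hash: 0123456789ABCDEF0123456789ABCDEF01234567"
found = re.findall('\: ([A-Fa-f0-9]{40})', sample)[0]
magnet = "magnet:?xt=urn:btih:" + found
# -> "magnet:?xt=urn:btih:0123456789ABCDEF0123456789ABCDEF01234567"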
def _save_cookies(self):
    try:
        self._cookies.save(self._cookies_filename)
    except Exception as e:
        log.debug("Saving cookies error: %s" % repr(e))
def process_keywords(self, provider, text):
    """ Processes the query payload from a provider's keyword definitions

    Args:
        provider (str): Provider ID
        text (str): Keyword placeholders from definitions, i.e. ``{title}``

    Returns:
        str: Processed query keywords
    """
    keywords = self.read_keywords(text)

    for keyword in keywords:
        keyword = keyword.lower()
        if 'title' in keyword:
            title = self.info["title"]
            language = definitions[provider]['language']
            use_language = None
            if ':' in keyword:
                use_language = keyword.split(':')[1]
            if provider not in self.language_exceptions and \
               (use_language or self.kodi_language) and \
               'titles' in self.info and self.info['titles']:
                try:
                    if self.kodi_language and self.kodi_language in self.info['titles']:
                        use_language = self.kodi_language
                    if use_language not in self.info['titles']:
                        use_language = language
                    if use_language in self.info['titles'] and self.info['titles'][use_language]:
                        title = self.info['titles'][use_language]
                        title = self.normalize_name(title)
                        log.info("[%s] Using translated '%s' title %s" % (provider, use_language, repr(title)))
                        log.debug("[%s] Translated titles from Quasar: %s" % (provider, repr(self.info['titles'])))
                except Exception as e:
                    import traceback
                    log.error("%s failed with: %s" % (provider, repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))
            text = text.replace('{%s}' % keyword, title)

        if 'year' in keyword:
            text = text.replace('{%s}' % keyword, str(self.info["year"]))

        if 'season' in keyword:
            if '+' in keyword:
                keys = keyword.split('+')
                if keys[1] == "t411season":
                    season = str(t411season(self.info['season']))
                else:
                    season = str(self.info["season"] + get_int(keys[1]))
            elif ':' in keyword:
                keys = keyword.split(':')
                season = ('%%.%sd' % keys[1]) % self.info["season"]
            else:
                season = '%s' % self.info["season"]
            text = text.replace('{%s}' % keyword, season)

        if 'episode' in keyword:
            if '+' in keyword:
                keys = keyword.split('+')
                if keys[1] == "t411episode":
                    episode = str(t411episode(self.info['episode']))
                else:
                    episode = str(self.info["episode"] + get_int(keys[1]))
            elif ':' in keyword:
                keys = keyword.split(':')
                episode = ('%%.%sd' % keys[1]) % self.info["episode"]
            else:
                episode = '%s' % self.info["episode"]
            text = text.replace('{%s}' % keyword, episode)

    return text
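# Hedged walkthrough of the placeholder grammar handled above, for a
# hypothetical payload with title="some show", season=3, episode=4:
#
#   "{title} s{season:2}e{episode:2}"  ->  "some show s03e04"
#       ('{season:2}' expands through ('%.2d' % season), i.e. zero-padded)
#   "{episode+10}"                     ->  "14"
#       ('+N' adds an integer offset; '+t411episode' maps to a helper)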
def open(self, url, language='en', post_data=None, get_data=None):
    """ Opens a connection to a webpage and saves its HTML content in ``self.content``

    Args:
        url (str): The URL to open
        language (str): The language code for the ``Content-Language`` header
        post_data (dict): POST data for the request
        get_data (dict): GET data for the request
    """
    if not post_data:
        post_data = {}
    if get_data:
        url += '?' + urlencode(get_data)

    log.debug("Opening URL: %s" % repr(url))
    result = False

    data = urlencode(post_data) if len(post_data) > 0 else None
    req = urllib2.Request(url, data)

    self._read_cookies(url)
    log.debug("Cookies for %s: %s" % (repr(url), repr(self._cookies)))

    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cookies))
    req.add_header('User-Agent', self.user_agent)
    req.add_header('Content-Language', language)
    req.add_header("Accept-Encoding", "gzip")
    if self.token:
        req.add_header("Authorization", self.token)

    try:
        self._good_spider()
        with closing(opener.open(req)) as response:
            self.headers = response.headers
            self._save_cookies()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                self.content = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(response.read())
            else:
                self.content = response.read()

            charset = response.headers.getparam('charset')
            if not charset:
                match = re.search("""<meta(?!\s*(?:name|value)\s*=)[^>]*?charset\s*=[\s"']*([^\s"'/>]*)""", self.content)
                if match:
                    charset = match.group(1)
            if charset and charset.lower() == 'utf-8':
                charset = 'utf-8-sig'  # Changing to utf-8-sig to remove BOM if found on decode from utf-8
            if charset:
                log.debug('Decoding charset from %s for %s' % (charset, repr(url)))
                self.content = self.content.decode(charset, 'replace')

            self.status = response.getcode()
        result = True

    except urllib2.HTTPError as e:
        self.status = e.code
        log.warning("Status for %s : %s" % (repr(url), str(self.status)))
        if e.code == 403 or e.code == 503:
            log.warning("CloudFlared at %s, try enabling CloudHole" % url)
    except urllib2.URLError as e:
        self.status = repr(e.reason)
        log.warning("Status for %s : %s" % (repr(url), self.status))
    except Exception as e:
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))
        log.debug("Status for %s : %s" % (repr(url), str(self.status)))

    return result
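# Hedged usage sketch of the Client class, mirroring how process() drives
# it: open a URL, then read the decoded page and last status code. The URL
# is a placeholder.
client = Client()
if client.open("https://example.com/search?q=QUERY".encode('utf-8')):
    page = client.content  # decoded HTML, ready for Html().feed(page)
    code = client.status   # last HTTP status code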
def search(payload, method="general"): """ Main search entrypoint Args: payload (dict): Search payload from Quasar. method (str): Type of search, can be ``general``, ``movie``, ``show``, ``season`` or ``anime`` Returns: list: All filtered results in the format Quasar expects """ log.debug("Searching with payload (%s): %s" % (method, repr(payload))) if method == 'general': payload = {'title': payload} global request_time global provider_names global provider_results global available_providers provider_names = [] provider_results = [] available_providers = 0 request_time = time.time() providers = get_enabled_providers() if len(providers) == 0: notify(translation(32060), image=get_icon_path()) log.error("No providers enabled") return [] log.info( "Burstin' with %s" % ", ".join([definitions[provider]['name'] for provider in providers])) if get_setting("use_cloudhole", bool): clearance, user_agent = get_cloudhole_clearance(get_cloudhole_key()) set_setting('clearance', clearance) set_setting('user_agent', user_agent) if get_setting('kodi_language', bool): kodi_language = xbmc.getLanguage(xbmc.ISO_639_1) if not kodi_language: log.warning("Kodi returned empty language code...") elif 'titles' not in payload or not payload['titles']: log.info("No translations available...") elif payload['titles'] and kodi_language not in payload['titles']: log.info("No '%s' translation available..." % kodi_language) p_dialog = xbmcgui.DialogProgressBG() p_dialog.create('Quasar [COLOR FFFF6B00]Burst[/COLOR]', translation(32061)) for provider in providers: available_providers += 1 provider_names.append(definitions[provider]['name']) task = Thread(target=run_provider, args=(provider, payload, method)) task.start() providers_time = time.time() total = float(available_providers) # Exit if all providers have returned results or timeout reached, check every 100ms while time.time() - providers_time < timeout and available_providers > 0: timer = time.time() - providers_time log.debug("Timer: %ds / %ds" % (timer, timeout)) if timer > timeout: break message = translation( 32062 ) % available_providers if available_providers > 1 else translation( 32063) p_dialog.update(int((total - available_providers) / total * 100), message=message) time.sleep(0.25) p_dialog.close() del p_dialog if available_providers > 0: message = u', '.join(provider_names) message = message + translation(32064) log.warning(message.encode('utf-8')) notify(message, ADDON_ICON) log.debug("all provider_results: %s" % repr(provider_results)) filtered_results = apply_filters(provider_results) log.debug("all filtered_results: %s" % repr(filtered_results)) log.info("Providers returned %d results in %s seconds" % (len(filtered_results), round(time.time() - request_time, 2))) return filtered_results
def extract_from_api(provider, client):
    """ Main API parsing generator for API-based providers

    An almost clever API parser, mostly just for YTS, RARBG and T411

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    try:
        data = json.loads(client.content)
    except:
        data = []
    log.debug("[%s] JSON response from API: %s" % (provider, repr(data)))

    definition = definitions[provider]
    api_format = definition['api_format']

    results = []
    result_keys = api_format['results'].split('.')
    log.debug("%s result_keys: %s" % (provider, repr(result_keys)))
    for key in result_keys:
        if key in data:
            data = data[key]
        else:
            data = []
        # log.debug("%s nested results: %s" % (provider, repr(data)))
    results = data
    log.debug("%s results: %s" % (provider, repr(results)))

    if 'subresults' in api_format:
        from copy import deepcopy
        # A little too specific to YTS but who cares...
        for result in results:
            result['name'] = result[api_format['name']]
        subresults = []
        subresults_keys = api_format['subresults'].split('.')
        for key in subresults_keys:
            for result in results:
                if key in result:
                    for subresult in result[key]:
                        sub = deepcopy(result)
                        sub.update(subresult)
                        subresults.append(sub)
        results = subresults
        log.debug("%s with subresults: %s" % (provider, repr(results)))

    for result in results:
        if not result or not isinstance(result, dict):
            continue
        name = ''
        info_hash = ''
        torrent = ''
        size = ''
        seeds = ''
        peers = ''
        if 'name' in api_format:
            name = result[api_format['name']]
        if 'torrent' in api_format:
            torrent = result[api_format['torrent']]
            if 'download_path' in definition:
                torrent = definition['base_url'] + definition['download_path'] + torrent
            if client.token:
                user_agent = USER_AGENT
                if get_setting("use_cloudhole", bool):
                    user_agent = get_setting("user_agent")
                headers = {'Authorization': client.token, 'User-Agent': user_agent}
                log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
        if 'info_hash' in api_format:
            info_hash = result[api_format['info_hash']]
        if 'quality' in api_format:  # Again quite specific to YTS...
            name = "%s - %s" % (name, result[api_format['quality']])
        if 'size' in api_format:
            size = result[api_format['size']]
            if type(size) in (long, int):
                size = sizeof(size)
            elif type(size) in (str, unicode) and size.isdigit():
                size = sizeof(int(size))
        if 'seeds' in api_format:
            seeds = result[api_format['seeds']]
            if type(seeds) in (str, unicode) and seeds.isdigit():
                seeds = int(seeds)
        if 'peers' in api_format:
            peers = result[api_format['peers']]
            if type(peers) in (str, unicode) and peers.isdigit():
                peers = int(peers)
        yield (name, info_hash, torrent, size, seeds, peers)
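# Hedged sketch (not part of the original module): a minimal, hypothetical
# ``api_format`` mapping of the kind ``extract_from_api`` reads from a
# provider definition. The right-hand-side key names are illustrative only.
_EXAMPLE_API_FORMAT = {
    'results': 'data.movies',    # dotted path walked into the JSON response
    'subresults': 'torrents',    # optional: per-result list to flatten (YTS-style)
    'name': 'title',
    'torrent': 'url',
    'quality': 'quality',
    'info_hash': 'hash',
    'size': 'size_bytes',
    'seeds': 'seeds',
    'peers': 'peers',
}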
def process_keywords(self, provider, text):
    """ Processes the query payload from a provider's keyword definitions

    Args:
        provider (str): Provider ID
        text     (str): Keyword placeholders from definitions, ie. {title}

    Returns:
        str: Processed query keywords
    """
    keywords = self.read_keywords(text)

    for keyword in keywords:
        keyword = keyword.lower()
        if 'title' in keyword:
            title = self.info["title"]
            language = definitions[provider]['language']
            use_language = None
            if ':' in keyword:
                use_language = keyword.split(':')[1]
            if provider not in self.language_exceptions and \
               (use_language or self.kodi_language) and \
               'titles' in self.info and self.info['titles']:
                try:
                    if self.kodi_language and self.kodi_language in self.info['titles']:
                        use_language = self.kodi_language
                    if use_language not in self.info['titles']:
                        use_language = language
                    if use_language in self.info['titles'] and self.info['titles'][use_language]:
                        title = self.info['titles'][use_language]
                        title = self.normalize_name(title)
                        log.info("[%s] Using translated '%s' title %s" % (provider, use_language, repr(title)))
                        log.debug("[%s] Translated titles from Quasar: %s" % (provider, repr(self.info['titles'])))
                except Exception as e:
                    import traceback
                    log.error("%s failed with: %s" % (provider, repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))
            text = text.replace('{%s}' % keyword, title)

        if 'year' in keyword:
            text = text.replace('{%s}' % keyword, str(self.info["year"]))

        if 'season' in keyword:
            if '+' in keyword:
                keys = keyword.split('+')
                if keys[1] == "t411season":
                    season = str(t411season(self.info['season']))
                else:
                    season = str(self.info["season"] + get_int(keys[1]))
            elif ':' in keyword:
                keys = keyword.split(':')
                season = ('%%.%sd' % keys[1]) % self.info["season"]
            else:
                season = '%s' % self.info["season"]
            text = text.replace('{%s}' % keyword, season)

        if 'episode' in keyword:
            if '+' in keyword:
                keys = keyword.split('+')
                if keys[1] == "t411episode":
                    episode = str(t411episode(self.info['episode']))
                else:
                    episode = str(self.info["episode"] + get_int(keys[1]))
            elif ':' in keyword:
                keys = keyword.split(':')
                episode = ('%%.%sd' % keys[1]) % self.info["episode"]
            else:
                episode = '%s' % self.info["episode"]
            text = text.replace('{%s}' % keyword, episode)

    return text
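# Hedged sketch (not part of the original module): how keyword placeholders
# expand, assuming a Filtering instance ``f`` whose ``info`` was populated as
# {'title': 'Some Show', 'season': 1, 'episode': 2, 'year': 2016}, and a
# provider ID that actually exists in ``definitions``:
#
#   f.process_keywords('some_provider', '{title} s{season:2}e{episode:2}')
#   -> 'Some Show s01e02'   # '{season:2}' zero-pads the season to two digits
#   f.process_keywords('some_provider', '{title} {year}')
#   -> 'Some Show 2016'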
def extract_torrents(provider, client):
    """ Main torrent extraction generator for non-API based providers

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    definition = definitions[provider]
    log.debug("Extracting torrents from %s using definitions: %s" % (provider, repr(definition)))

    if not client.content:
        raise StopIteration

    dom = Html().feed(client.content)

    row_search = get_parser(definition, "row")
    name_search = get_parser(definition, "name")
    torrent_search = get_parser(definition, "torrent")
    info_hash_search = get_parser(definition, "infohash")
    size_search = get_parser(definition, "size")
    seeds_search = get_parser(definition, "seeds")
    peers_search = get_parser(definition, "peers")
    referer_search = get_parser(definition, "referer")
    log.debug("[%s] Parser: %s" % (provider, repr(definition['parser'])))

    q = Queue()
    threads = []
    needs_subpage = 'subpage' in definition and definition['subpage']

    if needs_subpage:
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash, referer):
            try:
                log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" % (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey

            headers = {}
            if "subpage_mode" in definition:
                if definition["subpage_mode"] == "xhr":
                    headers['X-Requested-With'] = 'XMLHttpRequest'
                    headers['Content-Language'] = ''
            if referer:
                headers['Referer'] = referer
            subclient.headers = headers

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'))

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' % (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error("[%s] Subpage extraction for %s failed with: %s" % (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)

    if not dom:
        raise StopIteration

    for item in eval(row_search):
        if not item:
            continue
        name = eval(name_search) if name_search else ""
        torrent = eval(torrent_search) if torrent_search else ""
        size = eval(size_search) if size_search else ""
        seeds = eval(seeds_search) if seeds_search else ""
        peers = eval(peers_search) if peers_search else ""
        info_hash = eval(info_hash_search) if info_hash_search else ""
        referer = eval(referer_search) if referer_search else ""

        if 'magnet:?' in torrent:
            torrent = torrent[torrent.find('magnet:?'):]

        # Pass client cookies with torrent if private
        if definition['private'] and not torrent.startswith('magnet'):
            user_agent = USER_AGENT
            if client.passkey:
                torrent = torrent.replace('PASSKEY', client.passkey)
            elif client.token:
                headers = {'Authorization': client.token, 'User-Agent': user_agent}
                log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
            else:
                log.debug("[%s] Cookies: %s" % (provider, repr(client.cookies())))
                parsed_url = urlparse(definition['root_url'])
                cookie_domain = '{uri.netloc}'.format(uri=parsed_url)
                cookie_domain = re.sub(r'www\d*\.', '', cookie_domain)
                cookies = []
                for cookie in client._cookies:
                    if cookie_domain in cookie.domain:
                        cookies.append(cookie)
                headers = {'User-Agent': user_agent}
                if cookies:
                    headers['Cookie'] = ";".join(["%s=%s" % (c.name, c.value) for c in cookies])
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))

        if name and torrent and needs_subpage and not torrent.startswith('magnet'):
            if not torrent.startswith('http'):
                torrent = definition['root_url'] + torrent.encode('utf-8')
            t = Thread(target=extract_subpage, args=(q, name, torrent, size, seeds, peers, info_hash, referer))
            threads.append(t)
        else:
            yield (name, info_hash, torrent, size, seeds, peers)

    if needs_subpage:
        log.debug("[%s] Starting subpage threads..." % provider)
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        log.debug("[%s] Threads returned: %s" % (provider, repr(threads)))

        for i in range(q.qsize()):
            ret = q.get_nowait()
            log.debug("[%s] Queue %d got: %s" % (provider, i, repr(ret)))
            yield ret
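# Hedged usage sketch (not part of the original module): driving the
# ``extract_torrents`` generator directly. The provider ID and search URL are
# hypothetical; the ID would have to exist in ``definitions``.
def _example_extract_torrents():
    client = Client()
    if client.open("https://example-tracker.org/search?q=ubuntu".encode('utf-8')):
        for name, info_hash, torrent, size, seeds, peers in extract_torrents('some_provider', client):
            log.debug("%s | %s | %s seeds / %s peers" % (name, size, seeds, peers))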
def process(provider, generator, filtering, verify_name=True, verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]

    client = Client()
    token = None
    logged_in = False
    token_auth = False

    if get_setting("use_cloudhole", bool):
        client.clearance = get_setting('clearance')
        client.user_agent = get_setting('user_agent')

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
    language_exceptions = get_setting('language_exceptions')
    if language_exceptions.strip().lower():
        filtering.language_exceptions = re.split(r',\s?', language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras: %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        log.debug("[%s] After keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # MagnetDL fix...
        url_search = url_search.replace('FIRSTLETTER', query[:1])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("- %s query: %s" % (provider, repr(query)))
        log.debug("-- %s url_search before token: %s" % (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("----%s filtering with post_data: %s" % (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if token:
            log.info('[%s] Reusing existing token' % provider)
            url_search = url_search.replace('TOKEN', token)
        elif 'token' in definition:
            token_url = definition['base_url'] + definition['token']
            log.debug("Getting token for %s at %s" % (provider, repr(token_url)))
            client.open(token_url.encode('utf-8'))
            try:
                token_data = json.loads(client.content)
            except:
                log.error('%s: Failed to get token for %s' % (provider, repr(url_search)))
                return filtering.results
            log.debug("Token response for %s: %s" % (provider, repr(token_data)))
            if 'token' in token_data:
                token = token_data['token']
                log.debug("Got token for %s: %s" % (provider, repr(token)))
                url_search = url_search.replace('TOKEN', token)
                time.sleep(2)
            else:
                log.warning('%s: Unable to get token for %s' % (provider, repr(url_search)))

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif token_auth:
            log.info("[%s] Reusing previous token authorization" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)
            if not username and not password and not passkey:
                for addon_name in ('script.magnetic.%s' % provider, 'script.magnetic.%s-mc' % provider):
                    for setting in ('username', 'password'):
                        try:
                            value = xbmcaddon.Addon(addon_name).getSetting(setting)
                            set_setting('%s_%s' % (provider, setting), value)
                            if setting == 'username':
                                username = value
                            if setting == 'password':
                                password = value
                        except:
                            pass

            if passkey:
                logged_in = True
                client.passkey = passkey
                url_search = url_search.replace('PASSKEY', passkey)

            elif 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace('USERNAME', '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'alphareign':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_name = re.search(r'name="csrf_name" value="(.*?)"', client.content)
                        csrf_value = re.search(r'name="csrf_value" value="(.*?)"', client.content)
                        if csrf_name and csrf_value:
                            login_object = login_object.replace("CSRF_NAME", '"%s"' % csrf_name.group(1))
                            login_object = login_object.replace("CSRF_VALUE", '"%s"' % csrf_value.group(1))
                        else:
                            logged_in = True
                if provider == 'hd-torrents':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        if csrf_token:
                            login_object = login_object.replace('CSRF_TOKEN', '"%s"' % csrf_token.group(1))
                        else:
                            logged_in = True

                if 'token_auth' in definition:
                    # log.debug("[%s] logging in with: %s" % (provider, login_object))
                    if client.open(definition['root_url'] + definition['token_auth'], post_data=eval(login_object)):
                        try:
                            token_data = json.loads(client.content)
                        except:
                            log.error('%s: Failed to get token from %s' % (provider, definition['token_auth']))
                            return filtering.results
                        log.debug("Token response for %s: %s" % (provider, repr(token_data)))
                        if 'token' in token_data:
                            client.token = token_data['token']
                            log.debug("Auth token for %s: %s" % (provider, repr(client.token)))
                        else:
                            log.error('%s: Unable to get auth token for %s' % (provider, repr(url_search)))
                            return filtering.results
                        log.info('[%s] Token auth successful' % provider)
                        token_auth = True
                    else:
                        log.error("[%s] Token auth failed with response: %s" % (provider, repr(client.content)))
                        return filtering.results
                elif not logged_in and client.login(definition['root_url'] + definition['login_path'],
                                                    eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s" % (provider, client.status))
                    log.debug("[%s] Failed login content: %s" % (provider, repr(client.content)))
                    return filtering.results

                if logged_in:
                    if provider == 'hd-torrents':
                        client.open(definition['root_url'] + '/torrents.php')
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        url_search = url_search.replace("CSRF_TOKEN", csrf_token.group(1))

        log.info("> %s search URL: %s" % (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'), post_data=payload, get_data=data)
        filtering.results.extend(
            generate_payload(provider,
                             generator(provider, client),
                             filtering,
                             verify_name,
                             verify_size))
    return filtering.results
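# Hedged usage sketch (not part of the original module): wiring ``process`` to
# a provider's extraction generator. The provider ID is hypothetical, and the
# Filtering setup assumes a ``use_general``-style method alongside the
# ``use_anime`` one defined in this codebase.
def _example_process():
    filtering = Filtering()
    filtering.use_general('some_provider', {'title': 'ubuntu 16.04'})
    results = process('some_provider', extract_torrents, filtering)
    log.debug("Got %d filtered results" % len(results))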
import re
import ssl
import sys
import json
import urllib2
from time import sleep
from urlparse import urlparse
from contextlib import closing
from quasar.provider import log, get_setting
from cookielib import Cookie, LWPCookieJar
from urllib import urlencode
from utils import encode_dict
from xbmc import translatePath

try:
    ssl._create_default_https_context = ssl._create_unverified_context
except:
    log.debug("Skipping SSL workaround due to old Python version")
    pass

USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 " \
             "(KHTML, like Gecko) Chrome/53.0.2785.21 Safari/537.36"

try:
    PATH_TEMP = translatePath("special://temp").decode(sys.getfilesystemencoding(), 'ignore')
except:
    PATH_TEMP = translatePath("special://temp").decode('utf-8')

if get_setting("use_opennic_dns", bool):
    import socket
    prv_getaddrinfo = socket.getaddrinfo

    dns_cache = {
        ('nnm-club.lib', 80, 0, 1): [(2, 1, 0, '', ('81.17.30.22', 80))],