def use_season(self, provider, info):
        """ Setup method to define season search parameters

        Args:
            provider (str): Provider ID
            info (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_opennic_dns", bool) and "opennic_dns_alias" in definition:
            definition = get_alias(definition, definition["opennic_dns_alias"])
        season_query = definition['season_query'] if definition['season_query'] else ''
        log.debug("Season URL: %s%s" % (definition['base_url'], season_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_seasons'))
            self.max_size = get_float(get_setting('max_size_seasons'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], season_query)
        if definition['season_keywords']:
            self.queries = ["%s" % definition['season_keywords']]
            self.extras = ["%s" % definition['season_extra'] if definition['season_extra'] else '']
            if definition['season_keywords2']:
                self.queries.append("%s" % definition['season_keywords2'])
                self.extras.append("%s" % definition['season_extra2'] if definition['season_extra2'] else '')
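To make the pairing explicit: `queries` and `extras` stay index-aligned, one extra per query. A minimal standalone sketch with a hypothetical definition fragment (the real dicts live in the addon's provider definitions):

definition = {
    'season_keywords': '{title} season {season}',
    'season_extra': '',
    'season_keywords2': '{title} s{season:2}',
    'season_extra2': '',
}

queries = [definition['season_keywords']]
extras = [definition['season_extra'] if definition['season_extra'] else '']
if definition['season_keywords2']:
    queries.append(definition['season_keywords2'])
    extras.append(definition['season_extra2'] if definition['season_extra2'] else '')

# queries[i] is always searched together with extras[i]
assert len(queries) == len(extras)
print(queries)  # ['{title} season {season}', '{title} s{season:2}']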
Example #2
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    max_results = get_setting('max_results', int)
    sorted_results = sorted(results, key=lambda r: (r['seeds']), reverse=True)
    if len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info(">> %s returned %2d results in %.1f seconds%s" %
             (definition['name'].rjust(longest), len(results),
              round(time.time() - request_time, 2),
              (", sending %d best ones" %
               max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
Example #3
    def use_general(self, provider, payload):
        """ Setup method to define general search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_public_dns",
                       bool) and "public_dns_alias" in definition:
            definition = get_alias(definition, definition["public_dns_alias"])

        general_query = definition['general_query'] if definition[
            'general_query'] else ''
        log.debug("General URL: %s%s" %
                  (definition['base_url'], general_query))
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], general_query)
        if definition['general_keywords']:
            self.queries = [definition['general_keywords']]
            self.extras = [definition['general_extra']]
        if 'general_keywords_fallback' in definition and definition['general_keywords_fallback']:
            self.queries.append(definition['general_keywords_fallback'])
            self.extras.append('-')
    def __init__(self, info=None):
        self._counter = 0
        self._cookies_filename = ''
        self._cookies = LWPCookieJar()
        self.user_agent = USER_AGENT
        self.info = info
        self.proxy_type = None
        self.proxy_url = None
        self.content = None
        self.status = None
        self.headers = dict()

        if get_setting("use_elementum_proxy", bool):
            elementum_addon = xbmcaddon.Addon(id='plugin.video.elementum')
            if elementum_addon and elementum_addon.getSetting('internal_proxy_enabled') == "true":
                self.proxy_url = "{0}://{1}:{2}".format("http", "127.0.0.1", "65222")
                if info and "internal_proxy_url" in info:
                    log.debug("Use Internal Elementum Proxy")
                    self.proxy_url = info["internal_proxy_url"]
            if elementum_addon.getSetting("proxy_enabled") == "true" and get_setting("use_proxy_setting", bool):
                self.proxy_type = int(elementum_addon.getSetting("proxy_type"))
                log.debug("Use users proxy from elementum settings: {0}".format(proxy_types[self.proxy_type]))
                prx_host = elementum_addon.getSetting("proxy_host")
                prx_port = elementum_addon.getSetting("proxy_port")
                self.proxy_url = "{0}://{1}:{2}".format(proxy_types[self.proxy_type], prx_host, prx_port)
Example #5
    def use_season(self, provider, info):
        """ Setup method to define season search parameters

        Args:
            provider (str): Provider ID
            info (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_public_dns",
                       bool) and "public_dns_alias" in definition:
            definition = get_alias(definition, definition["public_dns_alias"])

        season_query = definition[
            'season_query'] if 'season_query' in definition and definition[
                'season_query'] else ''
        log.debug("[%s] Season URL: %s%s" %
                  (provider, definition['base_url'], season_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_seasons'))
            self.max_size = get_float(get_setting('max_size_seasons'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], season_query)

        self.collect_queries('season', definition)
Example #6
def get_enabled_providers(method):
    """ Utility method to get all enabled provider IDs

    Args:
        method (str): Search type: ``general``, ``movie``, or anything else for shows

    Returns:
        list: All available enabled provider IDs
    """
    results = []
    provider_type = "2"
    if method == "general":
        provider_type = "0"
    elif method == "movie":
        provider_type = "1"
    for provider in definitions:
        if 'enabled' in definitions[provider] and not definitions[provider]['enabled']:
            continue

        # Custom providers are always searched, and only added once
        if 'custom' in definitions[provider] and definitions[provider]['custom']:
            results.append(provider)
            continue

        if get_setting('use_%s' % provider, bool):
            contains = get_setting('%s_contains' % provider, choices=('All', 'Movies', 'Shows'))
            if not contains or contains == "0":
                results.append(provider)
            elif contains == provider_type:
                results.append(provider)
    return results
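A rough standalone sketch of the same selection logic, with `definitions` and `get_setting` replaced by stub data (all names and values below are illustrative):

# Stub data; real values come from the addon's settings and provider definitions
definitions = {
    'providerA': {'enabled': True},
    'providerB': {'enabled': False},                 # disabled outright
    'providerC': {'enabled': True, 'custom': True},  # custom: always searched
}
settings = {
    'use_providerA': True, 'providerA_contains': '1',  # '1' == Movies
    'use_providerB': True, 'providerB_contains': '0',
    'use_providerC': False,
}

def get_enabled(method):
    provider_type = {'general': '0', 'movie': '1'}.get(method, '2')
    results = []
    for provider in definitions:
        if not definitions[provider].get('enabled', True):
            continue
        if definitions[provider].get('custom'):
            results.append(provider)
        elif settings.get('use_%s' % provider):
            contains = settings.get('%s_contains' % provider, '0')
            if contains in ('0', provider_type):
                results.append(provider)
    return results

print(get_enabled('movie'))  # ['providerA', 'providerC']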
Example #7
    def use_anime(self, provider, info):
        """ Setup method to define anime search parameters

        Args:
            provider (str): Provider ID
            info (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_public_dns",
                       bool) and "public_dns_alias" in definition:
            definition = get_alias(definition, definition["public_dns_alias"])

        anime_query = definition[
            'anime_query'] if 'anime_query' in definition and definition[
                'anime_query'] else ''
        log.debug("[%s] Anime URL: %s%s" %
                  (provider, definition['base_url'], anime_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], anime_query)
        if self.info['absolute_number']:
            self.info['episode'] = self.info['absolute_number']

        self.collect_queries('anime', definition)
Example #8
    def use_movie(self, provider, payload):
        """ Setup method to define movie search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_public_dns",
                       bool) and "public_dns_alias" in definition:
            definition = get_alias(definition, definition["public_dns_alias"])

        movie_query = definition['movie_query'] if definition[
            'movie_query'] else ''
        log.debug("Movies URL: %s%s" % (definition['base_url'], movie_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_movies'))
            self.max_size = get_float(get_setting('max_size_movies'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], movie_query)
        if definition['movie_keywords']:
            self.queries = ["%s" % definition['movie_keywords']]
            self.extras = ["%s" % definition['movie_extra']]
        if 'movie_keywords2' in definition and definition['movie_keywords2']:
            self.queries.append("%s" % definition['movie_keywords2'])
            self.extras.append("%s" % definition['movie_extra2'] if definition['movie_extra2'] else '')
        if 'movie_keywords_fallback' in definition and definition['movie_keywords_fallback']:
            self.queries.append(definition['movie_keywords_fallback'])
            self.extras.append('-')
Example #9
    def use_episode(self, provider, payload):
        """ Setup method to define episode search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_public_dns", bool) and "public_dns_alias" in definition:
            definition = get_alias(definition, definition["public_dns_alias"])

        show_query = definition['show_query'] if definition['show_query'] else ''
        log.debug("Episode URL: %s%s" % (definition['base_url'], show_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], show_query)
        if definition['tv_keywords']:
            self.queries = ["%s" % definition['tv_keywords']]
            self.extras = ["%s" % definition['tv_extra'] if definition['tv_extra'] else '']
            # TODO this sucks, tv_keywords should be a list from the start..
            if definition['tv_keywords2']:
                self.queries.append(definition['tv_keywords2'])
                self.extras.append(definition['tv_extra2'] if definition['tv_extra2'] else '')
    def verify(self, provider, name, size):
        """ Main filtering method to match torrent names, resolutions, release types and size filters

        Args:
            provider (str): Provider ID
            name     (str): Torrent name
            size     (str): Arbitrary torrent size to be parsed

        Returns:
            bool: ``True`` if torrent name passed filtering, ``False`` otherwise.
        """
        if not name:
            self.reason = '[%s] %s' % (provider, '*** Empty name ***')
            return False

        name = normalize_string(name)
        if self.filter_title and self.title:
            self.title = normalize_string(self.title)

        self.reason = "[%s] %70s ***" % (provider, name)

        if self.filter_resolutions and get_setting('require_resolution', bool):
            resolution = self.determine_resolution(name)[0]
            if resolution not in self.resolutions_allow:
                self.reason += " Resolution not allowed ({0})".format(
                    resolution)
                return False

        if self.filter_title:
            if not all(word in name for word in re.split(r'\s', self.title)):
                self.reason += " Name mismatch"
                return False

        if self.require_keywords and get_setting('require_keywords', bool):
            for required in self.require_keywords:
                if not self.included(name, keys=[required]):
                    self.reason += " Missing required keyword"
                    return False

        if get_setting('require_release_type', bool) and not self.included_rx(name, keys=self.releases_allow):
            self.reason += " Missing release type keyword"
            return False

        if get_setting('require_release_type', bool) and self.included_rx(name, keys=self.releases_deny):
            self.reason += " Blocked by release type keyword"
            return False

        if get_setting('require_size', bool) and size and not self.in_size_range(size):
            self.reason += " Size out of range ({0})".format(size)
            return False

        return True
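The name-mismatch branch boils down to an all-words substring check. A minimal illustration, lowercasing both sides for simplicity (the real code relies on normalize_string plus exact substring checks):

import re

def title_matches(title, torrent_name):
    # Every whitespace-separated word of the query title must appear in the name;
    # plain substring containment, so dotted release names still match
    title, torrent_name = title.lower(), torrent_name.lower()
    return all(word in torrent_name for word in re.split(r'\s+', title))

print(title_matches('Big Buck Bunny', 'Big.Buck.Bunny.2008.1080p.x264'))  # True
print(title_matches('Big Buck Bunny', 'Big.Bunny.2008.1080p.x264'))       # False ('buck' missing)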
def cleanup_results(results_list):
    """ Remove duplicate results, hash results without an info_hash, and sort by seeders

    Args:
        results_list (list): Results to clean-up

    Returns:
        list: De-duplicated, hashed and sorted results
    """
    if len(results_list) == 0:
        return []

    hashes = []
    filtered_list = []
    allow_noseeds = get_setting('allow_noseeds', bool)
    for result in results_list:
        if not result['seeds'] and not allow_noseeds:
            continue

        provider_name = result['provider'][result['provider'].find(']')+1:result['provider'].find('[/')]

        if not result['uri']:
            if not result['name']:
                continue
            try:
                log.warning('[%s] No URI for %s' % (provider_name, repr(result['name'])))
            except Exception as e:
                import traceback
                log.warning("%s logging failed with: %s" % (provider_name, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))
            continue

        hash_ = result['info_hash'].upper()

        if not hash_:
            if result['uri'] and result['uri'].startswith('magnet'):
                hash_ = Magnet(result['uri']).info_hash.upper()
            else:
                hash_ = hashlib.md5(result['uri']).hexdigest()

        try:
            log.debug("[%s] Hash for %s: %s" % (provider_name, repr(result['name']), hash_))
        except Exception as e:
            import traceback
            log.warning("%s logging failed with: %s" % (result['provider'], repr(e)))
            map(log.debug, traceback.format_exc().split("\n"))

        if not any(existing == hash_ for existing in hashes):
            filtered_list.append(result)
            hashes.append(hash_)

    if (get_setting("sort_by_resolution", bool)):
        log.debug("[EXPEREMENTAL] Start last sorting list by resolution of all result before send to Elementum")
        filtered_list = sorted(filtered_list, key=lambda r: (get_int(r.pop('resolution'))), reverse=True)
    else:
        filtered_list = sorted(filtered_list, key=lambda r: (get_int(r['seeds'])), reverse=True)

    return filtered_list
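Stripped to its core, the de-duplication keys every result on an upper-cased info hash, falling back to hashing the URI; a runnable sketch with made-up results:

import hashlib

results = [
    {'name': 'A', 'info_hash': 'abc123', 'uri': 'magnet:?xt=urn:btih:ABC123'},
    {'name': 'B', 'info_hash': 'ABC123', 'uri': 'magnet:?xt=urn:btih:ABC123'},  # dupe of A
    {'name': 'C', 'info_hash': '', 'uri': 'http://example.com/c.torrent'},      # no hash
]

seen, unique = set(), []
for result in results:
    hash_ = result['info_hash'].upper()
    if not hash_:
        # No info hash: fall back to hashing the URI itself, as above
        hash_ = hashlib.md5(result['uri'].encode('utf-8')).hexdigest()
    if hash_ not in seen:
        seen.add(hash_)
        unique.append(result)

print([r['name'] for r in unique])  # ['A', 'C']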
Example #12
def do_search(query, category=None):
    notice('Search query: ' + str(query))
    try:
        partis = Partis(get_setting('username', unicode),
                        get_setting('password', unicode))
        notify('Searching ...')
        return partis.updateIconPath(partis.search(query, category),
                                     os.path.join(ADDON_PATH, 'Partis'))
    except Exception as e:
        log.debug(getattr(e, 'message', repr(e)))
        notify(getattr(e, 'message', repr(e)))
        return []
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    max_results = get_setting('max_results', int)
    sort_by = get_setting('sort_by', int)
    # 0 "Resolution"
    # 1 "Seeds"
    # 2 "Size"
    # 3 "Balanced"

    if sort_by == 0:
        sorted_results = sorted(results, key=lambda r: nonesorter(r['sort_resolution']), reverse=True)
    elif sort_by == 1:
        sorted_results = sorted(results, key=lambda r: nonesorter(r['seeds']), reverse=True)
    elif sort_by == 2:
        sorted_results = sorted(results, key=lambda r: nonesorter(r['size']), reverse=True)
    else:
        # 3 "Balanced" (also the fallback for any unexpected value)
        # TODO: think of something interesting to balance sort results
        sorted_results = sorted(results, key=lambda r: nonesorter(r['sort_balance']), reverse=True)

    if len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info("[%s] >> %s returned %2d results in %.1f seconds%s" %
             (provider, definition['name'].rjust(longest), len(results),
              round(time.time() - request_time, 2),
              (", sending %d best ones" %
               max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
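`nonesorter` is not shown in this listing; it presumably just guards sort keys against `None` values. A plausible sketch of the helper and the sort it enables (the helper's body and the sample data are assumptions):

def nonesorter(value):
    # Assumed helper: coerce None to a sortable minimum so mixed results don't break the sort
    return value if value is not None else 0

results = [{'seeds': 12}, {'seeds': None}, {'seeds': 40}]
print(sorted(results, key=lambda r: nonesorter(r['seeds']), reverse=True))
# [{'seeds': 40}, {'seeds': 12}, {'seeds': None}]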
Example #14
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash,
                            referer):
            try:
                log.debug("[%s] Getting subpage at %s" %
                          (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" %
                          (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey
            headers = {}

            if get_setting("use_cloudhole", bool):
                subclient.clearance = get_setting('clearance')
                subclient.user_agent = get_setting('user_agent')
            if "subpage_mode" in definition:
                if definition["subpage_mode"] == "xhr":
                    headers['X-Requested-With'] = 'XMLHttpRequest'
                    headers['Content-Language'] = ''

            if referer:
                headers['Referer'] = referer

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'), headers=headers)

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' %
                          (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(
                            uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error(
                        "[%s] Subpage extraction for %s failed with: %s" %
                        (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)
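Private trackers carry their cookies inside the URI after a `|` separator; here is the split/re-append dance in isolation (URLs are made up):

torrent = 'https://tracker.example.com/details/123|uid=1;pass=abc'  # hypothetical
uri = torrent.split('|')                            # separate the URL from the cookies
# ... fetch uri[0], extract the real download link from the subpage ...
new_link = 'https://tracker.example.com/download/123.torrent'
if not new_link.startswith('magnet') and len(uri) > 1:
    new_link = '%s|%s' % (new_link, uri[1])         # re-attach cookies for the download
print(new_link)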
Example #15
def generate_payload(provider,
                     generator,
                     filtering,
                     verify_name=True,
                     verify_size=True):
    """ Payload formatter to format results the way Elementum expects them

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes

    Returns:
        list: Formatted results
    """
    filtering.information(provider)
    results = []

    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    for name, info_hash, uri, size, seeds, peers in generator:
        size = clean_size(size)
        # uri, info_hash = clean_magnet(uri, info_hash)
        v_name = name if verify_name else filtering.title
        v_size = size if verify_size else None
        if filtering.verify(provider, v_name, v_size):
            results.append({
                "name": name,
                "uri": uri,
                "info_hash": info_hash,
                "size": size,
                "seeds": get_int(seeds),
                "peers": get_int(peers),
                "language": definition["language"] if 'language' in definition else 'en',
                "provider": '[COLOR %s]%s[/COLOR]' % (definition['color'], definition['name']),
                "icon": os.path.join(ADDON_PATH, 'burst', 'providers', 'icons', '%s.png' % provider),
            })
        else:
            log.debug(filtering.reason.encode('utf-8'))

    log.debug('>>>>>> %s would send %d torrents to Elementum <<<<<<<' %
              (provider, len(results)))

    return results
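For reference, the shape of one emitted result dict, with illustrative values (`language` falls back to 'en' when the definition omits it):

# Illustrative entry, as generate_payload would emit it
result = {
    "name": "Some Movie 2019 1080p",
    "uri": "magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567",
    "info_hash": "0123456789abcdef0123456789abcdef01234567",
    "size": "1.2 GB",
    "seeds": 42,
    "peers": 7,
    "language": "en",
    "provider": "[COLOR FF3C9EFB]Some Provider[/COLOR]",
    "icon": "burst/providers/icons/someprovider.png",
}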
Example #16
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content  (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    if provider == "kinozal":
        matches = re.findall(r'\: (.{40})', content)  # kinozal
        if matches:
            result = "magnet:?xt=urn:btih:" + matches[0]
            log.debug('[%s] Make magnet from info_hash: %s' %
                      (provider, repr(result)))
            return result

    matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
    if matches:
        result = matches[0]
        log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'http(.*?).torrent["\']', content)
    if matches:
        result = 'http' + matches[0] + '.torrent'
        result = result.replace('torcache.net', 'itorrents.org')
        log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'/download\?token=[A-Za-z0-9%]+', content)
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with token: %s' %
                  (provider, repr(result)))
        return result

    matches = re.findall(r'/telechargement/[a-z0-9-_.]+', content)  # cpasbien
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched some french link: %s' %
                  (provider, repr(result)))
        return result

    matches = re.findall(r'/torrents/download/\?id=[a-z0-9-_.]+', content)  # t411
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with an ID: %s' %
                  (provider, repr(result)))
        return result

    return None
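The magnet regex above can be exercised directly on a scrap of markup; a quick runnable sketch:

import re

content = '<a href="magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=test">get</a>'
matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
print(matches[0])
# magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=test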
    def __init__(self):
        self._counter = 0
        self._cookies_filename = ''
        self._cookies = LWPCookieJar()
        self.user_agent = USER_AGENT
        self.clearance = None
        self.content = None
        self.status = None
        self.token = None
        self.passkey = None
        self.headers = dict()
        global dns_public_list
        global dns_opennic_list
        dns_public_list = get_setting("public_dns_list", unicode).replace(" ", "").split(",")
        dns_opennic_list = get_setting("opennic_dns_list", unicode).replace(" ", "").split(",")
        socket.setdefaulttimeout(60)
def cleanup_results(results_list):
    """ Remove duplicate results, hash results without an info_hash, and sort by seeders

    Args:
        results_list (list): Results to clean-up

    Returns:
        list: De-duplicated, hashed and sorted results
    """
    if len(results_list) == 0:
        return []

    hashes = []
    filtered_list = []
    allow_noseeds = get_setting('allow_noseeds', bool)
    for result in results_list:
        if not result['seeds'] and not allow_noseeds:
            continue

        if not result['uri']:
            if not result['name']:
                continue
            try:
                log.warning('[%s] No URI for %s' %
                            (result['provider'][16:-8], repr(result['name'])))
            except Exception as e:
                import traceback
                log.warning("%s logging failed with: %s" %
                            (result['provider'], repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))
            continue

        hash_ = result['info_hash'].upper()

        if not hash_:
            try:
                if result['uri'] and result['uri'].startswith('magnet'):
                    hash_ = Magnet(result['uri']).info_hash.upper()
                else:
                    hash_ = hashlib.md5(py2_encode(result['uri'])).hexdigest()
            except:
                pass
        # try:
        #     log.debug("[%s] Hash for %s: %s" % (result['provider'][16:-8], repr(result['name']), hash_))
        # except Exception as e:
        #     import traceback
        #     log.warning("%s logging failed with: %s" % (result['provider'], repr(e)))
        #     map(log.debug, traceback.format_exc().split("\n"))

        if not any(existing == hash_ for existing in hashes):
            filtered_list.append(result)
            hashes.append(hash_)

    return sorted(filtered_list,
                  key=lambda r: (get_int(r['seeds'])),
                  reverse=True)
Example #19
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    max_results = get_setting('max_results', int)
    if get_setting('sort_by_resolution', bool):
        log.debug("[%s][EXPERIMENTAL] Sorting by resolution before applying the max_results cutoff" % provider)
        sorted_results = sorted(results, key=lambda r: r['resolution'], reverse=True)
    else:
        sorted_results = sorted(results,
                                key=lambda r: (r['seeds']),
                                reverse=True)

    if get_setting('disable_max', bool):
        log.debug('[%s] Not applying the "max_results" setting' % provider)
        max_results = 999
    elif len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info(">> %s returned %2d results in %.1f seconds%s" %
             (definition['name'].rjust(longest), len(results),
              round(time.time() - request_time, 2),
              (", sending %d best ones" %
               max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
Example #20
    def use_general(self, provider, payload):
        """ Setup method to define general search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_public_dns",
                       bool) and "public_dns_alias" in definition:
            definition = get_alias(definition, definition["public_dns_alias"])

        general_query = definition['general_query'] if definition[
            'general_query'] else ''
        log.debug("[%s] General URL: %s%s" %
                  (provider, definition['base_url'], general_query))
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], general_query)

        self.collect_queries('general', definition)
Example #21
    def use_anime(self, provider, info):
        """ Setup method to define anime search parameters

        Args:
            provider (str): Provider ID
            info (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        anime_query = definition['anime_query'] if definition['anime_query'] else ''
        log.debug("Anime URL: %s%s" % (definition['base_url'], anime_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], anime_query)
        if self.info['absolute_number']:
            self.info['episode'] = self.info['absolute_number']
        if definition['anime_keywords']:
            self.queries = ["%s" % definition['anime_keywords']]
            self.extras = ["%s" % definition['anime_extra'] if definition['anime_extra'] else '']
Example #22
def get_enabled_providers():
    """ Utility method to get all enabled provider IDs

    Returns:
        list: All available enabled provider IDs
    """
    results = []
    for provider in definitions:
        # Add each provider at most once: enabled via its setting, or custom
        if get_setting('use_%s' % provider, bool):
            results.append(provider)
        elif 'custom' in definitions[provider] and definitions[provider]['custom']:
            results.append(provider)
    return results
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content  (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
    if matches:
        result = matches[0]
        log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'http(.*?).torrent["\']', content)
    if matches:
        result = 'http' + matches[0] + '.torrent'
        result = result.replace('torcache.net', 'itorrents.org')
        log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'/download\?token=[A-Za-z0-9%]+', content)
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with token: %s' %
                  (provider, repr(result)))
        return result

    matches = re.findall(r'/torrents/download/\?id=[a-z0-9-_.]+', content)  # t411
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with an ID: %s' %
                  (provider, repr(result)))
        return result

    matches = re.findall(r'torrents\.php\?action=download&id=\d+&key=[a-fA-F0-9]+', content)  # nCore
    if matches:
        result = definition['root_url'] + '/' + matches[0]
        log.debug('[%s] Matched download link with an ID: %s' %
                  (provider, repr(result)))
        return result

    return None
Example #24
    def use_general(self, provider, payload):
        """ Setup method to define general search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        general_query = definition['general_query'] if definition['general_query'] else ''
        log.debug("General URL: %s%s" % (definition['base_url'], general_query))
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], general_query)
        if definition['general_keywords']:
            self.queries = [definition['general_keywords']]
            self.extras = [definition['general_extra']]
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content  (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    if provider == "kinozal":
        matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
        if matches:
            result = matches[0]
            log.debug('[%s] Matched magnet link: %s' %
                      (provider, repr(result)))
            return result

        matches = re.findall(r'\: ([A-Fa-f0-9]{40})', content)  # kinozal
        if matches:
            result = "magnet:?xt=urn:btih:" + matches[0] + "&tr=http%3A%2F%2Ftr0.torrent4me.com%2Fann%3Fuk%3Dstl41hKc1E&tr=http%3A%2F%2Ftr0.torrent4me.com%2Fann%3Fuk%3Dstl41hKc1E&tr=http%3A%2F%2Ftr0.tor4me.info%2Fann%3Fuk%3Dstl41hKc1E&tr=http%3A%2F%2Ftr0.tor2me.info%2Fann%3Fuk%3Dstl41hKc1E&tr=http%3A%2F%2Fretracker.local%2Fannounce"
            log.debug('[%s] Make magnet from info_hash: %s' %
                      (provider, repr(result)))
            return result

    matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
    if matches:
        result = matches[0]
        log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
        return result

    matches = re.findall('http(.*?).torrent["\']', content)
    if matches:
        result = 'http' + matches[0] + '.torrent'
        log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
        return result

    matches = re.findall('"(/download/[A-Za-z0-9]+)"', content)
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link: %s' % (provider, repr(result)))
        return result
    return None
    def __init__(self):
        resolutions = OrderedDict()

        # TODO: remove when finished with debugging resolutions detection
        # resolutions['filter_240p'] = ['240p', u'240р', '_tvrip_', 'satrip', 'vhsrip']
        # resolutions['filter_480p'] = ['480p', u'480р', 'xvid', 'dvd', 'dvdrip', 'hdtv']
        # resolutions['filter_720p'] = ['720p', u'720р', 'hdrip', 'bluray', 'blu_ray', 'brrip', 'bdrip', 'hdtv', '/hd720p', '1280x720']
        # resolutions['filter_1080p'] = ['1080p', u'1080р', '1080i', 'fullhd', '_fhd_', '/hd1080p', '/hdr1080p', '1920x1080']
        # resolutions['filter_2k'] = ['_2k_', '1440p', u'1440р', u'_2к_']
        # resolutions['filter_4k'] = ['_4k_', '2160p', u'2160р', '_uhd_', u'_4к_']

        resolutions['filter_240p'] = [u'240[pр]', u'vhs\-?rip']
        resolutions['filter_480p'] = [
            u'480[pр]',
            u'xvid|dvd|dvdrip|hdtv|web\-(dl)?rip|iptv|sat\-?rip|tv\-?rip'
        ]
        resolutions['filter_720p'] = [
            u'720[pр]|1280x720', u'hd720p?|hd\-?rip|b[rd]rip'
        ]
        resolutions['filter_1080p'] = [
            u'1080[piр]|1920x1080', u'hd1080p?|fullhd|fhd|blu\W*ray|bd\W*remux'
        ]
        resolutions['filter_2k'] = [u'1440[pр]', u'2k']
        resolutions['filter_4k'] = [u'4k|2160[pр]|uhd', u'4k|hd4k']
        resolutions['filter_music'] = [u'mp3|flac|alac|ost|sound\-?track']

        self.resolutions = resolutions

        self.release_types = {
            'filter_brrip': [u'brrip|bd\-?rip|blu\-?ray|bd\-?remux'],
            'filter_webdl': [u'web_?\-?dl|web\-?rip|dl\-?rip|yts'],
            'filter_hdrip': [u'hd\-?rip'],
            'filter_hdtv': [u'hd\-?tv'],
            'filter_dvd': [u'dvd|dvd\-?rip|vcd\-?rip'],
            'filter_dvdscr': [u'dvd\-?scr'],
            'filter_screener': [u'screener|scr'],
            'filter_3d': [u'3d'],
            'filter_telesync': [u'telesync|ts|tc'],
            'filter_cam': [u'cam|hd\-?cam'],
            'filter_tvrip': [u'tv\-?rip|sat\-?rip|dvb'],
            'filter_vhsrip': [u'vhs\-?rip'],
            'filter_iptvrip': [u'iptv\-?rip'],
            'filter_trailer': [u'trailer|трейлер|тизер'],
            'filter_workprint': [u'workprint'],
            'filter_line': [u'line']
        }

        # TODO: remove when finished with debugging resolutions detection
        # self.release_types = {
        #     'filter_brrip': ['brrip', 'bdrip', 'bd-rip', 'bluray', 'blu-ray', 'bdremux', 'bd-remux'],
        #     'filter_webdl': ['webdl', 'webrip', 'web-rip', 'web_dl', 'dlrip', '_yts_'],
        #     'filter_hdrip': ['hdrip', 'hd-rip'],
        #     'filter_hdtv': ['hdtv'],
        #     'filter_dvd': ['_dvd_', 'dvdrip', 'dvd-rip', 'vcdrip'],
        #     'filter_dvdscr': ['dvdscr', 'dvd-scr'],
        #     'filter_screener': ['screener', '_scr_'],
        #     'filter_3d': ['_3d_'],
        #     'filter_telesync': ['telesync', '_ts_', '_tc_'],
        #     'filter_cam': ['_cam_', 'hdcam'],
        #     'filter_tvrip': ['_tvrip', 'satrip'],
        #     'filter_vhsrip': ['vhsrip'],
        #     'filter_trailer': ['trailer', u'трейлер', u'тизер'],
        #     'filter_workprint': ['workprint']
        # }

        require = []
        resolutions_allow = []
        releases_allow = []
        releases_deny = []

        for resolution in self.resolutions:
            if get_setting(resolution, bool):
                resolutions_allow.append(resolution)
                # Add enabled resolutions to allowed release types to match
                # previous versions' behavior with certain providers
                # with no release types in torrent names, ie. YTS
                releases_allow.extend(self.resolutions[resolution])
        self.resolutions_allow = resolutions_allow

        # Skip resolution filtering if we're allowing all of them anyway
        self.filter_resolutions = True
        if len(self.resolutions_allow) == len(self.resolutions):
            self.filter_resolutions = False

        for release_type in self.release_types:
            if get_setting(release_type, bool):
                releases_allow.extend(self.release_types[release_type])
            else:
                releases_deny.extend(self.release_types[release_type])
        self.releases_allow = releases_allow
        self.releases_deny = releases_deny

        if get_setting('additional_filters', bool):
            accept = get_setting('accept', unicode).strip().lower()
            if accept:
                accept = re.split(r',\s?', accept)
                releases_allow.extend(accept)

            block = get_setting('block', unicode).strip().lower()
            if block:
                block = re.split(r',\s?', block)
                releases_deny.extend(block)

            require = get_setting('require', unicode).strip().lower()
            if require:
                require = re.split(r',\s?', require)

        self.require_keywords = require

        self.min_size = get_float(get_setting('min_size'))
        self.max_size = get_float(get_setting('max_size'))
        self.check_sizes()

        self.filter_title = False

        self.queries = []
        self.extras = []

        self.info = dict(title="",
                         proxy_url="",
                         internal_proxy_url="",
                         elementum_url="",
                         titles=[])
        self.kodi_language = ''
        self.language_exceptions = []
        self.get_data = {}
        self.post_data = {}
        self.url = ''
        self.title = ''
        self.reason = ''
        self.results = []
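Each filter bucket is a list of regex alternations; matching is presumably done with an `included_rx`-style helper along these lines (the helper body is an assumption; the two patterns are quoted from the buckets above, raw-stringed):

import re

release_types = {
    'filter_brrip': [r'brrip|bd-?rip|blu-?ray|bd-?remux'],
    'filter_cam': [r'cam|hd-?cam'],
}

def included_rx(name, keys):
    # Assumed matcher: any pattern hitting the name counts as a match
    return any(re.search(pattern, name, re.IGNORECASE) for pattern in keys)

name = 'some.movie.2019.bdrip.x264'
print(included_rx(name, release_types['filter_brrip']))  # True ('bd-?rip' matches 'bdrip')
print(included_rx(name, release_types['filter_cam']))    # False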
    def process_keywords(self, provider, text):
        """ Processes the query payload from a provider's keyword definitions

        Args:
            provider (str): Provider ID
            text     (str): Keyword placeholders from definitions, ie. {title}

        Returns:
            str: Processed query keywords
        """
        keywords = self.read_keywords(text)
        replacing = get_setting("filter_quotes", bool)

        for keyword in keywords:
            keyword = keyword.lower()
            if 'title' in keyword:
                title = self.info["title"]
                language = definitions[provider]['language']
                use_language = None
                if ':' in keyword:
                    use_language = keyword.split(':')[1].lower()
                if provider not in self.language_exceptions and \
                   (use_language or self.kodi_language) and \
                   'titles' in self.info and self.info['titles']:
                    try:
                        if self.kodi_language and self.kodi_language in self.info['titles']:
                            use_language = self.kodi_language
                        if use_language not in self.info['titles']:
                            use_language = language
                            if 'original' in self.info['titles']:
                                title = self.info['titles']['original']
                        if use_language in self.info['titles'] and self.info['titles'][use_language]:
                            title = self.info['titles'][use_language]
                            title = normalize_string(title)
                            log.info("[%s] Using translated '%s' title %s" %
                                     (provider, use_language, repr(title)))
                            log.debug(
                                "[%s] Translated titles from Elementum: %s" %
                                (provider, repr(self.info['titles'])))
                    except Exception as e:
                        import traceback
                        log.error("%s failed with: %s" % (provider, repr(e)))
                        map(log.debug, traceback.format_exc().split("\n"))
                text = text.replace('{%s}' % keyword, title)

            if 'year' in keyword:
                text = text.replace('{%s}' % keyword, str(self.info["year"]))

            if 'season' in keyword:
                if '+' in keyword:
                    keys = keyword.split('+')
                    season = str(self.info["season"] + get_int(keys[1]))
                elif ':' in keyword:
                    keys = keyword.split(':')
                    season = ('%%.%sd' % keys[1]) % self.info["season"]
                else:
                    season = '%s' % self.info["season"]
                text = text.replace('{%s}' % keyword, season)

            if 'episode' in keyword:
                if '+' in keyword:
                    keys = keyword.split('+')
                    episode = str(self.info["episode"] + get_int(keys[1]))
                elif ':' in keyword:
                    keys = keyword.split(':')
                    episode = ('%%.%sd' % keys[1]) % self.info["episode"]
                else:
                    episode = '%s' % self.info["episode"]
                text = text.replace('{%s}' % keyword, episode)

        if replacing:
            text = text.replace(u"'", '')

        return text
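The `{season:2}` and `{episode:2}` placeholders zero-pad through a printf format built on the fly; a worked example of that branch:

info = {'season': 5, 'episode': 3}

keyword = 'season:2'                              # from a '{season:2}' placeholder
keys = keyword.split(':')
season = ('%%.%sd' % keys[1]) % info['season']    # builds '%.2d', then formats 5
print(season)                                     # 05

text = 's{season:2}e{episode:2}'
text = text.replace('{season:2}', season)
text = text.replace('{episode:2}', ('%%.%sd' % 2) % info['episode'])
print(text)                                       # s05e03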
Example #28
def extract_from_api(provider, client):
    """ Main API parsing generator for API-based providers

    An almost clever API parser, mostly just for YTS, RARBG and T411

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    try:
        data = json.loads(client.content)
    except:
        data = []
    log.debug("[%s] JSON response from API: %s" %
              (unquote(provider), repr(data)))

    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))
    api_format = definition['api_format']

    results = []
    result_keys = api_format['results'].split('.')
    log.debug("[%s] result_keys: %s" % (provider, repr(result_keys)))
    for key in result_keys:
        if key in data:
            data = data[key]
        else:
            data = []
        # log.debug("[%s] nested results: %s" % (provider, repr(data)))
    results = data
    log.debug("[%s] results: %s" % (provider, repr(results)))

    if 'subresults' in api_format:
        from copy import deepcopy
        for result in results:  # A little too specific to YTS but who cares...
            result['name'] = result[api_format['name']]
        subresults = []
        subresults_keys = api_format['subresults'].split('.')
        for key in subresults_keys:
            for result in results:
                if key in result:
                    for subresult in result[key]:
                        sub = deepcopy(result)
                        sub.update(subresult)
                        subresults.append(sub)
        results = subresults
        log.debug("[%s] with subresults: %s" % (provider, repr(results)))

    for result in results:
        if not result or not isinstance(result, dict):
            continue
        name = ''
        info_hash = ''
        torrent = ''
        size = ''
        seeds = ''
        peers = ''
        if 'name' in api_format:
            name = result[api_format['name']]
        if 'description' in api_format:
            if name:
                name += ' '
            name += result[api_format['description']]
        if 'torrent' in api_format:
            torrent = result[api_format['torrent']]
            if 'download_path' in definition:
                torrent = definition['base_url'] + definition['download_path'] + torrent
            if client.token:
                user_agent = USER_AGENT
                headers = {
                    'Authorization': client.token,
                    'User-Agent': user_agent
                }
                log.debug("[%s] Appending headers: %s" %
                          (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" %
                          (provider, repr(torrent)))
        if 'info_hash' in api_format:
            info_hash = result[api_format['info_hash']]
        if 'quality' in api_format:  # Again quite specific to YTS...
            name = "%s - %s" % (name, result[api_format['quality']])
        if 'size' in api_format:
            size = result[api_format['size']]
            if type(size) in (long, int):
                size = sizeof(size)
            elif type(size) in (str, unicode) and size.isdigit():
                size = sizeof(int(size))
        if 'seeds' in api_format:
            seeds = result[api_format['seeds']]
            if type(seeds) in (str, unicode) and seeds.isdigit():
                seeds = int(seeds)
        if 'peers' in api_format:
            peers = result[api_format['peers']]
            if type(peers) in (str, unicode) and peers.isdigit():
                peers = int(peers)
        yield (name, info_hash, torrent, size, seeds, peers)
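A hypothetical `api_format` mapping for a YTS-style nested response, showing how the generator walks the dotted `results` path (all field names below are assumptions, not a real provider definition):

# Hypothetical provider definition fragment
api_format = {
    'results': 'data.movies',        # dotted path to the result list
    'subresults': 'torrents',        # each movie carries a list of torrents
    'name': 'title',
    'torrent': 'url',
    'quality': 'quality',
    'size': 'size_bytes',
    'seeds': 'seeds',
    'peers': 'peers',
}

data = {'data': {'movies': [
    {'title': 'Some Movie', 'torrents': [
        {'url': 'http://example.com/a.torrent', 'quality': '1080p',
         'size_bytes': 1200000000, 'seeds': 40, 'peers': 5},
    ]},
]}}

# Walk the dotted 'results' path much as extract_from_api does
results = data
for key in api_format['results'].split('.'):
    results = results.get(key, [])
print(results[0]['title'])   # Some Movie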
Example #29
def search(payload, method="general"):
    """ Main search entrypoint

    Args:
        payload (dict): Search payload from Elementum.
        method   (str): Type of search, can be ``general``, ``movie``, ``show``, ``season`` or ``anime``

    Returns:
        list: All filtered results in the format Elementum expects
    """
    log.debug("Searching with payload (%s): %s" % (method, repr(payload)))

    if method == 'general':
        if 'query' in payload:
            payload['title'] = payload['query']
            payload['titles'] = {'source': payload['query']}
        else:
            payload = {
                'title': payload,
                'titles': {
                    'source': payload
                },
            }

    payload['titles'] = dict(
        (k.lower(), v) for k, v in payload['titles'].iteritems())

    # If titles[] exists in payload and there are special chars in titles[source]
    #   then we set a flag to possibly modify the search query
    payload['has_special'] = 'titles' in payload and \
                             bool(payload['titles']) and \
                             'source' in payload['titles'] and \
                             any(c in payload['titles']['source'] for c in special_chars)
    if payload['has_special']:
        log.debug(
            "Query title contains special chars, so removing any quotes in the search query"
        )

    if 'proxy_url' not in payload:
        payload['proxy_url'] = ''
    if 'internal_proxy_url' not in payload:
        payload['internal_proxy_url'] = ''
    if 'elementum_url' not in payload:
        payload['elementum_url'] = ''
    if 'silent' not in payload:
        payload['silent'] = False
    if 'skip_auth' not in payload:
        payload['skip_auth'] = False

    global request_time
    global provider_names
    global provider_results
    global available_providers

    provider_names = []
    provider_results = []
    available_providers = 0
    request_time = time.time()

    providers = get_enabled_providers(method)

    if len(providers) == 0:
        if not payload['silent']:
            notify(translation(32060), image=get_icon_path())
        log.error("No providers enabled")
        return []

    log.info(
        "Burstin' with %s" %
        ", ".join([definitions[provider]['name'] for provider in providers]))

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if not kodi_language:
            log.warning("Kodi returned empty language code...")
        elif 'titles' not in payload or not payload['titles']:
            log.info("No translations available...")
        elif payload['titles'] and kodi_language not in payload['titles']:
            log.info("No '%s' translation available..." % kodi_language)

    p_dialog = xbmcgui.DialogProgressBG()
    if not payload['silent']:
        p_dialog.create('Elementum [COLOR FFFF6B00]Burst[/COLOR]',
                        translation(32061))

    for provider in providers:
        available_providers += 1
        provider_names.append(definitions[provider]['name'])
        task = Thread(target=run_provider, args=(provider, payload, method))
        task.start()

    providers_time = time.time()
    total = float(available_providers)

    # Exit if all providers have returned results or the timeout is reached, checking every 250ms
    while time.time() - providers_time < timeout and available_providers > 0:
        timer = time.time() - providers_time
        log.debug("Timer: %ds / %ds" % (timer, timeout))
        if timer > timeout:
            break
        message = translation(32062) % available_providers if available_providers > 1 else translation(32063)
        if not payload['silent']:
            p_dialog.update(int((total - available_providers) / total * 100),
                            message=message)
        time.sleep(0.25)

    if not payload['silent']:
        p_dialog.close()
    del p_dialog

    if available_providers > 0:
        message = u', '.join(provider_names)
        message = message + translation(32064)
        log.warning(message.encode('utf-8'))
        if not payload['silent']:
            notify(message, ADDON_ICON)

    log.debug("all provider_results: %s" % repr(provider_results))

    filtered_results = apply_filters(provider_results)

    log.debug("all filtered_results: %s" % repr(filtered_results))

    log.info("Providers returned %d results in %s seconds" %
             (len(filtered_results), round(time.time() - request_time, 2)))

    return filtered_results
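The polling loop above is a plain countdown: each provider thread decrements `available_providers` via `got_results`, and the main thread waits for zero or the timeout. A skeleton of that pattern with the Kodi specifics stripped out (the lock is an addition for safety; the original relies on the GIL):

import time
from threading import Thread, Lock

available_providers = 3            # one slot per launched provider thread
lock = Lock()

def fake_provider(delay):
    global available_providers
    time.sleep(delay)              # pretend to search a tracker
    with lock:
        available_providers -= 1   # got_results() does this decrement in the addon

for delay in (0.1, 0.2, 0.3):
    Thread(target=fake_provider, args=(delay,)).start()

start, timeout = time.time(), 2.0
while time.time() - start < timeout and available_providers > 0:
    time.sleep(0.05)               # poll, much like search() does every 250 ms

print('providers still pending: %d' % available_providers)  # 0 unless the timeout hit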
Example #30
def extract_torrents(provider, client):
    """ Main torrent extraction generator for non-API based providers

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))
    log.debug("[%s] Extracting torrents from %s using definitions: %s" %
              (provider, provider, repr(definition)))

    if not client.content:
        if get_setting("use_debug_parser", bool):
            log.debug("[%s] Parser debug | Page content is empty" % provider)

        return  # end the generator; PEP 479 forbids raising StopIteration inside it

    dom = Html().feed(client.content)

    key_search = get_search_query(definition, "key")
    row_search = get_search_query(definition, "row")
    name_search = get_search_query(definition, "name")
    torrent_search = get_search_query(definition, "torrent")
    info_hash_search = get_search_query(definition, "infohash")
    size_search = get_search_query(definition, "size")
    seeds_search = get_search_query(definition, "seeds")
    peers_search = get_search_query(definition, "peers")
    referer_search = get_search_query(definition, "referer")

    log.debug("[%s] Parser: %s" % (provider, repr(definition['parser'])))

    q = Queue()
    threads = []
    needs_subpage = 'subpage' in definition and definition['subpage']

    if needs_subpage:

        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash,
                            referer):
            try:
                log.debug("[%s] Getting subpage at %s" %
                          (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" %
                          (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey
            headers = {}

            if "subpage_mode" in definition:
                if definition["subpage_mode"] == "xhr":
                    headers['X-Requested-With'] = 'XMLHttpRequest'
                    headers['Content-Language'] = ''

            if referer:
                headers['Referer'] = referer

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'), headers=headers)

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' %
                          (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(
                            uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error(
                        "[%s] Subpage extraction for %s failed with: %s" %
                        (provider, repr(uri[0]), repr(e)))
                    for line in traceback.format_exc().split("\n"):
                        log.debug(line)

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)

    if not dom:
        if get_setting("use_debug_parser", bool):
            log.debug(
                "[%s] Parser debug | Could not parse DOM from page content" %
                provider)

        # As above: end the generator with return, not StopIteration.
        return

    if get_setting("use_debug_parser", bool):
        log.debug(
            "[%s] Parser debug | Page content: %s" %
            (provider, client.content.replace('\r', '').replace('\n', '')))

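    # Each *_search value is a Python expression taken from the provider
    # definition and evaluated against the parsed DOM (hence the eval()
    # calls below); a malformed definition will surface as an error here.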
    key = eval(key_search) if key_search else ""
    if key_search and get_setting("use_debug_parser", bool):
        key_str = str(key)
        log.debug(
            "[%s] Parser debug | Matched '%s' iteration for query '%s': %s" %
            (provider, 'key', key_search, key_str.replace('\r', '').replace(
                '\n', '')))

    items = eval(row_search)
    if get_setting("use_debug_parser", bool):
        log.debug("[%s] Parser debug | Matched %d items for '%s' query '%s'" %
                  (provider, len(items), 'row', row_search))

    for item in items:
        if get_setting("use_debug_parser", bool):
            item_str = str(item)
            log.debug(
                "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                % (provider, 'row', row_search, item_str.replace(
                    '\r', '').replace('\n', '')))

        if not item:
            continue

        try:
            name = eval(name_search) if name_search else ""
            torrent = eval(torrent_search) if torrent_search else ""
            size = eval(size_search) if size_search else ""
            seeds = eval(seeds_search) if seeds_search else ""
            peers = eval(peers_search) if peers_search else ""
            info_hash = eval(info_hash_search) if info_hash_search else ""
            referer = eval(referer_search) if referer_search else ""

            if 'magnet:?' in torrent:
                torrent = torrent[torrent.find('magnet:?'):]

            if get_setting("use_debug_parser", bool):
                log.debug(
                    "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                    % (provider, 'name', name_search, name))
                log.debug(
                    "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                    % (provider, 'torrent', torrent_search, torrent))
                log.debug(
                    "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                    % (provider, 'size', size_search, size))
                log.debug(
                    "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                    % (provider, 'seeds', seeds_search, seeds))
                log.debug(
                    "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                    % (provider, 'peers', peers_search, peers))
                if info_hash_search:
                    log.debug(
                        "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                        % (provider, 'info_hash', info_hash_search, info_hash))
                if referer_search:
                    log.debug(
                        "[%s] Parser debug | Matched '%s' iteration for query '%s': %s"
                        % (provider, 'referer', referer_search, referer))

            # Pass client cookies with torrent if private
            if not torrent.startswith('magnet'):
                user_agent = USER_AGENT

                if client.passkey:
                    torrent = torrent.replace('PASSKEY', client.passkey)
                elif client.token:
                    headers = {
                        'Authorization': client.token,
                        'User-Agent': user_agent
                    }
                    log.debug("[%s] Appending headers: %s" %
                              (provider, repr(headers)))
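                    # append_headers() folds these headers into the URI using
                    # the same '|'-separated convention as the cookies above.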
                    torrent = append_headers(torrent, headers)
                    log.debug("[%s] Torrent with headers: %s" %
                              (provider, repr(torrent)))
                else:
                    parsed_url = urlparse(torrent.split('|')[0])
                    cookie_domain = '{uri.netloc}'.format(uri=parsed_url)
                    cookie_domain = re.sub(r'www\d*\.', '', cookie_domain)
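                    # e.g. "www2.example.org" -> "example.org" (illustrative)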
                    cookies = []
                    for cookie in client._cookies:
                        if cookie_domain in cookie.domain:
                            cookies.append(cookie)
                    headers = {}
                    if cookies:
                        headers = {'User-Agent': user_agent}
                        log.debug("[%s] Cookies res: %s / %s" %
                                  (provider, repr(headers),
                                   repr(client.request_headers)))
                        if client.request_headers:
                            headers.update(client.request_headers)
                        if client.url:
                            headers['Referer'] = client.url
                            headers['Origin'] = client.url
                        # Need to set Cookie afterwards to avoid rewriting it with session Cookies
                        headers['Cookie'] = ";".join(
                            ["%s=%s" % (c.name, c.value) for c in cookies])
                    else:
                        headers = {'User-Agent': user_agent}

                    torrent = append_headers(torrent, headers)

            if name and torrent and needs_subpage and not torrent.startswith(
                    'magnet'):
                if not torrent.startswith('http'):
                    torrent = definition['root_url'] + torrent.encode('utf-8')
                t = Thread(target=extract_subpage,
                           args=(q, name, torrent, size, seeds, peers,
                                 info_hash, referer))
                threads.append(t)
            else:
                yield (name, info_hash, torrent, size, seeds, peers)
        except Exception as e:
            log.error("[%s] Got an exception while parsing results: %s" %
                      (provider, repr(e)))

    if needs_subpage:
        log.debug("[%s] Starting subpage threads..." % provider)
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        for i in range(q.qsize()):
            ret = q.get_nowait()
            log.debug("[%s] Queue %d got: %s" % (provider, i, repr(ret)))
            yield ret
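
The subpage branch above is a classic fan-out/fan-in: one thread per result row, a shared Queue collecting the finished tuples, and a drain once every thread has joined. A minimal, self-contained sketch of that pattern follows; fetch_detail and rows are hypothetical stand-ins for extract_subpage and the parsed result rows, not part of the provider code.

from threading import Thread

try:
    from Queue import Queue   # Python 2, as used by the addon
except ImportError:
    from queue import Queue   # Python 3

def fetch_detail(q, row):
    # Stand-in for extract_subpage(): resolve a row to its final link.
    q.put_nowait((row, "magnet:?xt=urn:btih:%040d" % row))

def extract(rows):
    q = Queue()
    threads = [Thread(target=fetch_detail, args=(q, row)) for row in rows]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    # Every worker has finished, so qsize() is stable and get_nowait()
    # cannot raise Empty here.
    for _ in range(q.qsize()):
        yield q.get_nowait()

for result in extract(range(3)):
    print(result)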