Example #1
    def use_anime(self, provider, info):
        """ Setup method to define anime search parameters

        Args:
            provider (str): Provider ID
            info (dict): Quasar search payload
        """
        definition = definitions[provider]
        anime_query = definition['anime_query'] if definition[
            'anime_query'] else ''
        log.debug("Anime URL: %s%s" % (definition['base_url'], anime_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], anime_query)
        if self.info['absolute_number']:
            self.info['episode'] = self.info['absolute_number']
        if definition['anime_keywords']:
            self.queries = ["%s" % definition['anime_keywords']]
            self.extras = [
                "%s" %
                definition['anime_extra'] if definition['anime_extra'] else ''
            ]
Example #2
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash):
            try:
                log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" % (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey

            if get_setting("use_cloudhole", bool):
                subclient.clearance = get_setting('clearance')
                subclient.user_agent = get_setting('user_agent')

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'))

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' % (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error("[%s] Subpage extraction for %s failed with: %s" % (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)
Example #3
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    max_results = get_setting('max_results', int)
    if get_setting('sort_by_resolution', bool):
        log.debug("[%s][EXPEREMENTAL] Sorting by resolution before cutoff max_results" % provider)
        sorted_results = sorted(results, key=lambda r: (r['resolution']), reverse=True)
    else:
        sorted_results = sorted(results, key=lambda r: (r['seeds']), reverse=True)

    if get_setting('disable_max', bool):
        log.debug('[%s] Not applying the "max_results" setting' % provider)
        max_results = 999
    elif len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info(">> %s returned %2d results in %.1f seconds%s" % (
        definition['name'].rjust(longest), len(results), round(time.time() - request_time, 2),
        (", sending %d best ones" % max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
Example #4
    def use_season(self, provider, info):
        """ Setup method to define season search parameters

        Args:
            provider (str): Provider ID
            info (dict): Quasar search payload
        """
        definition = definitions[provider]
        season_query = definition['season_query'] if definition[
            'season_query'] else ''
        log.debug("Season URL: %s%s" % (definition['base_url'], season_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_seasons'))
            self.max_size = get_float(get_setting('max_size_seasons'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], season_query)
        if definition['season_keywords']:
            self.queries = ["%s" % definition['season_keywords']]
            self.extras = [
                "%s" % definition['season_extra']
                if definition['season_extra'] else ''
            ]
            if definition['season_keywords2']:
                self.queries.append("%s" % definition['season_keywords2'])
                self.extras.append("%s" % definition['season_extra2']
                                   if definition['season_extra2'] else '')
Example #5
    def use_episode(self, provider, payload):
        """ Setup method to define episode search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Quasar search payload
        """
        definition = definitions[provider]
        show_query = definition['show_query'] if definition[
            'show_query'] else ''
        log.debug("Episode URL: %s%s" % (definition['base_url'], show_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], show_query)
        if definition['tv_keywords']:
            self.queries = ["%s" % definition['tv_keywords']]
            self.extras = [
                "%s" % definition['tv_extra'] if definition['tv_extra'] else ''
            ]
            # TODO this sucks, tv_keywords should be a list from the start..
            if definition['tv_keywords2']:
                self.queries.append(definition['tv_keywords2'])
                self.extras.append(
                    definition['tv_extra2'] if definition['tv_extra2'] else '')
Example #6
def get_enabled_providers(method):
    """ Utility method to get all enabled provider IDs for a search method

    Args:
        method (str): Type of search, can be ``general``, ``movie``, ``episode``, ``season`` or ``anime``

    Returns:
        list: All available enabled provider IDs
    """
    results = []
    for provider in definitions:
        if 'enabled' in definitions[
                provider] and not definitions[provider]['enabled']:
            continue
        if get_setting('use_%s' % provider, bool):
            if method != 'general' and 'type' in definitions[provider]:
                provider_type = definitions[provider]['type']
                is_show_type = method in ('episode',
                                          'season') and 'show' in provider_type
                if method in provider_type or is_show_type:
                    results.append(provider)
            else:
                results.append(provider)
        if 'custom' in definitions[provider] and definitions[provider][
                'custom']:
            results.append(provider)
    return results
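
A minimal sketch of how the 'type' gate above plays out for an 'episode' search. The entries below are invented for illustration; only the key names follow the lookups in the function (the real entries live in providers.definitions):

# Hypothetical definitions entries to trace get_enabled_providers('episode'):
definitions = {
    'yts': {'enabled': True, 'type': 'movie'},    # skipped: not an episode/show type
    'eztv': {'enabled': True, 'type': 'show'},    # kept: 'show' matches episode/season
    'retired': {'enabled': False},                # skipped: explicitly disabled
}
# Assuming use_yts, use_eztv and use_retired are all enabled in settings,
# only 'eztv' survives the method != 'general' branch.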
Example #7
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]

    max_results = get_setting('max_results', int)
    sorted_results = sorted(results, key=lambda r: (r['seeds']), reverse=True)
    if len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info(">> %s returned %2d results in %.1f seconds%s" %
             (definition['name'].rjust(longest), len(results),
              round(time.time() - request_time, 2),
              (", sending %d best ones" %
               max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
Example #8
def got_results(provider, results):
    """ Results callback once a provider found all its results, or not

    Args:
        provider (str): The provider ID
        results (list): The list of results
    """
    global provider_names
    global provider_results
    global available_providers
    definition = definitions[provider]

    max_results = get_setting('max_results', int)
    sorted_results = sorted(results, key=lambda r: (r['seeds']), reverse=True)
    if len(sorted_results) > max_results:
        sorted_results = sorted_results[:max_results]

    log.info(">> %s returned %2d results in %.1f seconds%s" % (
            definition['name'].rjust(longest), len(results), round(time.time() - request_time, 2),
            (", sending %d best ones" % max_results) if len(results) > max_results else ""))

    provider_results.extend(sorted_results)
    available_providers -= 1
    if definition['name'] in provider_names:
        provider_names.remove(definition['name'])
Example #9
    def use_movie(self, provider, payload):
        """ Setup method to define movie search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Quasar search payload
        """
        definition = definitions[provider]
        movie_query = definition['movie_query'] if definition['movie_query'] else ''
        log.debug("Movies URL: %s%s" % (definition['base_url'], movie_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_movies'))
            self.max_size = get_float(get_setting('max_size_movies'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], movie_query)
        if definition['movie_keywords']:
            self.queries = ["%s" % definition['movie_keywords']]
            self.extras = ["%s" % definition['movie_extra']]
Example #10
    def use_movie(self, provider, payload):
        """ Setup method to define movie search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Quasar search payload
        """
        definition = definitions[provider]
        movie_query = definition['movie_query'] if definition[
            'movie_query'] else ''
        log.debug("Movies URL: %s%s" % (definition['base_url'], movie_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_movies'))
            self.max_size = get_float(get_setting('max_size_movies'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], movie_query)
        if definition['movie_keywords']:
            self.queries = ["%s" % definition['movie_keywords']]
            self.extras = ["%s" % definition['movie_extra']]
Example #11
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash):
            try:
                log.debug("[%s] Getting subpage at %s" %
                          (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" %
                          (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey

            if get_setting("use_cloudhole", bool):
                subclient.clearance = get_setting('clearance')
                subclient.user_agent = get_setting('user_agent')

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'))

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' %
                          (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(
                            uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error(
                        "[%s] Subpage extraction for %s failed with: %s" %
                        (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)
Example #12
    def use_general(self, provider, payload):
        """ Setup method to define general search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_opennic_dns",
                       bool) and "opennic_dns_alias" in definition:
            definition = get_alias(definition, definition["opennic_dns_alias"])
        general_query = definition['general_query'] if definition[
            'general_query'] else ''
        log.debug("General URL: %s%s" %
                  (definition['base_url'], general_query))
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], general_query)
        if definition['general_keywords']:
            self.queries = [definition['general_keywords']]
            self.extras = [definition['general_extra']]
Example #13
    def use_anime(self, provider, info):
        """ Setup method to define anime search parameters

        Args:
            provider (str): Provider ID
            info (dict): Quasar search payload
        """
        definition = definitions[provider]
        anime_query = definition['anime_query'] if definition['anime_query'] else ''
        log.debug("Anime URL: %s%s" % (definition['base_url'], anime_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], anime_query)
        if self.info['absolute_number']:
            self.info['episode'] = self.info['absolute_number']
        if definition['anime_keywords']:
            self.queries = ["%s" % definition['anime_keywords']]
            self.extras = ["%s" % definition['anime_extra'] if definition['anime_extra'] else '']
Example #14
def extract_from_page(provider, content):
    """ Sub-page extraction method

    Args:
        provider (str): Provider ID
        content  (str): Page content from Client instance

    Returns:
        str: Torrent or magnet link extracted from sub-page
    """
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    if provider == "kinozal":
        matches = re.findall(r'\: (.{40})', content)  # kinozal
        if matches:
            result = "magnet:?xt=urn:btih:" + matches[0]
            log.debug('[%s] Make magnet from info_hash: %s' % (provider, repr(result)))
            return result

    matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
    if matches:
        result = matches[0]
        log.debug('[%s] Matched magnet link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'http(.*?)\.torrent["\']', content)
    if matches:
        result = 'http' + matches[0] + '.torrent'
        result = result.replace('torcache.net', 'itorrents.org')
        log.debug('[%s] Matched torrent link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'/download\?token=[A-Za-z0-9%]+', content)
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with token: %s' % (provider, repr(result)))
        return result

    matches = re.findall('/telechargement/[a-z0-9-_.]+', content)  # cpasbien
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched some french link: %s' % (provider, repr(result)))
        return result

    matches = re.findall(r'/torrents/download/\?id=[a-z0-9-_.]+', content)  # t411
    if matches:
        result = definition['root_url'] + matches[0]
        log.debug('[%s] Matched download link with an ID: %s' % (provider, repr(result)))
        return result

    return None
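
The magnet branch above is easy to exercise in isolation. A self-contained sketch (only the regex is taken verbatim from the function; the sample content is made up):

import re

content = '<a href="magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=x">DL</a>'
matches = re.findall(r'magnet:\?[^\'"\s<>\[\]]+', content)
print matches[0]  # stops at the closing quote, yielding the full magnet URI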
Example #15
    def use_season(self, provider, info):
        """ Setup method to define season search parameters

        Args:
            provider (str): Provider ID
            info (dict): Quasar search payload
        """
        definition = definitions[provider]
        season_query = definition['season_query'] if definition['season_query'] else ''
        log.debug("Season URL: %s%s" % (definition['base_url'], season_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_seasons'))
            self.max_size = get_float(get_setting('max_size_seasons'))
            self.check_sizes()
        self.info = info
        self.url = u"%s%s" % (definition['base_url'], season_query)
        if definition['season_keywords']:
            self.queries = ["%s" % definition['season_keywords']]
            self.extras = ["%s" % definition['season_extra'] if definition['season_extra'] else '']
            if definition['season_keywords2']:
                self.queries.append("%s" % definition['season_keywords2'])
                self.extras.append("%s" % definition['season_extra2'] if definition['season_extra2'] else '')
Example #16
def get_enabled_providers():
    """ Utility method to get all enabled provider IDs

    Returns:
        list: All available enabled provider IDs
    """
    results = []
    for provider in definitions:
        if get_setting('use_%s' % provider, bool):
            results.append(provider)
        if 'custom' in definitions[provider] and definitions[provider]['custom']:
            results.append(provider)
    return results
Example #17
def get_enabled_providers():
    """ Utility method to get all enabled provider IDs

    Returns:
        list: All available enabled provider IDs
    """
    results = []
    for provider in definitions:
        if get_setting('use_%s' % provider, bool):
            results.append(provider)
        if 'custom' in definitions[provider] and definitions[provider][
                'custom']:
            results.append(provider)
    return results
Example #18
    def use_episode(self, provider, payload):
        """ Setup method to define episode search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Quasar search payload
        """
        definition = definitions[provider]
        show_query = definition['show_query'] if definition['show_query'] else ''
        log.debug("Episode URL: %s%s" % (definition['base_url'], show_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_episodes'))
            self.max_size = get_float(get_setting('max_size_episodes'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], show_query)
        if definition['tv_keywords']:
            self.queries = ["%s" % definition['tv_keywords']]
            self.extras = ["%s" % definition['tv_extra'] if definition['tv_extra'] else '']
            # TODO this sucks, tv_keywords should be a list from the start..
            if definition['tv_keywords2']:
                self.queries.append(definition['tv_keywords2'])
                self.extras.append(definition['tv_extra2'] if definition['tv_extra2'] else '')
Example #19
    def use_movie(self, provider, payload):
        """ Setup method to define movie search parameters

        Args:
            provider (str): Provider ID
            payload (dict): Elementum search payload
        """
        definition = definitions[provider]
        definition = get_alias(definition, get_setting("%s_alias" % provider))
        if get_setting("use_opennic_dns",
                       bool) and "opennic_dns_alias" in definition:
            definition = get_alias(definition, definition["opennic_dns_alias"])
        movie_query = definition['movie_query'] if definition[
            'movie_query'] else ''
        log.debug("Movies URL: %s%s" % (definition['base_url'], movie_query))
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting('min_size_movies'))
            self.max_size = get_float(get_setting('max_size_movies'))
            self.check_sizes()
        self.info = payload
        self.url = u"%s%s" % (definition['base_url'], movie_query)
        if definition['movie_keywords']:
            self.queries = ["%s" % definition['movie_keywords']]
            self.extras = ["%s" % definition['movie_extra']]
Example #20
def get_enabled_providers(method):
    """ Utility method to get all enabled provider IDs

    Returns:
        list: All available enabled provider IDs
    """
    results = []
    type = "2"
    if method == "general":
        type = "0"
    elif method == "movie":
        type = "1"
    for provider in definitions:
        if get_setting('use_%s' % provider, bool):
            contains = get_setting('%s_contains' % provider,
                                   choices=('All', 'Movies', 'Serials'))
            if not contains or contains == "0":
                results.append(provider)
            elif contains == type or type == "0":
                results.append(provider)
        if 'custom' in definitions[provider] and definitions[provider][
                'custom']:
            results.append(provider)
    return results
Example #21
def process(provider, generator, filtering, verify_name=True, verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]

    client = Client()
    token = None
    logged_in = False
    token_auth = False

    if get_setting("use_cloudhole", bool):
        client.clearance = get_setting('clearance')
        client.user_agent = get_setting('user_agent')

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
        language_exceptions = get_setting('language_exceptions')
        if language_exceptions.strip().lower():
            filtering.language_exceptions = re.split(r',\s?', language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras:  %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        log.debug("[%s] After keywords  - Query: %s - Extra: %s" % (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # MagnetDL fix...
        url_search = url_search.replace('FIRSTLETTER', query[:1])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("-   %s query: %s" % (provider, repr(query)))
        log.debug("--  %s url_search before token: %s" % (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("----%s filtering with post_data: %s" % (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if token:
            log.info('[%s] Reusing existing token' % provider)
            url_search = url_search.replace('TOKEN', token)
        elif 'token' in definition:
            token_url = definition['base_url'] + definition['token']
            log.debug("Getting token for %s at %s" % (provider, repr(token_url)))
            client.open(token_url.encode('utf-8'))
            try:
                token_data = json.loads(client.content)
            except:
                log.error('%s: Failed to get token for %s' % (provider, repr(url_search)))
                return filtering.results
            log.debug("Token response for %s: %s" % (provider, repr(token_data)))
            if 'token' in token_data:
                token = token_data['token']
                log.debug("Got token for %s: %s" % (provider, repr(token)))
                url_search = url_search.replace('TOKEN', token)
            else:
                log.warning('%s: Unable to get token for %s' % (provider, repr(url_search)))

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif token_auth:
            log.info("[%s] Reusing previous token authorization" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)
            if not username and not password and not passkey:
                for addon_name in ('script.magnetic.%s' % provider, 'script.magnetic.%s-mc' % provider):
                    for setting in ('username', 'password'):
                        try:
                            value = xbmcaddon.Addon(addon_name).getSetting(setting)
                            set_setting('%s_%s' % (provider, setting), value)
                            if setting == 'username':
                                username = value
                            if setting == 'password':
                                password = value
                        except:
                            pass

            if passkey:
                logged_in = True
                client.passkey = passkey
                url_search = url_search.replace('PASSKEY', passkey)

            elif 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace('USERNAME', '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'alphareign':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_name = re.search(r'name="csrf_name" value="(.*?)"', client.content)
                        csrf_value = re.search(r'name="csrf_value" value="(.*?)"', client.content)
                        if csrf_name and csrf_value:
                            login_object = login_object.replace("CSRF_NAME", '"%s"' % csrf_name.group(1))
                            login_object = login_object.replace("CSRF_VALUE", '"%s"' % csrf_value.group(1))
                        else:
                            logged_in = True
                if provider == 'hd-torrents':
                    client.open(definition['root_url'] + definition['login_path'])
                    if client.content:
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        if csrf_token:
                            login_object = login_object.replace('CSRF_TOKEN', '"%s"' % csrf_token.group(1))
                        else:
                            logged_in = True

                if 'token_auth' in definition:
                    # log.debug("[%s] logging in with: %s" % (provider, login_object))
                    if client.open(definition['root_url'] + definition['token_auth'], post_data=eval(login_object)):
                        try:
                            token_data = json.loads(client.content)
                        except:
                            log.error('%s: Failed to get token from %s' % (provider, definition['token_auth']))
                            return filtering.results
                        log.debug("Token response for %s: %s" % (provider, repr(token_data)))
                        if 'token' in token_data:
                            client.token = token_data['token']
                            log.debug("Auth token for %s: %s" % (provider, repr(client.token)))
                        else:
                            log.error('%s: Unable to get auth token for %s' % (provider, repr(url_search)))
                            return filtering.results
                        log.info('[%s] Token auth successful' % provider)
                        token_auth = True
                    else:
                        log.error("[%s] Token auth failed with response: %s" % (provider, repr(client.content)))
                        return filtering.results
                elif not logged_in and client.login(definition['root_url'] + definition['login_path'],
                                                    eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider, repr(client.content))
                    return filtering.results

                if logged_in:
                    if provider == 'hd-torrents':
                        client.open(definition['root_url'] + '/torrents.php')
                        csrf_token = re.search(r'name="csrfToken" value="(.*?)"', client.content)
                        url_search = url_search.replace("CSRF_TOKEN", csrf_token.group(1))

        log.info(">  %s search URL: %s" % (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'), post_data=payload, get_data=data)
        filtering.results.extend(
            generate_payload(provider,
                             generator(provider, client),
                             filtering,
                             verify_name,
                             verify_size))
    return filtering.results
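
For orientation, a sketch of how a caller would typically drive process(); run_provider_sketch is hypothetical glue, only process, extract_torrents and the Filtering setup methods come from the code in this section:

def run_provider_sketch(provider, payload, method):
    # Build a Filtering instance for the search type, then hand process()
    # the matching extraction generator.
    filtering = Filtering()
    if method == 'movie':
        filtering.use_movie(provider, payload)
    else:
        filtering.use_general(provider, payload)
    return process(provider, extract_torrents, filtering,
                   verify_name=True, verify_size=True)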
Example #22
try:
    ssl._create_default_https_context = ssl._create_unverified_context
except:
    log.debug("Skipping SSL workaround due to old Python version")
    pass

USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 " \
             "(KHTML, like Gecko) Chrome/53.0.2785.21 Safari/537.36"
try:
    PATH_TEMP = translatePath("special://temp").decode(
        sys.getfilesystemencoding(), 'ignore')
except:
    PATH_TEMP = translatePath("special://temp").decode('utf-8')

if get_setting("use_opennic_dns", bool):
    import socket
    prv_getaddrinfo = socket.getaddrinfo
    dns_cache = {
        ('nnm-club.lib', 80, 0, 1): [(2, 1, 0, '', ('81.17.30.22', 80))],
        ('rustorka.lib', 80, 0, 1): [(2, 1, 0, '', ('93.171.158.6', 80))],
        ('rutracker.lib', 80, 0, 1): [(2, 1, 0, '', ('195.82.146.214', 80))],
        ('rutor.lib', 80, 0, 1): [(2, 1, 0, '', ('185.191.239.206', 80))]
    }

    def new_getaddrinfo(*args):
        try:
            return dns_cache[args]
        except KeyError:
            res = prv_getaddrinfo(*args)
            dns_cache[args] = res
            return res  # was missing: without it, cache misses returned None
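
    # Assumed completion -- the original snippet cuts off above. Presumably
    # the module goes on to install the caching resolver so the hard-coded
    # OpenNIC entries take effect for subsequent lookups:
    socket.getaddrinfo = new_getaddrinfo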
Example #23
def cleanup_results(results_list):
    """ Remove duplicate results, hash results without an info_hash, and sort by seeders

    Args:
        results_list (list): Results to clean-up

    Returns:
        list: De-duplicated, hashed and sorted results
    """
    if len(results_list) == 0:
        return []

    hashes = []
    filtered_list = []
    allow_noseeds = get_setting('allow_noseeds', bool)
    for result in results_list:
        if not result['seeds'] and not allow_noseeds:
            continue

        if not result['uri']:
            if not result['name']:
                continue
            try:
                log.warning('[%s] No URI for %s' %
                            (result['provider'][16:-8], repr(result['name'])))
            except Exception as e:
                import traceback
                log.warning("%s logging failed with: %s" %
                            (result['provider'], repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))
            continue

        hash_ = result['info_hash'].upper()

        if not hash_:
            if result['uri'] and result['uri'].startswith('magnet'):
                hash_ = Magnet(result['uri']).info_hash.upper()
            else:
                hash_ = hashlib.md5(result['uri']).hexdigest()

        try:
            log.debug("[%s] Hash for %s: %s" %
                      (result['provider'][16:-8], repr(result['name']), hash_))
        except Exception as e:
            import traceback
            log.warning("%s logging failed with: %s" %
                        (result['provider'], repr(e)))
            map(log.debug, traceback.format_exc().split("\n"))

        if hash_ not in hashes:
            filtered_list.append(result)
            hashes.append(hash_)

    if (get_setting("sort_by_resolution", bool)):
        log.debug(
            "[EXPEREMENTAL] Start last sorting list by resolution of all result before send to Quasar"
        )
        filtered_list = sorted(filtered_list,
                               key=lambda r: (get_int(r.pop('resolution'))),
                               reverse=True)
    else:
        filtered_list = sorted(filtered_list,
                               key=lambda r: (get_int(r['seeds'])),
                               reverse=True)

    return filtered_list
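
The de-duplication core in isolation, as a standalone sketch (data invented): the first result seen for a given info_hash wins, later duplicates are dropped.

results = [
    {'info_hash': 'ab12', 'name': 'Foo 1080p', 'seeds': 90},
    {'info_hash': 'AB12', 'name': 'Foo 1080p (dupe)', 'seeds': 10},
    {'info_hash': 'cd34', 'name': 'Bar 720p', 'seeds': 50},
]
hashes = []
filtered_list = []
for result in results:
    hash_ = result['info_hash'].upper()
    if hash_ not in hashes:
        filtered_list.append(result)
        hashes.append(hash_)
print [r['name'] for r in filtered_list]  # ['Foo 1080p', 'Bar 720p']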
Example #24
def extract_from_api(provider, client):
    """ Main API parsing generator for API-based providers

    An almost clever API parser, mostly just for YTS, RARBG and T411

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    try:
        data = json.loads(client.content)
    except:
        data = []
    log.debug("[%s] JSON response from API: %s" % (provider, repr(data)))

    definition = definitions[provider]
    api_format = definition['api_format']

    results = []
    result_keys = api_format['results'].split('.')
    log.debug("%s result_keys: %s" % (provider, repr(result_keys)))
    for key in result_keys:
        if key in data:
            data = data[key]
        else:
            data = []
        # log.debug("%s nested results: %s" % (provider, repr(data)))
    results = data
    log.debug("%s results: %s" % (provider, repr(results)))

    if 'subresults' in api_format:
        from copy import deepcopy
        for result in results:  # A little too specific to YTS but who cares...
            result['name'] = result[api_format['name']]
        subresults = []
        subresults_keys = api_format['subresults'].split('.')
        for key in subresults_keys:
            for result in results:
                if key in result:
                    for subresult in result[key]:
                        sub = deepcopy(result)
                        sub.update(subresult)
                        subresults.append(sub)
        results = subresults
        log.debug("%s with subresults: %s" % (provider, repr(results)))

    for result in results:
        if not result or not isinstance(result, dict):
            continue
        name = ''
        info_hash = ''
        torrent = ''
        size = ''
        seeds = ''
        peers = ''
        if 'name' in api_format:
            name = result[api_format['name']]
        if 'torrent' in api_format:
            torrent = result[api_format['torrent']]
            if 'download_path' in definition:
                torrent = definition['base_url'] + definition['download_path'] + torrent
            if client.token:
                user_agent = USER_AGENT
                if get_setting("use_cloudhole", bool):
                    user_agent = get_setting("user_agent")
                headers = {'Authorization': client.token, 'User-Agent': user_agent}
                log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
        if 'info_hash' in api_format:
            info_hash = result[api_format['info_hash']]
        if 'quality' in api_format:  # Again quite specific to YTS...
            name = "%s - %s" % (name, result[api_format['quality']])
        if 'size' in api_format:
            size = result[api_format['size']]
            if type(size) in (long, int):
                size = sizeof(size)
            elif type(size) in (str, unicode) and size.isdigit():
                size = sizeof(int(size))
        if 'seeds' in api_format:
            seeds = result[api_format['seeds']]
            if type(seeds) in (str, unicode) and seeds.isdigit():
                seeds = int(seeds)
        if 'peers' in api_format:
            peers = result[api_format['peers']]
            if type(peers) in (str, unicode) and peers.isdigit():
                peers = int(peers)
        yield (name, info_hash, torrent, size, seeds, peers)
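
For reference, a hypothetical api_format of the shape the generator above walks. The key names mirror the lookups in the code; the dotted results path and the field names on the right are invented (YTS-like):

api_format = {
    'results': 'data.movies',   # dotted path, walked key by key into the JSON
    'subresults': 'torrents',   # each movie fans out into one result per torrent
    'name': 'title',
    'torrent': 'url',
    'info_hash': 'hash',
    'quality': 'quality',       # appended to the name when present
    'size': 'size_bytes',
    'seeds': 'seeds',
    'peers': 'peers',
}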
Example #25
def search(payload, method="general"):
    """ Main search entrypoint

    Args:
        payload (dict): Search payload from Quasar.
        method   (str): Type of search, can be ``general``, ``movie``, ``show``, ``season`` or ``anime``

    Returns:
        list: All filtered results in the format Quasar expects
    """
    log.debug("Searching with payload (%s): %s" % (method, repr(payload)))

    if method == 'general':
        payload = {'title': payload}

    global request_time
    global provider_names
    global provider_results
    global available_providers

    provider_names = []
    provider_results = []
    available_providers = 0
    request_time = time.time()

    providers = get_enabled_providers()

    if len(providers) == 0:
        notify(translation(32060), image=get_icon_path())
        log.error("No providers enabled")
        return []

    log.info(
        "Burstin' with %s" %
        ", ".join([definitions[provider]['name'] for provider in providers]))

    if get_setting("use_cloudhole", bool):
        clearance, user_agent = get_cloudhole_clearance(get_cloudhole_key())
        set_setting('clearance', clearance)
        set_setting('user_agent', user_agent)

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if not kodi_language:
            log.warning("Kodi returned empty language code...")
        elif 'titles' not in payload or not payload['titles']:
            log.info("No translations available...")
        elif payload['titles'] and kodi_language not in payload['titles']:
            log.info("No '%s' translation available..." % kodi_language)

    p_dialog = xbmcgui.DialogProgressBG()
    p_dialog.create('Quasar [COLOR FFFF6B00]Burst[/COLOR]', translation(32061))
    for provider in providers:
        available_providers += 1
        provider_names.append(definitions[provider]['name'])
        task = Thread(target=run_provider, args=(provider, payload, method))
        task.start()

    providers_time = time.time()
    total = float(available_providers)

    # Exit when all providers have returned results or the timeout is reached, checking every 250ms
    while time.time() - providers_time < timeout and available_providers > 0:
        timer = time.time() - providers_time
        log.debug("Timer: %ds / %ds" % (timer, timeout))
        if timer > timeout:
            break
        message = translation(32062) % available_providers if available_providers > 1 else translation(32063)
        p_dialog.update(int((total - available_providers) / total * 100),
                        message=message)
        time.sleep(0.25)

    p_dialog.close()
    del p_dialog

    if available_providers > 0:
        message = u', '.join(provider_names)
        message = message + translation(32064)
        log.warning(message.encode('utf-8'))
        notify(message, ADDON_ICON)

    log.debug("all provider_results: %s" % repr(provider_results))

    filtered_results = apply_filters(provider_results)

    log.debug("all filtered_results: %s" % repr(filtered_results))

    log.info("Providers returned %d results in %s seconds" %
             (len(filtered_results), round(time.time() - request_time, 2)))

    return filtered_results
Example #26
def extract_from_api(provider, client):
    """ Main API parsing generator for API-based providers

    An almost clever API parser, mostly just for YTS, RARBG and T411

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    try:
        data = json.loads(client.content)
    except:
        data = []
    log.debug("[%s] JSON response from API: %s" % (provider, repr(data)))

    definition = definitions[provider]
    api_format = definition['api_format']

    results = []
    result_keys = api_format['results'].split('.')
    log.debug("%s result_keys: %s" % (provider, repr(result_keys)))
    for key in result_keys:
        if key in data:
            data = data[key]
        else:
            data = []
        # log.debug("%s nested results: %s" % (provider, repr(data)))
    results = data
    log.debug("%s results: %s" % (provider, repr(results)))

    if 'subresults' in api_format:
        from copy import deepcopy
        for result in results:  # A little too specific to YTS but who cares...
            result['name'] = result[api_format['name']]
        subresults = []
        subresults_keys = api_format['subresults'].split('.')
        for key in subresults_keys:
            for result in results:
                if key in result:
                    for subresult in result[key]:
                        sub = deepcopy(result)
                        sub.update(subresult)
                        subresults.append(sub)
        results = subresults
        log.debug("%s with subresults: %s" % (provider, repr(results)))

    for result in results:
        if not result or not isinstance(result, dict):
            continue
        name = ''
        info_hash = ''
        torrent = ''
        size = ''
        seeds = ''
        peers = ''
        if 'name' in api_format:
            name = result[api_format['name']]
        if 'torrent' in api_format:
            torrent = result[api_format['torrent']]
            if 'download_path' in definition:
                torrent = definition['base_url'] + definition[
                    'download_path'] + torrent
            if client.token:
                user_agent = USER_AGENT
                if get_setting("use_cloudhole", bool):
                    user_agent = get_setting("user_agent")
                headers = {
                    'Authorization': client.token,
                    'User-Agent': user_agent
                }
                log.debug("[%s] Appending headers: %s" %
                          (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" %
                          (provider, repr(torrent)))
        if 'info_hash' in api_format:
            info_hash = result[api_format['info_hash']]
        if 'quality' in api_format:  # Again quite specific to YTS...
            name = "%s - %s" % (name, result[api_format['quality']])
        if 'size' in api_format:
            size = result[api_format['size']]
            if type(size) in (long, int):
                size = sizeof(size)
            elif type(size) in (str, unicode) and size.isdigit():
                size = sizeof(int(size))
        if 'seeds' in api_format:
            seeds = result[api_format['seeds']]
            if type(seeds) in (str, unicode) and seeds.isdigit():
                seeds = int(seeds)
        if 'peers' in api_format:
            peers = result[api_format['peers']]
            if type(peers) in (str, unicode) and peers.isdigit():
                peers = int(peers)
        yield (name, info_hash, torrent, size, seeds, peers)
Example #27
def extract_torrents(provider, client):
    """ Main torrent extraction generator for non-API based providers

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    definition = definitions[provider]
    log.debug("Extracting torrents from %s using definitions: %s" %
              (provider, repr(definition)))

    if not client.content:
        return

    dom = Html().feed(client.content)

    row_search = "dom." + definition['parser']['row']
    name_search = definition['parser']['name']
    torrent_search = definition['parser']['torrent']
    info_hash_search = definition['parser']['infohash']
    size_search = definition['parser']['size']
    seeds_search = definition['parser']['seeds']
    peers_search = definition['parser']['peers']

    log.debug("[%s] Parser: %s" % (provider, repr(definition['parser'])))

    q = Queue()
    threads = []
    needs_subpage = 'subpage' in definition and definition['subpage']

    if needs_subpage:

        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash):
            try:
                log.debug("[%s] Getting subpage at %s" %
                          (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" %
                          (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey

            if get_setting("use_cloudhole", bool):
                subclient.clearance = get_setting('clearance')
                subclient.user_agent = get_setting('user_agent')

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'))

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' %
                          (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(
                            uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error(
                        "[%s] Subpage extraction for %s failed with: %s" %
                        (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)

    if not dom:
        return

    for item in eval(row_search):
        if not item:
            continue
        name = eval(name_search)
        torrent = eval(torrent_search) if torrent_search else ""
        size = eval(size_search) if size_search else ""
        seeds = eval(seeds_search) if seeds_search else ""
        peers = eval(peers_search) if peers_search else ""
        info_hash = eval(info_hash_search) if info_hash_search else ""

        # Pass client cookies with torrent if private
        if (definition['private'] or get_setting(
                "use_cloudhole", bool)) and not torrent.startswith('magnet'):
            user_agent = USER_AGENT
            if get_setting("use_cloudhole", bool):
                user_agent = get_setting("user_agent")

            if client.passkey:
                torrent = torrent.replace('PASSKEY', client.passkey)
            elif client.token:
                headers = {
                    'Authorization': client.token,
                    'User-Agent': user_agent
                }
                log.debug("[%s] Appending headers: %s" %
                          (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" %
                          (provider, repr(torrent)))
            else:
                log.debug("[%s] Cookies: %s" %
                          (provider, repr(client.cookies())))
                parsed_url = urlparse(definition['root_url'])
                cookie_domain = '{uri.netloc}'.format(uri=parsed_url).replace(
                    'www.', '')
                cookies = []
                log.debug("[%s] cookie_domain: %s" % (provider, cookie_domain))
                for cookie in client._cookies:
                    log.debug(
                        "[%s] cookie for domain: %s (%s=%s)" %
                        (provider, cookie.domain, cookie.name, cookie.value))
                    if cookie_domain in cookie.domain:
                        cookies.append(cookie)
                if cookies:
                    headers = {
                        'Cookie':
                        ";".join(
                            ["%s=%s" % (c.name, c.value) for c in cookies]),
                        'User-Agent':
                        user_agent
                    }
                    log.debug("[%s] Appending headers: %s" %
                              (provider, repr(headers)))
                    torrent = append_headers(torrent, headers)
                    log.debug("[%s] Torrent with headers: %s" %
                              (provider, repr(torrent)))

        if name and torrent and needs_subpage:
            if not torrent.startswith('http'):
                torrent = definition['root_url'] + torrent.encode('utf-8')
            t = Thread(target=extract_subpage,
                       args=(q, name, torrent, size, seeds, peers, info_hash))
            threads.append(t)
        else:
            yield (name, info_hash, torrent, size, seeds, peers)

    if needs_subpage:
        log.debug("[%s] Starting subpage threads..." % provider)
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        log.debug("[%s] Threads returned: %s" % (provider, repr(threads)))

        for i in range(q.qsize()):
            ret = q.get_nowait()
            log.debug("[%s] Queue %d got: %s" % (provider, i, repr(ret)))
            yield ret
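
Stripped of provider specifics, the subpage fan-out above is a plain queue-and-threads pattern. A self-contained sketch using only the standard library (Python 2, matching the code above):

from Queue import Queue
from threading import Thread

def fetch_sketch(q, n):
    # Stand-in for extract_subpage: each worker puts exactly one tuple
    # on the queue, the parent drains it after join().
    q.put_nowait((n, n * n))

q = Queue()
threads = [Thread(target=fetch_sketch, args=(q, n)) for n in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
for i in range(q.qsize()):
    print q.get_nowait()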
Example #28
def search(payload, method="general"):
    """ Main search entrypoint

    Args:
        payload (dict): Search payload from Quasar.
        method   (str): Type of search, can be ``general``, ``movie``, ``show``, ``season`` or ``anime``

    Returns:
        list: All filtered results in the format Quasar expects
    """
    log.debug("Searching with payload (%s): %s" % (method, repr(payload)))

    if method == 'general':
        payload = {
            'title': payload
        }

    global request_time
    global provider_names
    global provider_results
    global available_providers

    provider_names = []
    provider_results = []
    available_providers = 0
    request_time = time.time()

    providers = get_enabled_providers()

    if len(providers) == 0:
        notify(translation(32060), image=get_icon_path())
        log.error("No providers enabled")
        return []

    log.info("Burstin' with %s" % ", ".join([definitions[provider]['name'] for provider in providers]))

    if get_setting("use_cloudhole", bool):
        clearance, user_agent = get_cloudhole_clearance(get_cloudhole_key())
        set_setting('clearance', clearance)
        set_setting('user_agent', user_agent)

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if not kodi_language:
            log.warning("Kodi returned empty language code...")
        elif 'titles' not in payload or not payload['titles']:
            log.info("No translations available...")
        elif payload['titles'] and kodi_language not in payload['titles']:
            log.info("No '%s' translation available..." % kodi_language)

    p_dialog = xbmcgui.DialogProgressBG()
    p_dialog.create('Quasar [COLOR FFFF6B00]Burst[/COLOR]', translation(32061))
    for provider in providers:
        available_providers += 1
        provider_names.append(definitions[provider]['name'])
        task = Thread(target=run_provider, args=(provider, payload, method))
        task.start()

    providers_time = time.time()
    total = float(available_providers)

    # Exit when all providers have returned results or the timeout is reached, checking every 250ms
    while time.time() - providers_time < timeout and available_providers > 0:
        timer = time.time() - providers_time
        log.debug("Timer: %ds / %ds" % (timer, timeout))
        if timer > timeout:
            break
        message = translation(32062) % available_providers if available_providers > 1 else translation(32063)
        p_dialog.update(int((total - available_providers) / total * 100), message=message)
        time.sleep(0.25)

    p_dialog.close()
    del p_dialog

    if available_providers > 0:
        message = u', '.join(provider_names)
        message = message + translation(32064)
        log.warning(message.encode('utf-8'))
        notify(message, ADDON_ICON)

    log.debug("all provider_results: %s" % repr(provider_results))

    filtered_results = apply_filters(provider_results)

    log.debug("all filtered_results: %s" % repr(filtered_results))

    log.info("Providers returned %d results in %s seconds" % (len(filtered_results), round(time.time() - request_time, 2)))

    return filtered_results
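
A hedged usage sketch for this entrypoint. Quasar normally builds the payload itself; the episode fields below mirror what the code reads but are assumed for illustration, and the result keys (name, seeds, peers, uri) come from generate_payload:

# General search: search() wraps a bare string into {'title': ...} itself
results = search("big buck bunny")

# Episode search: this payload shape is assumed for illustration
results = search({
    'title': "some show",
    'titles': {'en': "some show"},
    'season': 1,
    'episode': 2,
}, method='show')

for result in results:
    print("%s (S:%s/P:%s) -> %s" % (result['name'], result['seeds'],
                                    result['peers'], result['uri']))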
Example #29
import time

from threading import Thread
from urlparse import urlparse
from quasar.provider import append_headers, get_setting, set_setting, log

from parser.ehp import Html
from provider import process
from providers.definitions import definitions, longest
from filtering import apply_filters, Filtering
from client import USER_AGENT, Client, get_cloudhole_key, get_cloudhole_clearance
from utils import ADDON_ICON, notify, translation, sizeof, get_icon_path, get_enabled_providers

provider_names = []
provider_results = []
available_providers = 0
request_time = time.time()
timeout = get_setting("timeout", int)


def search(payload, method="general"):
    """ Main search entrypoint

    Args:
        payload (dict): Search payload from Quasar.
        method   (str): Type of search, can be ``general``, ``movie``, ``show``, ``season`` or ``anime``

    Returns:
        list: All filtered results in the format Quasar expects
    """
    log.debug("Searching with payload (%s): %s" % (method, repr(payload)))

    if method == 'general':
Example #30
def process(provider,
            generator,
            filtering,
            has_special,
            verify_name=True,
            verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        has_special    (bool): Whether title contains special chars
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    client = Client()
    logged_in = False

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
        language_exceptions = get_setting('language_exceptions')
        if language_exceptions.strip().lower():
            filtering.language_exceptions = re.split(r',\s?',
                                                     language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras:  %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" %
                  (provider, repr(query), repr(extra)))
        if has_special:
            # Strip the quotes surrounding {title*} keywords when the title contains special chars
            query = re.sub(r"[\"']({title.*?})[\"']", r'\1', query)

        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        if 'charset' in definition and 'utf' not in definition[
                'charset'].lower():
            try:
                query = urllib.quote(query.encode(definition['charset']))
                extra = urllib.quote(extra.encode(definition['charset']))
            except:
                pass

        log.debug("[%s] After keywords  - Query: %s - Extra: %s" %
                  (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("-   %s query: %s" % (provider, repr(query)))
        log.debug("--  %s url_search before token: %s" %
                  (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("----%s filtering with post_data: %s" %
                  (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)

            if 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace(
                    'USERNAME',
                    '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'lostfilm':
                    client.open(definition['root_url'] +
                                '/v_search.php?c=110&s=1&e=1')
                    if client.content != 'log in first':
                        log.info('[%s] Login successful' % provider)
                        logged_in = True

                if not logged_in and client.login(
                        definition['root_url'] + definition['login_path'],
                        eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider,
                              repr(client.content))
                    notify(translation(32089) % provider,
                           image=get_icon_path())
                    return filtering.results

                if logged_in:
                    if provider == 'lostfilm':
                        log.info('[%s] Search lostfilm serial ID...', provider)
                        url_search = fix_lf(url_search)
                        client.open(url_search.encode('utf-8'),
                                    post_data=payload,
                                    get_data=data)
                        search_info = re.search(r'rel="(.*?)">',
                                                client.content)
                        if search_info:
                            series_details = re.search(r'(\d+),(\d+),(\d+)',
                                                       search_info.group(1))
                            client.open(definition['root_url'] +
                                        '/v_search.php?c=%s&s=%s&e=%s' %
                                        (series_details.group(1),
                                         series_details.group(2),
                                         series_details.group(3)))
                            redirect_url = re.search(ur'url=(.*?)">',
                                                     client.content)
                            if redirect_url is not None:
                                url_search = redirect_url.group(1)
                        else:
                            log.info('[%s] Serial ID not found in %s' %
                                     (provider, url_search))
                            return filtering.results

        log.info(">  %s search URL: %s" %
                 (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'),
                    post_data=payload,
                    get_data=data)
        filtering.results.extend(
            generate_payload(provider, generator(provider, client), filtering,
                             verify_name, verify_size))
    return filtering.results
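
The QUERY/EXTRA handling above is plain string templating against the provider definition. A worked example with an invented definition fragment (the URL and separator are illustrative only):

definition = {'separator': '+'}                      # hypothetical fragment
url = 'https://example.org/search?q=QUERYEXTRA'      # stands in for filtering.url

query = 'some title s01e02'
extra = ' 720p'

url_search = url.replace('QUERY', query).replace('EXTRA', extra)
url_search = url_search.replace(' ', definition['separator'])
print(url_search)  # https://example.org/search?q=some+title+s01e02+720p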
Example #31
def extract_torrents(provider, client):
    """ Main torrent extraction generator for non-API based providers

    Args:
        provider  (str): Provider ID
        client (Client): Client class instance

    Yields:
        tuple: A torrent result
    """
    definition = definitions[provider]
    log.debug("Extracting torrents from %s using definitions: %s" % (provider, repr(definition)))

    if not client.content:
        return  # a bare return ends the generator ('raise StopIteration' breaks under PEP 479)

    dom = Html().feed(client.content)

    row_search = "dom." + definition['parser']['row']
    name_search = definition['parser']['name']
    torrent_search = definition['parser']['torrent']
    info_hash_search = definition['parser']['infohash']
    size_search = definition['parser']['size']
    seeds_search = definition['parser']['seeds']
    peers_search = definition['parser']['peers']

    log.debug("[%s] Parser: %s" % (provider, repr(definition['parser'])))

    q = Queue()
    threads = []
    needs_subpage = 'subpage' in definition and definition['subpage']

    if needs_subpage:
        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash):
            try:
                log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
            except Exception as e:
                import traceback
                log.error("[%s] Subpage logging failed with: %s" % (provider, repr(e)))
                map(log.debug, traceback.format_exc().split("\n"))

            # New client instance, otherwise it's race conditions all over the place
            subclient = Client()
            subclient.passkey = client.passkey

            if get_setting("use_cloudhole", bool):
                subclient.clearance = get_setting('clearance')
                subclient.user_agent = get_setting('user_agent')

            uri = torrent.split('|')  # Split cookies for private trackers
            subclient.open(uri[0].encode('utf-8'))

            if 'bittorrent' in subclient.headers.get('content-type', ''):
                log.debug('[%s] bittorrent content-type for %s' % (provider, repr(torrent)))
                if len(uri) > 1:  # Stick back cookies if needed
                    torrent = '%s|%s' % (torrent, uri[1])
            else:
                try:
                    torrent = extract_from_page(provider, subclient.content)
                    if torrent and not torrent.startswith('magnet') and len(uri) > 1:  # Stick back cookies if needed
                        torrent = '%s|%s' % (torrent, uri[1])
                except Exception as e:
                    import traceback
                    log.error("[%s] Subpage extraction for %s failed with: %s" % (provider, repr(uri[0]), repr(e)))
                    map(log.debug, traceback.format_exc().split("\n"))

            ret = (name, info_hash, torrent, size, seeds, peers)
            q.put_nowait(ret)

    if not dom:
        return  # same: end the generator without raising StopIteration

    for item in eval(row_search):
        if not item:
            continue
        name = eval(name_search)
        torrent = eval(torrent_search) if torrent_search else ""
        size = eval(size_search) if size_search else ""
        seeds = eval(seeds_search) if seeds_search else ""
        peers = eval(peers_search) if peers_search else ""
        info_hash = eval(info_hash_search) if info_hash_search else ""

        # Pass client cookies with torrent if private
        if (definition['private'] or get_setting("use_cloudhole", bool)) and not torrent.startswith('magnet'):
            user_agent = USER_AGENT
            if get_setting("use_cloudhole", bool):
                user_agent = get_setting("user_agent")

            if client.passkey:
                torrent = torrent.replace('PASSKEY', client.passkey)
            elif client.token:
                headers = {'Authorization': client.token, 'User-Agent': user_agent}
                log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                torrent = append_headers(torrent, headers)
                log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))
            else:
                log.debug("[%s] Cookies: %s" % (provider, repr(client.cookies())))
                parsed_url = urlparse(definition['root_url'])
                cookie_domain = '{uri.netloc}'.format(uri=parsed_url).replace('www.', '')
                cookies = []
                log.debug("[%s] cookie_domain: %s" % (provider, cookie_domain))
                for cookie in client._cookies:
                    log.debug("[%s] cookie for domain: %s (%s=%s)" % (provider, cookie.domain, cookie.name, cookie.value))
                    if cookie_domain in cookie.domain:
                        cookies.append(cookie)
                if cookies:
                    headers = {'Cookie': ";".join(["%s=%s" % (c.name, c.value) for c in cookies]), 'User-Agent': user_agent}
                    log.debug("[%s] Appending headers: %s" % (provider, repr(headers)))
                    torrent = append_headers(torrent, headers)
                    log.debug("[%s] Torrent with headers: %s" % (provider, repr(torrent)))

        if name and torrent and needs_subpage:
            if not torrent.startswith('http'):
                torrent = definition['root_url'] + torrent.encode('utf-8')
            t = Thread(target=extract_subpage, args=(q, name, torrent, size, seeds, peers, info_hash))
            threads.append(t)
        else:
            yield (name, info_hash, torrent, size, seeds, peers)

    if needs_subpage:
        log.debug("[%s] Starting subpage threads..." % provider)
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        log.debug("[%s] Threads returned: %s" % (provider, repr(threads)))

        for i in range(q.qsize()):
            ret = q.get_nowait()
            log.debug("[%s] Queue %d got: %s" % (provider, i, repr(ret)))
            yield ret
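
Note that row_search, name_search and friends are eval'd strings: the provider definition carries small Python expressions that get evaluated against dom and each item. A stripped-down sketch of that mechanism with invented selectors (real entries are ehp expressions in providers.definitions):

# Hypothetical parser entries; real ones are ehp expressions over 'dom'/'item'
parser = {
    'row':  "rows",           # eval'd once to yield the row iterable
    'name': "item['name']",   # eval'd per row
    'size': "item['size']",
}

rows = [{'name': 'Some.Release.1080p.WEB-DL', 'size': '1.4 GB'}]

for item in eval(parser['row']):
    print("%s / %s" % (eval(parser['name']), eval(parser['size'])))

The upside is that new trackers are pure data; the downside is that definitions are executed as code and must be trusted.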
Example #32
    def __init__(self):
        resolutions = OrderedDict()
        resolutions['filter_240p'] = ['240p', 'tvrip', 'satrip', 'vhsrip']
        resolutions['filter_480p'] = ['480p', 'xvid', 'dvd', 'dvdrip', 'hdtv']
        resolutions['filter_720p'] = ['720p', 'hdrip', 'bluray', 'brrip', 'bdrip']
        resolutions['filter_1080p'] = ['1080p', 'fullhd', '_fhd_']
        resolutions['filter_2k'] = ['_2k_', '1440p']
        resolutions['filter_4k'] = ['_4k_', '2160p']
        self.resolutions = resolutions

        self.release_types = {
            'filter_brrip': ['brrip', 'bdrip', 'bluray'],
            'filter_webdl': ['webdl', 'webrip', 'web_dl', 'dlrip', '_yts_'],
            'filter_hdrip': ['hdrip'],
            'filter_hdtv': ['hdtv'],
            'filter_dvd': ['_dvd_', 'dvdrip'],
            'filter_dvdscr': ['dvdscr'],
            'filter_screener': ['screener', '_scr_'],
            'filter_3d': ['_3d_'],
            'filter_telesync': ['telesync', '_ts_', '_tc_'],
            'filter_cam': ['_cam_', 'hdcam'],
            'filter_tvrip': ['tvrip', 'satrip'],
            'filter_vhsrip': ['vhsrip'],
            'filter_trailer': ['trailer'],
            'filter_workprint': ['workprint']
        }

        require = []
        resolutions_allow = []
        releases_allow = []
        releases_deny = []

        for resolution in self.resolutions:
            if get_setting(resolution, bool):
                resolutions_allow.append(resolution)
                # Add enabled resolutions to allowed release types to match
                # previous versions' behavior with certain providers
                # with no release types in torrent names, ie. YTS
                releases_allow.extend(self.resolutions[resolution])
        self.resolutions_allow = resolutions_allow

        # Skip resolution filtering if we're allowing all of them anyway
        self.filter_resolutions = True
        if len(self.resolutions_allow) == len(self.resolutions):
            self.filter_resolutions = False

        for release_type in self.release_types:
            if get_setting(release_type, bool):
                releases_allow.extend(self.release_types[release_type])
            else:
                releases_deny.extend(self.release_types[release_type])
        self.releases_allow = releases_allow
        self.releases_deny = releases_deny

        if get_setting('additional_filters', bool):
            accept = get_setting('accept').strip().lower()
            if accept:
                accept = re.split(r',\s?', accept)
                releases_allow.extend(accept)

            block = get_setting('block').strip().lower()
            if block:
                block = re.split(r',\s?', block)
                releases_deny.extend(block)

            require = get_setting('require').strip().lower()
            if require:
                require = re.split(r',\s?', require)

        self.require_keywords = require

        self.min_size = get_float(get_setting('min_size'))
        self.max_size = get_float(get_setting('max_size'))
        self.check_sizes()

        self.filter_title = False

        self.queries = []
        self.extras = []

        self.info = dict(title="", titles=[])
        self.kodi_language = ''
        self.language_exceptions = []
        self.get_data = {}
        self.post_data = {}
        self.url = ''
        self.title = ''
        self.reason = ''
        self.results = []
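
What the allow/deny lists amount to is substring matching against a normalized release name. A hedged sketch of that classification, assuming the underscore-padded normalization implied by keywords like '_fhd_' (the real matcher lives elsewhere in Filtering):

def classify(name, releases_allow, releases_deny):
    # Pad with underscores so keywords like '_ts_' match on word boundaries
    normalized = '_%s_' % name.lower().replace(' ', '_').replace('.', '_')
    if any(keyword in normalized for keyword in releases_deny):
        return False
    return any(keyword in normalized for keyword in releases_allow)

print(classify('Movie.2016.1080p.BluRay.x264', ['1080p', 'bluray'], ['_cam_']))  # True
print(classify('Movie.2016.HDCAM', ['1080p', 'bluray'], ['_cam_', 'hdcam']))     # False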
Example #33
import time
import xbmcaddon

from threading import Thread
from urlparse import urlparse
from quasar.provider import append_headers, get_setting, log

from parser.ehp import Html
from provider import process
from providers.definitions import definitions, longest
from filtering import apply_filters, Filtering
from client import USER_AGENT, Client
from utils import ADDON_ICON, notify, translation, sizeof, get_icon_path, get_enabled_providers

provider_names = []
provider_results = []
available_providers = 0
request_time = time.time()
timeout = get_setting("timeout", int)
auto_timeout = get_setting("auto_timeout", bool)

if auto_timeout:
    quasar_addon = xbmcaddon.Addon(id='plugin.video.quasar')
    if quasar_addon:
        if quasar_addon.getSetting(
                'custom_provider_timeout_enabled') == "true":
            # Shave off 2 seconds so Burst returns before Quasar's own provider timeout
            timeout = int(
                quasar_addon.getSetting('custom_provider_timeout')) - 2
            log.debug("Using timeout from Quasar: %d seconds" % timeout)
        else:
            timeout = 9


def search(payload, method="general"):
Example #35
def generate_payload(provider,
                     generator,
                     filtering,
                     verify_name=True,
                     verify_size=True):
    """ Payload formatter to format results the way Elementum expects them

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes

    Returns:
        list: Formatted results
    """
    filtering.information(provider)
    results = []

    definition = definitions[provider]
    definition = get_alias(definition, get_setting("%s_alias" % provider))

    for name, info_hash, uri, size, seeds, peers in generator:
        size = clean_size(size)
        # uri, info_hash = clean_magnet(uri, info_hash)
        v_name = name if verify_name else filtering.title
        v_size = size if verify_size else None
        if filtering.verify(provider, v_name, v_size):
            result = {
                "name": name,
                "uri": uri,
                "info_hash": info_hash,
                "size": size,
                "seeds": get_int(seeds),
                "peers": get_int(peers),
                "language": definition["language"] if 'language' in definition else 'en',
                "provider": '[COLOR %s]%s[/COLOR]' % (definition['color'], definition['name']),
                "icon": os.path.join(ADDON_PATH, 'burst', 'providers', 'icons',
                                     '%s.png' % provider),
            }
            if get_setting("sort_by_resolution", bool):
                # e.g. 'filter_1080p' -> 1080; lets callers sort before the max_results cutoff
                result["resolution"] = get_int(filtering.determine_resolution(v_name)[7:-1])
            results.append(result)
        else:
            log.debug(filtering.reason.encode('utf-8'))

    log.debug('>>>>>> %s would send %d torrents to Quasar <<<<<<' %
              (provider, len(results)))

    return results
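
The integer "resolution" field only exists so callers can order results before applying the max_results cutoff. A minimal sketch of that consumption (not code from the addon):

results = [
    {'name': 'a', 'resolution': 720},
    {'name': 'b', 'resolution': 1080},
    {'name': 'c', 'resolution': 480},
]
results.sort(key=lambda r: r['resolution'], reverse=True)
print([r['name'] for r in results])  # ['b', 'a', 'c'], then cut to max_results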
Example #36
def process(provider,
            generator,
            filtering,
            verify_name=True,
            verify_size=True):
    """ Method for processing provider results using its generator and Filtering class instance

    Args:
        provider        (str): Provider ID
        generator  (function): Generator method, can be either ``extract_torrents`` or ``extract_from_api``
        filtering (Filtering): Filtering class instance
        verify_name    (bool): Whether to double-check the results' names match the query or not
        verify_size    (bool): Whether to check the results' file sizes
    """
    log.debug("execute_process for %s with %s" % (provider, repr(generator)))
    definition = definitions[provider]

    client = Client()
    token = None
    logged_in = False
    token_auth = False

    if get_setting("use_cloudhole", bool):
        client.clearance = get_setting('clearance')
        client.user_agent = get_setting('user_agent')

    if get_setting('kodi_language', bool):
        kodi_language = xbmc.getLanguage(xbmc.ISO_639_1)
        if kodi_language:
            filtering.kodi_language = kodi_language
        language_exceptions = get_setting('language_exceptions')
        if language_exceptions.strip().lower():
            filtering.language_exceptions = re.split(r',\s?',
                                                     language_exceptions)

    log.debug("[%s] Queries: %s" % (provider, filtering.queries))
    log.debug("[%s] Extras:  %s" % (provider, filtering.extras))

    for query, extra in zip(filtering.queries, filtering.extras):
        log.debug("[%s] Before keywords - Query: %s - Extra: %s" %
                  (provider, repr(query), repr(extra)))
        query = filtering.process_keywords(provider, query)
        extra = filtering.process_keywords(provider, extra)
        log.debug("[%s] After keywords  - Query: %s - Extra: %s" %
                  (provider, repr(query), repr(extra)))
        if not query:
            return filtering.results

        url_search = filtering.url.replace('QUERY', query)
        if extra:
            url_search = url_search.replace('EXTRA', extra)
        else:
            url_search = url_search.replace('EXTRA', '')
        url_search = url_search.replace(' ', definition['separator'])

        # MagnetDL fix: FIRSTLETTER in the search URL is replaced with the query's first letter
        url_search = url_search.replace('FIRSTLETTER', query[:1])

        # Creating the payload for POST method
        payload = dict()
        for key, value in filtering.post_data.iteritems():
            if 'QUERY' in value:
                payload[key] = filtering.post_data[key].replace('QUERY', query)
            else:
                payload[key] = filtering.post_data[key]

        # Creating the payload for GET method
        data = None
        if filtering.get_data:
            data = dict()
            for key, value in filtering.get_data.iteritems():
                if 'QUERY' in value:
                    data[key] = filtering.get_data[key].replace('QUERY', query)
                else:
                    data[key] = filtering.get_data[key]

        log.debug("-   %s query: %s" % (provider, repr(query)))
        log.debug("--  %s url_search before token: %s" %
                  (provider, repr(url_search)))
        log.debug("--- %s using POST payload: %s" % (provider, repr(payload)))
        log.debug("----%s filtering with post_data: %s" %
                  (provider, repr(filtering.post_data)))

        # Set search's "title" in filtering to double-check results' names
        if 'filter_title' in definition and definition['filter_title']:
            filtering.filter_title = True
            filtering.title = query

        if token:
            log.info('[%s] Reusing existing token' % provider)
            url_search = url_search.replace('TOKEN', token)
        elif 'token' in definition:
            token_url = definition['base_url'] + definition['token']
            log.debug("Getting token for %s at %s" %
                      (provider, repr(token_url)))
            client.open(token_url.encode('utf-8'))
            try:
                token_data = json.loads(client.content)
            except:
                log.error('%s: Failed to get token for %s' %
                          (provider, repr(url_search)))
                return filtering.results
            log.debug("Token response for %s: %s" %
                      (provider, repr(token_data)))
            if 'token' in token_data:
                token = token_data['token']
                log.debug("Got token for %s: %s" % (provider, repr(token)))
                url_search = url_search.replace('TOKEN', token)
                time.sleep(2)
            else:
                log.warning('%s: Unable to get token for %s' %
                            (provider, repr(url_search)))

        if logged_in:
            log.info("[%s] Reusing previous login" % provider)
        elif token_auth:
            log.info("[%s] Reusing previous token authorization" % provider)
        elif 'private' in definition and definition['private']:
            username = get_setting('%s_username' % provider)
            password = get_setting('%s_password' % provider)
            passkey = get_setting('%s_passkey' % provider)
            if not username and not password and not passkey:
                for addon_name in ('script.magnetic.%s' % provider,
                                   'script.magnetic.%s-mc' % provider):
                    for setting in ('username', 'password'):
                        try:
                            value = xbmcaddon.Addon(addon_name).getSetting(
                                setting)
                            set_setting('%s_%s' % (provider, setting), value)
                            if setting == 'username':
                                username = value
                            if setting == 'password':
                                password = value
                        except:
                            pass

            if passkey:
                logged_in = True
                client.passkey = passkey
                url_search = url_search.replace('PASSKEY', passkey)

            elif 'login_object' in definition and definition['login_object']:
                logged_in = False
                login_object = definition['login_object'].replace(
                    'USERNAME',
                    '"%s"' % username).replace('PASSWORD', '"%s"' % password)

                # TODO generic flags in definitions for those...
                if provider == 'alphareign':
                    client.open(definition['root_url'] +
                                definition['login_path'])
                    if client.content:
                        csrf_name = re.search(
                            r'name="csrf_name" value="(.*?)"', client.content)
                        csrf_value = re.search(
                            r'name="csrf_value" value="(.*?)"', client.content)
                        if csrf_name and csrf_value:
                            login_object = login_object.replace(
                                "CSRF_NAME", '"%s"' % csrf_name.group(1))
                            login_object = login_object.replace(
                                "CSRF_VALUE", '"%s"' % csrf_value.group(1))
                        else:
                            logged_in = True
                if provider == 'hd-torrents':
                    client.open(definition['root_url'] +
                                definition['login_path'])
                    if client.content:
                        csrf_token = re.search(
                            r'name="csrfToken" value="(.*?)"', client.content)
                        if csrf_token:
                            login_object = login_object.replace(
                                'CSRF_TOKEN', '"%s"' % csrf_token.group(1))
                        else:
                            logged_in = True

                if 'token_auth' in definition:
                    # log.debug("[%s] logging in with: %s" % (provider, login_object))
                    if client.open(definition['root_url'] +
                                   definition['token_auth'],
                                   post_data=eval(login_object)):
                        try:
                            token_data = json.loads(client.content)
                        except:
                            log.error('%s: Failed to get token from %s' %
                                      (provider, definition['token_auth']))
                            return filtering.results
                        log.debug("Token response for %s: %s" %
                                  (provider, repr(token_data)))
                        if 'token' in token_data:
                            client.token = token_data['token']
                            log.debug("Auth token for %s: %s" %
                                      (provider, repr(client.token)))
                        else:
                            log.error('%s: Unable to get auth token for %s' %
                                      (provider, repr(url_search)))
                            return filtering.results
                        log.info('[%s] Token auth successful' % provider)
                        token_auth = True
                    else:
                        log.error("[%s] Token auth failed with response: %s" %
                                  (provider, repr(client.content)))
                        return filtering.results
                elif not logged_in and client.login(
                        definition['root_url'] + definition['login_path'],
                        eval(login_object), definition['login_failed']):
                    log.info('[%s] Login successful' % provider)
                    logged_in = True
                elif not logged_in:
                    log.error("[%s] Login failed: %s", provider, client.status)
                    log.debug("[%s] Failed login content: %s", provider,
                              repr(client.content))
                    return filtering.results

                if logged_in:
                    if provider == 'hd-torrents':
                        client.open(definition['root_url'] + '/torrents.php')
                        csrf_token = re.search(
                            r'name="csrfToken" value="(.*?)"', client.content)
                        url_search = url_search.replace(
                            "CSRF_TOKEN", csrf_token.group(1))

        log.info(">  %s search URL: %s" %
                 (definition['name'].rjust(longest), url_search))

        client.open(url_search.encode('utf-8'),
                    post_data=payload,
                    get_data=data)
        filtering.results.extend(
            generate_payload(provider, generator(provider, client), filtering,
                             verify_name, verify_size))
    return filtering.results
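
The login_object above is templated the same way as the search URL: placeholder replacement followed by eval to produce the POST dict. A worked example with an invented definition entry (real ones live in providers.definitions), which also shows why these definitions must be trusted input:

login_object = "{'username': USERNAME, 'password': PASSWORD, 'remember': 1}"  # hypothetical
username, password = 'user', 'hunter2'

filled = login_object.replace('USERNAME', '"%s"' % username) \
                     .replace('PASSWORD', '"%s"' % password)
post_data = eval(filled)
print(post_data)  # {'username': 'user', 'password': 'hunter2', 'remember': 1}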