# Exemplo n.º 1
# 0
class NyaaProvider(TorrentProvider):
    """Torrent provider for the NyaaTorrents (nyaa.si) anime tracker."""

    def __init__(self):
        super(NyaaProvider, self).__init__("NyaaTorrents", 'https://nyaa.si',
                                           False)

        # Nyaa is an anime-only tracker using absolute episode numbering.
        self.supports_absolute_numbering = True
        self.anime_only = True
        self.confirmed = False

        # Seed/leech thresholds (unset here; filtering happens elsewhere).
        self.minseed = None
        self.minleech = None

        self.cache = TVCache(self, min_time=20)

    def search(self, search_strings, age=0, ep_obj=None):
        """
        Search a provider and parse the results.

        :param search_strings: A dict with mode (key) and the search value (value)
        :param age: Not used
        :param ep_obj: Not used
        :returns: A list of search results (structure)
        """
        results = []

        # Search Params
        search_params = {
            'page': 'rss',
            'c': '1_0',  # All Anime
            'f': 0,  # No filter
            'q': '',
        }

        for mode in search_strings:
            sickrage.app.log.debug('Search mode: {}'.format(mode))

            if self.confirmed:
                search_params['f'] = 2  # Trusted only
                sickrage.app.log.debug('Searching only confirmed torrents')

            for search_string in search_strings[mode]:
                if mode != 'RSS':
                    sickrage.app.log.debug(
                        'Search string: {}'.format(search_string))
                    search_params['q'] = search_string

                data = self.cache.getRSSFeed(self.urls['base_url'],
                                             params=search_params)
                if not data:
                    sickrage.app.log.debug('No data returned from provider')
                    continue
                if not data.get('entries'):
                    # BUG FIX: the 'confirmed ' placeholder was previously passed
                    # as a second positional argument, which this logger does not
                    # interpolate into the message; format it explicitly.
                    sickrage.app.log.debug(
                        'Data returned from provider does not contain any '
                        '{0}torrents'.format(
                            'confirmed ' if self.confirmed else ''))
                    continue

                results += self.parse(data['entries'], mode)

        return results

    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        for item in data:
            try:
                title = item['title']
                download_url = item['link']
                # Skip malformed entries that lack a title or a link.
                if not all([title, download_url]):
                    continue

                seeders = try_int(item['nyaa_seeders'])
                leechers = try_int(item['nyaa_leechers'])

                # Nyaa reports sizes with binary (IEC) units, e.g. "1.2 GiB".
                size = convert_size(
                    item['nyaa_size'],
                    -1,
                    units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
# Exemplo n.º 2
# 0
class NyaaProvider(TorrentProvider):
    def __init__(self):
        super(NyaaProvider, self).__init__("NyaaTorrents", 'http://nyaa.si',
                                           False)

        self.supports_backlog = True

        self.supports_absolute_numbering = True
        self.anime_only = True
        self.ratio = None

        self.cache = TVCache(self, min_time=15)

        self.minseed = 0
        self.minleech = 0
        self.confirmed = False

    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        if self.show and not self.show.is_anime:
            return []

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s" %
                                                   search_string)

                params = {
                    "page": 'rss',
                    "cats": '1_0',  # All anime
                    "sort": 2,  # Sort Descending By Seeders
                    "order": 1
                }
                if mode != 'RSS':
                    params["term"] = search_string.encode('utf-8')

                searchURL = self.urls['base_url'] + '?' + urllib.urlencode(
                    params)
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
                s = re.compile(summary_regex, re.DOTALL)

                results = []
                for curItem in self.cache.getRSSFeed(
                        searchURL)['entries'] or []:
                    title = curItem['title']
                    download_url = curItem['link']
                    if not all([title, download_url]):
                        continue

                    seeders, leechers, size, verified = s.findall(
                        curItem['summary'])[0]
                    size = convert_size(size)

                    # Filter unseeded torrent
                    if seeders < self.minseed or leechers < self.minleech:
                        if mode != 'RSS':
                            sickrage.srCore.srLogger.debug(
                                "Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})"
                                .format(title, seeders, leechers))
                        continue

                    if self.confirmed and not verified and mode != 'RSS':
                        sickrage.srCore.srLogger.debug(
                            "Found result " + title +
                            " but that doesn't seem like a verified result so I'm ignoring it"
                        )
                        continue

                    item = title, download_url, size, seeders, leechers
                    if mode != 'RSS':
                        sickrage.srCore.srLogger.debug("Found result: %s " %
                                                       title)

                    items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results

    def seed_ratio(self):
        return self.ratio
# Exemplo n.º 3
# 0
class NyaaProvider(TorrentProvider):
    """Torrent provider for the NyaaTorrents (nyaa.si) anime tracker."""

    def __init__(self):
        super(NyaaProvider, self).__init__("NyaaTorrents", 'http://nyaa.si', False)

        # Anime-only tracker using absolute episode numbering.
        self.supports_absolute_numbering = True
        self.anime_only = True

        # Result-filtering thresholds.
        self.minseed = 0
        self.minleech = 0
        self.confirmed = False

        self.cache = TVCache(self, min_time=20)

    def search(self, search_strings, age=0, ep_obj=None):
        """
        Search a provider and parse the results.

        :param search_strings: A dict with mode (key) and the search value (value)
        :param age: Not used
        :param ep_obj: Not used
        :returns: A list of search results (structure)
        """
        results = []

        # Search Params
        search_params = {
            'page': 'rss',
            'c': '1_0',  # All Anime
            'f': 0,  # No filter
            'q': '',
        }

        for mode in search_strings:
            sickrage.app.log.debug('Search mode: {}'.format(mode))

            if self.confirmed:
                search_params['f'] = 2  # Trusted only
                sickrage.app.log.debug('Searching only confirmed torrents')

            for search_string in search_strings[mode]:
                if mode != 'RSS':
                    sickrage.app.log.debug('Search string: {}'.format(search_string))
                    search_params['q'] = search_string

                data = self.cache.getRSSFeed(self.urls['base_url'], params=search_params)
                if not data:
                    sickrage.app.log.debug('No data returned from provider')
                    continue
                if not data.get('entries'):
                    # BUG FIX: the 'confirmed ' placeholder was previously passed
                    # as a second positional argument, which this logger does not
                    # interpolate into the message; format it explicitly.
                    sickrage.app.log.debug('Data returned from provider does not contain any '
                                           '{0}torrents'.format('confirmed ' if self.confirmed else ''))
                    continue

                results += self.parse(data['entries'], mode)

        return results

    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        for item in data:
            try:
                title = item['title']
                download_url = item['link']
                # Skip malformed entries that lack a title or a link.
                if not all([title, download_url]):
                    continue

                seeders = try_int(item['nyaa_seeders'])
                leechers = try_int(item['nyaa_leechers'])

                # Filter unseeded torrent
                # NOTE(review): `min(self.minseed, 1)` caps the threshold at 1,
                # so a configured minseed > 1 is never enforced here —
                # presumably `max(self.minseed, 1)` was intended; confirm
                # before changing filtering behavior.
                if seeders < min(self.minseed, 1):
                    if mode != 'RSS':
                        sickrage.app.log.debug("Discarding torrent because it doesn't meet the "
                                                       "minimum seeders: {}. Seeders: {}".format(title, seeders))
                    continue

                # Nyaa reports sizes with binary (IEC) units, e.g. "1.2 GiB".
                size = convert_size(item['nyaa_size'], -1, units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }
                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
# Exemplo n.º 4
# 0
class NewznabProvider(NZBProvider):
    """NZB provider speaking the newznab API (t=caps / t=tvsearch)."""

    type = 'newznab'

    def __init__(self,
                 name,
                 url,
                 private,
                 key='',
                 catIDs='5030,5040',
                 search_mode='eponly',
                 search_fallback=False,
                 enable_daily=False,
                 enable_backlog=False,
                 default=False):
        """
        :param name: display name of the provider
        :param url: base URL of the newznab server
        :param private: whether the provider requires an API key
        :param key: the API key (may be empty for public servers)
        :param catIDs: comma-separated newznab category ids to search
        :param search_mode: 'eponly' or 'sponly'
        :param search_fallback: fall back to the other search mode on failure
        :param enable_daily: include this provider in daily searches
        :param enable_backlog: include this provider in backlog searches
        :param default: whether this is a built-in default provider
        """
        super(NewznabProvider, self).__init__(name, url, private)

        self.key = key

        self.search_mode = search_mode
        self.search_fallback = search_fallback
        self.enable_daily = enable_daily
        self.enable_backlog = enable_backlog
        self.supports_backlog = True

        self.catIDs = catIDs
        self.default = default

        self.cache = TVCache(self, min_time=30)

    def get_newznab_categories(self):
        """
        Uses the newznab provider url and apikey to get the capabilities.
        Makes use of the default newznab caps param. e.a. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
        Returns a tuple with (succes or not, array with dicts [{"id": "5070", "name": "Anime"},
        {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc}], error message)
        """
        success = False
        categories = []
        message = ""

        self.check_auth()

        params = {"t": "caps"}
        if self.key:
            params['apikey'] = self.key

        try:
            resp = sickrage.srCore.srWebSession.get("{}api?{}".format(
                self.urls['base_url'], urllib.urlencode(params)))
            data = xmltodict.parse(resp.content)

            # Only the 'TV' top-level category and its sub-categories matter.
            for category in data["caps"]["categories"]["category"]:
                if category.get('@name') == 'TV':
                    categories += [{
                        "id": category['@id'],
                        "name": category['@name']
                    }]
                    categories += [{
                        "id": x["@id"],
                        "name": x["@name"]
                    } for x in category["subcat"]]

            success = True
        except Exception:
            # Malformed/absent caps response: report failure, never raise.
            sickrage.srCore.srLogger.debug("[%s] failed to list categories" %
                                           self.name)
            message = "[%s] failed to list categories" % self.name

        return success, categories, message

    def _get_season_search_strings(self, ep_obj):
        """Build newznab query dicts for a whole-season search of *ep_obj*'s show."""
        to_return = []
        params = {}
        if not ep_obj:
            return to_return

        # Restrict results to items no older than the episode's air date.
        params['maxage'] = (
            datetime.datetime.now() - datetime.datetime.combine(
                ep_obj.airdate, datetime.datetime.min.time())).days + 1
        params['tvdbid'] = ep_obj.show.indexerid

        # season
        if ep_obj.show.air_by_date or ep_obj.show.sports:
            # Air-by-date/sports shows search by year rather than season number.
            date_str = str(ep_obj.airdate).split('-')[0]
            params['season'] = date_str
            params['q'] = date_str.replace('-', '.')
        else:
            params['season'] = str(ep_obj.scene_season)

        save_q = ' ' + params['q'] if 'q' in params else ''

        # add new query strings for exceptions
        name_exceptions = list(
            set([ep_obj.show.name] +
                get_scene_exceptions(ep_obj.show.indexerid)))
        for cur_exception in name_exceptions:
            params['q'] = sanitizeSceneName(cur_exception) + save_q
            to_return.append(dict(params))

        return to_return

    def _get_episode_search_strings(self, ep_obj, add_string=''):
        """Build newznab query dicts for a single-episode search, optionally
        appending *add_string* (e.g. 'PROPER|REPACK') to each query."""
        to_return = []
        params = {}
        if not ep_obj:
            return to_return

        # Restrict results to items no older than the episode's air date.
        params['maxage'] = (
            datetime.datetime.now() - datetime.datetime.combine(
                ep_obj.airdate, datetime.datetime.min.time())).days + 1
        params['tvdbid'] = ep_obj.show.indexerid

        if ep_obj.show.air_by_date or ep_obj.show.sports:
            # Air-by-date/sports: season is the year, ep is 'MM/DD'.
            date_str = str(ep_obj.airdate)
            params['season'] = date_str.partition('-')[0]
            params['ep'] = date_str.partition('-')[2].replace('-', '/')
        else:
            params['season'] = ep_obj.scene_season
            params['ep'] = ep_obj.scene_episode

        # add new query strings for exceptions
        name_exceptions = list(
            set([ep_obj.show.name] +
                get_scene_exceptions(ep_obj.show.indexerid)))
        for cur_exception in name_exceptions:
            params['q'] = sanitizeSceneName(cur_exception)
            if add_string:
                params['q'] += ' ' + add_string

            to_return.append(dict(params))

        return to_return

    def _doGeneralSearch(self, search_string):
        """Run a free-text search through the regular search pipeline."""
        return self.search({'q': search_string})

    def check_auth(self):
        """Return True when the provider is public or an API key is configured."""
        if self.private and not self.key:
            sickrage.srCore.srLogger.warning(
                'Invalid api key for {}. Check your settings'.format(
                    self.name))
            return False

        return True

    def _checkAuthFromData(self, data):
        """
        Validate a parsed feed response and surface newznab auth errors.

        :type data: dict
        :return: True when the feed looks well-formed and auth is OK,
            False otherwise.
        :raises AuthException: for newznab auth error codes 100/101/102.
        """
        # A well-formed feed carries both 'feed' and 'entries' keys.
        if all([x in data for x in ['feed', 'entries']]):
            return self.check_auth()

        try:
            # feedparser sets bozo=1 and records the parse error on bad XML.
            if int(data['bozo']) == 1:
                raise data['bozo_exception']
        except (AttributeError, KeyError):
            pass

        try:
            err_code = data['feed']['error']['code']
            err_desc = data['feed']['error']['description']

            if int(err_code) == 100:
                raise AuthException("Your API key for " + self.name +
                                    " is incorrect, check your config.")
            elif int(err_code) == 101:
                raise AuthException(
                    "Your account on " + self.name +
                    " has been suspended, contact the administrator.")
            elif int(err_code) == 102:
                raise AuthException(
                    "Your account isn't allowed to use the API on " +
                    self.name + ", contact the administrator")
            raise Exception("Error {}: {}".format(err_code, err_desc))
        except (AttributeError, KeyError):
            pass

        return False

    def search(self,
               search_params,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        """
        Query the newznab API (t=tvsearch), following pagination.

        :param search_params: extra query params merged over the defaults
        :param search_mode: Not used
        :param epcount: Not used
        :param age: cap on result age, bounded by USENET_RETENTION
        :param epObj: Not used
        :return: list of feed entries that have both a title and a url
        """
        import time  # local import: only needed for rate-limiting below

        results = []

        if not self.check_auth():
            return results

        params = {
            "t": "tvsearch",
            "maxage": min(age, sickrage.srCore.srConfig.USENET_RETENTION),
            "limit": 100,
            "offset": 0,
            "cat": self.catIDs or '5030,5040'
        }

        params.update(search_params)

        if self.key:
            params['apikey'] = self.key

        offset = total = 0
        last_search = datetime.datetime.now()
        while total >= offset:
            # BUG FIX: rate-limit by sleeping out the remainder of the 5-second
            # window; the previous bare `continue` busy-spun at 100% CPU.
            elapsed = (datetime.datetime.now() - last_search).seconds
            if elapsed < 5:
                time.sleep(5 - elapsed)

            search_url = self.urls['base_url'] + '/api'
            sickrage.srCore.srLogger.debug(
                "Search url: %s?%s" % (search_url, urllib.urlencode(params)))

            data = self.cache.getRSSFeed(search_url, params=params)

            last_search = datetime.datetime.now()

            if not self._checkAuthFromData(data):
                break

            for item in data['entries']:
                (title, url) = self._get_title_and_url(item)

                if title and url:
                    results.append(item)

            # get total and offset attribs
            try:
                if total == 0:
                    total = int(data['feed'].newznab_response['total'] or 0)
                offset = int(data['feed'].newznab_response['offset'] or 0)
            except AttributeError:
                break

            # No items found, prevent from doing another search
            if total == 0:
                break

            if offset != params['offset']:
                sickrage.srCore.srLogger.info(
                    "Tell your newznab provider to fix their bloody newznab responses"
                )
                break

            params['offset'] += params['limit']
            if (total > int(params['offset'])) and (offset < 500):
                offset = int(params['offset'])
                # if there are more items available then the amount given in one call, grab some more
                sickrage.srCore.srLogger.debug(
                    '%d' % (total - offset) +
                    ' more items to be fetched from provider.' +
                    'Fetching another %d' % int(params['limit']) + ' items.')
            else:
                break

        return results

    def find_propers(self, search_date=None):
        """
        Find PROPER/REPACK releases for recently downloaded/snatched episodes.

        :param search_date: earliest air date to consider; defaults to today.
            (BUG FIX: the old default `datetime.datetime.today()` was evaluated
            once at import time, freezing the date for the process lifetime.)
        """
        if search_date is None:
            search_date = datetime.datetime.today()

        results = []

        for show in [
                s['doc'] for s in sickrage.srCore.mainDB.db.all('tv_shows',
                                                                with_doc=True)
        ]:
            for episode in [
                    e['doc'] for e in sickrage.srCore.mainDB.db.get_many(
                        'tv_episodes', show['indexer_id'], with_doc=True)
            ]:
                if episode['airdate'] >= str(search_date.toordinal()) \
                        and episode['status'] in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST:

                    self.show = findCertainShow(sickrage.srCore.SHOWLIST,
                                                int(show["showid"]))
                    if not self.show:
                        continue

                    curEp = self.show.getEpisode(int(episode["season"]),
                                                 int(episode["episode"]))
                    searchStrings = self._get_episode_search_strings(
                        curEp, add_string='PROPER|REPACK')
                    for searchString in searchStrings:
                        for item in self.search(searchString):
                            title, url = self._get_title_and_url(item)
                            if re.match(r'.*(REPACK|PROPER).*', title, re.I):
                                results += [
                                    Proper(title, url,
                                           datetime.datetime.today(),
                                           self.show)
                                ]

        return results

    @classmethod
    def getProviders(cls):
        """Build the provider list: defaults plus user-configured custom ones."""
        providers = cls.getDefaultProviders()

        try:
            for curProviderStr in sickrage.srCore.srConfig.CUSTOM_PROVIDERS.split(
                    '!!!'):
                if not len(curProviderStr):
                    continue

                try:
                    cur_type, curProviderData = curProviderStr.split('|', 1)

                    if cur_type == "newznab":
                        cur_name, cur_url, cur_key, cur_cat = curProviderData.split(
                            '|')
                        cur_url = clean_url(cur_url)

                        # NOTE(review): `cur_key` is a string, so `cur_key == 0`
                        # is always False and `private` is always True here —
                        # presumably `bool(cur_key)` was intended; confirm
                        # against stored configs before changing.
                        provider = NewznabProvider(cur_name,
                                                   cur_url,
                                                   bool(not cur_key == 0),
                                                   key=cur_key,
                                                   catIDs=cur_cat)

                        providers += [provider]
                except Exception:
                    continue
        except Exception:
            pass

        return providers

    @classmethod
    def getDefaultProviders(cls):
        """Return the built-in newznab provider instances."""
        return [
            cls('SickBeard', 'lolo.sickbeard.com', False, '', '5030,5040',
                'eponly', False, False, False, True),
            cls('NZB.Cat', 'nzb.cat', True, '', '5030,5040,5010', 'eponly',
                True, True, True, True),
            cls('NZBGeek', 'api.nzbgeek.info', True, '', '5030,5040', 'eponly',
                False, False, False, True),
            cls('NZBs.org', 'nzbs.org', True, '', '5030,5040', 'eponly', False,
                False, False, True),
            cls('Usenet-Crawler', 'usenet-crawler.com', True, '', '5030,5040',
                'eponly', False, False, False, True)
        ]
# Exemplo n.º 5
# 0
class NewznabProvider(NZBProvider):
    """NZB provider speaking the newznab API (t=caps / t=tvsearch)."""

    type = 'newznab'

    def __init__(self,
                 name,
                 url,
                 private,
                 key='',
                 catIDs='5030,5040',
                 search_mode='eponly',
                 search_fallback=False,
                 enable_daily=False,
                 enable_backlog=False,
                 default=False):
        """Store provider settings and attach a 30-minute RSS cache."""
        super(NewznabProvider, self).__init__(name, url, private)

        self.key = key

        self.search_mode = search_mode
        self.search_fallback = search_fallback
        self.enable_daily = enable_daily
        self.enable_backlog = enable_backlog
        self.supports_backlog = True

        # Comma-separated newznab category ids searched by default.
        self.catIDs = catIDs
        self.default = default

        self.cache = TVCache(self, min_time=30)

    def get_newznab_categories(self):
        """
        Uses the newznab provider url and apikey to get the capabilities.
        Makes use of the default newznab caps param. e.a. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
        Returns a tuple with (succes or not, array with dicts [{"id": "5070", "name": "Anime"},
        {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc}], error message)
        """
        success = False
        categories = []
        message = ""

        self.check_auth()

        params = {"t": "caps"}
        if self.key:
            params['apikey'] = self.key

        try:
            resp = sickrage.srCore.srWebSession.get("{}api?{}".format(self.urls['base_url'], urllib.urlencode(params)))
            data = xmltodict.parse(resp.content)

            # Only the 'TV' top-level category and its sub-categories matter.
            for category in data["caps"]["categories"]["category"]:
                if category.get('@name') == 'TV':
                    categories += [{"id": category['@id'], "name": category['@name']}]
                    categories += [{"id": x["@id"], "name": x["@name"]} for x in category["subcat"]]

            success = True
        except Exception as e:  # NOTE(review): `e` is never used
            sickrage.srCore.srLogger.debug("[%s] failed to list categories" % self.name)
            message = "[%s] failed to list categories" % self.name

        return success, categories, message

    def _get_season_search_strings(self, ep_obj):
        """Build newznab query dicts for a whole-season search of *ep_obj*'s show."""

        to_return = []
        params = {}
        if not ep_obj:
            return to_return

        # Restrict results to items no older than the episode's air date.
        params['maxage'] = (datetime.datetime.now() - datetime.datetime.combine(ep_obj.airdate,
                                                                                datetime.datetime.min.time())).days + 1
        params['tvdbid'] = ep_obj.show.indexerid

        # season
        if ep_obj.show.air_by_date or ep_obj.show.sports:
            # Air-by-date/sports shows search by year rather than season number.
            date_str = str(ep_obj.airdate).split('-')[0]
            params['season'] = date_str
            params['q'] = date_str.replace('-', '.')
        else:
            params['season'] = str(ep_obj.scene_season)

        save_q = ' ' + params['q'] if 'q' in params else ''

        # add new query strings for exceptions
        name_exceptions = list(
            set([ep_obj.show.name] + get_scene_exceptions(ep_obj.show.indexerid)))
        for cur_exception in name_exceptions:
            params['q'] = sanitizeSceneName(cur_exception) + save_q
            to_return.append(dict(params))

        return to_return

    def _get_episode_search_strings(self, ep_obj, add_string=''):
        """Build newznab query dicts for a single-episode search, optionally
        appending *add_string* (e.g. 'PROPER|REPACK') to each query."""
        to_return = []
        params = {}
        if not ep_obj:
            return to_return

        # Restrict results to items no older than the episode's air date.
        params['maxage'] = (datetime.datetime.now() - datetime.datetime.combine(ep_obj.airdate,
                                                                                datetime.datetime.min.time())).days + 1
        params['tvdbid'] = ep_obj.show.indexerid

        if ep_obj.show.air_by_date or ep_obj.show.sports:
            # Air-by-date/sports: season is the year, ep is 'MM/DD'.
            date_str = str(ep_obj.airdate)
            params['season'] = date_str.partition('-')[0]
            params['ep'] = date_str.partition('-')[2].replace('-', '/')
        else:
            params['season'] = ep_obj.scene_season
            params['ep'] = ep_obj.scene_episode

        # add new query strings for exceptions
        name_exceptions = list(
            set([ep_obj.show.name] + get_scene_exceptions(ep_obj.show.indexerid)))
        for cur_exception in name_exceptions:
            params['q'] = sanitizeSceneName(cur_exception)
            if add_string:
                params['q'] += ' ' + add_string

            to_return.append(dict(params))

        return to_return

    def _doGeneralSearch(self, search_string):
        """Run a free-text search through the regular search pipeline."""
        return self.search({'q': search_string})

    def check_auth(self):
        """Return True when the provider is public or an API key is configured."""
        if self.private and not len(self.key):
            sickrage.srCore.srLogger.warning('Invalid api key for {}. Check your settings'.format(self.name))
            return False

        return True

    def _checkAuthFromData(self, data):

        """
        Validate a parsed feed response and surface newznab auth errors.

        :type data: dict
        :return: True when the feed looks well-formed and auth is OK,
            False otherwise.
        :raises AuthException: for newznab auth error codes 100/101/102.
        """
        # A well-formed feed carries both 'feed' and 'entries' keys.
        if all([x in data for x in ['feed', 'entries']]):
            return self.check_auth()

        try:
            # feedparser sets bozo=1 and records the parse error on bad XML.
            if int(data['bozo']) == 1:
                raise data['bozo_exception']
        except (AttributeError, KeyError):
            pass

        try:
            err_code = data['feed']['error']['code']
            err_desc = data['feed']['error']['description']

            if int(err_code) == 100:
                raise AuthException("Your API key for " + self.name + " is incorrect, check your config.")
            elif int(err_code) == 101:
                raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
            elif int(err_code) == 102:
                raise AuthException(
                    "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
            raise Exception("Error {}: {}".format(err_code, err_desc))
        except (AttributeError, KeyError):
            pass

        return False

    def search(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
        """
        Query the newznab API (t=tvsearch), following pagination.

        :param search_params: extra query params merged over the defaults
        :param age: cap on result age, bounded by USENET_RETENTION
        :return: list of feed entries that have both a title and a url
        """
        results = []

        if not self.check_auth():
            return results

        params = {
            "t": "tvsearch",
            "maxage": min(age, sickrage.srCore.srConfig.USENET_RETENTION),
            "limit": 100,
            "offset": 0,
            "cat": self.catIDs or '5030,5040'
        }

        params.update(search_params)

        if self.key:
            params['apikey'] = self.key

        offset = total = 0
        last_search = datetime.datetime.now()
        while total >= offset:
            # NOTE(review): this bare `continue` busy-spins the CPU until the
            # 5-second window elapses — a sleep would be gentler; confirm intent.
            if (datetime.datetime.now() - last_search).seconds < 5:
                continue

            search_url = self.urls['base_url'] + '/api'
            sickrage.srCore.srLogger.debug("Search url: %s?%s" % (search_url, urllib.urlencode(params)))

            data = self.cache.getRSSFeed(search_url, params=params)

            last_search = datetime.datetime.now()

            if not self._checkAuthFromData(data):
                break

            for item in data['entries']:

                (title, url) = self._get_title_and_url(item)

                if title and url:
                    results.append(item)

            # get total and offset attribs
            try:
                if total == 0:
                    total = int(data['feed'].newznab_response['total'] or 0)
                offset = int(data['feed'].newznab_response['offset'] or 0)
            except AttributeError:
                break

            # No items found, prevent from doing another search
            if total == 0:
                break

            if offset != params['offset']:
                sickrage.srCore.srLogger.info("Tell your newznab provider to fix their bloody newznab responses")
                break

            params['offset'] += params['limit']
            if (total > int(params['offset'])) and (offset < 500):
                offset = int(params['offset'])
                # if there are more items available then the amount given in one call, grab some more
                sickrage.srCore.srLogger.debug('%d' % (total - offset) + ' more items to be fetched from provider.' +
                                               'Fetching another %d' % int(params['limit']) + ' items.')
            else:
                break

        return results

    # NOTE(review): the default below is evaluated once at import time, so
    # no-arg calls reuse the date from process start — likely unintended.
    def find_propers(self, search_date=datetime.datetime.today()):
        """Find PROPER/REPACK releases for recently downloaded/snatched episodes."""
        results = []
        dbData = []  # NOTE(review): unused

        for show in [s['doc'] for s in sickrage.srCore.mainDB.db.all('tv_shows', with_doc=True)]:
            for episode in [e['doc'] for e in sickrage.srCore.mainDB.db.get_many('tv_episodes', show['indexer_id'], with_doc=True)]:
                if episode['airdate'] >= str(search_date.toordinal()) \
                        and episode['status'] in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST:

                    self.show = findCertainShow(sickrage.srCore.SHOWLIST, int(show["showid"]))
                    if not self.show: continue

                    curEp = self.show.getEpisode(int(episode["season"]), int(episode["episode"]))
                    searchStrings = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
                    for searchString in searchStrings:
                        for item in self.search(searchString):
                            title, url = self._get_title_and_url(item)
                            if re.match(r'.*(REPACK|PROPER).*', title, re.I):
                                results += [Proper(title, url, datetime.datetime.today(), self.show)]

        return results

    @classmethod
    def getProviders(cls):
        """Build the provider list: defaults plus user-configured custom ones."""
        providers = cls.getDefaultProviders()

        try:
            for curProviderStr in sickrage.srCore.srConfig.CUSTOM_PROVIDERS.split('!!!'):
                if not len(curProviderStr):
                    continue

                try:
                    cur_type, curProviderData = curProviderStr.split('|', 1)

                    if cur_type == "newznab":
                        cur_name, cur_url, cur_key, cur_cat = curProviderData.split('|')
                        cur_url = sickrage.srCore.srConfig.clean_url(cur_url)

                        # NOTE(review): `cur_key` is a string, so `cur_key == 0`
                        # is always False and `private` is always True here —
                        # presumably `bool(cur_key)` was intended; confirm.
                        provider = NewznabProvider(
                            cur_name,
                            cur_url,
                            bool(not cur_key == 0),
                            key=cur_key,
                            catIDs=cur_cat
                        )

                        providers += [provider]
                except Exception:
                    continue
        except Exception:
            pass

        return providers

    @classmethod
    def getDefaultProviders(cls):
        """Return the built-in newznab provider instances."""
        return [
            cls('SickBeard', 'lolo.sickbeard.com', False, '', '5030,5040', 'eponly', False, False, False, True),
            cls('NZB.Cat', 'nzb.cat', True, '', '5030,5040,5010', 'eponly', True, True, True, True),
            cls('NZBGeek', 'api.nzbgeek.info', True, '', '5030,5040', 'eponly', False, False, False, True),
            cls('NZBs.org', 'nzbs.org', True, '', '5030,5040', 'eponly', False, False, False, True),
            cls('Usenet-Crawler', 'usenet-crawler.com', True, '', '5030,5040', 'eponly', False, False, False, True)
        ]
# Exemplo n.º 6
# 0
class NyaaProvider(TorrentProvider):
    """Torrent provider for the NyaaTorrents anime tracker (RSS-based search)."""

    # Feed summaries look like:
    # "N seeder(s), M leecher(s), K download(s) - SIZE <verified marker>"
    # Compiled once at class level instead of once per search string.
    SUMMARY_REGEX = re.compile(
        r"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)",
        re.DOTALL)

    def __init__(self):
        super(NyaaProvider, self).__init__("NyaaTorrents", 'www.nyaa.se', False)

        self.supports_backlog = True

        # Nyaa indexes anime only and uses absolute episode numbering.
        self.supports_absolute_numbering = True
        self.anime_only = True
        self.ratio = None

        self.cache = TVCache(self, min_time=15)

        self.minseed = 0
        self.minleech = 0
        self.confirmed = False

    def search(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
        """
        Search the provider RSS feed and parse the results.

        :param search_strings: dict mapping mode ('Season'/'Episode'/'RSS')
            to a list of search terms
        :param search_mode: unused here, kept for interface compatibility
        :param epcount: unused
        :param age: unused
        :param epObj: unused
        :return: list of (title, url, size, seeders, leechers) tuples,
            sorted by seeders descending within each mode
        """
        if self.show and not self.show.is_anime:
            return []

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings:
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s" % search_string)

                params = {
                    "page": 'rss',
                    "cats": '1_0',  # All anime
                    "sort": 2,  # Sort Descending By Seeders
                    "order": 1
                }
                if mode != 'RSS':
                    params["term"] = search_string.encode('utf-8')

                searchURL = self.urls['base_url'] + '?' + urllib.urlencode(params)
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                # BUG FIX: the original reset 'results = []' here, wiping any
                # results already accumulated from previously processed modes.
                for curItem in self.cache.getRSSFeed(searchURL)['entries'] or []:
                    title = curItem['title']
                    download_url = curItem['link']
                    if not all([title, download_url]):
                        continue

                    # BUG FIX: guard against an unexpected summary format --
                    # the original indexed findall(...)[0] unconditionally and
                    # raised IndexError when the regex did not match.
                    matches = self.SUMMARY_REGEX.findall(curItem['summary'])
                    if not matches:
                        continue

                    seeders, leechers, size, verified = matches[0]
                    # BUG FIX: the regex yields strings; convert so the
                    # min-seed/min-leech comparison and the seeders sort below
                    # operate on numbers instead of text.
                    seeders = int(seeders)
                    leechers = int(leechers)
                    size = convert_size(size)

                    # Filter unseeded torrents.
                    if seeders < self.minseed or leechers < self.minleech:
                        if mode != 'RSS':
                            sickrage.srCore.srLogger.debug(
                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                            title, seeders, leechers))
                        continue

                    if self.confirmed and not verified and mode != 'RSS':
                        sickrage.srCore.srLogger.debug(
                                "Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it")
                        continue

                    item = title, download_url, size, seeders, leechers
                    if mode != 'RSS':
                        sickrage.srCore.srLogger.debug("Found result: %s " % title)

                    items[mode].append(item)

            # For each search mode sort all the items by seeders if available.
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results

    def seed_ratio(self):
        """Return the configured seed ratio for this provider (may be None)."""
        return self.ratio