Example #1
    def _test_unicode(self, name, result):
        np = NameParser(True, showObj=self.show, validate_show=False)
        parse_result = np.parse(name)

        # this shouldn't raise an exception
        repr(str(parse_result))
        self.assertEqual(parse_result.extra_info, result.extra_info)
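Most of the examples in this listing follow the same core pattern: construct a NameParser, call parse() on a release or file name, and treat InvalidNameException / InvalidShowException as "could not parse". A minimal sketch of that pattern, assuming the SiCKRAGE-style import path sickrage.core.nameparser (the helper name parse_release is hypothetical):

from sickrage.core.nameparser import (InvalidNameException,
                                      InvalidShowException, NameParser)


def parse_release(name, show=None):
    # True is the first positional argument used throughout these examples
    # (the parser's file_name flag); showObj pins the parse to a known show.
    np = NameParser(True, showObj=show)
    try:
        return np.parse(name)
    except (InvalidNameException, InvalidShowException):
        # the name could not be matched to a valid episode/show
        return None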
Example #2
    def _test_unicode(self, name, result):
        np = NameParser(True, showObj=self.show)
        parse_result = np.parse(name)

        # this shouldn't raise an exception
        repr(str(parse_result))
        self.assertEqual(parse_result.extra_info, result.extra_info)
Example #3
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: If True, skip the already-processed checks
    :param result: Result object passed by the caller (not used by this check)
    :return: True if the file has already been post-processed, False otherwise
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method other than move
    if [
            x for x in MainDB().db.all('tv_episodes', with_doc=True)
            if x['doc']['release_name'] == dirName
    ]:
        return True
    else:
        if [
                x for x in MainDB().db.all('tv_episodes', with_doc=True)
                if x['doc']['release_name'] == videofile.rpartition('.')[0]
        ]:
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName, tryIndexers=True)
        try:
            parse_result = np.parse(dirName)
        except:
            parse_result = False

        for h in [h['doc'] for h in MainDB().db.all('history', with_doc=True)]:
            for e in [
                    e['doc'] for e in MainDB().db.get_many(
                        'tv_episodes', h['showid'], with_doc=True)
                    if h['season'] == e['season'] and h['episode'] ==
                    e['episode'] and e['status'] in Quality.DOWNLOADED
                    and h['resource'].endswith(videofile)
            ]:

                # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
                if parse_result and (parse_result.show.indexerid
                                     and parse_result.episode_numbers
                                     and parse_result.season_number):
                    if e['showid'] == int(parse_result.show.indexerid) \
                            and e['season'] == int(parse_result.season_number) \
                            and e['episode'] == int(parse_result.episode_numbers[0]):
                        return True
                else:
                    return True

    return False
Example #4
    def addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
        # check if we passed in a parsed result or should we try and create one
        if not parse_result:
            # create showObj from indexer_id if available
            showObj = None
            if indexer_id:
                showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

            try:
                myParser = NameParser(showObj=showObj)
                parse_result = myParser.parse(name)
                if not parse_result:
                    return
            except (InvalidShowException, InvalidNameException):
                sickrage.srCore.srLogger.debug("RSS ITEM:[{}] IGNORED!".format(name))
                return

        if not parse_result.series_name:
            return

        # if we made it this far then let's add the parsed result to the cache for use later on
        season = parse_result.season_number if parse_result.season_number else 1
        episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a pipe-separated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            quality = parse_result.quality

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            if not len([x for x in sickrage.srCore.cacheDB.db.get_many('providers', self.providerID, with_doc=True)
                        if x['doc']['url'] == url]):
                sickrage.srCore.cacheDB.db.insert({
                    '_t': 'providers',
                    'provider': self.providerID,
                    'name': name,
                    'season': season,
                    'episodes': episodeText,
                    'indexerid': parse_result.show.indexerid,
                    'url': url,
                    'time': curTimestamp,
                    'quality': quality,
                    'release_group': release_group,
                    'version': version
                })

                sickrage.srCore.srLogger.debug("RSS ITEM:[%s] ADDED!", name)
Example #5
    def _test_name(name):
        np = NameParser(True)
        try:
            parse_result = np.parse(name)
        except (InvalidNameException, InvalidShowException):
            return True

        if VERBOSE:
            print 'Actual: ', parse_result.which_regex, parse_result
        return False
Example #6
    def _test_name(name):
        np = NameParser(True)
        try:
            parse_result = np.parse(name)
        except (InvalidNameException, InvalidShowException):
            return True

        if VERBOSE:
            print('Actual: ', parse_result.which_regex, parse_result)
        return False
Example #7
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: If True, skip the already-processed checks
    :param result: Result object passed by the caller (not used by this check)
    :return: True if the file has already been post-processed, False otherwise
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method other than move
    if main_db.MainDB().select(
            "SELECT * FROM tv_episodes WHERE release_name = ?", [dirName]):
        # result.output += logHelper(u"You're trying to post process a dir that's already been processed, skipping", LOGGER.DEBUG)
        return True

    else:
        if main_db.MainDB().select(
                "SELECT * FROM tv_episodes WHERE release_name = ?",
            [videofile.rpartition('.')[0]]):
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", LOGGER.DEBUG)
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName, tryIndexers=True)
        try:
            parse_result = np.parse(dirName)
        except:
            parse_result = False

        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"  # This part is always the same
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (parse_result.show.indexerid
                             and parse_result.episode_numbers
                             and parse_result.season_number):
            search_sql += " and tv_episodes.showid = '" + str(
                parse_result.show.indexerid
            ) + "' and tv_episodes.season = '" + str(
                parse_result.season_number
            ) + "' and tv_episodes.episode = '" + str(
                parse_result.episode_numbers[0]) + "'"

        search_sql += " and tv_episodes.status IN (" + ",".join(
            [str(x) for x in Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        if main_db.MainDB().select(search_sql, ['%' + videofile]):
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", LOGGER.DEBUG)
            return True

    return False
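The example above interpolates the showid/season/episode filter into the SQL text with str() while the LIKE clause already uses a bound "?" placeholder. Below is a sketch of the same lookup done entirely with bound parameters, assuming main_db.MainDB().select() accepts a parameter list for every "?" placeholder (as the LIKE clause suggests); it is an illustration, not the project's actual code:

search_sql = ("SELECT tv_episodes.indexerid, history.resource "
              "FROM tv_episodes "
              "INNER JOIN history ON history.showid = tv_episodes.showid "
              "WHERE history.season = tv_episodes.season "
              "AND history.episode = tv_episodes.episode")
params = []

if parse_result and (parse_result.show.indexerid
                     and parse_result.episode_numbers
                     and parse_result.season_number):
    search_sql += (" AND tv_episodes.showid = ? AND tv_episodes.season = ?"
                   " AND tv_episodes.episode = ?")
    params += [parse_result.show.indexerid,
               parse_result.season_number,
               parse_result.episode_numbers[0]]

# one "?" per downloaded-quality status value
search_sql += " AND tv_episodes.status IN ({})".format(
    ",".join("?" * len(Quality.DOWNLOADED)))
params += list(Quality.DOWNLOADED)

search_sql += " AND history.resource LIKE ?"
params.append('%' + videofile)

if main_db.MainDB().select(search_sql, params):
    return True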
Example #8
    def already_postprocessed(self, dirName, videofile, force):
        """
        Check if we already post processed a file

        :param dirName: Directory a file resides in
        :param videofile: File name
        :param force: If True, skip the already-processed checks
        :return: True if the file has already been post-processed, False otherwise
        """
        if force:
            return False

        session = sickrage.app.main_db.session()

        # Avoid processing the same dir again if we use a process method other than move
        if session.query(MainDB.TVEpisode).filter(
                or_(MainDB.TVEpisode.release_name.contains(dirName),
                    MainDB.TVEpisode.release_name.contains(
                        videofile))).count() > 0:
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName)
        try:
            parse_result = np.parse(dirName)
        except:
            parse_result = False

        for h in session.query(MainDB.History).filter(
                MainDB.History.resource.endswith(videofile)):
            for e in session.query(MainDB.TVEpisode).filter_by(
                    series_id=h.series_id, season=h.season,
                    episode=h.episode).filter(
                        MainDB.TVEpisode.status.in_(
                            EpisodeStatus.composites(
                                EpisodeStatus.DOWNLOADED))):
                if parse_result and (parse_result.series_id
                                     and parse_result.episode_numbers
                                     and parse_result.season_number):
                    if e.series_id == int(parse_result.series_id) \
                            and e.season == int(parse_result.season_number) \
                            and e.episode == int(parse_result.episode_numbers[0]):
                        return True
                else:
                    return True

        # Checks for processed file marker
        if os.path.isfile(os.path.join(dirName, videofile + '.sr_processed')):
            return True

        return False
Example #9
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: If True, skip the already-processed checks
    :param result: Result object passed by the caller (not used by this check)
    :return: True if the file has already been post-processed, False otherwise
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method other than move
    if [
            x for x in sickrage.app.main_db.all('tv_episodes')
            if x['release_name'] and (
                x['release_name'] in dirName or x['release_name'] in videofile)
    ]:
        return True

    # Needed if we have downloaded the same episode @ different quality
    # But we need to make sure we check the history of the episode we're going to PP, and not others
    np = NameParser(dirName)
    try:
        parse_result = np.parse(dirName)
    except:
        parse_result = False

    for h in (h for h in sickrage.app.main_db.all('history')
              if h['resource'].endswith(videofile)):
        for e in (e for e in sickrage.app.main_db.get_many(
                'tv_episodes', h['showid'])
                  if h['season'] == e['season'] and h['episode'] ==
                  e['episode'] and e['status'] in Quality.DOWNLOADED):

            # If we find a showid, a season number, and one or more episode numbers then we need to use those in the
            # query
            if parse_result and (parse_result.indexerid
                                 and parse_result.episode_numbers
                                 and parse_result.season_number):
                if e['showid'] == int(parse_result.indexerid) and \
                        e['season'] == int(parse_result.season_number) and \
                        e['episode'] == int(parse_result.episode_numbers[0]):
                    return True
            else:
                return True

    # Checks for processed file marker
    if os.path.isfile(os.path.join(dirName, videofile + '.sr_processed')):
        return True

    return False
Example #10
def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = formatted_filename(ep.show, ep, pattern, multi, anime_type) + '.ext'
    new_path = formatted_dir(ep.show, ep, pattern, multi)
    if not file_only:
        new_name = os.path.join(new_path, new_name)

    if not new_name:
        sickrage.LOGGER.debug("Unable to create a name out of " + pattern)
        return False

    sickrage.LOGGER.debug("Trying to parse " + new_name)

    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception:
        sickrage.LOGGER.debug("Unable to parse " + new_name + ", not valid")
        return False

    sickrage.LOGGER.debug("Parsed " + new_name + " into " + str(result))

    if abd or sports:
        if result.air_date != ep.airdate:
            sickrage.LOGGER.debug("Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [x.absolute_number for x in
                                                                            [ep] + ep.relatedEps]:
            sickrage.LOGGER.debug("Absolute numbering incorrect in parsed episode, pattern isn't valid")
            return False
    else:
        if result.season_number != ep.season:
            sickrage.LOGGER.debug("Season number incorrect in parsed episode, pattern isn't valid")
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            sickrage.LOGGER.debug("Episode numbering incorrect in parsed episode, pattern isn't valid")
            return False

    return True
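Both copies of validate_name in this listing (here and in Example #31) work the same way: render a sample episode with the candidate pattern, re-parse the rendered name with NameParser, and compare the parsed numbering against the sample. A hypothetical guard around a user-supplied pattern might look like this (the pattern string is purely illustrative, not necessarily one of the project's defaults):

    pattern = "%SN - S%0SE%0E - %EN"  # illustrative pattern only
    if not validate_name(pattern, multi=None, anime_type=None):
        sickrage.LOGGER.debug("Rejecting naming pattern: " + pattern)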
Example #11
    def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):

        # check if we passed in a parsed result or should we try and create one
        if not parse_result:

            # create showObj from indexer_id if available
            showObj = None
            if indexer_id:
                showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

            try:
                myParser = NameParser(showObj=showObj)
                parse_result = myParser.parse(name)
            except (InvalidShowException, InvalidNameException):
                sickrage.srCore.srLogger.debug(
                    "RSS ITEM:[{}] IGNORED!".format(name))
                return

            if not parse_result or not parse_result.series_name:
                return

        # if we made it this far then let's add the parsed result to the cache for use later on
        season = parse_result.season_number if parse_result.season_number else 1
        episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a pipe-separated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(
                time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            quality = parse_result.quality

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            sickrage.srCore.srLogger.debug("RSS ITEM:[{}] ADDED!".format(name))

            return [
                "INSERT OR IGNORE INTO [" + self.providerID +
                "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
                [
                    name, season, episodeText, parse_result.show.indexerid,
                    url, curTimestamp, quality, release_group, version
                ]
            ]
Example #12
    def _is_season_pack(name):

        try:
            myParser = NameParser(tryIndexers=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename %s into a valid episode" % name)
            return False
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename %s into a valid show" % name)
            return False

        if len([x for x in MainDB().db.get_many('tv_episodes', parse_result.show.indexerid, with_doc=True)
                if x['doc']['season'] == parse_result.season_number]) == len(parse_result.episode_numbers):
            return True
Example #13
    def _is_season_pack(name):

        try:
            myParser = NameParser(tryIndexers=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename %s into a valid episode" % name)
            return False
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename %s into a valid show" % name)
            return False

        if len([x for x in sickrage.srCore.mainDB.db.get_many('tv_episodes', parse_result.show.indexerid, with_doc=True)
                if x['doc']['season'] == parse_result.season_number]) == len(parse_result.episode_numbers):
            return True
Example #14
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: If True, skip the already-processed checks
    :param result: Result object passed by the caller (not used by this check)
    :return: True if the file has already been post-processed, False otherwise
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method other than move
    if main_db.MainDB().select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName]):
        # result.output += logHelper(u"You're trying to post process a dir that's already been processed, skipping", LOGGER.DEBUG)
        return True

    else:
        if main_db.MainDB().select("SELECT * FROM tv_episodes WHERE release_name = ?",
                                   [videofile.rpartition('.')[0]]):
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", LOGGER.DEBUG)
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName, tryIndexers=True)
        try:
            parse_result = np.parse(dirName)
        except:
            parse_result = False

        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"  # This part is always the same
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
        # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (
                        parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
            search_sql += " and tv_episodes.showid = '" + str(
                    parse_result.show.indexerid) + "' and tv_episodes.season = '" + str(
                    parse_result.season_number) + "' and tv_episodes.episode = '" + str(
                    parse_result.episode_numbers[0]) + "'"

        search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        if main_db.MainDB().select(search_sql, ['%' + videofile]):
            # result.output += logHelper(u"You're trying to post process a video that's already been processed, skipping", LOGGER.DEBUG)
            return True

    return False
Example #15
    def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):

        # check if we passed in a parsed result or should we try and create one
        if not parse_result:

            # create showObj from indexer_id if available
            showObj = None
            if indexer_id:
                showObj = findCertainShow(sickrage.showList, indexer_id)

            try:
                myParser = NameParser(showObj=showObj)
                parse_result = myParser.parse(name)
            except InvalidNameException:
                sickrage.LOGGER.debug("Unable to parse the filename " + name + " into a valid episode")
                return None
            except InvalidShowException:
                sickrage.LOGGER.debug("Unable to parse the filename " + name + " into a valid show")
                return None

            if not parse_result or not parse_result.series_name:
                return None

        # if we made it this far then let's add the parsed result to the cache for use later on
        season = parse_result.season_number if parse_result.season_number else 1
        episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a pipe-separated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            quality = parse_result.quality

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            sickrage.LOGGER.debug("Added RSS item: [" + name + "] to cache: [" + self.providerID + "]")

            return [
                "INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
                [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group,
                 version]]
Example #16
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: If True, skip the already-processed checks
    :param result: Result object passed by the caller (not used by this check)
    :return: True if the file has already been post-processed, False otherwise
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method other than move
    if [x for x in sickrage.app.main_db.all('tv_episodes') if
        x['release_name'] and (x['release_name'] in dirName or x['release_name'] in videofile)]:
        return True

    # Needed if we have downloaded the same episode @ different quality
    # But we need to make sure we check the history of the episode we're going to PP, and not others
    np = NameParser(dirName)
    try:
        parse_result = np.parse(dirName)
    except:
        parse_result = False

    for h in (h for h in sickrage.app.main_db.all('history') if h['resource'].endswith(videofile)):
        for e in (e for e in sickrage.app.main_db.get_many('tv_episodes', h['showid'])
                  if h['season'] == e['season'] and h['episode'] == e['episode']
                     and e['status'] in Quality.DOWNLOADED):

            # If we find a showid, a season number, and one or more episode numbers then we need to use those in the
            # query
            if parse_result and (parse_result.indexerid and
                                 parse_result.episode_numbers and
                                 parse_result.season_number):
                if e['showid'] == int(parse_result.indexerid) and \
                        e['season'] == int(parse_result.season_number) and \
                        e['episode'] == int(parse_result.episode_numbers[0]):
                    return True
            else:
                return True

    # Checks for processed file marker
    if os.path.isfile(os.path.join(dirName, videofile + '.sr_processed')):
        return True

    return False
Example #17
    def release_group(show, name):
        if name:
            name = remove_non_release_groups(remove_extension(name))
        else:
            return ""

        try:
            np = NameParser(name, showObj=show, naming_pattern=True)
            parse_result = np.parse(name)
        except (InvalidNameException, InvalidShowException) as e:
            sickrage.LOGGER.debug("Unable to get parse release_group: {}".format(e))
            return ''

        if not parse_result.release_group:
            return ''
        return parse_result.release_group
Example #18
    def _test_combo(self, name, result, which_regexes):
        if VERBOSE:
            print()
            print('Testing', name)

        np = NameParser(True, validate_show=False)
        test_result = np.parse(name)

        if DEBUG:
            print(test_result, test_result.which_regex)
            print(result, which_regexes)

        self.assertEqual(test_result, result)
        for cur_regex in which_regexes:
            self.assertTrue(cur_regex in test_result.which_regex)
        self.assertEqual(len(which_regexes), len(test_result.which_regex))
Example #19
    def _is_season_pack(name):

        try:
            myParser = NameParser(tryIndexers=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            sickrage.LOGGER.debug("Unable to parse the filename %s into a valid episode" % name)
            return False
        except InvalidShowException:
            sickrage.LOGGER.debug("Unable to parse the filename %s into a valid show" % name)
            return False

        sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
        episodes = main_db.MainDB().select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
        if int(episodes[0][b"count"]) == len(parse_result.episode_numbers):
            return True
Example #20
    def _test_combo(self, name, result, which_regexes):
        if VERBOSE:
            print()
            print('Testing', name)

        np = NameParser(True, validate_show=False)
        test_result = np.parse(name)

        if DEBUG:
            print(test_result, test_result.which_regex)
            print(result, which_regexes)

        self.assertEqual(test_result, result)
        for cur_regex in which_regexes:
            self.assertTrue(cur_regex in test_result.which_regex)
        self.assertEqual(len(which_regexes), len(test_result.which_regex))
Example #21
def already_postprocessed(dirName, videofile, force, result):
    """
    Check if we already post processed a file

    :param dirName: Directory a file resides in
    :param videofile: File name
    :param force: If True, skip the already-processed checks
    :param result: Result object passed by the caller (not used by this check)
    :return: True if the file has already been post-processed, False otherwise
    """
    if force:
        return False

    # Avoid processing the same dir again if we use a process method other than move
    if [x for x in sickrage.srCore.mainDB.db.all('tv_episodes', with_doc=True)
        if x['doc']['release_name'] == dirName]:
        return True
    else:
        if [x for x in sickrage.srCore.mainDB.db.all('tv_episodes', with_doc=True)
            if x['doc']['release_name'] == videofile.rpartition('.')[0]]:
            return True

        # Needed if we have downloaded the same episode @ different quality
        # But we need to make sure we check the history of the episode we're going to PP, and not others
        np = NameParser(dirName, tryIndexers=True)
        try:
            parse_result = np.parse(dirName)
        except:
            parse_result = False

        for h in [h['doc'] for h in sickrage.srCore.mainDB.db.all('history', with_doc=True)
                  if h['doc']['resource'].endswith(videofile)]:
            for e in [e['doc'] for e in sickrage.srCore.mainDB.db.get_many('tv_episodes', h['showid'], with_doc=True)
                      if h['season'] == e['doc']['season']
                      and h['episode'] == e['doc']['episode']
                      and e['doc']['status'] in Quality.DOWNLOADED]:

                # If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
                if parse_result and (
                        parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number):
                    if e['showid'] == int(parse_result.show.indexerid) \
                            and e['season'] == int(parse_result.season_number) \
                            and e['episode'] == int(parse_result.episode_numbers[0]):
                        return True
                else:
                    return True

    return False
Example #22
    def release_group(show, name):
        if name:
            name = remove_non_release_groups(remove_extension(name))
        else:
            return ""

        try:
            np = NameParser(name, showObj=show, naming_pattern=True)
            parse_result = np.parse(name)
        except (InvalidNameException, InvalidShowException) as e:
            sickrage.LOGGER.debug(
                "Unable to get parse release_group: {}".format(e))
            return ''

        if not parse_result.release_group:
            return ''
        return parse_result.release_group
Example #23
    def process(self):
        """
        Do the actual work

        :return: True
        """
        self._log("Failed download detected: (" + str(self.nzb_name) + ", " +
                  str(self.dir_name) + ")")

        releaseName = show_names.determineReleaseName(self.dir_name,
                                                      self.nzb_name)
        if releaseName is None:
            self._log("Warning: unable to find a valid release name.",
                      sickrage.srCore.srLogger.WARNING)
            raise FailedPostProcessingFailedException()

        try:
            parser = NameParser(False)
            parsed = parser.parse(releaseName)
        except InvalidNameException:
            self._log("Error: release name is invalid: " + releaseName,
                      sickrage.srCore.srLogger.DEBUG)
            raise FailedPostProcessingFailedException()
        except InvalidShowException:
            self._log(
                "Error: unable to parse release name " + releaseName +
                " into a valid show", sickrage.srCore.srLogger.DEBUG)
            raise FailedPostProcessingFailedException()

        sickrage.srCore.srLogger.debug("name_parser info: ")
        sickrage.srCore.srLogger.debug(" - " + str(parsed.series_name))
        sickrage.srCore.srLogger.debug(" - " + str(parsed.season_number))
        sickrage.srCore.srLogger.debug(" - " + str(parsed.episode_numbers))
        sickrage.srCore.srLogger.debug(" - " + str(parsed.extra_info))
        sickrage.srCore.srLogger.debug(" - " + str(parsed.release_group))
        sickrage.srCore.srLogger.debug(" - " + str(parsed.air_date))

        for episode in parsed.episode_numbers:
            sickrage.srCore.SEARCHQUEUE.put(
                FailedQueueItem(
                    parsed.show,
                    [parsed.show.getEpisode(parsed.season_number, episode)]))

        return True
Example #24
    def _is_season_pack(name):

        try:
            myParser = NameParser(tryIndexers=True)
            parse_result = myParser.parse(name)
        except InvalidNameException:
            sickrage.LOGGER.debug(
                "Unable to parse the filename %s into a valid episode" % name)
            return False
        except InvalidShowException:
            sickrage.LOGGER.debug(
                "Unable to parse the filename %s into a valid show" % name)
            return False

        sql_selection = "SELECT count(*) AS count FROM tv_episodes WHERE showid = ? AND season = ?"
        episodes = main_db.MainDB().select(
            sql_selection,
            [parse_result.show.indexerid, parse_result.season_number])
        if int(episodes[0][b'count']) == len(parse_result.episode_numbers):
            return True
Example #25
    def _test_combo(self, name, result, which_regexes):

        if VERBOSE:
            print
            print 'Testing', name

        np = NameParser(True)

        try:
            test_result = np.parse(name)
        except InvalidShowException:
            return False

        if DEBUG:
            print test_result, test_result.which_regex
            print result, which_regexes

        self.assertEqual(test_result, result)
        for cur_regex in which_regexes:
            self.assertTrue(cur_regex in test_result.which_regex)
        self.assertEqual(len(which_regexes), len(test_result.which_regex))
Example #26
    def _test_combo(self, name, result, which_regexes):

        if VERBOSE:
            print()
            print('Testing', name)

        np = NameParser(True)

        try:
            test_result = np.parse(name)
        except InvalidShowException:
            return False

        if DEBUG:
            print(test_result, test_result.which_regex)
            print(result, which_regexes)

        self.assertEqual(test_result, result)
        for cur_regex in which_regexes:
            self.assertTrue(cur_regex in test_result.which_regex)
        self.assertEqual(len(which_regexes), len(test_result.which_regex))
Example #27
    def process(self):
        """
        Do the actual work

        :return: True
        """
        self._log("Failed download detected: (" + str(self.nzb_name) + ", " + str(self.dir_name) + ")")

        releaseName = show_names.determineReleaseName(self.dir_name, self.nzb_name)
        if releaseName is None:
            self._log("Warning: unable to find a valid release name.", sickrage.LOGGER.WARNING)
            raise FailedPostProcessingFailedException()

        try:
            parser = NameParser(False)
            parsed = parser.parse(releaseName)
        except InvalidNameException:
            self._log("Error: release name is invalid: " + releaseName, sickrage.LOGGER.DEBUG)
            raise FailedPostProcessingFailedException()
        except InvalidShowException:
            self._log("Error: unable to parse release name " + releaseName + " into a valid show", sickrage.LOGGER.DEBUG)
            raise FailedPostProcessingFailedException()

        sickrage.LOGGER.debug("name_parser info: ")
        sickrage.LOGGER.debug(" - " + str(parsed.series_name))
        sickrage.LOGGER.debug(" - " + str(parsed.season_number))
        sickrage.LOGGER.debug(" - " + str(parsed.episode_numbers))
        sickrage.LOGGER.debug(" - " + str(parsed.extra_info))
        sickrage.LOGGER.debug(" - " + str(parsed.release_group))
        sickrage.LOGGER.debug(" - " + str(parsed.air_date))

        for episode in parsed.episode_numbers:
            segment = parsed.show.getEpisode(parsed.season_number, episode)

            cur_failed_queue_item = FailedQueueItem(parsed.show, [segment])
            sickrage.SEARCHQUEUE.add_item(cur_failed_queue_item)

        return True
Example #28
    def _getProperList(self):
        """
        Walk providers for propers
        """
        propers = {}

        search_date = datetime.datetime.today() - datetime.timedelta(days=2)

        origThreadName = threading.currentThread().getName()

        recently_aired = []
        for show in sickrage.app.showlist:
            self._lastProperSearch = self._get_lastProperSearch(show.indexerid)

            for episode in sickrage.app.main_db.get_many('tv_episodes', show.indexerid):
                if episode['airdate'] >= str(search_date.toordinal()):
                    if episode['status'] in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST:
                        recently_aired += [episode]

            self._set_lastProperSearch(show.indexerid, datetime.datetime.today().toordinal())

        if not recently_aired:
            sickrage.app.log.info('No recently aired episodes, nothing to search for')
            return []

        # for each provider, get a list of propers
        for providerID, providerObj in sickrage.app.search_providers.sort(
                randomize=sickrage.app.config.randomize_providers).items():
            # check provider type and provider is enabled
            if not sickrage.app.config.use_nzbs and providerObj.type in [NZBProvider.type,
                                                                         NewznabProvider.type]:
                continue
            elif not sickrage.app.config.use_torrents and providerObj.type in [TorrentProvider.type,
                                                                               TorrentRssProvider.type]:
                continue
            elif not providerObj.isEnabled:
                continue

            threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

            sickrage.app.log.info("Searching for any new PROPER releases from " + providerObj.name)

            try:
                curPropers = providerObj.find_propers(recently_aired)
            except AuthException as e:
                sickrage.app.log.warning("Authentication error: {}".format(e))
                continue
            except Exception as e:
                sickrage.app.log.debug(
                    "Error while searching " + providerObj.name + ", skipping: {}".format(e))
                sickrage.app.log.debug(traceback.format_exc())
                continue

            # if they haven't been added by a different provider then add the proper to the list
            for x in curPropers:
                if not re.search(r'(^|[. _-])(proper|repack)([. _-]|$)', x.name, re.I):
                    sickrage.app.log.debug('findPropers returned a non-proper, we have caught and skipped it.')
                    continue

                name = self._genericName(x.name)
                if name not in propers:
                    sickrage.app.log.debug("Found new proper: " + x.name)
                    x.provider = providerObj
                    propers[name] = x

            threading.currentThread().setName(origThreadName)

        # take the list of unique propers and sort it by date, newest first
        sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
        finalPropers = []

        for curProper in sortedPropers:
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                sickrage.app.log.debug(
                    "Unable to parse the filename " + curProper.name + " into a valid episode")
                continue
            except InvalidShowException:
                sickrage.app.log.debug("Unable to parse the filename " + curProper.name + " into a valid show")
                continue

            if not parse_result.series_name:
                continue

            if not parse_result.episode_numbers:
                sickrage.app.log.debug(
                    "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
                continue

            sickrage.app.log.debug(
                "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name)

            # set the indexerid in the db to the show's indexerid
            curProper.indexerid = parse_result.indexerid

            # set the indexer in the db to the show's indexer
            curProper.indexer = parse_result.show.indexer

            # populate our Proper instance
            curProper.show = parse_result.show
            curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
            curProper.episode = parse_result.episode_numbers[0]
            curProper.release_group = parse_result.release_group
            curProper.version = parse_result.version
            curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
            curProper.content = None

            # filter release
            bestResult = pickBestResult(curProper, parse_result.show)
            if not bestResult:
                sickrage.app.log.debug("Proper " + curProper.name + " were rejected by our release filters.")
                continue

            # only get anime proper if it has release group and version
            if bestResult.show.is_anime:
                if not bestResult.release_group and bestResult.version == -1:
                    sickrage.app.log.debug(
                        "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                    continue

            # check if we actually want this proper (if it's the right quality)            
            dbData = [x for x in sickrage.app.main_db.get_many('tv_episodes', bestResult.indexerid)
                      if x['season'] == bestResult.season and x['episode'] == bestResult.episode]

            if not dbData:
                continue

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            oldStatus, oldQuality = Quality.splitCompositeStatus(int(dbData[0]["status"]))
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
                continue

            # check if we actually want this proper (if it's the right release group and a higher version)
            if bestResult.show.is_anime:
                dbData = [x for x in sickrage.app.main_db.get_many('tv_episodes', bestResult.indexerid)
                          if x['season'] == bestResult.season and x['episode'] == bestResult.episode]

                oldVersion = int(dbData[0]["version"])
                oldRelease_group = (dbData[0]["release_group"])

                if -1 < oldVersion < bestResult.version:
                    sickrage.app.log.info(
                        "Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
                else:
                    continue

                if oldRelease_group != bestResult.release_group:
                    sickrage.app.log.info(
                        "Skipping proper from release group: " + bestResult.release_group + ", does not match existing release group: " + oldRelease_group)
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
                sickrage.app.log.info("Found a proper that we need: " + str(bestResult.name))
                finalPropers.append(bestResult)

        return finalPropers
Example #29
def splitNZBResult(result):
    """
    Split result into seperate episodes

    :param result: search result object
    :return: False upon failure, a list of episode objects otherwise
    """
    urlData = sickrage.srCore.srWebSession.get(result.url, needBytes=True)
    if urlData is None:
        sickrage.srCore.srLogger.error("Unable to load url " + result.url +
                                       ", can't download season NZB")
        return False

    # parse the season ep name
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        sickrage.srCore.srLogger.debug("Unable to parse the filename " +
                                       result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.srCore.srLogger.debug("Unable to parse the filename " +
                                       result.name + " into a valid show")
        return False

    # bust it up
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:

        sickrage.srCore.srLogger.debug("Split out " + newNZB + " from " +
                                       result.name)

        # parse the name
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " +
                                           newNZB + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.srCore.srLogger.debug("Unable to parse the filename " +
                                           newNZB + " into a valid show")
            return False

        # make sure the result is sane
        if (parse_result.season_number is not None
                and parse_result.season_number != season) or (
                    parse_result.season_number is None and season != 1):
            sickrage.srCore.srLogger.warning(
                "Found " + newNZB + " inside " + result.name +
                " but it doesn't seem to belong to the same season, ignoring it"
            )
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.srCore.srLogger.warning(
                "Found " + newNZB + " inside " + result.name +
                " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo,
                                                   result.quality):
                sickrage.srCore.srLogger.info(
                    "Ignoring result " + newNZB +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[result.quality])
                wantEp = False
                break
        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].getEpisode(season, curEp))

        # make a result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
Example #30
    def _getProperList(self):
        """
        Walk providers for propers
        """
        propers = {}

        search_date = datetime.datetime.today() - datetime.timedelta(days=2)

        origThreadName = threading.currentThread().getName()

        # for each provider, get a list of propers
        for providerID, providerObj in sickrage.srCore.providersDict.sort(
                randomize=sickrage.srCore.srConfig.RANDOMIZE_PROVIDERS).items():
            # check provider type and provider is enabled
            if not sickrage.srCore.srConfig.USE_NZBS and providerObj.type in [NZBProvider.type, NewznabProvider.type]:
                continue
            elif not sickrage.srCore.srConfig.USE_TORRENTS and providerObj.type in [TorrentProvider.type,
                                                                                    TorrentRssProvider.type]:
                continue
            elif not providerObj.isEnabled:
                continue

            threading.currentThread().setName(origThreadName + " :: [" + providerObj.name + "]")

            sickrage.srCore.srLogger.info("Searching for any new PROPER releases from " + providerObj.name)

            try:
                curPropers = providerObj.findPropers(search_date)
            except AuthException as e:
                sickrage.srCore.srLogger.debug("Authentication error: {}".format(e.message))
                continue
            except Exception as e:
                sickrage.srCore.srLogger.debug(
                    "Error while searching " + providerObj.name + ", skipping: {}".format(e.message))
                sickrage.srCore.srLogger.debug(traceback.format_exc())
                continue

            # if they haven't been added by a different provider then add the proper to the list
            for x in curPropers:
                if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', x.name, re.I):
                    sickrage.srCore.srLogger.debug('findPropers returned a non-proper, we have caught and skipped it.')
                    continue

                name = self._genericName(x.name)
                if name not in propers:
                    sickrage.srCore.srLogger.debug("Found new proper: " + x.name)
                    x.provider = providerObj
                    propers[name] = x

            threading.currentThread().setName(origThreadName)

        # take the list of unique propers and sort it by date, newest first
        sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
        finalPropers = []

        for curProper in sortedPropers:

            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                sickrage.srCore.srLogger.debug(
                    "Unable to parse the filename " + curProper.name + " into a valid episode")
                continue
            except InvalidShowException:
                sickrage.srCore.srLogger.debug("Unable to parse the filename " + curProper.name + " into a valid show")
                continue

            if not parse_result.series_name:
                continue

            if not parse_result.episode_numbers:
                sickrage.srCore.srLogger.debug(
                    "Ignoring " + curProper.name + " because it's for a full season rather than specific episode")
                continue

            sickrage.srCore.srLogger.debug(
                "Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name)

            # set the indexerid in the db to the show's indexerid
            curProper.indexerid = parse_result.show.indexerid

            # set the indexer in the db to the show's indexer
            curProper.indexer = parse_result.show.indexer

            # populate our Proper instance
            curProper.show = parse_result.show
            curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
            curProper.episode = parse_result.episode_numbers[0]
            curProper.release_group = parse_result.release_group
            curProper.version = parse_result.version
            curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
            curProper.content = None

            # filter release
            bestResult = pickBestResult(curProper, parse_result.show)
            if not bestResult:
                sickrage.srCore.srLogger.debug("Proper " + curProper.name + " were rejected by our release filters.")
                continue

            # only get anime proper if it has release group and version
            if bestResult.show.is_anime:
                if not bestResult.release_group and bestResult.version == -1:
                    sickrage.srCore.srLogger.debug(
                        "Proper " + bestResult.name + " doesn't have a release group and version, ignoring it")
                    continue

            # check if we actually want this proper (if it's the right quality)            
            sqlResults = main_db.MainDB().select(
                "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [bestResult.indexerid, bestResult.season, bestResult.episode])
            if not sqlResults:
                continue

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
                continue

            # check if we actually want this proper (if it's the right release group and a higher version)
            if bestResult.show.is_anime:
                sqlResults = main_db.MainDB().select(
                    "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                    [bestResult.indexerid, bestResult.season, bestResult.episode])

                oldVersion = int(sqlResults[0]["version"])
                oldRelease_group = (sqlResults[0]["release_group"])

                if -1 < oldVersion < bestResult.version:
                    sickrage.srCore.srLogger.info(
                        "Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
                else:
                    continue

                if oldRelease_group != bestResult.release_group:
                    sickrage.srCore.srLogger.info(
                        "Skipping proper from release group: " + bestResult.release_group + ", does not match existing release group: " + oldRelease_group)
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in map(
                    operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
                sickrage.srCore.srLogger.info("Found a proper that we need: " + str(bestResult.name))
                finalPropers.append(bestResult)

        return finalPropers
Example #31
def validate_name(pattern,
                  multi=None,
                  anime_type=None,
                  file_only=False,
                  abd=False,
                  sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi)
    if not file_only:
        new_name = os.path.join(new_path, new_name)

    if not new_name:
        sickrage.srCore.srLogger.debug("Unable to create a name out of " +
                                       pattern)
        return False

    sickrage.srCore.srLogger.debug("Trying to parse " + new_name)

    parser = NameParser(True, showObj=ep.show, naming_pattern=True)

    try:
        result = parser.parse(new_name)
    except Exception:
        sickrage.srCore.srLogger.debug("Unable to parse " + new_name +
                                       ", not valid")
        return False

    sickrage.srCore.srLogger.debug("Parsed " + new_name + " into " +
                                   str(result))

    if abd or sports:
        if result.air_date != ep.airdate:
            sickrage.srCore.srLogger.debug(
                "Air date incorrect in parsed episode, pattern isn't valid")
            return False
    elif anime_type != 3:
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [
                x.absolute_number for x in [ep] + ep.relatedEps
        ]:
            sickrage.srCore.srLogger.debug(
                "Absolute numbering incorrect in parsed episode, pattern isn't valid"
            )
            return False
    else:
        if result.season_number != ep.season:
            sickrage.srCore.srLogger.debug(
                "Season number incorrect in parsed episode, pattern isn't valid"
            )
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            sickrage.srCore.srLogger.debug(
                "Episode numbering incorrect in parsed episode, pattern isn't valid"
            )
            return False

    return True
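
A hedged usage sketch for validate_name; the import path and the naming patterns below are assumptions used only for illustration, not taken from this snippet.

# Hypothetical usage sketch; the module path is an assumption and the patterns
# are ordinary SickRage-style naming tokens chosen only for illustration.
from sickrage.core.nameparser.validator import validate_name

for pattern in ('%SN - %Sx%0E - %EN', '%S.N.S%0SE%0E.%E.N'):
    valid = validate_name(pattern, multi=None, anime_type=None)
    print('{}: {}'.format(pattern, 'valid' if valid else 'invalid'))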
Example #32
0
    def _getProperList(self):
        """
        Walk providers for propers
        """
        propers = {}

        search_date = datetime.datetime.today() - datetime.timedelta(days=2)

        origThreadName = threading.currentThread().getName()

        # for each provider get a list of propers
        for providerID, providerObj in sickrage.srCore.providersDict.sort(
                randomize=sickrage.srCore.srConfig.RANDOMIZE_PROVIDERS).items():
            # check provider type and provider is enabled
            if not sickrage.srCore.srConfig.USE_NZBS and providerObj.type in [
                    NZBProvider.type, NewznabProvider.type
            ]:
                continue
            elif not sickrage.srCore.srConfig.USE_TORRENTS and providerObj.type in [
                    TorrentProvider.type, TorrentRssProvider.type
            ]:
                continue
            elif not providerObj.isEnabled:
                continue

            threading.currentThread().setName(origThreadName + " :: [" +
                                              providerObj.name + "]")

            sickrage.srCore.srLogger.info(
                "Searching for any new PROPER releases from " +
                providerObj.name)

            try:
                curPropers = providerObj.find_propers(search_date)
            except AuthException as e:
                sickrage.srCore.srLogger.warning(
                    "Authentication error: {}".format(e.message))
                continue
            except Exception as e:
                sickrage.srCore.srLogger.debug(
                    "Error while searching " + providerObj.name +
                    ", skipping: {}".format(e.message))
                sickrage.srCore.srLogger.debug(traceback.format_exc())
                continue

            # if they haven't been added by a different provider then add the proper to the list
            for x in curPropers:
                if not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)',
                                 x.name, re.I):
                    sickrage.srCore.srLogger.debug(
                        'findPropers returned a non-proper, we have caught and skipped it.'
                    )
                    continue

                name = self._genericName(x.name)
                if name not in propers:
                    sickrage.srCore.srLogger.debug("Found new proper: " +
                                                   x.name)
                    x.provider = providerObj
                    propers[name] = x

            threading.currentThread().setName(origThreadName)

        # take the list of unique propers and sort it by date, newest first
        sortedPropers = sorted(propers.values(),
                               key=operator.attrgetter('date'),
                               reverse=True)
        finalPropers = []

        for curProper in sortedPropers:

            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                sickrage.srCore.srLogger.debug(
                    "Unable to parse the filename " + curProper.name +
                    " into a valid episode")
                continue
            except InvalidShowException:
                sickrage.srCore.srLogger.debug(
                    "Unable to parse the filename " + curProper.name +
                    " into a valid show")
                continue

            if not parse_result.series_name:
                continue

            if not parse_result.episode_numbers:
                sickrage.srCore.srLogger.debug(
                    "Ignoring " + curProper.name +
                    " because it's for a full season rather than specific episode"
                )
                continue

            sickrage.srCore.srLogger.debug("Successful match! Result " +
                                           parse_result.original_name +
                                           " matched to show " +
                                           parse_result.show.name)

            # set the indexerid in the db to the show's indexerid
            curProper.indexerid = parse_result.show.indexerid

            # set the indexer in the db to the show's indexer
            curProper.indexer = parse_result.show.indexer

            # populate our Proper instance
            curProper.show = parse_result.show
            curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
            curProper.episode = parse_result.episode_numbers[0]
            curProper.release_group = parse_result.release_group
            curProper.version = parse_result.version
            curProper.quality = Quality.nameQuality(curProper.name,
                                                    parse_result.is_anime)
            curProper.content = None

            # filter release
            bestResult = pickBestResult(curProper, parse_result.show)
            if not bestResult:
                sickrage.srCore.srLogger.debug(
                    "Proper " + curProper.name +
                    " was rejected by our release filters.")
                continue

            # only get anime proper if it has release group and version
            if bestResult.show.is_anime:
                if not bestResult.release_group and bestResult.version == -1:
                    sickrage.srCore.srLogger.debug(
                        "Proper " + bestResult.name +
                        " doesn't have a release group and version, ignoring it"
                    )
                    continue

            # check if we actually want this proper (if it's the right quality)
            dbData = [
                x['doc'] for x in sickrage.srCore.mainDB.db.get_many(
                    'tv_episodes', bestResult.indexerid, with_doc=True)
                if x['doc']['season'] == bestResult.season
                and x['doc']['episode'] == bestResult.episode
            ]
            if not dbData: continue

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            oldStatus, oldQuality = Quality.splitCompositeStatus(
                int(dbData[0]["status"]))
            if oldStatus not in (DOWNLOADED,
                                 SNATCHED) or oldQuality != bestResult.quality:
                continue

            # check if we actually want this proper (if it's the right release group and a higher version)
            if bestResult.show.is_anime:
                dbData = [
                    x['doc'] for x in sickrage.srCore.mainDB.db.get_many(
                        'tv_episodes', bestResult.indexerid, with_doc=True)
                    if x['doc']['season'] == bestResult.season
                    and x['doc']['episode'] == bestResult.episode
                ]

                oldVersion = int(dbData[0]["version"])
                oldRelease_group = (dbData[0]["release_group"])

                if -1 < oldVersion < bestResult.version:
                    sickrage.srCore.srLogger.info("Found new anime v" +
                                                  str(bestResult.version) +
                                                  " to replace existing v" +
                                                  str(oldVersion))
                else:
                    continue

                if oldRelease_group != bestResult.release_group:
                    sickrage.srCore.srLogger.info(
                        "Skipping proper from release group: " +
                        bestResult.release_group +
                        ", does not match existing release group: " +
                        oldRelease_group)
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if bestResult.indexerid != -1 and (
                    bestResult.indexerid, bestResult.season,
                    bestResult.episode) not in map(
                        operator.attrgetter('indexerid', 'season', 'episode'),
                        finalPropers):
                sickrage.srCore.srLogger.info("Found a proper that we need: " +
                                              str(bestResult.name))
                finalPropers.append(bestResult)

        return finalPropers
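
The provider loop above discards anything that does not advertise itself as a PROPER or REPACK before parsing; the same regex can be exercised on its own, as in this minimal sketch.

import re

# Same pattern the loop above applies to provider results.
PROPER_REGEX = re.compile(r'(^|[\. _-])(proper|repack)([\. _-]|$)', re.I)

def looks_like_proper(release_name):
    """Return True if the release name is tagged as a PROPER or REPACK."""
    return bool(PROPER_REGEX.search(release_name))

print(looks_like_proper('Show.Name.S01E02.PROPER.720p.HDTV.x264-GRP'))  # True
print(looks_like_proper('Show.Name.S01E02.720p.HDTV.x264-GRP'))         # False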
Example #33
0
    def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):

        if not self._checkAuth:
            return

        self.show = show

        results = {}
        itemList = []

        searched_scene_season = None
        for epObj in episodes:
            # search cache for episode result
            cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality)
            if cacheResult:
                if epObj.episode not in results:
                    results[epObj.episode] = cacheResult
                else:
                    results[epObj.episode].extend(cacheResult)

                # found result, search next episode
                continue

            # skip if season already searched
            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == epObj.scene_season:
                continue

            # mark season searched for season pack searches so we can skip later on
            searched_scene_season = epObj.scene_season

            search_strings = []
            if len(episodes) > 1 and search_mode == 'sponly':
                # get season search results
                search_strings = self._get_season_search_strings(epObj)
            elif search_mode == 'eponly':
                # get single episode search results
                search_strings = self._get_episode_search_strings(epObj)

            first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
            if first:
                sickrage.srCore.srLogger.debug('First search_string has rid')

            for curString in search_strings:
                itemList += self.search(curString, search_mode, len(episodes), epObj=epObj)
                if first:
                    first = False
                    if itemList:
                        sickrage.srCore.srLogger.debug(
                            'First search_string had rid, and returned results, skipping query by string')
                        break
                    else:
                        sickrage.srCore.srLogger.debug(
                            'First search_string had rid, but returned no results, searching with string query')

        # if we found what we needed already from cache then return results and exit
        if len(results) == len(episodes):
            return results

        # sort list by quality
        if len(itemList):
            items = {}
            itemsUnknown = []
            for item in itemList:
                quality = self.getQuality(item, anime=show.is_anime)
                if quality == Quality.UNKNOWN:
                    itemsUnknown += [item]
                else:
                    if quality not in items:
                        items[quality] = [item]
                    else:
                        items[quality].append(item)

            itemList = list(itertools.chain(*[v for (k, v) in sorted(items.items(), reverse=True)]))
            itemList += itemsUnknown or []

        # filter results
        cl = []
        for item in itemList:
            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(title)
            except InvalidNameException:
                sickrage.srCore.srLogger.debug("Unable to parse the filename " + title + " into a valid episode")
                continue
            except InvalidShowException:
                sickrage.srCore.srLogger.debug("Unable to parse the filename " + title + " into a valid show")
                continue

            showObj = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version

            addCacheEntry = False
            if not (showObj.air_by_date or showObj.sports):
                if search_mode == 'sponly':
                    if len(parse_result.episode_numbers):
                        sickrage.srCore.srLogger.debug(
                            "This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it")
                        addCacheEntry = True
                    if len(parse_result.episode_numbers) and (
                            parse_result.season_number not in set([ep.season for ep in episodes])
                            or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                        sickrage.srCore.srLogger.debug(
                            "The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                        addCacheEntry = True
                else:
                    if not len(parse_result.episode_numbers) and parse_result.season_number and not [
                            ep for ep in episodes
                            if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                        sickrage.srCore.srLogger.debug(
                            "The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring")
                        addCacheEntry = True
                    elif len(parse_result.episode_numbers) and not [
                            ep for ep in episodes
                            if ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                        sickrage.srCore.srLogger.debug(
                            "The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring")
                        addCacheEntry = True

                if not addCacheEntry:
                    # we just use the existing info for normal searches
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                if not parse_result.is_air_by_date:
                    sickrage.srCore.srLogger.debug(
                        "This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it")
                    addCacheEntry = True
                else:
                    airdate = parse_result.air_date.toordinal()
                    sql_results = main_db.MainDB().select(
                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                        [showObj.indexerid, airdate])

                    if len(sql_results) != 1:
                        sickrage.srCore.srLogger.warning(
                            "Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it")
                        addCacheEntry = True

                if not addCacheEntry:
                    actual_season = int(sql_results[0]["season"])
                    actual_episodes = [int(sql_results[0]["episode"])]

            # add parsed result to cache for usage later on
            if addCacheEntry:
                sickrage.srCore.srLogger.debug("Adding item from search to cache: " + title)
                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
                if ci is not None:
                    cl.append(ci)
                continue

            # make sure we want the episode
            wantEp = True
            for epNo in actual_episodes:
                if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality):
                    wantEp = False
                    break

            if not wantEp:
                sickrage.srCore.srLogger.info("RESULT:[{}] QUALITY:[{}] IGNORED!".format(title, Quality.qualityStrings[quality]))
                continue

            sickrage.srCore.srLogger.debug("FOUND RESULT:[{}] URL:[{}]".format(title, url))

            # make a result object
            epObj = []
            for curEp in actual_episodes:
                epObj.append(showObj.getEpisode(actual_season, curEp))

            result = self.getResult(epObj)
            result.show = showObj
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            if len(epObj) == 1:
                epNum = epObj[0].episode
                sickrage.srCore.srLogger.debug("Single episode result.")
            elif len(epObj) > 1:
                epNum = MULTI_EP_RESULT
                sickrage.srCore.srLogger.debug(
                    "Separating multi-episode result to check for later - result contains episodes: " + str(
                        parse_result.episode_numbers))
            elif len(epObj) == 0:
                epNum = SEASON_RESULT
                sickrage.srCore.srLogger.debug("Separating full season result to check for later")

            if epNum not in results:
                results[epNum] = [result]
            else:
                results[epNum].append(result)

        # check if we have items to add to cache
        if len(cl) > 0:
            self.cache._getDB().mass_action(cl)
            del cl  # cleanup

        return results
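
The quality sort in the middle of findSearchResults buckets items by quality, walks the buckets best-first and keeps unknown-quality items for last; a self-contained sketch of that step, with UNKNOWN and the release tuples as stand-ins.

import itertools

UNKNOWN = 0  # stand-in for Quality.UNKNOWN

def sort_by_quality(item_list, get_quality):
    """Group items into quality buckets, best quality first, unknowns last."""
    items, items_unknown = {}, []
    for item in item_list:
        quality = get_quality(item)
        if quality == UNKNOWN:
            items_unknown.append(item)
        else:
            items.setdefault(quality, []).append(item)
    ordered = list(itertools.chain(*[v for _, v in sorted(items.items(), reverse=True)]))
    return ordered + items_unknown

releases = [('x264-SD', 4), ('cam-rip', 0), ('x264-1080p', 32)]
print(sort_by_quality(releases, get_quality=lambda r: r[1]))
# [('x264-1080p', 32), ('x264-SD', 4), ('cam-rip', 0)]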
Example #34
0
    def addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
        # check if we passed in a parsed result or should we try and create one
        if not parse_result:
            # create showObj from indexer_id if available
            showObj = None
            if indexer_id:
                showObj = findCertainShow(sickrage.srCore.SHOWLIST, indexer_id)

            try:
                myParser = NameParser(showObj=showObj)
                parse_result = myParser.parse(name)
                if not parse_result:
                    return
            except (InvalidShowException, InvalidNameException):
                sickrage.srCore.srLogger.debug(
                    "RSS ITEM:[{}] IGNORED!".format(name))
                return

        if not parse_result.series_name:
            return

        # if we made it this far then let's add the parsed result to the cache for usage later on
        season = parse_result.season_number if parse_result.season_number else 1
        episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a pipe-separated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(
                time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            quality = parse_result.quality

            # get release group
            release_group = parse_result.release_group

            # get version
            version = parse_result.version

            if not len([
                    x for x in sickrage.srCore.cacheDB.db.get_many(
                        'providers', self.providerID, with_doc=True)
                    if x['doc']['url'] == url
            ]):
                sickrage.srCore.cacheDB.db.insert({
                    '_t': 'providers',
                    'provider': self.providerID,
                    'name': name,
                    'season': season,
                    'episodes': episodeText,
                    'indexerid': parse_result.show.indexerid,
                    'url': url,
                    'time': curTimestamp,
                    'quality': quality,
                    'release_group': release_group,
                    'version': version
                })

                sickrage.srCore.srLogger.debug("RSS ITEM:[%s] ADDED!", name)
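
Episode numbers are cached as a pipe-delimited string so a later lookup can substring-match a single episode; a small sketch of that encoding, with decode_episodes added here only as a hypothetical counterpart.

# Encoding used above when writing cache rows; decode_episodes is a
# hypothetical helper added here only to show the round trip.
def encode_episodes(episodes):
    return '|' + '|'.join(map(str, episodes)) + '|'

def decode_episodes(episode_text):
    return [int(x) for x in episode_text.split('|') if x]

text = encode_episodes([1, 2, 3])
print(text)                   # |1|2|3|
print(decode_episodes(text))  # [1, 2, 3]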
Example #35
0
def splitNZBResult(result):
    """
    Split result into separate episodes

    :param result: search result object
    :return: False upon failure, a list of episode objects otherwise
    """
    urlData = WebSession().get(result.url, needBytes=True)
    if urlData is None:
        sickrage.app.log.error("Unable to load url " + result.url + ", can't download season NZB")
        return False

    # parse the season ep name
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid episode")
        return False
    except InvalidShowException:
        sickrage.app.log.debug("Unable to parse the filename " + result.name + " into a valid show")
        return False

    # bust it up
    season = parse_result.season_number if parse_result.season_number is not None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:

        sickrage.app.log.debug("Split out " + newNZB + " from " + result.name)

        # parse the name
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            sickrage.app.log.debug("Unable to parse the filename " + newNZB + " into a valid episode")
            return False
        except InvalidShowException:
            sickrage.app.log.debug("Unable to parse the filename " + newNZB + " into a valid show")
            return False

        # make sure the result is sane
        if (parse_result.season_number is not None and parse_result.season_number != season) or (
                        parse_result.season_number is None and season != 1):
            sickrage.app.log.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it")
            continue
        elif len(parse_result.episode_numbers) == 0:
            sickrage.app.log.warning(
                "Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it")
            continue

        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].want_episode(season, epNo, result.quality):
                sickrage.app.log.info(
                    "Ignoring result " + newNZB + " because we don't want an episode that is " +
                    Quality.qualityStrings[result.quality])
                wantEp = False
                break
        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].get_episode(season, curEp))

        # make a result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
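
The sanity check inside the split loop keeps an NZB only if its parsed season matches the season pack, treating an unparsed season as season 1; a tiny standalone sketch of that rule.

def belongs_to_pack_season(parsed_season, pack_season):
    """Mirror of the check above: a season that failed to parse counts as season 1."""
    if parsed_season is not None:
        return parsed_season == pack_season
    return pack_season == 1

print(belongs_to_pack_season(None, 1))  # True
print(belongs_to_pack_season(None, 2))  # False
print(belongs_to_pack_season(4, 4))     # True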