Example 1
    def parse(self, name):

        name = self._unicodify(name)

        cached = name_parser_cache.get(name)
        if cached:
            return cached

        # break it into parts if there are any (dirname, file name, extension)
        dir_name, file_name = ek.ek(os.path.split, name)

        if self.is_file_name:
            base_file_name = helpers.remove_non_release_groups(helpers.remove_extension(file_name))
        else:
            base_file_name = file_name

        # use only the direct parent dir
        dir_name = ek.ek(os.path.basename, dir_name)

        # set up a result to use
        final_result = ParseResult(name)

        # try parsing the file name
        file_name_result = self._parse_string(base_file_name)

        # parse the dirname for extra info if needed
        dir_name_result = self._parse_string(dir_name)

        # build the ParseResult object
        final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date')

        if not final_result.air_date:
            final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
            final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers')

        final_result.is_proper = self._combine_results(file_name_result, dir_name_result, 'is_proper')

        # if the dirname has a release group/show name I believe it over the filename
        final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
        final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
        final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')

        final_result.which_regex = []
        if final_result == file_name_result:
            final_result.which_regex = file_name_result.which_regex
        elif final_result == dir_name_result:
            final_result.which_regex = dir_name_result.which_regex
        else:
            if file_name_result:
                final_result.which_regex += file_name_result.which_regex
            if dir_name_result:
                final_result.which_regex += dir_name_result.which_regex

        # if there's no useful info in it then raise an exception
        if final_result.season_number is None and not final_result.episode_numbers and final_result.air_date is None and not final_result.series_name:
            raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        name_parser_cache.add(name, final_result)
        # return it
        return final_result
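
For orientation, here is a minimal usage sketch of the parser shown above; it is not part of the quoted sources. It assumes a SickBeard/SickRage checkout on the import path, the release name is made up, and constructor arguments (and the extra InvalidShowException raised by newer forks) vary between the versions collected on this page.

# Hedged usage sketch: the module path and the single positional "file name"
# flag follow the examples on this page; exact signatures differ across forks.
from sickbeard.name_parser.parser import NameParser, InvalidNameException

release = 'Some.Show.S01E02.720p.HDTV.x264-GROUP.mkv'  # hypothetical name

try:
    result = NameParser(True).parse(release)
except InvalidNameException:
    result = None  # nothing useful could be extracted from the name

if result:
    print(result.series_name, result.season_number, result.episode_numbers)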
Example 2
    def _finalize(self, parse_result):
        self.release_group = parse_result.release_group

        # remember whether it's a proper
        self.is_proper = parse_result.is_proper

        # if the result is complete then remember that for later
        if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
            if not self.release_name:
                self.release_name = helpers.remove_extension(
                    ek.ek(os.path.basename, parse_result.original_name))

        else:
            logger.log(
                u"Parse result not sufficient (all following have to be set). will not save release name",
                logger.DEBUG)
            logger.log(
                u"Parse result(series_name): " + str(parse_result.series_name),
                logger.DEBUG)
            logger.log(
                u"Parse result(season_number): " +
                str(parse_result.season_number), logger.DEBUG)
            logger.log(
                u"Parse result(episode_numbers): " +
                str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(
                u"Parse result(release_group): " +
                str(parse_result.release_group), logger.DEBUG)
Example 3
    def _finalize(self, parse_result):
        """
        Store parse result if it is complete and final

        :param parse_result: Result of parsers
        """
        self.release_group = parse_result.release_group

        # remember whether it's a proper
        if parse_result.extra_info:
            self.is_proper = re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) is not None

        # if the result is complete then remember that for later
        # if the result is complete then set release name
        if parse_result.series_name and ((parse_result.season_number is not None and parse_result.episode_numbers)
                                         or parse_result.air_date) and parse_result.release_group:

            if not self.release_name:
                self.release_name = helpers.remove_non_release_groups(helpers.remove_extension(ek(os.path.basename, parse_result.original_name)))

        else:
            logger.log(u"Parse result not sufficient (all following have to be set). will not save release name",
                       logger.DEBUG)
            logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
            logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
            logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
            logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
Example 4
    def _finalize(self, parse_result):

        self.release_group = parse_result.release_group

        # remember whether it's a proper
        self.is_proper = parse_result.is_proper

        # if the result is complete then set release name
        if (
            parse_result.series_name
            and ((parse_result.season_number is not None and parse_result.episode_numbers) or parse_result.air_date)
            and parse_result.release_group
        ):

            if not self.release_name:
                self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))

        else:
            logger.log(
                u"Parse result not sufficient (all following have to be set). will not save release name", logger.DEBUG
            )
            logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
            logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
            logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
            logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
Example 5
    def _finalize(self, parse_result):
        """
        Store parse result if it is complete and final

        :param parse_result: Result of parsers
        """
        self.release_group = parse_result.release_group

        # remember whether it's a proper
        if parse_result.extra_info:
            self.is_proper = re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) is not None

        # if the result is complete then remember that for later
        # if the result is complete then set release name
        if parse_result.series_name and ((parse_result.season_number is not None and parse_result.episode_numbers)
                                         or parse_result.air_date) and parse_result.release_group:

            if not self.release_name:
                self.release_name = helpers.remove_non_release_groups(helpers.remove_extension(ek(os.path.basename, parse_result.original_name)))

        else:
            logger.log(u"Parse result not sufficient (all following have to be set). will not save release name",
                       logger.DEBUG)
            logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
            logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
            logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
            logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
Example 6
    def _get_quality(self, ep_obj):
        """
        Determines the quality of the file that is being post processed by parsing through the data available.

        ep_obj: The TVEpisode object related to the file we are post processing

        Returns: A quality value found in common.Quality
        """

        ep_quality = common.Quality.UNKNOWN

        # nzb name is the most reliable if it exists, followed by folder name and lastly file name
        filename = self.file_name
        orig_extension = filename.rpartition('.')[-1]
        filename = helpers.handle_reversed_names(helpers.remove_extension(filename)) + '.' + orig_extension
        name_list = [self.nzb_name, self.folder_name, filename]

        # search all possible names for our new quality, in case the file or dir doesn't have it
        for cur_name in name_list:

            # some stuff might be None at this point still
            if not cur_name:
                continue

            ep_quality = common.Quality.nameQuality(cur_name)
            self._log(u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)

            # if we find a good one then use it
            if ep_quality != common.Quality.UNKNOWN:
                logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
                return ep_quality

        # Try getting quality from the episode (snatched) status
        if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)  # @UnusedVariable
            if ep_quality != common.Quality.UNKNOWN:
                self._log(u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
                return ep_quality

        # Try guessing quality from the file name
        ep_quality = common.Quality.assumeQuality(filename)
        self._log(u"Guessing quality for name " + filename + u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
        if ep_quality != common.Quality.UNKNOWN:
            logger.log(filename + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
            return ep_quality

        return ep_quality
Example 7
    def _analyze_name(self, name, file=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.

        name: A string which we want to analyze to determine show info from (unicode)

        Returns a (show, season, [episodes], quality, version) tuple. The first two may be None and
        episodes may be [] if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [], None, None)

        if not name:
            return to_return

        name = helpers.remove_non_release_groups(
            helpers.remove_extension(name))

        # parse the name to break it into show name, season, and episode
        np = NameParser(file,
                        tryIndexers=True,
                        trySceneExceptions=True,
                        convert=True)
        parse_result = np.parse(name)

        # show object
        show = parse_result.show

        if parse_result.is_air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (show, season, episodes, parse_result.quality, None)

        self._finalize(parse_result)
        return to_return
Example 8
    def _analyze_name(self, name, file=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.

        name: A string which we want to analyze to determine show info from (unicode)

        Returns a (show, season, [episodes], quality, version) tuple. The first two may be None and
        episodes may be [] if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [], None, None)

        if not name:
            return to_return

        name = helpers.remove_non_release_groups(helpers.remove_extension(name))

        # parse the name to break it into show name, season, and episode
        np = NameParser(file, tryIndexers=True, convert=True)
        parse_result = np.parse(name)

        # show object
        show = parse_result.show

        if parse_result.is_air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        elif parse_result.is_sports:
            season = -1
            episodes = [parse_result.sports_air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (show, season, episodes, parse_result.quality, None)

        self._finalize(parse_result)
        return to_return
Example 9
    def _finalize(self, parse_result):

        self.release_group = parse_result.release_group

        # remember whether it's a proper
        if parse_result.extra_info:
            self.is_proper = None is not re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I)

        # if the result is complete then set release name
        if parse_result.series_name and\
                ((None is not parse_result.season_number and parse_result.episode_numbers) or parse_result.air_date)\
                and parse_result.release_group:

            if not self.release_name:
                self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))

        else:
            logger.log(u'Parse result not sufficient (all following have to be set). will not save release name', logger.DEBUG)
            logger.log(u'Parse result(series_name): ' + str(parse_result.series_name), logger.DEBUG)
            logger.log(u'Parse result(season_number): ' + str(parse_result.season_number), logger.DEBUG)
            logger.log(u'Parse result(episode_numbers): ' + str(parse_result.episode_numbers), logger.DEBUG)
            logger.log(u' or Parse result(air_date): ' + str(parse_result.air_date), logger.DEBUG)
            logger.log(u'Parse result(release_group): ' + str(parse_result.release_group), logger.DEBUG)
Example 10
    def _analyze_name(self, name, resource=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.

        name: A string which we want to analyze to determine show info from (unicode)

        Returns a (show, season, [episodes], quality) tuple. The first two may be None and episodes may be []
        if none were found.
        """

        logger.log(u'Analyzing name ' + repr(name))

        to_return = (None, None, [], None)

        if not name:
            return to_return

        name = helpers.remove_non_release_groups(helpers.remove_extension(name))

        # parse the name to break it into show name, season, and episode
        np = NameParser(resource, try_indexers=True, try_scene_exceptions=True, convert=True)
        parse_result = np.parse(name)
        self._log(u'Parsed %s<br />.. into %s' % (name, str(parse_result).decode('utf-8', 'xmlcharrefreplace')), logger.DEBUG)

        if parse_result.is_air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        # show object
        show = parse_result.show
        to_return = (show, season, episodes, parse_result.quality)

        self._finalize(parse_result)
        return to_return
Example 11
    def _analyze_name(self, name, file_name=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.

        name: A string which we want to analyze to determine show info from (unicode)

        Returns a (tvdb_id, season, [episodes], quality) tuple. tvdb_id, season and quality may be None
        and episodes may be [] if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [], None)

        if not name:
            return to_return

        name = helpers.remove_non_release_groups(helpers.remove_extension(name))

        # parse the name to break it into show name, season, and episode
        np = NameParser(False)
        parse_result = np.parse(name)
        self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)

        if parse_result.air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (None, season, episodes, None)

        # do a scene reverse-lookup to get a list of all possible names
        name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)

        if not name_list:
            return (None, season, episodes, None)

        # try finding name in DB
        for cur_name in name_list:
            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
            db_result = helpers.searchDBForShow(cur_name)
            if db_result:
                self._log(u"Lookup successful, using tvdb id " + str(db_result[0]), logger.DEBUG)
                self._finalize(parse_result)
                return (int(db_result[0]), season, episodes, None)

        # try finding name in scene exceptions
        for cur_name in name_list:
            self._log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
            scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
            if scene_id:
                self._log(u"Scene exception lookup got tvdb id " + str(scene_id) + u", using that", logger.DEBUG)
                self._finalize(parse_result)
                return (scene_id, season, episodes, None)

        # try finding name on TVDB
        for cur_name in name_list:
            try:
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **sickbeard.TVDB_API_PARMS)

                self._log(u"Looking up name " + cur_name + u" on TVDB", logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception):
                # if none found, search on all languages
                try:
                    # There's gotta be a better way of doing this but we don't wanna
                    # change the language value elsewhere
                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                    ltvdb_api_parms['search_all_languages'] = True
                    t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **ltvdb_api_parms)

                    self._log(u"Looking up name " + cur_name + u" in all languages on TVDB", logger.DEBUG)
                    showObj = t[cur_name]
                except (tvdb_exceptions.tvdb_exception, IOError):
                    pass

                continue
            except (IOError):
                continue

            self._log(u"Lookup successful, using tvdb id " + str(showObj["id"]), logger.DEBUG)
            self._finalize(parse_result)
            return (int(showObj["id"]), season, episodes, None)

        self._finalize(parse_result)
        return to_return
Example 12
    def parse(self, name, cache_result=True):
        name = self._unicodify(name)

        if self.naming_pattern:
            cache_result = False

        cached = name_parser_cache.get(name)
        if cached:
            return cached

        # break it into parts if there are any (dirname, file name, extension)
        dir_name, file_name = ek.ek(os.path.split, name)

        if self.file_name:
            base_file_name = helpers.remove_extension(file_name)
        else:
            base_file_name = file_name

        # set up a result to use
        final_result = ParseResult(name)

        # try parsing the file name
        file_name_result = self._parse_string(base_file_name)

        # use only the direct parent dir
        dir_name = ek.ek(os.path.basename, dir_name)

        # parse the dirname for extra info if needed
        dir_name_result = self._parse_string(dir_name)

        # build the ParseResult object
        final_result.air_date = self._combine_results(file_name_result,
                                                      dir_name_result,
                                                      'air_date')

        # anime absolute numbers
        final_result.ab_episode_numbers = self._combine_results(
            file_name_result, dir_name_result, 'ab_episode_numbers')

        # season and episode numbers
        final_result.season_number = self._combine_results(
            file_name_result, dir_name_result, 'season_number')
        final_result.episode_numbers = self._combine_results(
            file_name_result, dir_name_result, 'episode_numbers')

        # if the dirname has a release group/show name I believe it over the filename
        final_result.series_name = self._combine_results(
            dir_name_result, file_name_result, 'series_name')
        final_result.extra_info = self._combine_results(
            dir_name_result, file_name_result, 'extra_info')
        final_result.release_group = self._combine_results(
            dir_name_result, file_name_result, 'release_group')
        final_result.version = self._combine_results(dir_name_result,
                                                     file_name_result,
                                                     'version')

        final_result.which_regex = []
        if final_result == file_name_result:
            final_result.which_regex = file_name_result.which_regex
        elif final_result == dir_name_result:
            final_result.which_regex = dir_name_result.which_regex
        else:
            if file_name_result:
                final_result.which_regex += file_name_result.which_regex
            if dir_name_result:
                final_result.which_regex += dir_name_result.which_regex

        final_result.show = self._combine_results(file_name_result,
                                                  dir_name_result, 'show')
        final_result.quality = self._combine_results(file_name_result,
                                                     dir_name_result,
                                                     'quality')

        if not final_result.show and not self.testing:
            raise InvalidShowException(
                'Unable to parse %s' %
                name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        # if there's no useful info in it then raise an exception
        if None is final_result.season_number and not final_result.episode_numbers and None is final_result.air_date \
                and not final_result.ab_episode_numbers and not final_result.series_name:
            raise InvalidNameException(
                'Unable to parse %s' %
                name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        if cache_result:
            name_parser_cache.add(name, final_result)

        logger.log(
            u'Parsed %s into %s' %
            (name, str(final_result).decode('utf-8', 'xmlcharrefreplace')),
            logger.DEBUG)
        return final_result
Example 13
    def execute(self):
        backupDatabase(16)

        logger.log(u"Setting special episodes status to SKIPPED.")
        self.connection.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND season = 0", [common.SKIPPED, common.WANTED])

        fix_ep_rls_group = []
        fix_ep_release_name = []

        # re-analyze snatched data
        logger.log(u"Analyzing history to correct bad data (this could take a moment, be patient)...")
        history_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 2 ORDER BY date ASC")
        for cur_result in history_results:
            # find the associated download, if there isn't one then ignore it
            download_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 4 AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date > ?",
                                                    [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["quality"], cur_result["date"]])
            # only continue if there was a download found (that's newer than the snatched entry)
            if not download_results:
                logger.log(u"Found a snatch in the history for " + cur_result["resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG)
                continue

            # take the snatched nzb, clean it up so we can store it for the corresponding tv_episodes entry
            clean_nzb_name = helpers.remove_non_release_groups(helpers.remove_extension(cur_result["resource"]))

            # fixed known bad release_group data
            if download_results[0]["provider"].upper() in ["-1", "RP", "NZBGEEK"] or "." in download_results[0]["provider"]:
                try:
                    np = NameParser(False)
                    parse_result = np.parse(clean_nzb_name)
                except InvalidNameException:
                    continue

                # leave off check for episode number so we can update season rip data as well?
                if parse_result.series_name and parse_result.season_number is not None and parse_result.release_group:
                    fix_ep_rls_group.append(["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?", \
                               [parse_result.release_group, download_results[0]["action"], download_results[0]["showid"], download_results[0]["season"], download_results[0]["episode"], download_results[0]["quality"], download_results[0]["date"]]
                               ])

            # find the associated episode on disk
            ep_results = self.connection.select("SELECT episode_id, status, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
                                                [cur_result["showid"], cur_result["season"], cur_result["episode"]])
            if not ep_results:
                logger.log(u"The episode " + cur_result["resource"] + " was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
                continue

            # skip items that appear to have a 'scene' name already to avoid replacing locally pp/manually moved items
            match = re.search(".(xvid|x264|h.?264|mpeg-?2)", ep_results[0]["release_name"], re.I)
            if match:
                continue

            # get the status/quality of the existing ep and make sure it's what we expect
            ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
            if ep_status != common.DOWNLOADED:
                continue

            if ep_quality != int(cur_result["quality"]):
                continue

            # take the extension off the filename, it's not needed
            file_name = ek.ek(os.path.basename, download_results[0]["resource"])
            if '.' in file_name:
                file_name = file_name.rpartition('.')[0]

            # make sure this is actually a real release name and not a season pack or something
            for cur_name in (clean_nzb_name, file_name):
                logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
                try:
                    np = NameParser(False)
                    parse_result = np.parse(cur_name)
                except InvalidNameException:
                    continue

                if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
                    # if all is well by this point we'll just put the release name into the database
                    fix_ep_release_name.append(["UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [cur_name, ep_results[0]["episode_id"]]])
                    break

        logger.log(u"Corrected " + str(len(fix_ep_release_name)) + " release names (" + str(len(fix_ep_rls_group)) + " release groups) out of the " + str(len(history_results)) + " releases analyzed.")
        if len(fix_ep_rls_group) > 0:
            self.connection.mass_action(fix_ep_rls_group)
        if len(fix_ep_release_name) > 0:
            self.connection.mass_action(fix_ep_release_name)

        # now cleanup all downloaded release groups in the history
        fix_ep_rls_group = []
        logger.log(u"Analyzing downloaded history release groups...")
        history_results = self.connection.select("SELECT * FROM history WHERE action % 100 = 4 ORDER BY date ASC")
        for cur_result in history_results:
            clean_provider = helpers.remove_non_release_groups(helpers.remove_extension(cur_result["provider"]))
            # take the data on the left of the _, fixes 'LOL_repost'
            if clean_provider and "_" in clean_provider:
                clean_provider = clean_provider.rsplit('_', 1)[0]
            if clean_provider != cur_result["provider"]:
                fix_ep_rls_group.append(["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?", \
                                         [clean_provider, cur_result["action"], cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["quality"], cur_result["date"]]
                                         ])
        logger.log(u"Corrected " + str(len(fix_ep_rls_group)) + " release groups.")
        if len(fix_ep_rls_group) > 0:
            self.connection.mass_action(fix_ep_rls_group)

        self.incDBVersion()

        # cleanup and reduce db if any previous data was removed
        logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
        self.connection.action("VACUUM")
Example 14
    def execute(self):
        backupDatabase(16)

        logger.log(u"Setting special episodes status to SKIPPED.")
        self.connection.action(
            "UPDATE tv_episodes SET status = ? WHERE status = ? AND season = 0",
            [common.SKIPPED, common.WANTED])

        fix_ep_rls_group = []
        fix_ep_release_name = []

        # re-analyze snatched data
        logger.log(
            u"Analyzing history to correct bad data (this could take a moment, be patient)..."
        )
        history_results = self.connection.select(
            "SELECT * FROM history WHERE action % 100 = 2 ORDER BY date ASC")
        for cur_result in history_results:
            # find the associated download, if there isn't one then ignore it
            download_results = self.connection.select(
                "SELECT * FROM history WHERE action % 100 = 4 AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date > ?",
                [
                    cur_result["showid"], cur_result["season"],
                    cur_result["episode"], cur_result["quality"],
                    cur_result["date"]
                ])
            # only continue if there was a download found (that's newer than the snatched entry)
            if not download_results:
                logger.log(
                    u"Found a snatch in the history for " +
                    cur_result["resource"] +
                    " but couldn't find the associated download, skipping it",
                    logger.DEBUG)
                continue

            # take the snatched nzb, clean it up so we can store it for the corresponding tv_episodes entry
            clean_nzb_name = helpers.remove_non_release_groups(
                helpers.remove_extension(cur_result["resource"]))

            # fixed known bad release_group data
            if download_results[0]["provider"].upper() in [
                    "-1", "RP", "NZBGEEK"
            ] or "." in download_results[0]["provider"]:
                try:
                    np = NameParser(False)
                    parse_result = np.parse(clean_nzb_name)
                except InvalidNameException:
                    continue

                # leave off check for episode number so we can update season rip data as well?
                if parse_result.series_name and parse_result.season_number is not None and parse_result.release_group:
                    fix_ep_rls_group.append(["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?", \
                               [parse_result.release_group, download_results[0]["action"], download_results[0]["showid"], download_results[0]["season"], download_results[0]["episode"], download_results[0]["quality"], download_results[0]["date"]]
                               ])

            # find the associated episode on disk
            ep_results = self.connection.select(
                "SELECT episode_id, status, release_name FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
                [
                    cur_result["showid"], cur_result["season"],
                    cur_result["episode"]
                ])
            if not ep_results:
                logger.log(
                    u"The episode " + cur_result["resource"] +
                    " was found in history but doesn't exist on disk anymore, skipping",
                    logger.DEBUG)
                continue

            # skip items that appear to have a 'scene' name already to avoid replacing locally pp/manually moved items
            match = re.search(".(xvid|x264|h.?264|mpeg-?2)",
                              ep_results[0]["release_name"], re.I)
            if match:
                continue

            # get the status/quality of the existing ep and make sure it's what we expect
            ep_status, ep_quality = common.Quality.splitCompositeStatus(
                int(ep_results[0]["status"]))
            if ep_status != common.DOWNLOADED:
                continue

            if ep_quality != int(cur_result["quality"]):
                continue

            # take the extension off the filename, it's not needed
            file_name = ek.ek(os.path.basename,
                              download_results[0]["resource"])
            if '.' in file_name:
                file_name = file_name.rpartition('.')[0]

            # make sure this is actually a real release name and not a season pack or something
            for cur_name in (clean_nzb_name, file_name):
                logger.log(
                    u"Checking if " + cur_name +
                    " is actually a good release name", logger.DEBUG)
                try:
                    np = NameParser(False)
                    parse_result = np.parse(cur_name)
                except InvalidNameException:
                    continue

                if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
                    # if all is well by this point we'll just put the release name into the database
                    fix_ep_release_name.append([
                        "UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                        [cur_name, ep_results[0]["episode_id"]]
                    ])
                    break

        logger.log(u"Corrected " + str(len(fix_ep_release_name)) +
                   " release names (" + str(len(fix_ep_rls_group)) +
                   " release groups) out of the " + str(len(history_results)) +
                   " releases analyzed.")
        if len(fix_ep_rls_group) > 0:
            self.connection.mass_action(fix_ep_rls_group)
        if len(fix_ep_release_name) > 0:
            self.connection.mass_action(fix_ep_release_name)

        # now cleanup all downloaded release groups in the history
        fix_ep_rls_group = []
        logger.log(u"Analyzing downloaded history release groups...")
        history_results = self.connection.select(
            "SELECT * FROM history WHERE action % 100 = 4 ORDER BY date ASC")
        for cur_result in history_results:
            clean_provider = helpers.remove_non_release_groups(
                helpers.remove_extension(cur_result["provider"]))
            # take the data on the left of the _, fixes 'LOL_repost'
            if clean_provider and "_" in clean_provider:
                clean_provider = clean_provider.rsplit('_', 1)[0]
            if clean_provider != cur_result["provider"]:
                fix_ep_rls_group.append(["UPDATE history SET provider = ? WHERE action = ? AND showid = ? AND season = ? AND episode = ? AND quality = ? AND date = ?", \
                                         [clean_provider, cur_result["action"], cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["quality"], cur_result["date"]]
                                         ])
        logger.log(u"Corrected " + str(len(fix_ep_rls_group)) +
                   " release groups.")
        if len(fix_ep_rls_group) > 0:
            self.connection.mass_action(fix_ep_rls_group)

        self.incDBVersion()

        # cleanup and reduce db if any previous data was removed
        logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
        self.connection.action("VACUUM")
Example 15
    def parse(self, name, cache_result=True):
        if self.naming_pattern:
            cache_result = False

        cached = name_parser_cache.get(name)
        if cached:
            return cached

        # break it into parts if there are any (dirname, file name, extension)
        dir_name, file_name = ek(os.path.split, name)

        if self.file_name:
            base_file_name = helpers.remove_extension(file_name)
        else:
            base_file_name = file_name

        # set up a result to use
        final_result = ParseResult(name)

        # try parsing the file name
        file_name_result = self._parse_string(base_file_name)

        # use only the direct parent dir
        dir_name = ek(os.path.basename, dir_name)

        # parse the dirname for extra info if needed
        dir_name_result = self._parse_string(dir_name)

        # build the ParseResult object
        final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date')

        # anime absolute numbers
        final_result.ab_episode_numbers = self._combine_results(file_name_result, dir_name_result, 'ab_episode_numbers')

        # season and episode numbers
        final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
        final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers')

        # if the dirname has a release group/show name I believe it over the filename
        final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
        final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
        final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')
        final_result.version = self._combine_results(dir_name_result, file_name_result, 'version')

        final_result.which_regex = []
        if final_result == file_name_result:
            final_result.which_regex = file_name_result.which_regex
        elif final_result == dir_name_result:
            final_result.which_regex = dir_name_result.which_regex
        else:
            if file_name_result:
                final_result.which_regex += file_name_result.which_regex
            if dir_name_result:
                final_result.which_regex += dir_name_result.which_regex

        final_result.show = self._combine_results(file_name_result, dir_name_result, 'show')
        final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality')

        if not final_result.show:
            raise InvalidShowException(
                "Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        # if there's no useful info in it then raise an exception
        if final_result.season_number is None and not final_result.episode_numbers and final_result.air_date is None and not final_result.ab_episode_numbers and not final_result.series_name:
            raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        if cache_result:
            name_parser_cache.add(name, final_result)

        logger.log("Parsed " + name + " into " + ss(final_result), logger.DEBUG)
        return final_result
Example 16
    def parse(self, name, cache_result=True):
        if self.naming_pattern:
            cache_result = False

        cached = name_parser_cache.get(name)
        if cached:
            return cached

        # break it into parts if there are any (dirname, file name, extension)
        dir_name, file_name = ek(os.path.split, name)

        if self.file_name:
            base_file_name = helpers.remove_extension(file_name)
        else:
            base_file_name = file_name

        # set up a result to use
        final_result = ParseResult(name)

        # try parsing the file name
        file_name_result = self._parse_string(base_file_name)

        # use only the direct parent dir
        dir_name = ek(os.path.basename, dir_name)

        # parse the dirname for extra info if needed
        dir_name_result = self._parse_string(dir_name)

        # build the ParseResult object
        final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date')

        # anime absolute numbers
        final_result.ab_episode_numbers = self._combine_results(file_name_result, dir_name_result, 'ab_episode_numbers')

        # season and episode numbers
        final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
        final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers')

        # if the dirname has a release group/show name I believe it over the filename
        final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
        final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
        final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')
        final_result.version = self._combine_results(dir_name_result, file_name_result, 'version')

        final_result.which_regex = []
        if final_result == file_name_result:
            final_result.which_regex = file_name_result.which_regex
        elif final_result == dir_name_result:
            final_result.which_regex = dir_name_result.which_regex
        else:
            if file_name_result:
                final_result.which_regex += file_name_result.which_regex
            if dir_name_result:
                final_result.which_regex += dir_name_result.which_regex

        final_result.show = self._combine_results(file_name_result, dir_name_result, 'show')
        final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality')

        if not final_result.show:
            raise InvalidShowException("Unable to parse {}".format(name))

        # if there's no useful info in it then raise an exception
        if final_result.season_number is None and not final_result.episode_numbers and final_result.air_date is None and not final_result.ab_episode_numbers and not final_result.series_name:
            raise InvalidNameException("Unable to parse {}".format(name))

        if cache_result:
            name_parser_cache.add(name, final_result)

        logging.debug("Parsed " + name + " into " + ss(final_result))
        return final_result