    def getSearchResults(self, show, season, ep_objs, seasonSearch=False, manualSearch=False):

        self._checkAuth()
        self.show = show

        itemList = []
        results = {}

        useDate = False
        if self.show.air_by_date or self.show.sports:
            useDate = True

        for ep_obj in ep_objs:
            logger.log(u'Searching "%s" for "%s" as "%s"' % (self.name, ep_obj.prettyName(), ep_obj.scene_prettyName()))

            if seasonSearch:
                for curString in self._get_season_search_strings(ep_obj):
                    itemList += self._doSearch(curString)
            else:
                for curString in self._get_episode_search_strings(ep_obj):
                    itemList += self._doSearch(curString)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            quality = self.getQuality(item)

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(title).convert()
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                continue

            if not useDate:
                # this check is meaningless for non-season searches
                if (parse_result.season_number is not None and parse_result.season_number != season) or (
                                parse_result.season_number is None and season != 1):
                    logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(
                        season) + ", ignoring", logger.DEBUG)
                    continue

                if manualSearch and (
                        parse_result.season_number != season or ep_objs[0].episode not in parse_result.episode_numbers):
                    logger.log(u"Episode " + title + " isn't " + str(season) + "x" + str(
                        ep_objs[0].episode) + ", skipping it", logger.DEBUG)
                    continue

                # we just use the existing info for normal searches
                actual_season = season if manualSearch else parse_result.season_number
                actual_episodes = [ep_objs[0].episode] if manualSearch else parse_result.episode_numbers
            else:
                if not (parse_result.air_by_date or parse_result.sports):
                    logger.log(
                        u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                        logger.DEBUG)
                    continue

                if manualSearch and ((parse_result.air_date != ep_objs[0].airdate and parse_result.air_by_date) or (
                                parse_result.sports_event_date != ep_objs[0].airdate and parse_result.sports)):
                    logger.log(u"Episode " + title + " didn't air on " + str(ep_objs[0].airdate) + ", skipping it",
                               logger.DEBUG)
                    continue

                if not manualSearch:
                    myDB = db.DBConnection()
                    if parse_result.air_by_date:
                        sql_results = myDB.select(
                            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                            [self.show.indexerid, parse_result.air_date.toordinal()])
                    elif parse_result.sports:
                        sql_results = myDB.select(
                            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                            [self.show.indexerid, parse_result.sports_event_date.toordinal()])

                    if len(sql_results) != 1:
                        logger.log(
                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                            logger.WARNING)
                        continue

                actual_season = season if manualSearch else int(sql_results[0]["season"])
                actual_episodes = [ep_objs[0].episode] if manualSearch else [int(sql_results[0]["episode"])]


            # make sure we want the episode
            epObj = None
            wantEp = True
            for epNo in actual_episodes:
                epObj = self.show.getEpisode(actual_season, epNo)
                if not epObj or not self.show.wantEpisode(epObj.season, epObj.episode, quality, manualSearch=manualSearch):
                    wantEp = False
                    break

            if not epObj:
                logger.log(u"Ignoring result " + title + " because episode scene info is invalid.")
                continue

            if not wantEp:
                logger.log(
                    u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
                        quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            # make a result object
            epObjs = [epObj]

            result = self.getResult(epObjs)
            result.url = url
            result.name = title
            result.quality = quality
            result.provider = self
            result.content = None

            if len(epObjs) == 1:
                epNum = epObjs[0].episode
            elif len(epObjs) > 1:
                epNum = MULTI_EP_RESULT
                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
                    parse_result.episode_numbers), logger.DEBUG)
            elif len(epObjs) == 0:
                epNum = SEASON_RESULT
                result.extraInfo = [self.show]
                logger.log(u"Separating full season result to check for later", logger.DEBUG)

            if epNum in results:
                results[epNum].append(result)
            else:
                results[epNum] = [result]

        return results
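
The dict returned above is keyed by a plain episode number or by the MULTI_EP_RESULT / SEASON_RESULT sentinels, with each value a list of provider result objects. A minimal sketch of how a caller might walk it; the summarize_results helper and its pick-the-highest-quality heuristic are illustrative assumptions, not the real snatch logic.

# Hypothetical consumer of the results dict built by getSearchResults above.
def summarize_results(results):
    for ep_num, candidates in results.items():
        if ep_num == SEASON_RESULT:
            print("full season candidates: %d" % len(candidates))
        elif ep_num == MULTI_EP_RESULT:
            print("multi-episode candidates: %d" % len(candidates))
        else:
            # crude illustrative pick: highest quality value wins
            best = max(candidates, key=lambda r: r.quality)
            print("episode %s: %s (%s)" % (ep_num, best.name,
                                           Quality.qualityStrings[best.quality]))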
Example 2
def validateDir(path, dirName, nzbNameOriginal, failed):
    global process_result, returnStr

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
        returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
        returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.",
                               logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).upper().startswith('_UNPACK'):
        returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.",
                               logger.DEBUG)
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal)
        return False

    if helpers.is_hidden_folder(dirName):
        returnStr += logHelper(u"Ignoring hidden folder: " + dirName, logger.DEBUG)
        return False

    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")

    for sqlShow in sqlResults:
        if dirName.lower().startswith(
                        ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
                os.path.realpath, sqlShow["location"]).lower():
            returnStr += logHelper(
                u"You're trying to post process an episode that's already been moved to its show dir, skipping",
                logger.ERROR)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dirName), topdown=False):
        allDirs += processDir
        allFiles += fileList

    videoFiles = filter(helpers.isMediaFile, allFiles)
    allDirs.append(dirName)

    # check if the dir has at least one TV video file
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    for cur_dir in allDirs:
        try:
            NameParser().parse(cur_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickbeard.UNPACK:
        #Search for packed release
        packedFiles = filter(helpers.isRarFile, allFiles)

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    return False
Example 3
def validate_dir(process_path, release_name, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param process_path: Directory to check
    :param release_name: Original NZB/Torrent name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    result.output += log_helper("Processing folder " + process_path,
                                logger.DEBUG)

    upper_name = ek(os.path.basename, process_path).upper()
    if upper_name.startswith('_FAILED_') or upper_name.endswith('_FAILED_'):
        result.output += log_helper(
            "The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNDERSIZED_') or upper_name.endswith(
            '_UNDERSIZED_'):
        result.output += log_helper(
            "The directory name indicates that it was previously rejected for being undersized.",
            logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNPACK') or upper_name.endswith('_UNPACK'):
        result.output += log_helper(
            "The directory name indicates that this release is in the process of being unpacked.",
            logger.DEBUG)
        result.missed_files.append("{0} : Being unpacked".format(process_path))
        return False

    if failed:
        process_failed(process_path, release_name, result)
        result.missed_files.append(
            "{0} : Failed download".format(process_path))
        return False

    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(
            process_path) != helpers.real_path(
                sickbeard.TV_DOWNLOAD_DIR) and helpers.is_hidden_folder(
                    process_path):
        result.output += log_helper(
            "Ignoring hidden folder: {0}".format(process_path), logger.DEBUG)
        result.missed_files.append("{0} : Hidden folder".format(process_path))
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        if process_path.lower().startswith(ek(os.path.realpath, sqlShow[b"location"]).lower() + os.sep) or \
                process_path.lower() == ek(os.path.realpath, sqlShow[b"location"]).lower():

            result.output += log_helper(
                "Cannot process an episode that's already been moved to its show dir, skipping "
                + process_path, logger.WARNING)
            return False

    for current_directory, directory_names, file_names in ek(
            os.walk,
            process_path,
            topdown=False,
            followlinks=sickbeard.PROCESSOR_FOLLOW_SYMLINKS):
        sync_files = filter(is_sync_file, file_names)
        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
            result.output += log_helper(
                "Found temporary sync files: {0} in path: {1}".format(
                    sync_files, ek(os.path.join, process_path, sync_files[0])))
            result.output += log_helper(
                "Skipping post processing for folder: {0}".format(
                    process_path))
            result.missed_files.append("{0} : Sync files found".format(
                ek(os.path.join, process_path, sync_files[0])))
            continue

        found_files = filter(helpers.is_media_file, file_names)
        if sickbeard.UNPACK == 1:
            found_files += filter(helpers.is_rar_file, file_names)

        if current_directory != sickbeard.TV_DOWNLOAD_DIR and found_files:
            found_files.append(ek(os.path.basename, current_directory))

        for found_file in found_files:
            try:
                NameParser().parse(found_file, cache_result=False)
            except (InvalidNameException, InvalidShowException) as e:
                pass
            else:
                return True

    result.output += log_helper(
        "{0} : No processable items found in folder".format(process_path),
        logger.DEBUG)
    return False
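
A minimal usage sketch for the validator above, assuming only what its body requires of the result object (an output string and a missed_files list); the StubResult class and the paths are hypothetical stand-ins, not the codebase's real ProcessResult.

class StubResult(object):
    """Hypothetical stand-in; validate_dir only touches output and missed_files."""
    def __init__(self):
        self.output = ""
        self.missed_files = []

result = StubResult()
ok = validate_dir("/downloads/Some.Show.S01E02.720p-GRP",   # hypothetical directory
                  "Some.Show.S01E02.720p-GRP",              # hypothetical release name
                  failed=False, result=result)
if not ok:
    print(result.output)        # accumulated log lines
    print(result.missed_files)  # reasons items were skipped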
Example 4
    def _find_season_quality(self, title, torrent_id, ep_number):
        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

        mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',
                           'vob', 'dvr-ms', 'wtv', 'ts',
                           'ogv', 'rar', 'zip', 'mp4']

        quality = Quality.UNKNOWN

        fileName = None

        fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id))

        if self.proxy and self.proxy.isEnabled():
            self.headers.update({'referer': self.proxy.getProxyURL()})

        data = self.getURL(fileURL)
        if not data:
            return None

        filesList = re.findall('<td.+>(.*?)</td>', data)

        if not filesList:
            logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)

        videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)

        # Filter out single-episode / multi-season torrents
        if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
            logger.log(
                u"Result " + title + " should have " + str(ep_number) + " episodes but the torrent contains " + str(
                    len(videoFiles)) + " video files", logger.DEBUG)
            logger.log(u"Result " + title + " seems to be a single-episode or multi-season torrent, skipping result...",
                       logger.DEBUG)
            return None

        if Quality.sceneQuality(title) != Quality.UNKNOWN:
            return title

        for fileName in videoFiles:
            quality = Quality.sceneQuality(os.path.basename(fileName))
            if quality != Quality.UNKNOWN: break

        if fileName is not None and quality == Quality.UNKNOWN:
            quality = Quality.assumeQuality(os.path.basename(fileName))

        if quality == Quality.UNKNOWN:
            logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
            return None

        try:
            myParser = NameParser(showObj=self.show)
            parse_result = myParser.parse(fileName)
        except (InvalidNameException, InvalidShowException):
            return None

        logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(
                quality)

        return title
Example 5
    def findSearchResults(self,
                          show,
                          episodes,
                          search_mode,
                          manualSearch=False):

        self._checkAuth()
        self.show = show

        results = {}
        itemList = []

        searched_scene_season = None
        for epObj in episodes:
            # search cache for episode result
            cacheResult = self.cache.searchCache(epObj, manualSearch)
            if cacheResult:
                if epObj.episode not in results:
                    results[epObj.episode] = cacheResult
                else:
                    results[epObj.episode].extend(cacheResult)

                # found result, search next episode
                continue

            # skip if season already searched
            if len(episodes
                   ) > 1 and searched_scene_season == epObj.scene_season:
                continue

            # mark season searched for season pack searches so we can skip later on
            searched_scene_season = epObj.scene_season

            if len(episodes) > 1:
                # get season search results
                for curString in self._get_season_search_strings(epObj):
                    itemList += self._doSearch(curString, search_mode,
                                               len(episodes))
            else:
                # get single episode search results
                for curString in self._get_episode_search_strings(epObj):
                    itemList += self._doSearch(curString, 'eponly',
                                               len(episodes))

        # if we found what we needed already from cache then return results and exit
        if len(results) == len(episodes):
            return results

        # sort list by quality
        if len(itemList):
            items = {}
            itemsUnknown = []
            for item in itemList:
                quality = self.getQuality(item, anime=show.is_anime)
                if quality == Quality.UNKNOWN:
                    itemsUnknown += [item]
                else:
                    if quality not in items:
                        items[quality] = [item]
                    else:
                        items[quality].append(item)

            itemList = list(
                itertools.chain(
                    *[v for (k, v) in sorted(items.items(), reverse=True)]))
            itemList += itemsUnknown if itemsUnknown else []

        # filter results
        cl = []
        for item in itemList:
            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                myParser = NameParser(False, convert=True)
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid episode", logger.DEBUG)
                continue
            except InvalidShowException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid show", logger.DEBUG)
                continue

            showObj = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version

            addCacheEntry = False
            if not (showObj.air_by_date or showObj.sports):
                if search_mode == 'sponly':
                    if len(parse_result.episode_numbers):
                        logger.log(
                            u"This is supposed to be a season pack search but the result "
                            + title +
                            " is not a valid season pack, skipping it",
                            logger.DEBUG)
                        addCacheEntry = True
                    if len(parse_result.episode_numbers) and (
                            parse_result.season_number not in set(
                                [ep.season for ep in episodes]) or not [
                                    ep for ep in episodes if ep.scene_episode
                                    in parse_result.episode_numbers
                                ]):
                        logger.log(
                            u"The result " + title +
                            " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                            logger.DEBUG)
                        addCacheEntry = True
                else:
                    if not len(
                            parse_result.episode_numbers
                    ) and parse_result.season_number and not [
                            ep for ep in episodes
                            if ep.season == parse_result.season_number
                            and ep.episode in parse_result.episode_numbers
                    ]:
                        logger.log(
                            u"The result " + title +
                            " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                            logger.DEBUG)
                        addCacheEntry = True
                    elif len(parse_result.episode_numbers) and not [
                            ep for ep in episodes
                            if ep.season == parse_result.season_number
                            and ep.episode in parse_result.episode_numbers
                    ]:
                        logger.log(
                            u"The result " + title +
                            " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                            logger.DEBUG)
                        addCacheEntry = True

                if not addCacheEntry:
                    # we just use the existing info for normal searches
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                if not (parse_result.is_air_by_date):
                    logger.log(
                        u"This is supposed to be a date search but the result "
                        + title + " didn't parse as one, skipping it",
                        logger.DEBUG)
                    addCacheEntry = True
                else:
                    airdate = parse_result.air_date.toordinal()
                    myDB = db.DBConnection()
                    sql_results = myDB.select(
                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                        [showObj.indexerid, airdate])

                    if len(sql_results) != 1:
                        logger.log(
                            u"Tried to look up the date for the episode " +
                            title +
                            " but the database didn't give proper results, skipping it",
                            logger.WARNING)
                        addCacheEntry = True

                if not addCacheEntry:
                    actual_season = int(sql_results[0]["season"])
                    actual_episodes = [int(sql_results[0]["episode"])]

            # add parsed result to cache for usage later on
            if addCacheEntry:
                logger.log(u"Adding item from search to cache: " + title,
                           logger.DEBUG)
                ci = self.cache._addCacheEntry(title,
                                               url,
                                               parse_result=parse_result)
                if ci is not None:
                    cl.append(ci)
                continue

            # make sure we want the episode
            wantEp = True
            for epNo in actual_episodes:
                if not showObj.wantEpisode(actual_season, epNo, quality,
                                           manualSearch):
                    wantEp = False
                    break

            if not wantEp:
                logger.log(
                    u"Ignoring result " + title +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[quality], logger.DEBUG)

                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            # make a result object
            epObj = []
            for curEp in actual_episodes:
                epObj.append(showObj.getEpisode(actual_season, curEp))

            result = self.getResult(epObj)
            result.show = showObj
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.content = None
            result.version = version

            if len(epObj) == 1:
                epNum = epObj[0].episode
                logger.log(u"Single episode result.", logger.DEBUG)
            elif len(epObj) > 1:
                epNum = MULTI_EP_RESULT
                logger.log(
                    u"Separating multi-episode result to check for later - result contains episodes: "
                    + str(parse_result.episode_numbers), logger.DEBUG)
            elif len(epObj) == 0:
                epNum = SEASON_RESULT
                logger.log(u"Separating full season result to check for later",
                           logger.DEBUG)

            if epNum not in results:
                results[epNum] = [result]
            else:
                results[epNum].append(result)

        # check if we have items to add to cache
        if len(cl) > 0:
            myDB = self.cache._getDB()
            myDB.mass_action(cl)

        return results
Example 6
    def findSeasonResults(self, show, season):

        itemList = []
        results = {}

        for curString in self._get_season_search_strings(show, season):
            itemList += self._doSearch(curString, show=show, season=season)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            quality = self.getQuality(item)

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid episode", logger.WARNING)
                continue

            language = self._get_language(title, item)

            if not show.air_by_date:
                # this check is meaningless for non-season searches
                if (parse_result.season_number is not None
                        and parse_result.season_number != season
                    ) or (parse_result.season_number is None and season != 1):
                    logger.log(
                        u"The result " + title +
                        " doesn't seem to be a valid episode for season " +
                        str(season) + ", ignoring")
                    continue

                # we just use the existing info for normal searches
                actual_season = season
                actual_episodes = parse_result.episode_numbers

            else:
                if not parse_result.air_by_date:
                    logger.log(
                        u"This is supposed to be an air-by-date search but the result "
                        + title + " didn't parse as one, skipping it",
                        logger.DEBUG)
                    continue

                myDB = db.DBConnection()
                sql_results = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                    [show.tvdbid,
                     parse_result.air_date.toordinal()])

                if len(sql_results) != 1:
                    logger.log(
                        u"Tried to look up the date for the episode " + title +
                        " but the database didn't give proper results, skipping it",
                        logger.WARNING)
                    continue

                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

            # make sure we want the episode
            wantEp = True
            for epNo in actual_episodes:
                if not show.wantEpisode(actual_season, epNo, quality):
                    wantEp = False
                    break

            if not wantEp:
                logger.log(
                    u"Ignoring result " + title +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[quality], logger.DEBUG)
                continue

            if language != show.audio_lang:
                logger.log(u"Ignoring result " + title +
                           " because the language: " +
                           showLanguages[parse_result.audio_langs] +
                           " does not match the desired language: " +
                           showLanguages[show.audio_lang])
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            # make a result object
            epObj = []
            for curEp in actual_episodes:
                epObj.append(show.getEpisode(actual_season, curEp))

            result = self.getResult(epObj)
            if hasattr(item, 'getNZB'):
                result.extraInfo = [item.getNZB()]
            elif hasattr(item, 'extraInfo'):
                result.extraInfo = item.extraInfo
            result.url = url
            result.name = title
            result.quality = quality

            if hasattr(item, 'audio_langs'):
                result.audio_lang = ''.join(item.audio_langs)

            else:
                result.audio_lang = language

            if len(epObj) == 1:
                epNum = epObj[0].episode
            elif len(epObj) > 1:
                epNum = MULTI_EP_RESULT
                logger.log(
                    u"Separating multi-episode result to check for later - result contains episodes: "
                    + str(parse_result.episode_numbers), logger.DEBUG)
            elif len(epObj) == 0:
                epNum = SEASON_RESULT
                if result.extraInfo:
                    result.extraInfo.append(show)
                else:
                    result.extraInfo = [show]
                logger.log(u"Separating full season result to check for later",
                           logger.DEBUG)

            if epNum in results:
                results[epNum].append(result)
            else:
                results[epNum] = [result]

        return results
Example 7
    def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):

        self._checkAuth()
        self.show = show

        results = {}
        searchItems = {}

        searched_scene_season = None
        for epObj in episodes:
            itemList = []

            if search_mode == 'sponly' and searched_scene_season:
                if searched_scene_season == epObj.scene_season:
                    continue

            # mark season searched for season pack searches so we can skip later on
            searched_scene_season = epObj.scene_season

            if search_mode == 'sponly':
                for curString in self._get_season_search_strings(epObj):
                    itemList += self._doSearch(curString, len(episodes))
            else:
                cacheResult = self.cache.searchCache([epObj], manualSearch)
                if len(cacheResult):
                    results.update({epObj.episode: cacheResult[epObj]})
                    continue

                for curString in self._get_episode_search_strings(epObj):
                    itemList += self._doSearch(curString, len(episodes))

            # next episode if no search results
            if not len(itemList):
                continue

            # remove duplicate items
            searchItems[epObj] = itemList

        #if we have cached results return them.
        if len(results):
            return results

        for ep_obj in searchItems:
            for item in searchItems[ep_obj]:

                (title, url) = self._get_title_and_url(item)

                # parse the file name
                try:
                    myParser = NameParser(False, showObj=show, epObj=ep_obj, convert=True)
                    parse_result = myParser.parse(title)
                except InvalidNameException:
                    logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                    continue

                quality = self.getQuality(item, parse_result.is_anime)

                if not (self.show.air_by_date or self.show.sports):
                    if search_mode == 'sponly' and len(parse_result.episode_numbers):
                        logger.log(
                            u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                            logger.DEBUG)
                        continue

                    if not len(parse_result.episode_numbers) and (
                                    parse_result.season_number is not None and parse_result.season_number != ep_obj.season) or (
                                    parse_result.season_number is None and ep_obj.season != 1):
                        logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
                            ep_obj.season) + ", ignoring", logger.DEBUG)
                        continue
                    elif len(parse_result.episode_numbers) and (
                                    parse_result.season_number != ep_obj.season or ep_obj.episode not in parse_result.episode_numbers):
                        logger.log(u"Episode " + title + " isn't " + str(ep_obj.season) + "x" + str(
                            ep_obj.episode) + ", skipping it", logger.DEBUG)
                        continue

                    # we just use the existing info for normal searches
                    actual_season = ep_obj.season
                    actual_episodes = parse_result.episode_numbers
                else:
                    if not (parse_result.air_by_date or parse_result.sports):
                        logger.log(
                            u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                            logger.DEBUG)
                        continue

                    if (parse_result.air_by_date and parse_result.air_date != ep_obj.airdate) or (
                                parse_result.sports and parse_result.sports_event_date != ep_obj.airdate):
                        logger.log("Episode " + title + " didn't air on " + str(ep_obj.airdate) + ", skipping it",
                                   logger.DEBUG)
                        continue

                    myDB = db.DBConnection()
                    sql_results = myDB.select(
                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                        [show.indexerid,
                         parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])

                    if len(sql_results) != 1:
                        logger.log(
                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                            logger.WARNING)
                        continue

                    actual_season = int(sql_results[0]["season"])
                    actual_episodes = [int(sql_results[0]["episode"])]

                # make sure we want the episode
                wantEp = True
                for epNo in actual_episodes:
                    if not show.wantEpisode(actual_season, epNo, quality, manualSearch):
                        wantEp = False
                        break

                if not wantEp:
                    logger.log(
                        u"Ignoring result " + title + " because we don't want an episode that is " +
                        Quality.qualityStrings[
                            quality], logger.DEBUG)

                    continue

                logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

                # make a result object
                epObj = []
                for curEp in actual_episodes:
                    epObj.append(show.getEpisode(actual_season, curEp))

                result = self.getResult(epObj)
                result.url = url
                result.name = title
                result.quality = quality
                result.provider = self
                result.content = None

                if len(epObj) == 1:
                    epNum = epObj[0].episode
                    logger.log(u"Single episode result.", logger.DEBUG)
                elif len(epObj) > 1:
                    epNum = MULTI_EP_RESULT
                    logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
                        parse_result.episode_numbers), logger.DEBUG)
                elif len(epObj) == 0:
                    epNum = SEASON_RESULT
                    logger.log(u"Separating full season result to check for later", logger.DEBUG)

                if not result:
                    continue

                if epNum in results:
                    results[epNum].append(result)
                else:
                    results[epNum] = [result]

        return results
Example 8
def validate_name(
        pattern,
        multi=None,
        anime_type=None,  # pylint: disable=too-many-arguments, too-many-return-statements
        file_only=False,
        abd=False,
        sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi, anime_type)
    if not file_only:
        new_name = ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log("Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log("Trying to parse " + new_name, logger.DEBUG)

    try:
        result = NameParser(True, showObj=ep.show,
                            naming_pattern=True).parse(new_name)
    except (InvalidNameException, InvalidShowException) as error:
        logger.log("{0}".format(error), logger.DEBUG)
        return False

    logger.log("The name " + new_name + " parsed into " + str(result),
               logger.DEBUG)

    if abd or sports:
        if result.air_date != ep.airdate:
            logger.log(
                "Air date incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
    elif anime_type != 3:
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [
                x.absolute_number for x in [ep] + ep.relatedEps
        ]:
            logger.log(
                "Absolute numbering incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
    else:
        if result.season_number != ep.season:
            logger.log(
                "Season number incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            logger.log(
                "Episode numbering incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False

    return True
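
A short sketch of calling the validator above, e.g. from a settings check. The pattern strings and their tokens are illustrative assumptions; which tokens are actually supported is defined by the naming code, not by this sketch.

# Hypothetical naming patterns to test against a sample episode.
patterns = [
    "%SN - %Sx%0E - %EN",  # single-episode style pattern (assumed tokens)
    "%SN - %A-D - %EN",    # air-by-date style pattern (assumed tokens)
]
for pattern in patterns:
    ok = validate_name(pattern, multi=None, abd=("%A-D" in pattern))
    print("%s -> %s" % (pattern, "valid" if ok else "not valid"))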
Example 9
def filter_bad_releases(name, parse=True, show=None):
    """
    Filters out non-english and just all-around stupid releases by comparing them
    to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """

    try:
        if parse:
            NameParser().parse(name)
    except InvalidNameException as error:
        logger.log(u"{0}".format(error), logger.DEBUG)
        return False
    except InvalidShowException:
        pass
    # except InvalidShowException as error:
    #    logger.log(u"{0}".format(error), logger.DEBUG)
    #    return False

    # if any of the bad strings are in the name then say no
    ignore_words = list(resultFilters)

    if show and show.rls_ignore_words:
        ignore_words.extend(show.rls_ignore_words.split(','))
    elif sickbeard.IGNORE_WORDS:
        ignore_words.extend(sickbeard.IGNORE_WORDS.split(','))

    if show and show.rls_require_words:
        ignore_words = list(
            set(ignore_words).difference(
                x.strip() for x in show.rls_require_words.split(',')
                if x.strip()))
    elif sickbeard.REQUIRE_WORDS and not (
            show and show.rls_ignore_words
    ):  # Only remove global require words from the list if we arent using show ignore words
        ignore_words = list(
            set(ignore_words).difference(
                x.strip() for x in sickbeard.REQUIRE_WORDS.split(',')
                if x.strip()))

    word = containsAtLeastOneWord(name, ignore_words)
    if word:
        logger.log(u"Release: " + name + " contains " + word + ", ignoring it",
                   logger.INFO)
        return False

    # if any of the good strings aren't in the name then say no

    require_words = []
    if show and show.rls_require_words:
        require_words.extend(show.rls_require_words.split(','))
    elif sickbeard.REQUIRE_WORDS:
        require_words.extend(sickbeard.REQUIRE_WORDS.split(','))

    if show and show.rls_ignore_words:
        require_words = list(
            set(require_words).difference(
                x.strip() for x in show.rls_ignore_words.split(',')
                if x.strip()))
    elif sickbeard.IGNORE_WORDS and not (
            show and show.rls_require_words
    ):  # Only remove global ignore words from the list if we arent using show require words
        require_words = list(
            set(require_words).difference(
                x.strip() for x in sickbeard.IGNORE_WORDS.split(',')
                if x.strip()))

    if require_words and not containsAtLeastOneWord(name, require_words):
        logger.log(
            u"Release: " + name + " doesn't contain any of " +
            ', '.join(set(require_words)) + ", ignoring it", logger.INFO)
        return False

    return True
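
A brief sketch of applying the filter above to a batch of candidate release names; the names are placeholders, and passing show=None means only the global ignore/require word lists apply.

# Hypothetical release names to screen before snatching.
candidates = [
    "Some.Show.S01E02.720p.HDTV.x264-GRP",
    "Some.Show.S01E02.FRENCH.720p.HDTV.x264-GRP",
]
acceptable = [name for name in candidates
              if filter_bad_releases(name, parse=True, show=None)]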
Example 10
def already_postprocessed(dirName, videofile, force, result):

    if force:
        return False

    #Needed for accessing DB with a unicode DirName
    if not isinstance(dirName, unicode):
        dirName = unicode(dirName, 'utf_8')

    # Avoid processing the same dir again if the process method is anything other than "move"
    myDB = db.DBConnection()
    sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?",
                            [dirName])
    if sqlResult:
        result.output += logHelper(
            u"You're trying to post process a dir that's already been processed, skipping",
            logger.DEBUG)
        return True

    else:
        # This is needed for video whose name differ from dirName
        if not isinstance(videofile, unicode):
            videofile = unicode(videofile, 'utf_8')

        sqlResult = myDB.select(
            "SELECT * FROM tv_episodes WHERE release_name = ?",
            [videofile.rpartition('.')[0]])
        if sqlResult:
            result.output += logHelper(
                u"You're trying to post process a video that's already been processed, skipping",
                logger.DEBUG)
            return True

        # Needed if we have downloaded the same episode at a different quality.
        # We need to check the history of the episode we're going to post-process, and not others.
        np = NameParser(dirName,
                        tryIndexers=True,
                        trySceneExceptions=True,
                        convert=True)
        # Parsing an unparsable folder (like the TV root dir) raises an exception; we expect
        # that and simply fall back to parse_result = False so the check below still works.
        try:
            parse_result = np.parse(dirName)
        except Exception:
            parse_result = False

        search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"  #This part is always the same
        search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
        #If we find a showid, a season number, and one or more episode numbers then we need to use those in the query
        if parse_result and (parse_result.show.indexerid
                             and parse_result.episode_numbers
                             and parse_result.season_number):
            search_sql += " and tv_episodes.showid = '" + str(
                parse_result.show.indexerid
            ) + "' and tv_episodes.season = '" + str(
                parse_result.season_number
            ) + "' and tv_episodes.episode = '" + str(
                parse_result.episode_numbers[0]) + "'"

        search_sql += " and tv_episodes.status IN (" + ",".join(
            [str(x) for x in common.Quality.DOWNLOADED]) + ")"
        search_sql += " and history.resource LIKE ?"
        sqlResult = myDB.select(search_sql, [u'%' + videofile])
        if sqlResult:
            result.output += logHelper(
                u"You're trying to post process a video that's already been processed, skipping",
                logger.DEBUG)
            return True

    return False
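
A minimal sketch of how a post-processing loop might use the check above to skip work; the StubResult holder, the paths, and the downstream hand-off are assumptions for illustration only.

class StubResult(object):
    """Hypothetical result holder; already_postprocessed only appends to output."""
    def __init__(self):
        self.output = u""

result = StubResult()
dir_name = u"/downloads/Some.Show.S01E02.720p-GRP"            # hypothetical path
video_files = [u"Some.Show.S01E02.720p.HDTV.x264-GRP.mkv"]    # hypothetical file list
for video in video_files:
    if already_postprocessed(dir_name, video, force=False, result=result):
        continue  # history/DB says this one was already handled
    # ... hand dir_name/video to the real post-processor here (not shown) ...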
Example 11
    def find_search_results(self,
                            show,
                            episodes,
                            search_mode,
                            manual_search=False):

        self._check_auth()
        self.show = show

        results = {}
        item_list = []

        searched_scene_season = None
        for ep_obj in episodes:
            # search cache for episode result
            cache_result = self.cache.searchCache(ep_obj, manual_search)
            if cache_result:
                if ep_obj.episode not in results:
                    results[ep_obj.episode] = cache_result
                else:
                    results[ep_obj.episode].extend(cache_result)

                # found result, search next episode
                continue

            # skip if season already searched
            if 1 < len(
                    episodes) and ep_obj.scene_season == searched_scene_season:
                continue

            # mark season searched for season pack searches so we can skip later on
            searched_scene_season = ep_obj.scene_season

            if 'sponly' == search_mode:
                # get season search results
                for curString in self._get_season_search_strings(ep_obj):
                    item_list += self._do_search(curString, search_mode,
                                                 len(episodes))
            else:
                # get single episode search results
                for curString in self._get_episode_search_strings(ep_obj):
                    item_list += self._do_search(curString, 'eponly',
                                                 len(episodes))

        # if we found what we needed already from cache then return results and exit
        if len(results) == len(episodes):
            return results

        # sort list by quality
        if len(item_list):
            items = {}
            items_unknown = []
            for item in item_list:
                quality = self.get_quality(item, anime=show.is_anime)
                if Quality.UNKNOWN == quality:
                    items_unknown += [item]
                else:
                    if quality not in items:
                        items[quality] = [item]
                    else:
                        items[quality].append(item)

            item_list = list(
                itertools.chain(
                    *[v for (k, v) in sorted(items.items(), reverse=True)]))
            item_list += items_unknown if items_unknown else []

        # filter results
        cl = []
        for item in item_list:
            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                parser = NameParser(False, convert=True)
                parse_result = parser.parse(title)
            except InvalidNameException:
                logger.log(
                    u'Unable to parse the filename ' + title +
                    ' into a valid episode', logger.DEBUG)
                continue
            except InvalidShowException:
                logger.log(
                    u'No show name or scene exception matched the parsed filename '
                    + title, logger.DEBUG)
                continue

            show_obj = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version

            add_cache_entry = False
            if not (show_obj.air_by_date or show_obj.sports):
                if 'sponly' == search_mode:
                    if len(parse_result.episode_numbers):
                        logger.log(
                            u'This is supposed to be a season pack search but the result '
                            + title +
                            u' is not a valid season pack, skipping it',
                            logger.DEBUG)
                        add_cache_entry = True
                    if len(parse_result.episode_numbers)\
                            and (parse_result.season_number not in set([ep.season for ep in episodes])
                                 or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                        logger.log(
                            u'The result ' + title +
                            u' doesn\'t seem to be a valid episode that we are trying'
                            + u' to snatch, ignoring', logger.DEBUG)
                        add_cache_entry = True
                else:
                    if not len(parse_result.episode_numbers)\
                            and parse_result.season_number\
                            and not [ep for ep in episodes
                                     if ep.season == parse_result.season_number
                                     and ep.episode in parse_result.episode_numbers]:
                        logger.log(
                            u'The result ' + title +
                            u' doesn\'t seem to be a valid season that we are trying'
                            + u' to snatch, ignoring', logger.DEBUG)
                        add_cache_entry = True
                    elif len(parse_result.episode_numbers) and not [
                            ep for ep in episodes
                            if ep.season == parse_result.season_number
                            and ep.episode in parse_result.episode_numbers
                    ]:
                        logger.log(
                            u'The result ' + title +
                            ' doesn\'t seem to be a valid episode that we are trying'
                            + u' to snatch, ignoring', logger.DEBUG)
                        add_cache_entry = True

                if not add_cache_entry:
                    # we just use the existing info for normal searches
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                if not parse_result.is_air_by_date:
                    logger.log(
                        u'This is supposed to be a date search but the result '
                        + title + u' didn\'t parse as one, skipping it',
                        logger.DEBUG)
                    add_cache_entry = True
                else:
                    airdate = parse_result.air_date.toordinal()
                    my_db = db.DBConnection()
                    sql_results = my_db.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [show_obj.indexerid, airdate])

                    if 1 != len(sql_results):
                        logger.log(
                            u'Tried to look up the date for the episode ' +
                            title + ' but the database didn\'t' +
                            u' give proper results, skipping it',
                            logger.WARNING)
                        add_cache_entry = True

                if not add_cache_entry:
                    actual_season = int(sql_results[0]['season'])
                    actual_episodes = [int(sql_results[0]['episode'])]

            # add parsed result to cache for usage later on
            if add_cache_entry:
                logger.log(u'Adding item from search to cache: ' + title,
                           logger.DEBUG)
                ci = self.cache.add_cache_entry(title,
                                                url,
                                                parse_result=parse_result)
                if None is not ci:
                    cl.append(ci)
                continue

            # make sure we want the episode
            want_ep = True
            for epNo in actual_episodes:
                if not show_obj.wantEpisode(actual_season, epNo, quality,
                                            manual_search):
                    want_ep = False
                    break

            if not want_ep:
                logger.log(
                    u'Ignoring result %s because we don\'t want an episode that is %s'
                    % (title, Quality.qualityStrings[quality]), logger.DEBUG)
                continue

            logger.log(u'Found result %s at %s' % (title, url), logger.DEBUG)

            # make a result object
            ep_obj = []
            for curEp in actual_episodes:
                ep_obj.append(show_obj.getEpisode(actual_season, curEp))

            result = self.get_result(ep_obj, url)
            if None is result:
                continue
            result.show = show_obj
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.content = None
            result.version = version

            if 1 == len(ep_obj):
                ep_num = ep_obj[0].episode
                logger.log(u'Single episode result.', logger.DEBUG)
            elif 1 < len(ep_obj):
                ep_num = MULTI_EP_RESULT
                logger.log(
                    u'Separating multi-episode result to check for later - result contains episodes: '
                    + str(parse_result.episode_numbers), logger.DEBUG)
            elif 0 == len(ep_obj):
                ep_num = SEASON_RESULT
                logger.log(u'Separating full season result to check for later',
                           logger.DEBUG)

            if ep_num not in results:
                results[ep_num] = [result]
            else:
                results[ep_num].append(result)

        # check if we have items to add to cache
        if 0 < len(cl):
            my_db = self.cache.get_db()
            my_db.mass_action(cl)

        return results
Example 12
    def findEpisode(self, episode, manualSearch=False):

        logger.log(u"Searching " + self.name + " for " + episode.prettyName())

        self.cache.updateCache()
        results = self.cache.searchCache(episode, manualSearch)
        logger.log(u"Cache results: " + str(results), logger.DEBUG)

        # if we got some results then use them no matter what.
        # OR
        # return anyway unless we're doing a manual search
        if results or not manualSearch:
            return results

        itemList = []

        for cur_search_string in self._get_episode_search_strings(episode):
            itemList += self._doSearch(cur_search_string)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                myParser = NameParser()
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid episode", logger.WARNING)
                continue

            if episode.show.air_by_date:
                if parse_result.air_date != episode.airdate:
                    logger.log(
                        "Episode " + title + " didn't air on " +
                        str(episode.airdate) + ", skipping it", logger.DEBUG)
                    continue
            elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                logger.log(
                    "Episode " + title + " isn't " + str(episode.season) +
                    "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
                continue

            quality = self.getQuality(item)

            if not episode.show.wantEpisode(episode.season, episode.episode,
                                            quality, manualSearch):
                logger.log(
                    u"Ignoring result " + title +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            result = self.getResult([episode])
            result.url = url
            result.name = title
            result.quality = quality
            result.provider = self
            result.content = None

            results.append(result)

        return results
Example 13
    def _find_season_quality(self, title, torrent_id, ep_number):
        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

        quality = Quality.UNKNOWN
        file_name = None
        data = None
        has_signature = False
        details_url = '/ajax_details_filelist.php?id=%s' % torrent_id
        for idx, url in enumerate(self.urls['config_provider_home_uri']):
            data = self.get_url(url + details_url)
            if data and re.search(r'<title>The\sPirate\sBay', data[33:200:]):
                has_signature = True
                break
            else:
                data = None

        if not has_signature:
            logger.log(
                u'Failed to identify a ThePirateBay page after %s attempted urls (TPB blocked, a general network issue, or the site is dead)'
                % len(self.urls['config_provider_home_uri']), logger.ERROR)

        if not data:
            return None

        files_list = re.findall('<td.+>(.*?)</td>', data)

        if not files_list:
            logger.log(u'Unable to get the torrent file list for ' + title,
                       logger.ERROR)

        video_files = filter(
            lambda x: x.rpartition('.')[2].lower() in mediaExtensions,
            files_list)

        # Filter out single-episode or multi-season torrents
        if ep_number > len(video_files) or float(
                ep_number * 1.1) < len(video_files):
            logger.log(
                u'Result %s has episode %s and total episodes retrieved in torrent are %s'
                % (title, str(ep_number), str(len(video_files))), logger.DEBUG)
            logger.log(
                u'Result %s seems to be a single episode or multiseason torrent, skipping result...'
                % title, logger.DEBUG)
            return None

        if Quality.UNKNOWN != Quality.sceneQuality(title):
            return title

        for file_name in video_files:
            quality = Quality.sceneQuality(os.path.basename(file_name))
            if Quality.UNKNOWN != quality:
                break

        if None is not file_name and Quality.UNKNOWN == quality:
            quality = Quality.assumeQuality(os.path.basename(file_name))

        if Quality.UNKNOWN == quality:
            logger.log(u'Unable to obtain a Season Quality for ' + title,
                       logger.DEBUG)
            return None

        try:
            my_parser = NameParser(showObj=self.show)
            parse_result = my_parser.parse(file_name)
        except (InvalidNameException, InvalidShowException):
            return None

        logger.log(
            u'Season quality for %s is %s' %
            (title, Quality.qualityStrings[quality]), logger.DEBUG)

        if parse_result.series_name and parse_result.season_number:
            title = '%s S%02d %s' % (parse_result.series_name,
                                     int(parse_result.season_number),
                                     self._reverse_quality(quality))

        return title
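
The quality detection above reduces to: keep only video files by extension, then return the first recognisable quality token. A hedged standalone sketch, with simplified stand-ins for the project's mediaExtensions and Quality.sceneQuality:

import os
import re

# Simplified stand-ins, assumed for illustration only.
VIDEO_EXTS = ('mkv', 'avi', 'mp4')
QUALITY_TOKENS = ('2160p', '1080p', '720p', 'hdtv')

def guess_season_quality(file_names):
    """Return the first quality token found in the torrent's video file names, else None."""
    video_files = [f for f in file_names if f.rpartition('.')[2].lower() in VIDEO_EXTS]
    for name in video_files:
        match = re.search('|'.join(QUALITY_TOKENS), os.path.basename(name), re.I)
        if match:
            return match.group(0).lower()
    return None

# guess_season_quality(['Show.S01E01.720p.HDTV.x264.mkv', 'sample.txt']) -> '720p'
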
Example 14
    def getSearchResults(self,
                         show,
                         season,
                         ep_objs,
                         seasonSearch=False,
                         manualSearch=False):

        itemList = []
        results = {}

        self._checkAuth()

        regexMethod = 0
        if show.sports:
            regexMethod = 1

        for ep_obj in ep_objs:
            # get scene season/episode info
            scene_season = ep_obj.scene_season
            scene_episode = ep_obj.scene_episode
            if show.air_by_date or show.sports:
                scene_episode = ep_obj.airdate

            if not seasonSearch:
                logger.log(u'Searching "%s" for "%s" as "%s"' %
                           (self.name, ep_obj.prettyName(),
                            ep_obj.scene_prettyName()))

            self.cache.updateCache()
            results = self.cache.searchCache(ep_obj, manualSearch)
            logger.log(u"Cache results: " + str(results), logger.DEBUG)
            logger.log(u"manualSearch: " + str(manualSearch), logger.DEBUG)

            # if we got some results then use them no matter what.
            # OR
            # return anyway unless we're doing a manual search
            if results:
                return results

            if seasonSearch:
                for curString in self._get_season_search_strings(
                        show, scene_season, scene_episode):
                    itemList += self._doSearch(curString, show=show)
            else:
                for curString in self._get_episode_search_strings(
                        show, scene_season, scene_episode):
                    itemList += self._doSearch(curString, show=show)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            quality = self.getQuality(item)

            # parse the file name
            try:
                myParser = NameParser(False, regexMethod)
                parse_result = myParser.parse(title, True)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid episode", logger.WARNING)
                continue

            if not show.air_by_date and not show.sports:
                # this check is meaningless for non-season searches
                if (parse_result.season_number is not None and parse_result.season_number != season) \
                        or (parse_result.season_number is None and season != 1):
                    logger.log(
                        u"The result " + title +
                        " doesn't seem to be a valid episode for season " +
                        str(season) + ", ignoring", logger.DEBUG)
                    continue

                # we just use the existing info for normal searches
                actual_season = season
                actual_episodes = parse_result.episode_numbers

            else:
                if show.air_by_date and not parse_result.air_by_date:
                    logger.log(
                        u"This is supposed to be an air-by-date search but the result "
                        + title + " didn't parse as one, skipping it",
                        logger.DEBUG)
                    continue

                if show.sports and not parse_result.sports:
                    logger.log(
                        u"This is supposed to be an sports search but the result "
                        + title + " didn't parse as one, skipping it",
                        logger.DEBUG)
                    continue

                myDB = db.DBConnection()
                if parse_result.air_by_date:
                    sql_results = myDB.select(
                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                        [show.indexerid,
                         parse_result.air_date.toordinal()])
                elif parse_result.sports:
                    sql_results = myDB.select(
                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                        [show.indexerid,
                         parse_result.sports_date.toordinal()])

                if len(sql_results) != 1:
                    logger.log(
                        u"Tried to look up the date for the episode " + title +
                        " but the database didn't give proper results, skipping it",
                        logger.WARNING)
                    continue

                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

            # make sure we want the episode
            wantEp = True
            for epNo in actual_episodes:
                if not show.wantEpisode(
                        actual_season, epNo, quality,
                        manualSearch=manualSearch):
                    wantEp = False
                    break

            if not wantEp:
                logger.log(
                    u"Ignoring result " + title +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            # make a result object
            epObj = []
            for curEp in actual_episodes:
                epObj.append(show.getEpisode(actual_season, curEp))

            result = self.getResult(epObj)
            result.url = url
            result.name = title
            result.quality = quality
            result.provider = self
            result.content = None

            if len(epObj) == 1:
                epNum = epObj[0].episode
            elif len(epObj) > 1:
                epNum = MULTI_EP_RESULT
                logger.log(
                    u"Separating multi-episode result to check for later - result contains episodes: "
                    + str(parse_result.episode_numbers), logger.DEBUG)
            elif len(epObj) == 0:
                epNum = SEASON_RESULT
                result.extraInfo = [show]
                logger.log(u"Separating full season result to check for later",
                           logger.DEBUG)

            if epNum in results:
                results[epNum].append(result)
            else:
                results[epNum] = [result]

        return results
Example 15
    def _already_postprocessed(self, dir_name, videofile, force):

        if force and not self.any_vid_processed:
            return False

        # Needed for accessing DB with a unicode dir_name
        if not isinstance(dir_name, unicode):
            dir_name = unicode(dir_name, 'utf_8')

        parse_result = None
        try:
            parse_result = NameParser(try_scene_exceptions=True, convert=True).parse(videofile, cache_result=False)
        except (InvalidNameException, InvalidShowException):
            # Does not parse, move on to directory check
            pass
        if None is parse_result:
            try:
                parse_result = NameParser(try_scene_exceptions=True, convert=True).parse(dir_name, cache_result=False)
            except (InvalidNameException, InvalidShowException):
                # If the filename doesn't parse, then return false as last
                # resort. We can assume that unparseable filenames are not
                # processed in the past
                return False

        showlink = (' for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"' % (parse_result.show.indexerid, parse_result.show.name),
                    parse_result.show.name)[self.any_vid_processed]

        ep_detail_sql = ''
        if parse_result.show.indexerid and 0 < len(parse_result.episode_numbers) and parse_result.season_number:
            ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
                            % (str(parse_result.show.indexerid),
                                str(parse_result.season_number),
                                str(parse_result.episode_numbers[0]))

        # Avoid processing the same directory again if we use a process method other than move
        my_db = db.DBConnection()
        sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [dir_name])
        if sql_result:
            self._log_helper(u'Found a release directory %s that has already been processed,<br />.. skipping: %s'
                             % (showlink, dir_name))
            if ep_detail_sql:
                reset_status(parse_result.show.indexerid,
                             parse_result.season_number,
                             parse_result.episode_numbers[0])
            return True

        else:
            # This is needed for videos whose name differs from dir_name
            if not isinstance(videofile, unicode):
                videofile = unicode(videofile, 'utf_8')

            sql_result = my_db.select('SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]])
            if sql_result:
                self._log_helper(u'Found a video, but that release %s was already processed,<br />.. skipping: %s'
                                 % (showlink, videofile))
                if ep_detail_sql:
                    reset_status(parse_result.show.indexerid,
                                 parse_result.season_number,
                                 parse_result.episode_numbers[0])
                return True

            # Needed if we have downloaded the same episode at a different quality
            search_sql = 'SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history'\
                         + ' ON history.showid=tv_episodes.showid'\
                         + ' WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode'\
                         + ep_detail_sql\
                         + ' and tv_episodes.status IN (%s)' % ','.join([str(x) for x in common.Quality.DOWNLOADED])\
                         + ' and history.resource LIKE ?'

            sql_result = my_db.select(search_sql, [u'%' + videofile])
            if sql_result:
                self._log_helper(u'Found a video, but the episode %s is already processed,<br />.. skipping: %s'
                                 % (showlink, videofile))
                if ep_detail_sql:
                    reset_status(parse_result.show.indexerid,
                                 parse_result.season_number,
                                 parse_result.episode_numbers[0])
                return True

        return False
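
The duplicate check above asks the database for the release directory name first, and then for the video file name without its extension. A self-contained sketch of that two-step lookup against a throwaway in-memory table (the single-column schema is an assumption made only for this example):

import sqlite3

def already_processed(conn, dir_name, videofile):
    """Return True if either the directory name or the file stem is a known release_name."""
    cur = conn.cursor()
    for candidate in (dir_name, videofile.rpartition('.')[0]):
        cur.execute('SELECT 1 FROM tv_episodes WHERE release_name = ?', [candidate])
        if cur.fetchone():
            return True
    return False

# Toy table for the sketch:
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (release_name TEXT)')
conn.execute("INSERT INTO tv_episodes VALUES ('Show.S01E02.720p-GRP')")
print(already_processed(conn, 'Show.S01E02.720p-GRP', 'Show.S01E02.720p-GRP.mkv'))  # True
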
Example 16
    def _analyze_name(self, name, file_name=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.

        name: A string which we want to analyze to determine show info from (unicode)

        Returns a (tvdb_id, season, [episodes], quality) tuple. tvdb_id, season and quality may be None,
        and episodes may be [] if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [], None)

        if not name:
            return to_return

        name = helpers.remove_non_release_groups(
            helpers.remove_extension(name))

        # parse the name to break it into show name, season, and episode
        np = NameParser(False)
        parse_result = np.parse(name)
        self._log(
            u"Parsed " + name + " into " +
            str(parse_result).decode('utf-8', 'xmlcharrefreplace'),
            logger.DEBUG)

        if parse_result.air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (None, season, episodes, None)

        # do a scene reverse-lookup to get a list of all possible names
        name_list = show_name_helpers.sceneToNormalShowNames(
            parse_result.series_name)

        if not name_list:
            return (None, season, episodes, None)

        # try finding name in DB
        for cur_name in name_list:
            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
            db_result = helpers.searchDBForShow(cur_name)
            if db_result:
                self._log(
                    u"Lookup successful, using tvdb id " + str(db_result[0]),
                    logger.DEBUG)
                self._finalize(parse_result)
                return (int(db_result[0]), season, episodes, None)

        # try finding name in scene exceptions
        for cur_name in name_list:
            self._log(u"Checking scene exceptions for a match on " + cur_name,
                      logger.DEBUG)
            scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
            if scene_id:
                self._log(
                    u"Scene exception lookup got tvdb id " + str(scene_id) +
                    u", using that", logger.DEBUG)
                self._finalize(parse_result)
                return (scene_id, season, episodes, None)

        # try finding name on TVDB
        for cur_name in name_list:
            try:
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                  **sickbeard.TVDB_API_PARMS)

                self._log(u"Looking up name " + cur_name + u" on TVDB",
                          logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception):
                # if none found, search on all languages
                try:
                    # There's gotta be a better way of doing this but we don't wanna
                    # change the language value elsewhere
                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                    ltvdb_api_parms['search_all_languages'] = True
                    t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                      **ltvdb_api_parms)

                    self._log(
                        u"Looking up name " + cur_name +
                        u" in all languages on TVDB", logger.DEBUG)
                    showObj = t[cur_name]
                except (tvdb_exceptions.tvdb_exception, IOError):
                    pass

                continue
            except (IOError):
                continue

            self._log(
                u"Lookup successful, using tvdb id " + str(showObj["id"]),
                logger.DEBUG)
            self._finalize(parse_result)
            return (int(showObj["id"]), season, episodes, None)

        self._finalize(parse_result)
        return to_return
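
The docstring's return convention (season -1 plus the air date for air-by-date shows, otherwise the parsed season and episode numbers) can be shown with a hypothetical parse-result object; the namedtuple below is only a stand-in, not the project's NameParser result:

import datetime
from collections import namedtuple

# Hypothetical stand-in for a parse result, for illustration only.
FakeParseResult = namedtuple('FakeParseResult',
                             'air_by_date air_date season_number episode_numbers')

def season_and_episodes(parse_result):
    """Air-by-date shows map to season -1 and the air date; others keep parsed numbers."""
    if parse_result.air_by_date:
        return -1, [parse_result.air_date]
    return parse_result.season_number, parse_result.episode_numbers

print(season_and_episodes(FakeParseResult(True, datetime.date(2014, 3, 2), None, [])))
# (-1, [datetime.date(2014, 3, 2)])
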
Example 17
    def find_search_results(
            self,
            show,
            episodes,
            search_mode,  # pylint: disable=too-many-branches,too-many-arguments,too-many-locals,too-many-statements
            manual_search=False,
            download_current_quality=False):
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.searchCache(
                episode,
                manualSearch=manual_search,
                downCurQuality=download_current_quality)
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == episode.scene_season:
                continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if len(episodes) > 1 and search_mode == 'sponly':
                search_strings = self._get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self._get_episode_search_strings(episode)

            for search_string in search_strings:
                items_list += self.search(search_string, ep_obj=episode)

        if len(results) == len(episodes):
            return results

        if items_list:
            items = {}
            unknown_items = []

            for item in items_list:
                quality = self.get_quality(item, anime=show.is_anime)

                if quality == Quality.UNKNOWN:
                    unknown_items.append(item)
                else:
                    if quality not in items:
                        items[quality] = []
                    items[quality].append(item)

            items_list = list(chain(*[v for (k_, v) in sorted(items.iteritems(), reverse=True)]))
            items_list += unknown_items

        cl = []

        for item in items_list:
            (title, url) = self._get_title_and_url(item)

            try:
                parse_result = NameParser(
                    parse_method=('normal',
                                  'anime')[show.is_anime]).parse(title)
            except (InvalidNameException, InvalidShowException) as error:
                logger.log(u"{0}".format(error), logger.DEBUG)
                continue

            show_object = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version
            add_cache_entry = False

            if not (show_object.air_by_date or show_object.sports):
                if search_mode == 'sponly':
                    if parse_result.episode_numbers:
                        logger.log(
                            u'This is supposed to be a season pack search but the result {0} is not a valid season pack, skipping it'
                            .format(title), logger.DEBUG)
                        add_cache_entry = True
                    elif not [ep for ep in episodes
                              if parse_result.season_number == (ep.season, ep.scene_season)[ep.show.is_scene]]:
                        logger.log(
                            u'This season result {0} is for a season we are not searching for, skipping it'
                            .format(title), logger.DEBUG)
                        add_cache_entry = True

                else:
                    if not all([
                            # pylint: disable=bad-continuation
                            parse_result.season_number is not None,
                            parse_result.episode_numbers,
                            [ep for ep in episodes
                             if (ep.season, ep.scene_season)[ep.show.is_scene] ==
                             (parse_result.season_number, parse_result.scene_season)[ep.show.is_scene]
                             and (ep.episode, ep.scene_episode)[ep.show.is_scene] in parse_result.episode_numbers]
                    ]):

                        logger.log(
                            u'The result {0} doesn\'t seem to match an episode that we are currently trying to snatch, skipping it'
                            .format(title), logger.DEBUG)
                        add_cache_entry = True

                if not add_cache_entry:
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                same_day_special = False

                if not parse_result.is_air_by_date:
                    logger.log(
                        u'This is supposed to be a date search but the result {0} didn\'t parse as one, skipping it'
                        .format(title), logger.DEBUG)
                    add_cache_entry = True
                else:
                    air_date = parse_result.air_date.toordinal()
                    db = DBConnection()
                    sql_results = db.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [show_object.indexerid, air_date])

                    if len(sql_results) == 2:
                        if int(sql_results[0]['season']) == 0 and int(
                                sql_results[1]['season']) != 0:
                            actual_season = int(sql_results[1]['season'])
                            actual_episodes = [int(sql_results[1]['episode'])]
                            same_day_special = True
                        elif int(sql_results[1]['season']) == 0 and int(
                                sql_results[0]['season']) != 0:
                            actual_season = int(sql_results[0]['season'])
                            actual_episodes = [int(sql_results[0]['episode'])]
                            same_day_special = True
                    elif len(sql_results) != 1:
                        logger.log(
                            u'Tried to look up the date for the episode {0} but the database didn\'t give proper results, skipping it'
                            .format(title), logger.WARNING)
                        add_cache_entry = True

                if not add_cache_entry and not same_day_special:
                    actual_season = int(sql_results[0]['season'])
                    actual_episodes = [int(sql_results[0]['episode'])]

            if add_cache_entry:
                logger.log(
                    u'Adding item from search to cache: {0}'.format(title),
                    logger.DEBUG)
                # pylint: disable=protected-access
                # Access to a protected member of a client class
                ci = self.cache._addCacheEntry(title,
                                               url,
                                               parse_result=parse_result)

                if ci is not None:
                    cl.append(ci)

                continue

            episode_wanted = True

            for episode_number in actual_episodes:
                if not show_object.wantEpisode(actual_season, episode_number,
                                               quality, manual_search,
                                               download_current_quality):
                    episode_wanted = False
                    break

            if not episode_wanted:
                logger.log(u'Ignoring result {0}.'.format(title), logger.DEBUG)
                continue

            logger.log(u'Found result {0} at {1}'.format(title, url),
                       logger.DEBUG)

            episode_object = []
            for current_episode in actual_episodes:
                episode_object.append(
                    show_object.getEpisode(actual_season, current_episode))

            result = self.get_result(episode_object)
            result.show = show_object
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            if len(episode_object) == 1:
                episode_number = episode_object[0].episode
                logger.log(u'Single episode result.', logger.DEBUG)
            elif len(episode_object) > 1:
                episode_number = MULTI_EP_RESULT
                logger.log(
                    u'Separating multi-episode result to check for later - result contains episodes: {0}'
                    .format(parse_result.episode_numbers), logger.DEBUG)
            elif len(episode_object) == 0:
                episode_number = SEASON_RESULT
                logger.log(u'Separating full season result to check for later',
                           logger.DEBUG)

            if episode_number not in results:
                results[episode_number] = [result]
            else:
                results[episode_number].append(result)

        if cl:
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            cache_db = self.cache._getDB()
            cache_db.mass_action(cl)

        return results
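
The pre-parse ordering step in the method above groups raw items by detected quality, walks the buckets best-first, and appends unknown-quality items last. A hedged standalone version of just that step (the integer quality scale is an assumption for the sketch):

from itertools import chain

UNKNOWN_QUALITY = 0  # assumed sentinel for an unrecognised quality

def order_by_quality(items_with_quality):
    """Return items grouped by quality, best quality first, unknown-quality items last."""
    buckets, unknown_items = {}, []
    for quality, item in items_with_quality:
        if quality == UNKNOWN_QUALITY:
            unknown_items.append(item)
        else:
            buckets.setdefault(quality, []).append(item)
    ordered = list(chain(*[v for _, v in sorted(buckets.items(), reverse=True)]))
    return ordered + unknown_items

# order_by_quality([(720, 'a'), (0, 'b'), (1080, 'c')]) -> ['c', 'a', 'b']
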
Example 18
def split_result(obj):
    """
    Split obj into separate episodes.

    :param obj: to search for results
    :return: a list of episode objects or an empty list
    """
    url_data = helpers.getURL(obj.url, session=requests.Session(), need_bytes=True)
    if url_data is None:
        logger.log(u"Unable to load url " + obj.url + ", can't download season NZB", logger.ERROR)
        return []

    # parse the season ep name
    try:
        parsed_obj = NameParser(False, showObj=obj.show).parse(obj.name)
    except (InvalidNameException, InvalidShowException) as error:
        logger.log(u"{}".format(error), logger.DEBUG)
        return []

    # bust it up
    season = 1 if parsed_obj.season_number is None else parsed_obj.season_number

    separate_nzbs, xmlns = get_season_nzbs(obj.name, url_data, season)

    result_list = []

    # TODO: Re-evaluate this whole section
    #   If we have valid results and hit an exception, we ignore the results found so far.
    #   Maybe we should return the results found or possibly continue with the next iteration of the loop
    #   Also maybe turn this into a function and generate the results_list with a list comprehension instead
    for new_nzb in separate_nzbs:
        logger.log(u"Split out " + new_nzb + " from " + obj.name, logger.DEBUG)  # pylint: disable=no-member

        # parse the name
        try:
            parsed_obj = NameParser(False, showObj=obj.show).parse(new_nzb)
        except (InvalidNameException, InvalidShowException) as error:
            logger.log(u"{}".format(error), logger.DEBUG)
            return []

        # make sure the result is sane
        if (parsed_obj.season_number != season) or (parsed_obj.season_number is None and season != 1):
            # pylint: disable=no-member
            logger.log(u"Found " + new_nzb + " inside " + obj.name + " but it doesn't seem to belong to the same season, ignoring it",
                       logger.WARNING)
            continue
        elif len(parsed_obj.episode_numbers) == 0:
            # pylint: disable=no-member
            logger.log(u"Found " + new_nzb + " inside " + obj.name + " but it doesn't seem to be a valid episode NZB, ignoring it",
                       logger.WARNING)
            continue

        want_ep = True
        for ep_num in parsed_obj.episode_numbers:
            if not obj.extraInfo[0].wantEpisode(season, ep_num, obj.quality):
                logger.log(u"Ignoring result: " + new_nzb, logger.DEBUG)
                want_ep = False
                break
        if not want_ep:
            continue

        # get all the associated episode objects
        ep_obj_list = [obj.extraInfo[0].getEpisode(season, ep) for ep in parsed_obj.episode_numbers]

        # make a result
        cur_obj = classes.NZBDataSearchResult(ep_obj_list)
        cur_obj.name = new_nzb
        cur_obj.provider = obj.provider
        cur_obj.quality = obj.quality
        cur_obj.extraInfo = [create_nzb_string(separate_nzbs[new_nzb], xmlns)]

        result_list.append(cur_obj)

    return result_list
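
The per-NZB sanity check in the loop above rejects season mismatches and names with no episode numbers before any result object is built. Restated on its own, with plain arguments standing in for the NameParser output:

def is_sane_episode_nzb(parsed_season, parsed_episodes, expected_season):
    """Mirror the checks above: drop season mismatches and entries without episode numbers."""
    if parsed_season != expected_season or (parsed_season is None and expected_season != 1):
        return False
    return len(parsed_episodes) > 0

# is_sane_episode_nzb(1, [3, 4], 1) -> True
# is_sane_episode_nzb(2, [3], 1)    -> False
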
Example 19
    def findEpisode(self, episode, manualSearch=False):

        logger.log(u"Searching " + self.name + " for " + episode.prettyName())

        self.cache.updateCache()
        results = self.cache.searchCache(episode, manualSearch)
        logger.log(u"Cache results: " + str(results), logger.DEBUG)

        # if we got some results then use them no matter what.
        # OR
        # return anyway unless we're doing a manual search
        if results or not manualSearch:
            return results

        data = self.getURL(self.search_url,
                           post_data=self._make_post_data_JSON(
                               show=episode.show, episode=episode))

        if not data:
            logger.log(u"No data returned from " + self.search_url,
                       logger.ERROR)
            return []

        parsedJSON = helpers.parse_json(data)

        if parsedJSON is None:
            logger.log(u"Error trying to load " + self.name + " JSON data",
                       logger.ERROR)
            return []

        if self._checkAuthFromData(parsedJSON):
            results = []

            if parsedJSON and 'data' in parsedJSON:
                items = parsedJSON['data']
            else:
                logger.log(
                    u"Resulting JSON from " + self.name +
                    " isn't correct, not parsing it", logger.ERROR)
                items = []

            for item in items:

                (title, url) = self._get_title_and_url(item)

                # parse the file name
                try:
                    myParser = NameParser()
                    parse_result = myParser.parse(title)
                except InvalidNameException:
                    logger.log(
                        u"Unable to parse the filename " + title +
                        " into a valid episode", logger.WARNING)
                    continue

                if episode.show.air_by_date:
                    if parse_result.air_date != episode.airdate:
                        logger.log(
                            u"Episode " + title + " didn't air on " +
                            str(episode.airdate) + ", skipping it",
                            logger.DEBUG)
                        continue
                elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                    logger.log(
                        u"Episode " + title + " isn't " + str(episode.season) +
                        "x" + str(episode.episode) + ", skipping it",
                        logger.DEBUG)
                    continue

                quality = self.getQuality(item)

                if not episode.show.wantEpisode(episode.season,
                                                episode.episode, quality,
                                                manualSearch):
                    logger.log(
                        u"Ignoring result " + title +
                        " because we don't want an episode that is " +
                        Quality.qualityStrings[quality], logger.DEBUG)
                    continue

                logger.log(u"Found result " + title + " at " + url,
                           logger.DEBUG)

                result = self.getResult([episode])
                result.url = url
                result.name = title
                result.quality = quality

                results.append(result)

        return results
Example 20
    def execute(self):

        backupDatabase(11)

        if not self.hasColumn("tv_episodes", "file_size"):
            self.addColumn("tv_episodes", "file_size")

        if not self.hasColumn("tv_episodes", "release_name"):
            self.addColumn("tv_episodes", "release_name", "TEXT", "")

        ep_results = self.connection.select(
            "SELECT episode_id, location, file_size FROM tv_episodes")

        logger.log(
            u"Adding file size to all episodes in DB, please be patient")
        for cur_ep in ep_results:
            if not cur_ep["location"]:
                continue

            # if there is no size yet then populate it for us
            if (not cur_ep["file_size"]
                    or not int(cur_ep["file_size"])) and ek.ek(
                        os.path.isfile, cur_ep["location"]):
                cur_size = ek.ek(os.path.getsize, cur_ep["location"])
                self.connection.action(
                    "UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
                    [cur_size, int(cur_ep["episode_id"])])

        # check each snatch to see if we can use it to get a release name
        history_results = self.connection.select(
            "SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")

        logger.log(u"Adding release name to all episodes still in history")
        for cur_result in history_results:
            # find the associated download, if there isn't one then ignore it
            download_results = self.connection.select(
                "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
                [
                    cur_result["showid"], cur_result["season"],
                    cur_result["episode"], cur_result["date"]
                ])
            if not download_results:
                logger.log(
                    u"Found a snatch in the history for " +
                    cur_result["resource"] +
                    " but couldn't find the associated download, skipping it",
                    logger.DEBUG)
                continue

            nzb_name = cur_result["resource"]
            file_name = ek.ek(os.path.basename,
                              download_results[0]["resource"])

            # take the extension off the filename, it's not needed
            if '.' in file_name:
                file_name = file_name.rpartition('.')[0]

            # find the associated episode on disk
            ep_results = self.connection.select(
                "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
                [
                    cur_result["showid"], cur_result["season"],
                    cur_result["episode"]
                ])
            if not ep_results:
                logger.log(
                    u"The episode " + nzb_name +
                    " was found in history but doesn't exist on disk anymore, skipping",
                    logger.DEBUG)
                continue

            # get the status/quality of the existing ep and make sure it's what we expect
            ep_status, ep_quality = common.Quality.splitCompositeStatus(
                int(ep_results[0]["status"]))
            if ep_status != common.DOWNLOADED:
                continue

            if ep_quality != int(cur_result["quality"]):
                continue

            # make sure this is actually a real release name and not a season pack or something
            for cur_name in (nzb_name, file_name):
                logger.log(
                    u"Checking if " + cur_name +
                    " is actually a good release name", logger.DEBUG)
                try:
                    np = NameParser(False)
                    parse_result = np.parse(cur_name)
                except InvalidNameException:
                    continue

                if parse_result.series_name and parse_result.season_number is not None \
                        and parse_result.episode_numbers and parse_result.release_group:
                    # if all is well by this point we'll just put the release name into the database
                    self.connection.action(
                        "UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                        [cur_name, ep_results[0]["episode_id"]])
                    break

        # check episodes that have no release name to see if their filenames are usable scene names
        empty_results = self.connection.select(
            "SELECT episode_id, location FROM tv_episodes WHERE release_name = ''"
        )

        logger.log(
            u"Adding release name to all episodes with obvious scene filenames"
        )
        for cur_result in empty_results:

            ep_file_name = ek.ek(os.path.basename, cur_result["location"])
            ep_file_name = os.path.splitext(ep_file_name)[0]

            # only want to find real scene names here so anything with a space in it is out
            if ' ' in ep_file_name:
                continue

            try:
                np = NameParser(False)
                parse_result = np.parse(ep_file_name)
            except InvalidNameException:
                continue

            if not parse_result.release_group:
                continue

            logger.log(
                u"Name " + ep_file_name + " gave release group of " +
                parse_result.release_group + ", seems valid", logger.DEBUG)
            self.connection.action(
                "UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                [ep_file_name, cur_result["episode_id"]])

        self.incDBVersion()
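
The file-size backfill loop at the top of the migration only touches rows that have an on-disk location but no stored size yet. A hedged standalone sketch against a toy schema (the column set below is assumed purely for illustration):

import os
import sqlite3

def backfill_file_sizes(conn):
    """Fill in file_size for episodes that have an on-disk location but no size yet."""
    cur = conn.cursor()
    cur.execute('SELECT episode_id, location, file_size FROM tv_episodes')
    for episode_id, location, file_size in cur.fetchall():
        if not location:
            continue
        if (not file_size or not int(file_size)) and os.path.isfile(location):
            conn.execute('UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?',
                         [os.path.getsize(location), int(episode_id)])

# Toy schema for the sketch:
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (episode_id INTEGER, location TEXT, file_size INTEGER)')
backfill_file_sizes(conn)
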
Example 21
    def _analyze_name(self, name, file=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.
        
        Returns a (tvdb_id, season, [episodes]) tuple. The first two may be None and episodes may be []
        if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [])

        if not name:
            return to_return

        # parse the name to break it into show name, season, and episode
        np = NameParser(file)
        parse_result = np.parse(name)
        self._log(
            "Parsed " + name + " into " + str(parse_result).decode('utf-8'),
            logger.DEBUG)

        if parse_result.air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (None, season, episodes)

        # do a scene reverse-lookup to get a list of all possible names
        name_list = sceneHelpers.sceneToNormalShowNames(
            parse_result.series_name)

        if not name_list:
            return (None, season, episodes)

        def _finalize(parse_result):
            self.release_group = parse_result.release_group
            if parse_result.extra_info:
                self.is_proper = re.search(
                    '(^|[\. _-])(proper|repack)([\. _-]|$)',
                    parse_result.extra_info, re.I) != None

        # for each possible interpretation of that scene name
        for cur_name in name_list:
            self._log(u"Checking scene exceptions for a match on " + cur_name,
                      logger.DEBUG)
            for exceptionID in common.sceneExceptions:
                # for each exception name
                for curException in common.sceneExceptions[exceptionID]:
                    if cur_name.lower() in (curException.lower(),
                                            sceneHelpers.sanitizeSceneName(
                                                curException).lower().replace(
                                                    '.', ' ')):
                        self._log(
                            u"Scene exception lookup got tvdb id " +
                            str(exceptionID) + u", using that", logger.DEBUG)
                        _finalize(parse_result)
                        return (exceptionID, season, episodes)

        # see if we can find the name directly in the DB, if so use it
        for cur_name in name_list:
            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
            db_result = helpers.searchDBForShow(cur_name)
            if db_result:
                self._log(
                    u"Lookup successful, using tvdb id " + str(db_result[0]),
                    logger.DEBUG)
                _finalize(parse_result)
                return (int(db_result[0]), season, episodes)

        # see if we can find the name with a TVDB lookup
        for cur_name in name_list:
            try:
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                  **sickbeard.TVDB_API_PARMS)

                self._log(u"Looking up name " + cur_name + u" on TVDB",
                          logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception):
                # if none found, search on all languages
                try:
                    # There's gotta be a better way of doing this but we don't wanna
                    # change the language value elsewhere
                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                    ltvdb_api_parms['search_all_languages'] = True
                    t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                      **ltvdb_api_parms)

                    self._log(
                        u"Looking up name " + cur_name +
                        u" in all languages on TVDB", logger.DEBUG)
                    showObj = t[cur_name]
                except (tvdb_exceptions.tvdb_exception, IOError):
                    pass

                continue
Example 22
def validateDir(path, dirName, nzbNameOriginal, failed, result):
    """
    Check if directory is valid for processing

    :param path: Path to use
    :param dirName: Directory to check
    :param nzbNameOriginal: Original NZB name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    IGNORED_FOLDERS = ['.AppleDouble', '.@__thumb', '@eaDir']
    folder_name = ek(os.path.basename, dirName)
    if folder_name in IGNORED_FOLDERS:
        return False

    result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    if folder_name.startswith('_FAILED_'):
        result.output += logHelper(
            u"The directory name indicates it failed to extract.",
            logger.DEBUG)
        failed = True
    elif folder_name.startswith('_UNDERSIZED_'):
        result.output += logHelper(
            u"The directory name indicates that it was previously rejected for being undersized.",
            logger.DEBUG)
        failed = True
    elif folder_name.upper().startswith('_UNPACK'):
        result.output += logHelper(
            u"The directory name indicates that this release is in the process of being unpacked.",
            logger.DEBUG)
        result.missedfiles.append(dirName + " : Being unpacked")
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal, result)
        result.missedfiles.append(dirName + " : Failed download")
        return False

    if helpers.is_hidden_folder(os.path.join(path, dirName)):
        result.output += logHelper(u"Ignoring hidden folder: " + dirName,
                                   logger.DEBUG)
        result.missedfiles.append(dirName + " : Hidden folder")
        return False

    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")

    for sqlShow in sqlResults:
        if dirName.lower().startswith(ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or \
            dirName.lower() == ek(os.path.realpath, sqlShow["location"]).lower():

            result.output += logHelper(
                u"Cannot process an episode that's already been moved to its show dir, skipping "
                + dirName, logger.WARNING)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for _, processdir, fileList in ek(os.walk,
                                      ek(os.path.join, path, dirName),
                                      topdown=False):
        allDirs += processdir
        allFiles += fileList

    videoFiles = [x for x in allFiles if helpers.isMediaFile(x)]
    allDirs.append(dirName)

    # check if the dir has at least one tv video file
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    for proc_dir in allDirs:
        try:
            NameParser().parse(proc_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickbeard.UNPACK:
        # Search for packed release
        packedFiles = [x for x in allFiles if helpers.isRarFile(x)]

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    result.output += logHelper(
        dirName + " : No processable items found in folder", logger.DEBUG)
    return False
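
The directory check above boils down to: return True on the first name that parses as an episode, trying video files, then directories, then packed files. A generic hedged version that takes any parser callable, so it makes no claim about NameParser's real interface:

def has_processable_item(candidate_groups, try_parse):
    """Return True for the first candidate, across ordered groups, that parses cleanly.

    try_parse is any callable that raises ValueError for unparseable names; it is a
    stand-in for the project's parser, whose exception types differ.
    """
    for group in candidate_groups:
        for name in group:
            try:
                try_parse(name)
                return True
            except ValueError:
                continue
    return False

def toy_parse(name):
    if 'S01E' not in name.upper():
        raise ValueError(name)

print(has_processable_item([['readme.txt'], ['Show.S01E05.mkv']], toy_parse))  # True
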
Example 23
    def searchProviders(self, show, season, episode=None, manualSearch=False):
        itemList = []
        results = {}

        logger.log(u"Searching for stuff we need from " + show.name +
                   " season " + str(season))

        # gather all episodes for the season, then pick out the wanted ones to determine
        # whether we want the whole season or just a few episodes
        if episode is None:
            seasonEps = self.show.getAllEpisodes(season)
            wantedEps = [
                x for x in seasonEps
                if self.show.getOverview(x.status) in (Overview.WANTED,
                                                       Overview.QUAL)
            ]
        else:
            wantedEps = [show.getEpisode(season, episode)]

        for ep_obj in wantedEps:
            season = ep_obj.scene_season
            episode = ep_obj.scene_episode

            self.cache.updateCache()
            results = self.cache.searchCache(episode, manualSearch)
            logger.log(u"Cache results: " + str(results), logger.DEBUG)

            # if we got some results then use them no matter what.
            # OR
            # return anyway unless we're doing a manual search
            if results or not manualSearch:
                return results

            itemList += self.getURL(self.search_url,
                                    post_data=self._make_post_data_JSON(
                                        show=show,
                                        season=season,
                                        episode=episode),
                                    json=True)

        for parsedJSON in itemList:
            if not parsedJSON:
                logger.log(u"No data returned from " + self.search_url,
                           logger.ERROR)
                return []

            if self._checkAuthFromData(parsedJSON):
                results = []

                if parsedJSON and 'data' in parsedJSON:
                    items = parsedJSON['data']
                else:
                    logger.log(
                        u"Resulting JSON from " + self.name +
                        " isn't correct, not parsing it", logger.ERROR)
                    items = []

                for item in items:

                    (title, url) = self._get_title_and_url(item)

                    # parse the file name
                    try:
                        myParser = NameParser()
                        parse_result = myParser.parse(title)
                    except InvalidNameException:
                        logger.log(
                            u"Unable to parse the filename " + title +
                            " into a valid episode", logger.WARNING)
                        continue

                    if episode.show.air_by_date or episode.sports:
                        if parse_result.air_date != episode.airdate:
                            logger.log(
                                u"Episode " + title + " didn't air on " +
                                str(episode.airdate) + ", skipping it",
                                logger.DEBUG)
                            continue
                    elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                        logger.log(
                            u"Episode " + title + " isn't " +
                            str(episode.season) + "x" + str(episode.episode) +
                            ", skipping it", logger.DEBUG)
                        continue

                    quality = self.getQuality(item)

                    if not episode.show.wantEpisode(episode.season,
                                                    episode.episode, quality,
                                                    manualSearch):
                        logger.log(
                            u"Ignoring result " + title +
                            " because we don't want an episode that is " +
                            Quality.qualityStrings[quality], logger.DEBUG)
                        continue

                    logger.log(u"Found result " + title + " at " + url,
                               logger.DEBUG)

                    result = self.getResult([episode])
                    result.url = url
                    result.name = title
                    result.quality = quality

                    results.append(result)

        return results
Example 24
    def findEpisode(self, episode, manualSearch=False):

        self._checkAuth()

        # create a copy of the episode, using scene numbering
        episode_scene = copy.copy(episode)
        episode_scene.convertToSceneNumbering()

        logger.log(u'Searching "%s" for "%s" as "%s"'
                   % (self.name, episode.prettyName(), episode_scene.prettyName()))

        self.cache.updateCache()
        results = self.cache.searchCache(episode_scene, manualSearch)
        logger.log(u"Cache results: " + str(results), logger.DEBUG)
        logger.log(u"manualSearch: " + str(manualSearch), logger.DEBUG)

        # if we got some results then use them no matter what.
        # OR
        # return anyway unless we're doing a manual search
        if results or not manualSearch:
            return results

        itemList = []

        for cur_search_string in self._get_episode_search_strings(episode_scene):
            itemList += self._doSearch(cur_search_string, show=episode.show)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            if self.urlIsBlacklisted(url):
                logger.log(u'Ignoring %s as the url %s is blacklisted' % (title, url), logger.DEBUG)
                continue

            # parse the file name
            try:
                myParser = NameParser()
                parse_result = myParser.parse(title, fix_scene_numbering=True)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                continue

            if episode.show.air_by_date:
                if parse_result.air_date != episode.airdate:
                    logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
                    continue

            elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
                continue

            quality = self.getQuality(item)

            if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            result = self.getResult([episode])
            result.url = url
            result.name = title
            result.quality = quality

            results.append(result)

        return results
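
A minimal sketch of the cache-first control flow above: return cached hits if there are any, return the empty cache result for automatic runs, and only fall through to a live provider search when the user searched manually. The cache and provider objects below are hypothetical stand-ins, not the real SickBeard API.

def cached_search(cache, provider, query, manual_search=False):
    results = cache.get(query, [])
    # cached hits win; an empty cache only triggers a live search for manual runs
    if results or not manual_search:
        return results
    return provider(query)

# usage with plain stand-ins
cache = {"show s01e01": []}
provider = lambda q: ["fresh result for " + q]
print(cached_search(cache, provider, "show s01e01"))                      # []
print(cached_search(cache, provider, "show s01e01", manual_search=True))  # ['fresh result for show s01e01']
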
Example no. 25
    def _analyze_name(self, name, file=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.
        
        name: A string which we want to analyze to determine show info from (unicode)
        
        Returns a (tvdb_id, season, [episodes]) tuple. The first two may be None and episodes may be []
        if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [])

        if not name:
            return to_return

        # parse the name to break it into show name, season, and episode
        np = NameParser(file)
        parse_result = np.parse(name)
        self._log(
            "Parsed " + name + " into " + str(parse_result).decode('utf-8'),
            logger.DEBUG)

        if parse_result.air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (None, season, episodes)

        # do a scene reverse-lookup to get a list of all possible names
        name_list = show_name_helpers.sceneToNormalShowNames(
            parse_result.series_name)

        if not name_list:
            return (None, season, episodes)

        def _finalize(parse_result):
            self.release_group = parse_result.release_group

            # remember whether it's a proper
            if parse_result.extra_info:
                self.is_proper = re.search(
                    r'(^|[\. _-])(proper|repack)([\. _-]|$)',
                    parse_result.extra_info, re.I) is not None

            # if the result is complete then remember that for later
            if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
                test_name = os.path.basename(name)
                if test_name == self.nzb_name:
                    self.good_results[self.NZB_NAME] = True
                elif test_name == self.folder_name:
                    self.good_results[self.FOLDER_NAME] = True
                elif test_name == self.file_name:
                    self.good_results[self.FILE_NAME] = True
                else:
                    logger.log(u"Nothing was good, found " + repr(test_name) +
                               " and wanted either " + repr(self.nzb_name) +
                               ", " + repr(self.folder_name) + ", or " +
                               repr(self.file_name))
            else:
                logger.log(
                    "Parse result not sufficient (all of the following have to be set); will not save release name",
                    logger.DEBUG)
                logger.log(
                    "Parse result(series_name): " +
                    str(parse_result.series_name), logger.DEBUG)
                logger.log(
                    "Parse result(season_number): " +
                    str(parse_result.season_number), logger.DEBUG)
                logger.log(
                    "Parse result(episode_numbers): " +
                    str(parse_result.episode_numbers), logger.DEBUG)
                logger.log(
                    "Parse result(release_group): " +
                    str(parse_result.release_group), logger.DEBUG)

        # for each possible interpretation of that scene name
        for cur_name in name_list:
            self._log(u"Checking scene exceptions for a match on " + cur_name,
                      logger.DEBUG)
            scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
            if scene_id:
                self._log(
                    u"Scene exception lookup got tvdb id " + str(scene_id) +
                    u", using that", logger.DEBUG)
                _finalize(parse_result)
                return (scene_id, season, episodes)

        # see if we can find the name directly in the DB, if so use it
        for cur_name in name_list:
            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
            db_result = helpers.searchDBForShow(cur_name)
            if db_result:
                self._log(
                    u"Lookup successful, using tvdb id " + str(db_result[0]),
                    logger.DEBUG)
                _finalize(parse_result)
                return (int(db_result[0]), season, episodes)

        # see if we can find the name with a TVDB lookup
        for cur_name in name_list:
            try:
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                  **sickbeard.TVDB_API_PARMS)

                self._log(u"Looking up name " + cur_name + u" on TVDB",
                          logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception):
                # if none found, search on all languages
                try:
                    # There's gotta be a better way of doing this but we don't wanna
                    # change the language value elsewhere
                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                    ltvdb_api_parms['search_all_languages'] = True
                    t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                      **ltvdb_api_parms)

                    self._log(
                        u"Looking up name " + cur_name +
                        u" in all languages on TVDB", logger.DEBUG)
                    showObj = t[cur_name]
                except (tvdb_exceptions.tvdb_exception, IOError):
                    pass

                continue
            except (IOError):
                continue

            self._log(
                u"Lookup successful, using tvdb id " + str(showObj["id"]),
                logger.DEBUG)
            _finalize(parse_result)
            return (int(showObj["id"]), season, episodes)

        _finalize(parse_result)
        return to_return
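
A minimal sketch of the three-stage name resolution in _analyze_name: try the scene-exception table, then the local DB, then the remote indexer, returning the first id found for any candidate name. The lookup callables are hypothetical stand-ins for the real helpers.

def resolve_show_id(name_candidates, scene_lookup, db_lookup, remote_lookup):
    # each stage is tried against every candidate name before falling through
    for lookup in (scene_lookup, db_lookup, remote_lookup):
        for name in name_candidates:
            show_id = lookup(name)
            if show_id:
                return show_id
    return None

# usage with dict-backed stand-ins
scene_ids = {"show us": 12345}
print(resolve_show_id(["show", "show us"], scene_ids.get, {}.get, {}.get))  # 12345
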
Example no. 26
    def _getProperList(self):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        """
        Walk providers for propers
        """
        propers = {}

        search_date = datetime.datetime.today() - datetime.timedelta(days=2)

        # for each provider get a list of propers
        origThreadName = threading.currentThread().name
        providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active()]
        for curProvider in providers:
            threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

            logger.log(u"Searching for any new PROPER releases from " + curProvider.name)

            try:
                curPropers = curProvider.find_propers(search_date)
            except AuthException as e:
                logger.log(u"Authentication error: " + ex(e), logger.WARNING)
                continue
            except Exception as e:
                logger.log(u"Exception while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                continue

            # if they haven't been added by a different provider then add the proper to the list
            for x in curPropers:
                if not re.search(r'\b(proper|repack|real)\b', x.name, re.I):
                    logger.log(u'find_propers returned a non-proper, we have caught and skipped it.', logger.DEBUG)
                    continue

                name = self._genericName(x.name)
                if name not in propers:
                    logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x

            threading.currentThread().name = origThreadName

        # take the list of unique propers and sort it by date, newest first
        sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
        finalPropers = []

        for curProper in sortedPropers:

            try:
                parse_result = NameParser(False).parse(curProper.name)
            except (InvalidNameException, InvalidShowException) as error:
                logger.log(u"{0}".format(error), logger.DEBUG)
                continue

            if not parse_result.series_name:
                continue

            if not parse_result.episode_numbers:
                logger.log(
                    u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
                    logger.DEBUG)
                continue

            logger.log(
                u"Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name,
                logger.DEBUG)

            # set the indexerid in the db to the show's indexerid
            curProper.indexerid = parse_result.show.indexerid

            # set the indexer in the db to the show's indexer
            curProper.indexer = parse_result.show.indexer

            # populate our Proper instance
            curProper.show = parse_result.show
            curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
            curProper.episode = parse_result.episode_numbers[0]
            curProper.release_group = parse_result.release_group
            curProper.version = parse_result.version
            curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
            curProper.content = None

            # filter release
            bestResult = pickBestResult(curProper, parse_result.show)
            if not bestResult:
                logger.log(u"Proper " + curProper.name + " was rejected by our release filters.", logger.DEBUG)
                continue

            # only get anime proper if it has release group and version
            if bestResult.show.is_anime:
                if not bestResult.release_group and bestResult.version == -1:
                    logger.log(u"Proper " + bestResult.name + " doesn't have a release group and version, ignoring it",
                               logger.DEBUG)
                    continue

            # check if we actually want this proper (if it's the right quality)
            main_db_con = db.DBConnection()
            sql_results = main_db_con.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                                             [bestResult.indexerid, bestResult.season, bestResult.episode])
            if not sql_results:
                continue

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            oldStatus, oldQuality = Quality.splitCompositeStatus(int(sql_results[0]["status"]))
            if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
                continue

            # check if we actually want this proper (if it's the right release group and a higher version)
            if bestResult.show.is_anime:
                main_db_con = db.DBConnection()
                sql_results = main_db_con.select(
                    "SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                    [bestResult.indexerid, bestResult.season, bestResult.episode])

                oldVersion = int(sql_results[0]["version"])
                oldRelease_group = (sql_results[0]["release_group"])

                if -1 < oldVersion < bestResult.version:
                    logger.log(u"Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
                else:
                    continue

                if oldRelease_group != bestResult.release_group:
                    logger.log(u"Skipping proper from release group: " + bestResult.release_group + ", does not match existing release group: " + oldRelease_group)
                    continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in {(p.indexerid, p.season, p.episode) for p in finalPropers}:
                logger.log(u"Found a proper that we need: " + str(bestResult.name))
                finalPropers.append(bestResult)

        return finalPropers
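
A minimal sketch of the proper handling above: normalise each release name, keep the first result per normalised name (so a second provider cannot re-add the same proper), then sort newest-first by date. The Proper tuple and generic_name helper are stand-ins for the real result objects and _genericName.

import operator
import re
from collections import namedtuple
from datetime import datetime

Proper = namedtuple("Proper", "name date")

def generic_name(name):
    # collapse separators so dotted and spaced variants dedupe together
    return re.sub(r"[._\- ]+", " ", name).strip().lower()

def unique_propers(candidates):
    seen = {}
    for proper in candidates:
        key = generic_name(proper.name)
        if key not in seen:
            seen[key] = proper
    return sorted(seen.values(), key=operator.attrgetter("date"), reverse=True)

propers = [Proper("Show.S01E01.PROPER.720p", datetime(2023, 1, 2)),
           Proper("Show S01E01 PROPER 720p", datetime(2023, 1, 1))]
print([p.name for p in unique_propers(propers)])  # only the first variant survives
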
Example no. 27
    def _find_season_quality(self, title, torrent_link, ep_number):
        """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

        mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',
                           'vob', 'dvr-ms', 'wtv', 'ts',
                           'ogv', 'rar', 'zip', 'mp4']

        quality = Quality.UNKNOWN

        fileName = None

        data = self.getURL(torrent_link)
        if not data:
            return None

        try:
            with BS4Parser(data, features=["html5lib", "permissive"]) as soup:
                file_table = soup.find('table', attrs={'class': 'torrentFileList'})

                if not file_table:
                    return None

                files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
                videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

                #Filtering SingleEpisode/MultiSeason Torrent
                if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
                    logger.log(u"Result " + title + " expects " + str(
                        ep_number) + " episodes but the torrent contains " + str(len(videoFiles)) + " video files", logger.DEBUG)
                    logger.log(
                        u"Result " + title + " seems to be a single-episode or multi-season torrent, skipping result...",
                        logger.DEBUG)
                    return None

                if Quality.sceneQuality(title) != Quality.UNKNOWN:
                    return title

                for fileName in videoFiles:
                    quality = Quality.sceneQuality(os.path.basename(fileName))
                    if quality != Quality.UNKNOWN:
                        break

                if fileName is not None and quality == Quality.UNKNOWN:
                    quality = Quality.assumeQuality(os.path.basename(fileName))

                if quality == Quality.UNKNOWN:
                    logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
                    return None

                try:
                    myParser = NameParser(showObj=self.show)
                    parse_result = myParser.parse(fileName)
                except (InvalidNameException, InvalidShowException):
                    return None

                logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

                if parse_result.series_name and parse_result.season_number:
                    title = parse_result.series_name + ' S%02d' % int(
                        parse_result.season_number) + ' ' + self._reverseQuality(quality)

                return title

        except Exception as e:
            logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
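
A minimal sketch of the torrent file-list probe above. Note that the original extension list was missing a comma between 'ts' and 'ogv', so Python silently concatenated them into 'tsogv'; the corrected list is used here. quality_of is a hypothetical stand-in for Quality.sceneQuality.

import os

MEDIA_EXTENSIONS = {'avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts',
                    'ogv', 'rar', 'zip', 'mp4'}

def first_known_quality(file_names, quality_of, unknown="UNKNOWN"):
    # keep only names whose extension looks like a media file
    videos = [f for f in file_names if f.rpartition(".")[2].lower() in MEDIA_EXTENSIONS]
    for name in videos:
        quality = quality_of(os.path.basename(name))
        if quality != unknown:
            return quality
    return unknown

# usage with a toy quality function keyed on the filename
probe = lambda name: "720p HDTV" if "720p" in name else "UNKNOWN"
print(first_known_quality(["Show.S01E01.720p.mkv", "readme.txt"], probe))  # 720p HDTV
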
Example no. 28
    def _validate_dir(self, path, dir_name, nzb_name_original, failed):

        self._log_helper(u'Processing dir: ' + dir_name)

        if ek.ek(os.path.basename, dir_name).startswith('_FAILED_'):
            self._log_helper(u'The directory name indicates it failed to extract.')
            failed = True
        elif ek.ek(os.path.basename, dir_name).startswith('_UNDERSIZED_'):
            self._log_helper(u'The directory name indicates that it was previously rejected for being undersized.')
            failed = True
        elif ek.ek(os.path.basename, dir_name).upper().startswith('_UNPACK'):
            self._log_helper(u'The directory name indicates that this release is in the process of being unpacked.')
            return False

        if failed:
            self._process_failed(os.path.join(path, dir_name), nzb_name_original)
            return False

        if helpers.is_hidden_folder(dir_name):
            self._log_helper(u'Ignoring hidden folder: ' + dir_name)
            return False

        # make sure the directory isn't inside a show directory
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM tv_shows')

        for sqlShow in sql_results:
            if dir_name.lower().startswith(ek.ek(os.path.realpath, sqlShow['location']).lower() + os.sep)\
                    or dir_name.lower() == ek.ek(os.path.realpath, sqlShow['location']).lower():
                self._log_helper(
                    u'Found an episode that has already been moved to its show dir, skipping',
                    logger.ERROR)
                return False

        # Get the videofile list for the next checks
        all_files = []
        all_dirs = []
        for process_path, process_dir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir_name), topdown=False):
            all_dirs += process_dir
            all_files += fileList

        video_files = filter(helpers.isMediaFile, all_files)
        all_dirs.append(dir_name)

        # check if the directory has at least one TV video file
        for video in video_files:
            try:
                NameParser().parse(video, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

        for directory in all_dirs:
            try:
                NameParser().parse(directory, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

        if sickbeard.UNPACK:
            # Search for packed release
            packed_files = filter(helpers.isRarFile, all_files)

            for packed in packed_files:
                try:
                    NameParser().parse(packed, cache_result=False)
                    return True
                except (InvalidNameException, InvalidShowException):
                    pass

        return False
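
A minimal sketch of the "does this directory contain anything that parses as an episode" check above, with a toy SxxEyy regex standing in for NameParser; only the walk-and-try control flow mirrors the original. In the real method the same parse loop runs separately over video files, over directory names, and (when unpacking is enabled) over RAR archives.

import os
import re

EPISODE_RE = re.compile(r"s\d{1,2}e\d{1,3}", re.I)

def dir_has_episode(path):
    for root, dirs, files in os.walk(path, topdown=False):
        # try every file and every sub-directory name until one looks like an episode
        for candidate in files + dirs:
            if EPISODE_RE.search(candidate):
                return True
    return False
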
Example no. 29
    def findEpisode(self, episode, manualSearch=False):

        self._checkAuth()

        logger.log(u"Searching " + self.name + " for " + episode.prettyName())

        self.cache.updateCache()
        results = self.cache.searchCache(episode, manualSearch)
        logger.log(u"Cache results: " + str(results), logger.DEBUG)

        # if we got some results then use them no matter what.
        # OR
        # return anyway unless we're doing a manual search
        if results or not manualSearch:
            return results

        itemList = []

        for cur_search_string in self._get_episode_search_strings(episode):
            itemList += self._doSearch(cur_search_string, show=episode.show)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                myParser = NameParser()
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + title +
                    " into a valid episode", logger.WARNING)
                continue

            language = self._get_language(title, item)

            if episode.show.air_by_date:
                if parse_result.air_date != episode.airdate:
                    logger.log(
                        "Episode " + title + " didn't air on " +
                        str(episode.airdate) + ", skipping it", logger.DEBUG)
                    continue
            elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                logger.log(
                    "Episode " + title + " isn't " + str(episode.season) +
                    "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
                continue

            quality = self.getQuality(item)

            if not episode.show.wantEpisode(episode.season, episode.episode,
                                            quality, manualSearch):
                logger.log(
                    u"Ignoring result " + title +
                    " because we don't want an episode that is " +
                    Quality.qualityStrings[quality], logger.DEBUG)
                continue

            if language != episode.show.audio_lang:
                logger.log(u"Ignoring result " + title +
                           " because the language: " +
                           showLanguages[language] +
                           " does not match the desired language: " +
                           showLanguages[episode.show.audio_lang])
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            result = self.getResult([episode])
            result.item = item
            if hasattr(item, 'getNZB'):
                result.extraInfo = [item.getNZB()]
            elif hasattr(item, 'extraInfo'):
                result.extraInfo = item.extraInfo
            result.url = url
            result.name = title
            result.quality = quality
            if hasattr(item, 'audio_langs'):
                result.audio_lang = ''.join(item.audio_langs)
            else:
                result.audio_lang = language
            results.append(result)

        return results
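
A minimal sketch of the audio-language gate above: keep a result only when its detected language matches the show's configured language. The code-to-name mapping is a hypothetical stand-in for showLanguages.

LANGUAGE_NAMES = {"en": "English", "fr": "French"}

def filter_by_language(results, wanted_lang):
    kept = []
    for title, lang in results:
        if lang != wanted_lang:
            # mirror the "does not match the desired language" log line above
            print("Ignoring %s: %s does not match %s" % (
                title, LANGUAGE_NAMES.get(lang, lang), LANGUAGE_NAMES.get(wanted_lang, wanted_lang)))
            continue
        kept.append((title, lang))
    return kept

print(filter_by_language([("Show.S01E01.FRENCH", "fr"), ("Show.S01E01", "en")], "en"))
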
Example no. 30
    def _analyze_name(self, name, file=True):
        """
        Takes a name and tries to figure out a show, season, and episode from it.
        
        name: A string which we want to analyze to determine show info from (unicode)
        
        Returns a (tvdb_id, season, [episodes]) tuple. The first two may be None and episodes may be []
        if none were found.
        """

        logger.log(u"Analyzing name " + repr(name))

        to_return = (None, None, [])

        if not name:
            return to_return

        trimprefix = [
            '^sof-', '^euhd-', '^amb-', '^itg-', '^idtv-', '^zzgtv-', '^itn-',
            '^tcpa-', '^tvp-'
        ]
        for regex in trimprefix:
            name = re.sub(regex, "", name)

        # parse the name to break it into show name, season, and episode
        np = NameParser(file)
        parse_result = np.parse(name)
        self._log(
            "Parsed " + name + " into " + str(parse_result).decode('utf-8'),
            logger.DEBUG)

        if parse_result.air_by_date:
            season = -1
            episodes = [parse_result.air_date]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        to_return = (None, season, episodes)

        # do a scene reverse-lookup to get a list of all possible names
        name_list = show_name_helpers.sceneToNormalShowNames(
            parse_result.series_name)

        if not name_list:
            return (None, season, episodes)

        def _finalize(parse_result):
            self.release_group = parse_result.release_group
            if parse_result.extra_info:
                self.is_proper = re.search(
                    r'(^|[\. _-])(proper|repack)([\. _-]|$)',
                    parse_result.extra_info, re.I) is not None

        # for each possible interpretation of that scene name
        for cur_name in name_list:
            self._log(u"Checking scene exceptions for a match on " + cur_name,
                      logger.DEBUG)
            scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
            if scene_id:
                self._log(
                    u"Scene exception lookup got tvdb id " + str(scene_id) +
                    u", using that", logger.DEBUG)
                _finalize(parse_result)
                return (scene_id, season, episodes)

        # see if we can find the name directly in the DB, if so use it
        for cur_name in name_list:
            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
            db_result = helpers.searchDBForShow(cur_name)
            if db_result:
                self._log(
                    u"Lookup successful, using tvdb id " + str(db_result[0]),
                    logger.DEBUG)
                _finalize(parse_result)
                return (int(db_result[0]), season, episodes)

        # see if we can find the name with a TVDB lookup
        for cur_name in name_list:
            try:
                t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                  **sickbeard.TVDB_API_PARMS)

                self._log(u"Looking up name " + cur_name + u" on TVDB",
                          logger.DEBUG)
                showObj = t[cur_name]
            except (tvdb_exceptions.tvdb_exception):
                # if none found, search on all languages
                try:
                    # There's gotta be a better way of doing this but we don't wanna
                    # change the language value elsewhere
                    ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                    ltvdb_api_parms['search_all_languages'] = True
                    t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI,
                                      **ltvdb_api_parms)

                    self._log(
                        u"Looking up name " + cur_name +
                        u" in all languages on TVDB", logger.DEBUG)
                    showObj = t[cur_name]
                except (tvdb_exceptions.tvdb_exception, IOError):
                    pass

                continue
            except (IOError):
                continue

            self._log(
                u"Lookup successful, using tvdb id " + str(showObj["id"]),
                logger.DEBUG)
            _finalize(parse_result)
            return (int(showObj["id"]), season, episodes)

        _finalize(parse_result)
        return to_return
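
A minimal sketch of the release-group prefix trimming above: every pattern is anchored with '^', so re.sub strips at most one leading group tag and leaves the rest of the release name (including a trailing group tag) untouched.

import re

TRIM_PREFIXES = ['^sof-', '^euhd-', '^amb-', '^itg-', '^idtv-', '^zzgtv-',
                 '^itn-', '^tcpa-', '^tvp-']

def strip_group_prefix(name):
    for pattern in TRIM_PREFIXES:
        name = re.sub(pattern, "", name)
    return name

print(strip_group_prefix("itg-show.s01e01.720p-itg"))  # show.s01e01.720p-itg
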