Example #1
def logSuccess(release):
    release = prepareFailedName(release)

    myDB = db.DBConnection('failed.db')
    myDB.action("DELETE FROM history WHERE RELEASE=?", [release])
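
A minimal usage sketch (assumptions: this helper lives next to prepareFailedName and db in sickbeard's failed-download history module, and the release name below is made up):

# scrub a release from the failed-download history once it finally succeeds
logSuccess("Some.Show.S01E01.720p.HDTV.x264-GRP")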
Example #2
def xem_refresh(indexer_id, indexer, force=False):
    """
    Refresh data from xem for a tv show
    
    @param indexer_id: int
    """
    if indexer_id is None:
        return

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    # XEM API URL
    url = "http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (
        indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])

    MAX_REFRESH_AGE_SECS = 86400  # 1 day

    myDB = db.DBConnection()
    rows = myDB.select(
        "SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?",
        [indexer, indexer_id])
    if rows:
        lastRefresh = int(rows[0]['last_refreshed'])
        now_ts = int(time.mktime(datetime.datetime.today().timetuple()))
        refresh = now_ts > lastRefresh + MAX_REFRESH_AGE_SECS
    else:
        refresh = True

    if refresh or force:
        logger.log(
            u'Looking up XEM scene mapping for show %s on %s' % (
                indexer_id,
                sickbeard.indexerApi(indexer).name,
            ), logger.DEBUG)

        # mark refreshed
        myDB.upsert(
            "xem_refresh",
            {'indexer': indexer,
             'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple()))},
            {'indexer_id': indexer_id})

        try:
            parsedJSON = sickbeard.helpers.getURL(url, json=True)
            if not parsedJSON:
                logger.log(
                    u'No XEM data for show %s on %s' % (
                        indexer_id,
                        sickbeard.indexerApi(indexer).name,
                    ), logger.MESSAGE)
                return

            if 'success' in parsedJSON['result']:
                cl = []
                for entry in parsedJSON['data']:
                    if 'scene' in entry:
                        cl.append([
                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
                            [
                                entry['scene']['season'],
                                entry['scene']['episode'],
                                entry['scene']['absolute'], indexer_id,
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['season'],
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['episode']
                            ]
                        ])
                    if 'scene_2' in entry:  # for doubles
                        cl.append([
                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
                            [
                                entry['scene_2']['season'],
                                entry['scene_2']['episode'],
                                entry['scene_2']['absolute'], indexer_id,
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['season'],
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['episode']
                            ]
                        ])

                if len(cl) > 0:
                    myDB = db.DBConnection()
                    myDB.mass_action(cl)
            else:
                logger.log(
                    u"Empty lookup result - no XEM data for show %s on %s" % (
                        indexer_id,
                        sickbeard.indexerApi(indexer).name,
                    ), logger.DEBUG)
        except Exception, e:
            logger.log(
                u"Exception while refreshing XEM data for show " +
                str(indexer_id) + " on " + sickbeard.indexerApi(indexer).name +
                ": " + ex(e), logger.WARNING)
            logger.log(traceback.format_exc(), logger.DEBUG)
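
A hedged call sketch (assumptions: sickbeard is fully initialized and indexer 1 denotes TheTVDB, as in sickbeard's indexer config; the show id is made up):

# force a re-fetch of the XEM scene-numbering map for one show
xem_refresh(80348, 1, force=True)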
Example #3
    def _downloadPropers(self, properList):

        for curProper in properList:

            historyLimit = datetime.datetime.today() - datetime.timedelta(
                days=30)

            # make sure the episode has been downloaded before
            myDB = db.DBConnection()
            historyResults = myDB.select(
                "SELECT resource FROM history "
                "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
                "AND action IN (" +
                ",".join([str(x) for x in Quality.SNATCHED]) + ")", [
                    curProper.tvdbid, curProper.season, curProper.episode,
                    curProper.quality,
                    historyLimit.strftime(history.dateFormat)
                ])

            # if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
            if len(historyResults) == 0:
                logger.log(
                    u"Unable to find an original history entry for proper " +
                    curProper.name + " so I'm not downloading it.")
                continue

            else:

                # make sure that none of the existing history downloads are the same proper we're trying to download
                isSame = False
                for curResult in historyResults:
                    # if the result exists in history already we need to skip it
                    if self._genericName(
                            curResult["resource"]) == self._genericName(
                                curProper.name):
                        isSame = True
                        break
                if isSame:
                    logger.log(
                        u"This proper is already in history, skipping it",
                        logger.DEBUG)
                    continue

                # get the episode object
                showObj = helpers.findCertainShow(sickbeard.showList,
                                                  curProper.tvdbid)
                if showObj is None:
                    logger.log(
                        u"Unable to find the show with tvdbid " +
                        str(curProper.tvdbid) +
                        " so unable to download the proper", logger.ERROR)
                    continue
                epObj = showObj.getEpisode(curProper.season, curProper.episode)

                # make the result object
                result = curProper.provider.getResult([epObj])
                result.url = curProper.url
                result.name = curProper.name
                result.quality = curProper.quality

                # snatch it
                downloadResult = search.snatchEpisode(result, SNATCHED_PROPER)

                return downloadResult
Example #4
def makeSceneSeasonSearchString(show, segment, extraSearchType=None):

    myDB = db.DBConnection()

    if show.air_by_date:
        numseasons = 0

        # the search string for air by date shows is just
        seasonStrings = [segment]

    elif show.anime:
        """this part is from darkcube"""
        numseasons = 0
        episodeNumbersSQLResult = myDB.select(
            "SELECT absolute_number, status FROM tv_episodes WHERE showid = ? and season = ?",
            [show.tvdbid, segment])

        # get show qualities
        anyQualities, bestQualities = common.Quality.splitQuality(show.quality)

        # compile a list of all the episode numbers we need in this 'season'
        seasonStrings = []
        for episodeNumberResult in episodeNumbersSQLResult:

            # get quality of the episode
            curCompositeStatus = int(episodeNumberResult["status"])
            curStatus, curQuality = common.Quality.splitCompositeStatus(
                curCompositeStatus)

            if bestQualities:
                highestBestQuality = max(bestQualities)
            else:
                highestBestQuality = 0

            # if we need a better one then add it to the list of episodes to fetch
            if (curStatus in (common.DOWNLOADED, common.SNATCHED)
                    and curQuality < highestBestQuality
                ) or curStatus == common.WANTED:
                ab_number = 0
                if isinstance(episodeNumberResult["absolute_number"], int):
                    ab_number = int(episodeNumberResult["absolute_number"])
                if ab_number > 0:
                    seasonStrings.append("%d" % ab_number)

    else:
        numseasonsSQlResult = myDB.select(
            "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
            [show.tvdbid])
        numseasons = int(numseasonsSQlResult[0][0])

        seasonStrings = ["S%02d" % segment]
        # since nzbmatrix allows more than one search per request we search SxEE results too
        if extraSearchType == "nzbmatrix":
            seasonStrings.append("%ix" % segment)

    bwl = BlackAndWhiteList(show.tvdbid)
    showNames = set(makeSceneShowSearchStrings(show))

    toReturn = []
    term_list = []

    # search each show name
    for curShow in showNames:
        # most providers all work the same way
        if not extraSearchType:
            # if there's only one season then we can just use the show name straight up
            if numseasons == 1:
                toReturn.append(curShow)
            # for providers that don't allow multiple searches in one request we only search for Sxx style stuff
            else:
                for cur_season in seasonStrings:
                    if len(bwl.whiteList) > 0:
                        for keyword in bwl.whiteList:
                            toReturn.append(keyword + '.' + curShow + "." +
                                            cur_season)
                    else:
                        toReturn.append(curShow + "." + cur_season)

        # nzbmatrix is special, we build a search string just for them
        elif extraSearchType == "nzbmatrix":
            if numseasons == 1:
                toReturn.append('"' + curShow + '"')
            elif numseasons == 0:
                if show.anime:
                    term_list = [
                        '(+"' + curShow + '"+"' + x + '")'
                        for x in seasonStrings
                    ]
                    toReturn.append('.'.join(term_list))
                else:
                    toReturn.append('"' + curShow + ' ' +
                                    str(segment).replace('-', ' ') + '"')
            else:
                term_list = [x + '*' for x in seasonStrings]
                if show.air_by_date:
                    term_list = ['"' + x + '"' for x in term_list]

                toReturn.append('"' + curShow + '"')

    if extraSearchType == "nzbmatrix":
        toReturn = ['+(' + ','.join(toReturn) + ')']
        if term_list:
            toReturn.append('+(' + ','.join(term_list) + ')')
    return toReturn
Example #5
    def _find_info(self):
        """
        For a given file try to find the showid, season, and episode.
        """

        tvdb_id = season = quality = None
        episodes = []

        # every name/source we will try to parse, in order of preference
        attempt_list = [
            # try to look up the nzb in history
            self._history_lookup,

            # try to analyze the nzb name
            lambda: self._analyze_name(self.nzb_name),

            # try to analyze the file name
            lambda: self._analyze_name(self.file_name),

            # try to analyze the dir name
            lambda: self._analyze_name(self.folder_name),

            # try to analyze the file + dir names together
            lambda: self._analyze_name(self.file_path),

            # try to analyze the dir + file name together as one name
            lambda: self._analyze_name(self.folder_name + u' ' + self.file_name),
        ]

        # attempt every possible method to get our info
        for cur_attempt in attempt_list:

            try:
                (cur_tvdb_id, cur_season, cur_episodes, cur_quality) = cur_attempt()
            except InvalidNameException, e:
                logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
                continue

            # if we already did a successful history lookup then keep that tvdb_id value
            if cur_tvdb_id and not (self.in_history and tvdb_id):
                tvdb_id = cur_tvdb_id

            if cur_quality and not (self.in_history and quality):
                quality = cur_quality

            if cur_season is not None:
                season = cur_season

            if cur_episodes:
                episodes = cur_episodes

            # for air-by-date shows we need to look up the season/episode from database
            if season == -1 and tvdb_id and episodes:
                self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode", logger.DEBUG)
                airdate = episodes[0].toordinal()
                myDB = db.DBConnection()
                sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and airdate = ?", [tvdb_id, airdate])

                if sql_result:
                    season = int(sql_result[0][0])
                    episodes = [int(sql_result[0][1])]
                else:
                    self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(tvdb_id) + u", skipping", logger.DEBUG)
                    # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                    episodes = []
                    continue

            # if there's no season then we can hopefully just use 1 automatically
            elif season is None and tvdb_id:
                myDB = db.DBConnection()
                numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [tvdb_id])
                if int(numseasonsSQlResult[0][0]) == 1:
                    self._log(u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...", logger.DEBUG)
                    season = 1

            if tvdb_id and season and episodes:
                return (tvdb_id, season, episodes, quality)
Example #6
def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):

    if show.air_by_date or show.sports:
        numseasons = 0

        # the search string for air by date shows is just
        seasonStrings = [str(ep_obj.airdate).split('-')[0]]
    elif show.is_anime:
        numseasons = 0
        seasonEps = show.getAllEpisodes(ep_obj.season)

        # get show qualities
        anyQualities, bestQualities = common.Quality.splitQuality(show.quality)

        # compile a list of all the episode numbers we need in this 'season'
        seasonStrings = []
        for episode in seasonEps:

            # get quality of the episode
            curCompositeStatus = episode.status
            curStatus, curQuality = common.Quality.splitCompositeStatus(
                curCompositeStatus)

            if bestQualities:
                highestBestQuality = max(bestQualities)
            else:
                highestBestQuality = 0

            # if we need a better one then add it to the list of episodes to fetch
            if (curStatus in (common.DOWNLOADED, common.SNATCHED)
                    and curQuality < highestBestQuality
                ) or curStatus == common.WANTED:
                ab_number = episode.scene_absolute_number
                if ab_number > 0:
                    seasonStrings.append("%d" % ab_number)

    else:
        with db.DBConnection() as myDB:
            numseasonsSQlResult = myDB.select(
                "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
                [show.indexerid])

        numseasons = int(numseasonsSQlResult[0][0])
        seasonStrings = ["S%02d" % int(ep_obj.scene_season)]

    bwl = BlackAndWhiteList(show.indexerid)
    showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))

    toReturn = []

    # search each show name
    for curShow in showNames:
        # most providers all work the same way
        if not extraSearchType:
            # if there's only one season then we can just use the show name straight up
            if numseasons == 1:
                toReturn.append(curShow)
            # for providers that don't allow multiple searches in one request we only search for Sxx style stuff
            else:
                for cur_season in seasonStrings:
                    if len(bwl.whiteList) > 0:
                        for keyword in bwl.whiteList:
                            toReturn.append(keyword + '.' + curShow + "." +
                                            cur_season)
                    else:
                        toReturn.append(curShow + "." + cur_season)

    return toReturn
Example #7
def searchProviders(show, episodes, manualSearch=False, downCurQuality=False):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    """
    Walk providers for information on shows

    :param show: Show we are looking for
    :param episodes: Episodes we hope to find
    :param manualSearch: Boolean, is this a manual search?
    :param downCurQuality: Boolean, should we re-download currently available quality file
    :return: results for search
    """
    foundResults = {}
    finalResults = []

    didSearch = False

    # build name cache for show
    sickbeard.name_cache.buildNameCache(show)

    origThreadName = threading.currentThread().name

    providers = [
        x for x in sickbeard.providers.sortedProviderList(
            sickbeard.RANDOMIZE_PROVIDERS)
        if x.is_active() and x.enable_backlog
    ]
    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
        curProvider.cache.updateCache()

    threading.currentThread().name = origThreadName

    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

        if curProvider.anime_only and not show.is_anime:
            logger.log(u"" + str(show.name) + " is not an anime, skipping",
                       logger.DEBUG)
            continue

        foundResults[curProvider.name] = {}

        searchCount = 0
        search_mode = curProvider.search_mode

        # Always search for episode when manually searching when in sponly
        if search_mode == 'sponly' and manualSearch is True:
            search_mode = 'eponly'

        while True:
            searchCount += 1

            if search_mode == 'eponly':
                logger.log(u"Performing episode search for " + show.name)
            else:
                logger.log(u"Performing season pack search for " + show.name)

            try:
                searchResults = curProvider.find_search_results(
                    show, episodes, search_mode, manualSearch, downCurQuality)
            except AuthException as error:
                logger.log(u"Authentication error: {0!r}".format(error),
                           logger.ERROR)
                break
            except Exception as error:
                logger.log(
                    u"Exception while searching {0}. Error: {1!r}".format(
                        curProvider.name, error), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break

            didSearch = True

            if searchResults:
                # make a list of all the results for this provider
                for curEp in searchResults:
                    if curEp in foundResults[curProvider.name]:
                        foundResults[
                            curProvider.name][curEp] += searchResults[curEp]
                    else:
                        foundResults[
                            curProvider.name][curEp] = searchResults[curEp]

                break
            elif not curProvider.search_fallback or searchCount == 2:
                break

            if search_mode == 'sponly':
                logger.log(u"Fallback episode search initiated", logger.DEBUG)
                search_mode = 'eponly'
            else:
                logger.log(u"Fallback season pack search initiate",
                           logger.DEBUG)
                search_mode = 'sponly'

        # skip to next provider if we have no results to process
        if not foundResults[curProvider.name]:
            continue

        # pick the best season NZB
        bestSeasonResult = None
        if SEASON_RESULT in foundResults[curProvider.name]:
            bestSeasonResult = pickBestResult(
                foundResults[curProvider.name][SEASON_RESULT], show)

        highest_quality_overall = 0
        for cur_episode in foundResults[curProvider.name]:
            for cur_result in foundResults[curProvider.name][cur_episode]:
                if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
                    highest_quality_overall = cur_result.quality
        logger.log(
            u"The highest quality of any match is " +
            Quality.qualityStrings[highest_quality_overall], logger.DEBUG)

        # see if every episode is wanted
        if bestSeasonResult:
            searchedSeasons = [str(x.season) for x in episodes]

            # get the quality of the season nzb
            seasonQual = bestSeasonResult.quality
            logger.log(
                u"The quality of the season " +
                bestSeasonResult.provider.provider_type + " is " +
                Quality.qualityStrings[seasonQual], logger.DEBUG)

            main_db_con = db.DBConnection()
            allEps = [
                int(x["episode"]) for x in main_db_con.select(
                    "SELECT episode FROM tv_episodes WHERE showid = ? AND ( season IN ( "
                    + ','.join(searchedSeasons) + " ) )", [show.indexerid])
            ]

            logger.log(
                u"Executed query: [SELECT episode FROM tv_episodes WHERE showid = {0} AND season in {1}]"
                .format(show.indexerid, ','.join(searchedSeasons)))
            logger.log(u"Episode list: " + str(allEps), logger.DEBUG)

            allWanted = True
            anyWanted = False
            for curEpNum in allEps:
                for season in {x.season for x in episodes}:
                    if not show.wantEpisode(season, curEpNum, seasonQual,
                                            downCurQuality):
                        allWanted = False
                    else:
                        anyWanted = True

            # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
            if allWanted and bestSeasonResult.quality == highest_quality_overall:
                logger.log(
                    u"Every ep in this season is needed, downloading the whole "
                    + bestSeasonResult.provider.provider_type + " " +
                    bestSeasonResult.name)
                epObjs = []
                for curEpNum in allEps:
                    for season in {x.season for x in episodes}:
                        epObjs.append(show.getEpisode(season, curEpNum))
                bestSeasonResult.episodes = epObjs

                # Remove provider from thread name before return results
                threading.currentThread().name = origThreadName

                return [bestSeasonResult]

            elif not anyWanted:
                logger.log(
                    u"No eps from this season are wanted at this quality, ignoring the result of "
                    + bestSeasonResult.name, logger.DEBUG)

            else:

                if bestSeasonResult.provider.provider_type == GenericProvider.NZB:
                    logger.log(
                        u"Breaking apart the NZB and adding the individual ones to our results",
                        logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individualResults = nzbSplitter.split_result(
                        bestSeasonResult)
                    for curResult in individualResults:
                        if len(curResult.episodes) == 1:
                            epNum = curResult.episodes[0].episode
                        elif len(curResult.episodes) > 1:
                            epNum = MULTI_EP_RESULT

                        if epNum in foundResults[curProvider.name]:
                            foundResults[curProvider.name][epNum].append(
                                curResult)
                        else:
                            foundResults[curProvider.name][epNum] = [curResult]

                # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
                    logger.log(
                        u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!"
                    )
                    epObjs = []
                    for curEpNum in allEps:
                        for season in {x.season for x in episodes}:
                            epObjs.append(show.getEpisode(season, curEpNum))
                    bestSeasonResult.episodes = epObjs

                    if MULTI_EP_RESULT in foundResults[curProvider.name]:
                        foundResults[curProvider.name][MULTI_EP_RESULT].append(
                            bestSeasonResult)
                    else:
                        foundResults[curProvider.name][MULTI_EP_RESULT] = [
                            bestSeasonResult
                        ]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multiResults = {}
        if MULTI_EP_RESULT in foundResults[curProvider.name]:
            for _multiResult in foundResults[
                    curProvider.name][MULTI_EP_RESULT]:

                logger.log(
                    u"Seeing if we want to bother with multi-episode result " +
                    _multiResult.name, logger.DEBUG)

                # Filter result by ignore/required/whitelist/blacklist/quality, etc
                multiResult = pickBestResult(_multiResult, show)
                if not multiResult:
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                neededEps = []
                notNeededEps = []
                for epObj in multiResult.episodes:
                    # if we have results for the episode
                    if epObj.episode in foundResults[curProvider.name] and len(
                            foundResults[curProvider.name][epObj.episode]) > 0:
                        notNeededEps.append(epObj.episode)
                    else:
                        neededEps.append(epObj.episode)

                logger.log(
                    u"Single-ep check result is neededEps: " + str(neededEps) +
                    ", notNeededEps: " + str(notNeededEps), logger.DEBUG)

                if not neededEps:
                    logger.log(
                        u"All of these episodes were covered by single episode results, ignoring this multi-episode result",
                        logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multiNeededEps = []
                multiNotNeededEps = []
                for epObj in multiResult.episodes:
                    if epObj.episode in multiResults:
                        multiNotNeededEps.append(epObj.episode)
                    else:
                        multiNeededEps.append(epObj.episode)

                logger.log(
                    u"Multi-ep check result is multiNeededEps: " +
                    str(multiNeededEps) + ", multiNotNeededEps: " +
                    str(multiNotNeededEps), logger.DEBUG)

                if not multiNeededEps:
                    logger.log(
                        u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result",
                        logger.DEBUG)
                    continue

                # don't bother with the single result if we're going to get it with a multi result
                for epObj in multiResult.episodes:
                    multiResults[epObj.episode] = multiResult
                    if epObj.episode in foundResults[curProvider.name]:
                        logger.log(
                            u"A needed multi-episode result overlaps with a single-episode result for ep #"
                            + str(epObj.episode) +
                            ", removing the single-episode results from the list",
                            logger.DEBUG)
                        del foundResults[curProvider.name][epObj.episode]

        # of all the single ep results narrow it down to the best one for each episode
        finalResults += set(multiResults.values())
        for curEp in foundResults[curProvider.name]:
            if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if not foundResults[curProvider.name][curEp]:
                continue

            # if all results were rejected move on to the next episode
            bestResult = pickBestResult(foundResults[curProvider.name][curEp],
                                        show)
            if not bestResult:
                continue

            # add result if its not a duplicate and
            found = False
            for i, result in enumerate(finalResults):
                for bestResultEp in bestResult.episodes:
                    if bestResultEp in result.episodes:
                        if result.quality < bestResult.quality:
                            finalResults.pop(i)
                        else:
                            found = True
            if not found:
                finalResults += [bestResult]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wantedEpCount = 0
        for wantedEp in episodes:
            for result in finalResults:
                if wantedEp in result.episodes and isFinalResult(result):
                    wantedEpCount += 1

        # make sure we search every provider for results unless we found everything we wanted
        if wantedEpCount == len(episodes):
            break

    if not didSearch:
        logger.log(
            u"No NZB/Torrent providers found or enabled in the sickrage config for backlog searches. Please check your settings.",
            logger.WARNING)

    # Remove provider from thread name before return results
    threading.currentThread().name = origThreadName
    return finalResults
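
A call sketch (assumptions: show is a TVShow instance from sickbeard.showList and episodes is a list of its TVEpisode objects; both are stand-ins here):

results = searchProviders(show, episodes, manualSearch=True)
for cur_result in results:
    print cur_result.name  # Python 2 print, matching the codebase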
Example #8
    def run(self, force=False):
        """
        Runs the daily searcher, queuing selected episodes for search

        :param force: Force search
        """
        if self.amActive:
            return

        self.amActive = True

        logger.log(u"Searching for new released episodes ...")

        if not network_timezones.network_dict:
            network_timezones.update_network_dict()

        if network_timezones.network_dict:
            curDate = (datetime.date.today() +
                       datetime.timedelta(days=1)).toordinal()
        else:
            curDate = (datetime.date.today() +
                       datetime.timedelta(days=2)).toordinal()

        curTime = datetime.datetime.now(network_timezones.sb_timezone)

        main_db_con = db.DBConnection()
        sql_results = main_db_con.select(
            "SELECT showid, airdate, season, episode FROM tv_episodes WHERE status = ? AND (airdate <= ? and airdate > 1)",
            [common.UNAIRED, curDate])

        sql_l = []
        show = None

        for sqlEp in sql_results:
            try:
                if not show or int(sqlEp["showid"]) != show.indexerid:
                    show = Show.find(sickbeard.showList, int(sqlEp["showid"]))

                # for when there is orphaned series in the database but not loaded into our showlist
                if not show or show.paused:
                    continue

            except MultipleShowObjectsException:
                logger.log(u"ERROR: expected to find a single show matching " +
                           str(sqlEp['showid']))
                continue

            if show.airs and show.network:
                # This is how you assure it is always converted to local time
                air_time = network_timezones.parse_date_time(
                    sqlEp['airdate'], show.airs,
                    show.network).astimezone(network_timezones.sb_timezone)

                # filter out any episodes that haven't started airing yet,
                # but set them to the default status while they are airing
                # so they are snatched faster
                if air_time > curTime:
                    continue

            ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
            with ep.lock:
                if ep.season == 0:
                    logger.log(
                        u"New episode " + ep.prettyName() +
                        " airs today, setting status to SKIPPED because it is a special season"
                    )
                    ep.status = common.SKIPPED
                else:
                    logger.log(
                        u"New episode %s airs today, setting to default episode status for this show: %s"
                        % (ep.prettyName(),
                           common.statusStrings[ep.show.default_ep_status]))
                    ep.status = ep.show.default_ep_status

                sql_l.append(ep.get_sql())

        if len(sql_l) > 0:
            main_db_con = db.DBConnection()
            main_db_con.mass_action(sql_l)
        else:
            logger.log(u"No new released episodes found ...")

        # queue episode for daily search
        dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
        sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

        self.amActive = False
Example #9
    def process(self):
        """
        Post-process a given file
        """

        self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

        if os.path.isdir(self.file_path):
            self._log(u"File " + self.file_path + " seems to be a directory")
            return False
        for ignore_file in self.IGNORED_FILESTRINGS:
            if ignore_file in self.file_path:
                self._log(u"File " + self.file_path + " is ignored type, skipping")
                return False
        # reset per-file stuff
        self.in_history = False

        # try to find the file info
        (tvdb_id, season, episodes) = self._find_info()

        # if we don't have it then give up
        if not tvdb_id or season is None or not episodes:
            return False

        # retrieve/create the corresponding TVEpisode objects
        ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

        # get the quality of the episode we're processing
        new_ep_quality = self._get_quality(ep_obj)
        logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

        # see if this is a priority download (is it snatched, in history, or PROPER)
        priority_download = self._is_priority(ep_obj, new_ep_quality)
        self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

        # set the status of the episodes
        for curEp in [ep_obj] + ep_obj.relatedEps:
            curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

        # check for an existing file
        existing_file_status = self._checkForExistingFile(ep_obj.location)

        # if it's not priority then we don't want to replace smaller files in case it was a mistake
        if not priority_download:

            # if there's an existing file that we don't want to replace stop here
            if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
                self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
                return False
            elif existing_file_status == PostProcessor.EXISTS_SMALLER:
                self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
            elif existing_file_status != PostProcessor.DOESNT_EXIST:
                self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
                return False

        # if the file is priority then we're going to replace it even if it exists
        else:
            self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

        # delete the existing file (and company)
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            try:
                self._delete(cur_ep.location, associated_files=True)
                # clean up any left over folders
                if cur_ep.location:
                    helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to delete the existing files")

        # if the show directory doesn't exist then make it if allowed
        if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
            self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
            try:
                ek.ek(os.mkdir, ep_obj.show._location)
                # do the library update for synoindex
                notifiers.synoindex_notifier.addFolder(ep_obj.show._location)

            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

            # get metadata for the show (but not episode because it hasn't been fully processed)
            ep_obj.show.writeMetadata(True)

        # update the ep info before we rename so the quality & release name go into the name properly
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_release_name = None

                # use the best possible representation of the release name
                if self.good_results[self.NZB_NAME]:
                    cur_release_name = self.nzb_name
                    if cur_release_name.lower().endswith('.nzb'):
                        cur_release_name = cur_release_name.rpartition('.')[0]
                elif self.good_results[self.FOLDER_NAME]:
                    cur_release_name = self.folder_name
                elif self.good_results[self.FILE_NAME]:
                    cur_release_name = self.file_name
                    # take the extension off the filename, it's not needed
                    if '.' in self.file_name:
                        cur_release_name = self.file_name.rpartition('.')[0]

                if cur_release_name:
                    self._log("Found release name " + cur_release_name, logger.DEBUG)
                    cur_ep.release_name = cur_release_name
                else:
                    logger.log("good results: " + repr(self.good_results), logger.DEBUG)

                cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

                cur_ep.saveToDB()

        # find the destination folder
        try:
            proper_path = ep_obj.proper_path()
            proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)

            dest_path = ek.ek(os.path.dirname, proper_absolute_path)
        except exceptions.ShowDirNotFoundException:
            raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

        self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

        # create any folders we need
        helpers.make_dirs(dest_path)

        # figure out the base name of the resulting episode file
        if sickbeard.RENAME_EPISODES:
            orig_extension = self.file_name.rpartition('.')[-1]
            new_base_name = ek.ek(os.path.basename, proper_path)
            new_file_name = new_base_name + '.' + orig_extension

        else:
            # if we're not renaming then there's no new base name, we'll just use the existing name
            new_base_name = None
            new_file_name = self.file_name

#        with open(self.file_path, 'rb') as fh:
#            m = hashlib.md5()
#            while True:
#                data = fh.read(8192)
#                if not data:
#                    break
#                m.update(data)
#            MD5 = m.hexdigest()
        # NOTE: this hashes the file *path* string, not the file contents
        # (the commented-out block above computes a hash of the contents)
        m = hashlib.md5()
        m.update(self.file_path)
        MD5 = m.hexdigest()

        try:
            path, file_name = os.path.split(self.file_path)

            if sickbeard.TORRENT_DOWNLOAD_DIR in path and sickbeard.TORRENT_DOWNLOAD_DIR != "":
                # processing options for torrents
                if sickbeard.PROCESS_METHOD == "copy":
                    self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
                elif sickbeard.PROCESS_METHOD == "move":
                    self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
                elif sickbeard.PROCESS_METHOD == "hardlink":
                    self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
                elif sickbeard.PROCESS_METHOD == "symlink":
                    self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
                else: 
                    logger.log(u"Unknown process method: " + str(sickbeard.PROCESS_METHOD), logger.ERROR)
                    raise exceptions.PostProcessingFailed("Unable to move the files to their new home") 
            else:
                # action for all remaining files
                if sickbeard.KEEP_PROCESSED_DIR:
                    self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
                else:
                    self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)       
                
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

        myDB = db.DBConnection()

        # insert the MD5 of the processed file
        controlMD5 = {"episode_id": int(ep_obj.tvdbid)}
        newValMD5 = {"filename": new_base_name,
                     "md5": MD5}
        myDB.upsert("processed_files", newValMD5, controlMD5)

        # put the new location in the database
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                cur_ep.saveToDB()

        # log it to history
        history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)
        
        # download subtitles
        if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
            for cur_ep in [ep_obj] + ep_obj.relatedEps:
                cur_ep.downloadSubtitles()

        # send notifications
        notifiers.notify_download(ep_obj.prettyName())

        # generate nfo/tbn
        ep_obj.createMetaFiles()
        ep_obj.saveToDB()

        # do the library update for XBMC
        notifiers.xbmc_notifier.update_library(ep_obj.show.name)

        # do the library update for Plex
        notifiers.plex_notifier.update_library()

        # do the library update for NMJ
        # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

        # do the library update for Synology Indexer
        notifiers.synoindex_notifier.addFile(ep_obj.location)

        # do the library update for pyTivo
        notifiers.pytivo_notifier.update_library(ep_obj)

        # do the library update for Trakt
        notifiers.trakt_notifier.update_library(ep_obj)

        # do the library update for BetaSeries
        notifiers.betaseries_notifier.update_library(ep_obj)

        self._run_extra_scripts(ep_obj)

        return True
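
A hedged construction sketch (assumptions: PostProcessor takes the file path plus an optional NZB name, as in sickbeard.postProcessor; the paths below are made up):

pp = PostProcessor("/downloads/Show.S01E01.720p-GRP/show.mkv",
                   nzb_name="Show.S01E01.720p-GRP.nzb")
pp.process()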
Example #10
def validateDir(path, dirName, nzbNameOriginal, failed, result):
    """
    Check if directory is valid for processing

    :param path: Path to use
    :param dirName: Directory to check
    :param nzbNameOriginal: Original NZB name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    IGNORED_FOLDERS = ['.@__thumb', '@eaDir']
    folder_name = ek(os.path.basename, dirName)
    if folder_name in IGNORED_FOLDERS:
        return False

    result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    if folder_name.startswith('_FAILED_'):
        result.output += logHelper(
            u"The directory name indicates it failed to extract.",
            logger.DEBUG)
        failed = True
    elif folder_name.startswith('_UNDERSIZED_'):
        result.output += logHelper(
            u"The directory name indicates that it was previously rejected for being undersized.",
            logger.DEBUG)
        failed = True
    elif folder_name.upper().startswith('_UNPACK'):
        result.output += logHelper(
            u"The directory name indicates that this release is in the process of being unpacked.",
            logger.DEBUG)
        result.missedfiles.append(dirName + " : Being unpacked")
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal, result)
        result.missedfiles.append(dirName + " : Failed download")
        return False

    if helpers.is_hidden_folder(os.path.join(path, dirName)):
        result.output += logHelper(u"Ignoring hidden folder: " + dirName,
                                   logger.DEBUG)
        result.missedfiles.append(dirName + " : Hidden folder")
        return False

    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")

    for sqlShow in sqlResults:
        if dirName.lower().startswith(
                ek(os.path.realpath, sqlShow["location"]).lower() +
                os.sep) or dirName.lower() == ek(os.path.realpath,
                                                 sqlShow["location"]).lower():
            result.output += logHelper(
                u"Cannot process an episode that's already been moved to its show dir, skipping "
                + dirName, logger.WARNING)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for processPath, processDir, fileList in ek(os.walk,
                                                ek(os.path.join, path,
                                                   dirName),
                                                topdown=False):
        allDirs += processDir
        allFiles += fileList

    videoFiles = filter(helpers.isMediaFile, allFiles)
    allDirs.append(dirName)

    # check if the dir has at least one tv video file
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    for cur_dir in allDirs:
        try:
            NameParser().parse(cur_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException):
            pass

    if sickbeard.UNPACK:
        # search for packed releases
        packedFiles = filter(helpers.isRarFile, allFiles)

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException):
                pass

    result.output += logHelper(
        dirName + " : No processable items found in folder", logger.DEBUG)
    return False
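
A usage sketch for validateDir (assumption: the result argument only needs mutable output and missedfiles attributes, which is all the code above touches; the class name and paths are made up):

class FakeResult(object):
    def __init__(self):
        self.output = u""
        self.missedfiles = []

result = FakeResult()
ok = validateDir("/downloads", "/downloads/Show.S01E01.720p-GRP",
                 None, False, result)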
Example #11
    def _get_season_segments(self, tvdb_id, fromDate):
        myDB = db.DBConnection()
        sqlResults = myDB.select(
            "SELECT DISTINCT(season) as season FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?",
            [tvdb_id, fromDate.toordinal()])
        return [int(x["season"]) for x in sqlResults]
Example #12
    def run(self):
        # TODO: Put that in the __init__ before starting the thread?
        if not sickbeard.USE_SUBTITLES:
            logger.log(u'Subtitles support disabled', logger.DEBUG)
            return
        if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
            logger.log(
                u'Not enough services selected. At least 1 service is required to search subtitles in the background',
                logger.ERROR)
            return

        logger.log(u'Checking for subtitles', logger.MESSAGE)

        # get episodes on which we want subtitles
        # criteria is:
        #  - show subtitles = 1
        #  - episode subtitles != config wanted languages or SINGLE (depends on config multi)
        #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
        #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d

        myDB = db.DBConnection()
        today = datetime.date.today().toordinal()
        # you have 5 minutes to understand that one. Good luck
        sqlResults = myDB.select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.tvdb_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN ('
            + ','.join([str(x) for x in Quality.DOWNLOADED]) +
            ') OR (e.status IN (' + ','.join(
                [str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER]) +
            ') AND e.location != ""))',
            [today, wantedLanguages(True), today, today])
        if len(sqlResults) == 0:
            logger.log('No subtitles to download', logger.MESSAGE)
            return

        rules = self._getRules()
        now = datetime.datetime.now()
        for epToSub in sqlResults:
            if not ek.ek(os.path.isfile, epToSub['location']):
                logger.log(
                    'Episode file does not exist, cannot download subtitles for episode %dx%d of show %s'
                    % (epToSub['season'], epToSub['episode'],
                       epToSub['show_name']), logger.DEBUG)
                continue

            # Old shows rule
            if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2
                 and now - datetime.datetime.strptime(epToSub['lastsearch'],
                                                      '%Y-%m-%d %H:%M:%S') >
                 datetime.timedelta(hours=rules['old'][epToSub['searchcount']])
                 ) or
                    # Recent shows rule
                (epToSub['airdate_daydiff'] <= 7 and epToSub['searchcount'] < 7
                 and now - datetime.datetime.strptime(epToSub['lastsearch'],
                                                      '%Y-%m-%d %H:%M:%S') >
                 datetime.timedelta(hours=rules['new'][epToSub['searchcount']])
                 )):
                logger.log(
                    'Downloading subtitles for episode %dx%d of show %s' %
                    (epToSub['season'], epToSub['episode'],
                     epToSub['show_name']), logger.DEBUG)

                showObj = helpers.findCertainShow(sickbeard.showList,
                                                  int(epToSub['showid']))
                if not showObj:
                    logger.log(u'Show not found', logger.DEBUG)
                    return

                epObj = showObj.getEpisode(int(epToSub["season"]),
                                           int(epToSub["episode"]))
                if isinstance(epObj, str):
                    logger.log(u'Episode not found', logger.DEBUG)
                    return

                previous_subtitles = epObj.subtitles
Example #13
def setLastRefresh(list):
    myDB = db.DBConnection('cache.db')
    myDB.upsert("scene_exceptions_refresh", {
        'last_refreshed':
        int(time.mktime(datetime.datetime.today().timetuple()))
    }, {'list': list})
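
For reference, the last_refreshed expression used above is just the current Unix time truncated to whole seconds; a quick equivalence sketch:

import datetime
import time

ts_a = int(time.mktime(datetime.datetime.today().timetuple()))
ts_b = int(time.time())
# ts_a and ts_b agree to the second: both are plain epoch seconds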
Example #14
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    global exception_dict, anidb_exception_dict, xem_exception_dict

    # exceptions are stored on github pages
    for indexer in sickbeard.indexerApi().indexers:
        if shouldRefresh(sickbeard.indexerApi(indexer).name):
            logger.log(u"Checking for scene exception updates for " +
                       sickbeard.indexerApi(indexer).name + "")

            url = sickbeard.indexerApi(indexer).config['scene_url']

            url_data = helpers.getURL(url)
            if url_data is None:
                # When urlData is None, trouble connecting to github
                logger.log(
                    u"Check scene exceptions update failed. Unable to get URL: "
                    + url, logger.ERROR)
                continue

            else:
                setLastRefresh(sickbeard.indexerApi(indexer).name)

                # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
                for cur_line in url_data.splitlines():
                    cur_line = cur_line.decode('utf-8')
                    indexer_id, sep, aliases = cur_line.partition(
                        ':')  # @UnusedVariable

                    if not aliases:
                        continue

                    indexer_id = int(indexer_id)

                    # regex out the list of shows, taking \' into account
                    # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                    alias_list = [{
                        re.sub(r'\\(.)', r'\1', x): -1
                    } for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                    exception_dict[indexer_id] = alias_list
                    del alias_list
                del url_data

    # XEM scene exceptions
    _xem_exceptions_fetcher()
    for xem_ex in xem_exception_dict:
        if xem_ex in exception_dict:
            exception_dict[
                xem_ex] = exception_dict[xem_ex] + xem_exception_dict[xem_ex]
        else:
            exception_dict[xem_ex] = xem_exception_dict[xem_ex]

    # AniDB scene exceptions
    _anidb_exceptions_fetcher()
    for anidb_ex in anidb_exception_dict:
        if anidb_ex in exception_dict:
            exception_dict[anidb_ex] = exception_dict[
                anidb_ex] + anidb_exception_dict[anidb_ex]
        else:
            exception_dict[anidb_ex] = anidb_exception_dict[anidb_ex]

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    myDB = db.DBConnection('cache.db')
    for cur_indexer_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [
            x["show_name"] for x in myDB.select(
                "SELECT * FROM scene_exceptions WHERE indexer_id = ?",
                [cur_indexer_id])
        ]

        for cur_exception_dict in exception_dict[cur_indexer_id]:
            cur_exception, curSeason = cur_exception_dict.items()[0]

            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:

                if not isinstance(cur_exception, unicode):
                    cur_exception = unicode(cur_exception, 'utf-8', 'replace')

                myDB.action(
                    "INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                    [cur_indexer_id, cur_exception, curSeason])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")

    # cleanup
    exception_dict.clear()
    anidb_exception_dict.clear()
    xem_exception_dict.clear()
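
As a worked illustration of the exception-line parsing above (the sample line is
made up; real lines come from each indexer's scene_url feed):

# one feed line: an indexer_id, a colon, then quoted aliases with \' escapes
sample = "12345: 'Show Name', 'It\\'s A Show'"
indexer_id, sep, aliases = sample.partition(':')
names = [re.sub(r'\\(.)', r'\1', x)
         for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
print int(indexer_id), names  # 12345 ['Show Name', "It's A Show"]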
Exemple #15
0
def del_mapping(indexer, indexerid):
    my_db = db.DBConnection()
    my_db.action(
        'DELETE FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?',
        [indexerid, indexer])
Exemple #16
0
def retrieve_exceptions():
    """
    Looks up the exceptions on the show-api server, parses them into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    global _dyn_cache

    exception_dict = {}

    # Moved the exceptions onto our show-api server (to allow for future source merging)
    url = 'http://show-api.tvtumbler.com/api/exceptions'

    logger.log(u"Check scene exceptions update")
    url_data = helpers.getURL(url)

    if url_data is None:
        logger.log(
            u"Scene exceptions update check failed. Unable to get URL: " + url,
            logger.ERROR)
        return
    else:
        exception_dict = json.loads(url_data)
        myDB = db.DBConnection("cache.db")
        changed_exceptions = False

        # write all the exceptions we got off the net into the database
        for cur_tvdb_id in exception_dict:

            # update our cache
            _dyn_cache[str(cur_tvdb_id)] = exception_dict[cur_tvdb_id]

            # get a list of the existing exceptions for this ID
            existing_exceptions = [
                x["show_name"] for x in myDB.select(
                    "SELECT * FROM scene_exceptions WHERE tvdb_id = ?",
                    [cur_tvdb_id])
            ]

            for cur_exception in exception_dict[cur_tvdb_id]:
                # if this exception isn't already in the DB then add it
                if cur_exception not in existing_exceptions:
                    logger.log(
                        u'Adding exception %s: %s' %
                        (cur_tvdb_id, cur_exception), logger.DEBUG)
                    myDB.action(
                        "INSERT INTO scene_exceptions (tvdb_id, show_name) VALUES (?,?)",
                        [cur_tvdb_id, cur_exception])
                    changed_exceptions = True

            # check for any exceptions which have been deleted
            for cur_exception in existing_exceptions:
                if cur_exception not in exception_dict[cur_tvdb_id]:
                    logger.log(
                        u'Removing exception %s: %s' %
                        (cur_tvdb_id, cur_exception), logger.DEBUG)
                    myDB.action(
                        "DELETE FROM scene_exceptions WHERE tvdb_id = ? AND show_name = ?",
                        [cur_tvdb_id, cur_exception])
                    changed_exceptions = True

        # since this could invalidate the results of the cache we clear it out after updating
        if changed_exceptions:
            logger.log(u"Updated scene exceptions")
            name_cache.clearCache()
        else:
            logger.log(u"No scene exceptions update needed")
Exemple #17
0
def initialize(consoleLogging=True):

    with INIT_LOCK:

        global LOG_DIR, WEB_PORT, WEB_LOG, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, \
                USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, \
                SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \
                NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, currentSearchScheduler, backlogSearchScheduler, \
                USE_XBMC, XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_UPDATE_FULL, \
                XBMC_UPDATE_LIBRARY, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \
                USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_UPDATE_LIBRARY, \
                PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \
                showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, showList, loadingShowList, \
                NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, \
                SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \
                QUALITY_DEFAULT, SEASON_FOLDERS_FORMAT, SEASON_FOLDERS_DEFAULT, STATUS_DEFAULT, \
                GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, \
                USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, NZBMATRIX, NZBMATRIX_USERNAME, \
                NZBMATRIX_APIKEY, versionCheckScheduler, VERSION_NOTIFY, PROCESS_AUTOMATICALLY, \
                KEEP_PROCESSED_DIR, TV_DOWNLOAD_DIR, TVDB_BASE_URL, MIN_SEARCH_FREQUENCY, \
                showQueueScheduler, searchQueueScheduler, ROOT_DIRS, \
                NAMING_SHOW_NAME, NAMING_EP_TYPE, NAMING_MULTI_EP_TYPE, CACHE_DIR, ACTUAL_CACHE_DIR, TVDB_API_PARMS, \
                RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
                NAMING_EP_NAME, NAMING_SEP_TYPE, NAMING_USE_PERIODS, WOMBLE, \
                NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, NAMING_QUALITY, providerList, newznabProviderList, providerPluginList, \
                NAMING_DATES, EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \
                USE_NOTIFO, NOTIFO_USERNAME, NOTIFO_APISECRET, NOTIFO_NOTIFY_ONDOWNLOAD, NOTIFO_NOTIFY_ONSNATCH, \
                USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_SYNOINDEX, \
                USE_BANNER, USE_LISTVIEW, METADATA_XBMC, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
                NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \
                COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS

        if __INITIALIZED__:
            return False

        socket.setdefaulttimeout(SOCKET_TIMEOUT)

        CheckSection('General')
        CheckSection('Blackhole')
        CheckSection('Newzbin')
        CheckSection('SABnzbd')
        CheckSection('NZBget')
        CheckSection('XBMC')
        CheckSection('PLEX')
        CheckSection('Growl')
        CheckSection('Prowl')
        CheckSection('Twitter')
        CheckSection('NMJ')
        CheckSection('Synology')

        LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
        if not helpers.makeDir(LOG_DIR):
            logger.log(u"!!! No log folder, logging to screen only!",
                       logger.ERROR)

        try:
            WEB_PORT = check_setting_int(CFG, 'General', 'web_port', 8081)
        except Exception:
            WEB_PORT = 8081

        if WEB_PORT < 21 or WEB_PORT > 65535:
            WEB_PORT = 8081

        WEB_HOST = check_setting_str(CFG, 'General', 'web_host', '0.0.0.0')
        WEB_IPV6 = bool(check_setting_int(CFG, 'General', 'web_ipv6', 0))
        WEB_ROOT = check_setting_str(CFG, 'General', 'web_root',
                                     '').rstrip("/")
        WEB_LOG = bool(check_setting_int(CFG, 'General', 'web_log', 0))
        WEB_USERNAME = check_setting_str(CFG, 'General', 'web_username', '')
        WEB_PASSWORD = check_setting_str(CFG, 'General', 'web_password', '')
        LAUNCH_BROWSER = bool(
            check_setting_int(CFG, 'General', 'launch_browser', 1))

        ACTUAL_CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir',
                                             'cache')
        # fix bad configs due to buggy code
        if ACTUAL_CACHE_DIR == 'None':
            ACTUAL_CACHE_DIR = 'cache'

        # unless they specify, put the cache dir inside the data dir
        if not os.path.isabs(ACTUAL_CACHE_DIR):
            CACHE_DIR = os.path.join(DATA_DIR, ACTUAL_CACHE_DIR)
        else:
            CACHE_DIR = ACTUAL_CACHE_DIR

        if not helpers.makeDir(CACHE_DIR):
            logger.log(
                u"!!! Creating local cache dir failed, using system default",
                logger.ERROR)
            CACHE_DIR = None

        ROOT_DIRS = check_setting_str(CFG, 'General', 'root_dirs', '')
        if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS):
            ROOT_DIRS = ''
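        # ROOT_DIRS is pipe-delimited: the first field is the index of the
        # default root dir followed by one or more paths, e.g. (illustrative)
        # '0|/mnt/tv|/mnt/anime'; anything else is reset to '' above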

        proxies = urllib.getproxies()
        proxy_url = None
        if 'http' in proxies:
            proxy_url = proxies['http']
        elif 'ftp' in proxies:
            proxy_url = proxies['ftp']

        # Set our common tvdb_api options here
        TVDB_API_PARMS = {
            'cache': True,
            'apikey': TVDB_API_KEY,
            'language': 'en',
            'http_proxy': proxy_url
        }

        # only set a local cache dir when one was created successfully,
        # otherwise os.path.join(None, 'tvdb') would raise a TypeError
        if CACHE_DIR:
            TVDB_API_PARMS['cache_dir'] = os.path.join(CACHE_DIR, 'tvdb')
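        # these params are later splatted into the indexer client, along the
        # (assumed) lines of: t = tvdb_api.Tvdb(**sickbeard.TVDB_API_PARMS)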

        QUALITY_DEFAULT = check_setting_int(CFG, 'General', 'quality_default',
                                            SD)
        STATUS_DEFAULT = check_setting_int(CFG, 'General', 'status_default',
                                           SKIPPED)
        VERSION_NOTIFY = check_setting_int(CFG, 'General', 'version_notify', 1)
        SEASON_FOLDERS_FORMAT = check_setting_str(CFG, 'General',
                                                  'season_folders_format',
                                                  'Season %02d')
        SEASON_FOLDERS_DEFAULT = bool(
            check_setting_int(CFG, 'General', 'season_folders_default', 0))

        PROVIDER_ORDER = check_setting_str(CFG, 'General', 'provider_order',
                                           '').split()

        NAMING_SHOW_NAME = bool(
            check_setting_int(CFG, 'General', 'naming_show_name', 1))
        NAMING_EP_NAME = bool(
            check_setting_int(CFG, 'General', 'naming_ep_name', 1))
        NAMING_EP_TYPE = check_setting_int(CFG, 'General', 'naming_ep_type', 0)
        NAMING_MULTI_EP_TYPE = check_setting_int(CFG, 'General',
                                                 'naming_multi_ep_type', 0)
        NAMING_SEP_TYPE = check_setting_int(CFG, 'General', 'naming_sep_type',
                                            0)
        NAMING_USE_PERIODS = bool(
            check_setting_int(CFG, 'General', 'naming_use_periods', 0))
        NAMING_QUALITY = bool(
            check_setting_int(CFG, 'General', 'naming_quality', 0))
        NAMING_DATES = bool(
            check_setting_int(CFG, 'General', 'naming_dates', 1))

        TVDB_BASE_URL = 'http://www.thetvdb.com/api/' + TVDB_API_KEY

        USE_NZBS = bool(check_setting_int(CFG, 'General', 'use_nzbs', 1))
        USE_TORRENTS = bool(
            check_setting_int(CFG, 'General', 'use_torrents', 0))

        NZB_METHOD = check_setting_str(CFG, 'General', 'nzb_method',
                                       'blackhole')
        if NZB_METHOD not in ('blackhole', 'sabnzbd', 'nzbget'):
            NZB_METHOD = 'blackhole'

        DOWNLOAD_PROPERS = bool(
            check_setting_int(CFG, 'General', 'download_propers', 1))

        USENET_RETENTION = check_setting_int(CFG, 'General',
                                             'usenet_retention', 500)

        SEARCH_FREQUENCY = check_setting_int(CFG, 'General',
                                             'search_frequency',
                                             DEFAULT_SEARCH_FREQUENCY)
        if SEARCH_FREQUENCY < MIN_SEARCH_FREQUENCY:
            SEARCH_FREQUENCY = MIN_SEARCH_FREQUENCY

        NZB_DIR = check_setting_str(CFG, 'Blackhole', 'nzb_dir', '')
        TORRENT_DIR = check_setting_str(CFG, 'Blackhole', 'torrent_dir', '')

        TV_DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'tv_download_dir',
                                            '')
        PROCESS_AUTOMATICALLY = check_setting_int(CFG, 'General',
                                                  'process_automatically', 0)
        RENAME_EPISODES = check_setting_int(CFG, 'General', 'rename_episodes',
                                            1)
        KEEP_PROCESSED_DIR = check_setting_int(CFG, 'General',
                                               'keep_processed_dir', 1)
        MOVE_ASSOCIATED_FILES = check_setting_int(CFG, 'General',
                                                  'move_associated_files', 0)

        EZRSS = bool(check_setting_int(CFG, 'General', 'use_torrent', 0))
        if not EZRSS:
            EZRSS = bool(check_setting_int(CFG, 'EZRSS', 'ezrss', 0))

        TVTORRENTS = bool(check_setting_int(CFG, 'TVTORRENTS', 'tvtorrents',
                                            0))
        TVTORRENTS_DIGEST = check_setting_str(CFG, 'TVTORRENTS',
                                              'tvtorrents_digest', '')
        TVTORRENTS_HASH = check_setting_str(CFG, 'TVTORRENTS',
                                            'tvtorrents_hash', '')

        NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0))
        NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '')
        NZBS_HASH = check_setting_str(CFG, 'NZBs', 'nzbs_hash', '')

        NZBSRUS = bool(check_setting_int(CFG, 'NZBsRUS', 'nzbsrus', 0))
        NZBSRUS_UID = check_setting_str(CFG, 'NZBsRUS', 'nzbsrus_uid', '')
        NZBSRUS_HASH = check_setting_str(CFG, 'NZBsRUS', 'nzbsrus_hash', '')

        NZBMATRIX = bool(check_setting_int(CFG, 'NZBMatrix', 'nzbmatrix', 0))
        NZBMATRIX_USERNAME = check_setting_str(CFG, 'NZBMatrix',
                                               'nzbmatrix_username', '')
        NZBMATRIX_APIKEY = check_setting_str(CFG, 'NZBMatrix',
                                             'nzbmatrix_apikey', '')

        NEWZBIN = bool(check_setting_int(CFG, 'Newzbin', 'newzbin', 0))
        NEWZBIN_USERNAME = check_setting_str(CFG, 'Newzbin',
                                             'newzbin_username', '')
        NEWZBIN_PASSWORD = check_setting_str(CFG, 'Newzbin',
                                             'newzbin_password', '')

        WOMBLE = bool(check_setting_int(CFG, 'Womble', 'womble', 1))

        SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '')
        SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
        SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
        SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', 'tv')
        SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')

        NZBGET_PASSWORD = check_setting_str(CFG, 'NZBget', 'nzbget_password',
                                            'tegbzn6789')
        NZBGET_CATEGORY = check_setting_str(CFG, 'NZBget', 'nzbget_category',
                                            'tv')
        NZBGET_HOST = check_setting_str(CFG, 'NZBget', 'nzbget_host', '')

        USE_XBMC = bool(check_setting_int(CFG, 'XBMC', 'use_xbmc', 0))
        XBMC_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'XBMC', 'xbmc_notify_onsnatch', 0))
        XBMC_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'XBMC', 'xbmc_notify_ondownload', 0))
        XBMC_UPDATE_LIBRARY = bool(
            check_setting_int(CFG, 'XBMC', 'xbmc_update_library', 0))
        XBMC_UPDATE_FULL = bool(
            check_setting_int(CFG, 'XBMC', 'xbmc_update_full', 0))
        XBMC_HOST = check_setting_str(CFG, 'XBMC', 'xbmc_host', '')
        XBMC_USERNAME = check_setting_str(CFG, 'XBMC', 'xbmc_username', '')
        XBMC_PASSWORD = check_setting_str(CFG, 'XBMC', 'xbmc_password', '')

        USE_PLEX = bool(check_setting_int(CFG, 'Plex', 'use_plex', 0))
        PLEX_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'Plex', 'plex_notify_onsnatch', 0))
        PLEX_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'Plex', 'plex_notify_ondownload', 0))
        PLEX_UPDATE_LIBRARY = bool(
            check_setting_int(CFG, 'Plex', 'plex_update_library', 0))
        PLEX_SERVER_HOST = check_setting_str(CFG, 'Plex', 'plex_server_host',
                                             '')
        PLEX_HOST = check_setting_str(CFG, 'Plex', 'plex_host', '')
        PLEX_USERNAME = check_setting_str(CFG, 'Plex', 'plex_username', '')
        PLEX_PASSWORD = check_setting_str(CFG, 'Plex', 'plex_password', '')

        USE_GROWL = bool(check_setting_int(CFG, 'Growl', 'use_growl', 0))
        GROWL_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'Growl', 'growl_notify_onsnatch', 0))
        GROWL_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'Growl', 'growl_notify_ondownload', 0))
        GROWL_HOST = check_setting_str(CFG, 'Growl', 'growl_host', '')
        GROWL_PASSWORD = check_setting_str(CFG, 'Growl', 'growl_password', '')

        USE_PROWL = bool(check_setting_int(CFG, 'Prowl', 'use_prowl', 0))
        PROWL_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'Prowl', 'prowl_notify_onsnatch', 0))
        PROWL_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'Prowl', 'prowl_notify_ondownload', 0))
        PROWL_API = check_setting_str(CFG, 'Prowl', 'prowl_api', '')
        PROWL_PRIORITY = check_setting_str(CFG, 'Prowl', 'prowl_priority', "0")

        USE_TWITTER = bool(check_setting_int(CFG, 'Twitter', 'use_twitter', 0))
        TWITTER_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'Twitter', 'twitter_notify_onsnatch', 0))
        TWITTER_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'Twitter', 'twitter_notify_ondownload', 0))
        TWITTER_USERNAME = check_setting_str(CFG, 'Twitter',
                                             'twitter_username', '')
        TWITTER_PASSWORD = check_setting_str(CFG, 'Twitter',
                                             'twitter_password', '')
        TWITTER_PREFIX = check_setting_str(CFG, 'Twitter', 'twitter_prefix',
                                           'Sick Beard')

        USE_NOTIFO = bool(check_setting_int(CFG, 'Notifo', 'use_notifo', 0))
        NOTIFO_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'Notifo', 'notifo_notify_onsnatch', 0))
        NOTIFO_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'Notifo', 'notifo_notify_ondownload', 0))
        NOTIFO_USERNAME = check_setting_str(CFG, 'Notifo', 'notifo_username',
                                            '')
        NOTIFO_APISECRET = check_setting_str(CFG, 'Notifo', 'notifo_apisecret',
                                             '')

        USE_LIBNOTIFY = bool(
            check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0))
        LIBNOTIFY_NOTIFY_ONSNATCH = bool(
            check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsnatch',
                              0))
        LIBNOTIFY_NOTIFY_ONDOWNLOAD = bool(
            check_setting_int(CFG, 'Libnotify', 'libnotify_notify_ondownload',
                              0))

        USE_NMJ = bool(check_setting_int(CFG, 'NMJ', 'use_nmj', 0))
        NMJ_HOST = check_setting_str(CFG, 'NMJ', 'nmj_host', '')
        NMJ_DATABASE = check_setting_str(CFG, 'NMJ', 'nmj_database', '')
        NMJ_MOUNT = check_setting_str(CFG, 'NMJ', 'nmj_mount', '')

        USE_SYNOINDEX = bool(
            check_setting_int(CFG, 'Synology', 'use_synoindex', 0))

        GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')

        IGNORE_WORDS = check_setting_str(CFG, 'General', 'ignore_words',
                                         IGNORE_WORDS)

        EXTRA_SCRIPTS = [
            x for x in check_setting_str(CFG, 'General', 'extra_scripts',
                                         '').split('|') if x
        ]

        USE_BANNER = bool(check_setting_int(CFG, 'General', 'use_banner', 0))
        USE_LISTVIEW = bool(
            check_setting_int(CFG, 'General', 'use_listview', 0))
        METADATA_TYPE = check_setting_str(CFG, 'General', 'metadata_type', '')

        metadata_provider_dict = metadata.get_metadata_generator_dict()

        # if this exists it's legacy, use the info to upgrade metadata to the new settings
        if METADATA_TYPE:

            old_metadata_class = None

            if METADATA_TYPE == 'xbmc':
                old_metadata_class = metadata.xbmc.metadata_class
            elif METADATA_TYPE == 'mediabrowser':
                old_metadata_class = metadata.mediabrowser.metadata_class
            elif METADATA_TYPE == 'ps3':
                old_metadata_class = metadata.ps3.metadata_class

            if old_metadata_class:

                METADATA_SHOW = bool(
                    check_setting_int(CFG, 'General', 'metadata_show', 1))
                METADATA_EPISODE = bool(
                    check_setting_int(CFG, 'General', 'metadata_episode', 1))

                ART_POSTER = bool(
                    check_setting_int(CFG, 'General', 'art_poster', 1))
                ART_FANART = bool(
                    check_setting_int(CFG, 'General', 'art_fanart', 1))
                ART_THUMBNAILS = bool(
                    check_setting_int(CFG, 'General', 'art_thumbnails', 1))
                ART_SEASON_THUMBNAILS = bool(
                    check_setting_int(CFG, 'General', 'art_season_thumbnails',
                                      1))

                new_metadata_class = old_metadata_class(
                    METADATA_SHOW, METADATA_EPISODE, ART_POSTER, ART_FANART,
                    ART_THUMBNAILS, ART_SEASON_THUMBNAILS)

                metadata_provider_dict[
                    new_metadata_class.name] = new_metadata_class

        # this is the normal codepath for metadata config
        else:
            METADATA_XBMC = check_setting_str(CFG, 'General', 'metadata_xbmc',
                                              '0|0|0|0|0|0')
            METADATA_MEDIABROWSER = check_setting_str(CFG, 'General',
                                                      'metadata_mediabrowser',
                                                      '0|0|0|0|0|0')
            METADATA_PS3 = check_setting_str(CFG, 'General', 'metadata_ps3',
                                             '0|0|0|0|0|0')
            METADATA_WDTV = check_setting_str(CFG, 'General', 'metadata_wdtv',
                                              '0|0|0|0|0|0')
            METADATA_TIVO = check_setting_str(CFG, 'General', 'metadata_tivo',
                                              '0|0|0|0|0|0')
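            # each metadata_* value is six pipe-separated 0/1 flags, assumed to
            # be unpacked by set_config below in the same order as the legacy
            # settings above: show|episode|poster|fanart|thumbnails|season thumbnails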

            for cur_metadata_tuple in [
                (METADATA_XBMC, metadata.xbmc),
                (METADATA_MEDIABROWSER, metadata.mediabrowser),
                (METADATA_PS3, metadata.ps3),
                (METADATA_WDTV, metadata.wdtv),
                (METADATA_TIVO, metadata.tivo),
            ]:

                (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
                tmp_provider = cur_metadata_class.metadata_class()
                tmp_provider.set_config(cur_metadata_config)
                metadata_provider_dict[tmp_provider.name] = tmp_provider

        COMING_EPS_LAYOUT = check_setting_str(CFG, 'GUI', 'coming_eps_layout',
                                              'banner')
        COMING_EPS_DISPLAY_PAUSED = bool(
            check_setting_int(CFG, 'GUI', 'coming_eps_display_paused', 0))
        COMING_EPS_SORT = check_setting_str(CFG, 'GUI', 'coming_eps_sort',
                                            'date')

        newznabData = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
        newznabProviderList = providers.getNewznabProviderList(newznabData)

        providerList = providers.makeProviderList()

        # load provider from plugin directory
        providerPluginList = providers.makePluginProviderList()
        for p in providerPluginList:
            p.Load()

        logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)

        # initialize the main SB database
        db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema)

        # initialize the cache database
        db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)

        # fix up any db problems
        db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck)

        currentSearchScheduler = scheduler.Scheduler(
            searchCurrent.CurrentSearcher(),
            cycleTime=datetime.timedelta(minutes=SEARCH_FREQUENCY),
            threadName="SEARCH",
            runImmediately=True)

        # the interval for this is stored inside the ShowUpdater class
        showUpdaterInstance = showUpdater.ShowUpdater()
        showUpdateScheduler = scheduler.Scheduler(
            showUpdaterInstance,
            cycleTime=showUpdaterInstance.updateInterval,
            threadName="SHOWUPDATER",
            runImmediately=False)

        versionCheckScheduler = scheduler.Scheduler(
            versionChecker.CheckVersion(),
            cycleTime=datetime.timedelta(hours=12),
            threadName="CHECKVERSION",
            runImmediately=True)

        showQueueScheduler = scheduler.Scheduler(
            show_queue.ShowQueue(),
            cycleTime=datetime.timedelta(seconds=3),
            threadName="SHOWQUEUE",
            silent=True)

        searchQueueScheduler = scheduler.Scheduler(
            search_queue.SearchQueue(),
            cycleTime=datetime.timedelta(seconds=3),
            threadName="SEARCHQUEUE",
            silent=True)

        properFinderInstance = properFinder.ProperFinder()
        properFinderScheduler = scheduler.Scheduler(
            properFinderInstance,
            cycleTime=properFinderInstance.updateInterval,
            threadName="FINDPROPERS",
            runImmediately=False)

        autoPostProcesserScheduler = scheduler.Scheduler(
            autoPostProcesser.PostProcesser(),
            cycleTime=datetime.timedelta(minutes=10),
            threadName="POSTPROCESSER",
            runImmediately=True)

        backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(
            searchBacklog.BacklogSearcher(),
            cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
            threadName="BACKLOG",
            runImmediately=True)
        backlogSearchScheduler.action.cycleTime = BACKLOG_SEARCH_FREQUENCY

        showList = []
        loadingShowList = {}

        __INITIALIZED__ = True
        return True
Exemple #18
0
def validateDir(path, dirName, nzbNameOriginal, failed):
    global process_result, returnStr

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
        returnStr += logHelper(
            u"The directory name indicates it failed to extract.",
            logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
        returnStr += logHelper(
            u"The directory name indicates that it was previously rejected for being undersized.",
            logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).upper().startswith('_UNPACK'):
        returnStr += logHelper(
            u"The directory name indicates that this release is in the process of being unpacked.",
            logger.DEBUG)
        return False

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal)
        return False

    if helpers.is_hidden_folder(dirName):
        returnStr += logHelper(u"Ignoring hidden folder: " + dirName,
                               logger.DEBUG)
        return False

    # make sure the dir isn't inside a show dir
    with db.DBConnection() as myDB:
        sqlResults = myDB.select("SELECT * FROM tv_shows")

    for sqlShow in sqlResults:
        if dirName.lower().startswith(
                ek.ek(os.path.realpath, sqlShow["location"]).lower() +
                os.sep) or dirName.lower() == ek.ek(
                    os.path.realpath, sqlShow["location"]).lower():
            returnStr += logHelper(
                u"You're trying to post process an episode that's already been moved to its show dir, skipping",
                logger.ERROR)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for processPath, processDir, fileList in ek.ek(os.walk,
                                                   ek.ek(
                                                       os.path.join, path,
                                                       dirName),
                                                   topdown=False):
        allDirs += processDir
        allFiles += fileList

    videoFiles = filter(helpers.isMediaFile, allFiles)
    allDirs.append(dirName)

    # check if the dir has at least one tv video file
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except InvalidNameException:
            pass

    for cur_dir in allDirs:
        try:
            NameParser().parse(cur_dir, cache_result=False)
            return True
        except InvalidNameException:
            pass

    if sickbeard.UNPACK:
        # search for a packed release
        packedFiles = filter(helpers.isRarFile, allFiles)

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except InvalidNameException:
                pass

    return False
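
The three parse-and-return loops above share one shape; a minimal refactoring
sketch (the helper name is hypothetical, not part of the original module):

def _any_parsable(names):
    # True as soon as any candidate parses as a valid episode name
    for cur_name in names:
        try:
            NameParser().parse(cur_name, cache_result=False)
            return True
        except InvalidNameException:
            pass
    return False

# the tail of validateDir() could then reduce to:
#     return (_any_parsable(videoFiles) or _any_parsable(allDirs) or
#             bool(sickbeard.UNPACK and
#                  _any_parsable(filter(helpers.isRarFile, allFiles))))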
Exemple #19
0
    def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):

        self._checkAuth()
        self.show = show

        results = {}
        searchItems = {}

        searched_scene_season = None
        for epObj in episodes:
            itemList = []

            if search_mode == 'sponly' and searched_scene_season:
                if searched_scene_season == epObj.scene_season:
                    continue

            # mark season searched for season pack searches so we can skip later on
            searched_scene_season = epObj.scene_season

            if search_mode == 'sponly':
                for curString in self._get_season_search_strings(epObj):
                    itemList += self._doSearch(curString, len(episodes))
            else:
                cacheResult = self.cache.searchCache([epObj], manualSearch)
                if len(cacheResult):
                    results.update({epObj.episode: cacheResult[epObj]})
                    continue

                for curString in self._get_episode_search_strings(epObj):
                    itemList += self._doSearch(curString, len(episodes))

            # next episode if no search results
            if not len(itemList):
                continue

            # remove duplicate items
            searchItems[epObj] = itemList

        # if we have cached results, return them
        if len(results):
            return results

        for ep_obj in searchItems:
            for item in searchItems[ep_obj]:

                (title, url) = self._get_title_and_url(item)

                # parse the file name
                try:
                    myParser = NameParser(False, showObj=show, epObj=ep_obj, convert=True)
                    parse_result = myParser.parse(title)
                except InvalidNameException:
                    logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                    continue

                quality = parse_result.quality

                if not (self.show.air_by_date or self.show.sports):
                    if search_mode == 'sponly' and len(parse_result.episode_numbers):
                        logger.log(
                            u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                            logger.DEBUG)
                        continue

                    if not len(parse_result.episode_numbers) and (
                                    parse_result.season_number is not None and parse_result.season_number != ep_obj.season) or (
                                    parse_result.season_number is None and ep_obj.season != 1):
                        logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
                            ep_obj.season) + ", ignoring", logger.DEBUG)
                        continue
                    elif len(parse_result.episode_numbers) and (
                                    parse_result.season_number != ep_obj.season or ep_obj.episode not in parse_result.episode_numbers):
                        logger.log(u"Episode " + title + " isn't " + str(ep_obj.season) + "x" + str(
                            ep_obj.episode) + ", skipping it", logger.DEBUG)
                        continue

                    # we just use the existing info for normal searches
                    actual_season = ep_obj.season
                    actual_episodes = parse_result.episode_numbers
                else:
                    if not (parse_result.air_by_date or parse_result.sports):
                        logger.log(
                            u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                            logger.DEBUG)
                        continue

                    if (parse_result.air_by_date and parse_result.air_date != ep_obj.airdate) or (
                                parse_result.sports and parse_result.sports_event_date != ep_obj.airdate):
                        logger.log("Episode " + title + " didn't air on " + str(ep_obj.airdate) + ", skipping it",
                                   logger.DEBUG)
                        continue

                    with db.DBConnection() as myDB:
                        sql_results = myDB.select(
                            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                            [show.indexerid,
                             parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])

                    if len(sql_results) != 1:
                        logger.log(
                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                            logger.WARNING)
                        continue

                    actual_season = int(sql_results[0]["season"])
                    actual_episodes = [int(sql_results[0]["episode"])]

                # make sure we want the episode
                wantEp = True
                for epNo in actual_episodes:
                    if not show.wantEpisode(actual_season, epNo, quality, manualSearch):
                        wantEp = False
                        break

                if not wantEp:
                    logger.log(
                        u"Ignoring result " + title + " because we don't want an episode that is " +
                        Quality.qualityStrings[
                            quality], logger.DEBUG)

                    continue

                logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

                # make a result object
                ep_objs = []
                for curEp in actual_episodes:
                    ep_objs.append(show.getEpisode(actual_season, curEp))

                result = self.getResult(ep_objs)
                result.url = url
                result.name = title
                result.quality = quality
                result.provider = self
                result.content = None

                if len(ep_objs) == 1:
                    epNum = ep_objs[0].episode
                    logger.log(u"Single episode result.", logger.DEBUG)
                elif len(ep_objs) > 1:
                    epNum = MULTI_EP_RESULT
                    logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
                        parse_result.episode_numbers), logger.DEBUG)
                elif len(ep_objs) == 0:
                    epNum = SEASON_RESULT
                    logger.log(u"Separating full season result to check for later", logger.DEBUG)

                if not result:
                    continue

                if epNum in results:
                    results[epNum].append(result)
                else:
                    results[epNum] = [result]

        return results
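
Note the shape of the returned dict: plain episode numbers key single-episode
matches, while the MULTI_EP_RESULT and SEASON_RESULT sentinels key multi-episode
and full-season candidates; a caller is expected to dispatch on those keys,
roughly (a sketch of assumed caller behaviour, not code from this page):

for ep_num, ep_results in results.items():
    if ep_num == SEASON_RESULT:
        pass  # full-season candidates, weighed against per-episode results
    elif ep_num == MULTI_EP_RESULT:
        pass  # multi-episode candidates, later checked for overlap
    else:
        pass  # per-episode candidates keyed by episode number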
Exemple #20
0
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False, old_status=None, scheduled=False):
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    use_quality_list = None
    if episodes:
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT) and
                     (not scheduled or x.enable_scheduled_backlog)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = getattr(cur_provider, 'search_mode', 'eponly')

        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False, indexer_lookup=False) and
                                        ep_item.show == show, search_results[cur_ep])

                    if cur_ep in found_results:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not getattr(cur_provider, 'search_fallback', False) or 2 == search_count:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])
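            # ('ep', 'sp')['ep' in search_mode] indexes a 2-tuple with a bool
            # (False=0, True=1), flipping 'eponly' <-> 'sponly' for the single
            # fallback pass permitted by the 2 == search_count check above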

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual],
                                                                best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT season, episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [(int(x['season']), int(x['episode'])) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                if not show.wantEpisode(ep_num[0], ep_num[1], season_qual):
                    all_wanted = False
                else:
                    any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                           (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False, indexer_lookup=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent;
                # the user will have to select which eps not to download in their torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
                                                                                multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
                           (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: ' +
                           '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result',
                               logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                                   '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
                                           filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
                    best_result.url = best_result.get_data_func(best_result.url)
                    best_result.get_data_func = None  # consume only once
                    if not best_result.url:
                        continue
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers.get_system_temp_dir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
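                        # hand-rolled scan of the bencoded torrent for its
                        # 'name' field: x is the read position, v the length of
                        # the current length-prefixed string; the second regex
                        # skips integer/list/dict markers between dict keys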
                        try:
                            hdr = re.findall(r'(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            while x < len(td):
                                y = x + v
                                name = 'name' == td[x: y]
                                w = re.findall(r'((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y: y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x: x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name, indexer_lookup=False):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

                if None is not best_result.after_get_data_func:
                    best_result.after_get_data_func(best_result)
                    best_result.after_get_data_func = None  # consume only once

            # add result if its not a duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent providers in Media Providers/Options are allowed for active searching', logger.WARNING)
    elif not search_done:
        logger.log('Failed active search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results
Exemple #21
0
def snatchEpisode(result, endStatus=SNATCHED):  # pylint: disable=too-many-branches, too-many-statements
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    :param result: SearchResult instance to be snatched.
    :param endStatus: the episode status that should be used for the episode object once it's snatched.
    :return: boolean, True on success
    """

    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(
                    days=7):
                result.priority = 1
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name,
                 re.I) is not None:
        endStatus = SNATCHED_PROPER

    if result.url.startswith('magnet') or result.url.endswith('torrent'):
        result.resultType = 'torrent'

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = (endStatus == SNATCHED_PROPER)
            dlResult = nzbget.sendNZB(result, is_proper)
        elif sickbeard.NZB_METHOD == "download_station":
            client = clients.getClientIstance(sickbeard.NZB_METHOD)(
                sickbeard.SYNOLOGY_DSM_HOST, sickbeard.SYNOLOGY_DSM_USERNAME,
                sickbeard.SYNOLOGY_DSM_PASSWORD)
            dlResult = client.sendNZB(result)
        else:
            logger.log(
                u"Unknown NZB action specified in config: " +
                sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # Torrents can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            if not result.content and not result.url.startswith('magnet'):
                if result.provider.login():
                    result.content = result.provider.get_url(result.url,
                                                             returns='content')

            if result.content or result.url.startswith('magnet'):
                client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
                dlResult = client.sendTORRENT(result)
            else:
                logger.log(u"Torrent file content is empty", logger.WARNING)
                dlResult = False
    else:
        logger.log(
            u"Unknown result type, unable to download it ({0!r})".format(
                result.resultType), logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    trakt_data = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(
                    SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(
                    endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            try:
                notifiers.notify_snatch("{0} from {1}".format(
                    curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'),
                    result.provider.name))  # pylint: disable=protected-access
            except Exception:
                # Without this, a failed notification would crash the snatch thread
                # and SR would keep snatching until the notification was sent
                logger.log(u"Failed to send snatch notification", logger.DEBUG)

            trakt_data.append((curEpObj.season, curEpObj.episode))

    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)

    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST:
        logger.log(
            u"Add episodes, showid: indexerid " + str(result.show.indexerid) +
            ", Title " + str(result.show.name) + " to Traktv Watchlist",
            logger.DEBUG)
        if data:
            notifiers.trakt_notifier.update_watchlist(result.show,
                                                      data_episode=data,
                                                      update="add")

    if sql_l:
        main_db_con = db.DBConnection()
        main_db_con.mass_action(sql_l)

    return True
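
The high-priority rule at the top of snatchEpisode is worth isolating; a sketch with hypothetical inputs:

import datetime

def compute_priority(airdates, allow_high_priority=True):
    # -1 = low, 0 = normal, 1 = high; bump to high when any episode
    # aired within the last seven days, as snatchEpisode does above
    priority = 0
    if allow_high_priority:
        for airdate in airdates:
            if datetime.date.today() - airdate <= datetime.timedelta(days=7):
                priority = 1
    return priority

assert compute_priority([datetime.date.today()]) == 1
assert compute_priority([datetime.date.today() - datetime.timedelta(days=30)]) == 0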
Exemple #22
0
def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    """

    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    if 0 < result.properlevel:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            dl_result = nzbget.send_nzb(result)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
            dl_result = False

    # TORRENTs can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        if not result.url.startswith('magnet') and None is not result.get_data_func:
            result.url = result.get_data_func(result.url)
            result.get_data_func = None  # consume only once
            if not result.url:
                return False
        if not result.content and result.url.startswith('magnet-'):
            if sickbeard.TORRENT_DIR:
                filepath = ek.ek(os.path.join, sickbeard.TORRENT_DIR, 'files.txt')
                try:
                    with open(filepath, 'a') as fh:
                        result.url = result.url[7:]
                        fh.write('"%s"\t"%s"\n' % (result.url, sickbeard.TV_DOWNLOAD_DIR))
                    dl_result = True
                except IOError:
                    logger.log(u'Failed to write to %s' % filepath, logger.ERROR)
                    return False
            else:
                logger.log(u'Need to set a torrent blackhole folder', logger.ERROR)
                return False
        # torrents are saved to disk when blackhole mode
        elif 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if result.provider.should_skip() or not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                    return False
            # Snatches torrent with client
            client = clients.get_client_instance(sickbeard.TORRENT_METHOD)()
            dl_result = client.send_torrent(result)

            if getattr(result, 'cache_file', None):
                helpers.remove_file_failed(result.cache_file)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.add_snatched(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(cur_ep_obj.status, result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)

            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

        if cur_ep_obj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

            update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
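
The magnet blackhole branch above appends quoted, tab-separated link/destination pairs to a files.txt, stripping the seven-character 'magnet-' prefix first. A standalone sketch of just that write (the sample link and paths are hypothetical):

import os
import tempfile

def write_magnet_to_blackhole(url, torrent_dir, download_dir):
    # mirror the branch above: strip the 7-char 'magnet-' prefix and
    # append a quoted, tab-separated "link<TAB>destination" line
    filepath = os.path.join(torrent_dir, 'files.txt')
    try:
        with open(filepath, 'a') as fh:
            fh.write('"%s"\t"%s"\n' % (url[7:], download_dir))
        return True
    except IOError:
        return False

tmp_dir = tempfile.mkdtemp()
assert write_magnet_to_blackhole('magnet-?xt=urn:btih:0123', tmp_dir, '/tv')
with open(os.path.join(tmp_dir, 'files.txt')) as fh:
    assert fh.read() == '"?xt=urn:btih:0123"\t"/tv"\n'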
Exemple #23
0
    def findSeasonResults(self, show, season):

        itemList = []
        results = {}

        for cur_string in self._get_season_search_strings(show, season):
            itemList += self._doSearch(cur_string)

        for item in itemList:

            (title, url) = self._get_title_and_url(item)

            quality = self.getQuality(item)

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                continue

            if not show.air_by_date:
                # this check is meaningless for non-season searches
                if (parse_result.season_number is not None and parse_result.season_number != season) or (parse_result.season_number is None and season != 1):
                    logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(season) + ", ignoring")
                    continue

                # we just use the existing info for normal searches
                actual_season = season
                actual_episodes = parse_result.episode_numbers

            else:
                if not parse_result.air_by_date:
                    logger.log(u"This is supposed to be an air-by-date search but the result " + title + " didn't parse as one, skipping it", logger.DEBUG)
                    continue

                myDB = db.DBConnection()
                sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.tvdbid, parse_result.air_date.toordinal()])

                if len(sql_results) != 1:
                    logger.log(u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it", logger.WARNING)
                    continue

                actual_season = int(sql_results[0]["season"])
                actual_episodes = [int(sql_results[0]["episode"])]

            # make sure we want the episode
            wantEp = True
            for epNo in actual_episodes:
                if not show.wantEpisode(actual_season, epNo, quality):
                    wantEp = False
                    break

            if not wantEp:
                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            # make a result object
            epObj = []
            for curEp in actual_episodes:
                epObj.append(show.getEpisode(actual_season, curEp))

            result = self.getResult(epObj)
            result.url = url
            result.name = title
            result.quality = quality

            if len(epObj) == 1:
                epNum = epObj[0].episode
            elif len(epObj) > 1:
                epNum = MULTI_EP_RESULT
                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers), logger.DEBUG)
            elif len(epObj) == 0:
                epNum = SEASON_RESULT
                result.extraInfo = [show]
                logger.log(u"Separating full season result to check for later", logger.DEBUG)

            if epNum in results:
                results[epNum].append(result)
            else:
                results[epNum] = [result]

        return results
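
findSeasonResults buckets each result under either a concrete episode number or a sentinel. A compact sketch of that keying rule; the sentinel values here are hypothetical placeholders for the real module-level constants:

MULTI_EP_RESULT = -1  # hypothetical sentinel values; the real constants
SEASON_RESULT = -2    # live elsewhere in the codebase

def bucket_key(episode_numbers):
    # one episode -> keyed by its number; several -> multi-episode bucket;
    # none -> treated as a full-season result, as in the method above
    if len(episode_numbers) == 1:
        return episode_numbers[0]
    if len(episode_numbers) > 1:
        return MULTI_EP_RESULT
    return SEASON_RESULT

results = {}
for eps in ([3], [4, 5], []):
    results.setdefault(bucket_key(eps), []).append(eps)
assert sorted(results) == [SEASON_RESULT, MULTI_EP_RESULT, 3]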
Exemple #24
0
    def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
        if not sickbeard.USE_SUBTITLES:
            return

        if not sickbeard.subtitles.enabled_service_list():
            logger.log(
                u'Not enough services selected. At least 1 service is required to '
                'search subtitles in the background', logger.WARNING)
            return

        self.amActive = True

        def dhm(td):
            days = td.days
            hours = td.seconds // 60**2
            minutes = (td.seconds // 60) % 60
            ret = (u'', '{0} days, '.format(days))[days > 0] + \
                (u'', '{0} hours, '.format(hours))[hours > 0] + \
                (u'', '{0} minutes'.format(minutes))[minutes > 0]
            if days == 1:
                ret = ret.replace('days', 'day')
            if hours == 1:
                ret = ret.replace('hours', 'hour')
            if minutes == 1:
                ret = ret.replace('minutes', 'minute')
            return ret.rstrip(', ')

        if sickbeard.SUBTITLES_DOWNLOAD_IN_PP:
            self.subtitles_download_in_pp()

        logger.log(u'Checking for missed subtitles', logger.INFO)

        database = db.DBConnection()
        sql_results = database.select(
            "SELECT s.show_name, e.showid, e.season, e.episode, "
            "e.status, e.subtitles, e.subtitles_searchcount AS searchcount, "
            "e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) as age "
            "FROM tv_episodes AS e INNER JOIN tv_shows AS s "
            "ON (e.showid = s.indexer_id) "
            "WHERE s.subtitles = 1 AND e.subtitles NOT LIKE ? "
            "AND e.location != '' AND e.status IN (%s) ORDER BY age ASC" %
            ','.join(['?'] * len(Quality.DOWNLOADED)),
            [datetime.datetime.now().toordinal(),
             wanted_languages(True)] + Quality.DOWNLOADED)

        if not sql_results:
            logger.log(u'No subtitles to download', logger.INFO)
            self.amActive = False
            return

        for ep_to_sub in sql_results:
            try:
                # Encode path to system encoding.
                subtitle_path = ep_to_sub['location'].encode(
                    sickbeard.SYS_ENCODING)
            except UnicodeEncodeError:
                # Fallback to UTF-8.
                subtitle_path = ep_to_sub['location'].encode('utf-8')
            if not os.path.isfile(subtitle_path):
                logger.log(
                    u'Episode file does not exist, cannot download subtitles for {0} {1}'
                    .format(
                        ep_to_sub['show_name'],
                        episode_num(ep_to_sub['season'], ep_to_sub['episode'])
                        or episode_num(ep_to_sub['season'],
                                       ep_to_sub['episode'],
                                       numbering='absolute')), logger.DEBUG)
                continue

            if not needs_subtitles(ep_to_sub['subtitles']):
                logger.log(
                    u'Episode already has all needed subtitles, skipping {0} {1}'
                    .format(
                        ep_to_sub['show_name'],
                        episode_num(ep_to_sub['season'], ep_to_sub['episode'])
                        or episode_num(ep_to_sub['season'],
                                       ep_to_sub['episode'],
                                       numbering='absolute')), logger.DEBUG)
                continue

            try:
                lastsearched = datetime.datetime.strptime(
                    ep_to_sub['lastsearch'], dateTimeFormat)
            except ValueError:
                lastsearched = datetime.datetime.min

            try:
                if not force:
                    now = datetime.datetime.now()
                    days = int(ep_to_sub['age'])
                    delay_time = datetime.timedelta(
                        hours=8 if days < 10 else 7 * 24 if days < 30 else 30 *
                        24)

                    # Search again 8 hours after the last try while the episode is under 10 days old,
                    # then every 7 days while it is under 30 days old, then once a month after that
                    # Will always try an episode regardless of age at least 2 times
                    if lastsearched + delay_time > now and int(
                            ep_to_sub['searchcount']) > 2 and days:
                        logger.log(
                            u'Subtitle search for {0} {1} delayed for {2}'.
                            format(
                                ep_to_sub['show_name'],
                                episode_num(ep_to_sub['season'],
                                            ep_to_sub['episode'])
                                or episode_num(ep_to_sub['season'],
                                               ep_to_sub['episode'],
                                               numbering='absolute'),
                                dhm(lastsearched + delay_time - now)),
                            logger.DEBUG)
                        continue

                logger.log(
                    u'Searching for missing subtitles of {0} {1}'.format(
                        ep_to_sub['show_name'],
                        episode_num(ep_to_sub['season'], ep_to_sub['episode'])
                        or episode_num(ep_to_sub['season'],
                                       ep_to_sub['episode'],
                                       numbering='absolute')), logger.INFO)

                show_object = Show.find(sickbeard.showList,
                                        int(ep_to_sub['showid']))
                if not show_object:
                    logger.log(
                        u'Show with ID {0} not found in the database'.format(
                            ep_to_sub['showid']), logger.DEBUG)
                    continue

                episode_object = show_object.getEpisode(
                    ep_to_sub['season'], ep_to_sub['episode'])
                if isinstance(episode_object, str):
                    logger.log(
                        u'{0} {1} not found in the database'.format(
                            ep_to_sub['show_name'],
                            episode_num(ep_to_sub['season'],
                                        ep_to_sub['episode'])
                            or episode_num(ep_to_sub['season'],
                                           ep_to_sub['episode'],
                                           numbering='absolute')),
                        logger.DEBUG)
                    continue

                try:
                    new_subtitles = episode_object.download_subtitles()
                except Exception as error:
                    logger.log(
                        u'Unable to find subtitles for {0} {1}. Error: {2}'.
                        format(
                            ep_to_sub['show_name'],
                            episode_num(ep_to_sub['season'],
                                        ep_to_sub['episode'])
                            or episode_num(ep_to_sub['season'],
                                           ep_to_sub['episode'],
                                           numbering='absolute'), ex(error)),
                        logger.ERROR)
                    continue

                if new_subtitles:
                    logger.log(u'Downloaded {0} subtitles for {1} {2}'.format(
                        ', '.join(new_subtitles), ep_to_sub['show_name'],
                        episode_num(ep_to_sub['season'], ep_to_sub['episode'])
                        or episode_num(ep_to_sub['season'],
                                       ep_to_sub['episode'],
                                       numbering='absolute')))

            except Exception as error:
                logger.log(
                    u'Error while searching subtitles for {0} {1}. Error: {2}'.
                    format(
                        ep_to_sub['show_name'],
                        episode_num(ep_to_sub['season'], ep_to_sub['episode'])
                        or episode_num(ep_to_sub['season'],
                                       ep_to_sub['episode'],
                                       numbering='absolute'), ex(error)),
                    logger.ERROR)
                continue

        logger.log(u'Finished checking for missed subtitles', logger.INFO)
        self.amActive = False
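
The retry schedule buried in the loop above reduces to a single expression; pulled out as a sketch:

import datetime

def subtitle_search_delay(age_days):
    # matches the delay_time computation above: 8 hours while the episode
    # is under 10 days old, weekly under 30 days, monthly afterwards
    return datetime.timedelta(
        hours=8 if age_days < 10 else 7 * 24 if age_days < 30 else 30 * 24)

assert subtitle_search_delay(1) == datetime.timedelta(hours=8)
assert subtitle_search_delay(15) == datetime.timedelta(days=7)
assert subtitle_search_delay(60) == datetime.timedelta(days=30)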
Exemple #25
0
                logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

        try:
            self.show.saveToDB()
        except Exception as e:
            logger.log(u"Error saving the episode to the database: " + str(e),
                       logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

        # if they gave a custom status then change all the eps to it
        if self.default_status != SKIPPED:
            logger.log(
                u"Setting all episodes to the specified default status: " +
                str(self.default_status))
            myDB = db.DBConnection()
            myDB.action(
                "UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ?",
                [self.default_status, SKIPPED, self.show.tvdbid])

        # if they started with WANTED eps then run the backlog
        if self.default_status == WANTED:
            logger.log(
                u"Launching backlog for this show since its episodes are WANTED"
            )
            sickbeard.backlogSearchScheduler.action.searchBacklog([self.show])

        self.show.flushEpisodes()

        self.finish()
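
The default-status step above is one parameterized UPDATE; the same statement can be exercised against a throwaway in-memory SQLite database (the status values are hypothetical stand-ins for the real constants):

import sqlite3

SKIPPED, WANTED = 5, 3  # hypothetical stand-ins for the real constants
con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE tv_episodes (showid INTEGER, status INTEGER)')
con.executemany('INSERT INTO tv_episodes VALUES (?, ?)',
                [(1, SKIPPED), (1, SKIPPED), (2, SKIPPED)])
# same shape as the myDB.action() call above
con.execute('UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ?',
            [WANTED, SKIPPED, 1])
assert con.execute('SELECT COUNT(*) FROM tv_episodes WHERE status = ?',
                   [WANTED]).fetchone()[0] == 2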
Exemple #26
0
    def _parse_string(self, name, skip_scene_detection=False):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        if not name:
            return

        matches = []
        bestResult = None

        for (cur_regex_num, cur_regex_name,
             cur_regex) in self.compiled_regexes:
            match = cur_regex.match(name)

            if not match:
                continue

            result = ParseResult(name)
            result.which_regex = [cur_regex_name]
            result.score = 0 - cur_regex_num

            named_groups = match.groupdict().keys()

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(
                        result.series_name)
                    result.score += 1

            if 'series_num' in named_groups and match.group('series_num'):
                result.score += 1

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                if cur_regex_name == 'bare' and tmp_season in (19, 20):
                    continue
                if cur_regex_name == 'fov' and tmp_season > 500:
                    continue

                result.season_number = tmp_season
                result.score += 1

            if 'ep_num' in named_groups:
                ep_num = self._convert_number(match.group('ep_num'))
                if 'extra_ep_num' in named_groups and match.group(
                        'extra_ep_num'):
                    tmp_episodes = range(
                        ep_num,
                        self._convert_number(match.group('extra_ep_num')) + 1)
                    if len(tmp_episodes) > 4:
                        matches = []
                        break
                else:
                    tmp_episodes = [ep_num]

                result.episode_numbers = tmp_episodes
                result.score += 3

            if 'ep_ab_num' in named_groups:
                ep_ab_num = self._convert_number(match.group('ep_ab_num'))
                if 'extra_ab_ep_num' in named_groups and match.group(
                        'extra_ab_ep_num'):
                    result.ab_episode_numbers = range(
                        ep_ab_num,
                        self._convert_number(match.group('extra_ab_ep_num')) +
                        1)
                    result.score += 1
                else:
                    result.ab_episode_numbers = [ep_ab_num]
                result.score += 1

            if 'air_date' in named_groups:
                air_date = match.group('air_date')
                try:
                    # skip '11.22.63': that is a show title, not an air date
                    assert re.sub(r'[^\d]*', '', air_date) != '112263'
                    result.air_date = dateutil.parser.parse(
                        air_date, fuzzy_with_tokens=True)[0].date()
                    result.score += 1
                except Exception:
                    continue

            if 'extra_info' in named_groups:
                tmp_extra_info = match.group('extra_info')

                # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)',
                        tmp_extra_info, re.I):
                    continue
                result.extra_info = tmp_extra_info
                result.score += 1

            if 'release_group' in named_groups:
                result.release_group = match.group('release_group')
                result.score += 1

            if 'version' in named_groups:
                # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
                version = match.group('version')
                if version:
                    result.version = version
                else:
                    result.version = 1
            else:
                result.version = -1

            matches.append(result)

        if matches:
            # pick best match with highest score based on placement
            bestResult = max(sorted(matches,
                                    reverse=True,
                                    key=lambda x: x.which_regex),
                             key=lambda x: x.score)

            show = None
            if not self.naming_pattern:
                # try and create a show object for this result
                show = helpers.get_show(bestResult.series_name,
                                        self.tryIndexers)

            # confirm passed in show object indexer id matches result show object indexer id
            if show:
                if self.showObj and show.indexerid != self.showObj.indexerid:
                    show = None
                bestResult.show = show
            elif not show and self.showObj:
                bestResult.show = self.showObj

            # if this is a naming pattern test or result doesn't have a show object then return best result
            if not bestResult.show or self.naming_pattern:
                return bestResult

            # get quality
            bestResult.quality = common.Quality.nameQuality(
                name, bestResult.show.is_anime)

            new_episode_numbers = []
            new_season_numbers = []
            new_absolute_numbers = []

            # if we have an air-by-date show then get the real season/episode numbers
            if bestResult.is_air_by_date:
                airdate = bestResult.air_date.toordinal()
                main_db_con = db.DBConnection()
                sql_result = main_db_con.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
                    [
                        bestResult.show.indexerid, bestResult.show.indexer,
                        airdate
                    ])

                season_number = None
                episode_numbers = []

                if sql_result:
                    season_number = int(sql_result[0][0])
                    episode_numbers = [int(sql_result[0][1])]

                if season_number is None or not episode_numbers:
                    try:
                        lINDEXER_API_PARMS = sickbeard.indexerApi(
                            bestResult.show.indexer).api_params.copy()

                        lINDEXER_API_PARMS[
                            'language'] = bestResult.show.lang or sickbeard.INDEXER_DEFAULT_LANGUAGE

                        t = sickbeard.indexerApi(
                            bestResult.show.indexer).indexer(
                                **lINDEXER_API_PARMS)

                        epObj = t[bestResult.show.indexerid].airedOn(
                            bestResult.air_date)[0]

                        season_number = int(epObj["seasonnumber"])
                        episode_numbers = [int(epObj["episodenumber"])]
                    except sickbeard.indexer_episodenotfound:
                        logger.log(
                            "Unable to find episode with date " +
                            six.text_type(bestResult.air_date) + " for show " +
                            bestResult.show.name + ", skipping",
                            logger.WARNING)
                        episode_numbers = []
                    except sickbeard.indexer_error as e:
                        logger.log(
                            "Unable to contact " + sickbeard.indexerApi(
                                bestResult.show.indexer).name + ": " + ex(e),
                            logger.WARNING)
                        episode_numbers = []

                for epNo in episode_numbers:
                    s = season_number
                    e = epNo

                    if bestResult.show.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(
                            bestResult.show.indexerid, bestResult.show.indexer,
                            season_number, epNo)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            elif bestResult.show.is_anime and bestResult.ab_episode_numbers:
                bestResult.scene_season = scene_exceptions.get_scene_exception_by_name(
                    bestResult.series_name)[1]
                for epAbsNo in bestResult.ab_episode_numbers:
                    a = epAbsNo

                    if bestResult.show.is_scene and not skip_scene_detection:
                        a = scene_numbering.get_indexer_absolute_numbering(
                            bestResult.show.indexerid, bestResult.show.indexer,
                            epAbsNo, True, bestResult.scene_season)

                    (s, e) = helpers.get_all_episodes_from_absolute_number(
                        bestResult.show, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)

            elif bestResult.season_number and bestResult.episode_numbers:
                for epNo in bestResult.episode_numbers:
                    s = bestResult.season_number
                    e = epNo

                    if bestResult.show.is_scene and not skip_scene_detection:
                        (s, e) = scene_numbering.get_indexer_numbering(
                            bestResult.show.indexerid, bestResult.show.indexer,
                            bestResult.season_number, epNo)
                    if bestResult.show.is_anime:
                        a = helpers.get_absolute_number_from_season_and_episode(
                            bestResult.show, s, e)
                        if a:
                            new_absolute_numbers.append(a)

                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            # need to do a quick sanity check here.  It's possible that we now have episodes
            # from more than one season (by tvdb numbering), and this is just too much
            # for sickbeard, so we'd need to flag it.
            new_season_numbers = list(
                set(new_season_numbers))  # remove duplicates
            if len(new_season_numbers) > 1:
                raise InvalidNameException(
                    "Scene numbering results episodes from "
                    "seasons %s, (i.e. more than one) and "
                    "sickrage does not support this.  "
                    "Sorry." % (six.text_type(new_season_numbers)))

            # I guess it's possible that we'd have duplicate episodes too, so let's
            # eliminate them
            new_episode_numbers = list(set(new_episode_numbers))
            new_episode_numbers.sort()

            # maybe even duplicate absolute numbers so why not do them as well
            new_absolute_numbers = list(set(new_absolute_numbers))
            new_absolute_numbers.sort()

            if new_absolute_numbers:
                bestResult.ab_episode_numbers = new_absolute_numbers

            if new_season_numbers and new_episode_numbers:
                bestResult.episode_numbers = new_episode_numbers
                bestResult.season_number = new_season_numbers[0]

            if bestResult.show.is_scene and not skip_scene_detection:
                logger.log(
                    "Converted parsed result " + bestResult.original_name +
                    " into " + six.text_type(bestResult), logger.DEBUG)

        # CPU sleep
        time.sleep(0.02)

        return bestResult
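
The best-match selection above is subtle: matches are first sorted by regex name in descending order purely as a tie-breaker, then max() returns the first entry with the highest score. A toy demonstration with a hypothetical Match class:

class Match(object):
    def __init__(self, score, which_regex):
        self.score = score
        self.which_regex = which_regex

matches = [Match(2, ['fov']), Match(3, ['bare']), Match(3, ['standard'])]
# sort by which_regex descending, then take the first maximal score --
# 'standard' wins the tie against 'bare'
best = max(sorted(matches, reverse=True, key=lambda x: x.which_regex),
           key=lambda x: x.score)
assert best.score == 3 and best.which_regex == ['standard']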
Exemple #27
0
def fix_xem_numbering(indexer_id, indexer):
    """
    Fix unset or non-monotonic absolute and scene numbering values for an
    entire show, writing the corrections back to the tv_episodes table.
    Does nothing if indexer_id is None.
    """
    if indexer_id is None:
        return {}

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    myDB = db.DBConnection()
    rows = myDB.select(
        'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ?',
        [indexer, indexer_id])

    last_absolute_number = None
    last_scene_season = None
    last_scene_episode = None
    last_scene_absolute_number = None

    update_absolute_number = False
    update_scene_season = False
    update_scene_episode = False
    update_scene_absolute_number = False

    logger.log(
        u'Fixing any XEM scene mapping issues for show %s on %s' % (
            indexer_id,
            sickbeard.indexerApi(indexer).name,
        ), logger.DEBUG)

    cl = []
    for row in rows:
        season = int(row['season'])
        episode = int(row['episode'])

        if not int(row['scene_season']) and last_scene_season:
            scene_season = last_scene_season + 1
            update_scene_season = True
        else:
            scene_season = int(row['scene_season'])
            if last_scene_season and scene_season < last_scene_season:
                scene_season = last_scene_season + 1
                update_scene_season = True

        if not int(row['scene_episode']) and last_scene_episode:
            scene_episode = last_scene_episode + 1
            update_scene_episode = True
        else:
            scene_episode = int(row['scene_episode'])
            if last_scene_episode and scene_episode < last_scene_episode:
                scene_episode = last_scene_episode + 1
                update_scene_episode = True

        # check for unset values and correct them
        if not int(row['absolute_number']) and last_absolute_number:
            absolute_number = last_absolute_number + 1
            update_absolute_number = True
        else:
            absolute_number = int(row['absolute_number'])
            if last_absolute_number and absolute_number < last_absolute_number:
                absolute_number = last_absolute_number + 1
                update_absolute_number = True

        if not int(
                row['scene_absolute_number']) and last_scene_absolute_number:
            scene_absolute_number = last_scene_absolute_number + 1
            update_scene_absolute_number = True
        else:
            scene_absolute_number = int(row['scene_absolute_number'])
            if last_scene_absolute_number and scene_absolute_number < last_scene_absolute_number:
                scene_absolute_number = last_scene_absolute_number + 1
                update_scene_absolute_number = True

        # store values for lookup on next iteration
        last_absolute_number = absolute_number
        last_scene_season = scene_season
        last_scene_episode = scene_episode
        last_scene_absolute_number = scene_absolute_number

        if update_absolute_number:
            cl.append([
                "UPDATE tv_episodes SET absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
                [absolute_number, indexer_id, season, episode]
            ])
            update_absolute_number = False

        if update_scene_season:
            cl.append([
                "UPDATE tv_episodes SET scene_season = ? WHERE showid = ? AND season = ? AND episode = ?",
                [scene_season, indexer_id, season, episode]
            ])
            update_scene_season = False

        if update_scene_episode:
            cl.append([
                "UPDATE tv_episodes SET scene_episode = ? WHERE showid = ? AND season = ? AND episode = ?",
                [scene_episode, indexer_id, season, episode]
            ])
            update_scene_episode = False

        if update_scene_absolute_number:
            cl.append([
                "UPDATE tv_episodes SET scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
                [scene_absolute_number, indexer_id, season, episode]
            ])
            update_scene_absolute_number = False

    if len(cl) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(cl)
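
All four numbering columns above get the same repair rule applied independently; factored out over plain ints, the rule is (a sketch):

def fix_sequence(values):
    # as above: an unset (zero) value, or one lower than its predecessor,
    # becomes predecessor + 1, and the result feeds the next iteration
    fixed, last = [], None
    for value in values:
        if (not value and last) or (last and value < last):
            value = last + 1
        fixed.append(value)
        last = value
    return fixed

assert fix_sequence([1, 0, 3, 2]) == [1, 2, 3, 4]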
Exemple #28
0
def map_indexers_to_show(show_obj, update=False, force=False, recheck=False):
    """

    :return: mapped ids
    :rtype: dict
    :param show_obj: TVShow Object
    :param update: add missing + previously not found ids
    :param force: search for and replace all mapped/missing ids (excluding NO_AUTOMATIC_CHANGE flagged)
    :param recheck: load all ids, don't remove existing
    """
    mapped = {}

    # init mapped indexers object
    for indexer in indexer_list:
        mapped[indexer] = {
            'id':
            (0, show_obj.indexerid)[int(indexer) == int(show_obj.indexer)],
            'status':
            (MapStatus.NONE,
             MapStatus.SOURCE)[int(indexer) == int(show_obj.indexer)],
            'date':
            datetime.date.fromordinal(1)
        }

    my_db = db.DBConnection()
    sql_results = my_db.select(
        'SELECT' +
        ' * FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?',
        [show_obj.indexerid, show_obj.indexer])

    # for each mapped entry
    for curResult in sql_results:
        date = tryInt(curResult['date'])
        mapped[int(curResult['mindexer'])] = {
            'status': int(curResult['status']),
            'id': int(curResult['mindexer_id']),
            'date': datetime.date.fromordinal(date if 0 < date else 1)
        }

    # get list of needed ids
    mis_map = [
        k for k, v in mapped.iteritems() if
        (v['status'] not in [MapStatus.NO_AUTOMATIC_CHANGE, MapStatus.SOURCE])
        and ((0 == v['id'] and MapStatus.NONE == v['status']) or force or
             recheck or (update and 0 == v['id'] and k not in defunct_indexer))
    ]
    if mis_map:
        url_tvmaze = TvmazeDict()
        url_trakt = TraktDict()
        if show_obj.indexer == INDEXER_TVDB or show_obj.indexer == INDEXER_TVRAGE:
            url_tvmaze[show_obj.indexer] = show_obj.indexerid
            url_trakt[show_obj.indexer] = show_obj.indexerid
        elif show_obj.indexer == INDEXER_TVMAZE:
            url_tvmaze[INDEXER_TVMAZE] = show_obj.indexerid
        if show_obj.imdbid and re.search(r'\d+$', show_obj.imdbid):
            url_tvmaze[INDEXER_IMDB] = tryInt(
                re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
            url_trakt[INDEXER_IMDB] = tryInt(
                re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
        for m, v in mapped.iteritems():
            if m != show_obj.indexer and m in [INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_IMDB] and \
                            0 < v.get('id', 0):
                url_tvmaze[m] = v['id']

        new_ids = NewIdDict()

        if isinstance(show_obj.imdbid, basestring) and re.search(
                r'\d+$', show_obj.imdbid):
            new_ids[INDEXER_IMDB] = tryInt(
                re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))

        if 0 < len(url_tvmaze):
            new_ids.update(get_tvmaze_ids(url_tvmaze))

        for m, v in new_ids.iteritems():
            if m != show_obj.indexer and m in [
                    INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_IMDB
            ] and 0 < v:
                url_trakt[m] = v

        if url_trakt:
            new_ids.update(get_trakt_ids(url_trakt))

        if INDEXER_TVMAZE not in new_ids:
            new_url_tvmaze = TvmazeDict()
            for k, v in new_ids.iteritems():
                if k != show_obj.indexer and k in [INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_IMDB] \
                        and 0 < v and k not in url_tvmaze:
                    new_url_tvmaze[k] = v

            if 0 < len(new_url_tvmaze):
                new_ids.update(get_tvmaze_ids(new_url_tvmaze))

        if INDEXER_TVMAZE not in new_ids:
            f_date = get_premieredate(show_obj)
            if f_date and f_date is not datetime.date.fromordinal(1):
                tvids = {
                    k: v
                    for k, v in get_tvmaze_by_name(show_obj.name,
                                                   f_date).iteritems()
                    if k == INDEXER_TVMAZE or k not in new_ids
                    or new_ids.get(k) in (None, 0, '', MapStatus.NOT_FOUND)
                }
                new_ids.update(tvids)

        new_ids = check_missing_trakt_id(new_ids, show_obj, url_trakt)

        if INDEXER_IMDB not in new_ids:
            new_ids.update(
                get_imdbid_by_name(show_obj.name, show_obj.startyear))
            new_ids = check_missing_trakt_id(new_ids, show_obj, url_trakt)

        if INDEXER_TMDB in mis_map \
                and (None is new_ids.get(INDEXER_TMDB) or MapStatus.NOT_FOUND == new_ids.get(INDEXER_TMDB)) \
                and (0 < mapped.get(INDEXER_TVDB, {'id': 0}).get('id', 0) or 0 < new_ids.get(INDEXER_TVDB, 0)
                     or 0 < mapped.get(INDEXER_IMDB, {'id': 0}).get('id', 0) or 0 < new_ids.get(INDEXER_IMDB, 0)
                     or 0 < mapped.get(INDEXER_TVRAGE, {'id': 0}).get('id', 0) or 0 < new_ids.get(INDEXER_TVRAGE, 0)):
            try:
                tmdb = TMDB(sickbeard.TMDB_API_KEY)
                for d in [INDEXER_TVDB, INDEXER_IMDB, INDEXER_TVRAGE]:
                    c = (new_ids.get(d), mapped.get(d, {
                        'id': 0
                    }).get('id'))[0 < mapped.get(d, {
                        'id': 0
                    }).get('id', 0)]
                    if 0 >= c:
                        continue
                    if INDEXER_IMDB == d:
                        c = 'tt%07d' % c
                    if None is not c and 0 < c:
                        tmdb_data = tmdb.Find(c).info(
                            {'external_source': tmdb_ids[d]})
                        if isinstance(tmdb_data, dict) \
                                and 'tv_results' in tmdb_data and 0 < len(tmdb_data['tv_results']) \
                                and 'id' in tmdb_data['tv_results'][0] and 0 < tryInt(tmdb_data['tv_results'][0]['id']):
                            new_ids[INDEXER_TMDB] = tryInt(
                                tmdb_data['tv_results'][0]['id'])
                            break
            except (StandardError, Exception):
                pass

            if INDEXER_TMDB not in new_ids:
                try:
                    tmdb = TMDB(sickbeard.TMDB_API_KEY)
                    tmdb_data = tmdb.Search().tv(
                        params={
                            'query': clean_show_name(show_obj.name),
                            'first_air_date_year': show_obj.startyear
                        })
                    for s in tmdb_data.get('results'):
                        if clean_show_name(s['name']) == clean_show_name(
                                show_obj.name):
                            new_ids[INDEXER_TMDB] = tryInt(s['id'])
                            break
                except (StandardError, Exception):
                    pass

        for i in indexer_list:
            if i != show_obj.indexer and i in mis_map and 0 != new_ids.get(
                    i, 0):
                if 0 > new_ids[i]:
                    mapped[i] = {'status': new_ids[i], 'id': 0}
                elif force or not recheck or 0 >= mapped.get(i, {
                        'id': 0
                }).get('id', 0):
                    mapped[i] = {'status': MapStatus.NONE, 'id': new_ids[i]}

        if [
                k for k in mis_map if 0 != mapped.get(k, {
                    'id': 0,
                    'status': 0
                })['id'] or mapped.get(k, {
                    'id': 0,
                    'status': 0
                })['status'] not in [MapStatus.NONE, MapStatus.SOURCE]
        ]:
            sql_l = []
            today = datetime.date.today()
            date = today.toordinal()
            for indexer in indexer_list:

                if show_obj.indexer == indexer or indexer not in mis_map:
                    continue

                if 0 != mapped[indexer]['id'] or MapStatus.NONE != mapped[
                        indexer]['status']:
                    mapped[indexer]['date'] = today
                    sql_l.append([
                        'INSERT OR REPLACE INTO indexer_mapping (' +
                        'indexer_id, indexer, mindexer_id, mindexer, date, status) VALUES (?,?,?,?,?,?)',
                        [
                            show_obj.indexerid, show_obj.indexer,
                            mapped[indexer]['id'], indexer, date,
                            mapped[indexer]['status']
                        ]
                    ])
                else:
                    sql_l.append([
                        'DELETE' +
                        ' FROM indexer_mapping WHERE indexer_id = ? AND indexer = ? AND mindexer = ?',
                        [show_obj.indexerid, show_obj.indexer, indexer]
                    ])

            if 0 < len(sql_l):
                logger.log(
                    'Adding indexer mapping to DB for show: %s' %
                    show_obj.name, logger.DEBUG)
                my_db = db.DBConnection()
                my_db.mass_action(sql_l)

    show_obj.ids = mapped
    return mapped
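
A note on the (a, b)[condition] indexing used when mapped is initialised above: it is an old Python 2 idiom equivalent to 'b if condition else a', since True indexes as 1. For example, with hypothetical values:

indexer, show_indexer, show_id = 1, 1, 12345
entry = {
    # picks the second element when this is the show's own indexer
    'id': (0, show_id)[int(indexer) == int(show_indexer)],
    'status': ('none', 'source')[int(indexer) == int(show_indexer)],
}
assert entry == {'id': 12345, 'status': 'source'}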
Exemple #29
0
    def _getProperList(self):

        propers = {}

        # for each provider get a list of the propers
        for curProvider in providers.sortedProviderList():

            if not curProvider.isActive():
                continue

            date = datetime.datetime.today() - datetime.timedelta(days=2)

            logger.log(u"Searching for any new PROPER releases from " +
                       curProvider.name)
            curPropers = curProvider.findPropers(date)

        # if they haven't been added by a different provider then add the proper to the list
            for x in curPropers:
                name = self._genericName(x.name)

                if name not in propers:
                    logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x

        # take the list of unique propers and sort it by date, newest first
        sortedPropers = sorted(propers.values(),
                               key=operator.attrgetter('date'),
                               reverse=True)
        finalPropers = []

        for curProper in sortedPropers:

            # parse the file name
            try:
                myParser = NameParser(False)
                parse_result = myParser.parse(curProper.name)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + curProper.name +
                    " into a valid episode", logger.DEBUG)
                continue

            if not parse_result.episode_numbers:
                logger.log(
                    u"Ignoring " + curProper.name +
                    " because it's for a full season rather than specific episode",
                    logger.DEBUG)
                continue

            # populate our Proper instance
            if parse_result.air_by_date:
                curProper.season = -1
                curProper.episode = parse_result.air_date
            else:
                curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
                curProper.episode = parse_result.episode_numbers[0]
            curProper.quality = Quality.nameQuality(curProper.name)

            # for each show in our list
            for curShow in sickbeard.showList:

                if not parse_result.series_name:
                    continue

                genericName = self._genericName(parse_result.series_name)

                # get the scene name masks
                sceneNames = set(
                    show_name_helpers.makeSceneShowSearchStrings(curShow))

                # for each scene name mask
                for curSceneName in sceneNames:

                    # if it matches
                    if genericName == self._genericName(curSceneName):
                        logger.log(
                            u"Successful match! Result " +
                            parse_result.series_name + " matched to show " +
                            curShow.name, logger.DEBUG)

                        # set the tvdbid in the db to the show's tvdbid
                        curProper.tvdbid = curShow.tvdbid

                        # since we found it, break out
                        break

                # if we found something in the inner for loop break out of this one
                if curProper.tvdbid != -1:
                    break

            if curProper.tvdbid == -1:
                continue

            if not show_name_helpers.filterBadReleases(curProper.name):
                logger.log(
                    u"Proper " + curProper.name +
                    " isn't a valid scene release that we want, igoring it",
                    logger.DEBUG)
                continue

            # if we have an air-by-date show then get the real season/episode numbers
            if curProper.season == -1 and curProper.tvdbid:
                showObj = helpers.findCertainShow(sickbeard.showList,
                                                  curProper.tvdbid)
                if not showObj:
                    logger.log(
                        u"This should never have happened, post a bug about this!",
                        logger.ERROR)
                    raise Exception("BAD STUFF HAPPENED")

                tvdb_lang = showObj.lang
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                if tvdb_lang and tvdb_lang != 'en':
                    ltvdb_api_parms['language'] = tvdb_lang

                try:
                    t = tvdb_api.Tvdb(**ltvdb_api_parms)
                    epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                    curProper.season = int(epObj["seasonnumber"])
                    curProper.episodes = [int(epObj["episodenumber"])]
                except tvdb_exceptions.tvdb_episodenotfound:
                    logger.log(
                        u"Unable to find episode with date " +
                        str(curProper.episode) + " for show " +
                        parse_result.series_name + ", skipping",
                        logger.WARNING)
                    continue

            # check if we actually want this proper (if it's the right quality)
            sqlResults = db.DBConnection().select(
                "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [curProper.tvdbid, curProper.season, curProper.episode])
            if not sqlResults:
                continue
            oldStatus, oldQuality = Quality.splitCompositeStatus(
                int(sqlResults[0]["status"]))

            # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
            if oldStatus not in (DOWNLOADED,
                                 SNATCHED) or oldQuality != curProper.quality:
                continue

            # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
            if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season,
                                           curProper.episode) not in map(
                                               operator.attrgetter(
                                                   'tvdbid', 'season',
                                                   'episode'), finalPropers):
                logger.log(u"Found a proper that we need: " +
                           str(curProper.name))
                finalPropers.append(curProper)

        return finalPropers
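
_genericName is not shown in this snippet, but the matching above only requires that it normalise both sides the same way; a plausible stand-in, offered only as an assumption:

import re

def generic_name(name):
    # hypothetical normaliser: collapse punctuation and underscores to
    # single spaces and lowercase, so scene-name masks compare cleanly
    return re.sub(r'[\W_]+', ' ', name).strip().lower()

assert generic_name('Some.Show-2012') == generic_name('Some_Show 2012')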
Exemple #30
0
def trimHistory():
    """Trims history table to 1 month of history from today"""
    myDB = db.DBConnection('failed.db')
    myDB.action("DELETE FROM history WHERE date < " +
                str((datetime.datetime.today() - datetime.timedelta(days=30)
                     ).strftime(History.date_format)))
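
The cutoff above is embedded in the SQL as a bare literal, so it relies on History.date_format producing an all-digit timestamp that sorts numerically; assuming the common '%Y%m%d%H%M%S' format:

import datetime

date_format = '%Y%m%d%H%M%S'  # assumed value of History.date_format
cutoff = (datetime.datetime.today()
          - datetime.timedelta(days=30)).strftime(date_format)
assert len(cutoff) == 14 and cutoff.isdigit()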