Esempio n. 1
0
    def sync_trakt_shows(self):
        """Sync the Trakt shows watchlist into the local show list.

        Each watchlisted show is searched for locally by every configured
        indexer id, then by the IMDB and Trakt external ids.  Shows that
        are still missing are added via ``self.add_show`` using the
        default Trakt indexer; the initial status depends on
        ``app.TRAKT_METHOD_ADD``.
        """
        if not self.show_watchlist:
            log.info('No shows found in your Trakt watchlist. Nothing to sync')
            return

        trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

        for trakt_show in self.show_watchlist:
            # Disambiguate the display name with the year when the Trakt
            # slug itself carries the year suffix.
            if trakt_show.year and trakt_show.ids['ids']['slug'].endswith(
                    str(trakt_show.year)):
                show_name = f'{trakt_show.title} ({trakt_show.year})'
            else:
                show_name = trakt_show.title

            show = None
            indexer = None
            for i in indexerConfig:
                trakt_indexer = get_trakt_indexer(i)
                indexer_id = trakt_show.ids['ids'].get(trakt_indexer, -1)
                indexer = indexerConfig[i]['id']
                show = Show.find_by_id(app.showList, indexer, indexer_id)
                if show:
                    break
            if not show:
                # If can't find with available indexers try IMDB
                trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
                indexer_id = trakt_show.ids['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
            if not show:
                # If can't find with available indexers try TRAKT
                trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
                indexer_id = trakt_show.ids['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_TRAKT,
                                       indexer_id)

            if show:
                # Already in the library; nothing to add.
                continue

            indexer_id = trakt_show.ids['ids'].get(
                get_trakt_indexer(trakt_default_indexer), -1)
            if int(app.TRAKT_METHOD_ADD) != 2:
                self.add_show(trakt_default_indexer, indexer_id, show_name,
                              SKIPPED)
            else:
                self.add_show(trakt_default_indexer, indexer_id, show_name,
                              WANTED)

            if int(app.TRAKT_METHOD_ADD) == 1 and indexer:
                # NOTE(review): `indexer` is whatever the lookup loop last
                # assigned, while the show was added with
                # `trakt_default_indexer` — confirm these always match.
                new_show = Show.find_by_id(app.showList, indexer, indexer_id)

                if new_show:
                    set_episode_to_wanted(new_show, 1, 1)
                else:
                    # Fixed: a space was missing between the two concatenated
                    # fragments, producing "show.Pilot" in the log.
                    log.warning(
                        'Unable to find the new added show. '
                        'Pilot will be set to wanted in the next Trakt run')
                    self.todo_wanted.append((indexer, indexer_id, 1, 1))
        log.debug('Synced shows with Trakt watchlist')
Esempio n. 2
0
    def sync_trakt_shows(self):
        """Sync the Trakt shows watchlist into the local show list.

        Each watchlisted show is searched for locally by every configured
        indexer id, then by the IMDB and Trakt external ids.  Shows that
        are still missing are added via ``self.add_show`` using the
        default Trakt indexer; the initial status depends on
        ``app.TRAKT_METHOD_ADD``.
        """
        if not self.show_watchlist:
            log.info('No shows found in your Trakt watchlist. Nothing to sync')
            return

        trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

        for watchlisted_show in self.show_watchlist:
            trakt_show = watchlisted_show['show']

            # Disambiguate the display name with the year when the Trakt
            # slug itself carries the year suffix.
            if trakt_show['year'] and trakt_show['ids']['slug'].endswith(str(trakt_show['year'])):
                show_name = '{title} ({year})'.format(title=trakt_show['title'], year=trakt_show['year'])
            else:
                show_name = trakt_show['title']

            show = None
            indexer = None
            for i in indexerConfig:
                trakt_indexer = get_trakt_indexer(i)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                indexer = indexerConfig[i]['id']
                show = Show.find_by_id(app.showList, indexer, indexer_id)
                if show:
                    break
            if not show:
                # If can't find with available indexers try IMDB
                trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
            if not show:
                # If can't find with available indexers try TRAKT
                trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_TRAKT, indexer_id)

            if show:
                # Already in the library; nothing to add.
                continue

            indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
            if int(app.TRAKT_METHOD_ADD) != 2:
                self.add_show(trakt_default_indexer, indexer_id, show_name, SKIPPED)
            else:
                self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED)

            if int(app.TRAKT_METHOD_ADD) == 1 and indexer:
                new_show = Show.find_by_id(app.showList, indexer, indexer_id)

                if new_show:
                    set_episode_to_wanted(new_show, 1, 1)
                else:
                    # Fixed: a space was missing between the two concatenated
                    # fragments, producing "show.Pilot" in the log.
                    log.warning('Unable to find the new added show. '
                                'Pilot will be set to wanted in the next Trakt run')
                    self.todoWanted.append(indexer_id)
        log.debug('Synced shows with Trakt watchlist')
Esempio n. 3
0
    def sync_trakt_episodes(self):
        """Sync the Trakt episodes watchlist into the local library.

        For each watchlisted episode, locate its show locally (by each
        configured indexer, then by the IMDB and Trakt external ids).
        Missing shows are added once via ``self.add_show``; for shows
        already in the library, the watchlisted episode is set to wanted
        unless the show is paused or the item refers to season 0.
        """
        if not self.episode_watchlist:
            log.info(
                'No episodes found in your Trakt watchlist. Nothing to sync')
            return

        added_shows = []
        trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

        for watchlist_item in self.episode_watchlist:
            trakt_show = watchlist_item.show
            trakt_episode = watchlist_item.episode
            trakt_season = watchlist_item.season

            show = None

            for i in indexerConfig:
                trakt_indexer = get_trakt_indexer(i)
                if not trakt_indexer:
                    # Trakt has no id mapping for this indexer.
                    continue

                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                indexer = indexerConfig[i]['id']
                show = Show.find_by_id(app.showList, indexer, indexer_id)
                if show:
                    break

            if not show:
                # If can't find with available indexers try IMDB
                trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
            if not show:
                # If can't find with available indexers try TRAKT
                trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_TRAKT,
                                       indexer_id)

            # If we can't find the show, add it with the default trakt indexer.
            if not show:
                trakt_indexer = get_trakt_indexer(trakt_default_indexer)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                # Only add the show if we didn't add it before.
                if indexer_id not in added_shows:
                    self.add_show(trakt_default_indexer, indexer_id,
                                  trakt_show['title'], SKIPPED)
                    added_shows.append(indexer_id)

            elif trakt_season != 0 and not show.paused:
                set_episode_to_wanted(show, trakt_season, trakt_episode)

        log.debug('Synced episodes with Trakt watchlist')
Esempio n. 4
0
def get_episode(series_id, season=None, episode=None, absolute=None, indexer=None):
    """
    Get a specific episode object based on show, season and episode number.

    :param series_id: Series id
    :param season: Season number
    :param episode: Episode number
    :param absolute: Optional if the episode number is a scene absolute number
    :param indexer: Optional indexer id.
    :return: episode object, or an error message string when the lookup fails
    """
    if series_id is None:
        return 'Invalid show parameters'

    series_obj = Show.find_by_id(app.showList, indexer, series_id)

    if series_obj is None:
        return 'Invalid show parameters'

    if absolute:
        ep_obj = series_obj.get_episode(absolute_number=absolute)
    elif season and episode:
        # NOTE(review): season 0 (specials) or episode 0 are falsy and fall
        # through to 'Invalid parameters' — confirm that is intentional.
        ep_obj = series_obj.get_episode(season, episode)
    else:
        return 'Invalid parameters'

    if ep_obj is None:
        return 'Unable to retrieve episode'

    return ep_obj
Esempio n. 5
0
    def find_propers(self, proper_candidates):
        """Search the provider for proper releases of the given candidates."""
        results = []

        for candidate in proper_candidates:
            series_obj = Show.find_by_id(
                app.showList, candidate['indexer'], candidate['showid'])
            if not series_obj:
                continue

            self.series = series_obj
            episode_obj = series_obj.get_episode(
                candidate['season'], candidate['episode'])

            for proper_string in self.proper_strings:
                search_strings = self._get_episode_search_strings(
                    episode_obj, add_string=proper_string)

                for item in self.search(search_strings[0], ep_obj=episode_obj):
                    result = self.get_result()
                    results.append(result)

                    result.name, result.url = self._get_title_and_url(item)
                    result.seeders, result.leechers = self._get_result_info(item)
                    result.size = self._get_size(item)
                    result.pubdate = self._get_pubdate(item)

                    # This will be retrieved from the parser
                    result.proper_tags = ''

                    result.search_type = PROPER_SEARCH
                    result.date = datetime.today()
                    result.series = series_obj

        return results
Esempio n. 6
0
    def remove_episode_watchlist(self):
        """Remove downloaded/archived episodes from Trakt.

        NOTE(review): the method name and log wording say "watchlist", but
        the episodes are checked against the 'Collection' list and removed
        via ``sync.remove_from_collection`` — confirm which wording is the
        intended one.
        """
        if not (app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT):
            return

        main_db_con = db.DBConnection()
        statuses = [DOWNLOADED, ARCHIVED]
        sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                        'FROM tv_episodes AS e, tv_shows AS s ' \
                        'WHERE e.indexer = s.indexer ' \
                        'AND s.indexer_id = e.showid AND e.status in ({0})'.format(','.join(['?'] * len(statuses)))

        sql_result = main_db_con.select(sql_selection, statuses)

        if not sql_result:
            return

        # Group (season, episode) pairs per (indexer, indexer_id) show key.
        # (Removed a dead `episodes = []` that was reassigned before use.)
        shows = {}
        for cur_episode in sql_result:
            # Skip episodes of shows whose indexer Trakt does not support.
            if not get_trakt_indexer(cur_episode['indexer']):
                continue

            show_id = cur_episode['indexer'], cur_episode['indexer_id']
            episode = cur_episode['season'], cur_episode['episode']

            if show_id not in shows:
                shows[show_id] = []

            shows[show_id].append(episode)

        media_object_shows = []
        for show_id in shows:
            episodes = []
            show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
            for season, episode in shows[show_id]:
                # Only remove episodes Trakt actually has in the list.
                if not self._check_list(indexer=show_obj.indexer,
                                        indexer_id=show_obj.series_id,
                                        season=season,
                                        episode=episode,
                                        list_type='Collection'):
                    continue

                log.info("Removing episode '{show}' {ep} from Trakt watchlist",
                         {
                             'show': show_obj.name,
                             'ep': episode_num(season, episode)
                         })
                episodes.append(show_obj.get_episode(season, episode))
            media_object_shows.append(
                create_episode_structure(show_obj, episodes))

        try:
            sync.remove_from_collection({'shows': media_object_shows})
            self._get_episode_watchlist()
        except TraktException as error:
            log.info(
                'Unable to remove episodes from Trakt watchlist. Error: {error!r}',
                {'error': error})
Esempio n. 7
0
def get_episode(series_id,
                season=None,
                episode=None,
                absolute=None,
                indexer=None):
    """
    Look up a single episode object of a show.

    :param series_id: Series id
    :param season: Season number
    :param episode: Episode number
    :param absolute: Optional scene absolute number instead of season/episode
    :param indexer: Optional indexer id.
    :return: the episode object, or an error message string
    """
    invalid_show = 'Invalid show parameters'
    if series_id is None:
        return invalid_show

    series_obj = Show.find_by_id(app.showList, indexer, series_id)
    if series_obj is None:
        return invalid_show

    if absolute:
        found = series_obj.get_episode(absolute_number=absolute)
    elif season and episode:
        found = series_obj.get_episode(season, episode)
    else:
        return 'Invalid parameters'

    return 'Unable to retrieve episode' if found is None else found
Esempio n. 8
0
def get_episodes(search_thread, searchstatus):
    """Get all episodes located in a search thread with a specific status."""
    # Look the show up again: it might have been deleted very recently.
    series_obj = Show.find_by_id(
        app.showList, search_thread.show.indexer, search_thread.show.series_id)

    if not series_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return []

    # Normalize a single segment into a one-element list.
    segment = search_thread.segment
    if not isinstance(segment, list):
        search_thread.segment = segment = [segment]

    results = []
    for item in segment:
        episode = series_obj.get_episode(item.season, item.episode)
        results.append({
            'indexer_id': series_obj.indexer,
            'series_id': series_obj.series_id,
            'episode': episode.episode,
            'episodeindexerid': episode.indexerid,
            'season': episode.season,
            'searchstatus': searchstatus,
            'status': statusStrings[episode.status],
            'quality_name': Quality.qualityStrings[episode.quality],
            'quality_style': get_quality_class(episode),
            'overview': Overview.overviewStrings[series_obj.get_overview(
                episode.status, episode.quality,
                manually_searched=episode.manually_searched)],
        })

    return results
Esempio n. 9
0
    def find_propers(self, proper_candidates):
        """Search the provider for proper releases of the given candidates."""
        results = []

        for candidate in proper_candidates:
            series_obj = Show.find_by_id(app.showList,
                                         candidate['indexer'],
                                         candidate['showid'])
            if not series_obj:
                continue

            self.series = series_obj
            episode_obj = series_obj.get_episode(candidate['season'],
                                                 candidate['episode'])

            for proper_string in self.proper_strings:
                search_strings = self._get_episode_search_strings(
                    episode_obj, add_string=proper_string)

                for item in self.search(search_strings[0], ep_obj=episode_obj):
                    search_result = self.get_result(series=series_obj,
                                                    item=item)
                    # Skip results we already collected.
                    if search_result in results:
                        continue

                    search_result.search_type = PROPER_SEARCH
                    results.append(search_result)

        return results
Esempio n. 10
0
def get_episodes(search_thread, searchstatus):
    """Get all episodes located in a search thread with a specific status."""
    # Look the show up again: it might have been deleted very recently.
    series_obj = Show.find_by_id(
        app.showList, search_thread.show.indexer, search_thread.show.series_id)

    if not series_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return []

    # Normalize a single segment into a one-element list.
    segment = search_thread.segment
    if not isinstance(segment, list):
        search_thread.segment = segment = [segment]

    results = []
    for item in segment:
        episode = series_obj.get_episode(item.season, item.episode)
        results.append({
            'indexer_id': series_obj.indexer,
            'series_id': series_obj.series_id,
            'episode': episode.episode,
            'episodeindexerid': episode.indexerid,
            'season': episode.season,
            'searchstatus': searchstatus,
            'status': statusStrings[episode.status],
            'quality': get_quality_class(episode),
            'overview': Overview.overviewStrings[series_obj.get_overview(
                episode.status,
                manually_searched=episode.manually_searched
            )],
        })

    return results
Esempio n. 11
0
    def find_propers(self, proper_candidates):
        """Search the provider for proper releases of the given candidates."""
        results = []

        for candidate in proper_candidates:
            series_obj = Show.find_by_id(
                app.showList, candidate['indexer'], candidate['showid'])

            if series_obj:
                self.series = series_obj
                episode_obj = series_obj.get_episode(
                    candidate['season'], candidate['episode'])

                for add_string in self.proper_strings:
                    search_strings = self._get_episode_search_strings(
                        episode_obj, add_string=add_string)

                    for item in self.search(search_strings[0],
                                            ep_obj=episode_obj):
                        result = self.get_result()
                        results.append(result)

                        result.name, result.url = self._get_title_and_url(item)
                        result.seeders, result.leechers = self._get_result_info(item)
                        result.size = self._get_size(item)
                        result.pubdate = self._get_pubdate(item)

                        # This will be retrieved from the parser
                        result.proper_tags = ''

                        result.search_type = PROPER_SEARCH
                        result.date = datetime.today()
                        result.series = series_obj

        return results
Esempio n. 12
0
    def add_show(indexer, indexer_id, show_name, status):
        """Add a new show with default settings.

        :param indexer: indexer id to add the show under
        :param indexer_id: the show's id on that indexer
        :param show_name: display name, used for logging only
        :param status: default episode status for the new show
        """
        # NOTE(review): this pre-check looks the show up by EXTERNAL_IMDB
        # regardless of `indexer` — confirm it shouldn't use `indexer`.
        if Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id):
            return

        root_dirs = app.ROOT_DIRS

        # ROOT_DIRS[0] holds the index of the default root dir in the list.
        location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None

        if not location:
            log.warning(
                "Error creating show '{show}' folder. No default root directory",
                {'show': show_name})
            return

        log.info(
            "Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}",
            {
                'show': show_name,
                'indexer_name': indexerConfig[indexer]['identifier'],
                'id': indexer_id
            })

        allowed, preferred = Quality.split_quality(int(
            app.QUALITY_DEFAULT))
        quality = {'allowed': allowed, 'preferred': preferred}

        app.show_queue_scheduler.action.addShow(
            indexer,
            indexer_id,
            None,
            default_status=status,
            quality=quality,
            season_folders=int(app.SEASON_FOLDERS_DEFAULT),
            paused=app.TRAKT_START_PAUSED,
            default_status_after=status,
            root_dir=location)

        # The queue adds and refreshes the show asynchronously; poll for it.
        tries = 0
        while tries < 3:
            if Show.find_by_id(app.showList, indexer, indexer_id):
                return
            # Wait before the show gets added and refreshed.
            time.sleep(60)
            tries += 1
        # Fixed: the closing quote was misplaced inside the message
        # ("'{show}. Please check logs' ").
        log.warning("Error creating show '{show}'. Please check logs",
                    {'show': show_name})
        return
Esempio n. 13
0
    def sync_trakt_episodes(self):
        """Sync the Trakt episodes watchlist into the local library.

        For each watchlisted episode, locate its show locally (by each
        configured indexer, then by the IMDB and Trakt external ids).
        Missing shows are added once via ``self.add_show``; for shows
        already in the library, the watchlisted episode is set to wanted
        unless the show is paused or the item refers to season 0.
        """
        if not self.episode_watchlist:
            log.info('No episodes found in your Trakt watchlist. Nothing to sync')
            return

        added_shows = []
        trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

        for watchlist_item in self.episode_watchlist:
            trakt_show = watchlist_item['show']
            trakt_episode = watchlist_item['episode'].get('number', -1)
            trakt_season = watchlist_item['episode'].get('season', -1)

            show = None
            for i in indexerConfig:
                trakt_indexer = get_trakt_indexer(i)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                indexer = indexerConfig[i]['id']
                show = Show.find_by_id(app.showList, indexer, indexer_id)
                if show:
                    break

            if not show:
                # If can't find with available indexers try IMDB
                trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_IMDB, indexer_id)
            if not show:
                # If can't find with available indexers try TRAKT
                trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find_by_id(app.showList, EXTERNAL_TRAKT, indexer_id)

            # If we can't find the show, add it with the default trakt indexer.
            if not show:
                indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
                # Only add the show if we didn't add it before.
                if indexer_id not in added_shows:
                    self.add_show(trakt_default_indexer, indexer_id, trakt_show['title'], SKIPPED)
                    added_shows.append(indexer_id)

            elif trakt_season != 0 and not show.paused:
                set_episode_to_wanted(show, trakt_season, trakt_episode)

        log.debug('Synced episodes with Trakt watchlist')
Esempio n. 14
0
    def backlogShow(self, indexername, seriesid):
        """Queue a backlog search for one show, then redirect to the overview."""
        series_obj = Show.find_by_id(
            app.showList, indexer_name_to_id(indexername), seriesid)

        if series_obj:
            app.backlog_search_scheduler.action.search_backlog([series_obj])

        return self.redirect('/manage/backlogOverview/')
Esempio n. 15
0
    def backlogShow(self, indexername, seriesid):
        """Trigger a backlog search for the given show and redirect."""
        indexer_id = indexer_name_to_id(indexername)
        found = Show.find_by_id(app.showList, indexer_id, seriesid)
        if found:
            app.backlog_search_scheduler.action.search_backlog([found])
        return self.redirect('/manage/backlogOverview/')
Esempio n. 16
0
    def add_episode_trakt_collection(self):
        """Add all existing episodes to Trakt collections.

        Only DOWNLOADED/ARCHIVED episodes that have a media file (location)
        and whose show's indexer Trakt supports are sent.
        """
        if not (app.TRAKT_SYNC and app.USE_TRAKT):
            return

        main_db_con = db.DBConnection()
        statuses = [DOWNLOADED, ARCHIVED]
        sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \
                        'FROM tv_episodes AS e, tv_shows AS s ' \
                        'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \
                        "AND e.status in ({0}) AND e.location <> ''".format(','.join(['?'] * len(statuses)))

        sql_result = main_db_con.select(sql_selection, statuses)
        if not sql_result:
            return

        # Group (season, episode) pairs per (indexer, indexer_id) show key.
        # (Removed a dead `episodes = []` that was reassigned before use.)
        shows = {}
        for cur_episode in sql_result:
            # Check if TRAKT supports that indexer
            if not get_trakt_indexer(cur_episode['indexer']):
                continue

            show_id = cur_episode['indexer'], cur_episode['indexer_id']
            episode = cur_episode['season'], cur_episode['episode']

            if show_id not in shows:
                shows[show_id] = []

            shows[show_id].append(episode)

        media_object_shows = []
        for show_id in shows:
            episodes = []
            show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
            for season, episode in shows[show_id]:
                # Skip episodes already present in the Trakt collection.
                if not self._check_list(
                    indexer=show_obj.indexer, indexer_id=show_obj.series_id,
                    season=season, episode=episode,
                    list_type='Collection'
                ):
                    continue

                log.info("Adding episode '{show}' {ep} to Trakt collection", {
                    'show': show_obj.name,
                    'ep': episode_num(season, episode)
                })
                episodes.append(show_obj.get_episode(season, episode))
            media_object_shows.append(create_episode_structure(show_obj, episodes))

        try:
            sync.add_to_collection({'shows': media_object_shows})
            self._get_show_collection()
        except (TraktException, RequestException) as error:
            log.info('Unable to add episodes to Trakt collection. Error: {error!r}', {'error': error})
Esempio n. 17
0
def get_episodes(search_thread, searchstatus):
    """Get all episodes located in a search thread with a specific status.

    :param search_thread: queue item holding the show and the episode segment
    :param searchstatus: status string attached to every returned entry
    :return: list of dicts describing each episode in the thread's segment
    """
    episodes = []

    # The show may have been deleted very recently, so look it up again.
    series_obj = Show.find_by_id(
        app.showList, search_thread.show.indexer, search_thread.show.series_id)

    if not series_obj:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return episodes

    # Normalize a single-episode segment to a list.
    if not isinstance(search_thread.segment, list):
        search_thread.segment = [search_thread.segment]

    for segment_ep in search_thread.segment:
        ep = series_obj.get_episode(segment_ep.season, segment_ep.episode)
        quality_style = (Quality.quality_keys.get(ep.quality)
                         or Quality.quality_keys[Quality.UNKNOWN])
        overview_key = series_obj.get_overview(
            ep.status, ep.quality, manually_searched=ep.manually_searched)
        start_time = (search_thread.start_time.isoformat()
                      if search_thread.start_time else None)
        episodes.append({
            'indexer_id': series_obj.indexer,
            'series_id': series_obj.series_id,
            'episode': ep.episode,
            'episodeindexerid': ep.indexerid,
            'season': ep.season,
            'searchstatus': searchstatus,
            'status': statusStrings[ep.status],
            # TODO: `quality_name` and `quality_style` should both be removed
            # when converting forced/manual episode search to Vue (use QualityPill component directly)
            'quality_name': Quality.qualityStrings[ep.quality],
            'quality_style': quality_style,
            'overview': Overview.overviewStrings[overview_key],
            'queuetime': search_thread.queue_time.isoformat(),
            'starttime': start_time,
        })

    return episodes
Esempio n. 18
0
    def add_show(indexer, indexer_id, show_name, status):
        """Add a new show with default settings.

        :param indexer: indexer id (a key of indexerConfig)
        :param indexer_id: the show's id on that indexer
        :param show_name: display name, used for logging only
        :param status: default status applied to the new show's episodes
        """
        # Skip if the show already exists for this indexer.
        # Fixed: the duplicate-check previously used EXTERNAL_IMDB, which
        # compared `indexer_id` (an id on `indexer`) against IMDB ids and could
        # miss existing shows; the retry loop below already checks `indexer`.
        if Show.find_by_id(app.showList, indexer, indexer_id):
            return

        root_dirs = app.ROOT_DIRS
        # ROOT_DIRS[0] holds the index of the default root dir within the list.
        location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None

        if not location:
            log.warning("Error creating show '{show}' folder. No default root directory", {
                'show': show_name
            })
            return

        log.info("Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}", {
            'show': show_name,
            'indexer_name': indexerConfig[indexer]['identifier'],
            'id': indexer_id
        })

        app.show_queue_scheduler.action.addShow(indexer, indexer_id, None,
                                                default_status=status,
                                                quality=int(app.QUALITY_DEFAULT),
                                                season_folders=int(app.SEASON_FOLDERS_DEFAULT),
                                                paused=app.TRAKT_START_PAUSED,
                                                default_status_after=status,
                                                root_dir=location)
        # Poll for up to 3 minutes until the queued add makes the show visible.
        tries = 0
        while tries < 3:
            if Show.find_by_id(app.showList, indexer, indexer_id):
                return
            # Wait before show get's added and refreshed
            time.sleep(60)
            tries += 1
        # Fixed misplaced quote in the original message.
        log.warning("Error creating show '{show}'. Please check logs", {
            'show': show_name
        })
        return
Esempio n. 19
0
    def downloadSubtitleMissed(self, *args, **kwargs):
        """Download missing subtitles for the episodes checked in the form.

        Form args are named ``<indexer_id>-<series_id>-<what>`` with value
        ``'on'`` when checked; ``what`` is either ``'all'`` or ``s<season>e<episode>``.
        """
        to_download = {}

        # make a list of all shows and their associated args
        for arg in kwargs:
            indexer_id, series_id, what = arg.split('-')

            # we don't care about unchecked checkboxes
            if kwargs[arg] != 'on':
                continue

            if (indexer_id, series_id) not in to_download:
                to_download[(indexer_id, series_id)] = []

            to_download[(indexer_id, series_id)].append(what)

        for cur_indexer_id, cur_series_id in to_download:
            # get a list of all the eps we want to download subtitles if they just said 'all'
            if 'all' in to_download[(cur_indexer_id, cur_series_id)]:
                main_db_con = db.DBConnection()
                # Fixed: removed Python-2 bytes literals -- sqlite3 on
                # Python 3 requires str SQL, and result rows are keyed by str,
                # so x[b'season'] would raise.
                all_eps_results = main_db_con.select(
                    'SELECT season, episode '
                    'FROM tv_episodes '
                    'WHERE status = ? '
                    'AND season != 0 '
                    'AND indexer = ? '
                    'AND showid = ? '
                    "AND location != ''",
                    [DOWNLOADED, cur_indexer_id, cur_series_id])
                to_download[(cur_indexer_id, cur_series_id)] = [
                    's' + str(x['season']) + 'e' + str(x['episode'])
                    for x in all_eps_results
                ]

            for ep_result in to_download[(cur_indexer_id, cur_series_id)]:
                season, episode = ep_result.lstrip('s').split('e')

                series_obj = Show.find_by_id(app.showList, cur_indexer_id,
                                             cur_series_id)
                series_obj.get_episode(season, episode).download_subtitles()

        return self.redirect('/manage/subtitleMissed/')
Esempio n. 20
0
    def _get_episodes_info(self, tvdb_id, episodes, season=None):
        """Add full episode information for existing episodes.

        Episodes present on disk that still need metadata are re-fetched from
        the episodes API; all other entries are left untouched.
        """
        series = Show.find_by_id(app.showList, 1, tvdb_id)
        if not series:
            return episodes

        # Only episodes with a media file can need metadata.
        on_disk = series.get_all_episodes(season=season, has_location=True)
        if not on_disk:
            return episodes

        session = self.config['session']
        language = self.config['language']

        for idx, ep in enumerate(episodes):
            # Be conservative: only hit the API when a matching local episode
            # exists on disk and actually needs episode metadata.
            wanted = any(local.indexerid == ep.id and needs_metadata(local)
                         for local in on_disk)
            if not wanted:
                continue
            details = session.episodes_api.episodes_id_get(
                ep.id, accept_language=language)
            episodes[idx] = details.data

        return episodes
Esempio n. 21
0
    def _get_episodes_info(self, tvdb_id, episodes, season=None):
        """Add full episode information for existing episodes.

        Returns the same list, with entries replaced by full API data where a
        matching on-disk episode needs metadata.
        """
        series = Show.find_by_id(app.showList, 1, tvdb_id)
        if series is None:
            return episodes

        local_eps = series.get_all_episodes(season=season, has_location=True)
        if not local_eps:
            return episodes

        def requires_lookup(remote_id):
            # True when a local, on-disk episode matches and lacks metadata.
            return any(local.indexerid == remote_id and needs_metadata(local)
                       for local in local_eps)

        for position, remote_ep in enumerate(episodes):
            # Try to be as conservative as possible: query the API only when
            # strictly necessary.
            if requires_lookup(remote_ep.id):
                response = self.config['session'].episodes_api.episodes_id_get(
                    remote_ep.id, accept_language=self.config['language']
                )
                episodes[position] = response.data

        return episodes
Esempio n. 22
0
    def downloadSubtitleMissed(self, *args, **kwargs):
        """Download subtitles for the episodes checked in the form.

        Checked form args look like ``<indexer_id>-<series_id>-<what>`` with
        value ``'on'``.
        """
        to_download = {}

        # Group the checked form args per (indexer_id, series_id) show key.
        for arg, value in kwargs.items():
            indexer_id, series_id, what = arg.split('-')

            # skip unchecked checkboxes
            if value != 'on':
                continue

            to_download.setdefault((indexer_id, series_id), []).append(what)

        for cur_indexer_id, cur_series_id in to_download:
            show_key = (cur_indexer_id, cur_series_id)
            # 'all' expands to every downloaded, located episode of the show
            if 'all' in to_download[show_key]:
                main_db_con = db.DBConnection()
                rows = main_db_con.select(
                    'SELECT season, episode '
                    'FROM tv_episodes '
                    'WHERE status = ? '
                    'AND season != 0 '
                    'AND indexer = ? '
                    'AND showid = ? '
                    "AND location != ''",
                    [DOWNLOADED, cur_indexer_id, cur_series_id]
                )
                to_download[show_key] = [
                    's' + str(row['season']) + 'e' + str(row['episode'])
                    for row in rows
                ]

            for token in to_download[show_key]:
                season, episode = token.lstrip('s').split('e')

                series_obj = Show.find_by_id(app.showList, cur_indexer_id, cur_series_id)
                series_obj.get_episode(season, episode).download_subtitles()

        return self.redirect('/manage/subtitleMissed/')
Esempio n. 23
0
    def _postprocess(self, path, info_hash, resource_name, failed=False, client_type=None):
        """Queue a postprocess action.

        Resolves the history rows matching ``info_hash`` into episode objects
        and enqueues a PostProcessQueueItem for them.
        """
        # The info hash identifies the history rows (the episode segment).
        history_rows = self.main_db_con.select(
            'SELECT * FROM history WHERE info_hash = ?',
            [info_hash]
        )

        segment = []
        for row in history_rows:
            # Look the show up in the library; it may have been removed since.
            series = Show.find_by_id(app.showList, row['indexer_id'], row['showid'])
            if series is None:
                # Show is "no longer" available in library.
                continue
            segment.append(series.get_episode(row['season'], row['episode']))

        app.post_processor_queue_scheduler.action.add_item(
            PostProcessQueueItem(
                path, info_hash, resource_name=resource_name,
                failed=failed, episodes=segment, client_type=client_type
            )
        )
Esempio n. 24
0
    def run(self, force=False):
        """
        Run the daily searcher, queuing selected episodes for search.

        Flips recently-aired UNAIRED episodes to the show's default status
        (or SKIPPED for specials) and queues a DailySearchQueueItem.

        :param force: Force search
        """
        # Re-entrancy guard: bail if a previous run is still active, or a
        # manual/forced search is in progress (unless forced).
        if self.amActive:
            log.debug('Daily search is still running, not starting it again')
            return
        elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress(
        ) and not force:
            log.warning(
                'Manual search is running. Unable to start Daily search')
            return

        self.amActive = True
        # Let's keep track of the exact time the scheduler kicked in,
        # as we need to compare to this time for each provider.
        scheduler_start_time = int(time())

        if not network_dict:
            update_network_dict()

        # The tvshows airdate_offset field is used to configure a search offset for specific shows.
        # This way we can search/accept results early or late, depending on the value.
        main_db_con = DBConnection()
        min_offset_show = main_db_con.select(
            'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
            'FROM tv_shows '
            'WHERE paused = 0 AND airdate_offset < 0')
        additional_search_offset = 0
        if min_offset_show and min_offset_show[0]['offsets'] > 0:
            # Convert the most negative configured offset (hours) to whole days.
            additional_search_offset = int(
                ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
            log.debug(
                'Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                ' offset configured.',
                {'min_offset_show': min_offset_show[0]['min_offset']})

        cur_time = datetime.now(app_timezone)

        # Consider episodes airing up to tomorrow (two days ahead when no
        # network dict is available), extended by the offset in days.
        cur_date = (date.today() + timedelta(days=1 if network_dict else 2) +
                    timedelta(days=additional_search_offset)).toordinal()

        episodes_from_db = main_db_con.select(
            'SELECT indexer, showid, airdate, season, episode '
            'FROM tv_episodes '
            'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date])

        new_releases = []
        series_obj = None

        for db_episode in episodes_from_db:
            indexer_id = db_episode['indexer']
            series_id = db_episode['showid']
            try:
                # Reuse the previously looked-up series when the current row
                # belongs to the same show.
                if not series_obj or series_id != series_obj.indexerid:
                    series_obj = Show.find_by_id(app.showList, indexer_id,
                                                 series_id)

                # for when there is orphaned series in the database but not loaded into our show list
                if not series_obj or series_obj.paused:
                    continue

            except MultipleShowObjectsException:
                log.info('ERROR: expected to find a single show matching {id}',
                         {'id': series_id})
                continue

            cur_ep = series_obj.get_episode(db_episode['season'],
                                            db_episode['episode'])

            if series_obj.airs and series_obj.network:
                # This is how you assure it is always converted to local time
                show_air_time = parse_date_time(db_episode['airdate'],
                                                series_obj.airs,
                                                series_obj.network)
                end_time = show_air_time.astimezone(app_timezone) + timedelta(
                    minutes=try_int(series_obj.runtime, 60))

                if series_obj.airdate_offset != 0:
                    log.debug(
                        '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                        {
                            'show': series_obj.name,
                            'episode': cur_ep.pretty_name(),
                            'offset': series_obj.airdate_offset
                        })

                # filter out any episodes that haven't finished airing yet
                if end_time + timedelta(
                        hours=series_obj.airdate_offset) > cur_time:
                    continue

            with cur_ep.lock:
                # Specials (season 0) are not supported and are marked SKIPPED.
                cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
                log.info(
                    'Setting status ({status}) for show airing today: {name} {special}',
                    {
                        'name':
                        cur_ep.pretty_name(),
                        'status':
                        common.statusStrings[cur_ep.status],
                        'special':
                        '(specials are not supported)'
                        if not cur_ep.season else '',
                    })
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            # Persist all status changes in one batch.
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)

        # queue a daily search
        app.search_queue_scheduler.action.add_item(
            DailySearchQueueItem(scheduler_start_time, force=force))

        self.amActive = False
Esempio n. 25
0
    def find_episodes(self, episodes):
        """
        Search cache for episodes.

        NOTE: This is currently only used by the Backlog/Forced Search. As we determine the candidates there.
        The following checks are performed on the cache results:
        * Filter out non-anime results on Anime only providers
        * Check if the series is still in our library
        :param episodes: Single or list of episode object(s)

        :return list of SearchResult objects.
        """
        # Results grouped by episode number; duplicates are filtered below.
        cache_results = defaultdict(list)
        results = []

        cache_db_con = self._get_db()
        if not episodes:
            # No filter: return every cached entry for this provider.
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}]'.format(name=self.provider_id))
        elif not isinstance(episodes, list):
            # Single episode object: match its series/season/episode.
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}] '
                'WHERE indexer = ? AND '
                'indexerid = ? AND '
                'season = ? AND '
                'episodes LIKE ?'.format(name=self.provider_id),
                [episodes.series.indexer, episodes.series.series_id, episodes.season,
                 '%|{0}|%'.format(episodes.episode)]
            )
        else:
            # List of episodes: build one query per episode and run as a batch.
            for ep_obj in episodes:
                results.append([
                    'SELECT * FROM [{name}] '
                    'WHERE indexer = ? AND '
                    'indexerid = ? AND '
                    'season = ? AND '
                    'episodes LIKE ?'.format(
                        name=self.provider_id
                    ),
                    [ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season,
                     '%|{0}|%'.format(ep_obj.episode)]]
                )

            if len(episodes) > 1:
                # Also match season-pack style entries (empty episode list).
                # NOTE(review): `ep_obj` here is the last element of the loop
                # above, so only that episode's season is matched — confirm
                # this is intentional.
                results.append([
                    'SELECT * FROM [{name}] '
                    'WHERE indexer = ? AND '
                    'indexerid = ? AND '
                    'season = ? AND '
                    'episodes == "||"'.format(
                        name=self.provider_id
                    ),
                    [ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season]
                ])

            if results:
                # Only execute the query if we have results
                sql_results = cache_db_con.mass_action(results, fetchall=True)
                sql_results = list(itertools.chain(*sql_results))
            else:
                sql_results = []
                log.debug(
                    '{id}: No cached results in {provider} for series {show_name!r} episode {ep}', {
                        'id': episodes[0].series.series_id,
                        'provider': self.provider.name,
                        'show_name': episodes[0].series.name,
                        'ep': episode_num(episodes[0].season, episodes[0].episode),
                    }
                )

        # for each cache entry
        for cur_result in sql_results:
            if cur_result['indexer'] is None:
                log.debug('Ignoring result: {0}, missing indexer. This is probably a result added'
                          ' prior to medusa version 0.2.0', cur_result['name'])
                continue

            # get the show, or ignore if it's not one of our shows
            series_obj = Show.find_by_id(app.showList, int(cur_result['indexer']), int(cur_result['indexerid']))
            if not series_obj:
                continue

            # skip if provider is anime only and show is not anime
            if self.provider.anime_only and not series_obj.is_anime:
                log.debug('{0} is not an anime, skipping', series_obj.name)
                continue

            search_result = self.provider.get_result(series=series_obj, cache=cur_result)
            if search_result in cache_results[search_result.episode_number]:
                continue
            # add it to the list
            cache_results[search_result.episode_number].append(search_result)

        # datetime stamp this search so cache gets cleared
        self.searched = time()

        return cache_results
Esempio n. 26
0
    def remove_episode_trakt_collection(self, filter_show=None):
        """Remove episode from trakt collection.

        For episodes that no longer have a media file (location)
        :param filter_show: optional. Only remove episodes from trakt collection for given shows
        """
        if not (app.TRAKT_SYNC_REMOVE and app.TRAKT_SYNC and app.USE_TRAKT):
            return

        params = []
        main_db_con = db.DBConnection()
        statuses = [DOWNLOADED, ARCHIVED]
        sql_selection = 'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name,' \
                        'e.season, e.episode, e.status ' \
                        'FROM tv_episodes AS e, tv_shows AS s WHERE e.indexer = s.indexer AND ' \
                        's.indexer_id = e.showid and e.location = "" ' \
                        'AND e.status in ({0})'.format(','.join(['?'] * len(statuses)))
        if filter_show:
            sql_selection += ' AND s.indexer_id = ? AND e.indexer = ?'
            params = [filter_show.series_id, filter_show.indexer]

        sql_result = main_db_con.select(sql_selection, statuses + params)

        if not sql_result:
            return

        # Group (season, episode) pairs per (indexer, indexer_id) show key.
        # (Removed a dead `episodes = []` that was immediately re-initialized
        # inside the loop below.)
        shows = {}
        for cur_episode in sql_result:
            # Check if TRAKT supports that indexer
            if not get_trakt_indexer(cur_episode['indexer']):
                continue

            show_id = cur_episode['indexer'], cur_episode['indexer_id']
            episode = cur_episode['season'], cur_episode['episode']
            shows.setdefault(show_id, []).append(episode)

        media_object_shows = []
        for show_id in shows:
            episodes = []
            show_obj = Show.find_by_id(app.showList, show_id[0], show_id[1])
            for season, episode in shows[show_id]:
                # Only remove episodes that are actually on the Trakt collection.
                if not self._check_list(
                    indexer=show_obj.indexer, indexer_id=show_obj.series_id,
                    season=season, episode=episode,
                    list_type='Collection'
                ):
                    continue

                log.info("Removing episode '{show}' {ep} from Trakt collection", {
                    'show': show_obj.name,
                    'ep': episode_num(season, episode)
                })
                episodes.append(show_obj.get_episode(season, episode))
            media_object_shows.append(create_episode_structure(show_obj, episodes))

        try:
            sync.remove_from_collection({'shows': media_object_shows})
            self._get_show_collection()
        # Also catch RequestException, consistent with the add-to-collection
        # handler, so transient network errors don't bubble up.
        except (TraktException, RequestException) as error:
            log.info('Unable to remove episodes from Trakt collection. Error: {error!r}', {
                'error': error
            })
Esempio n. 27
0
    def find_needed_episodes(self,
                             episode,
                             forced_search=False,
                             down_cur_quality=False):
        """Find needed episodes in the provider cache.

        :param episode: single episode object, list of episode objects, or a
            falsy value to consider every cached entry
        :param forced_search: whether this search was forced by the user
        :param down_cur_quality: whether to accept the current quality again
        :return: dict mapping episode object -> list of search results
        """
        needed_eps = {}
        results = []

        # Fixed: removed Python-2 bytes literals from the original --
        # b'...'.format(...) raises AttributeError on Python 3 (bytes has no
        # .format), and sqlite result rows are keyed by str, not bytes.
        cache_db_con = self._get_db()
        if not episode:
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}]'.format(name=self.provider_id))
        elif not isinstance(episode, list):
            # Single episode: match its series/season/episode.
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}] '
                'WHERE indexer = ? AND'
                '      indexerid = ? AND'
                '      season = ? AND'
                '      episodes LIKE ?'.format(name=self.provider_id), [
                    episode.series.indexer, episode.series.series_id,
                    episode.season, '%|{0}|%'.format(episode.episode)
                ])
        else:
            # List of episodes: one query per episode, executed as a batch.
            for ep_obj in episode:
                results.append([
                    'SELECT * FROM [{name}] '
                    'WHERE indexer = ? AND '
                    '      indexerid = ? AND'
                    '      season = ? AND'
                    '      episodes LIKE ? AND '
                    '      quality IN ({qualities})'.format(
                        name=self.provider_id,
                        qualities=','.join(
                            (str(x) for x in ep_obj.wanted_quality))),
                    [
                        ep_obj.series.indexer, ep_obj.series.series_id,
                        ep_obj.season, '%|{0}|%'.format(ep_obj.episode)
                    ]
                ])

            if results:
                # Only execute the query if we have results
                sql_results = cache_db_con.mass_action(results, fetchall=True)
                sql_results = list(itertools.chain(*sql_results))
            else:
                sql_results = []
                log.debug(
                    '{id}: No cached results in {provider} for series {show_name!r} episode {ep}',
                    {
                        'id': episode[0].series.series_id,
                        'provider': self.provider.name,
                        'show_name': episode[0].series.name,
                        'ep': episode_num(episode[0].season,
                                          episode[0].episode),
                    })

        # for each cache entry
        for cur_result in sql_results:
            if cur_result['indexer'] is None:
                log.debug(
                    'Ignoring result: {0}, missing indexer. This is probably a result added'
                    ' prior to medusa version 0.2.0', cur_result['name'])
                continue

            search_result = self.provider.get_result()

            # ignored/required words, and non-tv junk
            if not naming.filter_bad_releases(cur_result['name']):
                continue

            # get the show, or ignore if it's not one of our shows
            series_obj = Show.find_by_id(app.showList,
                                         int(cur_result['indexer']),
                                         int(cur_result['indexerid']))
            if not series_obj:
                continue

            # skip if provider is anime only and show is not anime
            if self.provider.anime_only and not series_obj.is_anime:
                log.debug('{0} is not an anime, skipping', series_obj.name)
                continue

            # get season and ep data (ignoring multi-eps for now)
            search_result.season = int(cur_result['season'])
            if search_result.season == -1:
                continue

            # episodes column is a pipe-delimited list: '|12|13|'
            cur_ep = cur_result['episodes'].split('|')[1]
            if not cur_ep:
                continue

            cur_ep = int(cur_ep)

            search_result.quality = int(cur_result['quality'])
            search_result.release_group = cur_result['release_group']
            search_result.version = cur_result['version']

            # if the show says we want that episode then add it to the list
            if not series_obj.want_episode(search_result.season, cur_ep,
                                           search_result.quality,
                                           forced_search, down_cur_quality):
                log.debug('Ignoring {0}', cur_result['name'])
                continue

            search_result.episodes = [
                series_obj.get_episode(search_result.season, cur_ep)
            ]

            search_result.actual_episodes = [search_result.episodes[0].episode]
            search_result.actual_season = search_result.season

            # build a result object
            search_result.name = cur_result['name']
            search_result.url = cur_result['url']

            log.debug(
                '{id}: Using cached results from {provider} for series {show_name!r} episode {ep}',
                {
                    'id':
                    search_result.episodes[0].series.series_id,
                    'provider':
                    self.provider.name,
                    'show_name':
                    search_result.episodes[0].series.name,
                    'ep':
                    episode_num(search_result.episodes[0].season,
                                search_result.episodes[0].episode),
                })

            # Map the remaining attributes
            search_result.series = series_obj
            search_result.seeders = cur_result['seeders']
            search_result.leechers = cur_result['leechers']
            search_result.size = cur_result['size']
            search_result.pubdate = cur_result['pubdate']
            search_result.proper_tags = cur_result['proper_tags'].split(
                '|') if cur_result['proper_tags'] else ''
            search_result.content = None

            # FIXME: Should be changed to search_result.search_type
            search_result.forced_search = forced_search

            search_result.download_current_quality = down_cur_quality

            episode_object = search_result.episodes[0]
            # add it to the list
            if episode_object not in needed_eps:
                needed_eps[episode_object] = [search_result]
            else:
                needed_eps[episode_object].append(search_result)

        # datetime stamp this search so cache gets cleared
        self.searched = time()

        return needed_eps
Esempio n. 28
0
    def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
        """Check for needed subtitles for users' shows.

        :param force: True if a force search needs to be executed
        :type force: bool
        """
        if self.amActive:
            logger.debug(u'Subtitle finder is still running, not starting it again')
            return

        if not app.USE_SUBTITLES:
            logger.warning(u'Subtitle search is disabled. Please enabled it')
            return

        if not enabled_service_list():
            logger.warning(u'Not enough services selected. At least 1 service is required to search subtitles in the '
                           u'background')
            return

        self.amActive = True

        def dhm(td):
            """Create the string for subtitles delay."""
            days_delay = td.days
            hours_delay = td.seconds // 60 ** 2
            minutes_delay = (td.seconds // 60) % 60
            ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \
                  (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \
                  (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0]
            if days_delay == 1:
                ret = ret.replace('days', 'day')
            if hours_delay == 1:
                ret = ret.replace('hours', 'hour')
            if minutes_delay == 1:
                ret = ret.replace('minutes', 'minute')
            return ret.rstrip(', ')

        if app.POSTPONE_IF_NO_SUBS:
            self.subtitles_download_in_pp()

        logger.info(u'Checking for missed subtitles')

        main_db_con = db.DBConnection()
        # Shows with air date <= 30 days, have a limit of 100 results
        # Shows with air date > 30 days, have a limit of 200 results
        sql_args = [{'age_comparison': '<=', 'limit': 100}, {'age_comparison': '>', 'limit': 200}]
        sql_like_languages = '%' + ','.join(sorted(wanted_languages())) + '%' if app.SUBTITLES_MULTI else '%und%'
        sql_results = []
        for args in sql_args:
            sql_results += main_db_con.select(
                'SELECT '
                's.show_name, '
                'e.indexer,'
                'e.showid, '
                'e.season, '
                'e.episode,'
                'e.release_name, '
                'e.status, '
                'e.subtitles, '
                'e.subtitles_searchcount AS searchcount, '
                'e.subtitles_lastsearch AS lastsearch, '
                'e.location, (? - e.airdate) as age '
                'FROM '
                'tv_episodes AS e '
                'INNER JOIN tv_shows AS s '
                'ON (e.showid = s.indexer_id AND e.indexer = s.indexer) '
                'WHERE '
                's.subtitles = 1 '
                'AND s.paused = 0 '
                'AND e.status = ? '
                'AND e.season > 0 '
                "AND e.location != '' "
                'AND age {} 30 '
                'AND e.subtitles NOT LIKE ? '
                'ORDER BY '
                'lastsearch ASC '
                'LIMIT {}'.format
                (args['age_comparison'], args['limit']),
                [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages]
            )

        if not sql_results:
            logger.info('No subtitles to download')
            self.amActive = False
            return

        for ep_to_sub in sql_results:

            # give the CPU a break
            time.sleep(cpu_presets[app.CPU_PRESET])

            ep_num = episode_num(ep_to_sub['season'], ep_to_sub['episode']) or \
                episode_num(ep_to_sub['season'], ep_to_sub['episode'], numbering='absolute')
            subtitle_path = _encode(ep_to_sub['location'])
            if not os.path.isfile(subtitle_path):
                logger.debug('Episode file does not exist, cannot download subtitles for %s %s',
                             ep_to_sub['show_name'], ep_num)
                continue

            if app.SUBTITLES_STOP_AT_FIRST and ep_to_sub['subtitles']:
                logger.debug('Episode already has one subtitle, skipping %s %s', ep_to_sub['show_name'], ep_num)
                continue

            if not needs_subtitles(ep_to_sub['subtitles']):
                logger.debug('Episode already has all needed subtitles, skipping %s %s',
                             ep_to_sub['show_name'], ep_num)
                continue

            try:
                lastsearched = datetime.datetime.strptime(ep_to_sub['lastsearch'], dateTimeFormat)
            except ValueError:
                lastsearched = datetime.datetime.min

            if not force:
                now = datetime.datetime.now()
                days = int(ep_to_sub['age'])
                delay_time = datetime.timedelta(hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24)
                delay = lastsearched + delay_time - now

                # Search every hour until 10 days pass
                # After 10 days, search every 8 hours, after 30 days search once a month
                # Will always try an episode regardless of age for 3 times
                # The time resolution is minute
                # Only delay is the it's bigger than one minute and avoid wrongly skipping the search slot.
                if delay.total_seconds() > 60 and int(ep_to_sub['searchcount']) > 2:
                    logger.debug('Subtitle search for %s %s delayed for %s',
                                 ep_to_sub['show_name'], ep_num, dhm(delay))
                    continue

            show_object = Show.find_by_id(app.showList, ep_to_sub['indexer'], ep_to_sub['showid'])
            if not show_object:
                logger.debug('Show with ID %s not found in the database', ep_to_sub['showid'])
                continue

            episode_object = show_object.get_episode(ep_to_sub['season'], ep_to_sub['episode'])
            if isinstance(episode_object, str):
                logger.debug('%s %s not found in the database', ep_to_sub['show_name'], ep_num)
                continue

            episode_object.download_subtitles()

        logger.info('Finished checking for missed subtitles')
        self.amActive = False
Esempio n. 29
0
    def get_last_updated_seasons(self,
                                 show_list=None,
                                 cache=None,
                                 *args,
                                 **kwargs):
        """Return updated seasons for shows passed, using the from_time.

        There is no single imdb api call that tells us whether an update is
        required. So we compare what is in the local library with the remote
        season/episode listing, and fall back to a per-season update interval
        derived from the airdate of the season's last episode.

        :param show_list: list of series ids, where season updates are retrieved for.
        :param cache: cache object providing ``get_last_update_season`` /
            ``set_last_update_season`` for per-season timestamps.
        :return: dict mapping each series id to a list of season numbers that
            need an update.
        :raises IndexerShowIncomplete: if no episodes could be retrieved.
        :raises IndexerUnavailable: if the imdb api could not be reached.
        """
        show_season_updates = {}

        # Guard against the None default: iterating None raises TypeError.
        show_list = show_list or []

        for series_id in show_list:
            series_obj = Show.find_by_id(app.showList, self.indexer, series_id)
            if not series_obj:
                # The id is not (or no longer) in our show list; skip it
                # instead of crashing on attribute access below.
                continue
            all_episodes_local = series_obj.get_all_episodes()

            total_updates = []
            results = None
            # A small api call to get the amount of known seasons
            try:
                results = self.imdb_api.get_title_episodes(
                    ImdbIdentifier(series_id).imdb_id)
            except LookupError as error:
                raise IndexerShowIncomplete(
                    'Show episode search exception, '
                    'could not get any episodes. Exception: {error!r}'.format(
                        error=error))
            except (AttributeError, RequestException) as error:
                raise IndexerUnavailable(
                    'Error connecting to Imdb api. Caused by: {0!r}'.format(
                        error))

            if not results or not results.get('seasons'):
                continue

            # Loop through the remote seasons
            for season in results['seasons']:
                season_number = season.get('season')

                # Imdb api gives back a season without the 'season' key. This season has special episodes.
                # Dont know what this is, but skipping it.
                if not season_number:
                    continue

                # Check if the season is already known in our local db.
                local_season_episodes = [
                    ep for ep in all_episodes_local
                    if ep.season == season_number
                ]
                remote_season_episodes = season['episodes']
                # Unknown season, or episode count mismatch: always refresh.
                if not local_season_episodes or len(
                        remote_season_episodes) != len(local_season_episodes):
                    total_updates.append(season_number)
                    log.debug(
                        '{series}: Season {season} seems to be a new season. Adding it.',
                        {
                            'series': series_obj.name,
                            'season': season_number
                        })
                    continue

                # Per season, get latest episode airdate
                sorted_episodes = sorted(local_season_episodes,
                                         key=lambda x: x.airdate)
                date_season_last = sorted_episodes[-1].airdate

                # Calculate update interval for the season. A season counts as
                # finished when the remote listing contains a following season.
                update_interval = self._calc_update_interval(
                    date_season_last,
                    season_finished=bool([
                        s for s in results['seasons']
                        if s.get('season') == season_number + 1
                    ]))

                last_update = cache.get_last_update_season(
                    self.indexer, series_id, season_number)
                if last_update < time() - update_interval:
                    # This season should be updated.
                    total_updates.append(season_number)

                    # Update last_update for this season.
                    cache.set_last_update_season(self.indexer, series_id,
                                                 season_number)
                else:
                    log.debug(
                        '{series}: Season {season} seems to have been recently updated. Not scheduling a new refresh',
                        {
                            'series': series_obj.name,
                            'season': season_number
                        })

            # De-duplicate season numbers collected above.
            show_season_updates[series_id] = list(set(total_updates))

        return show_season_updates
Esempio n. 30
0
    def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
        """Check for needed subtitles for users' shows.

        :param force: True if a force search needs to be executed
        :type force: bool
        """
        if self.amActive:
            logger.debug(
                u'Subtitle finder is still running, not starting it again')
            return

        if not app.USE_SUBTITLES:
            logger.warning(u'Subtitle search is disabled. Please enabled it')
            return

        if not enabled_service_list():
            logger.warning(
                u'Not enough services selected. At least 1 service is required to search subtitles in the '
                u'background')
            return

        self.amActive = True

        def dhm(td):
            """Create the string for subtitles delay.

            Formats a timedelta as 'D days, H hours, M minutes', dropping
            zero components and singularizing where the value is 1.
            """
            days_delay = td.days
            hours_delay = td.seconds // 60**2
            minutes_delay = (td.seconds // 60) % 60
            ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \
                  (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \
                  (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0]
            if days_delay == 1:
                ret = ret.replace('days', 'day')
            if hours_delay == 1:
                ret = ret.replace('hours', 'hour')
            if minutes_delay == 1:
                ret = ret.replace('minutes', 'minute')
            return ret.rstrip(', ')

        if app.POSTPONE_IF_NO_SUBS:
            self.subtitles_download_in_pp()

        logger.info(u'Checking for missed subtitles')

        database = db.DBConnection()
        # Shows with air date <= 30 days, have a limit of 100 results
        # Shows with air date > 30 days, have a limit of 200 results
        sql_args = [{
            'age_comparison': '<=',
            'limit': 100
        }, {
            'age_comparison': '>',
            'limit': 200
        }]
        sql_like_languages = '%' + ','.join(sorted(
            wanted_languages())) + '%' if app.SUBTITLES_MULTI else '%und%'
        sql_results = []
        for args in sql_args:
            sql_results += database.select(
                'SELECT '
                's.show_name, '
                'e.indexer,'
                'e.showid, '
                'e.season, '
                'e.episode,'
                'e.release_name, '
                'e.status, '
                'e.subtitles, '
                'e.subtitles_searchcount AS searchcount, '
                'e.subtitles_lastsearch AS lastsearch, '
                'e.location, (? - e.airdate) as age '
                'FROM '
                'tv_episodes AS e '
                'INNER JOIN tv_shows AS s '
                'ON (e.showid = s.indexer_id AND e.indexer = s.indexer) '
                'WHERE '
                's.subtitles = 1 '
                'AND s.paused = 0 '
                'AND e.status = ? '
                'AND e.season > 0 '
                "AND e.location != '' "
                'AND age {} 30 '
                'AND e.subtitles NOT LIKE ? '
                'ORDER BY '
                'lastsearch ASC '
                'LIMIT {}'.format(args['age_comparison'], args['limit']), [
                    datetime.datetime.now().toordinal(), DOWNLOADED,
                    sql_like_languages
                ])

        if not sql_results:
            logger.info('No subtitles to download')
            self.amActive = False
            return

        for ep_to_sub in sql_results:

            # give the CPU a break
            time.sleep(cpu_presets[app.CPU_PRESET])

            # NOTE: row columns are accessed with str keys; bytes keys
            # (b'season') fail on Python 3 sqlite rows.
            ep_num = episode_num(ep_to_sub['season'], ep_to_sub['episode']) or \
                episode_num(ep_to_sub['season'], ep_to_sub['episode'], numbering='absolute')
            subtitle_path = _encode(ep_to_sub['location'], fallback='utf-8')
            if not os.path.isfile(subtitle_path):
                logger.debug(
                    'Episode file does not exist, cannot download subtitles for %s %s',
                    ep_to_sub['show_name'], ep_num)
                continue

            if app.SUBTITLES_STOP_AT_FIRST and ep_to_sub['subtitles']:
                logger.debug(
                    'Episode already has one subtitle, skipping %s %s',
                    ep_to_sub['show_name'], ep_num)
                continue

            if not needs_subtitles(ep_to_sub['subtitles']):
                logger.debug(
                    'Episode already has all needed subtitles, skipping %s %s',
                    ep_to_sub['show_name'], ep_num)
                continue

            try:
                lastsearched = datetime.datetime.strptime(
                    ep_to_sub['lastsearch'], dateTimeFormat)
            except ValueError:
                # Unparsable timestamp: treat as "never searched".
                lastsearched = datetime.datetime.min

            if not force:
                now = datetime.datetime.now()
                days = int(ep_to_sub['age'])
                delay_time = datetime.timedelta(
                    hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24)
                delay = lastsearched + delay_time - now

                # Search every hour until 10 days pass
                # After 10 days, search every 8 hours, after 30 days search once a month
                # Will always try an episode regardless of age for 3 times
                # The time resolution is minute
                # Only delay is the it's bigger than one minute and avoid wrongly skipping the search slot.
                if delay.total_seconds() > 60 and int(
                        ep_to_sub['searchcount']) > 2:
                    logger.debug('Subtitle search for %s %s delayed for %s',
                                 ep_to_sub['show_name'], ep_num, dhm(delay))
                    continue

            show_object = Show.find_by_id(app.showList, ep_to_sub['indexer'],
                                          ep_to_sub['showid'])
            if not show_object:
                logger.debug('Show with ID %s not found in the database',
                             ep_to_sub['showid'])
                continue

            episode_object = show_object.get_episode(ep_to_sub['season'],
                                                     ep_to_sub['episode'])
            # get_episode returns an error string when the episode is unknown.
            if isinstance(episode_object, str):
                logger.debug('%s %s not found in the database',
                             ep_to_sub['show_name'], ep_num)
                continue

            episode_object.download_subtitles()

        logger.info('Finished checking for missed subtitles')
        self.amActive = False
Esempio n. 31
0
    def find_episodes(self, episodes):
        """
        Search cache for episodes.

        NOTE: This is currently only used by the Backlog/Forced Search. As we determine the candidates there.
        The following checks are performed on the cache results:
        * Filter out non-anime results on Anime only providers
        * Check if the series is still in our library

        :param episodes: a single episode object, a list of episode objects,
            or a falsy value to select every cached result for this provider.

        :return: defaultdict mapping an episode number (or the special
            SEASON_RESULT / MULTI_EP_RESULT markers) to a list of
            SearchResult objects.
        """
        cache_results = defaultdict(list)
        results = []

        cache_db_con = self._get_db()
        # No episodes given: return everything cached for this provider.
        if not episodes:
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}]'.format(name=self.provider_id))
        # A single episode object: one direct select.
        elif not isinstance(episodes, list):
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}] '
                'WHERE indexer = ? AND '
                'indexerid = ? AND '
                'season = ? AND '
                'episodes LIKE ?'.format(name=self.provider_id),
                [episodes.series.indexer, episodes.series.series_id, episodes.season,
                 '%|{0}|%'.format(episodes.episode)]
            )
        else:
            # A list of episodes: batch the per-episode selects into one
            # mass_action call.
            for ep_obj in episodes:
                results.append([
                    'SELECT * FROM [{name}] '
                    'WHERE indexer = ? AND '
                    'indexerid = ? AND '
                    'season = ? AND '
                    'episodes LIKE ?'.format(
                        name=self.provider_id
                    ),
                    [ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season,
                     '%|{0}|%'.format(ep_obj.episode)]]
                )

            if results:
                # Only execute the query if we have results
                sql_results = cache_db_con.mass_action(results, fetchall=True)
                # mass_action returns one result list per query; flatten them.
                sql_results = list(itertools.chain(*sql_results))
            else:
                sql_results = []
                log.debug(
                    '{id}: No cached results in {provider} for series {show_name!r} episode {ep}', {
                        'id': episodes[0].series.series_id,
                        'provider': self.provider.name,
                        'show_name': episodes[0].series.name,
                        'ep': episode_num(episodes[0].season, episodes[0].episode),
                    }
                )

        # for each cache entry
        for cur_result in sql_results:
            if cur_result['indexer'] is None:
                log.debug('Ignoring result: {0}, missing indexer. This is probably a result added'
                          ' prior to medusa version 0.2.0', cur_result['name'])
                continue

            search_result = self.provider.get_result()

            # get the show, or ignore if it's not one of our shows
            series_obj = Show.find_by_id(app.showList, int(cur_result['indexer']), int(cur_result['indexerid']))
            if not series_obj:
                continue

            # skip if provider is anime only and show is not anime
            if self.provider.anime_only and not series_obj.is_anime:
                log.debug('{0} is not an anime, skipping', series_obj.name)
                continue

            # build a result object
            search_result.quality = int(cur_result['quality'])
            search_result.release_group = cur_result['release_group']
            search_result.version = cur_result['version']
            search_result.name = cur_result['name']
            search_result.url = cur_result['url']
            search_result.actual_season = int(cur_result['season'])

            # TODO: Add support for season results
            # Episodes are stored pipe-delimited, e.g. '|1|2|'; an empty
            # string after stripping means a whole-season result.
            sql_episodes = cur_result['episodes'].strip('|')
            # Season result
            if not sql_episodes:
                ep_objs = series_obj.get_all_episodes(search_result.actual_season)
                if not ep_objs:
                    # We couldn't get any episodes for this season, which is odd, skip the result.
                    log.debug("We couldn't get any episodes for season {0} of {1}, skipping",
                              search_result.actual_season, search_result.name)
                    continue
                actual_episodes = [ep.episode for ep in ep_objs]
                episode_number = SEASON_RESULT
            # Multi or single episode result
            else:
                actual_episodes = [int(ep) for ep in sql_episodes.split('|')]
                ep_objs = [series_obj.get_episode(search_result.actual_season, ep) for ep in actual_episodes]
                if len(actual_episodes) == 1:
                    episode_number = actual_episodes[0]
                else:
                    episode_number = MULTI_EP_RESULT

            search_result.episodes = ep_objs
            search_result.actual_episodes = actual_episodes

            # Map the remaining attributes
            search_result.series = series_obj
            search_result.seeders = cur_result['seeders']
            search_result.leechers = cur_result['leechers']
            search_result.size = cur_result['size']
            search_result.pubdate = cur_result['pubdate']
            search_result.proper_tags = cur_result['proper_tags'].split('|') if cur_result['proper_tags'] else ''
            search_result.content = None

            # add it to the list
            cache_results[episode_number].append(search_result)

        # datetime stamp this search so cache gets cleared
        self.searched = time()

        return cache_results
Esempio n. 32
0
    def find_episodes(self, episodes):
        """
        Search cache for episodes.

        NOTE: This is currently only used by the Backlog/Forced Search. As we determine the candidates there.
        The following checks are performed on the cache results:
        * Filter out non-anime results on Anime only providers
        * Check if the series is still in our library

        :param episodes: a single episode object, a list of episode objects,
            or a falsy value to select every cached result for this provider.

        :return: defaultdict mapping an episode number (or the special
            SEASON_RESULT / MULTI_EP_RESULT markers) to a list of
            SearchResult objects.
        """
        cache_results = defaultdict(list)
        results = []

        cache_db_con = self._get_db()
        # No episodes given: return everything cached for this provider.
        if not episodes:
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}]'.format(name=self.provider_id))
        # A single episode object: one direct select.
        elif not isinstance(episodes, list):
            sql_results = cache_db_con.select(
                'SELECT * FROM [{name}] '
                'WHERE indexer = ? AND '
                'indexerid = ? AND '
                'season = ? AND '
                'episodes LIKE ?'.format(name=self.provider_id), [
                    episodes.series.indexer, episodes.series.series_id,
                    episodes.season, '%|{0}|%'.format(episodes.episode)
                ])
        else:
            # A list of episodes: batch the per-episode selects into one
            # mass_action call.
            for ep_obj in episodes:
                results.append([
                    'SELECT * FROM [{name}] '
                    'WHERE indexer = ? AND '
                    'indexerid = ? AND '
                    'season = ? AND '
                    'episodes LIKE ?'.format(name=self.provider_id),
                    [
                        ep_obj.series.indexer, ep_obj.series.series_id,
                        ep_obj.season, '%|{0}|%'.format(ep_obj.episode)
                    ]
                ])

            if results:
                # Only execute the query if we have results
                sql_results = cache_db_con.mass_action(results, fetchall=True)
                # mass_action returns one result list per query; flatten them.
                sql_results = list(itertools.chain(*sql_results))
            else:
                sql_results = []
                log.debug(
                    '{id}: No cached results in {provider} for series {show_name!r} episode {ep}',
                    {
                        'id': episodes[0].series.series_id,
                        'provider': self.provider.name,
                        'show_name': episodes[0].series.name,
                        'ep': episode_num(episodes[0].season,
                                          episodes[0].episode),
                    })

        # for each cache entry
        for cur_result in sql_results:
            if cur_result['indexer'] is None:
                log.debug(
                    'Ignoring result: {0}, missing indexer. This is probably a result added'
                    ' prior to medusa version 0.2.0', cur_result['name'])
                continue

            search_result = self.provider.get_result()

            # get the show, or ignore if it's not one of our shows
            series_obj = Show.find_by_id(app.showList,
                                         int(cur_result['indexer']),
                                         int(cur_result['indexerid']))
            if not series_obj:
                continue

            # skip if provider is anime only and show is not anime
            if self.provider.anime_only and not series_obj.is_anime:
                log.debug('{0} is not an anime, skipping', series_obj.name)
                continue

            # build a result object
            search_result.quality = int(cur_result['quality'])
            search_result.release_group = cur_result['release_group']
            search_result.version = cur_result['version']
            search_result.name = cur_result['name']
            search_result.url = cur_result['url']
            search_result.actual_season = int(cur_result['season'])

            # TODO: Add support for season results
            # Episodes are stored pipe-delimited, e.g. '|1|2|'; an empty
            # string after stripping means a whole-season result.
            sql_episodes = cur_result['episodes'].strip('|')
            # Season result
            if not sql_episodes:
                ep_objs = series_obj.get_all_episodes(
                    search_result.actual_season)
                if not ep_objs:
                    # We couldn't get any episodes for this season, which is odd, skip the result.
                    log.debug(
                        "We couldn't get any episodes for season {0} of {1}, skipping",
                        search_result.actual_season, search_result.name)
                    continue
                actual_episodes = [ep.episode for ep in ep_objs]
                episode_number = SEASON_RESULT
            # Multi or single episode result
            else:
                actual_episodes = [int(ep) for ep in sql_episodes.split('|')]
                ep_objs = [
                    series_obj.get_episode(search_result.actual_season, ep)
                    for ep in actual_episodes
                ]
                if len(actual_episodes) == 1:
                    episode_number = actual_episodes[0]
                else:
                    episode_number = MULTI_EP_RESULT

            search_result.episodes = ep_objs
            search_result.actual_episodes = actual_episodes

            # Map the remaining attributes
            search_result.series = series_obj
            search_result.seeders = cur_result['seeders']
            search_result.leechers = cur_result['leechers']
            search_result.size = cur_result['size']
            search_result.pubdate = cur_result['pubdate']
            search_result.proper_tags = cur_result['proper_tags'].split(
                '|') if cur_result['proper_tags'] else ''
            search_result.content = None

            # add it to the list
            cache_results[episode_number].append(search_result)

        # datetime stamp this search so cache gets cleared
        self.searched = time()

        return cache_results
Esempio n. 33
0
    def pickManualSearch(self, provider=None, identifier=None):
        """
        Tries to Perform the snatch for a manualSelected episode, episodes or season pack.

        @param provider: The provider id, passed as usenet_crawler and not the provider name (Usenet-Crawler)
        @param identifier: The provider's cache table's identifier (unique).

        @return: A json with a {'success': true} or false.
        """
        # Try to retrieve the cached result from the providers cache table.
        provider_obj = providers.get_provider_class(provider)

        try:
            cached_result = Cache(provider_obj).load_from_row(identifier)
        except Exception as msg:
            # Broad catch: any failure reading the cache row is reported to
            # the user as a generic error page rather than a traceback.
            error_message = "Couldn't read cached results. Error: {error}".format(
                error=msg)
            logger.log(error_message)
            return self._genericMessage('Error', error_message)

        # Every field needed to build a snatchable result must be present.
        # Note: season is checked with 'is not None' because season 0 is valid.
        if not cached_result or not all([
                cached_result['url'], cached_result['quality'],
                cached_result['name'], cached_result['indexer'],
                cached_result['indexerid'], cached_result['season']
                is not None, provider
        ]):
            return self._genericMessage(
                'Error',
                "Cached result doesn't have all needed info to snatch episode")

        try:
            series_obj = Show.find_by_id(app.showList,
                                         cached_result['indexer'],
                                         cached_result['indexerid'])
        except (ValueError, TypeError):
            return self._genericMessage(
                'Error',
                'Invalid show ID: {0}'.format(cached_result['indexerid']))

        if not series_obj:
            return self._genericMessage(
                'Error',
                'Could not find a show with id {0} in the list of shows, '
                'did you remove the show?'.format(cached_result['indexerid']))

        # Rebuild a SearchResult from the cached row for this series.
        search_result = provider_obj.get_result(series=series_obj,
                                                cache=cached_result)
        search_result.search_type = SearchType.MANUAL_SEARCH

        # Create the queue item
        snatch_queue_item = SnatchQueueItem(search_result.series,
                                            search_result.episodes,
                                            search_result)

        # Add the queue item to the queue
        app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

        # Poll the queue item once per second until it resolves.
        # NOTE(review): this assumes 'success' starts as a non-False value
        # (presumably None) while pending and is set to True/False on
        # completion — confirm against SnatchQueueItem.
        while snatch_queue_item.success is not False:
            if snatch_queue_item.started and snatch_queue_item.success:
                # If the snatch was successfull we'll need to update the original searched segment,
                # with the new status: SNATCHED (2)
                update_finished_search_queue_item(snatch_queue_item)
                return json.dumps({
                    'result': 'success',
                })
            time.sleep(1)

        return json.dumps({
            'result': 'failure',
        })
Esempio n. 34
0
    def run(self, force=False):
        """
        Run the daily searcher, queuing selected episodes for search.

        :param force: Force search
        """
        if self.amActive:
            log.debug('Daily search is still running, not starting it again')
            return
        elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
            log.warning('Manual search is running. Unable to start Daily search')
            return

        self.amActive = True
        # Let's keep track of the exact time the scheduler kicked in,
        # as we need to compare to this time for each provider.
        scheduler_start_time = int(time())

        if not network_dict:
            update_network_dict()

        # The tvshows airdate_offset field is used to configure a search offset for specific shows.
        # This way we can search/accept results early or late, depending on the value.
        main_db_con = DBConnection()
        min_offset_show = main_db_con.select(
            'SELECT COUNT(*) as offsets, MIN(airdate_offset) AS min_offset '
            'FROM tv_shows '
            'WHERE paused = 0 AND airdate_offset < 0'
        )
        additional_search_offset = 0
        if min_offset_show and min_offset_show[0]['offsets'] > 0:
            additional_search_offset = int(ceil(abs(min_offset_show[0]['min_offset']) / 24.0))
            log.debug('Using an airdate offset of {min_offset_show} as we found show(s) with an airdate'
                      ' offset configured.', {'min_offset_show': min_offset_show[0]['min_offset']})

        cur_time = datetime.now(app_timezone)

        cur_date = (
            date.today() + timedelta(days=1 if network_dict else 2) + timedelta(days=additional_search_offset)
        ).toordinal()

        episodes_from_db = main_db_con.select(
            'SELECT indexer, showid, airdate, season, episode '
            'FROM tv_episodes '
            'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date]
        )

        new_releases = []
        series_obj = None

        for db_episode in episodes_from_db:
            indexer_id = db_episode['indexer']
            series_id = db_episode['showid']
            try:
                if not series_obj or series_id != series_obj.indexerid:
                    series_obj = Show.find_by_id(app.showList, indexer_id, series_id)

                # for when there is orphaned series in the database but not loaded into our show list
                if not series_obj or series_obj.paused:
                    continue

            except MultipleShowObjectsException:
                log.info('ERROR: expected to find a single show matching {id}',
                         {'id': series_id})
                continue

            cur_ep = series_obj.get_episode(db_episode['season'], db_episode['episode'])

            if series_obj.airs and series_obj.network:
                # This is how you assure it is always converted to local time
                show_air_time = parse_date_time(db_episode['airdate'], series_obj.airs, series_obj.network)
                end_time = show_air_time.astimezone(app_timezone) + timedelta(minutes=try_int(series_obj.runtime, 60))

                if series_obj.airdate_offset != 0:
                    log.debug(
                        '{show}: Applying an airdate offset for the episode: {episode} of {offset} hours',
                        {'show': series_obj.name, 'episode': cur_ep.pretty_name(), 'offset': series_obj.airdate_offset})

                # filter out any episodes that haven't finished airing yet
                if end_time + timedelta(hours=series_obj.airdate_offset) > cur_time:
                    continue

            with cur_ep.lock:
                cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
                log.info(
                    'Setting status ({status}) for show airing today: {name} {special}', {
                        'name': cur_ep.pretty_name(),
                        'status': common.statusStrings[cur_ep.status],
                        'special': '(specials are not supported)' if not cur_ep.season else '',
                    }
                )
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)

        # queue a daily search
        app.search_queue_scheduler.action.add_item(
            DailySearchQueueItem(scheduler_start_time, force=force)
        )

        self.amActive = False
Esempio n. 35
0
    def addShowByID(self, indexername=None, seriesid=None, show_name=None, which_series=None,
                    indexer_lang=None, root_dir=None, default_status=None,
                    quality_preset=None, any_qualities=None, best_qualities=None,
                    season_folders=None, subtitles=None, full_show_path=None,
                    other_shows=None, skip_show=None, provided_indexer=None,
                    anime=None, scene=None, blacklist=None, whitelist=None,
                    default_status_after=None, configure_show_options=False):
        """
        Add a new show with the provided show options by indexer id.

        Currently only TVDB and IMDB ids are supported; a non-TVDB id is
        translated to a TVDB id before the show is added.

        :param indexername: name of the indexer `seriesid` belongs to, e.g. 'tvdb'.
        :param seriesid: the show's id on that indexer.
        :param configure_show_options: when truthy, use the explicitly provided
            quality/status/folder options instead of the application defaults.
        :return: a json_response describing the result of the request.
        """
        # Bail out early on a malformed request: without this guard the lookup
        # below raises AttributeError on `indexername.upper()` when the
        # parameter is missing.
        if not indexername or not seriesid:
            return json_response(
                result=False,
                message='Invalid show parameters provided. Unable to add show'
            )

        series_id = seriesid
        if indexername != 'tvdb':
            # Translate the foreign (e.g. IMDB) id into a TVDB id.
            series_id = helpers.get_tvdb_from_id(seriesid, indexername.upper())
            if not series_id:
                log.info('Unable to find tvdb ID to add {name}', {'name': show_name})
                ui.notifications.error(
                    'Unable to add {0}'.format(show_name),
                    'Could not add {0}. We were unable to locate the tvdb id at this time.'.format(show_name)
                )
                return json_response(
                    result=False,
                    message='Unable to find tvdb ID to add {show}'.format(show=show_name)
                )

        if Show.find_by_id(app.showList, INDEXER_TVDBV2, series_id):
            return json_response(
                result=False,
                message='Show already exists'
            )

        # Sanitize the parameter allowed_qualities and preferred_qualities. As these would normally be passed as lists
        if any_qualities:
            any_qualities = any_qualities.split(',')
        else:
            any_qualities = []

        if best_qualities:
            best_qualities = best_qualities.split(',')
        else:
            best_qualities = []

        # If configure_show_options is enabled let's use the provided settings
        configure_show_options = config.checkbox_to_value(configure_show_options)

        if configure_show_options:
            # prepare the inputs for passing along
            scene = config.checkbox_to_value(scene)
            anime = config.checkbox_to_value(anime)
            season_folders = config.checkbox_to_value(season_folders)
            subtitles = config.checkbox_to_value(subtitles)

            if whitelist:
                whitelist = short_group_names(whitelist)
            if blacklist:
                blacklist = short_group_names(blacklist)

            if not any_qualities:
                any_qualities = []

            # A quality preset overrides any explicitly preferred qualities.
            if not best_qualities or try_int(quality_preset, None):
                best_qualities = []

            if not isinstance(any_qualities, list):
                any_qualities = [any_qualities]

            if not isinstance(best_qualities, list):
                best_qualities = [best_qualities]

            quality = Quality.combine_qualities([int(q) for q in any_qualities], [int(q) for q in best_qualities])

            location = root_dir

        else:
            # Fall back to the application-wide defaults for every option.
            default_status = app.STATUS_DEFAULT
            quality = app.QUALITY_DEFAULT
            season_folders = app.SEASON_FOLDERS_DEFAULT
            subtitles = app.SUBTITLES_DEFAULT
            anime = app.ANIME_DEFAULT
            scene = app.SCENE_DEFAULT
            default_status_after = app.STATUS_DEFAULT_AFTER

            if app.ROOT_DIRS:
                root_dirs = app.ROOT_DIRS
                # ROOT_DIRS[0] holds the index of the default root directory.
                location = root_dirs[int(root_dirs[0]) + 1]
            else:
                location = None

        if not location:
            log.warning('There was an error creating the show, no root directory setting found')
            return json_response(
                result=False,
                message='No root directories set up, please go back and add one.'
            )

        show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
        show_dir = None

        # add the show
        app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2, int(series_id), show_dir, int(default_status), quality,
                                                season_folders, indexer_lang, subtitles, anime, scene, None, blacklist,
                                                whitelist, int(default_status_after), root_dir=location)

        ui.notifications.message('Show added', 'Adding the specified show {0}'.format(show_name))

        # done adding show
        return json_response(
            message='Adding the specified show {0}'.format(show_name),
            redirect='home'
        )
Esempio n. 36
0
    def run(self, force=False):  # pylint:disable=too-many-branches
        """
        Run the daily searcher, queuing selected episodes for search.

        Episodes whose airing window has passed are moved from UNAIRED to the
        show's default episode status (specials are always set to SKIPPED),
        their changes are written back in one mass DB action, and a
        DailySearchQueueItem is queued afterwards.

        :param force: Force search
        """
        # Don't start a second pass while one is already running, and yield to
        # a manual (forced) search unless this run itself is forced.
        if self.amActive:
            log.debug('Daily search is still running, not starting it again')
            return
        elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress(
        ) and not force:
            log.warning(
                'Manual search is running. Unable to start Daily search')
            return

        self.amActive = True

        if not network_dict:
            update_network_dict()

        cur_time = datetime.now(app_timezone)
        # Look one day ahead when network data is available, two days otherwise.
        cur_date = (date.today() +
                    timedelta(days=1 if network_dict else 2)).toordinal()

        main_db_con = DBConnection()
        # NOTE(review): rows are accessed with bytes keys below (legacy Python 2
        # style); presumably DBConnection returns rows keyed that way -- confirm.
        episodes_from_db = main_db_con.select(
            b'SELECT indexer, showid, airdate, season, episode '
            b'FROM tv_episodes '
            b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
            [common.UNAIRED, cur_date])

        new_releases = []
        series_obj = None

        for db_episode in episodes_from_db:
            indexer_id = db_episode[b'indexer']
            series_id = db_episode[b'showid']
            try:
                # Cache the previous show lookup so consecutive rows belonging
                # to the same series skip a second find_by_id call.
                if not series_obj or series_id != series_obj.indexerid:
                    series_obj = Show.find_by_id(app.showList, indexer_id,
                                                 series_id)

                # for when there is orphaned series in the database but not loaded into our show list
                if not series_obj or series_obj.paused:
                    continue

            except MultipleShowObjectsException:
                log.info('ERROR: expected to find a single show matching {id}',
                         {'id': series_id})
                continue

            if series_obj.airs and series_obj.network:
                # This is how you assure it is always converted to local time
                show_air_time = parse_date_time(db_episode[b'airdate'],
                                                series_obj.airs,
                                                series_obj.network)
                # The episode is considered finished once airtime + runtime
                # (defaulting to 60 minutes) has passed.
                end_time = show_air_time.astimezone(app_timezone) + timedelta(
                    minutes=try_int(series_obj.runtime, 60))

                # filter out any episodes that haven't finished airing yet,
                if end_time > cur_time:
                    continue

            cur_ep = series_obj.get_episode(db_episode[b'season'],
                                            db_episode[b'episode'])
            with cur_ep.lock:
                # Specials (season 0) are always SKIPPED, never searched.
                cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED
                log.info(
                    'Setting status ({status}) for show airing today: {name} {special}',
                    {
                        'name':
                        cur_ep.pretty_name(),
                        'status':
                        common.statusStrings[cur_ep.status],
                        'special':
                        '(specials are not supported)'
                        if not cur_ep.season else '',
                    })
                new_releases.append(cur_ep.get_sql())

        if new_releases:
            main_db_con = DBConnection()
            main_db_con.mass_action(new_releases)

        # queue episode for daily search
        app.search_queue_scheduler.action.add_item(
            DailySearchQueueItem(force=force))

        self.amActive = False
Esempio n. 37
0
    def addShowByID(self,
                    indexername=None,
                    seriesid=None,
                    show_name=None,
                    which_series=None,
                    indexer_lang=None,
                    root_dir=None,
                    default_status=None,
                    quality_preset=None,
                    any_qualities=None,
                    best_qualities=None,
                    season_folders=None,
                    subtitles=None,
                    full_show_path=None,
                    other_shows=None,
                    skip_show=None,
                    provided_indexer=None,
                    anime=None,
                    scene=None,
                    blacklist=None,
                    whitelist=None,
                    default_status_after=None,
                    configure_show_options=False):
        """
        Add a new show with the provided show options by indexer id.

        Currently only TVDB and IMDB ids are supported; a non-TVDB id is
        translated to a TVDB id before the show is added.

        :param indexername: name of the indexer `seriesid` belongs to, e.g. 'tvdb'.
        :param seriesid: the show's id on that indexer.
        :param configure_show_options: when truthy, use the explicitly provided
            quality/status/folder options instead of the application defaults.
        :return: a redirect to /home on success, an error string when no root
            directory is configured, or None when the show could not be added.
        """
        # Bail out early on a malformed request: without this guard the lookup
        # below raises AttributeError on `indexername.upper()` when the
        # parameter is missing.
        if not indexername or not seriesid:
            logger.log(u'Invalid show parameters provided, unable to add show',
                       logger.WARNING)
            return

        series_id = seriesid
        if indexername != 'tvdb':
            # Translate the foreign (e.g. IMDB) id into a TVDB id.
            series_id = helpers.get_tvdb_from_id(seriesid, indexername.upper())
            if not series_id:
                logger.log(u'Unable to find tvdb ID to add %s' % show_name)
                ui.notifications.error(
                    'Unable to add %s' % show_name,
                    'Could not add %s.  We were unable to locate the tvdb id at this time.'
                    % show_name)
                return

        if Show.find_by_id(app.showList, INDEXER_TVDBV2, series_id):
            return

        # Sanitize the parameter allowed_qualities and preferred_qualities. As these would normally be passed as lists
        if any_qualities:
            any_qualities = any_qualities.split(',')
        else:
            any_qualities = []

        if best_qualities:
            best_qualities = best_qualities.split(',')
        else:
            best_qualities = []

        # If configure_show_options is enabled let's use the provided settings
        configure_show_options = config.checkbox_to_value(
            configure_show_options)

        if configure_show_options:
            # prepare the inputs for passing along
            scene = config.checkbox_to_value(scene)
            anime = config.checkbox_to_value(anime)
            season_folders = config.checkbox_to_value(season_folders)
            subtitles = config.checkbox_to_value(subtitles)

            if whitelist:
                whitelist = short_group_names(whitelist)
            if blacklist:
                blacklist = short_group_names(blacklist)

            if not any_qualities:
                any_qualities = []

            # A quality preset overrides any explicitly preferred qualities.
            if not best_qualities or try_int(quality_preset, None):
                best_qualities = []

            if not isinstance(any_qualities, list):
                any_qualities = [any_qualities]

            if not isinstance(best_qualities, list):
                best_qualities = [best_qualities]

            quality = Quality.combine_qualities(
                [int(q) for q in any_qualities],
                [int(q) for q in best_qualities])

            location = root_dir

        else:
            # Fall back to the application-wide defaults for every option.
            default_status = app.STATUS_DEFAULT
            quality = app.QUALITY_DEFAULT
            season_folders = app.SEASON_FOLDERS_DEFAULT
            subtitles = app.SUBTITLES_DEFAULT
            anime = app.ANIME_DEFAULT
            scene = app.SCENE_DEFAULT
            default_status_after = app.STATUS_DEFAULT_AFTER

            if app.ROOT_DIRS:
                root_dirs = app.ROOT_DIRS
                # ROOT_DIRS[0] holds the index of the default root directory.
                location = root_dirs[int(root_dirs[0]) + 1]
            else:
                location = None

        if not location:
            logger.log(
                u'There was an error creating the show, '
                u'no root directory setting found', logger.WARNING)
            return 'No root directories setup, please go back and add one.'

        show_name = get_showname_from_indexer(INDEXER_TVDBV2, series_id)
        show_dir = None

        # add the show
        app.show_queue_scheduler.action.addShow(INDEXER_TVDBV2,
                                                int(series_id),
                                                show_dir,
                                                int(default_status),
                                                quality,
                                                season_folders,
                                                indexer_lang,
                                                subtitles,
                                                anime,
                                                scene,
                                                None,
                                                blacklist,
                                                whitelist,
                                                int(default_status_after),
                                                root_dir=location)

        ui.notifications.message(
            'Show added', 'Adding the specified show {0}'.format(show_name))

        # done adding show
        return self.redirect('/home/')
Esempio n. 38
0
    def massAddTable(self, rootDir=None):
        """
        Render the mass-add table for the selected root folder(s).

        Scans each selected root directory for sub-folders, records whether
        each one is already present in the tv_shows table, and tries to
        recover existing show metadata from the metadata providers.

        :param rootDir: a (possibly url-encoded) folder path or list of paths.
        :return: the rendered home_massAddTable.mako template, or an error
            string when no folder was selected.
        """
        template = PageTemplate(rh=self, filename='home_massAddTable.mako')

        if not rootDir:
            return 'No folders selected.'

        raw_dirs = rootDir if isinstance(rootDir, list) else [rootDir]
        selected_dirs = [unquote_plus(folder) for folder in raw_dirs]

        default_index = int(app.ROOT_DIRS[0]) if app.ROOT_DIRS else 0

        # Move the configured default root directory to the front of the list.
        if len(selected_dirs) > default_index:
            preferred = selected_dirs[default_index]
            selected_dirs.remove(preferred)
            selected_dirs.insert(0, preferred)

        table_rows = []
        main_db_con = db.DBConnection()

        for base_dir in selected_dirs:
            try:
                entries = os.listdir(base_dir)
            except Exception as error:
                logger.log('Unable to listdir {path}: {e!r}'.format(path=base_dir, e=error))
                continue

            for entry in entries:
                try:
                    entry_path = os.path.normpath(os.path.join(base_dir, entry))
                    if not os.path.isdir(entry_path):
                        continue
                except Exception as error:
                    logger.log('Unable to get current path {path} and {file}: {e!r}'.format(
                        path=base_dir, file=entry, e=error))
                    continue

                row = {
                    'dir': entry_path,
                    'display_dir': '<b>{dir}{sep}</b>{base}'.format(
                        dir=os.path.dirname(entry_path), sep=os.sep, base=os.path.basename(entry_path)),
                }

                # see if the folder is in KODI already
                known = main_db_con.select(
                    b'SELECT indexer, indexer_id '
                    b'FROM tv_shows '
                    b'WHERE location = ? LIMIT 1',
                    [entry_path]
                )
                row['added_already'] = bool(known)

                table_rows.append(row)

                indexer_id = show_name = indexer = None
                # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
                # is reset. This will prevent errors, when using multiple indexers and caching.
                for cur_provider in app.metadata_provider_dict.values():
                    if not (indexer_id and show_name):
                        (indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(entry_path)

                row['existing_info'] = (indexer_id, show_name, indexer)

                if indexer_id and indexer and Show.find_by_id(app.showList, indexer, indexer_id):
                    row['added_already'] = True
        return template.render(dirList=table_rows)