def sync_trakt_shows(self):
    """Sync Trakt shows watchlist.

    Any watchlisted show not yet in the local show list is added through
    the show queue, using the configured default Trakt indexer.
    """
    if not self.show_watchlist:
        log.info('No shows found in your Trakt watchlist. Nothing to sync')
    else:
        trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)
        for watchlisted_show in self.show_watchlist:
            trakt_show = watchlisted_show['show']

            # Trakt slugs end with the year when the title needs
            # disambiguation; mirror that in the local show name.
            if trakt_show['year'] and trakt_show['ids']['slug'].endswith(str(trakt_show['year'])):
                show_name = '{title} ({year})'.format(title=trakt_show['title'],
                                                      year=trakt_show['year'])
            else:
                show_name = trakt_show['title']

            # Try to locate the show using every configured indexer id.
            show = None
            for i in indexerConfig:
                trakt_indexer = get_trakt_indexer(i)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                indexer = indexerConfig[i]['id']
                show = Show.find(app.showList, indexer_id, indexer)
                if show:
                    break

            if not show:
                # If can't find with available indexers try IMDB
                trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find(app.showList, indexer_id, EXTERNAL_IMDB)

            if not show:
                # If can't find with available indexers try TRAKT
                trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
                indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
                show = Show.find(app.showList, indexer_id, EXTERNAL_TRAKT)

            if show:
                # Already in the library; nothing to add.
                continue

            indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
            # TRAKT_METHOD_ADD == 2 means "add as wanted"; anything else adds skipped.
            if int(app.TRAKT_METHOD_ADD) != 2:
                self.add_show(trakt_default_indexer, indexer_id, show_name, SKIPPED)
            else:
                self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED)

            # TRAKT_METHOD_ADD == 1 additionally marks the pilot as wanted.
            if int(app.TRAKT_METHOD_ADD) == 1:
                new_show = Show.find(app.showList, indexer_id, indexer)
                if new_show:
                    set_episode_to_wanted(new_show, 1, 1)
                else:
                    # BUGFIX: the adjacent string literals were missing a
                    # separating space, logging "...show.Pilot will be...".
                    log.warning('Unable to find the new added show. '
                                'Pilot will be set to wanted in the next Trakt run')
                    self.todoWanted.append(indexer_id)

        log.debug('Synced shows with Trakt watchlist')
def sync_trakt_episodes(self):
    """Sync Trakt episodes watchlist.

    Watchlisted episodes of known, unpaused shows are set to wanted;
    episodes of unknown shows cause the show to be added (skipped) first.
    """
    if not self.episode_watchlist:
        log.info('No episodes found in your Trakt watchlist. Nothing to sync')
        return

    added_shows = []
    trakt_default_indexer = int(app.TRAKT_DEFAULT_INDEXER)

    for watchlist_item in self.episode_watchlist:
        trakt_show = watchlist_item['show']
        trakt_episode = watchlist_item['episode'].get('number', -1)
        trakt_season = watchlist_item['episode'].get('season', -1)

        # Try to locate the show using every configured indexer id.
        show = None
        for i in indexerConfig:
            trakt_indexer = get_trakt_indexer(i)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            indexer = indexerConfig[i]['id']
            show = Show.find(app.showList, indexer_id, indexer)
            if show:
                break

        if not show:
            # If can't find with available indexers try IMDB
            trakt_indexer = get_trakt_indexer(EXTERNAL_IMDB)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find(app.showList, indexer_id, EXTERNAL_IMDB)

        if not show:
            # If can't find with available indexers try TRAKT
            trakt_indexer = get_trakt_indexer(EXTERNAL_TRAKT)
            indexer_id = trakt_show['ids'].get(trakt_indexer, -1)
            show = Show.find(app.showList, indexer_id, EXTERNAL_TRAKT)

        # If can't find show add with default trakt indexer
        if not show:
            indexer_id = trakt_show['ids'].get(get_trakt_indexer(trakt_default_indexer), -1)
            # Only add the show if we didn't add it before in this run.
            if indexer_id not in added_shows:
                self.add_show(trakt_default_indexer, indexer_id, trakt_show['title'], SKIPPED)
                added_shows.append(indexer_id)
        # IDIOM: replaced 'not trakt_season == 0' with 'trakt_season != 0'.
        # Season 0 (specials) is deliberately excluded from auto-wanting.
        elif trakt_season != 0 and not show.paused:
            set_episode_to_wanted(show, trakt_season, trakt_episode)

    log.debug('Synced episodes with Trakt watchlist')
def get_episodes(search_thread, searchstatus):
    """Get all episodes located in a search thread with a specific status."""
    # NOTE!: Show.find called with just indexerid!
    series = Show.find(app.showList, int(search_thread.show.indexerid))
    if not series:
        if not search_thread.show.is_recently_deleted:
            log.error(u'No Show Object found for show with indexerID: {0}',
                      search_thread.show.indexerid)
        return []

    # Normalize the segment attribute in place: callers may hand us a
    # single episode object instead of a list.
    if not isinstance(search_thread.segment, list):
        search_thread.segment = [search_thread.segment]

    episodes = []
    for segment_ep in search_thread.segment:
        episode = series.get_episode(segment_ep.season, segment_ep.episode)
        episodes.append({
            'show': series.indexerid,
            'episode': episode.episode,
            'episodeindexid': episode.indexerid,
            'season': episode.season,
            'searchstatus': searchstatus,
            'status': statusStrings[episode.status],
            'quality': get_quality_class(episode),
            'overview': Overview.overviewStrings[
                series.get_overview(episode.status,
                                    manually_searched=episode.manually_searched)],
        })
    return episodes
def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=True):
    """
    Return a (season, episode) tuple with the scene numbering (if there is one),
    otherwise the xem numbering (if fallback_to_xem is set), otherwise the
    TVDB numbering — so the return values are always set.

    :param indexer_id: int, show id on the indexer
    :param indexer: int, indexer id
    :param season: int
    :param episode: int
    :param fallback_to_xem: bool If set (the default), check xem for matches
        if there is no local scene numbering
    :return: (int, int) a tuple with (season, episode)
    """
    # Missing inputs: nothing to translate, echo back what we got.
    if indexer_id is None or season is None or episode is None:
        return season, episode

    # Shows not flagged as "scene" never get translated numbering.
    show_obj = Show.find(app.showList, int(indexer_id))
    if show_obj and not show_obj.is_scene:
        return season, episode

    # IDIOM: flattened the else-after-return pyramid into guard clauses;
    # the lookup order (local scene table, then xem, then passthrough)
    # is unchanged.
    result = find_scene_numbering(int(indexer_id), int(indexer), season, episode)
    if result:
        return result

    if fallback_to_xem:
        xem_result = find_xem_numbering(int(indexer_id), int(indexer), season, episode)
        if xem_result:
            return xem_result

    return season, episode
def get_episode(show, season=None, episode=None, absolute=None):
    """
    Get a specific episode object based on show, season and episode number.

    Note: returns error strings instead of raising, as callers check the
    result with isinstance(..., str).

    :param show: Show indexer id
    :param season: Season number
    :param episode: Episode number
    :param absolute: Optional if the episode number is a scene absolute number
    :return: episode object, or an error string on failure
    """
    if show is None:
        return 'Invalid show parameters'

    show_obj = Show.find(app.showList, int(show))

    if show_obj is None:
        return 'Invalid show parameters'

    if absolute:
        ep_obj = show_obj.get_episode(absolute_number=absolute)
    # BUGFIX: the previous truthiness check ('season and episode') rejected
    # season 0 (specials) as 'Invalid parameters'; compare against None.
    elif season is not None and episode is not None:
        ep_obj = show_obj.get_episode(season, episode)
    else:
        return 'Invalid parameters'

    if ep_obj is None:
        return 'Unable to retrieve episode'

    return ep_obj
def add_show(indexer, indexer_id, show_name, status):
    """Add a new show with default settings."""
    # Skip shows already present in the library.
    if Show.find(app.showList, int(indexer_id)):
        return

    root_dirs = app.ROOT_DIRS
    # ROOT_DIRS stores the default index at position 0, followed by paths.
    location = root_dirs[int(root_dirs[0]) + 1] if root_dirs else None

    if not location:
        log.warning(
            "Error creating show '{show}' folder. No default root directory",
            {'show': show_name})
        return

    log.info(
        "Adding show '{show}' using indexer: '{indexer_name}' and ID: {id}", {
            'show': show_name,
            'indexer_name': indexerConfig[indexer]['identifier'],
            'id': indexer_id
        })

    app.show_queue_scheduler.action.addShow(
        indexer, indexer_id, None,
        default_status=status,
        quality=int(app.QUALITY_DEFAULT),
        flatten_folders=int(app.FLATTEN_FOLDERS_DEFAULT),
        paused=app.TRAKT_START_PAUSED,
        default_status_after=status,
        root_dir=location)
def backlogShow(self, indexer_id):
    # Queue a backlog search for the selected show (if it exists),
    # then send the user back to the backlog overview page.
    series = Show.find(app.showList, int(indexer_id))

    if series:
        app.backlog_search_scheduler.action.search_backlog([series])

    return self.redirect('/manage/backlogOverview/')
def find_propers(self, proper_candidates):
    """Find propers in providers.

    :param proper_candidates: iterable of DB rows (bytes-keyed) holding
        b'showid', b'season' and b'episode' for each candidate.
    :return: list of search result objects populated from provider hits.
    """
    results = []

    for proper_candidate in proper_candidates:
        # Candidates with a falsy showid cannot be resolved to a show.
        show_obj = Show.find(app.showList, int(proper_candidate[b'showid'])) if proper_candidate[b'showid'] else None

        if show_obj:
            self.show = show_obj
            episode_obj = show_obj.get_episode(proper_candidate[b'season'], proper_candidate[b'episode'])

            # Search once per configured proper keyword (e.g. PROPER/REPACK).
            for term in self.proper_strings:
                search_strings = self._get_episode_search_strings(episode_obj, add_string=term)

                for item in self.search(search_strings[0], ep_obj=episode_obj):
                    search_result = self.get_result()
                    # NOTE(review): the result is appended before its fields
                    # are filled in; the same object is mutated below.
                    results.append(search_result)

                    search_result.name, search_result.url = self._get_title_and_url(item)
                    search_result.seeders, search_result.leechers = self._get_result_info(item)
                    search_result.size = self._get_size(item)
                    search_result.pubdate = self._get_pubdate(item)

                    # This will be retrieved from the parser
                    search_result.proper_tags = ''

                    search_result.search_type = PROPER_SEARCH
                    search_result.date = datetime.today()
                    search_result.show = show_obj

    return results
def test_find(self):
    """Show.find should match only on exact integer indexer ids."""
    app.QUALITY_DEFAULT = Quality.FULLHDTV
    app.showList = []

    show123 = TestTVShow(0, 123)
    show456 = TestTVShow(0, 456)
    show789 = TestTVShow(0, 789)
    shows = [show123, show456, show789]
    shows_duplicate = shows + shows

    # (use show list?, lookup key) -> expected result
    test_cases = {
        (False, None): None,
        (False, ''): None,
        (False, '123'): None,
        (False, 123): None,
        (False, 12.3): None,
        (True, None): None,
        (True, ''): None,
        (True, '123'): None,
        (True, 123): show123,
        (True, 12.3): None,
        (True, 456): show456,
        (True, 789): show789,
    }
    unicode_test_cases = {
        (False, u''): None,
        (False, u'123'): None,
        (True, u''): None,
        (True, u'123'): None,
    }

    # Exercise both case tables in a single pass.
    all_cases = list(iteritems(test_cases)) + list(iteritems(unicode_test_cases))
    for (use_shows, indexer_id), expected in all_cases:
        haystack = shows if use_shows else None
        self.assertEqual(Show.find(haystack, indexer_id), expected)

    # Duplicate entries for the same id must raise.
    with self.assertRaises(MultipleShowObjectsException):
        Show.find(shows_duplicate, 456)
def set_scene_numbering(indexer_id, indexer, season=None, episode=None,  # pylint:disable=too-many-arguments
                        absolute_number=None, sceneSeason=None,
                        sceneEpisode=None, sceneAbsolute=None):
    """
    Set scene numbering for a season/episode.

    To clear the scene numbering, leave both sceneSeason and sceneEpisode as None.

    :param indexer_id: show id on the indexer (required)
    :param indexer: indexer id
    :param season: indexer season (used with episode)
    :param episode: indexer episode (used with season)
    :param absolute_number: absolute numbering variant (anime), used when
        season/episode are not given
    :param sceneSeason: scene season to store, or None to clear
    :param sceneEpisode: scene episode to store, or None to clear
    :param sceneAbsolute: scene absolute number to store, or None to clear
    """
    if indexer_id is None:
        return

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    main_db_con = db.DBConnection()
    # Season/episode can be 0 so can't check "if season"
    if season is not None and episode is not None and absolute_number is None:
        main_db_con.action(
            "INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)",
            [indexer, indexer_id, season, episode])

        main_db_con.action(
            "UPDATE scene_numbering SET scene_season = ?, scene_episode = ? WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?",
            [sceneSeason, sceneEpisode, indexer, indexer_id, season, episode])
    # absolute_number can be 0 so can't check "if absolute_number"
    else:
        main_db_con.action(
            "INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, absolute_number) VALUES (?,?,?)",
            [indexer, indexer_id, absolute_number])

        main_db_con.action(
            "UPDATE scene_numbering SET scene_absolute_number = ? WHERE indexer = ? and indexer_id = ? and absolute_number = ?",
            [sceneAbsolute, indexer, indexer_id, absolute_number])

    # Reload data from DB so that cache and db are in sync
    show = Show.find(app.showList, indexer_id)
    # ROBUSTNESS: Show.find can return None (show removed from the list);
    # previously this raised AttributeError on None.
    if show:
        show.flush_episodes()
        show.erase_cached_parse()
def downloadSubtitleMissed(self, *args, **kwargs):
    """Download subtitles for the episodes checked in the subtitleMissed form.

    Form keys look like '<indexer_id>-<what>' with value 'on'; '<what>' is
    either 'all' or a 'SxE' episode token.
    """
    to_download = {}

    # make a list of all shows and their associated args
    for arg in kwargs:
        indexer_id, what = arg.split('-')

        # we don't care about unchecked checkboxes
        if kwargs[arg] != 'on':
            continue

        # IDIOM: setdefault replaces the explicit membership check.
        to_download.setdefault(indexer_id, []).append(what)

    for cur_indexer_id in to_download:
        # get a list of all the eps we want to download subtitles if they just said 'all'
        if 'all' in to_download[cur_indexer_id]:
            main_db_con = db.DBConnection()
            all_eps_results = main_db_con.select(
                b'SELECT season, episode '
                b'FROM tv_episodes '
                b'WHERE status LIKE \'%4\' '
                b'AND season != 0 '
                b'AND showid = ? '
                b'AND location != \'\'', [cur_indexer_id])
            to_download[cur_indexer_id] = [
                str(x[b'season']) + 'x' + str(x[b'episode'])
                for x in all_eps_results
            ]

        # PERF: look the show up once per show instead of once per episode
        # (Show.find is a linear scan of the show list).
        show = Show.find(app.showList, int(cur_indexer_id))
        for epResult in to_download[cur_indexer_id]:
            season, episode = epResult.split('x')
            show.get_episode(season, episode).download_subtitles()

    return self.redirect('/manage/subtitleMissed/')
def series(self):
    """Find the series by indexer id."""
    try:
        found = Show.find(app.showList, self.indexer_id)
    except MultipleShowObjectsException:
        # Ambiguous id: treat it the same as "not found".
        return None
    return found
def get_provider_cache_results(indexer, show_all_results=None, perform_search=None,
                               show=None, season=None, episode=None,
                               manual_search_type=None, **search_show):
    """Check all provider cache tables for search results.

    Builds one UNIONed SELECT across every enabled manual-search provider's
    cache table, optionally filtered to the given season/episode. If nothing
    is cached (or perform_search is truthy) a forced search is queued instead.

    :param indexer: indexer id (unused directly here — TODO confirm caller contract)
    :param show_all_results: when falsy, filter to the requested season/episode
    :param perform_search: when truthy, always queue a new forced search
    :param show: show indexer id
    :param season: season number
    :param episode: episode number
    :param manual_search_type: 'season' for season packs, else episode search
    :return: dict with 'found_items', 'error' and JSON-encoded 'last_prov_updates'
    """
    down_cur_quality = 0
    show_obj = Show.find(app.showList, int(show))
    preferred_words = show_obj.show_words().preferred_words
    undesired_words = show_obj.show_words().undesired_words
    ignored_words = show_obj.show_words().ignored_words
    required_words = show_obj.show_words().required_words

    main_db_con = db.DBConnection('cache.db')

    provider_results = {'last_prov_updates': {}, 'error': {}, 'found_items': []}
    original_thread_name = threading.currentThread().name

    sql_total = []
    combined_sql_q = []
    combined_sql_params = []

    for cur_provider in enabled_providers('manualsearch'):
        threading.currentThread().name = '{thread} :: [{provider}]'.format(
            thread=original_thread_name, provider=cur_provider.name)

        # Let's check if this provider table already exists
        table_exists = main_db_con.select(
            b"SELECT name "
            b"FROM sqlite_master "
            b"WHERE type='table'"
            b" AND name=?",
            [cur_provider.get_id()])
        columns = [
            i[1] for i in main_db_con.select("PRAGMA table_info('{0}')".format(
                cur_provider.get_id()))
        ] if table_exists else []
        minseed = int(cur_provider.minseed) if getattr(cur_provider, 'minseed', None) else -1
        minleech = int(cur_provider.minleech) if getattr(
            cur_provider, 'minleech', None) else -1

        # TODO: the implicit sqlite rowid is used, should be replaced with an explicit PK column
        # If table doesn't exist, start a search to create table and new columns seeders, leechers and size
        required_columns = ['seeders', 'leechers', 'size', 'proper_tags']
        if table_exists and all(required_column in columns
                                for required_column in required_columns):
            # The default sql, that's executed for each providers cache table
            common_sql = (
                b"SELECT rowid, ? AS 'provider_type', ? AS 'provider_image',"
                b" ? AS 'provider', ? AS 'provider_id', ? 'provider_minseed',"
                b" ? 'provider_minleech', name, season, episodes, indexerid,"
                b" url, time, proper_tags, quality, release_group, version,"
                b" seeders, leechers, size, time, pubdate "
                b"FROM '{provider_id}' "
                b"WHERE indexerid = ? AND quality > 0 ".format(
                    provider_id=cur_provider.get_id()))

            # Let's start by adding the default parameters, which are used to subsitute the '?'s.
            add_params = [
                cur_provider.provider_type.title(), cur_provider.image_name(),
                cur_provider.name, cur_provider.get_id(), minseed, minleech, show
            ]

            if manual_search_type != 'season':
                # If were not looking for all results, meaning don't do the filter on season + ep, add sql
                if not int(show_all_results):
                    # If it's an episode search, pass season and episode.
                    common_sql += " AND season = ? AND episodes LIKE ? "
                    add_params += [season, "%|{0}|%".format(episode)]
            else:
                # If were not looking for all results, meaning don't do the filter on season + ep, add sql
                if not int(show_all_results):
                    # Season pack: match any episode of the season, or an
                    # empty episodes field ('||').
                    list_of_episodes = '{0}{1}'.format(
                        ' episodes LIKE ', ' AND episodes LIKE '.join(
                            ['?' for _ in show_obj.get_all_episodes(season)]))

                    common_sql += " AND season = ? AND (episodes LIKE ? OR {list_of_episodes})".format(
                        list_of_episodes=list_of_episodes)

                    add_params += [season, '||']  # When the episodes field is empty.
                    add_params += [
                        '%|{episode}|%'.format(episode=ep.episode)
                        for ep in show_obj.get_all_episodes(season)
                    ]

            # Add the created sql, to lists, that are used down below to perform one big UNIONED query
            combined_sql_q.append(common_sql)
            combined_sql_params += add_params

            # Get the last updated cache items timestamp
            last_update = main_db_con.select(
                b"SELECT max(time) AS lastupdate "
                b"FROM '{provider_id}'".format(
                    provider_id=cur_provider.get_id()))
            provider_results['last_prov_updates'][
                cur_provider.get_id()] = last_update[0][
                    'lastupdate'] if last_update[0]['lastupdate'] else 0

    # Check if we have the combined sql strings
    if combined_sql_q:
        sql_prepend = b"SELECT * FROM ("
        sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \
                     b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN)

        # Add all results
        sql_total += main_db_con.select(
            b'{0} {1} {2}'.format(sql_prepend,
                                  ' UNION ALL '.join(combined_sql_q),
                                  sql_append), combined_sql_params)

    # Always start a search when no items found in cache
    if not sql_total or int(perform_search):
        # retrieve the episode object and fail if we can't get one
        ep_obj = get_episode(show, season, episode)
        if isinstance(ep_obj, str):
            provider_results['error'] = 'Something went wrong when starting the manual search for show {0}, \
            and episode: {1}x{2}'.format(show_obj.name, season, episode)

        # make a queue item for it and put it on the queue
        # NOTE(review): this runs even when ep_obj is an error string, in
        # which case ep_obj.series will fail — confirm intended behavior.
        ep_queue_item = ForcedSearchQueueItem(ep_obj.series, [ep_obj],
                                              bool(int(down_cur_quality)), True,
                                              manual_search_type)  # pylint: disable=maybe-no-member

        app.forced_search_queue_scheduler.action.add_item(ep_queue_item)

        # give the CPU a break and some time to start the queue
        time.sleep(cpu_presets[app.CPU_PRESET])
    else:
        # Post-process cached rows into display-ready dicts.
        cached_results = [dict(row) for row in sql_total]
        for i in cached_results:
            i['quality_name'] = Quality.split_quality(int(i['quality']))
            i['time'] = datetime.fromtimestamp(i['time'])
            i['release_group'] = i['release_group'] or 'None'
            # NOTE(review): due to operator precedence this is
            # ('images/providers/' + img) or 'missing.png' — the fallback
            # only applies when the concatenation is falsy; confirm intent.
            i['provider_img_link'] = 'images/providers/' + i['provider_image'] or 'missing.png'
            i['provider'] = i['provider'] if i['provider_image'] else 'missing provider'
            i['proper_tags'] = i['proper_tags'].replace('|', ', ')
            i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A'
            i['seeders'] = i['seeders'] if i['seeders'] >= 0 else '-'
            i['leechers'] = i['leechers'] if i['leechers'] >= 0 else '-'
            i['pubdate'] = sbdatetime.convert_to_setting(
                parser.parse(i['pubdate'])).strftime(
                    app.DATE_PRESET + ' ' + app.TIME_PRESET) if i['pubdate'] else '-'

            release_group = i['release_group']
            # Highlight the release group by word-list membership.
            if ignored_words and release_group in ignored_words:
                i['rg_highlight'] = 'ignored'
            elif required_words and release_group in required_words:
                i['rg_highlight'] = 'required'
            elif preferred_words and release_group in preferred_words:
                i['rg_highlight'] = 'preferred'
            elif undesired_words and release_group in undesired_words:
                i['rg_highlight'] = 'undesired'
            else:
                i['rg_highlight'] = ''

            # Highlight the release name by word-list matches.
            if contains_at_least_one_word(i['name'], required_words):
                i['name_highlight'] = 'required'
            elif contains_at_least_one_word(
                    i['name'], ignored_words) or not filter_bad_releases(
                        i['name'], parse=False):
                i['name_highlight'] = 'ignored'
            elif contains_at_least_one_word(i['name'], undesired_words):
                i['name_highlight'] = 'undesired'
            elif contains_at_least_one_word(i['name'], preferred_words):
                i['name_highlight'] = 'preferred'
            else:
                i['name_highlight'] = ''

            # Flag results below the provider's seed/leech thresholds.
            i['seed_highlight'] = 'ignored' if i.get(
                'provider_minseed') > i.get('seeders', -1) >= 0 else ''
            i['leech_highlight'] = 'ignored' if i.get(
                'provider_minleech') > i.get('leechers', -1) >= 0 else ''

        provider_results['found_items'] = cached_results

    # Remove provider from thread name before return results
    threading.currentThread().name = original_thread_name

    # Sanitize the last_prov_updates key
    provider_results['last_prov_updates'] = json.dumps(
        provider_results['last_prov_updates'])
    return provider_results
def addShowByID(self, indexer_id, show_name=None, indexer="TVDB", which_series=None,
                indexer_lang=None, root_dir=None, default_status=None,
                quality_preset=None, any_qualities=None, best_qualities=None,
                flatten_folders=None, subtitles=None, full_show_path=None,
                other_shows=None, skip_show=None, provided_indexer=None,
                anime=None, scene=None, blacklist=None, whitelist=None,
                default_status_after=None, default_flatten_folders=None,
                configure_show_options=False):
    """
    Add's a new show with provided show options by indexer_id.

    Currently only TVDB and IMDB id's supported. Non-TVDB ids are first
    translated to a TVDB id; when configure_show_options is unset, all
    show options fall back to the application defaults.
    """
    if indexer != 'TVDB':
        # Translate the foreign id (e.g. IMDB) to a TVDB id.
        tvdb_id = helpers.get_tvdb_from_id(indexer_id, indexer.upper())
        if not tvdb_id:
            # BUGFIX: log message read 'Unable to to find' (duplicated word).
            logger.log(u'Unable to find tvdb ID to add %s' % show_name)
            ui.notifications.error(
                'Unable to add %s' % show_name,
                'Could not add %s. We were unable to locate the tvdb id at this time.' % show_name)
            return

        indexer_id = try_int(tvdb_id, None)

    if Show.find(app.showList, int(indexer_id)):
        return

    # Sanitize the parameter allowed_qualities and preferred_qualities.
    # As these would normally be passed as lists
    if any_qualities:
        any_qualities = any_qualities.split(',')
    else:
        any_qualities = []

    if best_qualities:
        best_qualities = best_qualities.split(',')
    else:
        best_qualities = []

    # If configure_show_options is enabled let's use the provided settings
    configure_show_options = config.checkbox_to_value(configure_show_options)

    if configure_show_options:
        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        flatten_folders = config.checkbox_to_value(flatten_folders)
        subtitles = config.checkbox_to_value(subtitles)

        if whitelist:
            whitelist = short_group_names(whitelist)
        if blacklist:
            blacklist = short_group_names(blacklist)

        if not any_qualities:
            any_qualities = []

        if not best_qualities or try_int(quality_preset, None):
            best_qualities = []

        if not isinstance(any_qualities, list):
            any_qualities = [any_qualities]

        if not isinstance(best_qualities, list):
            best_qualities = [best_qualities]

        quality = Quality.combine_qualities([int(q) for q in any_qualities],
                                            [int(q) for q in best_qualities])

        location = root_dir
    else:
        # Use application defaults for every show option.
        default_status = app.STATUS_DEFAULT
        quality = app.QUALITY_DEFAULT
        flatten_folders = app.FLATTEN_FOLDERS_DEFAULT
        subtitles = app.SUBTITLES_DEFAULT
        anime = app.ANIME_DEFAULT
        scene = app.SCENE_DEFAULT
        default_status_after = app.STATUS_DEFAULT_AFTER

        if app.ROOT_DIRS:
            root_dirs = app.ROOT_DIRS
            location = root_dirs[int(root_dirs[0]) + 1]
        else:
            location = None

    if not location:
        logger.log(u'There was an error creating the show, '
                   u'no root directory setting found', logger.WARNING)
        return 'No root directories setup, please go back and add one.'

    show_name = get_showname_from_indexer(1, indexer_id)
    show_dir = None

    # add the show
    app.show_queue_scheduler.action.addShow(
        INDEXER_TVDBV2, int(indexer_id), show_dir, int(default_status), quality,
        flatten_folders, indexer_lang, subtitles, anime, scene, None, blacklist,
        whitelist, int(default_status_after), root_dir=location)

    ui.notifications.message('Show added',
                             'Adding the specified show {0}'.format(show_name))

    # done adding show
    return self.redirect('/home/')
def massAddTable(self, rootDir=None):
    # Render the mass-add table: scan the given root directory(ies) for
    # show folders and annotate each with whether it is already added.
    t = PageTemplate(rh=self, filename='home_massAddTable.mako')

    if not rootDir:
        return 'No folders selected.'
    elif not isinstance(rootDir, list):
        root_dirs = [rootDir]
    else:
        root_dirs = rootDir

    root_dirs = [unquote_plus(x) for x in root_dirs]

    # app.ROOT_DIRS stores the index of the default dir at position 0.
    if app.ROOT_DIRS:
        default_index = int(app.ROOT_DIRS[0])
    else:
        default_index = 0

    # Move the default root dir to the front of the list.
    if len(root_dirs) > default_index:
        tmp = root_dirs[default_index]
        if tmp in root_dirs:
            root_dirs.remove(tmp)
            root_dirs = [tmp] + root_dirs

    dir_list = []

    main_db_con = db.DBConnection()
    for root_dir in root_dirs:
        try:
            file_list = os.listdir(root_dir)
        except Exception as error:
            logger.log('Unable to listdir {path}: {e!r}'.format(
                path=root_dir, e=error))
            continue

        for cur_file in file_list:
            try:
                cur_path = os.path.normpath(
                    os.path.join(root_dir, cur_file))
                # Only directories can be show folders.
                if not os.path.isdir(cur_path):
                    continue
            except Exception as error:
                logger.log(
                    'Unable to get current path {path} and {file}: {e!r}'.
                    format(path=root_dir, file=cur_file, e=error))
                continue

            cur_dir = {
                'dir': cur_path,
                'display_dir': '<b>{dir}{sep}</b>{base}'.format(
                    dir=os.path.dirname(cur_path),
                    sep=os.sep,
                    base=os.path.basename(cur_path)),
            }

            # see if the folder is in KODI already
            dir_results = main_db_con.select(
                b'SELECT indexer_id '
                b'FROM tv_shows '
                b'WHERE location = ? LIMIT 1',
                [cur_path])

            cur_dir['added_already'] = bool(dir_results)

            dir_list.append(cur_dir)

            indexer_id = show_name = indexer = None

            # You may only call .values() on metadata_provider_dict! As on values() call the indexer_api attribute
            # is reset. This will prevent errors, when using multiple indexers and caching.
            for cur_provider in app.metadata_provider_dict.values():
                # Stop probing once any provider yielded id and name.
                if not (indexer_id and show_name):
                    (indexer_id, show_name,
                     indexer) = cur_provider.retrieveShowMetadata(cur_path)

            cur_dir['existing_info'] = (indexer_id, show_name, indexer)

            if indexer_id and Show.find(app.showList, indexer_id):
                cur_dir['added_already'] = True

    return t.render(dirList=dir_list)
def find_needed_episodes(self, episode, forced_search=False, down_cur_quality=False):
    """Find needed episodes.

    Queries this provider's cache table for the given episode(s) and
    returns a dict mapping episode objects to lists of search results
    that the show actually wants.

    :param episode: None (whole cache), a single episode object, or a list
    :param forced_search: passed through to show_obj.want_episode
    :param down_cur_quality: passed through to show_obj.want_episode
    :return: dict {episode_object: [search_result, ...]}
    """
    needed_eps = {}
    results = []

    cache_db_con = self._get_db()
    if not episode:
        # No filter: return everything cached for this provider.
        sql_results = cache_db_con.select(
            b'SELECT * FROM [{name}]'.format(name=self.provider_id))
    elif not isinstance(episode, list):
        sql_results = cache_db_con.select(
            b'SELECT * FROM [{name}] '
            b'WHERE indexerid = ? AND'
            b' season = ? AND'
            b' episodes LIKE ?'.format(name=self.provider_id), [
                episode.series.indexerid, episode.season,
                b'%|{0}|%'.format(episode.episode)
            ])
    else:
        # One query per episode, batched through mass_action below.
        for ep_obj in episode:
            results.append([
                b'SELECT * FROM [{name}] '
                b'WHERE indexerid = ? AND'
                b' season = ? AND'
                b' episodes LIKE ? AND '
                b' quality IN ({qualities})'.format(
                    name=self.provider_id,
                    qualities=','.join(
                        (str(x) for x in ep_obj.wanted_quality))),
                [
                    ep_obj.series.indexerid, ep_obj.season,
                    b'%|{0}|%'.format(ep_obj.episode)
                ]
            ])

        if results:
            # Only execute the query if we have results
            sql_results = cache_db_con.mass_action(results, fetchall=True)
            sql_results = list(itertools.chain(*sql_results))
        else:
            sql_results = []
            log.debug(
                '{id}: No cached results in {provider} for series {show_name!r} episode {ep}', {
                    'id': episode[0].series.indexerid,
                    'provider': self.provider.name,
                    'show_name': episode[0].series.name,
                    'ep': episode_num(episode[0].season, episode[0].episode),
                })

    # for each cache entry
    for cur_result in sql_results:
        search_result = self.provider.get_result()

        # ignored/required words, and non-tv junk
        if not naming.filter_bad_releases(cur_result[b'name']):
            continue

        # get the show, or ignore if it's not one of our shows
        show_obj = Show.find(app.showList, int(cur_result[b'indexerid']))
        if not show_obj:
            continue

        # skip if provider is anime only and show is not anime
        if self.provider.anime_only and not show_obj.is_anime:
            log.debug('{0} is not an anime, skipping', show_obj.name)
            continue

        # get season and ep data (ignoring multi-eps for now)
        search_result.season = int(cur_result[b'season'])
        if search_result.season == -1:
            continue

        # episodes column is pipe-delimited: '|ep1|ep2|...|'
        cur_ep = cur_result[b'episodes'].split('|')[1]
        if not cur_ep:
            continue

        cur_ep = int(cur_ep)

        search_result.quality = int(cur_result[b'quality'])
        search_result.release_group = cur_result[b'release_group']
        search_result.version = cur_result[b'version']

        # if the show says we want that episode then add it to the list
        if not show_obj.want_episode(search_result.season, cur_ep,
                                     search_result.quality, forced_search,
                                     down_cur_quality):
            log.debug('Ignoring {0}', cur_result[b'name'])
            continue

        search_result.episodes = [
            show_obj.get_episode(search_result.season, cur_ep)
        ]

        search_result.actual_episodes = [search_result.episodes[0].episode]
        search_result.actual_season = search_result.season

        # build a result object
        search_result.name = cur_result[b'name']
        search_result.url = cur_result[b'url']

        log.debug(
            '{id}: Using cached results from {provider} for series {show_name!r} episode {ep}', {
                'id': search_result.episodes[0].series.indexerid,
                'provider': self.provider.name,
                'show_name': search_result.episodes[0].series.name,
                'ep': episode_num(search_result.episodes[0].season,
                                  search_result.episodes[0].episode),
            })

        # Map the remaining attributes
        search_result.show = show_obj
        search_result.seeders = cur_result[b'seeders']
        search_result.leechers = cur_result[b'leechers']
        search_result.size = cur_result[b'size']
        search_result.pubdate = cur_result[b'pubdate']
        search_result.proper_tags = cur_result[b'proper_tags'].split(
            '|') if cur_result[b'proper_tags'] else ''
        search_result.content = None

        # FIXME: Should be changed to search_result.search_type
        search_result.forced_search = forced_search

        search_result.download_current_quality = down_cur_quality

        episode_object = search_result.episodes[0]
        # add it to the list
        if episode_object not in needed_eps:
            needed_eps[episode_object] = [search_result]
        else:
            needed_eps[episode_object].append(search_result)

    # datetime stamp this search so cache gets cleared
    self.searched = time()

    return needed_eps
def massUpdate(self, toUpdate=None, toRefresh=None, toRename=None, toDelete=None,
               toRemove=None, toMetadata=None, toSubtitle=None, toImageUpdate=None):
    # Queue bulk actions for the selected shows. Each parameter is a
    # pipe-delimited string of show ids from the mass-update form.
    to_update = toUpdate.split('|') if toUpdate else []
    to_refresh = toRefresh.split('|') if toRefresh else []
    to_rename = toRename.split('|') if toRename else []
    to_subtitle = toSubtitle.split('|') if toSubtitle else []
    to_delete = toDelete.split('|') if toDelete else []
    to_remove = toRemove.split('|') if toRemove else []
    to_metadata = toMetadata.split('|') if toMetadata else []
    to_image_update = toImageUpdate.split('|') if toImageUpdate else []

    errors = []
    refreshes = []
    updates = []
    renames = []
    subtitles = []
    image_update = []

    # Visit each show at most once, whatever combination of actions it has.
    for cur_show_id in set(to_update + to_refresh + to_rename + to_subtitle +
                           to_delete + to_remove + to_metadata +
                           to_image_update):
        show_obj = Show.find(app.showList,
                             int(cur_show_id)) if cur_show_id else None

        if not show_obj:
            continue

        if cur_show_id in to_delete + to_remove:
            # second argument distinguishes full delete from mere removal
            app.show_queue_scheduler.action.removeShow(
                show_obj, cur_show_id in to_delete)
            continue  # don't do anything else if it's being deleted or removed

        if cur_show_id in to_update:
            try:
                app.show_queue_scheduler.action.updateShow(show_obj)
                updates.append(show_obj.name)
            except CantUpdateShowException as msg:
                errors.append(
                    'Unable to update show: {error}'.format(error=msg))
        elif cur_show_id in to_refresh:
            # don't bother refreshing shows that were updated
            try:
                app.show_queue_scheduler.action.refreshShow(show_obj)
                refreshes.append(show_obj.name)
            except CantRefreshShowException as msg:
                errors.append(
                    'Unable to refresh show {show.name}: {error}'.format(
                        show=show_obj, error=msg))

        if cur_show_id in to_rename:
            app.show_queue_scheduler.action.renameShowEpisodes(show_obj)
            renames.append(show_obj.name)

        if cur_show_id in to_subtitle:
            app.show_queue_scheduler.action.download_subtitles(show_obj)
            subtitles.append(show_obj.name)

        if cur_show_id in to_image_update:
            image_cache.replace_images(show_obj)

    if errors:
        ui.notifications.error('Errors encountered',
                               '<br />\n'.join(errors))

    # Summarize what was queued for the notification popup.
    message = ''
    if updates:
        message += '\nUpdates: {0}'.format(len(updates))
    if refreshes:
        message += '\nRefreshes: {0}'.format(len(refreshes))
    if renames:
        message += '\nRenames: {0}'.format(len(renames))
    if subtitles:
        message += '\nSubtitles: {0}'.format(len(subtitles))
    if image_update:
        message += '\nImage updates: {0}'.format(len(image_update))

    if message:
        ui.notifications.message('Queued actions:', message)

    return self.redirect('/manage/')
def massEditSubmit(self, paused=None, default_ep_status=None, dvd_order=None,
                   anime=None, sports=None, scene=None, flatten_folders=None,
                   quality_preset=None, subtitles=None, air_by_date=None,
                   allowed_qualities=None, preferred_qualities=None,
                   toEdit=None, *args, **kwargs):
    """Apply the mass-edit form to every selected show.

    Each boolean option arrives as 'keep' (leave the show's current value),
    'enable' or 'disable'; qualities may be kept per show, taken from a
    preset, or given explicitly.  ``toEdit`` is a pipe-separated id list and
    ``kwargs`` carries orig_root_dir_N / new_root_dir_N pairs for moving
    shows between root directories.
    """
    allowed_qualities = allowed_qualities or []
    preferred_qualities = preferred_qualities or []

    def tri_state(choice, current):
        # Resolve a keep/enable/disable form value against the show's
        # current flag, returning the 'on'/'off' string editShow expects.
        flag = current if choice == 'keep' else choice == 'enable'
        return 'on' if flag else 'off'

    # Map each original root dir to its replacement.
    dir_map = {}
    for cur_arg in kwargs:
        if not cur_arg.startswith('orig_root_dir_'):
            continue
        which_index = cur_arg.replace('orig_root_dir_', '')
        end_dir = kwargs['new_root_dir_{index}'.format(index=which_index)]
        dir_map[kwargs[cur_arg]] = end_dir

    show_ids = toEdit.split('|') if toEdit else []
    errors = 0
    for cur_show in show_ids:
        show_obj = Show.find(app.showList, int(cur_show))
        if not show_obj:
            continue

        cur_root_dir = os.path.dirname(show_obj._location)
        cur_show_dir = os.path.basename(show_obj._location)
        if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]:
            new_show_dir = os.path.join(dir_map[cur_root_dir], cur_show_dir)
            logger.log(
                u'For show {show.name} changing dir from {show._location} to {location}'
                .format(show=show_obj, location=new_show_dir))
        else:
            new_show_dir = show_obj._location

        new_paused = tri_state(paused, show_obj.paused)
        new_anime = tri_state(anime, show_obj.anime)
        new_sports = tri_state(sports, show_obj.sports)
        new_scene = tri_state(scene, show_obj.is_scene)
        new_air_by_date = tri_state(air_by_date, show_obj.air_by_date)
        new_dvd_order = tri_state(dvd_order, show_obj.dvd_order)
        new_flatten_folders = tri_state(flatten_folders, show_obj.flatten_folders)
        new_subtitles = tri_state(subtitles, show_obj.subtitles)

        if default_ep_status == 'keep':
            new_default_ep_status = show_obj.default_ep_status
        else:
            new_default_ep_status = default_ep_status

        # Resolve the qualities for this show into fresh locals instead of
        # rebinding the function parameters inside the loop (the original
        # rebinding only worked because quality_preset is loop-invariant).
        if quality_preset == 'keep':
            new_allowed, new_preferred = show_obj.current_qualities
        elif try_int(quality_preset, None):
            new_allowed, new_preferred = allowed_qualities, []
        else:
            new_allowed, new_preferred = allowed_qualities, preferred_qualities

        exceptions_list = []
        errors += self.editShow(cur_show, new_show_dir, new_allowed,
                                new_preferred, exceptions_list,
                                defaultEpStatus=new_default_ep_status,
                                flatten_folders=new_flatten_folders,
                                paused=new_paused, sports=new_sports,
                                dvd_order=new_dvd_order,
                                subtitles=new_subtitles, anime=new_anime,
                                scene=new_scene, air_by_date=new_air_by_date,
                                directCall=True)

    if errors:
        ui.notifications.error(
            'Errors',
            '{num} error{s} while saving changes. Please check logs'.
            format(num=errors, s='s' if errors > 1 else ''))
    return self.redirect('/manage/')
def massEdit(self, toEdit=None):
    """Render the mass-edit page for the shows selected in ``toEdit``.

    For every editable option, the template receives the shared value when
    all selected shows agree on it, or None when they differ.
    """
    t = PageTemplate(rh=self, filename='manage_massEdit.mako')

    if not toEdit:
        return self.redirect('/manage/')

    show_list = []
    show_names = []
    for cur_id in toEdit.split('|'):
        show_obj = Show.find(app.showList, int(cur_id))
        if show_obj:
            show_list.append(show_obj)
            show_names.append(show_obj.name)

    # Per option: (attribute compared, attribute stored).
    # NOTE(review): 'anime' historically compares is_anime but stores anime,
    # and 'scene' compares scene while the submit handler reads is_scene —
    # both quirks preserved as-is.
    tracked = {
        'default_ep_status': ('default_ep_status', 'default_ep_status'),
        'paused': ('paused', 'paused'),
        'anime': ('is_anime', 'anime'),
        'flatten_folders': ('flatten_folders', 'flatten_folders'),
        'quality': ('quality', 'quality'),
        'subtitles': ('subtitles', 'subtitles'),
        'scene': ('scene', 'scene'),
        'sports': ('sports', 'sports'),
        'air_by_date': ('air_by_date', 'air_by_date'),
        'dvd_order': ('dvd_order', 'dvd_order'),
    }
    all_same = {key: True for key in tracked}
    last_value = {key: None for key in tracked}

    root_dir_list = []
    for cur_show in show_list:
        cur_root_dir = os.path.dirname(cur_show._location)  # pylint: disable=protected-access
        if cur_root_dir not in root_dir_list:
            root_dir_list.append(cur_root_dir)

        for key, (compare_attr, store_attr) in tracked.items():
            # if we know they're not all the same then no point even bothering
            if not all_same[key]:
                continue
            # if we had a value already and this value is different then they're not all the same
            if last_value[key] not in (None, getattr(cur_show, compare_attr)):
                all_same[key] = False
            else:
                last_value[key] = getattr(cur_show, store_attr)

    def value_of(key):
        # Expose a shared value only when every selected show agrees on it.
        return last_value[key] if all_same[key] else None

    return t.render(showList=toEdit, showNames=show_names,
                    default_ep_status_value=value_of('default_ep_status'),
                    dvd_order_value=value_of('dvd_order'),
                    paused_value=value_of('paused'),
                    anime_value=value_of('anime'),
                    flatten_folders_value=value_of('flatten_folders'),
                    quality_value=value_of('quality'),
                    subtitles_value=value_of('subtitles'),
                    scene_value=value_of('scene'),
                    sports_value=value_of('sports'),
                    air_by_date_value=value_of('air_by_date'),
                    root_dir_list=root_dir_list,
                    title='Mass Edit', header='Mass Edit', topmenu='manage')
def run(self, force=False):  # pylint: disable=too-many-branches, too-many-statements, too-many-locals
    """Check for needed subtitles for users' shows.

    :param force: True if a force search needs to be executed
    :type force: bool
    """
    if self.amActive:
        logger.debug(
            u'Subtitle finder is still running, not starting it again')
        return

    if not app.USE_SUBTITLES:
        # Fixed log-message typo: 'Please enabled it' -> 'Please enable it'.
        logger.warning(u'Subtitle search is disabled. Please enable it')
        return

    if not enabled_service_list():
        logger.warning(
            u'Not enough services selected. At least 1 service is required to search subtitles in the '
            u'background')
        return

    self.amActive = True

    def dhm(td):
        """Create the human-readable 'D days, H hours, M minutes' string for subtitles delay."""
        days_delay = td.days
        hours_delay = td.seconds // 60**2
        minutes_delay = (td.seconds // 60) % 60
        ret = (u'', '{days} days, '.format(days=days_delay))[days_delay > 0] + \
            (u'', '{hours} hours, '.format(hours=hours_delay))[hours_delay > 0] + \
            (u'', '{minutes} minutes'.format(minutes=minutes_delay))[minutes_delay > 0]
        # Singularize units where the count is exactly one.
        if days_delay == 1:
            ret = ret.replace('days', 'day')
        if hours_delay == 1:
            ret = ret.replace('hours', 'hour')
        if minutes_delay == 1:
            ret = ret.replace('minutes', 'minute')
        return ret.rstrip(', ')

    if app.POSTPONE_IF_NO_SUBS:
        self.subtitles_download_in_pp()

    logger.info(u'Checking for missed subtitles')

    database = db.DBConnection()
    # Shows with air date <= 30 days, have a limit of 100 results
    # Shows with air date > 30 days, have a limit of 200 results
    sql_args = [{'age_comparison': '<=', 'limit': 100},
                {'age_comparison': '>', 'limit': 200}]
    sql_like_languages = '%' + ','.join(sorted(
        wanted_languages())) + '%' if app.SUBTITLES_MULTI else '%und%'
    sql_results = []
    for args in sql_args:
        sql_results += database.select(
            "SELECT "
            "s.show_name, "
            "e.showid, "
            "e.season, "
            "e.episode,"
            "e.release_name, "
            "e.status, "
            "e.subtitles, "
            "e.subtitles_searchcount AS searchcount, "
            "e.subtitles_lastsearch AS lastsearch, "
            "e.location, (? - e.airdate) as age "
            "FROM "
            "tv_episodes AS e "
            "INNER JOIN tv_shows AS s "
            "ON (e.showid = s.indexer_id) "
            "WHERE "
            "s.subtitles = 1 "
            "AND s.paused = 0 "
            "AND e.status LIKE '%4' "
            "AND e.season > 0 "
            "AND e.location != '' "
            "AND age {} 30 "
            "AND e.subtitles NOT LIKE ? "
            "ORDER BY "
            "lastsearch ASC "
            "LIMIT {}".format(args['age_comparison'], args['limit']),
            [datetime.datetime.now().toordinal(), sql_like_languages])

    if not sql_results:
        logger.info(u'No subtitles to download')
        self.amActive = False
        return

    for ep_to_sub in sql_results:
        # give the CPU a break
        time.sleep(cpu_presets[app.CPU_PRESET])

        ep_num = episode_num(ep_to_sub['season'], ep_to_sub['episode']) or \
            episode_num(ep_to_sub['season'], ep_to_sub['episode'], numbering='absolute')
        # 'location' is the episode's media file; no file -> nothing to subtitle.
        subtitle_path = _encode(ep_to_sub['location'], fallback='utf-8')
        if not os.path.isfile(subtitle_path):
            logger.debug(
                u'Episode file does not exist, cannot download subtitles for %s %s',
                ep_to_sub['show_name'], ep_num)
            continue

        if app.SUBTITLES_STOP_AT_FIRST and ep_to_sub['subtitles']:
            logger.debug(u'Episode already has one subtitle, skipping %s %s',
                         ep_to_sub['show_name'], ep_num)
            continue

        if not needs_subtitles(ep_to_sub['subtitles']):
            logger.debug(
                u'Episode already has all needed subtitles, skipping %s %s',
                ep_to_sub['show_name'], ep_num)
            continue

        try:
            lastsearched = datetime.datetime.strptime(
                ep_to_sub['lastsearch'], dateTimeFormat)
        except ValueError:
            # Unparsable/empty timestamp: treat as "never searched".
            lastsearched = datetime.datetime.min

        if not force:
            now = datetime.datetime.now()
            days = int(ep_to_sub['age'])
            delay_time = datetime.timedelta(
                hours=1 if days <= 10 else 8 if days <= 30 else 30 * 24)
            delay = lastsearched + delay_time - now

            # Search every hour until 10 days pass
            # After 10 days, search every 8 hours, after 30 days search once a month
            # Will always try an episode regardless of age for 3 times
            # The time resolution is minute
            # Only delay if it's bigger than one minute, to avoid wrongly skipping the search slot
            if delay.total_seconds() > 60 and int(ep_to_sub['searchcount']) > 2:
                logger.debug(u'Subtitle search for %s %s delayed for %s',
                             ep_to_sub['show_name'], ep_num, dhm(delay))
                continue

        show_object = Show.find(app.showList, int(ep_to_sub['showid']))
        if not show_object:
            logger.debug(u'Show with ID %s not found in the database',
                         ep_to_sub['showid'])
            continue

        episode_object = show_object.get_episode(ep_to_sub['season'],
                                                 ep_to_sub['episode'])
        if isinstance(episode_object, str):
            logger.debug(u'%s %s not found in the database',
                         ep_to_sub['show_name'], ep_num)
            continue

        episode_object.download_subtitles()

    logger.info(u'Finished checking for missed subtitles')
    self.amActive = False
def run(self, force=False):  # pylint:disable=too-many-branches
    """
    Run the daily searcher, queuing selected episodes for search.

    :param force: Force search
    """
    if self.amActive:
        log.debug('Daily search is still running, not starting it again')
        return
    elif app.forced_search_queue_scheduler.action.is_forced_search_in_progress() and not force:
        log.warning('Manual search is running. Unable to start Daily search')
        return

    self.amActive = True

    if not network_dict:
        update_network_dict()

    now_local = datetime.now(app_timezone)
    # Look one day ahead when networks are known, two otherwise.
    cutoff_ordinal = (date.today() + timedelta(days=1 if network_dict else 2)).toordinal()

    main_db_con = DBConnection()
    episodes_from_db = main_db_con.select(
        b'SELECT showid, airdate, season, episode '
        b'FROM tv_episodes '
        b'WHERE status = ? AND (airdate <= ? and airdate > 1)',
        [common.UNAIRED, cutoff_ordinal])

    sql_updates = []
    series = None

    for ep_row in episodes_from_db:
        series_id = int(ep_row[b'showid'])
        try:
            # Rows come grouped by show; reuse the last lookup when possible.
            if not series or series_id != series.indexerid:
                series = Show.find(app.showList, series_id)
            # for when there is orphaned series in the database but not loaded into our show list
            if not series or series.paused:
                continue
        except MultipleShowObjectsException:
            log.info('ERROR: expected to find a single show matching {id}',
                     {'id': series_id})
            continue

        if series.airs and series.network:
            # This is how you assure it is always converted to local time
            air_time = parse_date_time(ep_row[b'airdate'], series.airs, series.network)
            end_time = air_time.astimezone(app_timezone) + timedelta(
                minutes=try_int(series.runtime, 60))

            # filter out any episodes that haven't finished airing yet
            if end_time > now_local:
                continue

        episode = series.get_episode(ep_row[b'season'], ep_row[b'episode'])
        with episode.lock:
            # Specials (season 0) are skipped rather than given the show default.
            episode.status = series.default_ep_status if episode.season else common.SKIPPED
            log.info(
                'Setting status ({status}) for show airing today: {name} {special}', {
                    'name': episode.pretty_name(),
                    'status': common.statusStrings[episode.status],
                    'special': '(specials are not supported)' if not episode.season else '',
                })
            sql_updates.append(episode.get_sql())

    if sql_updates:
        main_db_con = DBConnection()
        main_db_con.mass_action(sql_updates)

    # queue episode for daily search
    app.search_queue_scheduler.action.add_item(DailySearchQueueItem(force=force))

    self.amActive = False