def clean_databases(current_time=None, database_check=True, silent=False):
    """Delete expired rows from every cache database, then VACUUM each one.

    Args:
        current_time: value compared against each row's ``expires`` column;
            when falsy, ``get_current_time()`` is used.
        database_check: when True, run ``check_databases()`` first so
            databases/tables exist before the DELETEs run.
        silent: when False, show a completion notification (string id 32576).
    """
    def _process(args):
        # args = (db_path, delete_sql). Best effort: a locked or missing
        # database must not abort the other cleanup workers.
        try:
            dbcon = database.connect(args[0], timeout=60.0)
            try:
                dbcur = dbcon.cursor()
                # Durability is traded for speed -- this is cache data only.
                dbcur.execute('''PRAGMA synchronous = OFF''')
                dbcur.execute('''PRAGMA journal_mode = OFF''')
                dbcur.execute(args[1], (current_time,))
                dbcon.commit()
                dbcur.execute('VACUUM')
            finally:
                # Always release the connection, even if a statement fails.
                dbcon.close()
        except Exception:
            pass
    if database_check:
        check_databases()
    if not current_time:
        current_time = get_current_time()
    command_base = 'DELETE from %s WHERE CAST(%s AS INT) <= ?'
    # (database file, delete statement) pairs -- one worker thread each.
    functions_list = [
        (external_db, command_base % ('results_data', 'expires')),
        (maincache_db, command_base % ('maincache', 'expires')),
        (metacache_db, command_base % ('metadata', 'expires')),
        (metacache_db, command_base % ('function_cache', 'expires')),
        (metacache_db, command_base % ('season_metadata', 'expires')),
        (debridcache_db, command_base % ('debrid_data', 'expires'))]
    threads = list(make_thread_list(_process, functions_list, Thread))
    for thread in threads:
        thread.join()
    if not silent:
        notification(32576, time=2000)
def get_in_progress_tvshows(dummy_arg, page_no, letter):
    """Return (item_list, total_pages) of shows started but not finished."""
    def _worker(entry):
        # Keep only shows whose watched status reports "in progress" (0).
        show_id = entry['media_id']
        meta = metadata.tvshow_meta('tmdb_id', show_id, meta_user_info)
        status = get_watched_status_tvshow(watched_info, show_id, meta.get('total_aired_eps'))
        if status[0] == 0:
            collected.append(entry)
    check_trakt_refresh()
    watched_indicators = settings.watched_indicators()
    use_pages = settings.paginate()
    per_page = settings.page_limit()
    meta_user_info = metadata.retrieve_user_info()
    watched_info = get_watched_info_tv(watched_indicators)
    collected = []
    # De-duplicate by media id, keeping the first occurrence of each show.
    seen = set()
    prelim_data = []
    for record in watched_info:
        if record[0] in seen:
            continue
        seen.add(record[0])
        prelim_data.append({'media_id': record[0], 'title': record[3]})
    workers = list(make_thread_list(_worker, prelim_data, Thread))
    for worker in workers:
        worker.join()
    original_list = sort_for_article(collected, 'title', settings.ignore_articles())
    if use_pages:
        return paginate_list(original_list, page_no, letter, per_page)
    return original_list, 1
def trakt_progress_tv(progress_info):
    """Cache Trakt in-progress episode positions in bulk."""
    from threading import Thread
    from modules.utils import make_thread_list
    def _resolve_id(show):
        # Threaded: map each unique show to (tmdb_id, title).
        resolved_append((get_trakt_tvshow_id(show['ids']), show['title']))
    def _build_rows():
        # Join resolved ids back onto the progress entries by show title.
        for tmdb_id, title in resolved:
            if not tmdb_id:
                continue
            for entry in progress_items:
                if entry['show']['title'] != title:
                    continue
                season = entry['episode']['season']
                if season > 0:
                    yield ('episode', str(tmdb_id), season,
                           entry['episode']['number'],
                           str(round(entry['progress'], 1)), 0)
    resolved = []
    resolved_append = resolved.append
    progress_items = [i for i in progress_info if i['progress'] > 1 and i['type'] == 'episode']
    all_shows = [i['show'] for i in progress_items]
    # De-duplicate, keeping the LAST occurrence of each show dict.
    unique_shows = []
    for position, show in enumerate(all_shows):
        if show not in all_shows[position + 1:]:
            unique_shows.append(show)
    workers = list(make_thread_list(_resolve_id, unique_shows, Thread))
    for worker in workers:
        worker.join()
    trakt_cache.TraktWatched().set_bulk_tvshow_progress(list(_build_rows()))
def trakt_indicators_tv():
    """Fetch the full Trakt watched-shows history and cache it locally."""
    from threading import Thread
    from modules.utils import make_thread_list
    rows = []
    def _collect(entry):
        # Threaded: flatten one show's seasons/episodes into watched rows.
        show_info = entry['show']
        show_title = show_info['title']
        show_tmdb = get_trakt_tvshow_id(show_info['ids'])
        for season in entry['seasons']:
            season_no = season['number']
            for episode in season['episodes']:
                rows.append(('episode', show_tmdb, season_no,
                             episode['number'], episode['last_watched_at'],
                             show_title))
    request = {'path': 'users/me/watched/shows?extended=full%s',
               'with_auth': True,
               'pagination': False}
    result = get_trakt(request)
    workers = list(make_thread_list(_collect, result, Thread))
    for worker in workers:
        worker.join()
    trakt_cache.TraktWatched().set_bulk_tvshow_watched(rows)
def _scrape_directory(self, folder_info):
    """Scan one folder (one worker thread per entry) for playable files.

    folder_info: (folder_name, assigned_folder) -- assigned_folder is the
    content label already matched for this path, or a falsy value.
    Matching files are appended to self.scrape_results; matching
    sub-folders are collected and recursed into via self._scraper_worker.
    """
    def _process(item):
        # item: (name, 'file'|'folder'); runs on a worker thread.
        file_type = item[1]
        normalized = normalize(item[0])
        item_name = clean_title(normalized)
        if file_type == 'file':
            ext = os.path.splitext(urlparse(item[0]).path)[-1].lower()
            if ext in self.extensions:
                if self.db_type == 'movie':
                    if self.assigned_content or self.title_query in item_name:
                        url_path = self.url_path(folder_name, item[0])
                        size = self._get_size(url_path)
                        scrape_results_append((item[0], url_path, size))
                else:
                    # Episode scrape: prefer a season/episode pattern match,
                    # fall back to a plain title substring match.
                    if seas_ep_filter(self.season, self.episode, normalized):
                        if self.assigned_content or not folder_name in self.folder_path:
                            url_path = self.url_path(folder_name, item[0])
                            size = self._get_size(url_path)
                            scrape_results_append((item[0], url_path, size))
                    elif self.title_query in item_name:
                        url_path = self.url_path(folder_name, item[0])
                        size = self._get_size(url_path)
                        scrape_results_append((item[0], url_path, size))
        elif file_type == 'folder':
            if not assigned_folder:
                # NOTE(review): self.assigned_content is written from multiple
                # worker threads here -- last writer wins; confirm intended.
                self.assigned_content = self._assigned_content(normalize(item[0]))
                if self.assigned_content:
                    if self.assigned_content == self.rootname:
                        new_folder = os.path.join(folder_name, item[0])
                        foler_results_append((new_folder, True))
                elif self.title_query in item_name or any(x in item_name for x in self.folder_query):
                    new_folder = os.path.join(folder_name, item[0])
                    foler_results_append((new_folder, self.assigned_content))
            elif assigned_folder:
                if any(x in item_name for x in self.folder_query):
                    new_folder = os.path.join(folder_name, item[0])
                    foler_results_append((new_folder, True))
                elif self.title_query in item_name or any(x in item_name for x in self.folder_query):
                    new_folder = os.path.join(folder_name, item[0])
                    foler_results_append((new_folder, self.assigned_content))
    folder_results = []
    scrape_results_append = self.scrape_results.append
    foler_results_append = folder_results.append  # sic: "foler" name kept as-is
    assigned_folder = folder_info[1]
    self.assigned_content = assigned_folder if assigned_folder else ''
    folder_name = folder_info[0]
    string = 'fen_FOLDERSCRAPER_%s_%s' % (self.scrape_provider, folder_name)
    # Cache the directory listing (expiration=4) to avoid re-listing the source.
    folder_files = cache_object(self._make_dirs, string, folder_name, json=False, expiration=4)
    folder_threads = list(make_thread_list(_process, folder_files, Thread))
    [i.join() for i in folder_threads]
    if not folder_results: return
    # Recurse into every matched sub-folder.
    return self._scraper_worker(folder_results)
def debrid_valid_hosts(enabled_debrids):
    """Collect the host lists of every enabled debrid service (threaded)."""
    results = []
    def _fetch(service):
        # Threaded: each enabled service reports its supported hosts.
        results.append(service.get_hosts())
    if enabled_debrids:
        targets = [entry[2] for entry in debrid_list if entry[0] in enabled_debrids]
        workers = list(make_thread_list(_fetch, targets, Thread))
        for worker in workers:
            worker.join()
    return results
def trakt_progress_movies(progress_info):
    """Cache Trakt in-progress movie positions in bulk."""
    from threading import Thread
    from modules.utils import make_thread_list
    rows = []
    def _collect(entry):
        # Threaded: resolve the TMDb id and build one progress row.
        movie_id = get_trakt_movie_id(entry['movie']['ids'])
        rows.append(('movie', str(movie_id), '', '',
                     str(round(entry['progress'], 1)), 0))
    in_progress = [i for i in progress_info if i['progress'] > 1 and i['type'] == 'movie']
    workers = list(make_thread_list(_collect, in_progress, Thread))
    for worker in workers:
        worker.join()
    trakt_cache.TraktWatched().set_bulk_movie_progress(rows)
def batch_erase_bookmark(insert_list, action, watched_indicators):
    """Remove resume-point (progress) rows for a batch of items.

    insert_list: rows whose first four fields are
    (db_type, media_id, season, episode).
    action: 'mark_as_watched' trims each row to those four fields before
    the DELETE; any other action uses the rows unchanged.
    watched_indicators: 1 -> Trakt-backed DB (also fires remote
    clear_progress calls), otherwise the local watched DB.
    Best effort throughout: any failure is swallowed silently.
    """
    try:
        if action == 'mark_as_watched':
            modified_list = [(i[0], i[1], i[2], i[3]) for i in insert_list]
        else: modified_list = insert_list
        if watched_indicators == 1:
            def _process(arg):
                # One remote trakt_progress call per cleared bookmark.
                try: trakt_progress(*arg)
                except: pass
            process_list = []
            process_list_append = process_list.append
            db_type = insert_list[0][0]
            # NOTE(review): tmdb_id is taken from the FIRST row only and used
            # for every detect_bookmark lookup below -- assumes the whole
            # batch belongs to one title; confirm callers never mix ids.
            tmdb_id = insert_list[0][1]
            bookmarks = get_bookmarks(db_type, watched_indicators)
            for i in insert_list:
                # Skip rows with no stored bookmark (detect_bookmark raises).
                try: resume_point, curr_time = detect_bookmark(bookmarks, tmdb_id, i[2], i[3])
                except: continue
                process_list_append(('clear_progress', i[0], i[1], 0, i[2], i[3]))
            if process_list:
                # NOTE(review): threads are started but never joined -- the
                # local DELETE below may run before remote clearing finishes.
                threads = list(make_thread_list(_process, process_list, Thread))
            database_file = TRAKT_DB
        else: database_file = WATCHED_DB
        dbcon = database.connect(database_file)
        dbcur = dbcon.cursor()
        dbcur.executemany("DELETE FROM progress where db_type=? and media_id=? and season = ? and episode = ?", modified_list)
        dbcon.commit()
    except: pass
def get_watched_items(db_type, page_no, letter):
    """Return (item_list, total_pages) of fully-watched items for db_type."""
    use_pages = settings.paginate()
    per_page = settings.page_limit()
    watched_indicators = settings.watched_indicators()
    if db_type == 'tvshow':
        from threading import Thread
        from modules.utils import make_thread_list
        def _keep_if_finished(entry):
            # Keep only shows whose watched status reports "finished" (1).
            show_id = entry['media_id']
            meta = metadata.tvshow_meta('tmdb_id', show_id, meta_user_info)
            status = get_watched_status_tvshow(watched_info, show_id, meta.get('total_aired_eps'))
            if status[0] == 1:
                finished.append(entry)
        meta_user_info = metadata.retrieve_user_info()
        watched_info = get_watched_info_tv(watched_indicators)
        finished = []
        # De-duplicate by media id, keeping the first occurrence of each show.
        seen = set()
        prelim_data = []
        for row in watched_info:
            if row[0] in seen:
                continue
            seen.add(row[0])
            prelim_data.append({'media_id': row[0], 'title': row[3]})
        workers = list(make_thread_list(_keep_if_finished, prelim_data, Thread))
        for worker in workers:
            worker.join()
        original_list = sort_for_article(finished, 'title', settings.ignore_articles())
    else:
        watched_info = get_watched_info_movie(watched_indicators)
        movie_data = [{'media_id': row[0], 'title': row[1]} for row in watched_info]
        original_list = sort_for_article(movie_data, 'title', settings.ignore_articles())
    if use_pages:
        return paginate_list(original_list, page_no, letter, per_page)
    return original_list, 1
def trakt_indicators_movies():
    """Fetch the full Trakt watched-movies history and cache it locally."""
    from threading import Thread
    from modules.utils import make_thread_list
    rows = []
    def _collect(entry):
        # Threaded: resolve the TMDb id and build one watched row.
        movie = entry['movie']
        rows.append(('movie', get_trakt_movie_id(movie['ids']), '', '',
                     entry['last_watched_at'], movie['title']))
    request = {'path': 'sync/watched/movies%s',
               'with_auth': True,
               'pagination': False}
    result = get_trakt(request)
    workers = list(make_thread_list(_collect, result, Thread))
    for worker in workers:
        worker.join()
    trakt_cache.TraktWatched().set_bulk_movie_watched(rows)
def build_next_episode_manager(params):
    """Render the Next Episode Manager directory: one row per show from the
    next-episodes list, colored red/[excluded] when the show is hidden in
    Trakt's progress_watched section, green/[included] otherwise.

    params: routing dict from the plugin URL (not read directly here).
    """
    def build_content(tmdb_id):
        # Worker thread: build one listitem for a show; any failure
        # silently drops that row.
        try:
            cm = []
            listitem = make_listitem()
            set_property = listitem.setProperty
            cm_append = cm.append
            meta = tvshow_meta('tmdb_id', tmdb_id, meta_user_info)
            meta_get = meta.get
            total_aired_eps = meta_get('total_aired_eps')
            total_seasons = meta_get('total_seasons')
            title = meta_get('title')
            playcount, overlay, total_watched, total_unwatched = get_watched_status_tvshow(watched_info, tmdb_id, total_aired_eps)
            meta.update({'playcount': playcount, 'overlay': overlay})
            # Hidden shows sort after visible ones (sort_value 1 vs 0).
            if tmdb_id in exclude_list: color, action, status, sort_value = 'red', 'unhide', excluded_str, 1
            else: color, action, status, sort_value = 'green', 'hide', included_str, 0
            display = '[COLOR=%s][%s][/COLOR] %s' % (color, status, title)
            extras_params = {'mode': 'extras_menu_choice', 'tmdb_id': tmdb_id, 'db_type': 'tvshow', 'is_widget': 'False'}
            # Selecting the row toggles hide/unhide on Trakt via this URL.
            url_params = {'mode': 'hide_unhide_trakt_items', 'action': action, 'media_type': 'shows', 'media_id': meta_get('imdb_id'), 'section': 'progress_watched'}
            url = build_url(url_params)
            # Context-menu "browse" target depends on the all-episodes setting
            # and how many seasons the show has.
            if show_all_episodes:
                if all_episodes == 1 and total_seasons > 1: browse_params = {'mode': 'build_season_list', 'tmdb_id': tmdb_id}
                else: browse_params = {'mode': 'build_episode_list', 'tmdb_id': tmdb_id, 'season': 'all'}
            else: browse_params = {'mode': 'build_season_list', 'tmdb_id': tmdb_id}
            cm_append((extras_str, 'RunPlugin(%s)' % build_url(extras_params)))
            cm_append((browse_str, 'Container.Update(%s)' % build_url(browse_params)))
            listitem.setLabel(display)
            set_property('watchedepisodes', str(total_watched))
            set_property('unwatchedepisodes', str(total_unwatched))
            set_property('totalepisodes', str(total_aired_eps))
            set_property('totalseasons', str(total_seasons))
            listitem.addContextMenuItems(cm)
            listitem.setArt({'poster': meta_get('poster'), 'fanart': meta_get('fanart'), 'banner': meta_get('banner'), 'clearart': meta_get('clearart'), 'clearlogo': meta_get('clearlogo'), 'landscape': meta_get('landscape')})
            listitem.setCast(meta['cast'])
            listitem.setInfo('video', remove_meta_keys(meta, dict_removals))
            append({'listitem': (url, listitem, False), 'sort_value': sort_value, 'sort_title': title})
        except: pass
    __handle__ = int(argv[1])
    list_items = []
    append = list_items.append
    meta_user_info, watched_indicators, watched_info, all_episodes, include_year_in_title, open_extras = get_tvshow_info()
    ep_list = get_next_episodes(watched_info)
    tmdb_list = [i['tmdb_id'] for i in ep_list]
    # Hidden-items fetch is best effort; default to nothing hidden.
    try: exclude_list = trakt_get_hidden_items('progress_watched')
    except: exclude_list = []
    show_all_episodes = True if all_episodes in (1, 2) else False
    threads = list(make_thread_list(build_content, tmdb_list, Thread))
    [i.join() for i in threads]
    # Sort: visible shows first, then by article-stripped title.
    item_list = sorted(list_items, key=lambda k: (k['sort_value'], title_key(k['sort_title'], ignore_articles())), reverse=False)
    item_list = [i['listitem'] for i in item_list]
    kodi_utils.add_dir({'mode': 'nill'}, '[I][COLOR=grey2]%s[/COLOR][/I]' % heading.upper(), __handle__, iconImage='settings.png', isFolder=False)
    kodi_utils.add_items(__handle__, item_list)
    kodi_utils.set_content(__handle__, 'tvshows')
    kodi_utils.end_directory(__handle__, cacheToDisc=False)
    kodi_utils.set_view_mode('view.main', 'tvshows')
    kodi_utils.focus_index(1)
def _scraper_worker(self, folder_results):
    """Scrape every folder in folder_results concurrently, one thread each."""
    workers = list(make_thread_list(self._scrape_directory, folder_results, Thread))
    for worker in workers:
        worker.join()