def load_cache(cls):
    # type: () -> None
    """
    Load the "missing trailer" caches (TMDb and library) from their
    JSON files on disk into class-level dictionaries.

    Missing files are tolerated; a corrupt JSON file is deleted so it
    can be rebuilt; I/O errors are logged; aborts are re-raised.

    :return:
    """
    path = os.path.join(Settings.get_remote_db_cache_path(),
                        'index', 'missing_tmdb_trailers.json')
    path = xbmcvfs.validatePath(path)
    cls.abort_on_shutdown()
    with cls.lock:
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)
            if os.path.exists(path):
                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    # BUG FIX: json.load() no longer accepts an
                    # 'encoding' keyword (removed in Python 3.9,
                    # TypeError); the stream is already decoded utf-8.
                    cls._all_missing_tmdb_trailers = json.load(
                        cacheFile,
                        object_hook=TrailerUnavailableCache.datetime_parser)
                    size = len(cls._all_missing_tmdb_trailers)
                    Statistics.missing_tmdb_trailers_initial_size(size)
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            cls._logger.exception('')
        except JSONDecodeError as e:
            # Corrupt cache: remove it so it is rebuilt from scratch.
            os.remove(path)
        except Exception as e:
            cls._logger.exception('')

        cls.abort_on_shutdown()
        path = os.path.join(Settings.get_remote_db_cache_path(),
                            'index', 'missing_library_trailers.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)
            if os.path.exists(path):
                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    cls._all_missing_library_trailers = json.load(
                        cacheFile,
                        object_hook=TrailerUnavailableCache.datetime_parser)
                    size = len(cls._all_missing_library_trailers)
                    Statistics.missing_library_trailers_initial_size(size)
        except AbortException:
            reraise(*sys.exc_info())
        except JSONDecodeError as e:
            os.remove(path)
        except IOError as e:
            cls._logger.exception('')
        except Exception as e:
            cls._logger.exception('')
def file_check():
    """
    Validate the configured external-program paths before use.

    When the 'filePathCheck' addon option is enabled, the configured
    Playnite / Kodi executable paths are normalized via
    xbmcvfs.validatePath() and handed to executable_check(); otherwise
    the check is skipped.
    """
    # NOTE: Linux (wmctrl / xdotool / window-manager detection) and OSX
    # variants of this check were previously present here but are
    # currently disabled.
    if this.file_path_check == True:
        log(f'running program file check, option is enabled: '
            f'filePathCheck = {str(this.file_path_check)}')
        if this.os_win:
            this.playnite_desktop_win = addon.getSettingString(
                "PlayniteDesktopWin")
            this.playnite_fullscreen_win = addon.getSettingString(
                "PlayniteFullscreenWin")
            this.kodi_win = addon.getSettingString("KodiWin")
            playniteDesktopExe = xbmcvfs.validatePath(
                this.playnite_desktop_win)
            playniteFullscreenExe = xbmcvfs.validatePath(
                this.playnite_fullscreen_win)
            kodiExe = xbmcvfs.validatePath(this.kodi_win)
            # BUG FIX: the validated Playnite paths were computed but the
            # raw, unvalidated settings were passed on; pass the validated
            # paths, matching how kodiExe is handled.
            executable_check(playniteDesktopExe,
                             playniteFullscreenExe, kodiExe)
    else:
        # BUG FIX: the original referenced the undefined name
        # 'filePathCheck' here (NameError on this branch).
        log(f'skipping program file check, option disabled: '
            f'filePathCheck = {this.file_path_check}')
def __init__(self, *args, **kwargs):
    # type: (*str, **Any) -> None
    """
    Create or open a playlist file.

    :param args: args[0] is the playlist name (required)
    :param kwargs: 'append' (default True) opens the file for appending;
                   'rotate' (default False) first saves any existing
                   playlist as '<name>.old'; exactly one of the two must
                   be truthy. 'playlist_format' (default False) selects
                   a Kodi smart-playlist path under PLAYLIST_PATH instead
                   of a plain file under FRONTEND_DATA_PATH.
    """
    self._logger = module_logger.getChild(self.__class__.__name__)
    self._file = None
    if len(args) == 0:
        self._logger.error('Playlist constructor requires an argument')
        return
    playlist_name = args[0]
    self._playlist_name = playlist_name
    append = kwargs.get('append', True)
    rotate = kwargs.get('rotate', False)
    # Exactly one of append / rotate may be selected.
    # NOTE(review): assert is stripped under python -O; consider raising
    # ValueError instead — confirm callers don't rely on AssertionError.
    assert append ^ rotate
    self.playlist_format = kwargs.get('playlist_format', False)
    if self.playlist_format:
        # Smart playlists live in Kodi's playlist directory.
        self.path = Constants.PLAYLIST_PATH + '/' + \
            playlist_name + Playlist.SMART_PLAYLIST_SUFFIX
    else:
        self.path = Constants.FRONTEND_DATA_PATH + '/' + \
            playlist_name  # + Playlist.PLAYLIST_SUFFIX
    self.path = xbmcvfs.validatePath(self.path)
    self.path = xbmcvfs.translatePath(self.path)
    DiskUtils.create_path_if_needed(Constants.FRONTEND_DATA_PATH)
    if not self.playlist_format:
        # 'at' appends to an existing playlist; 'wt' truncates it.
        self.mode = 'wt'
        if append:
            self.mode = 'at'
        else:
            self.mode = 'wt'
        if rotate:
            try:
                # Preserve the previous playlist as '<name>.old'.
                save_path = Constants.FRONTEND_DATA_PATH + '/' + playlist_name + '.old'
                save_path = xbmcvfs.validatePath(save_path)
                try:
                    if os.path.exists(self.path):
                        os.replace(self.path, save_path)
                except Exception as e:
                    self._logger.exception('')
            except Exception as e:
                self._logger.exception('')
    try:
        # Line-buffered text file, universal newlines, utf-8.
        self._file = io.open(self.path, mode=self.mode, buffering=1,
                             newline=None, encoding='utf-8')
    except Exception as e:
        self._logger.exception('')
def delete_file(filename):
    """Delete *filename* from the Kodi VFS if it exists, logging the
    outcome either way."""
    target = xbmcvfs.validatePath(filename)
    if not xbmcvfs.exists(target):
        log("delete file failed: {}".format(target))
        return
    log("deleting file: {}".format(target))
    xbmcvfs.delete(target)
def load_unprocessed_movie_cache(cls):
    # type: () -> None
    """
    Load the cache of not-yet-processed TMDb movies from its JSON file
    into cls._unprocessed_movies (empty dict when no file exists).

    A corrupt cache file is deleted so it can be rebuilt; I/O errors
    are logged; aborts are re-raised.

    :return:
    """
    path = os.path.join(Settings.get_remote_db_cache_path(),
                        'index', 'tmdb_unprocessed_movies.json')
    path = xbmcvfs.validatePath(path)
    try:
        parent_dir, file_name = os.path.split(path)
        DiskUtils.create_path_if_needed(parent_dir)
        with CacheIndex.lock:
            if os.path.exists(path):
                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    # BUG FIX: json.load() no longer accepts an
                    # 'encoding' keyword (removed in Python 3.9,
                    # TypeError); the stream is already decoded utf-8.
                    cls._unprocessed_movies = json.load(
                        cacheFile,
                        object_hook=CacheIndex.datetime_parser)
                cls.last_saved_movie_timestamp = None
                cls._unprocessed_movie_changes = 0
            else:
                cls._unprocessed_movies = {}
        Monitor.throw_exception_if_abort_requested()
    except AbortException:
        reraise(*sys.exc_info())
    except IOError as e:
        CacheIndex.logger().exception('')
    except JSONDecodeError as e:
        # Corrupt cache: remove it so it is rebuilt from scratch.
        os.remove(path)
    except Exception as e:
        CacheIndex.logger().exception('')
def save_unprocessed_movie_cache(cls, flush=False):
    # type: (bool) -> None
    """
    Persist the cache of unprocessed TMDb movies to disk.

    Writes are throttled: unless flush is set, the save is skipped when
    fewer than 10 changes are pending and the last save was under five
    minutes ago.

    :param flush: force an immediate save regardless of throttling
    :return:
    """
    # TODO: Should use lock here, review locking
    with cls.lock:
        if cls._unprocessed_movie_changes == 0:
            return
        if (not flush and
                # Constants.TRAILER_CACHE_FLUSH_UPDATES)
                (cls._unprocessed_movie_changes < 10) and
                (datetime.datetime.now() -
                 cls._last_saved_unprocessed_movie_timestamp) <
                datetime.timedelta(minutes=5)):
            return

        path = os.path.join(Settings.get_remote_db_cache_path(),
                            'index', 'tmdb_unprocessed_movies.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)
        try:
            with io.open(path, mode='wt', newline=None,
                         encoding='utf-8') as cacheFile:
                # BUG FIX: json.dumps() has no 'encoding' keyword on
                # Python 3 (TypeError); the file object handles encoding.
                json_text = json.dumps(cls.get_unprocessed_movies(),
                                       ensure_ascii=False,
                                       default=CacheIndex.handler,
                                       indent=3,
                                       sort_keys=True)
                cacheFile.write(json_text)
                cacheFile.flush()
                cls._last_saved_unprocessed_movie_timestamp = \
                    datetime.datetime.now()
                cls._unprocessed_movie_changes = 0
            Monitor.throw_exception_if_abort_requested()
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            cls.logger().exception('')
        except Exception as e:
            cls.logger().exception('')
def validate_path(path):
    """Returns the translated path.

    :param path:Path to format
    :type path:str
    :return:Translated path
    :rtype:str
    """
    # Kodi 19+ exposes validatePath on xbmcvfs; fall back to the legacy
    # xbmc location on older versions.
    validator = (xbmcvfs.validatePath  # pylint: disable=no-member
                 if hasattr(xbmcvfs, "validatePath")
                 else xbmc.validatePath)  # pylint: disable=no-member
    return validator(path)
def hide_loading_indicator(self):
    """Fade out the busy spinner and fade in the background image."""
    media_parts = (ADDON_PATH, 'resources', 'skins', 'Default', 'media',
                   self.BACKGROUND_IMAGE)
    bg_img = xbmcvfs.validatePath('/'.join(media_parts))
    fade_out = ('conditional',
                'effect=fade start=100 end=0 time=500 condition=true')
    fade_in = ('conditional',
               'effect=fade start=0 end=100 time=500 delay=500 condition=true')
    self.loading_control.setAnimations([fade_out])
    self.background_control.setAnimations([fade_in])
    self.background_control.setImage(bg_img)
def save_found_trailer_ids_cache(cls, flush=False):
    # type: (bool) -> None
    """
    Persist the set of TMDb ids for which trailers were found.

    Writes are throttled: unless flush is set, the save is skipped when
    fewer than Constants.TRAILER_CACHE_FLUSH_UPDATES changes are pending
    and the last save was under five minutes ago.

    :param flush: force an immediate save regardless of throttling
    :return:
    """
    with cls.lock:
        if cls._unsaved_trailer_changes == 0:
            return
        if (not flush and
                (cls._unsaved_trailer_changes <
                 Constants.TRAILER_CACHE_FLUSH_UPDATES) and
                (datetime.datetime.now() -
                 cls._last_saved_trailer_timestamp) <
                datetime.timedelta(minutes=5)):
            return

        path = os.path.join(Settings.get_remote_db_cache_path(),
                            'index', 'tmdb_found_trailers.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)
        try:
            with io.open(path, mode='wt', newline=None,
                         encoding='utf-8') as cacheFile:
                found_trailer_id_list = list(cls._found_tmdb_trailer_ids)
                # BUG FIX: json.dumps() has no 'encoding' keyword on
                # Python 3 (TypeError); the file object handles encoding.
                json_text = json.dumps(found_trailer_id_list,
                                       ensure_ascii=False,
                                       default=CacheIndex.handler,
                                       indent=3,
                                       sort_keys=True)
                cacheFile.write(json_text)
                cacheFile.flush()
                cls._last_saved_trailer_timestamp = datetime.datetime.now()
                cls._unsaved_trailer_changes = 0
            Monitor.throw_exception_if_abort_requested()
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
        except Exception as e:
            CacheIndex.logger().exception('')
def _get_folder_images(self, path):
    """Return validated paths of the .jpg/.png files directly inside
    *path* (non-recursive; a 'recursive' option existed but is disabled)."""
    self.log('_get_folder_images started with path: %s' % repr(path))
    _, files = xbmcvfs.listdir(path)
    images = []
    for entry in files:
        # Match on the last three characters of the lowered name.
        if entry.lower()[-3:] in ('jpg', 'png'):
            images.append(xbmcvfs.validatePath(path + entry))
    self.log('_get_folder_images ends')
    return images
def init_global_controls(self):
    """Create the preload, background and loading-spinner image controls
    and attach them to the window."""
    loading_img = xbmcvfs.validatePath('/'.join(
        (ADDON_PATH, 'resources', 'skins', 'Default', 'media',
         'srr_busy.gif')))
    win_width = self.xbmc_window.getWidth()
    win_height = self.xbmc_window.getHeight()
    half_width = int(win_width / 2)
    half_height = int(win_height / 2)
    # 128x128 spinner centered on the window.
    self.loading_control = ControlImage(half_width - 64, half_height - 64,
                                        128, 128, loading_img)
    # 1x1 off-screen control used for image pre-loading.
    self.preload_control = ControlImage(-1, -1, 1, 1, '')
    # Full-window background image control.
    self.background_control = ControlImage(0, 0, win_width, win_height, '')
    self.global_controls = [
        self.preload_control, self.background_control, self.loading_control
    ]
    self.xbmc_window.addControls(self.global_controls)
def walkTree(self, directory, recurse=True):
    """
    Walk *directory* through the backup VFS, registering each
    non-excluded subdirectory and every file with self.addFile().

    :param directory: root directory to walk (one trailing separator is
                      stripped)
    :param recurse: when True, descend into subdirectories
    """
    if (utils.getSettingBool('verbose_logging')):
        utils.log('walking ' + directory + ', recurse: ' + str(recurse))
    # Normalize: drop a single trailing path separator.
    if (directory[-1:] == '/' or directory[-1:] == '\\'):
        directory = directory[:-1]

    if (self.vfs.exists(directory + self.pathSep)):
        dirs, files = self.vfs.listdir(directory)

        if (recurse):
            # create all the subdirs first
            for aDir in dirs:
                dirPath = xbmcvfs.validatePath(
                    xbmcvfs.translatePath(directory + self.pathSep + aDir))
                file_ext = aDir.split('.')[-1]

                # check if directory is excluded
                if (not any(
                        dirPath.startswith(exDir)
                        for exDir in self.exclude_dir)):
                    # "-" prefix apparently marks a directory entry for
                    # addFile - NOTE(review): confirm addFile semantics.
                    self.addFile("-" + dirPath)

                    # catch for "non directory" type files
                    # NOTE(review): this loop iterates the CHARACTERS of
                    # the extension string, so any single character found
                    # in self.not_dir suppresses the recursive walk.
                    # Confirm whether whole-extension membership
                    # ("file_ext in self.not_dir") was intended.
                    shouldWalk = True
                    for s in file_ext:
                        if (s in self.not_dir):
                            shouldWalk = False

                    if (shouldWalk):
                        self.walkTree(dirPath)

        # copy all the files
        for aFile in files:
            filePath = xbmcvfs.translatePath(
                directory + self.pathSep + aFile)
            self.addFile(filePath)
def get_json_cache_file_path_for_movie_id(cls, movie_id,  # type: Union[int, str]
                                          source,  # type: str
                                          error_msg=''  # type: str
                                          ):
    # type: (...) -> Union[str, None]
    """
    Returns the path for a cache JSON file for the given movie_id and
    source.

    To reduce clutter, cached data is sharded into single-character
    folders derived from the source and the first digit of the id.

    :param movie_id:
    :param source:
    :param error_msg: Optional text to add to any error message.
                      Typically a movie title.
    :return: validated path, or None on unknown source or error
    """
    try:
        prefix = Cache.generate_unique_id_from_source(
            movie_id, source, error_msg=error_msg)

        if source == Movie.LIBRARY_SOURCE:
            # Local library ids are bare numbers: shard on first digit.
            folder = prefix[0]
        else:
            # Other sources prefix the id ('tmdb_', 'tfh_', 'appl_');
            # shard on a source letter plus the first character after
            # the underscore.
            source_letter = {Movie.TMDB_SOURCE: 't',
                             Movie.TFH_SOURCE: 'h',
                             Movie.ITUNES_SOURCE: 'a'}.get(source)
            if source_letter is None:
                cls._logger.debug('Unexpected source:', source,
                                  'movie_id:', movie_id)
                return None
            id_part = prefix.split('_', 1)[1]
            folder = source_letter + id_part[0]

        cache_file = prefix + '.json'
        path = os.path.join(Settings.get_remote_db_cache_path(),
                            folder, cache_file)
        return xbmcvfs.validatePath(path)
    except AbortException:
        reraise(*sys.exc_info())
    except Exception as e:
        cls._logger.exception('')
        return None
def save_cache(cls, ignore_shutdown=False) -> None:
    """
    Persist the TMDb and library "missing trailer" caches to disk.

    Entries older than the configured re-check interval are purged
    first. Each cache is written to a temp file which then atomically
    replaces the real file, so readers never see a partial write.

    :param ignore_shutdown: passed through to the shutdown checks
    :return:
    """
    cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
    with cls.lock:
        if cls.tmdb_unsaved_changes == 0 and cls.library_unsaved_changes == 0:
            return
        if cls.tmdb_unsaved_changes > 0:
            # Purge entries whose last check has expired.
            entries_to_delete = []
            for key, entry in cls._all_missing_tmdb_trailers.items():
                elapsed_time = datetime.date.today() - entry['timestamp']
                elapsed_days = elapsed_time.days
                if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                    if entry[Movie.UNIQUE_ID_TMDB] in cls._all_missing_tmdb_trailers:
                        entries_to_delete.append(entry[Movie.UNIQUE_ID_TMDB])
            for entry_to_delete in entries_to_delete:
                del cls._all_missing_tmdb_trailers[entry_to_delete]

            cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'missing_tmdb_trailers.json')
                temp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                         'index',
                                         'missing_tmdb_trailers.json.temp')
                path = xbmcvfs.validatePath(path)
                temp_path = xbmcvfs.validatePath(temp_path)

                parent_dir, file_name = os.path.split(temp_path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                with io.open(temp_path, mode='wt', newline=None,
                             encoding='utf-8') as cacheFile:
                    # BUG FIX: json.dumps() has no 'encoding' keyword on
                    # Python 3 (TypeError); the file handles encoding.
                    json_text = json.dumps(
                        cls._all_missing_tmdb_trailers,
                        ensure_ascii=False,
                        default=TrailerUnavailableCache.handler,
                        indent=3,
                        sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                # Atomically swap the finished temp file into place.
                try:
                    os.replace(temp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace index of movies'
                                          f' missing trailers cache: {path}')
                cls.tmdb_last_save = datetime.datetime.now()
                cls.tmdb_unsaved_changes = 0
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')
            finally:
                # Best effort: after a successful replace the temp file
                # is already gone.
                try:
                    os.remove(temp_path)
                except Exception:
                    pass

        cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
        if cls.library_unsaved_changes > 0:
            # Purge entries whose last check has expired.
            entries_to_delete = []
            for key, entry in cls._all_missing_library_trailers.items():
                elapsed_time = datetime.date.today() - entry['timestamp']
                elapsed_days = elapsed_time.days
                if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                    if entry[Movie.MOVIEID] in cls._all_missing_library_trailers:
                        entries_to_delete.append(entry[Movie.MOVIEID])
            for entry_to_delete in entries_to_delete:
                del cls._all_missing_library_trailers[entry_to_delete]

            cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'missing_library_trailers.json')
                temp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                         'index',
                                         'missing_library_trailers.json.temp')
                path = xbmcvfs.validatePath(path)
                temp_path = xbmcvfs.validatePath(temp_path)

                # CONSISTENCY FIX: split temp_path (not path) like the
                # TMDb branch above; both share the same parent dir.
                parent_dir, file_name = os.path.split(temp_path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                with io.open(temp_path, mode='wt', newline=None,
                             encoding='utf-8') as cacheFile:
                    # TODO: Need ability to interrupt when ABORT.
                    # Object_handler not a valid arg to dumps
                    # BUG FIX: 'encoding' kwarg removed (TypeError on
                    # Python 3).
                    json_text = json.dumps(
                        cls._all_missing_library_trailers,
                        ensure_ascii=False,
                        default=TrailerUnavailableCache.handler,
                        indent=3,
                        sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                try:
                    os.replace(temp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace missing trailer'
                                          f' information cache: {path}')
                cls.library_last_save = datetime.datetime.now()
                cls.library_unsaved_changes = 0
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')
            finally:
                try:
                    os.remove(temp_path)
                except Exception:
                    pass
def save_unprocessed_movie_cache(cls, flush: bool = False) -> None:
    """
    Persist the cache of unprocessed TMDb movies, stripping each entry
    down to the TMDB_PAGE_DATA_FIELDS to save disk and RAM.

    Writes are throttled: unless flush is set, the save is skipped when
    fewer than 10 changes are pending and the last save was under five
    minutes ago.

    :param flush: force an immediate save regardless of throttling
    :return:
    """
    # TODO: Should use lock here, review locking
    with cls.lock:
        if cls._unprocessed_movie_changes == 0:
            return
        if (not flush and
                # Constants.TRAILER_CACHE_FLUSH_UPDATES)
                (cls._unprocessed_movie_changes < 10) and
                (datetime.datetime.now() -
                 cls._last_saved_unprocessed_movie_timestamp) <
                datetime.timedelta(minutes=5)):
            return
        try:
            path = os.path.join(Settings.get_remote_db_cache_path(),
                                'index', 'tmdb_unprocessed_movies.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            # Don't save unneeded fields. Takes up disk and RAM
            temp_entries = {}
            for tmdb_id, entry in cls.get_unprocessed_movies().items():
                temp_entry = {}
                for key in Movie.TMDB_PAGE_DATA_FIELDS:
                    temp_entry[key] = entry[key]
                temp_entries[tmdb_id] = temp_entry

            # BUG FIX: json.dumps() has no 'encoding' keyword on
            # Python 3 (TypeError); the file object handles encoding.
            json_text = json.dumps(temp_entries,
                                   ensure_ascii=False,
                                   default=CacheIndex.handler,
                                   indent=3,
                                   sort_keys=True)
            with io.open(path, mode='wt', newline=None,
                         encoding='utf-8') as cache_file:
                cache_file.write(json_text)
                cache_file.flush()
                cls._last_saved_unprocessed_movie_timestamp = \
                    datetime.datetime.now()
                cls._unprocessed_movie_changes = 0
            Monitor.throw_exception_if_abort_requested()
            # Release the serialized copies promptly.
            del json_text
            del temp_entries
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            cls.logger().exception('')
        except Exception as e:
            cls.logger().exception('')
def pathUserdata(path):
    """Return the validated path of *path* inside the addon's profile
    (userdata) directory, creating the profile directory if missing."""
    profile = xbmcaddon.Addon().getAddonInfo('profile')
    if not f_exists(profile):
        f_mkdir(profile)
    return xbmcvfs.validatePath(profile + path)
def validate_path(path):
    """Validate *path* with the API matching the running Kodi version."""
    from resources.lib.system import SYSTEM_VERSION
    # Kodi 19+ (SYSTEM_VERSION > 18) moved validatePath to xbmcvfs.
    if SYSTEM_VERSION <= 18:
        return xbmc.validatePath(path)
    return xbmcvfs.validatePath(path)
def get_trailer_cache_file_path_for_movie_id(cls, trailer, orig_file_name,
                                             normalized):
    # type: (Dict[str, Any], str, bool) -> Union[str, None]
    """
    Generates the path for a file in the cache for a trailer for given
    movie.

    :param trailer:
    :param orig_file_name:
    :param normalized: when True, name the file as the normalized
                       (volume-leveled) copy of the trailer
    :return: validated cache path, or None on invalid source or error
    """
    path = None
    movie_id = None
    source = None
    try:
        valid_sources = [Movie.LIBRARY_SOURCE, Movie.TMDB_SOURCE,
                         Movie.ITUNES_SOURCE, Movie.TFH_SOURCE]
        if trailer[Movie.SOURCE] in valid_sources:
            movie_id = Cache.get_video_id(trailer)
            source = trailer[Movie.SOURCE]
        else:
            if cls._logger.isEnabledFor(LazyLogger.DEBUG):
                cls._logger.debug('Not valid video source title:',
                                  trailer[Movie.TITLE],
                                  'source:', trailer[Movie.SOURCE])
        if movie_id is not None:
            # movie_id may begin with an '_'.
            prefix = movie_id + '_'
            folder = None
            if source == Movie.LIBRARY_SOURCE:
                folder = movie_id[0]
            elif source == Movie.TMDB_SOURCE:
                x = prefix.split('_', 1)
                folder = 't' + x[1][0]
            elif source == Movie.TFH_SOURCE:
                x = prefix.split('_', 1)
                folder = 'h' + x[1][0]
            elif source == Movie.ITUNES_SOURCE:
                x = prefix.split('_', 1)
                # BUG FIX: was 'a' + movie_id[1][0], which always yielded
                # the second character of the raw id (e.g. 'ap' for
                # 'appl_...') instead of the first character after the
                # prefix. Use the same sharding as the other sources and
                # as get_json_cache_file_path_for_movie_id.
                folder = 'a' + x[1][0]

            # Possible that trailer was downloaded into cache
            orig_file_name = re.sub(
                r'^' + re.escape(prefix), '', orig_file_name)
            if normalized:
                if 'normalized_' in orig_file_name:
                    cls._logger.debug('Already normalized:',
                                      trailer.get(Movie.TITLE, 'no title'),
                                      'orig_file_name:', orig_file_name)
                    file_name = prefix + orig_file_name
                else:
                    file_name = prefix + 'normalized_' + orig_file_name
            else:
                file_name = prefix + orig_file_name

            path = os.path.join(Settings.get_downloaded_trailer_cache_path(),
                                folder, file_name)
            # Should not be needed
            path = xbmcvfs.validatePath(path)
    except AbortException:
        reraise(*sys.exc_info())
    except Exception as e:
        title = trailer.get(Movie.TITLE, 'no title')
        cls._logger.exception('title:', title)
        path = None
    if cls._logger.isEnabledFor(LazyLogger.DEBUG_EXTRA_VERBOSE):
        cls._logger.debug_extra_verbose('Path:', path)
    return path
def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
    """
    Persist the cached TFH trailer entries to disk.

    Each entry is a trailer dict keyed by its YouTube id (e.g.
    "BZwDpOQNgpw") holding Kodi/TMDb movie fields plus addon-private
    "rts.*" fields. A dummy INDEX_CREATION_DATE entry recording the
    index-creation date and completeness flag is serialized with the
    real entries and removed from the in-memory dict afterwards.

    Writes are throttled: unless flush is set, the save is skipped when
    fewer than 50 changes are pending and the last save was under five
    minutes ago. Data is written to a temp file which then replaces the
    real cache file.

    :param flush: force an immediate save regardless of throttling
    :param complete: mark the index as fully built
    :return:
    """
    with cls.lock:
        if (not flush and
                (cls._unsaved_trailer_changes < 50) and
                (datetime.datetime.now() - cls._last_saved_trailer_timestamp)
                < datetime.timedelta(minutes=5)):
            return

        try:
            path = os.path.join(Settings.get_remote_db_cache_path(),
                                'index', 'tfh_trailers.json')
            path = xbmcvfs.validatePath(path)
            tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json.tmp')
            tmp_path = xbmcvfs.validatePath(tmp_path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            Monitor.throw_exception_if_abort_requested()
            # BUG FIX: open the temp file in write ('wt') mode, not
            # append ('at'): appending to a leftover temp file from an
            # earlier failed save would produce corrupt JSON.
            with io.open(tmp_path, mode='wt', newline=None,
                         encoding='utf-8') as cacheFile:
                if complete:
                    cls.set_creation_date()
                    # Set to True when complete, but don't set to False
                    # when not complete.
                    cls._cache_complete = True

                creation_date_str = datetime.datetime.strftime(
                    cls._time_of_index_creation, '%Y:%m:%d')
                cls._cached_trailers[cls.INDEX_CREATION_DATE] = {
                    cls.INDEX_CREATION_DATE: creation_date_str,
                    cls.CACHE_COMPLETE: cls._cache_complete
                }
                # BUG FIX: json.dumps() has no 'encoding' keyword on
                # Python 3 (TypeError); the file object handles encoding.
                json_text = json.dumps(cls._cached_trailers,
                                       ensure_ascii=False,
                                       default=TFHCache.abort_checker,
                                       indent=3,
                                       sort_keys=True)
                cacheFile.write(json_text)
                cacheFile.flush()

                # Get rid of dummy entry from local dict
                del cls._cached_trailers[cls.INDEX_CREATION_DATE]
                cls._last_saved_trailer_timestamp = datetime.datetime.now()
                cls._unsaved_trailer_changes = 0

            # BUG FIX: replace AFTER the temp file is closed; replacing
            # an open file fails on Windows.
            try:
                os.replace(tmp_path, path)
            except OSError:
                cls._logger.exception(f'Failed to replace missing trailer'
                                      f' information cache: {path}')
        except AbortException:
            # BUG FIX: re-raise aborts instead of letting the generic
            # Exception handler swallow them, consistent with the other
            # cache classes in this addon.
            reraise(*sys.exc_info())
        except IOError as e:
            TFHCache.logger().exception('')
        except Exception as e:
            TFHCache.logger().exception('')

    Monitor.throw_exception_if_abort_requested()