    def load_unprocessed_movie_cache(cls):
        # type: () -> None
        """

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_unprocessed_movies.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)
            with CacheIndex.lock:
                if os.path.exists(path):
                    with io.open(path,
                                 mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._unprocessed_movies = json.load(
                            cacheFile,
                            object_hook=CacheIndex.datetime_parser)
                        cls.last_saved_movie_timestamp = None
                        cls._unprocessed_movie_changes = 0
                else:
                    cls._unprocessed_movies = {}
            Monitor.throw_exception_if_abort_requested()
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
        except JSONDecodeError as e:
            os.remove(path)
        except Exception as e:
            CacheIndex.logger().exception('')
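
# The loaders above round-trip datetime values through JSON with a
# CacheIndex.datetime_parser object_hook, and the savers hand datetimes to
# json.dumps through a CacheIndex.handler default. A minimal sketch of such a
# pair, assuming a simple marker-key encoding (the real CacheIndex
# implementations may differ):

import datetime
import json


def handler(obj):
    # Called by json.dumps for any object it cannot serialize natively.
    if isinstance(obj, datetime.datetime):
        return {'__datetime__': obj.strftime('%Y-%m-%d %H:%M:%S')}
    raise TypeError(f'Object of type {type(obj)} is not JSON serializable')


def datetime_parser(dct):
    # Called by json.load for every decoded JSON object; restores datetimes.
    if '__datetime__' in dct:
        return datetime.datetime.strptime(dct['__datetime__'],
                                          '%Y-%m-%d %H:%M:%S')
    return dct


# Round trip (sub-second precision is dropped by the format string):
text = json.dumps({'saved': datetime.datetime.now()}, default=handler)
restored = json.loads(text, object_hook=datetime_parser)
assert isinstance(restored['saved'], datetime.datetime)
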
    def save_cache(cls):
        # type: () -> None
        """

        :return:
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        with CacheIndex.lock:
            try:
                Monitor.throw_exception_if_abort_requested()

                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:
                    json_text = cls.to_json()
                    cacheFile.write(json_text)
                    cacheFile.flush()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')
    def load_cache(cls):
        # type: () -> None
        """

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(),
                            'index', 'missing_tmdb_trailers.json')
        path = xbmcvfs.validatePath(path)
        cls.abort_on_shutdown()
        with cls.lock:
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                if os.path.exists(path):
                    with io.open(path, mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._all_missing_tmdb_trailers = json.load(
                            cacheFile,
                            object_hook=TrailerUnavailableCache.datetime_parser)
                        size = len(cls._all_missing_tmdb_trailers)
                        Statistics.missing_tmdb_trailers_initial_size(size)
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                cls._logger.exception('')

            cls.abort_on_shutdown()
            path = os.path.join(Settings.get_remote_db_cache_path(),
                                'index', 'missing_library_trailers.json')
            path = xbmcvfs.validatePath(path)
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)
                if os.path.exists(path):
                    with io.open(path, mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._all_missing_library_trailers = json.load(
                            cacheFile,
                            object_hook=TrailerUnavailableCache.datetime_parser)
                        size = len(cls._all_missing_library_trailers)
                        Statistics.missing_library_trailers_initial_size(size)
            except AbortException:
                reraise(*sys.exc_info())
            except JSONDecodeError as e:
                os.remove(path)
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

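
# A recovery idiom shared by all of these loaders: a cache file that fails to
# parse (JSONDecodeError) is deleted so the next run rebuilds it, rather than
# tripping over the same corrupt file forever. The skeleton, reduced to a
# standalone sketch:

import io
import json
import os
from json import JSONDecodeError


def load_or_discard(path):
    try:
        with io.open(path, mode='rt', newline=None,
                     encoding='utf-8') as cache_file:
            return json.load(cache_file)
    except JSONDecodeError:
        os.remove(path)  # Corrupt cache; it will be rebuilt on the next pass.
        return None
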
    def save_unprocessed_movie_cache(cls, flush=False):
        # type: (bool) -> None
        """
        :param flush:
        :return:
        """

        #  TODO: Should use lock here, review locking
        with cls.lock:
            if cls._unprocessed_movie_changes == 0:
                return

            if (not flush
                    # Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and cls._unprocessed_movie_changes < 10
                    and (datetime.datetime.now()
                         - cls._last_saved_unprocessed_movie_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                'tmdb_unprocessed_movies.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            try:
                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:

                    # TODO: Need ability to interrupt on ABORT. An object
                    # handler is not a valid argument to dumps.

                    json_text = json.dumps(cls.get_unprocessed_movies(),
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    cls._last_saved_unprocessed_movie_timestamp = \
                        datetime.datetime.now()
                    cls._unprocessed_movie_changes = 0

                    Monitor.throw_exception_if_abort_requested()

            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls.logger().exception('')
            except Exception as e:
                cls.logger().exception('')
    def save_search_pages(self, flush=False):
        # type: (bool) -> None
        """

        :return:
        """
        with CacheIndex.lock:
            if (not flush and self.get_number_of_unsaved_changes() <
                    Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and self.get_time_since_last_save() <
                    datetime.timedelta(minutes=5)):
                return
            saved_pages = len(self._cached_page_by_key.items())
            path = xbmcvfs.validatePath(self._path)
            temp_path = xbmcvfs.validatePath(self._temp_path)
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                with io.open(temp_path,
                             mode='wt',
                             newline=None,
                             encoding='utf-8') as cacheFile:
                    json_dict = self.to_json()

                    # TODO: Need ability to interrupt on ABORT. An object
                    # handler is not a valid argument to dumps.

                    json_text = json.dumps(json_dict,
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    self._number_of_unsaved_changes = 0
                    self._time_of_last_save = datetime.datetime.now()

                try:
                    os.replace(temp_path, path)
                except OSError:
                    self._logger.exception(
                        f'Failed to replace movie information'
                        f' planned for download: {path}')
                Monitor.throw_exception_if_abort_requested()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                self._logger.exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                self._logger.exception('')

        self._logger.debug_verbose("Entries Saved: ", saved_pages)
    def __init__(self, *args, **kwargs):
        # type: (*str, **Any) -> None
        """

        :param args:
        :param kwargs:
        """
        self._logger = module_logger.getChild(self.__class__.__name__)
        self._file = None

        if len(args) == 0:
            self._logger.error('Playlist constructor requires an argument')
            return

        playlist_name = args[0]
        self._playlist_name = playlist_name
        append = kwargs.get('append', True)
        rotate = kwargs.get('rotate', False)
        assert append ^ rotate, 'append and rotate are mutually exclusive'
        self.playlist_format = kwargs.get('playlist_format', False)

        if self.playlist_format:
            self.path = Constants.PLAYLIST_PATH + '/' + \
                playlist_name + Playlist.SMART_PLAYLIST_SUFFIX
        else:
            self.path = Constants.FRONTEND_DATA_PATH + '/' + \
                playlist_name  # + Playlist.PLAYLIST_SUFFIX
        self.path = xbmcvfs.validatePath(self.path)
        self.path = xbmcvfs.translatePath(self.path)
        DiskUtils.create_path_if_needed(Constants.FRONTEND_DATA_PATH)
        if not self.playlist_format:
            self.mode = 'at' if append else 'wt'
            if rotate:
                try:
                    save_path = (Constants.FRONTEND_DATA_PATH + '/'
                                 + playlist_name + '.old')
                    save_path = xbmcvfs.validatePath(save_path)
                    if os.path.exists(self.path):
                        os.replace(self.path, save_path)
                except Exception as e:
                    self._logger.exception('')

            try:
                self._file = io.open(self.path,
                                     mode=self.mode,
                                     buffering=1,
                                     newline=None,
                                     encoding='utf-8')
            except Exception as e:
                self._logger.exception('')
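
# The rotate option above is one-generation log rotation: the existing
# playlist is renamed to '<name>.old' before a fresh file is opened. The same
# move in isolation:

import os


def rotate_file(path):
    save_path = path + '.old'
    if os.path.exists(path):
        os.replace(path, save_path)  # Any previous '.old' is overwritten.
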
    def read_cached_value_from_disk(cls):
        # type: () -> CacheParameters
        """

        :return:
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        saved_preferences = None
        with CacheIndex.lock:
            try:
                if not os.access(path, os.R_OK):
                    cls._logger.error(
                        Messages.get_formatted_msg(Messages.CAN_NOT_READ_FILE,
                                                   path))
                    return None

                file_mod_time = datetime.datetime.fromtimestamp(
                    os.path.getmtime(path))
                now = datetime.datetime.now()
                expiration_time = now - datetime.timedelta(
                    days=Settings.get_expire_remote_db_cache_entry_days())

                if file_mod_time < expiration_time:
                    if cls._logger.isEnabledFor(LazyLogger.DEBUG):
                        cls._logger.debug('cache file EXPIRED for:', path)
                    return None

                Monitor.throw_exception_if_abort_requested()

                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    saved_preferences = json.load(cacheFile)
                    saved_preferences = CacheParameters(saved_preferences)
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

        return saved_preferences
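
# read_cached_value_from_disk treats the cache as stale when the file's
# modification time is older than the configured number of days. The same
# check as a small helper (names are illustrative):

import datetime
import os


def is_cache_file_expired(path, max_age_days):
    file_mod_time = datetime.datetime.fromtimestamp(os.path.getmtime(path))
    expiration_time = (datetime.datetime.now()
                       - datetime.timedelta(days=max_age_days))
    return file_mod_time < expiration_time
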
    def save_found_trailer_ids_cache(cls, flush=False):
        # type: (bool) -> None
        """
        :param flush:
        :return:
        """
        with cls.lock:
            if cls._unsaved_trailer_changes == 0:
                return

            if (not flush and (cls._unsaved_trailer_changes <
                               Constants.TRAILER_CACHE_FLUSH_UPDATES) and
                (datetime.datetime.now() - cls._last_saved_trailer_timestamp) <
                    datetime.timedelta(minutes=5)):
                return

            path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                'tmdb_found_trailers.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            try:
                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:
                    found_trailer_id_list = list(
                        cls._found_tmdb_trailer_ids)
                    json_text = json.dumps(found_trailer_id_list,
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    cls._last_saved_trailer_timestamp = \
                        datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                Monitor.throw_exception_if_abort_requested()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                CacheIndex.logger().exception('')
            except Exception as e:
                CacheIndex.logger().exception('')
    def onAVStarted(self):
        # type: () -> None
        """
            Detect when the player is playing something not initiated by this
            script. This can be due to a JSON RPC call or similar.Starting the
            player via keyboard or remote (that does not use JSON RPC)is
            detected by other means (onAction).

            Compare the movie that the player is playing versus what we expect
            it to play. If they don't match, then assume that something else
            launched the movie.

        :return:
        """
        try:
            # All local trailers played by Random Trailers will have a fake
            # genre of 'randomtrailers'. However, if a trailer is from a
            # remote source, such that the youtube plugin does the actual
            # playing, the genre will NOT be set to 'randomtrailers'.
            # Caching remote trailers eliminates this issue.

            genre = self.getVideoInfoTag().getGenre()
            # self._logger.debug('genre:', genre)
            if genre != 'randomtrailers':
                playing_file = super().getPlayingFile()
                if not (self._is_url and DiskUtils.is_url(playing_file)):
                    self._is_activated = False
                    if self._logger.isEnabledFor(LazyLogger.DEBUG):
                        self._logger.debug(
                            'Player is playing movie:', playing_file)
                    self.notify_non_random_trailer_video()
        except Exception:
            # There is nothing useful to do if the player query fails.
            pass
    def is_more_discovery_needed(cls, movie: MovieType) -> bool:
        if movie[Movie.DISCOVERY_STATE] <= Movie.DISCOVERY_COMPLETE:
            return False

        more_discovery_needed = False
        title = movie[Movie.TITLE]
        try:
            normalized_trailer_path = movie.get(Movie.NORMALIZED_TRAILER)
            if normalized_trailer_path is None:
                normalized_trailer_path = ''
            cached_trailer_path = movie.get(Movie.CACHED_TRAILER)
            if cached_trailer_path is None:
                cached_trailer_path = ''

            if DiskUtils.is_url(movie.get(Movie.TRAILER, '')):
                # Remote Trailer

                if Settings.is_normalize_volume_of_downloaded_trailers():
                    try:
                        if not os.path.exists(normalized_trailer_path):
                            cls._logger.debug(
                                f'title: {title} does not exist: '
                                f'{normalized_trailer_path}')
                            movie[Movie.NORMALIZED_TRAILER] = None
                            more_discovery_needed = True
                    except Exception as e:
                        cls._logger.log_exception(e)

                elif Settings.is_use_trailer_cache():
                    try:
                        if not os.path.exists(cached_trailer_path):
                            cls._logger.debug(
                                f'title: {title} does not exist: '
                                f'{cached_trailer_path}')
                            movie[Movie.CACHED_TRAILER] = None
                            movie[Movie.NORMALIZED_TRAILER] = None
                            more_discovery_needed = True
                    except Exception as e:
                        cls._logger.log_exception(e)
            elif Settings.is_normalize_volume_of_local_trailers():
                # Local trailer
                try:
                    if not os.path.exists(normalized_trailer_path):
                        cls._logger.debug(f'title: {title} does not exist: '
                                          f'{normalized_trailer_path}')
                        movie[Movie.NORMALIZED_TRAILER] = None
                        more_discovery_needed = True
                except Exception as e:
                    cls._logger.log_exception(e)

        except Exception as e:
            cls._logger.log_exception(e)

        if more_discovery_needed:
            movie[Movie.DISCOVERY_STATE] = Movie.DISCOVERY_COMPLETE
            cls._logger.debug(f'More discovery needed: {title}')

        return more_discovery_needed
    def write_tmdb_cache_json(cls, movie_id: Union[str, int], source: str,
                              movie: MovieType) -> None:
        """
            Write the given movie information into the cache as JSON

            Due to the small size of these files, will not check for
            AbortException during write nor save old version of file.
        """
        try:
            if source is None or source not in Movie.LIB_TMDB_ITUNES_SOURCES:
                cls._logger.debug('Invalid source:', source)
                return None
            movie_id = str(movie_id)
            path = Cache.get_json_cache_file_path_for_movie_id(
                movie_id, source)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path) and not os.access(path, os.W_OK):
                messages = Messages
                cls._logger.error(
                    messages.get_msg(Messages.CAN_NOT_WRITE_FILE) % path)
                return None
            temp_movie = {}
            for key in Movie.TMDB_ENTRY_FIELDS:
                temp_movie[key] = movie.get(key)

            Monitor.throw_exception_if_abort_requested()
            with io.open(
                    path,
                    mode='wt',
                    newline=None,
                    encoding='utf-8',
            ) as cacheFile:
                json_text = json.dumps(temp_movie,
                                       ensure_ascii=False,
                                       indent=3,
                                       sort_keys=True)
                cacheFile.write(json_text)
                cacheFile.flush()
                del temp_movie
        except AbortException:
            reraise(*sys.exc_info())
        except Exception as e:
            cls._logger.exception('')
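
# write_tmdb_cache_json copies only the whitelisted Movie.TMDB_ENTRY_FIELDS
# into a temporary dict before serializing, which keeps on-disk entries small.
# The same idea as a standalone sketch (the field list here is an assumed
# subset, not the real constant):

TMDB_ENTRY_FIELDS = ['title', 'year', 'trailer']


def filter_fields(movie, fields=TMDB_ENTRY_FIELDS):
    # Missing keys become None, mirroring movie.get(key) above.
    return {key: movie.get(key) for key in fields}
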
    def set_playing_file_path(self, file_path):
        # type: (str) -> None
        """

        :param file_path:
        :return:
        """
        self._is_url = DiskUtils.is_url(file_path)
        self._expected_file_path = file_path
    def load_search_pages(self):
        # type: () -> None
        """

        :return:
        """
        if self._cached_page_by_key is not None:
            return

        path = xbmcvfs.validatePath(self._path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path):
                Monitor.throw_exception_if_abort_requested()
                with CacheIndex.lock, io.open(path,
                                              mode='rt',
                                              newline=None,
                                              encoding='utf-8') as cacheFile:
                    encoded_values = json.load(
                        cacheFile,
                        object_hook=CacheIndex.datetime_parser)
                    loaded_cached_pages_data = self.from_json(encoded_values)
                    self._cached_page_by_key = loaded_cached_pages_data._cached_page_by_key
            else:
                self._cached_page_by_key: Dict[str, CachedPage] = dict()

        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            self._logger.exception('')
        except JSONDecodeError as e:
            os.remove(path)
            self._cached_page_by_key = dict()
        except Exception as e:
            self._logger.exception('')

        self._logger.debug_verbose("Loaded entries:",
                                   len(self._cached_page_by_key))
        self._time_of_last_save = datetime.datetime.now()
    def get_stats_for_caches(self) -> Dict[str, UsageData]:
        """
            Get disk usage information for the caches.
            Returns a map of UsageData for each cache. Primarily used
            by garbage collection and reporting.

        :return:
        """
        local_class = CacheManager
        TRAILER_PATTERN = re.compile(r'^.*-trailer\..*$')
        JSON_PATTERN = re.compile(r'^.*\.json$')
        TFH_PATTERN = re.compile(r'^.*-movie\..*$')

        TRAILER_TYPE = 'trailer'
        JSON_TYPE = 'json'

        # When the Trailer Cache and Data Cache (.json) are the same

        if (Settings.get_downloaded_trailer_cache_path() ==
                Settings.get_remote_db_cache_path()):
            usage_data_map = DiskUtils.get_stats_for_path(
                Settings.get_downloaded_trailer_cache_path(), {
                    'trailer': (TRAILER_PATTERN, TRAILER_TYPE),
                    'json': (JSON_PATTERN, JSON_TYPE),
                    'tfh': (TFH_PATTERN, TRAILER_TYPE)
                })
        else:
            # When Trailer Cache and Data Cache are different directories.

            usage_data_map = DiskUtils.get_stats_for_path(
                Settings.get_downloaded_trailer_cache_path(), {
                    'trailer': (TRAILER_PATTERN, TRAILER_TYPE),
                    'tfh': (TFH_PATTERN, TRAILER_TYPE)
                })
            json_usage_data = DiskUtils.get_stats_for_path(
                Settings.get_remote_db_cache_path(),
                {'json': (JSON_PATTERN, JSON_TYPE)})
            usage_data_map['json'] = json_usage_data['json']

        return usage_data_map
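
# get_stats_for_caches classifies cache files purely by filename pattern.
# A quick demonstration of what each pattern matches (filenames are
# illustrative):

import re

TRAILER_PATTERN = re.compile(r'^.*-trailer\..*$')
JSON_PATTERN = re.compile(r'^.*\.json$')
TFH_PATTERN = re.compile(r'^.*-movie\..*$')

assert TRAILER_PATTERN.match('tmdb_603_Matrix-trailer.mkv')
assert JSON_PATTERN.match('tmdb_603.json')
assert TFH_PATTERN.match('tfh_BZwDpOQNgpw_normalized_Larry-movie.mkv')
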
    def load_cache(cls) -> None:
        """

        :return: True if cache is full and no further discovery needed
        """
        with cls.lock:
            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')
                path = xbmcvfs.validatePath(path)

                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                if os.path.exists(path):
                    with io.open(path, mode='rt', newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._cached_trailers = json.load(
                            cacheFile,
                            # object_hook=TFHCache.abort_checker,
                        )
                        cls.last_saved_movie_timestamp = None
                        cls._unsaved_trailer_changes = 0
                        cls.load_creation_date()
                else:
                    cls._cached_trailers = dict()
                    # Set to an old time so that cache is expired
                    cls._time_of_index_creation = datetime.datetime(2000, 1, 1)
                    cls._index_complete = False

            except IOError as e:
                TFHCache.logger().exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
        return
    def load_found_trailer_cache(cls) -> None:
        """

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_found_trailers.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path):
                with CacheIndex.lock, io.open(path,
                                              mode='rt',
                                              newline=None,
                                              encoding='utf-8') as cacheFile:
                    found_trailers_list = json.load(
                        cacheFile,
                        object_hook=CacheIndex.datetime_parser)
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._found_tmdb_trailer_ids: Set[int] = set(
                        found_trailers_list)
                    cls._unsaved_trailer_changes = 0
            else:
                cls._found_tmdb_trailer_ids: Set[int] = set()

            Monitor.throw_exception_if_abort_requested()
            cls.remove_unprocessed_movies(list(cls._found_tmdb_trailer_ids))
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
        except JSONDecodeError as e:
            os.remove(path)
        except Exception as e:
            CacheIndex.logger().exception('')
    def __init__(self, trailer_cache: bool) -> None:
        """
            Populates this instance with relevant settings. Allows for uniform
            access to those settings.

        :param trailer_cache:
        """
        local_class = CacheData
        if local_class._logger is None:
            local_class._logger = module_logger.getChild(local_class.__name__)

        self._usage_data = None
        self._messages = Messages
        self._is_trailer_cache = trailer_cache
        if trailer_cache:
            self._is_limit_number_of_cached_files = \
                Settings.is_limit_number_of_cached_trailers()
            if self._is_limit_number_of_cached_files:
                self._max_number_of_files = Settings.get_max_number_of_cached_trailers()

            self._is_limit_size_of_cache = Settings.is_limit_size_of_cached_trailers()
            if self._is_limit_size_of_cache:
                self._max_cache_size_mb = Settings.get_max_size_of_cached_trailers_mb()

            self._is_limit_percent_of_cache_disk = \
                Settings.is_limit_percent_of_cached_trailers()
            if self._is_limit_percent_of_cache_disk:
                self._max_percent_of_cache_disk = \
                    Settings.get_max_percent_of_cached_trailers()
                # TODO: Delete me
                # self._max_percent_of_cache_disk = 0.83

        else:
            self._is_limit_number_of_cached_files = \
                Settings.is_limit_number_of_cached_json()
            if self._is_limit_number_of_cached_files:
                self._max_number_of_files = Settings.get_max_number_of_cached_json()

            self._is_limit_size_of_cache = Settings.is_limit_size_of_cached_json()
            if self._is_limit_size_of_cache:
                self._max_cache_size_mb = Settings.get_max_size_of_cached_json_mb()

            self._is_limit_percent_of_cache_disk = \
                Settings.is_limit_percent_of_cached_json()
            if self._is_limit_percent_of_cache_disk:
                self._max_percent_of_cache_disk = \
                    Settings.get_max_percent_of_cached_json()
                # TODO_ delete me
                # self._max_percent_of_cache_disk = 0.0058

        if self._is_limit_number_of_cached_files:
            self._max_number_of_files_str = locale.format("%d",
                                                          self._max_number_of_files,
                                                          grouping=True)
        else:
            self._max_number_of_files = 0
            self._max_number_of_files_str = self._messages.get_msg(
                Messages.UNLIMITED)
        if self._is_limit_size_of_cache:
            self._max_cache_size_mb_str = DiskUtils.sizeof_fmt(
                self._max_cache_size_mb * 1024 * 1024)
        else:
            self._max_cache_size_mb = 0
            self._max_cache_size_mb_str = self._messages.get_msg(
                Messages.UNLIMITED)

        if self._is_limit_percent_of_cache_disk:
            self._max_percent_of_cache_disk_str = RATIO_DECIMAL_DIGITS_TO_PRINT.format(
                self._max_percent_of_cache_disk) + '%'
        else:
            self._max_percent_of_cache_disk = 100.0
            self._max_percent_of_cache_disk_str = self._messages.get_msg(
                Messages.UNLIMITED)

        self._remaining_allowed_files = None
        self._used_space_in_cache_fs = None
        self._free_disk_in_cache_fs = None
        self._total_size_of_cache_fs = None
        self._disk_used_by_cache = None
        self._actual_cache_percent = None
    def collect_garbage(self):
        # type: () -> None
        """
        Runs garbage collection on all of the caches according to the
        settings.

        This is a time-consuming process. It is normally kicked off by
        drive_garbage_collection.

        :return:
        """
        local_class = CacheData
        try:
            if local_class._logger.isEnabledFor(LazyLogger.DEBUG_EXTRA_VERBOSE):
                if self._is_trailer_cache:
                    local_class._logger.debug_extra_verbose('TRAILER CACHE')
                else:
                    local_class._logger.debug_extra_verbose('JSON CACHE')

            if self._is_limit_number_of_cached_files:
                #
                # Delete enough of the oldest files to keep the number
                # within limit

                number_of_cache_files_to_delete = - self._remaining_allowed_files
                if number_of_cache_files_to_delete > 0:
                    if local_class._logger.isEnabledFor(LazyLogger.INFO):
                        local_class._logger.info(
                            'limit_number_of_cached_files. number_of_files_to_delete:',
                            locale.format("%d", number_of_cache_files_to_delete,
                                          grouping=True))
                    # Order json files by age

                    for cache_file in self._usage_data.get_file_data_by_creation_date():
                        Monitor.throw_exception_if_abort_requested()
                        self._usage_data.remove_file(cache_file)
                        number_of_cache_files_to_delete -= 1
                        if number_of_cache_files_to_delete <= 0:
                            break
                else:
                    if local_class._logger.isEnabledFor(LazyLogger.INFO):
                        local_class._logger.info(
                            'limit_number_of_cached_files. Additional allowed files:',
                            locale.format("%d", self._remaining_allowed_files,
                                          grouping=True))

            if self._is_limit_size_of_cache:
                #
                # Delete enough of the oldest files to keep the number
                # within limit

                max_bytes_in_cache = (self._max_cache_size_mb * 1024 * 1024)
                bytes_of_files_to_delete = (self._usage_data.get_disk_used_by_cache()
                                            - max_bytes_in_cache)
                if local_class._logger.isEnabledFor(LazyLogger.INFO):
                    local_class._logger.info(
                        'limit_size_of_cache. max allowed size:',
                        DiskUtils.sizeof_fmt(max_bytes_in_cache))
                    local_class._logger.debug(
                        'actual disk used in cache:',
                        DiskUtils.sizeof_fmt(
                            self._usage_data.get_disk_used_by_cache()))
                    local_class._logger.debug(
                        'Amount to delete:',
                        DiskUtils.sizeof_fmt(bytes_of_files_to_delete))
                if bytes_of_files_to_delete > 0:
                    # Order json files by age

                    for cache_file in self._usage_data.get_file_data_by_creation_date():
                        Monitor.throw_exception_if_abort_requested()
                        self._usage_data.remove_file(cache_file)
                        bytes_of_files_to_delete = (
                            self._usage_data.get_disk_used_by_cache()
                            - max_bytes_in_cache)
                        if bytes_of_files_to_delete <= 0:
                            break

            if self._is_limit_percent_of_cache_disk:
                #
                # Delete enough of the oldest files to keep the number
                # within limit

                max_bytes_in_cache = (self._total_size_of_cache_fs *
                                      self._max_percent_of_cache_disk / 100.00)
                bytes_of_files_to_delete = (self._usage_data.get_disk_used_by_cache() -
                                            max_bytes_in_cache)

                if local_class._logger.isEnabledFor(LazyLogger.INFO):
                    local_class._logger.info(
                        'limit_percent of cached files. Calculated max size:',
                        DiskUtils.sizeof_fmt(max_bytes_in_cache))
                    local_class._logger.info('size to delete:',
                                             DiskUtils.sizeof_fmt(
                                                 bytes_of_files_to_delete))
                if bytes_of_files_to_delete > 0:
                    # Order json files by age

                    for cache_file in self._usage_data.get_file_data_by_creation_date():
                        Monitor.throw_exception_if_abort_requested()
                        self._usage_data.remove_file(cache_file)
                        bytes_of_files_to_delete = (
                            self._usage_data.get_disk_used_by_cache()
                            - max_bytes_in_cache)
                        if bytes_of_files_to_delete <= 0:
                            break

        except AbortException:
            reraise(*sys.exc_info())
        except Exception as e:
            local_class._logger.exception('')
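
# Each limit in collect_garbage applies the same strategy: walk the cache
# files oldest-first and delete until measured usage drops under the limit.
# The core loop reduced to a sketch (the UsageData API is assumed from the
# calls above):

def shrink_to_limit(usage_data, max_bytes_in_cache):
    for cache_file in usage_data.get_file_data_by_creation_date():
        if usage_data.get_disk_used_by_cache() <= max_bytes_in_cache:
            break
        usage_data.remove_file(cache_file)
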
    def report_status(self):
        # type: () -> None
        """
            Produces a simple report about the cache using the Settings
            and UsageData.

        :return:
        """
        local_class = CacheData

        try:
            if self._is_trailer_cache:
                msg_max_trailers = 'max allowed trailers:'
                msg_disk_usage = 'max_trailer_cache_disk_usage:'
                msg_cache_percent = 'max percent of trailer cache disk usage:'
                msg_files_in_cache = 'trailers_in_cache:'
                msg_remaining_allowed_files = 'remaining_allowed_trailers:'
                msg_total_size_of_cache_fs = 'Size of trailer cache fs:'
                msg_used_space_in_cache_fs = 'Used space in trailer cache fs:'
                msg_free_space_in_cache_fs = 'free space in trailer cache fs:'
                msg_actual_fs_cache_percent = 'Actual percent of disk used by trailer ' \
                                              'cache:'
                msg_disk_used_by_cache = 'Actual disk used by trailer cache:'

            else:
                msg_max_trailers = 'max number of json files:'
                msg_disk_usage = 'max_json_cache_disk_usage:'
                msg_cache_percent = 'max_percent of json cache disk usage:'
                msg_files_in_cache = 'json_files_in_cache'
                msg_remaining_allowed_files = 'remaining_allowed_json_files'
                msg_total_size_of_cache_fs = 'Size of json cache fs:'
                msg_used_space_in_cache_fs = 'Used space in json cache fs:'
                msg_free_space_in_cache_fs = 'free space in json cache fs:'
                msg_actual_fs_cache_percent = 'Actual percent of disk used by json ' \
                                              'cache:'
                msg_disk_used_by_cache = 'Actual disk used by json cache:'

            if local_class._logger.isEnabledFor(LazyLogger.INFO):
                local_class._logger.info(msg_max_trailers,
                                                self._max_number_of_files_str,
                                                trace=Trace.STATS_CACHE)
                local_class._logger.info(msg_disk_usage,
                                                self._max_cache_size_mb_str,
                                                trace=Trace.STATS_CACHE)
                local_class._logger.info(msg_cache_percent,
                                                self._max_percent_of_cache_disk_str,
                                                trace=Trace.STATS_CACHE)

            files_in_cache = self._usage_data.get_number_of_files()
            if self._is_limit_number_of_cached_files:
                self._remaining_allowed_files = (self._max_number_of_files -
                                                 files_in_cache)
                remaining_allowed_files_str = locale.format("%d",
                                                            self._remaining_allowed_files,
                                                            grouping=True)
            else:
                self._remaining_allowed_files = None
                remaining_allowed_files_str = self._messages.get_msg(
                    Messages.UNLIMITED)

            self._used_space_in_cache_fs = self._usage_data.get_used_space()
            self._free_disk_in_cache_fs = self._usage_data.get_free_size()
            self._total_size_of_cache_fs = self._usage_data.get_total_size()
            self._disk_used_by_cache = self._usage_data.get_disk_used_by_cache()
            self._actual_cache_percent = (self._disk_used_by_cache /
                                          self._total_size_of_cache_fs) * 100.0

            if local_class._logger.isEnabledFor(LazyLogger.INFO):
                local_class._logger.info(
                    msg_total_size_of_cache_fs,
                    DiskUtils.sizeof_fmt(self._total_size_of_cache_fs))
                local_class._logger.info(
                    msg_used_space_in_cache_fs,
                    DiskUtils.sizeof_fmt(self._used_space_in_cache_fs))
                local_class._logger.info(
                    msg_free_space_in_cache_fs,
                    DiskUtils.sizeof_fmt(self._free_disk_in_cache_fs))

                local_class._logger.info(
                    msg_files_in_cache,
                    locale.format("%d", files_in_cache, grouping=True))
                local_class._logger.info(
                    msg_remaining_allowed_files,
                    remaining_allowed_files_str)

                local_class._logger.info(
                    msg_actual_fs_cache_percent,
                    RATIO_DECIMAL_DIGITS_TO_PRINT.format(
                        self._actual_cache_percent) + '%')

                local_class._logger.info(
                    msg_disk_used_by_cache,
                    DiskUtils.sizeof_fmt(self._disk_used_by_cache))
        except AbortException:
            reraise(*sys.exc_info())
        except Exception as e:
            local_class._logger.exception('')
    def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
        """
        :param flush:
        :param complete:
        :return:
        """
        with cls.lock:
            if (not flush
                    and cls._unsaved_trailer_changes < 50
                    and (datetime.datetime.now()
                         - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')

                path = xbmcvfs.validatePath(path)
                tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'tfh_trailers.json.tmp')

                tmp_path = xbmcvfs.validatePath(tmp_path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                # 'wt', not 'at': a leftover temp file must be truncated so
                # the os.replace below installs exactly one JSON document.
                with io.open(tmp_path, mode='wt', newline=None,
                             encoding='utf-8') as cacheFile:
                    if complete:
                        creation_date_str = datetime.datetime.strftime(
                            cls._time_of_index_creation, '%Y:%m:%d')
                    else:
                        creation_date_str = TFHCache.INCOMPLETE_CREATION_DATE_STR
                    cls._cached_trailers[TFHCache.INDEX_CREATION_DATE] = {
                        TFHCache.INDEX_CREATION_DATE: creation_date_str,
                    }

                    json_text = json.dumps(cls._cached_trailers,
                                           ensure_ascii=False,
                                           default=TFHCache.abort_checker,
                                           indent=3, sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                    # Get rid of dummy entry
                    del cls._cached_trailers[TFHCache.INDEX_CREATION_DATE]
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                try:
                    os.replace(tmp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace missing trailer'
                                          f' information cache: {path}')
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                TFHCache.logger().exception('')
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
    def save_unprocessed_movie_cache(cls, flush: bool = False) -> None:
        """
        :param flush:
        :return:
        """

        #  TODO: Should use lock here, review locking
        with cls.lock:
            if cls._unprocessed_movie_changes == 0:
                return

            if (not flush
                    # Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and cls._unprocessed_movie_changes < 10
                    and (datetime.datetime.now()
                         - cls._last_saved_unprocessed_movie_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tmdb_unprocessed_movies.json')
                path = xbmcvfs.validatePath(path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                # Don't save unneeded fields. Takes up disk and RAM

                temp_entries = {}
                for tmdb_id, entry in cls.get_unprocessed_movies().items():
                    temp_entry = {}
                    for key in Movie.TMDB_PAGE_DATA_FIELDS:
                        temp_entry[key] = entry[key]
                    temp_entries[tmdb_id] = temp_entry

                json_text = json.dumps(temp_entries,
                                       ensure_ascii=False,
                                       default=CacheIndex.handler,
                                       indent=3,
                                       sort_keys=True)

                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cache_file:

                    # TODO: Need ability to interrupt on ABORT. An object
                    # handler is not a valid argument to dumps.

                    cache_file.write(json_text)
                    cache_file.flush()
                    cls._last_saved_unprocessed_movie_timestamp = \
                        datetime.datetime.now()
                    cls._unprocessed_movie_changes = 0

                    Monitor.throw_exception_if_abort_requested()

                del json_text
                del temp_entries
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls.logger().exception('')
            except Exception as e:
                cls.logger().exception('')
    def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
        """
        :param flush:
        :param complete:
        :return:

        Typical json entry
        Items marked with * are kodi/TMDb artifacts

            "BZwDpOQNgpw": {
              "adult": false,
              "cast": [],
              "fanart": "default_fanart",
              "genre": [],
              "mpaa": "NR",
              "normalized_trailer": "/home/fbacher/.kodi/userdata/addon_data/script.video.randomtrailers/cache/hB/tfh_BZwDpOQNgpw_normalized_Larry Karaszewski on SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3) (2017)-movie.mkv",
              "original_language": "",
              "plot": "But wait! There's more! TFH has a podcast! \n\nIt's THE MOVIES THAT MADE ME, where you can join Oscar-nominated screenwriter Josh Olson and his, ummm, \"co-host\" Joe Dante in conversation with filmmakers, comedians, and all-around interesting people about the movies that made them who they are. Check it out now, and please subscribe wherever podcasts can be found.\n\nBut wait! There's more! TFH has a podcast! \n\nIt's THE MOVIES THAT MADE ME, where you can join Oscar-nominated screenwriter Josh Olson and his, ummm, \"co-host\" Joe Dante in conversation with filmmakers, comedians, and all-around interesting people about the movies that made them who they are. Check it out now, and please subscribe wherever podcasts can be found.\n\niTunes: http://itunes.trailersfromhell.com\nSpotify: http://spotify.trailersfromhell.com\nLibsyn: http://podcast.trailersfromhell.com\nGoogle Play: http://googleplay.trailersfromhell.com\nRSS: http://goo.gl/3faeG7\n\nAs always, you can find more commentary, more reviews, more podcasts, and more deep-dives into the films you don't know you love yet over at the Trailers From Hell mothership: \n\nhttp://www.trailersfromhell.com",
              "rating": 4.8974357,
              "genre": [],
              "rts.actors": "",
              "rts.certification": "Unrated",
              "rts.certificationImage": "ratings/us/unrated.png",
              "rts.directors": "",
              "rts.genres": "",
              "rts.runtime": "143 [B]Minutes[/B] - ",
              "rts.studios": "",
              "rts.tfhId": "BZwDpOQNgpw",
              "rts.tfh_title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3)",
              "rts.title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3) (2017) - TFH ",
              "rts.tmdb_id_not_found": true,
              "rts.voiced.actors": [],
              "rts.voiced.directors": [],
              "rts.voiced.studios": [],
              "rts.voiced.writers": [],
              "rts.writers": "",
              "rts.youtube.trailers_in_index": 1449,
              "rts.youtube_index": 204,
              "runtime": 8580,
              "source": "TFH",
              "studio": [
                 []
              ],
              "tags": [
                 "smokey and the bandit 3",
                 "larry karaszewski",
                 "jackie gleason"
              ],
              "thumbnail": "https://i.ytimg.com/vi_webp/BZwDpOQNgpw/maxresdefault.webp",
              "title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3)",
              "trailer": "https://youtu.be/BZwDpOQNgpw",
              "trailerDiscoveryState": "04_discoveryReadyToDisplay",
              "trailerPlayed": true,
              "trailerType": "default_trailerType",
              "uniqueid": {
                 "tmdb": "None"
              },
              "writer": [
                 []
              ],
              "year": 2017
           }

        """
        with cls.lock:
            if (not flush
                    and cls._unsaved_trailer_changes < 50
                    and (datetime.datetime.now()
                         - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')

                path = xbmcvfs.validatePath(path)
                tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'tfh_trailers.json.tmp')

                tmp_path = xbmcvfs.validatePath(tmp_path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                # 'wt', not 'at': a leftover temp file must be truncated so
                # the os.replace below installs exactly one JSON document.
                with io.open(tmp_path, mode='wt', newline=None,
                             encoding='utf-8') as cacheFile:

                    if complete:
                        cls.set_creation_date()
                        # Set to True when complete, but don't set to False
                        # when not complete.

                        cls._cache_complete = True

                    creation_date_str = datetime.datetime.strftime(
                        cls._time_of_index_creation, '%Y:%m:%d')

                    cls._cached_trailers[cls.INDEX_CREATION_DATE] = {
                        cls.INDEX_CREATION_DATE: creation_date_str,
                        cls.CACHE_COMPLETE: cls._cache_complete
                    }

                    json_text = json.dumps(cls._cached_trailers,
                                           ensure_ascii=False,
                                           default=TFHCache.abort_checker,
                                           indent=3, sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                    # Get rid of dummy entry from local dict
                    del cls._cached_trailers[cls.INDEX_CREATION_DATE]
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                try:
                    os.replace(tmp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace missing trailer'
                                          f' information cache: {path}')
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                TFHCache.logger().exception('')
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
    def save_cache(cls, ignore_shutdown=False) -> None:
        """

        :return:
        """
        cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
        with cls.lock:
            if cls.tmdb_unsaved_changes == 0 and cls.library_unsaved_changes == 0:
                return
            if cls.tmdb_unsaved_changes > 0:
                entries_to_delete = []
                for key, entry in cls._all_missing_tmdb_trailers.items():
                    elapsed_time = datetime.date.today() - entry['timestamp']
                    elapsed_days = elapsed_time.days
                    if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                        if entry[Movie.UNIQUE_ID_TMDB] in cls._all_missing_tmdb_trailers:
                            entries_to_delete.append(
                                entry[Movie.UNIQUE_ID_TMDB])
                for entry_to_delete in entries_to_delete:
                    del cls._all_missing_tmdb_trailers[entry_to_delete]

                cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
                try:
                    path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'missing_tmdb_trailers.json')
                    temp_path = os.path.join(
                        Settings.get_remote_db_cache_path(),
                        'index', 'missing_tmdb_trailers.json.temp')

                    path = xbmcvfs.validatePath(path)
                    temp_path = xbmcvfs.validatePath(temp_path)

                    parent_dir, file_name = os.path.split(temp_path)
                    if not os.path.exists(parent_dir):
                        DiskUtils.create_path_if_needed(parent_dir)
                    with io.open(temp_path, mode='wt', newline=None,
                                 encoding='utf-8', ) as cacheFile:
                        json_text = json.dumps(
                            cls._all_missing_tmdb_trailers,
                            ensure_ascii=False,
                            default=TrailerUnavailableCache.handler,
                            indent=3, sort_keys=True)
                        cacheFile.write(json_text)
                        cacheFile.flush()
                    try:
                        os.replace(temp_path, path)
                    except OSError:
                        cls._logger.exception(f'Failed to replace index of movies'
                                              f' missing trailers cache: {path}')

                    cls.tmdb_last_save = datetime.datetime.now()
                    cls.tmdb_unsaved_changes = 0
                except AbortException:
                    reraise(*sys.exc_info())
                except IOError as e:
                    cls._logger.exception('')
                except Exception as e:
                    cls._logger.exception('')
                finally:
                    try:
                        os.remove(temp_path)
                    except Exception:
                        pass

            cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
            if cls.library_unsaved_changes > 0:
                entries_to_delete = []

                for key, entry in cls._all_missing_library_trailers.items():
                    elapsed_time = datetime.date.today() - entry['timestamp']
                    elapsed_days = elapsed_time.days
                    if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                        if entry[Movie.MOVIEID] in cls._all_missing_library_trailers:
                            entries_to_delete.append(entry[Movie.MOVIEID])

                for entry_to_delete in entries_to_delete:
                    del cls._all_missing_library_trailers[entry_to_delete]

                cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
                try:

                    path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'missing_library_trailers.json')
                    temp_path = os.path.join(
                        Settings.get_remote_db_cache_path(),
                        'index', 'missing_library_trailers.json.temp')
                    path = xbmcvfs.validatePath(path)
                    temp_path = xbmcvfs.validatePath(temp_path)

                    parent_dir, file_name = os.path.split(path)
                    if not os.path.exists(parent_dir):
                        DiskUtils.create_path_if_needed(parent_dir)

                    with io.open(temp_path, mode='wt', newline=None,
                                 encoding='utf-8', ) as cacheFile:

                        # TODO: Need ability to interrupt on ABORT. An object
                        # handler is not a valid argument to dumps.

                        json_text = json.dumps(
                            cls._all_missing_library_trailers,
                            ensure_ascii=False,
                            default=TrailerUnavailableCache.handler,
                            indent=3, sort_keys=True)
                        cacheFile.write(json_text)
                        cacheFile.flush()

                    try:
                        os.replace(temp_path, path)
                    except OSError:
                        cls._logger.exception(f'Failed to replace missing trailer'
                                              f' information cache: {path}')

                    cls.library_last_save = datetime.datetime.now()
                    cls.library_unsaved_changes = 0
                except AbortException:
                    reraise(*sys.exc_info())
                except IOError as e:
                    cls._logger.exception('')
                except Exception as e:
                    cls._logger.exception('')
                finally:
                    try:
                        os.remove(temp_path)
                    except Exception:
                        pass
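
# Before writing, save_cache above purges entries whose 'timestamp' is older
# than the configured limit so the missing-trailer index cannot grow without
# bound. The purge step in isolation (field names follow the code above):

import datetime


def purge_expired(entries, max_age_days):
    today = datetime.date.today()
    expired = [key for key, entry in entries.items()
               if (today - entry['timestamp']).days > max_age_days]
    for key in expired:
        del entries[key]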