    @classmethod
    def load_unprocessed_movie_cache(cls):
        # type: () -> None
        """
        Load the cache of TMDb movies awaiting further discovery.

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_unprocessed_movies.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)
            with CacheIndex.lock:
                if os.path.exists(path):
                    with io.open(path,
                                 mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._unprocessed_movies = json.load(
                            cacheFile,
                            object_hook=CacheIndex.datetime_parser)
                        cls.last_saved_movie_timestamp = None
                        cls._unprocessed_movie_changes = 0
                else:
                    cls._unprocessed_movies = {}
            Monitor.throw_exception_if_abort_requested()
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
            except JSONDecodeError as e:
                # Cache file is corrupt; delete it so it can be rebuilt.
                os.remove(path)
        except Exception as e:
            CacheIndex.logger().exception('')
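
    # The json calls above pass CacheIndex.datetime_parser (object_hook) and
    # CacheIndex.handler (default), neither of which appears among these
    # examples. A minimal sketch of what such a pair could look like (the
    # tagged 'class': 'datetime.datetime' format is an assumption, not the
    # addon's actual wire format):
    @classmethod
    def handler(cls, obj):
        # Serialize datetime values as tagged ISO-8601 strings.
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return {'class': 'datetime.datetime', 'value': obj.isoformat()}
        raise TypeError(f'Not JSON serializable: {type(obj)}')

    @classmethod
    def datetime_parser(cls, dct):
        # Reverse of handler: rebuild datetime values from tagged dicts.
        if dct.get('class') == 'datetime.datetime':
            return datetime.datetime.fromisoformat(dct['value'])
        return dct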
    @classmethod
    def save_cache(cls):
        # type: () -> None
        """
        Write the TMDb discovery parameters to disk.

        :return:
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        with CacheIndex.lock:
            try:
                Monitor.throw_exception_if_abort_requested()

                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:
                    json_text = cls.to_json()
                    cacheFile.write(json_text)
                    cacheFile.flush()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')
    @classmethod
    def load_cache(cls):
        # type: () -> None
        """
        Load the caches of TMDb and library movies known to lack trailers.

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(),
                            'index', 'missing_tmdb_trailers.json')
        path = xbmcvfs.validatePath(path)
        cls.abort_on_shutdown()
        with cls.lock:
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                if os.path.exists(path):
                    with io.open(path, mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._all_missing_tmdb_trailers = json.load(
                            cacheFile,
                            object_hook=TrailerUnavailableCache.datetime_parser)
                        size = len(cls._all_missing_tmdb_trailers)
                        Statistics.missing_tmdb_trailers_initial_size(size)
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                cls._logger.exception('')

            cls.abort_on_shutdown()
            path = os.path.join(Settings.get_remote_db_cache_path(),
                                'index', 'missing_library_trailers.json')
            path = xbmcvfs.validatePath(path)
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)
                if os.path.exists(path):
                    with io.open(path, mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._all_missing_library_trailers = json.load(
                            cacheFile,
                            object_hook=TrailerUnavailableCache.datetime_parser)
                        size = len(cls._all_missing_library_trailers)
                        Statistics.missing_library_trailers_initial_size(size)
            except AbortException:
                reraise(*sys.exc_info())
            except JSONDecodeError as e:
                os.remove(path)
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

    @classmethod
    def save_unprocessed_movie_cache(cls, flush=False):
        # type: (bool) -> None
        """
        :param flush: When True, write the cache regardless of the
                      change-count and elapsed-time thresholds.
        :return:
        """

        #  TODO: Review locking; cls.lock is held for the whole save below.
        with cls.lock:
            if cls._unprocessed_movie_changes == 0:
                return

            if (not flush
                    # Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and (cls._unprocessed_movie_changes < 10)
                    and (datetime.datetime.now()
                         - cls._last_saved_unprocessed_movie_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                'tmdb_unprocessed_movies.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            try:
                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:

                    # TODO: Need ability to interrupt when ABORT; json.dumps
                    # offers no per-object hook for abort checks (see the
                    # iterencode sketch after this method).

                    json_text = json.dumps(cls.get_unprocessed_movies(),
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    cls._last_saved_unprocessed_movie_timestamp = \
                        datetime.datetime.now()
                    cls._unprocessed_movie_changes = 0

                    Monitor.throw_exception_if_abort_requested()

            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls.logger().exception('')
            except Exception as e:
                cls.logger().exception('')
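
# The TODO above notes that json.dumps gives no chance to honor an abort
# request while a large cache is being encoded. One workaround (a sketch,
# not the addon's implementation) is json.JSONEncoder.iterencode, which
# yields the encoded text in chunks so an abort check can run between writes:

import json

def dump_json_with_abort_checks(obj, cache_file, abort_check, default=None):
    # abort_check is any zero-argument callable that raises on shutdown,
    # e.g. Monitor.throw_exception_if_abort_requested.
    encoder = json.JSONEncoder(ensure_ascii=False, default=default,
                               indent=3, sort_keys=True)
    for chunk in encoder.iterencode(obj):
        abort_check()
        cache_file.write(chunk)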
    def save_search_pages(self, flush=False):
        # type: (bool) -> None
        """
        Write the cached search pages to disk via a temporary file.

        :param flush: When True, write regardless of the change-count
                      and elapsed-time thresholds.
        :return:
        """
        with CacheIndex.lock:
            if (not flush and self.get_number_of_unsaved_changes() <
                    Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and self.get_time_since_last_save() <
                    datetime.timedelta(minutes=5)):
                return
            saved_pages = len(self._cached_page_by_key)
            path = xbmcvfs.validatePath(self._path)
            temp_path = xbmcvfs.validatePath(self._temp_path)
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                with io.open(temp_path,
                             mode='wt',
                             newline=None,
                             encoding='utf-8') as cacheFile:
                    json_dict = self.to_json()

                    # TODO: Need ability to interrupt when ABORT; json.dumps
                    # offers no per-object hook for abort checks.

                    json_text = json.dumps(json_dict,
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    self._number_of_unsaved_changes = 0
                    self._time_of_last_save = datetime.datetime.now()

                try:
                    os.replace(temp_path, path)
                except OSError:
                    self._logger.exception(
                        f'Failed to replace movie information'
                        f' planned for download: {path}')
                Monitor.throw_exception_if_abort_requested()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                self._logger.exception('')
            except Exception as e:
                self._logger.exception('')

        self._logger.debug_verbose("Entries Saved: ", saved_pages)
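
# save_search_pages writes to a temporary file and then os.replace()s it
# over the real path, so a crash mid-write never corrupts the existing
# cache. The same pattern as a standalone helper (a sketch;
# atomic_write_json is not part of the addon):

import io
import json
import os

def atomic_write_json(path, obj, default=None):
    temp_path = path + '.tmp'
    with io.open(temp_path, mode='wt', newline=None,
                 encoding='utf-8') as cache_file:
        cache_file.write(json.dumps(obj, ensure_ascii=False, default=default,
                                    indent=3, sort_keys=True))
        cache_file.flush()
    # Atomic on POSIX; also overwrites an existing target on Windows.
    os.replace(temp_path, path)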
    def __init__(self, *args, **kwargs):
        # type: (*str, **Any) -> None
        """

        :param args:
        :param kwargs:
        """
        self._logger = module_logger.getChild(self.__class__.__name__)
        self._file = None

        if len(args) == 0:
            self._logger.error('Playlist constructor requires an argument')
            return

        playlist_name = args[0]
        self._playlist_name = playlist_name
        append = kwargs.get('append', True)
        rotate = kwargs.get('rotate', False)
        # Exactly one of append or rotate may be requested.
        assert append ^ rotate
        self.playlist_format = kwargs.get('playlist_format', False)

        if self.playlist_format:
            self.path = Constants.PLAYLIST_PATH + '/' + \
                playlist_name + Playlist.SMART_PLAYLIST_SUFFIX
        else:
            self.path = Constants.FRONTEND_DATA_PATH + '/' + \
                playlist_name  # + Playlist.PLAYLIST_SUFFIX
        self.path = xbmcvfs.validatePath(self.path)
        self.path = xbmcvfs.translatePath(self.path)
        DiskUtils.create_path_if_needed(Constants.FRONTEND_DATA_PATH)
        if not self.playlist_format:
            self.mode = 'at' if append else 'wt'
            if rotate:
                try:
                    save_path = (Constants.FRONTEND_DATA_PATH + '/'
                                 + playlist_name + '.old')
                    save_path = xbmcvfs.validatePath(save_path)
                    if os.path.exists(self.path):
                        os.replace(self.path, save_path)
                except Exception as e:
                    self._logger.exception('')

            try:
                self._file = io.open(self.path,
                                     mode=self.mode,
                                     buffering=1,
                                     newline=None,
                                     encoding='utf-8')
            except Exception as e:
                self._logger.exception('')
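
    # Usage sketch for the constructor above ('trailer_history' is an
    # illustrative name, not necessarily one the addon uses). The assert
    # enforces that exactly one of append/rotate is set:
    #
    #   history = Playlist('trailer_history', append=True)
    #   fresh = Playlist('trailer_history', append=False, rotate=True)
    #   # rotate=True first moves any existing file to 'trailer_history.old'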
    @classmethod
    def read_cached_value_from_disk(cls):
        # type: () -> Union[CacheParameters, None]
        """
        Read previously saved discovery parameters from disk.

        Returns None when the cache file is missing, unreadable or expired.

        :return:
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        saved_preferences = None
        with CacheIndex.lock:
            try:
                if not os.access(path, os.R_OK):
                    cls._logger.error(
                        Messages.get_formatted_msg(Messages.CAN_NOT_READ_FILE,
                                                   path))
                    return None

                file_mod_time = datetime.datetime.fromtimestamp(
                    os.path.getmtime(path))
                now = datetime.datetime.now()
                expiration_time = now - datetime.timedelta(
                    days=Settings.get_expire_remote_db_cache_entry_days())

                if file_mod_time < expiration_time:
                    if cls._logger.isEnabledFor(LazyLogger.DEBUG):
                        cls._logger.debug('cache file EXPIRED for:', path)
                    return None

                Monitor.throw_exception_if_abort_requested()

                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    saved_preferences = json.load(cacheFile)
                    saved_preferences = CacheParameters(saved_preferences)
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

        return saved_preferences
    @classmethod
    def save_found_trailer_ids_cache(cls, flush=False):
        # type: (bool) -> None
        """
        :param flush: When True, write the cache regardless of the
                      change-count and elapsed-time thresholds.
        :return:
        """
        with cls.lock:
            if cls._unsaved_trailer_changes == 0:
                return

            if (not flush
                    and (cls._unsaved_trailer_changes
                         < Constants.TRAILER_CACHE_FLUSH_UPDATES)
                    and (datetime.datetime.now() - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                'tmdb_found_trailers.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            try:
                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:
                    found_trailer_id_list = list(
                        cls._found_tmdb_trailer_ids)
                    json_text = json.dumps(found_trailer_id_list,
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    cls._last_saved_trailer_timestamp = \
                        datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                Monitor.throw_exception_if_abort_requested()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                CacheIndex.logger().exception('')
            except Exception as e:
                CacheIndex.logger().exception('')
    @classmethod
    def write_tmdb_cache_json(cls, movie_id: Union[str, int], source: str,
                              movie: MovieType) -> None:
        """
            Write the given movie information into the cache as JSON

            Due to the small size of these files, will not check for
            AbortException during write nor save old version of file.
        """
        try:
            if source is None or source not in Movie.LIB_TMDB_ITUNES_SOURCES:
                cls._logger.debug('Invalid source:', source)
                return None
            movie_id = str(movie_id)
            path = Cache.get_json_cache_file_path_for_movie_id(
                movie_id, source)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path) and not os.access(path, os.W_OK):
                messages = Messages
                cls._logger.error(
                    messages.get_msg(Messages.CAN_NOT_WRITE_FILE) % path)
                return None
            temp_movie = {}
            for key in Movie.TMDB_ENTRY_FIELDS:
                temp_movie[key] = movie.get(key)

            Monitor.throw_exception_if_abort_requested()
            with io.open(
                    path,
                    mode='wt',
                    newline=None,
                    encoding='utf-8',
            ) as cacheFile:
                json_text = json.dumps(temp_movie,
                                       ensure_ascii=False,
                                       indent=3,
                                       sort_keys=True)
                cacheFile.write(json_text)
                cacheFile.flush()
                del temp_movie
        except AbortException:
            reraise(*sys.exc_info())
        except Exception as e:
            cls._logger.exception('')
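
    # A read-back counterpart to write_tmdb_cache_json above. This is a
    # hypothetical sketch, not an existing method: it assumes the same Cache
    # path helper and module imports (io, json, os) as the surrounding code.
    @classmethod
    def read_tmdb_cache_json(cls, movie_id, source):
        # type: (Union[str, int], str) -> Union[MovieType, None]
        """Return the cached movie dict, or None if absent or unreadable."""
        try:
            path = Cache.get_json_cache_file_path_for_movie_id(
                str(movie_id), source)
            if not os.path.exists(path):
                return None
            with io.open(path, mode='rt', newline=None,
                         encoding='utf-8') as cacheFile:
                return json.load(cacheFile)
        except Exception as e:
            cls._logger.exception('')
            return None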
    def load_search_pages(self):
        # type: () -> None
        """
        Load the cached search pages from disk, unless already loaded.

        :return:
        """
        if self._cached_page_by_key is not None:
            return

        path = xbmcvfs.validatePath(self._path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path):
                Monitor.throw_exception_if_abort_requested()
                with CacheIndex.lock, io.open(path,
                                              mode='rt',
                                              newline=None,
                                              encoding='utf-8') as cacheFile:
                    encoded_values = json.load(
                        cacheFile,
                        object_hook=CacheIndex.datetime_parser)
                    loaded_cached_pages_data = self.from_json(encoded_values)
                    self._cached_page_by_key = loaded_cached_pages_data._cached_page_by_key
            else:
                self._cached_page_by_key: Dict[str, CachedPage] = dict()

        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            self._logger.exception('')
        except JSONDecodeError as e:
            os.remove(path)
            self._cached_page_by_key = dict()
        except Exception as e:
            self._logger.exception('')

        self._logger.debug_verbose("Loaded entries:",
                                   len(self._cached_page_by_key))
        self._time_of_last_save = datetime.datetime.now()
    @classmethod
    def load_cache(cls) -> None:
        """
        Load the TFH trailer cache from disk; when no cache file exists,
        create an empty cache marked as expired.

        :return:
        """
        with cls.lock:
            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')
                path = xbmcvfs.validatePath(path)

                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                if os.path.exists(path):
                    with io.open(path, mode='rt', newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._cached_trailers = json.load(
                            cacheFile,
                            # object_hook=TFHCache.abort_checker,
                        )
                        cls.last_saved_movie_timestamp = None
                        cls._unsaved_trailer_changes = 0
                        cls.load_creation_date()
                else:
                    cls._cached_trailers = dict()
                    # Set to an old time so that cache is expired
                    cls._time_of_index_creation = datetime.datetime(2000, 1, 1)
                    cls._index_complete = False

            except IOError as e:
                TFHCache.logger().exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
        return
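
    # load_cache above calls cls.load_creation_date(), which is not among
    # these examples. Given the dummy INDEX_CREATION_DATE entry that
    # save_cache stores, a plausible sketch (an assumption, not the actual
    # implementation) is:
    @classmethod
    def load_creation_date(cls):
        entry = cls._cached_trailers.pop(TFHCache.INDEX_CREATION_DATE, None)
        if entry is None:
            # No marker entry; treat the index as stale.
            cls._time_of_index_creation = datetime.datetime(2000, 1, 1)
            return
        creation_date_str = entry[TFHCache.INDEX_CREATION_DATE]
        try:
            cls._time_of_index_creation = datetime.datetime.strptime(
                creation_date_str, '%Y:%m:%d')
        except ValueError:
            # Incomplete-index marker or unexpected date format.
            cls._time_of_index_creation = datetime.datetime(2000, 1, 1)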
    @classmethod
    def load_found_trailer_cache(cls) -> None:
        """
        Load the cache of TMDb ids for which trailers have been found.

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_found_trailers.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path):
                with CacheIndex.lock, io.open(path,
                                              mode='rt',
                                              newline=None,
                                              encoding='utf-8') as cacheFile:
                    found_trailers_list = json.load(
                        cacheFile,
                        object_hook=CacheIndex.datetime_parser)
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._found_tmdb_trailer_ids: Set[int] = set(
                        found_trailers_list)
                    cls._unsaved_trailer_changes = 0
            else:
                cls._found_tmdb_trailer_ids: Set[int] = set()

            Monitor.throw_exception_if_abort_requested()
            cls.remove_unprocessed_movies(list(cls._found_tmdb_trailer_ids))
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
        except JSONDecodeError as e:
            os.remove(path)
        except Exception as e:
            CacheIndex.logger().exception('')
    @classmethod
    def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
        """
        :param flush: When True, write the cache regardless of the
                      change-count and elapsed-time thresholds.
        :param complete: When True, record the index as fully discovered.
        :return:
        """
        with cls.lock:
            if (not flush
                    and (cls._unsaved_trailer_changes < 50)
                    and (datetime.datetime.now() - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')

                path = xbmcvfs.validatePath(path)
                tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'tfh_trailers.json.tmp')

                tmp_path = xbmcvfs.validatePath(tmp_path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                # Write a fresh temp file; os.replace() below swaps it in.
                with io.open(tmp_path, mode='wt', newline=None,
                             encoding='utf-8', ) as cacheFile:
                    if complete:
                        creation_date_str = datetime.datetime.strftime(
                            cls._time_of_index_creation, '%Y:%m:%d')
                    else:
                        creation_date_str = TFHCache.INCOMPLETE_CREATION_DATE_STR
                    cls._cached_trailers[TFHCache.INDEX_CREATION_DATE] = {
                        TFHCache.INDEX_CREATION_DATE: creation_date_str,
                    }

                    json_text = json.dumps(cls._cached_trailers,
                                           ensure_ascii=False,
                                           default=TFHCache.abort_checker,
                                           indent=3, sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                    # Get rid of dummy entry
                    del cls._cached_trailers[TFHCache.INDEX_CREATION_DATE]
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                try:
                    os.replace(tmp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace TFH trailer'
                                          f' cache: {path}')
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                TFHCache.logger().exception('')
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
    @classmethod
    def save_cache(cls, ignore_shutdown=False) -> None:
        """
        Write the caches of TMDb and library movies known to lack
        trailers to disk, pruning expired entries first.

        :return:
        """
        cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
        with cls.lock:
            if cls.tmdb_unsaved_changes == 0 and cls.library_unsaved_changes == 0:
                return
            if cls.tmdb_unsaved_changes > 0:
                entries_to_delete = []
                for key, entry in cls._all_missing_tmdb_trailers.items():
                    elapsed_time = datetime.date.today() - entry['timestamp']
                    elapsed_days = elapsed_time.days
                    if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                        if entry[Movie.UNIQUE_ID_TMDB] in cls._all_missing_tmdb_trailers:
                            entries_to_delete.append(
                                entry[Movie.UNIQUE_ID_TMDB])
                for entry_to_delete in entries_to_delete:
                    del cls._all_missing_tmdb_trailers[entry_to_delete]

                cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
                try:
                    path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'missing_tmdb_trailers.json')
                    temp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                             'index',
                                             'missing_tmdb_trailers.json.temp')

                    path = xbmcvfs.validatePath(path)
                    temp_path = xbmcvfs.validatePath(temp_path)

                    parent_dir, file_name = os.path.split(temp_path)
                    if not os.path.exists(parent_dir):
                        DiskUtils.create_path_if_needed(parent_dir)
                    with io.open(temp_path, mode='wt', newline=None,
                                 encoding='utf-8', ) as cacheFile:
                        json_text = \
                            json.dumps(cls._all_missing_tmdb_trailers,
                                       ensure_ascii=False,
                                       default=TrailerUnavailableCache.handler,
                                       indent=3, sort_keys=True)
                        cacheFile.write(json_text)
                        cacheFile.flush()
                    try:
                        os.replace(temp_path, path)
                    except OSError:
                        cls._logger.exception(f'Failed to replace index of movies'
                                              f' missing trailers cache: {path}')

                    cls.tmdb_last_save = datetime.datetime.now()
                    cls.tmdb_unsaved_changes = 0
                except AbortException:
                    reraise(*sys.exc_info())
                except IOError as e:
                    cls._logger.exception('')
                except Exception as e:
                    cls._logger.exception('')
                finally:
                    try:
                        os.remove(temp_path)
                    except Exception:
                        pass

            cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
            if cls.library_unsaved_changes > 0:
                entries_to_delete = []

                for key, entry in cls._all_missing_library_trailers.items():
                    elapsed_time = datetime.date.today() - entry['timestamp']
                    elapsed_days = elapsed_time.days
                    if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                        if entry[Movie.MOVIEID] in cls._all_missing_library_trailers:
                            entries_to_delete.append(entry[Movie.MOVIEID])

                for entry_to_delete in entries_to_delete:
                    del cls._all_missing_library_trailers[entry_to_delete]

                cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
                try:
                    path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'missing_library_trailers.json')
                    temp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                             'index',
                                             'missing_library_trailers.json.temp')
                    path = xbmcvfs.validatePath(path)
                    temp_path = xbmcvfs.validatePath(temp_path)

                    parent_dir, file_name = os.path.split(path)
                    if not os.path.exists(parent_dir):
                        DiskUtils.create_path_if_needed(parent_dir)

                    with io.open(temp_path, mode='wt', newline=None,
                                 encoding='utf-8', ) as cacheFile:

                        # TODO: Need ability to interrupt when ABORT;
                        # json.dumps offers no per-object hook for abort checks.

                        json_text = \
                            json.dumps(cls._all_missing_library_trailers,
                                       ensure_ascii=False,
                                       default=TrailerUnavailableCache.handler,
                                       indent=3, sort_keys=True)
                        cacheFile.write(json_text)
                        cacheFile.flush()

                    try:
                        os.replace(temp_path, path)
                    except OSError:
                        cls._logger.exception(f'Failed to replace missing trailer'
                                              f' information cache: {path}')

                    cls.library_last_save = datetime.datetime.now()
                    cls.library_unsaved_changes = 0
                except AbortException:
                    reraise(*sys.exc_info())
                except IOError as e:
                    cls._logger.exception('')
                except Exception as e:
                    cls._logger.exception('')
                finally:
                    try:
                        os.remove(temp_path)
                    except Exception:
                        pass
    @classmethod
    def save_unprocessed_movie_cache(cls, flush: bool = False) -> None:
        """
        :param flush: When True, write the cache regardless of the
                      change-count and elapsed-time thresholds.
        :return:
        """

        #  TODO: Review locking; cls.lock is held for the whole save below.
        with cls.lock:
            if cls._unprocessed_movie_changes == 0:
                return

            if (not flush
                    # Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and (cls._unprocessed_movie_changes < 10)
                    and (datetime.datetime.now()
                         - cls._last_saved_unprocessed_movie_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tmdb_unprocessed_movies.json')
                path = xbmcvfs.validatePath(path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                # Don't save unneeded fields. Takes up disk and RAM

                temp_entries = {}
                for tmdb_id, entry in cls.get_unprocessed_movies().items():
                    temp_entry = {}
                    for key in Movie.TMDB_PAGE_DATA_FIELDS:
                        temp_entry[key] = entry[key]
                    temp_entries[tmdb_id] = temp_entry

                # TODO: Need ability to interrupt when ABORT; json.dumps
                # offers no per-object hook for abort checks.
                json_text = json.dumps(temp_entries,
                                       ensure_ascii=False,
                                       default=CacheIndex.handler,
                                       indent=3,
                                       sort_keys=True)

                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cache_file:

                    cache_file.write(json_text)
                    cache_file.flush()
                    cls._last_saved_unprocessed_movie_timestamp = \
                        datetime.datetime.now()
                    cls._unprocessed_movie_changes = 0

                    Monitor.throw_exception_if_abort_requested()

                del json_text
                del temp_entries
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls.logger().exception('')
            except Exception as e:
                cls.logger().exception('')
    @classmethod
    def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
        """
        :param flush: When True, write the cache regardless of the
                      change-count and elapsed-time thresholds.
        :param complete: When True, record the index as fully discovered.
        :return:

        Typical json entry
        Most fields mirror Kodi/TMDb data; the "rts."-prefixed fields are
        artifacts added by this addon.

            "BZwDpOQNgpw": {
              "adult": false,
              "cast": [],
              "fanart": "default_fanart",
              "genre": [],
              "mpaa": "NR",
              "normalized_trailer": "/home/fbacher/.kodi/userdata/addon_data/script.video.randomtrailers/cache/hB/tfh_BZwDpOQNgpw_normalized_Larry Karaszewski on SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3) (2017)-movie.mkv",
              "original_language": "",
              "plot": "But wait! There's more! TFH has a podcast! \n\nIt's THE MOVIES THAT MADE ME, where you can join Oscar-nominated screenwriter Josh Olson and his, ummm, \"co-host\" Joe Dante in conversation with filmmakers, comedians, and all-around interesting people about the movies that made them who they are. Check it out now, and please subscribe wherever podcasts can be found.\n\nBut wait! There's more! TFH has a podcast! \n\nIt's THE MOVIES THAT MADE ME, where you can join Oscar-nominated screenwriter Josh Olson and his, ummm, \"co-host\" Joe Dante in conversation with filmmakers, comedians, and all-around interesting people about the movies that made them who they are. Check it out now, and please subscribe wherever podcasts can be found.\n\niTunes: http://itunes.trailersfromhell.com\nSpotify: http://spotify.trailersfromhell.com\nLibsyn: http://podcast.trailersfromhell.com\nGoogle Play: http://googleplay.trailersfromhell.com\nRSS: http://goo.gl/3faeG7\n\nAs always, you can find more commentary, more reviews, more podcasts, and more deep-dives into the films you don't know you love yet over at the Trailers From Hell mothership: \n\nhttp://www.trailersfromhell.com",
              "rating": 4.8974357,
              "genre": [],
              "rts.actors": "",
              "rts.certification": "Unrated",
              "rts.certificationImage": "ratings/us/unrated.png",
              "rts.directors": "",
              "rts.genres": "",
              "rts.runtime": "143 [B]Minutes[/B] - ",
              "rts.studios": "",
              "rts.tfhId": "BZwDpOQNgpw",
              "rts.tfh_title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3)",
              "rts.title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3) (2017) - TFH ",
              "rts.tmdb_id_not_found": true,
              "rts.voiced.actors": [],
              "rts.voiced.directors": [],
              "rts.voiced.studios": [],
              "rts.voiced.writers": [],
              "rts.writers": "",
              "rts.youtube.trailers_in_index": 1449,
              "rts.youtube_index": 204,
              "runtime": 8580,
              "source": "TFH",
              "studio": [
                 []
              ],
              "tags": [
                 "smokey and the bandit 3",
                 "larry karaszewski",
                 "jackie gleason"
              ],
              "thumbnail": "https://i.ytimg.com/vi_webp/BZwDpOQNgpw/maxresdefault.webp",
              "title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3)",
              "trailer": "https://youtu.be/BZwDpOQNgpw",
              "trailerDiscoveryState": "04_discoveryReadyToDisplay",
              "trailerPlayed": true,
              "trailerType": "default_trailerType",
              "uniqueid": {
                 "tmdb": "None"
              },
              "writer": [
                 []
              ],
              "year": 2017
           }

        """
        with cls.lock:
            if (not flush
                    and (cls._unsaved_trailer_changes < 50)
                    and (datetime.datetime.now() - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')

                path = xbmcvfs.validatePath(path)
                tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'tfh_trailers.json.tmp')

                tmp_path = xbmcvfs.validatePath(tmp_path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                # Write a fresh temp file; os.replace() below swaps it in.
                with io.open(tmp_path, mode='wt', newline=None,
                             encoding='utf-8', ) as cacheFile:

                    if complete:
                        cls.set_creation_date()
                        # Set to True when complete, but don't set to False
                        # when not complete.

                        cls._cache_complete = True

                    creation_date_str = datetime.datetime.strftime(
                        cls._time_of_index_creation, '%Y:%m:%d')

                    cls._cached_trailers[cls.INDEX_CREATION_DATE] = {
                        cls.INDEX_CREATION_DATE: creation_date_str,
                        cls.CACHE_COMPLETE: cls._cache_complete
                    }

                    json_text = json.dumps(cls._cached_trailers,
                                           ensure_ascii=False,
                                           default=TFHCache.abort_checker,
                                           indent=3, sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                    # Get rid of dummy entry from local dict
                    del cls._cached_trailers[cls.INDEX_CREATION_DATE]
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                try:
                    os.replace(tmp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace TFH trailer'
                                          f' cache: {path}')
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                TFHCache.logger().exception('')
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
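
# Given the JSON layout documented in the docstring above, reading a single
# trailer entry back out of the cache is plain dict access (a sketch; the
# path mirrors the one built above from Settings.get_remote_db_cache_path()):

import io
import json
import os

def read_tfh_entry(cache_dir, youtube_id):
    path = os.path.join(cache_dir, 'index', 'tfh_trailers.json')
    if not os.path.exists(path):
        return None
    with io.open(path, mode='rt', newline=None, encoding='utf-8') as f:
        cached_trailers = json.load(f)
    # Keys are YouTube ids, e.g. 'BZwDpOQNgpw' in the docstring example.
    return cached_trailers.get(youtube_id)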