def __init__(
            self,
            key='',  # type: str
            total_pages=0,  # type: int
            query_by_year=False  # type: bool
    ):
        # type: (...) -> None
        """

        :param key:
        :param total_pages:
        """
        self._logger = module_logger.getChild(type(self).__name__)
        self._number_of_unsaved_changes = 0
        self._time_of_last_save = None
        self._key = key
        self._total_pages = total_pages
        self._total_pages_by_year = {}
        self._query_by_year = query_by_year
        self._years_to_query = None
        self._search_pages_configured = False
        self._logger.debug('remote_db_cache_path:',
                           Settings.get_remote_db_cache_path())
        self._path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                  f'tmdb_{key}.json')
        self._temp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                       'index', f'tmdb_{key}.json.tmp')
        self._cached_page_by_key: Optional[Dict[str, CachedPage]] = None
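The paired `_path` / `_temp_path` fields support the write-to-temp-then-rename pattern that the save methods below rely on. A minimal sketch of that pattern, with illustrative names (not the add-on's API):

import io
import json
import os


def atomic_json_save(path: str, data: dict) -> None:
    # Write to a sibling .tmp file first so a crash mid-write cannot
    # corrupt the real cache file, then swap it into place.
    temp_path = path + '.tmp'
    with io.open(temp_path, mode='wt', newline=None,
                 encoding='utf-8') as cache_file:
        cache_file.write(json.dumps(data, ensure_ascii=False,
                                    indent=3, sort_keys=True))
        cache_file.flush()
    # os.replace overwrites the destination atomically (Python 3.3+).
    os.replace(temp_path, path)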
Example #2
    @classmethod
    def load_cache(cls):
        # type: () -> None
        """

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(),
                            'index', 'missing_tmdb_trailers.json')
        path = xbmcvfs.validatePath(path)
        cls.abort_on_shutdown()
        with cls.lock:
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                if os.path.exists(path):
                    with io.open(path, mode='rt', newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._all_missing_tmdb_trailers = json.load(
                            cacheFile,
                            object_hook=TrailerUnavailableCache.datetime_parser)
                        size = len(cls._all_missing_tmdb_trailers)
                        Statistics.missing_tmdb_trailers_initial_size(size)
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                cls._logger.exception('')

            cls.abort_on_shutdown()
            path = os.path.join(Settings.get_remote_db_cache_path(),
                                'index', 'missing_library_trailers.json')
            path = xbmcvfs.validatePath(path)
            try:
                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)
                if os.path.exists(path):
                    with io.open(path, mode='rt', newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._all_missing_library_trailers = json.load(
                            cacheFile,
                            object_hook=TrailerUnavailableCache.datetime_parser)
                        size = len(cls._all_missing_library_trailers)
                        Statistics.missing_library_trailers_initial_size(size)
            except AbortException:
                reraise(*sys.exc_info())
            except JSONDecodeError as e:
                os.remove(path)
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

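The loaders pass `object_hook=...datetime_parser` and the savers pass `default=...handler` so timestamps survive the JSON round trip. Neither helper appears in this excerpt; a plausible pair, assuming timestamps are stored as ISO strings under a 'timestamp' key, might look like this:

import datetime

TIMESTAMP_KEY = 'timestamp'  # assumed marker key, not taken from the add-on


def handler(obj):
    # default= hook for json.dumps: turn dates into ISO strings.
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError(f'Not JSON serializable: {type(obj)}')


def datetime_parser(dct):
    # object_hook for json.load: revive ISO strings into datetimes.
    value = dct.get(TIMESTAMP_KEY)
    if isinstance(value, str):
        try:
            dct[TIMESTAMP_KEY] = datetime.datetime.fromisoformat(value)
        except ValueError:
            pass  # Not a timestamp; leave the value untouched.
    return dct
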
    @classmethod
    def save_cache(cls):
        # type: () -> None
        """

        :return:
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        with CacheIndex.lock:
            try:
                Monitor.throw_exception_if_abort_requested()

                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:
                    json_text = cls.to_json()
                    cacheFile.write(json_text)
                    cacheFile.flush()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

    @classmethod
    def load_unprocessed_movie_cache(cls):
        # type: () -> None
        """

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_unprocessed_movies.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)
            with CacheIndex.lock:
                if os.path.exists(path):
                    with io.open(path,
                                 mode='rt',
                                 newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._unprocessed_movies = json.load(
                            cacheFile,
                            object_hook=CacheIndex.datetime_parser)
                        cls.last_saved_movie_timestamp = None
                        cls._unprocessed_movie_changes = 0
                else:
                    cls._unprocessed_movies = {}
            Monitor.throw_exception_if_abort_requested()
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
        except JSONDecodeError as e:
            os.remove(path)
        except Exception as e:
            CacheIndex.logger().exception('')

    @classmethod
    def save_unprocessed_movie_cache(cls, flush=False):
        # type: (bool) -> None
        """
        :param flush:
        :return:
        """

        #  TODO: Should use lock here, review locking
        with cls.lock:
            if cls._unprocessed_movie_changes == 0:
                return

            if (not flush
                    # Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and cls._unprocessed_movie_changes < 10
                    and (datetime.datetime.now()
                         - cls._last_saved_unprocessed_movie_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                'tmdb_unprocessed_movies.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            try:
                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:

                    # TODO: Need ability to interrupt when ABORT. Object_handler
                    # not a valid arg to dumps

                    json_text = json.dumps(cls.get_unprocessed_movies(),
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    cls._last_saved_unprocessed_movie_timestamp = \
                        datetime.datetime.now()
                    cls._unprocessed_movie_changes = 0

                    Monitor.throw_exception_if_abort_requested()

            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls.logger().exception('')
            except Exception as e:
                cls.logger().exception('')
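The guard at the top of each save method throttles disk writes: write immediately when `flush` is set, otherwise only once enough changes have accumulated or enough time has passed. The same predicate extracted for clarity (a sketch; the thresholds mirror the constants used above):

import datetime


def should_save(flush: bool, changes: int,
                last_saved: datetime.datetime,
                min_changes: int = 10,
                max_age: datetime.timedelta = datetime.timedelta(minutes=5)
                ) -> bool:
    # Nothing to do when no changes are pending.
    if changes == 0:
        return False
    # A forced save, a large backlog, or a stale last save all trigger a write.
    return (flush or changes >= min_changes
            or datetime.datetime.now() - last_saved >= max_age)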

    def get_stats_for_caches(self) -> Dict[str, UsageData]:
        """
            Get disk usage information for the caches.
            Returns a map of UsageData for each cache. Primarily used
            by garbage collection and reporting.

        :return:
        """
        TRAILER_PATTERN = re.compile(r'^.*-trailer\..*$')
        JSON_PATTERN = re.compile(r'^.*\.json$')
        TFH_PATTERN = re.compile(r'^.*-movie\..*$')

        TRAILER_TYPE = 'trailer'
        JSON_TYPE = 'json'

        # When the Trailer Cache and Data Cache (.json) are the same

        if (Settings.get_downloaded_trailer_cache_path() ==
                Settings.get_remote_db_cache_path()):
            usage_data_map = DiskUtils.get_stats_for_path(
                Settings.get_downloaded_trailer_cache_path(), {
                    'trailer': (TRAILER_PATTERN, TRAILER_TYPE),
                    'json': (JSON_PATTERN, JSON_TYPE),
                    'tfh': (TFH_PATTERN, TRAILER_TYPE)
                })
        else:
            # When Trailer Cache and Data Cache are different directories.

            usage_data_map = DiskUtils.get_stats_for_path(
                Settings.get_downloaded_trailer_cache_path(), {
                    'trailer': (TRAILER_PATTERN, TRAILER_TYPE),
                    'tfh': (TFH_PATTERN, TRAILER_TYPE)
                })
            json_usage_data = DiskUtils.get_stats_for_path(
                Settings.get_remote_db_cache_path(),
                {'json': (JSON_PATTERN, JSON_TYPE)})
            usage_data_map['json'] = json_usage_data['json']

        return usage_data_map
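The `patterns` argument consumed by `DiskUtils.get_stats_for_path` maps a cache name to a (compiled regex, cache type) pair, as the calls above show. For example:

import re

# Cache name -> (filename pattern, cache type), as passed above.
patterns = {
    'trailer': (re.compile(r'^.*-trailer\..*$'), 'trailer'),
    'json': (re.compile(r'^.*\.json$'), 'json'),
}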

    @classmethod
    def read_cached_value_from_disk(cls):
        # type: () -> Union[CacheParameters, None]
        """

        :return:
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        saved_preferences = None
        with CacheIndex.lock:
            try:
                if not os.access(path, os.R_OK):
                    cls._logger.error(
                        Messages.get_formatted_msg(Messages.CAN_NOT_READ_FILE,
                                                   path))
                    return None

                file_mod_time = datetime.datetime.fromtimestamp(
                    os.path.getmtime(path))
                now = datetime.datetime.now()
                expiration_time = now - datetime.timedelta(
                    Settings.get_expire_remote_db_cache_entry_days())

                if file_mod_time < expiration_time:
                    if cls._logger.isEnabledFor(LazyLogger.DEBUG):
                        cls._logger.debug('cache file EXPIRED for:', path)
                    return None

                Monitor.throw_exception_if_abort_requested()

                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    saved_preferences = json.load(cacheFile)
                    saved_preferences = CacheParameters(saved_preferences)
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls._logger.exception('')
            except Exception as e:
                cls._logger.exception('')

        return saved_preferences
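The expiry test in read_cached_value_from_disk compares the file's modification time against a cutoff derived from the retention setting. The same check in isolation (a sketch):

import datetime
import os


def is_expired(path: str, max_age_days: int) -> bool:
    # Expired when the file's mtime falls before now - max_age_days.
    mod_time = datetime.datetime.fromtimestamp(os.path.getmtime(path))
    cutoff = datetime.datetime.now() - datetime.timedelta(days=max_age_days)
    return mod_time < cutoff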

    @classmethod
    def save_found_trailer_ids_cache(cls, flush=False):
        # type: (bool) -> None
        """
        :param flush:
        :return:
        """
        with cls.lock:
            if cls._unsaved_trailer_changes == 0:
                return

            if (not flush and (cls._unsaved_trailer_changes <
                               Constants.TRAILER_CACHE_FLUSH_UPDATES) and
                (datetime.datetime.now() - cls._last_saved_trailer_timestamp) <
                    datetime.timedelta(minutes=5)):
                return

            path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                                'tmdb_found_trailers.json')
            path = xbmcvfs.validatePath(path)
            parent_dir, file_name = os.path.split(path)
            if not os.path.exists(parent_dir):
                DiskUtils.create_path_if_needed(parent_dir)

            try:
                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cacheFile:
                    found_trailer_id_list = list(
                        cls._found_tmdb_trailer_ids)
                    json_text = json.dumps(found_trailer_id_list,
                                           ensure_ascii=False,
                                           default=CacheIndex.handler,
                                           indent=3,
                                           sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()
                    cls._last_saved_trailer_timestamp = \
                        datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                Monitor.throw_exception_if_abort_requested()
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                CacheIndex.logger().exception('')
            except Exception as e:
                CacheIndex.logger().exception('')

    @classmethod
    def load_cache(cls) -> None:
        """
            Load the TFH trailer cache from disk.

        :return:
        """
        with cls.lock:
            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')
                path = xbmcvfs.validatePath(path)

                parent_dir, file_name = os.path.split(path)
                DiskUtils.create_path_if_needed(parent_dir)

                if os.path.exists(path):
                    with io.open(path, mode='rt', newline=None,
                                 encoding='utf-8') as cacheFile:
                        cls._cached_trailers = json.load(
                            cacheFile,
                            # object_hook=TFHCache.abort_checker,
                        )
                        cls.last_saved_movie_timestamp = None
                        cls._unsaved_trailer_changes = 0
                        cls.load_creation_date()
                else:
                    cls._cached_trailers = dict()
                    # Set to an old time so that cache is expired
                    cls._time_of_index_creation = datetime.datetime(2000, 1, 1)
                    cls._index_complete = False

            except IOError as e:
                TFHCache.logger().exception('')
            except JSONDecodeError as e:
                os.remove(path)
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
        return

    @classmethod
    def load_found_trailer_cache(cls) -> None:
        """

        :return:
        """
        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_found_trailers.json')
        path = xbmcvfs.validatePath(path)
        try:
            parent_dir, file_name = os.path.split(path)
            DiskUtils.create_path_if_needed(parent_dir)

            if os.path.exists(path):
                with CacheIndex.lock, io.open(path,
                                              mode='rt',
                                              newline=None,
                                              encoding='utf-8') as cacheFile:
                    found_trailers_list = json.load(
                        cacheFile,
                        object_hook=CacheIndex.datetime_parser)
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._found_tmdb_trailer_ids: Set[int] = set(
                        found_trailers_list)
                    cls._unsaved_trailer_changes = 0
            else:
                cls._found_tmdb_trailer_ids: Set[int] = set()

            Monitor.throw_exception_if_abort_requested()
            cls.remove_unprocessed_movies(list(cls._found_tmdb_trailer_ids))
        except AbortException:
            reraise(*sys.exc_info())
        except IOError as e:
            CacheIndex.logger().exception('')
        except JSONDecodeError as e:
            os.remove(path)
        except Exception as e:
            CacheIndex.logger().exception('')

    @classmethod
    def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
        """
        :param flush:
        :param complete:
        :return:
        """
        with cls.lock:
            if (not flush
                    and cls._unsaved_trailer_changes < 50
                    and (datetime.datetime.now()
                         - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')

                path = xbmcvfs.validatePath(path)
                tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'tfh_trailers.json.tmp')

                tmp_path = xbmcvfs.validatePath(tmp_path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                with io.open(tmp_path, mode='wt', newline=None,
                             encoding='utf-8') as cacheFile:
                    if complete:
                        creation_date_str = datetime.datetime.strftime(
                            cls._time_of_index_creation, '%Y:%m:%d')
                    else:
                        creation_date_str = TFHCache.INCOMPLETE_CREATION_DATE_STR
                    cls._cached_trailers[TFHCache.INDEX_CREATION_DATE] = {
                        TFHCache.INDEX_CREATION_DATE: creation_date_str,
                    }

                    json_text = json.dumps(cls._cached_trailers,
                                           ensure_ascii=False,
                                           default=TFHCache.abort_checker,
                                           indent=3, sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                    # Get rid of dummy entry
                    del cls._cached_trailers[TFHCache.INDEX_CREATION_DATE]
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                try:
                    os.replace(tmp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace TFH trailer'
                                          f' cache: {path}')
            except IOError as e:
                TFHCache.logger().exception('')
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()

    @classmethod
    def get_stats_for_path(
            cls, top: str, patterns: Dict[str,
                                          Tuple[Pattern[str],
                                                str]]) -> Dict[str, UsageData]:
        """
            Gets disk usage information for a subtree of the filesystem

        :param top:
        :param patterns:
        :return:
        """
        usage_data_map = {}
        try:
            free = 0
            total = 0
            used = 0
            size_on_disk = 0
            block_size = None
            # Available in Python >= 3.3
            # shutil.disk_usage(top)
            # _ntuple_diskusage = collections.namedtuple('usage', 'total used free')

            # units in bytes
            disk_usage = cls.disk_usage(top)

            if disk_usage is not None:
                free = disk_usage['free']
                total = disk_usage['total']
                used = disk_usage['used']
                block_size = disk_usage['blocksize']
            if block_size is None:
                block_size = 1  # Fall back to byte granularity

            #statvfs = os.statvfs(top)
            #block_size = statvfs.f_bsize
            #free = int(statvfs.f_bavail * statvfs.f_frsize / megaByte)
            #total = int(statvfs.f_blocks * statvfs.f_frsize / megaByte)
            # used = int((statvfs.f_blocks - statvfs.f_bfree) *
            #           statvfs.f_frsize / megaByte)
            # st.f_blocks is # blocks in filesystem
            # f_bavail free blocks for non-super user
            # f_bsize # preferred block size
            # f_frsize # fundamental file system block size
            # f_blocks total blocks in filesystem
            # f_bfree total # free blocks in filesystem

            for cache_name, (pattern, cache_type) in patterns.items():
                usage_data = UsageData(cache_name, pattern)
                usage_data.set_free_size(free)
                usage_data.set_total_size(total)
                usage_data.set_used_space(used)
                usage_data.set_block_size(block_size)
                usage_data_map[cache_name] = usage_data

            db_cache_file_expiration_days = \
                Settings.get_expire_remote_db_cache_entry_days()
            db_cache_file_expiration_seconds = \
                db_cache_file_expiration_days * 24 * 60 * 60
            db_cache_path_top = Settings.get_remote_db_cache_path()

            trailer_cache_file_expiration_days = \
                Settings.get_expire_trailer_cache_days()
            trailer_cache_file_expiration_seconds = \
                trailer_cache_file_expiration_days * 24 * 60 * 60
            trailer_cache_path_top = \
                Settings.get_downloaded_trailer_cache_path()
            now = datetime.datetime.now()

            found_directories = set()
            for root, dirs, files in os.walk(top):
                for filename in files:
                    for cache_name, (pattern, cache_type) in patterns.items():
                        Monitor.throw_exception_if_abort_requested()
                        usage_data = usage_data_map[cache_name]
                        if pattern.match(filename):
                            path = os.path.join(root, filename)
                            mod_time = now
                            try:
                                if not os.path.isdir(path):
                                    st = os.stat(path)
                                    mod_time = datetime.datetime.fromtimestamp(
                                        st.st_mtime)
                                    # Round the byte size up to a whole
                                    # number of filesystem blocks.
                                    size_on_disk = (
                                        (st.st_size + block_size - 1) //
                                        block_size) * block_size
                                else:
                                    found_directories.add(path)
                            except OSError as e:
                                continue  # File doesn't exist
                            except Exception as e:
                                cls._logger.exception('')
                                continue

                            deleted = False
                            try:
                                if (top == db_cache_path_top
                                        and cache_type == 'json'):
                                    if ((now - mod_time).total_seconds() >
                                            db_cache_file_expiration_seconds):
                                        if cls._logger.isEnabledFor(
                                                LazyLogger.INFO):
                                            cls._logger.info('deleting:', path)
                                        os.remove(path)
                                        deleted = True
                                        usage_data.add_to_disk_deleted(
                                            size_on_disk)
                                    break  # Next file

                                if (top == trailer_cache_path_top
                                        and cache_type == 'trailer'):
                                    if ((now - mod_time).total_seconds() >
                                            trailer_cache_file_expiration_seconds):
                                        if cls._logger.isEnabledFor(
                                                LazyLogger.INFO):
                                            cls._logger.info('deleting:', path)
                                        os.remove(path)
                                        deleted = True
                                        usage_data.add_to_disk_deleted(
                                            size_on_disk)
                                    break  # Next file

                            except AbortException:
                                reraise(*sys.exc_info())
                            except Exception as e:
                                cls._logger.exception('')

                            if not deleted:
                                file_data = FileData(path, mod_time,
                                                     size_on_disk)
                                usage_data.add_file_data(file_data)
                                usage_data.add_to_disk_used_by_cache(
                                    size_on_disk)

            for directory in found_directories:
                try:
                    os.rmdir(directory)
                except Exception as e:
                    pass

        except AbortException:
            reraise(*sys.exc_info())

        except Exception as e:
            cls._logger.exception('')

        cls._logger.exit()
        return usage_data_map
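The size-on-disk computation rounds a file's byte size up to a whole number of filesystem blocks. A worked example of the integer ceiling formula:

def size_on_disk(size_bytes: int, block_size: int) -> int:
    # Round up to the next whole multiple of block_size.
    return ((size_bytes + block_size - 1) // block_size) * block_size


assert size_on_disk(1, 4096) == 4096
assert size_on_disk(4096, 4096) == 4096
assert size_on_disk(4097, 4096) == 8192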
Example #13
    @classmethod
    def save_cache(cls, ignore_shutdown=False) -> None:
        """

        :return:
        """
        cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
        with cls.lock:
            if cls.tmdb_unsaved_changes == 0 and cls.library_unsaved_changes == 0:
                return
            if cls.tmdb_unsaved_changes > 0:
                entries_to_delete = []
                for key, entry in cls._all_missing_tmdb_trailers.items():
                    elapsed_time = datetime.date.today() - entry['timestamp']
                    elapsed_days = elapsed_time.days
                    if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                        if entry[Movie.UNIQUE_ID_TMDB] in cls._all_missing_tmdb_trailers:
                            entries_to_delete.append(
                                entry[Movie.UNIQUE_ID_TMDB])
                for entry_to_delete in entries_to_delete:
                    del cls._all_missing_tmdb_trailers[entry_to_delete]

                cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
                try:
                    path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'missing_tmdb_trailers.json')
                    temp_path = os.path.join(
                        Settings.get_remote_db_cache_path(),
                        'index', 'missing_tmdb_trailers.json.temp')

                    path = xbmcvfs.validatePath(path)
                    temp_path = xbmcvfs.validatePath(temp_path)

                    parent_dir, file_name = os.path.split(temp_path)
                    if not os.path.exists(parent_dir):
                        DiskUtils.create_path_if_needed(parent_dir)
                    with io.open(temp_path, mode='wt', newline=None,
                                 encoding='utf-8', ) as cacheFile:
                        json_text = \
                            json.dumps(cls._all_missing_tmdb_trailers,
                                       ensure_ascii=False,
                                       default=TrailerUnavailableCache.handler,
                                       indent=3, sort_keys=True)
                        cacheFile.write(json_text)
                        cacheFile.flush()
                    try:
                        os.replace(temp_path, path)
                    except OSError:
                        cls._logger.exception(f'Failed to replace index of movies'
                                              f' missing trailers cache: {path}')

                    cls.tmdb_last_save = datetime.datetime.now()
                    cls.tmdb_unsaved_changes = 0
                except AbortException:
                    reraise(*sys.exc_info())
                except IOError as e:
                    cls._logger.exception('')
                except Exception as e:
                    cls._logger.exception('')
                finally:
                    try:
                        os.remove(temp_path)
                    except Exception:
                        pass

            cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
            if cls.library_unsaved_changes > 0:
                entries_to_delete = []

                for key, entry in cls._all_missing_library_trailers.items():
                    elapsed_time = datetime.date.today() - entry['timestamp']
                    elapsed_days = elapsed_time.days
                    if elapsed_days > Settings.get_expire_remote_db_trailer_check_days():
                        if entry[Movie.MOVIEID] in cls._all_missing_library_trailers:
                            entries_to_delete.append(entry[Movie.MOVIEID])

                for entry_to_delete in entries_to_delete:
                    del cls._all_missing_library_trailers[entry_to_delete]

                cls.abort_on_shutdown(ignore_shutdown=ignore_shutdown)
                try:

                    path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'missing_library_trailers.json')
                    temp_path = os.path.join(
                        Settings.get_remote_db_cache_path(),
                        'index', 'missing_library_trailers.json.temp')

                    path = xbmcvfs.validatePath(path)
                    temp_path = xbmcvfs.validatePath(temp_path)

                    parent_dir, file_name = os.path.split(path)
                    if not os.path.exists(parent_dir):
                        DiskUtils.create_path_if_needed(parent_dir)

                    with io.open(temp_path, mode='wt', newline=None,
                                 encoding='utf-8', ) as cacheFile:

                        # TODO: Need ability to interrupt when ABORT. Object_handler
                        # not a valid arg to dumps

                        json_text = \
                            json.dumps(cls._all_missing_library_trailers,
                                       ensure_ascii=False,
                                       default=TrailerUnavailableCache.handler,
                                       indent=3, sort_keys=True)
                        cacheFile.write(json_text)
                        cacheFile.flush()

                    try:
                        os.replace(temp_path, path)
                    except OSError:
                        cls._logger.exception(f'Failed to replace missing trailer'
                                              f' information cache: {path}')

                    cls.library_last_save = datetime.datetime.now()
                    cls.library_unsaved_changes = 0
                except AbortException:
                    reraise(*sys.exc_info())
                except IOError as e:
                    cls._logger.exception('')
                except Exception as e:
                    cls._logger.exception('')
                finally:
                    try:
                        os.remove(temp_path)
                    except Exception:
                        pass
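Before writing, save_cache sweeps out entries older than the retention window using two passes: collect the expired keys, then delete them, which avoids mutating the dict while iterating it. The pattern in isolation (a sketch):

import datetime


def sweep_expired(entries: dict, max_age_days: int) -> None:
    # Pass 1: collect the keys of expired entries.
    today = datetime.date.today()
    expired = [key for key, entry in entries.items()
               if (today - entry['timestamp']).days > max_age_days]
    # Pass 2: delete them, now that iteration is finished.
    for key in expired:
        del entries[key]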
Example #14
    @classmethod
    def save_unprocessed_movie_cache(cls, flush: bool = False) -> None:
        """
        :param flush:
        :return:
        """

        #  TODO: Should use lock here, review locking
        with cls.lock:
            if cls._unprocessed_movie_changes == 0:
                return

            if (not flush
                    # Constants.TRAILER_CACHE_FLUSH_UPDATES
                    and cls._unprocessed_movie_changes < 10
                    and (datetime.datetime.now()
                         - cls._last_saved_unprocessed_movie_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tmdb_unprocessed_movies.json')
                path = xbmcvfs.validatePath(path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                # Don't save unneeded fields. Takes up disk and RAM

                temp_entries = {}
                for tmdb_id, entry in cls.get_unprocessed_movies().items():
                    temp_entry = {}
                    for key in Movie.TMDB_PAGE_DATA_FIELDS:
                        temp_entry[key] = entry[key]
                    temp_entries[tmdb_id] = temp_entry

                json_text = json.dumps(temp_entries,
                                       ensure_ascii=False,
                                       default=CacheIndex.handler,
                                       indent=3,
                                       sort_keys=True)

                with io.open(
                        path,
                        mode='wt',
                        newline=None,
                        encoding='utf-8',
                ) as cache_file:

                    # TODO: Need ability to interrupt when ABORT. Object_handler
                    # not a valid arg to dumps

                    cache_file.write(json_text)
                    cache_file.flush()
                    cls._last_saved_unprocessed_movie_timestamp = \
                        datetime.datetime.now()
                    cls._unprocessed_movie_changes = 0

                    Monitor.throw_exception_if_abort_requested()

                del json_text
                del temp_entries
            except AbortException:
                reraise(*sys.exc_info())
            except IOError as e:
                cls.logger().exception('')
            except Exception as e:
                cls.logger().exception('')
Example #15
    @classmethod
    def get_json_cache_file_path_for_movie_id(cls, movie_id,  # type: Union[int, str]
                                              source,  # type: str
                                              error_msg=''  # type: str
                                              ):
        # type: (...) -> Union[str, None]
        """
            Returns the path for a cache JSON file for the given movie_id
            and source.

        :param movie_id:
        :param source:
        :param error_msg: Optional text to add to any error message.
                    Typically a movie title.
        :return:
        """
        try:
            prefix = Cache.generate_unique_id_from_source(movie_id, source,
                                                          error_msg=error_msg)
            # if cls._logger.isEnabledFor(LazyLogger.DEBUG):
            #     cls._logger.debug('movie_id:', movie_id, 'source:', source,
            #                        'prefix:', prefix)
            #
            # To reduce clutter, put cached data into a folder named after the
            # SOURCE and first character of the id
            #
            # For local library entries, just use the first digit from the
            # numeric id.

            if source == Movie.LIBRARY_SOURCE:
                folder = prefix[0]
            elif source == Movie.TMDB_SOURCE:
                #
                # For TMDB entries, the numeric TMDB id is prefaced with:
                # "tmdb_". Use a folder named "t" + first digit of TMDBID
                #
                x = prefix.split('_', 1)
                folder = 't' + x[1][0]
            elif source == Movie.TFH_SOURCE:
                #
                # For TFH entries, the alphanumeric TFH id is prefaced with:
                # "tfh_". Use a folder named "h" + first character of the id
                #
                x = prefix.split('_', 1)
                folder = 'h' + x[1][0]
            elif source == Movie.ITUNES_SOURCE:
                #
                # For iTunes entries, Apple does not supply an ID, so we
                # use the TMDB ID instead if we can find it. (A lot of these are
                # for very new or unreleased movies.)
                #
                # The TMDB id here is prefaced with: "appl_". Use a folder named
                # "a" + first digit of TMDBID.

                x = prefix.split('_', 1)
                folder = 'a' + x[1][0]
            else:
                cls._logger.debug('Unexpected source:', source,
                                  'movie_id:', movie_id)
                return None

            cache_file = prefix + '.json'
            path = os.path.join(Settings.get_remote_db_cache_path(),
                                folder, cache_file)
            path = xbmcvfs.validatePath(path)

            return path
        except AbortException:
            reraise(*sys.exc_info())
        except Exception as e:
            cls._logger.exception('')
        return None
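A worked example of the sharding scheme, assuming generate_unique_id_from_source returns prefixes like those below (the 'hB' folder matches the normalized_trailer path in the sample entry of the next example):

# Illustrative prefixes and the shard folders they map to:
#   LIBRARY: prefix '603'             -> <cache>/6/603.json
#   TMDB:    prefix 'tmdb_603'        -> <cache>/t6/tmdb_603.json
#   TFH:     prefix 'tfh_BZwDpOQNgpw' -> <cache>/hB/tfh_BZwDpOQNgpw.json
#   ITUNES:  prefix 'appl_603'        -> <cache>/a6/appl_603.json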
Example #16
    @classmethod
    def save_cache(cls, flush: bool = False, complete: bool = False) -> None:
        """
        :param flush:
        :param complete:
        :return:

        Typical json entry
        Items marked with * are Kodi/TMDb artifacts

            "BZwDpOQNgpw": {
              "adult": false,
              "cast": [],
              "fanart": "default_fanart",
              "genre": [],
              "mpaa": "NR",
              "normalized_trailer": "/home/fbacher/.kodi/userdata/addon_data/script.video.randomtrailers/cache/hB/tfh_BZwDpOQNgpw_normalized_Larry Karaszewski on SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3) (2017)-movie.mkv",
              "original_language": "",
              "plot": "But wait! There's more! TFH has a podcast! \n\nIt's THE MOVIES THAT MADE ME, where you can join Oscar-nominated screenwriter Josh Olson and his, ummm, \"co-host\" Joe Dante in conversation with filmmakers, comedians, and all-around interesting people about the movies that made them who they are. Check it out now, and please subscribe wherever podcasts can be found.\n\nBut wait! There's more! TFH has a podcast! \n\nIt's THE MOVIES THAT MADE ME, where you can join Oscar-nominated screenwriter Josh Olson and his, ummm, \"co-host\" Joe Dante in conversation with filmmakers, comedians, and all-around interesting people about the movies that made them who they are. Check it out now, and please subscribe wherever podcasts can be found.\n\niTunes: http://itunes.trailersfromhell.com\nSpotify: http://spotify.trailersfromhell.com\nLibsyn: http://podcast.trailersfromhell.com\nGoogle Play: http://googleplay.trailersfromhell.com\nRSS: http://goo.gl/3faeG7\n\nAs always, you can find more commentary, more reviews, more podcasts, and more deep-dives into the films you don't know you love yet over at the Trailers From Hell mothership: \n\nhttp://www.trailersfromhell.com",
              "rating": 4.8974357,
              "genre": [],
              "rts.actors": "",
              "rts.certification": "Unrated",
              "rts.certificationImage": "ratings/us/unrated.png",
              "rts.directors": "",
              "rts.genres": "",
              "rts.runtime": "143 [B]Minutes[/B] - ",
              "rts.studios": "",
              "rts.tfhId": "BZwDpOQNgpw",
              "rts.tfh_title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3)",
              "rts.title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3) (2017) - TFH ",
              "rts.tmdb_id_not_found": true,
              "rts.voiced.actors": [],
              "rts.voiced.directors": [],
              "rts.voiced.studios": [],
              "rts.voiced.writers": [],
              "rts.writers": "",
              "rts.youtube.trailers_in_index": 1449,
              "rts.youtube_index": 204,
              "runtime": 8580,
              "source": "TFH",
              "studio": [
                 []
              ],
              "tags": [
                 "smokey and the bandit 3",
                 "larry karaszewski",
                 "jackie gleason"
              ],
              "thumbnail": "https://i.ytimg.com/vi_webp/BZwDpOQNgpw/maxresdefault.webp",
              "title": "SMOKEY IS THE BANDIT (SMOKEY AND THE BANDIT PART 3)",
              "trailer": "https://youtu.be/BZwDpOQNgpw",
              "trailerDiscoveryState": "04_discoveryReadyToDisplay",
              "trailerPlayed": true,
              "trailerType": "default_trailerType",
              "uniqueid": {
                 "tmdb": "None"
              },
              "writer": [
                 []
              ],
              "year": 2017
           }

        """
        with cls.lock:
            if (not flush
                    and cls._unsaved_trailer_changes < 50
                    and (datetime.datetime.now()
                         - cls._last_saved_trailer_timestamp)
                    < datetime.timedelta(minutes=5)):
                return

            try:
                path = os.path.join(Settings.get_remote_db_cache_path(),
                                    'index', 'tfh_trailers.json')

                path = xbmcvfs.validatePath(path)
                tmp_path = os.path.join(Settings.get_remote_db_cache_path(),
                                        'index', 'tfh_trailers.json.tmp')

                tmp_path = xbmcvfs.validatePath(tmp_path)
                parent_dir, file_name = os.path.split(path)
                if not os.path.exists(parent_dir):
                    DiskUtils.create_path_if_needed(parent_dir)

                Monitor.throw_exception_if_abort_requested()
                with io.open(tmp_path, mode='wt', newline=None,
                             encoding='utf-8') as cacheFile:

                    if complete:
                        cls.set_creation_date()
                        # Set to True when complete, but don't set to False
                        # when not complete.

                        cls._cache_complete = True

                    creation_date_str = datetime.datetime.strftime(
                        cls._time_of_index_creation, '%Y:%m:%d')

                    cls._cached_trailers[cls.INDEX_CREATION_DATE] = {
                        cls.INDEX_CREATION_DATE: creation_date_str,
                        cls.CACHE_COMPLETE: cls._cache_complete
                    }

                    json_text = json.dumps(cls._cached_trailers,
                                           ensure_ascii=False,
                                           default=TFHCache.abort_checker,
                                           indent=3, sort_keys=True)
                    cacheFile.write(json_text)
                    cacheFile.flush()

                    # Get rid of dummy entry from local dict
                    del cls._cached_trailers[cls.INDEX_CREATION_DATE]
                    cls._last_saved_trailer_timestamp = datetime.datetime.now()
                    cls._unsaved_trailer_changes = 0

                try:
                    os.replace(tmp_path, path)
                except OSError:
                    cls._logger.exception(f'Failed to replace TFH trailer'
                                          f' cache: {path}')
            except IOError as e:
                TFHCache.logger().exception('')
            except Exception as e:
                TFHCache.logger().exception('')

        Monitor.throw_exception_if_abort_requested()
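The TODO notes that json.dumps offers no per-object interrupt hook; the code works around this by piggybacking an abort check on the `default=` serializer. The real abort_checker is not shown in this excerpt; a plausible sketch:

import datetime

from common.monitor import Monitor  # add-on module; import path assumed


def abort_checker(obj):
    # json.dumps calls this only for objects it cannot serialize natively,
    # so the abort check fires opportunistically during a long dump.
    Monitor.throw_exception_if_abort_requested()
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError(f'Not JSON serializable: {type(obj)}')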