def read_cached_value_from_disk(cls):
        # type: () -> CacheParameters
        """
            Loads previously saved TMDb discovery parameters from the
            on-disk cache.

        :return: CacheParameters built from the saved JSON, or None when
                 the cache file is unreadable, has expired, or an error
                 occurs while reading/parsing it
        """

        path = os.path.join(Settings.get_remote_db_cache_path(), 'index',
                            'tmdb_discovery_parameters.json')
        path = xbmcvfs.validatePath(path)
        parent_dir, file_name = os.path.split(path)
        if not os.path.exists(parent_dir):
            DiskUtils.create_path_if_needed(parent_dir)

        saved_preferences = None
        with CacheIndex.lock:
            try:
                if not os.access(path, os.R_OK):
                    cls._logger.error(
                        Messages.get_formatted_msg(Messages.CAN_NOT_READ_FILE,
                                                   path))
                    return None

                file_mod_time = datetime.datetime.fromtimestamp(
                    os.path.getmtime(path))
                now = datetime.datetime.now()
                # Files older than the configured number of days are stale.
                expiration_time = now - datetime.timedelta(
                    Settings.get_expire_remote_db_cache_entry_days())

                if file_mod_time < expiration_time:
                    if cls._logger.isEnabledFor(LazyLogger.DEBUG):
                        cls._logger.debug('cache file EXPIRED for:', path)
                    return None

                Monitor.throw_exception_if_abort_requested()

                with io.open(path, mode='rt', newline=None,
                             encoding='utf-8') as cacheFile:
                    # json.load's 'encoding' kwarg was deprecated and then
                    # removed in Python 3.9; the file is already opened as
                    # utf-8 text, so it is unnecessary.
                    saved_preferences = json.load(cacheFile)
                    saved_preferences = CacheParameters(saved_preferences)
            except AbortException:
                reraise(*sys.exc_info())
            except Exception:
                cls._logger.exception('')
                # Ensure the raw json dict does not leak out if
                # CacheParameters() raised after a successful load.
                saved_preferences = None

        return saved_preferences
    def get_stats_for_path(
            cls, top: str, patterns: Dict[str,
                                          Tuple[Pattern[str],
                                                str]]) -> Dict[str, UsageData]:
        """
            Gets disk usage information for a subtree of the filesystem,
            deleting expired cache files (and any directories emptied by
            those deletions) along the way.

        :param top: root directory of the subtree to scan
        :param patterns: maps a cache name to (filename regex, cache type)
        :return: map of cache name to the UsageData gathered for it
        """
        usage_data_map = {}
        try:
            free = 0
            total = 0
            used = 0
            block_size = None

            # Filesystem totals, in bytes
            disk_usage = cls.disk_usage(top)
            if disk_usage is not None:
                free = disk_usage['free']
                total = disk_usage['total']
                used = disk_usage['used']
                block_size = disk_usage['blocksize']

            if not block_size:
                # Without a real block size we cannot round file sizes up
                # to whole allocation units; fall back to exact byte sizes
                # instead of raising TypeError for every file below.
                block_size = 1

            for cache_name, (pattern, cache_type) in patterns.items():
                usage_data = UsageData(cache_name, pattern)
                usage_data.set_free_size(free)
                usage_data.set_total_size(total)
                usage_data.set_used_space(used)
                usage_data.set_block_size(block_size)
                usage_data_map[cache_name] = usage_data

            db_cache_file_expiration_seconds = (
                Settings.get_expire_remote_db_cache_entry_days()
                * 24 * 60 * 60)
            db_cache_path_top = Settings.get_remote_db_cache_path()

            trailer_cache_file_expiration_seconds = (
                Settings.get_expire_trailer_cache_days() * 24 * 60 * 60)
            trailer_cache_path_top = (
                Settings.get_downloaded_trailer_cache_path())
            now = datetime.datetime.now()

            found_directories = set()
            for root, dirs, files in os.walk(top):
                for filename in files:
                    for cache_name, (pattern, cache_type) in patterns.items():
                        Monitor.throw_exception_if_abort_requested()
                        usage_data = usage_data_map[cache_name]
                        if not pattern.match(filename):
                            continue

                        path = os.path.join(root, filename)
                        mod_time = now
                        # Reset per file; previously a matched directory
                        # reused the prior file's stale size.
                        size_on_disk = 0
                        try:
                            if not os.path.isdir(path):
                                st = os.stat(path)
                                mod_time = datetime.datetime.fromtimestamp(
                                    st.st_mtime)
                                # Round the byte size up to whole
                                # filesystem blocks. Integer division
                                # ('//') keeps this an int; '/' would
                                # silently produce floats in Python 3.
                                size_on_disk = (
                                    (st.st_size - 1) // block_size
                                    + 1) * block_size
                            else:
                                found_directories.add(path)
                        except OSError:
                            continue  # File doesn't exist
                        except Exception:
                            cls._logger.exception('')
                            continue

                        deleted = False
                        try:
                            if (top == db_cache_path_top
                                    and cache_type == 'json'):
                                if ((now - mod_time).total_seconds() >
                                        db_cache_file_expiration_seconds):
                                    if cls._logger.isEnabledFor(
                                            LazyLogger.INFO):
                                        cls._logger.info('deleting:', path)
                                    os.remove(path)
                                    deleted = True
                                    usage_data.add_to_disk_deleted(
                                        size_on_disk)
                                # NOTE(review): this break fires whether or
                                # not the file was deleted, so non-expired
                                # json files skip the 'not deleted'
                                # accounting below. Preserved as-is —
                                # confirm this is intentional.
                                break  # Next file

                            if (top == trailer_cache_path_top
                                    and cache_type == 'trailer'):
                                if ((now - mod_time).total_seconds() >
                                        trailer_cache_file_expiration_seconds):
                                    if cls._logger.isEnabledFor(
                                            LazyLogger.INFO):
                                        cls._logger.info('deleting:', path)
                                    os.remove(path)
                                    deleted = True
                                    usage_data.add_to_disk_deleted(
                                        size_on_disk)
                                break  # Next file

                        except AbortException:
                            reraise(*sys.exc_info())
                        except Exception:
                            cls._logger.exception('')

                        if not deleted:
                            file_data = FileData(path, mod_time,
                                                 size_on_disk)
                            usage_data.add_file_data(file_data)
                            usage_data.add_to_disk_used_by_cache(
                                size_on_disk)

            # Remove directories emptied by the deletions above; rmdir
            # fails harmlessly (and is ignored) on non-empty directories.
            for directory in found_directories:
                try:
                    os.rmdir(directory)
                except OSError:
                    pass

        except AbortException:
            reraise(*sys.exc_info())
        except Exception:
            cls._logger.exception('')

        cls._logger.exit()
        return usage_data_map