@classmethod
def get_stats_for_path(cls, top: str,
                       patterns: Dict[str, Tuple[Pattern[str], str]]
                       ) -> Dict[str, UsageData]:
    """
    Gets disk usage information for a subtree of the filesystem.

    Walks the tree rooted at 'top', collecting per-cache statistics for
    every file matching one of the supplied patterns. Cache entries older
    than their configured expiration are deleted as a side effect.

    :param top: Root directory of the subtree to scan
    :param patterns: Maps a cache name to a (filename pattern, cache type)
                     tuple, where cache type is 'json' or 'trailer'
    :return: Maps each cache name to the UsageData collected for it
    """
    usage_data_map: Dict[str, UsageData] = {}
    try:
        free = 0
        total = 0
        used = 0
        size_on_disk = 0
        block_size = None

        # Note: on Python >= 3.3, shutil.disk_usage(top) returns a
        # namedtuple('usage', 'total used free') in bytes, but does not
        # supply the filesystem block size.
        disk_usage = cls.disk_usage(top)
        if disk_usage is not None:
            free = disk_usage['free']
            total = disk_usage['total']
            used = disk_usage['used']
            block_size = disk_usage['blocksize']

        # Equivalent os.statvfs(top) fields, for reference:
        #   f_bsize   preferred block size
        #   f_frsize  fundamental filesystem block size
        #   f_blocks  total blocks in filesystem
        #   f_bfree   total free blocks in filesystem
        #   f_bavail  free blocks for non-superuser

        # Create one UsageData accumulator per cache.
        for cache_name, (pattern, cache_type) in patterns.items():
            usage_data = UsageData(cache_name, pattern)
            usage_data.set_free_size(free)
            usage_data.set_total_size(total)
            usage_data.set_used_space(used)
            usage_data.set_block_size(block_size)
            usage_data_map[cache_name] = usage_data

        db_cache_file_expiration_days = \
            Settings.get_expire_remote_db_cache_entry_days()
        db_cache_file_expiration_seconds = \
            db_cache_file_expiration_days * 24 * 60 * 60
        db_cache_path_top = Settings.get_remote_db_cache_path()
        trailer_cache_file_expiration_days = \
            Settings.get_expire_trailer_cache_days()
        trailer_cache_file_expiration_seconds = \
            trailer_cache_file_expiration_days * 24 * 60 * 60
        trailer_cache_path_top = \
            Settings.get_downloaded_trailer_cache_path()
        now = datetime.datetime.now()

        found_directories = set()
        for root, dirs, files in os.walk(top):
            for filename in files:
                for cache_name, (pattern, cache_type) in patterns.items():
                    Monitor.throw_exception_if_abort_requested()
                    usage_data = usage_data_map[cache_name]
                    if not pattern.match(filename):
                        continue
                    path = os.path.join(root, filename)
                    mod_time = now
                    try:
                        if not os.path.isdir(path):
                            st = os.stat(path)
                            mod_time = datetime.datetime.fromtimestamp(
                                st.st_mtime)
                            size_in_bytes = st.st_size
                            if block_size:
                                # Round up to a whole number of
                                # filesystem blocks.
                                size_on_disk = (
                                    (size_in_bytes - 1) // block_size
                                    + 1) * block_size
                            else:
                                size_on_disk = size_in_bytes
                        else:
                            found_directories.add(path)
                    except OSError:
                        continue  # File doesn't exist
                    except Exception:
                        cls._logger.exception('')
                        continue

                    deleted = False
                    try:
                        if (top == db_cache_path_top
                                and cache_type == 'json'):
                            if ((now - mod_time).total_seconds()
                                    > db_cache_file_expiration_seconds):
                                if cls._logger.isEnabledFor(
                                        LazyLogger.INFO):
                                    cls._logger.info('deleting:', path)
                                os.remove(path)
                                deleted = True
                                usage_data.add_to_disk_deleted(
                                    size_on_disk)
                                break  # Next file
                        if (top == trailer_cache_path_top
                                and cache_type == 'trailer'):
                            if ((now - mod_time).total_seconds()
                                    > trailer_cache_file_expiration_seconds):
                                if cls._logger.isEnabledFor(
                                        LazyLogger.INFO):
                                    cls._logger.info('deleting:', path)
                                os.remove(path)
                                deleted = True
                                usage_data.add_to_disk_deleted(
                                    size_on_disk)
                                break  # Next file
                    except AbortException:
                        reraise(*sys.exc_info())
                    except Exception:
                        cls._logger.exception('')

                    if not deleted:
                        file_data = FileData(path, mod_time, size_on_disk)
                        usage_data.add_file_data(file_data)
                        usage_data.add_to_disk_used_by_cache(size_on_disk)

        # Prune directories emptied by the deletions above; rmdir fails
        # harmlessly when a directory is not empty.
        for directory in found_directories:
            try:
                os.rmdir(directory)
            except Exception:
                pass
    except AbortException:
        reraise(*sys.exc_info())
    except Exception:
        cls._logger.exception('')

    cls._logger.exit()
    return usage_data_map
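
# A minimal sketch (an assumption, not the original implementation) of the
# disk_usage() helper consumed above. It combines shutil.disk_usage()
# (Python >= 3.3, as the comments note) with os.statvfs() for the block
# size; the dict keys mirror how get_stats_for_path() reads the result,
# and the None-on-failure contract matches the 'is not None' check above.
def _disk_usage_sketch(top: str) -> Union[Dict[str, int], None]:
    import shutil  # local import; the surrounding module may not need it

    try:
        usage = shutil.disk_usage(top)  # namedtuple: total, used, free (bytes)
        return {
            'total': usage.total,
            'used': usage.used,
            'free': usage.free,
            # Fundamental filesystem block size; os.statvfs is POSIX-only.
            'blocksize': os.statvfs(top).f_frsize,
        }
    except OSError:
        return None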
@classmethod
def read_tmdb_cache_json(cls, movie_id: Union[int, str],
                         source: str,
                         error_msg: str = ''
                         ) -> Union[MovieType, None]:
    """
    Attempts to read TMDB detail data for a specific movie from the
    local cache.

    :param movie_id: TMDB movie ID
    :param source: Source database that caused this request
                   (local, TMDB, iTunes)
    :param error_msg: Supplies additional text to display on error,
                      typically a movie title
    :return: MovieType containing the cached data, or None if not found
    """
    trailer = None
    movie_id = str(movie_id)
    exception_occurred = False
    path = None
    try:
        path = Cache.get_json_cache_file_path_for_movie_id(
            movie_id, source, error_msg=error_msg)
        if not os.path.exists(path):
            if cls._logger.isEnabledFor(LazyLogger.DEBUG_EXTRA_VERBOSE):
                cls._logger.debug_extra_verbose(
                    'cache file not found for:', error_msg,
                    'id:', movie_id, 'source:', source)
            return None
        if not os.access(path, os.R_OK):
            cls._logger.warning(Messages.get_msg(
                Messages.CAN_NOT_READ_FILE) % path)
            return None

        # Treat entries older than the configured expiration as misses.
        file_mod_time = datetime.datetime.fromtimestamp(
            os.path.getmtime(path))
        now = datetime.datetime.now()
        expiration_time = now - datetime.timedelta(
            days=Settings.get_expire_trailer_cache_days())
        if file_mod_time < expiration_time:
            if cls._logger.isEnabledFor(LazyLogger.DEBUG_EXTRA_VERBOSE):
                cls._logger.debug_extra_verbose(
                    'cache file EXPIRED for:', error_msg,
                    'id:', movie_id, 'source:', source, 'path:', path)
            return None

        Monitor.throw_exception_if_abort_requested()
        with io.open(path, mode='rt', newline=None,
                     encoding='utf-8') as cache_file:
            try:
                # The file is opened as utf-8 text, so json.load needs no
                # encoding argument (the keyword was removed in Python 3.9).
                trailer = json.load(cache_file)
            except Exception as e:
                cls._logger.exception(e)
                cls._logger.debug_extra_verbose('Failing json:', path)
                exception_occurred = True

        # Guard against a failed load, which leaves trailer as None.
        if trailer is not None:
            trailer[Movie.CACHED] = True
    except AbortException:
        reraise(*sys.exc_info())
    except IOError:
        cls._logger.exception('')
        trailer = None
        exception_occurred = True
    except Exception:
        cls._logger.exception('')
        trailer = None
        exception_occurred = True

    try:
        # Blow away the bad cache file
        if exception_occurred and path is not None:
            os.remove(path)
    except AbortException:
        reraise(*sys.exc_info())
    except Exception:
        cls._logger.exception('Trying to delete bad cache file.')

    return trailer
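
# Hypothetical usage sketch: 'Cache' as the defining class and
# fetch_movie_from_tmdb() are illustrative assumptions, not confirmed by
# this module. It shows the intended read-through pattern: try the JSON
# cache first and fall back to a remote fetch when the entry is missing,
# unreadable, expired, or corrupt (all of which return None above).
#
#     movie = Cache.read_tmdb_cache_json(603, source='TMDB',
#                                        error_msg='The Matrix')
#     if movie is None:
#         movie = fetch_movie_from_tmdb(603)  # hypothetical remote fetch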