def listdir(self, key, path, metadata):
    """Build the sorted entry list for *path* under the share *key*.

    Ensures the per-key symlink into the shared source tree exists,
    reads the cached directory listing, wraps every item in a
    Directory or File object (attaching any per-path metadata), and
    returns the entries newest-first.

    Raises Http404 when *path* does not name a directory under
    settings.FILES_SOURCE.
    """
    key = force_utf8(key)
    path = force_utf8(path)
    metadata = metadata if metadata is not None else {}

    # Make sure the key's root symlink to the source tree exists.
    link_path = op.join(settings.FILES_ROOT, key)
    if not op.isdir(link_path):
        os.symlink(settings.FILES_SOURCE, link_path)

    # Resolve and validate the requested directory.
    source_dir = op.join(settings.FILES_SOURCE, path)
    if not op.isdir(source_dir):
        raise http.Http404()

    dirs_data, files_data = cache.listdir(source_dir)
    entries = []
    for name, size, mtime in dirs_data:
        relative = op.join(path, name)
        entries.append(Directory(
            name,
            op.join(source_dir, name),
            relative,
            size,
            mtime,
            metadata.get(relative, {}),
        ))
    for name, size, mtime in files_data:
        relative = op.join(path, name)
        entries.append(File(
            name,
            op.join(source_dir, name),
            relative,
            size,
            mtime,
            metadata.get(relative, {}),
            op.join(settings.FILES_URL, key, path, name),
        ))

    # Most recently modified entries first.
    entries.sort(key=lambda entry: entry.timestamp, reverse=True)
    return entries
def dir_cache_data(path):
    """
    Return the data to store in the cache for directory at *path*.

    Returns a ``(directories, files)`` pair; each element is a list of
    ``(name, size, mtime)`` tuples. Directory sizes are the recursive
    total of the sizes of the files they contain.
    """
    path = force_utf8(path)
    files = []
    directories = []
    for entry in os.listdir(path):
        # Skip excluded names. The original code did
        # ``for pattern ...: if pattern.match(entry): continue`` — that
        # ``continue`` only advanced the *pattern* loop, so exclusions
        # never took effect. ``any`` skips the entry itself.
        if any(pattern.match(entry) for pattern in settings.EXCLUDE_FILES):
            continue
        entry_path = op.join(path, entry)
        if not op.exists(entry_path):
            # File was deleted during directory listing
            continue
        timestamp = op.getmtime(entry_path)
        if op.isdir(entry_path):
            # Directory size = recursive total of contained file sizes.
            size = 0
            for dirpath, dirnames, filenames in os.walk(entry_path):
                for f in filenames:
                    fp = op.join(dirpath, f)
                    # Guard against files removed mid-walk.
                    if op.exists(fp):
                        size += op.getsize(fp)
            directories.append((entry, size, timestamp))
        else:
            size = op.getsize(entry_path)
            files.append((entry, size, timestamp))
    return directories, files
def dir_cache_key(path):
    """
    Get the cache key for *path*.

    The absolute path is used, with spaces replaced by underscores so
    the key is safe for cache backends that reject whitespace.
    """
    normalized = op.abspath(force_utf8(path)).replace(" ", "_")
    return "leechy-dir-cache-%s" % normalized
def cache_directory(path):
    """
    Put the directory at *path* in the cache.

    If the directory no longer exists, any stale cache entry for it is
    dropped instead.
    """
    path = force_utf8(path)
    key = dir_cache_key(path)
    if not op.exists(path):
        # Directory vanished: remove the stale listing.
        cache.delete(key)
    else:
        cache.set(key, dir_cache_data(path))
def listdir(path):
    """
    Retrieve the contents of directory at *path*.

    Serves the cached listing when present; otherwise falls back to
    computing the listing directly from the filesystem.
    """
    path = force_utf8(path)
    cached = cache.get(dir_cache_key(path))
    if cached is None:
        # Cache miss: compute on the fly (the watcher will repopulate).
        return dir_cache_data(path)
    return cached
def handle(self, *args, **options):
    """
    Warm the directory cache for the whole source tree, then block
    forever watching it for filesystem changes via inotify.
    """
    # First fill the cache: the root plus every subdirectory.
    root = force_utf8(settings.FILES_SOURCE)
    cache.cache_directory(root)
    for dirpath, dirnames, _ in os.walk(root):
        for name in dirnames:
            cache.cache_directory(op.join(dirpath, name))

    # Then watch recursively for create/delete/move-in events and
    # refresh the affected directory's cache entry on each one.
    watch_manager = pyinotify.WatchManager()
    events = (pyinotify.IN_DELETE
              | pyinotify.IN_CREATE
              | pyinotify.IN_MOVED_TO)
    watch_manager.add_watch(root, events, rec=True)
    pyinotify.Notifier(watch_manager, self.update_dir).loop()
def update_dir(self, event):
    """
    inotify callback: re-cache the directory that contains the path
    named by *event*.
    """
    changed = force_utf8(event.pathname)
    cache.cache_directory(op.dirname(changed))