def elf_dependencies(path: str) -> List[PrefetchSuggestion]:
    """
    Prefetch the shared library dependencies of an ELF binary.

    This rule is based on the assumption that if an ELF binary is read, it is
    likely being executed and its dependencies will soon be loaded as well.
    """
    prefetches = []

    if is_elf_binary(path):
        try:
            dependencies = read_elf_dependencies(path)
        except Exception as e:
            log.warning(f"failed to read elf dependencies of {path}: {e}")
            dependencies = []

        # Dependencies may be symlinks, so prefetch those.
        prefetches += [
            PrefetchSuggestion(path=dep, contents=False) for dep in dependencies
        ]

        # Prefetch contents of the final shared libraries.
        prefetches += [
            PrefetchSuggestion(path=os.path.realpath(dep), contents=True)
            for dep in dependencies
        ]

    return prefetches
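# Hypothetical sketch of the read_elf_dependencies() helper used above; the
# project's real implementation is not shown in this excerpt. This version
# parses `ldd` output and returns the shared-library paths the binary links
# against (those paths may themselves be symlinks, which is why the caller
# also prefetches their realpath targets). Assumes subprocess and List are
# imported as elsewhere in this module.
def read_elf_dependencies(path: str) -> List[str]:
    output = subprocess.check_output(["ldd", path]).decode()
    dependencies = []
    for line in output.splitlines():
        # Lines look like: "libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x...)"
        if "=>" not in line:
            continue
        target = line.split("=>", 1)[1].strip().split(" ")[0]
        if target.startswith("/"):
            dependencies.append(target)
    return dependencies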
def wrapper(*args, **kwargs):
    # Support coverage.py within FUSE threads.
    if hasattr(threading, "_trace_hook"):
        sys.settrace(getattr(threading, "_trace_hook"))

    try:
        res = fn(*args, **kwargs)
        if res is None:
            res = 0
        return res
    except OSError as e:
        # FUSE expects an error to be returned as negative errno.
        if e.errno:
            return -e.errno
        else:
            return -errno.EIO
    except NotImplementedError:
        log.debug(f"fuse::{name}() not implemented!")
        return -errno.ENOSYS
    except Exception:
        log.warning(f"fuse::{name}() raised an unexpected exception:")
        log.warning(traceback.format_exc())
        return -errno.EIO
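# Hypothetical sketch of the enclosing helper that produces wrapper() above;
# the surrounding code is not part of this excerpt. It only illustrates where
# the free variables `fn` and `name` come from: each FUSE operation gets
# wrapped so Python exceptions are translated into negative errno return
# values. Assumes functools and typing.Callable are imported.
def wrap_fuse_op(name: str, fn: Callable) -> Callable:
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        ...  # body as shown above
    return wrapper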
def is_elf_binary(path: str) -> bool:
    """Check if the specified file is an ELF binary."""
    try:
        output = subprocess.check_output(["file", path]).decode()
        return "ELF" in output
    except Exception as e:
        log.warning(f"failed to check if {path} is elf binary: {e}")
        return False
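# Alternative sketch, not the implementation above: the same check can be done
# without spawning `file`, by reading the 4-byte ELF magic number directly.
# This trades a subprocess per lookup for a small read of the file header.
def is_elf_binary_magic(path: str) -> bool:
    try:
        with open(path, "rb") as f:
            return f.read(4) == b"\x7fELF"
    except OSError:
        return False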
def readfile_prefetch(
        self, path: str) -> Tuple[FileContents, List[PrefetchEntry]]:
    """Retrieve file contents and prefetch related data."""
    base = self.readfile(path)
    try:
        suggestions = prefetching.file_read(path)
        return base, self._resolve_prefetches(suggestions)
    except Exception as e:
        # Avoid complete I/O failure if prefetching breaks.
        log.warning(f"prefetching for readfile({path}) failed: {e}")
        return base, []
def get_metadata_prefetch(
        self, path: str) -> Tuple[Metadata, List[PrefetchEntry]]:
    """Retrieve metadata of an entry and prefetch related data."""
    base = self.get_metadata(path)
    try:
        suggestions = prefetching.file_access(path)
        return base, self._resolve_prefetches(suggestions)
    except Exception as e:
        # Avoid complete I/O failure if prefetching breaks.
        log.warning(f"prefetching for get_metadata({path}) failed: {e}")
        return base, []
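# Hypothetical shapes of the prefetch types referenced above, inferred from how
# they are used in this excerpt; the real definitions live elsewhere in the
# project. Assumes dataclasses and typing.Optional are imported, and that
# Metadata and FileContents are the types used in the method signatures above.
@dataclasses.dataclass
class PrefetchSuggestion:
    path: str
    contents: bool  # True if file contents (not just metadata) should be prefetched


@dataclasses.dataclass
class PrefetchEntry:
    path: str
    metadata: Metadata
    contents: Optional[FileContents] = None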
def _resolve_prefetches(
    self, suggestions: List[prefetching.PrefetchSuggestion]
) -> List[PrefetchEntry]:
    # Group prefetch suggestions by path.
    suggestions_by_path = collections.defaultdict(list)
    for suggestion in suggestions:
        suggestions_by_path[suggestion.path].append(suggestion)

    # Resolve suggestions into actual prefetches.
    prefetches: List[PrefetchEntry] = []
    for path, path_suggestions in suggestions_by_path.items():
        # Don't prefetch things outside the prefetchable paths.
        if not self._is_prefetchable(path):
            continue

        prefetch_contents = any(s.contents for s in path_suggestions)

        # Don't prefetch contents or metadata that have already been fetched.
        with self._fetched_lock:
            if prefetch_contents and path in self._fetched_contents:
                continue
            elif not prefetch_contents and path in self._fetched_metadata:
                continue

        entry = PrefetchEntry(
            path=path, metadata=self.get_metadata(path), contents=None
        )

        # Try to prefetch contents if requested and available.
        if prefetch_contents and not entry.metadata.error:
            try:
                entry.contents = self.readfile(path)
            except Exception as e:
                log.warning(f"failed to prefetch contents of {path}: {e}")

        prefetches.append(entry)

    return prefetches
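# Hypothetical sketch of the _is_prefetchable() policy used above; the actual
# rule is project-specific. Here it is assumed to be a prefix match against a
# configured list of prefetchable directories (self._prefetchable_paths is an
# assumed attribute).
def _is_prefetchable(self, path: str) -> bool:
    return any(
        path == prefix or path.startswith(prefix.rstrip("/") + "/")
        for prefix in self._prefetchable_paths
    )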