def _update_vfs_index(self, index_collection, tickets):
    """Record every collection reported by the tickets in the VFS index.

    One index row is written per (ticket, collection) pair, keyed on the
    plugin's normalized path so later lookups can locate the collection.
    """
    normalized_path = utils.normpath(self.path)
    for ticket in tickets:
        # All collections in a ticket share the ticket's timestamp.
        timestamp = ticket.timestamp
        for stat_collection in ticket.collections:
            index_collection.insert(
                dirname=normalized_path,
                timestamp=timestamp,
                location_path=stat_collection.location.to_path())
def collect(self):
    """Render the VFS view of the requested client directory.

    Yields row dicts produced by _collect_one_dir(), with each row's
    "Path" value wrapped in a UILink pointing at the "vfs" renderer.

    Raises:
        plugin.PluginError: if no client_id is set on the plugin.
    """
    path = utils.normpath(self.plugin_args.path)
    self.collections = {}
    if not self.client_id:
        raise plugin.PluginError("Client ID expected.")

    # If the user asks for fresh data then launch the flow and wait for it
    # to finish.
    if self.plugin_args.refresh:
        flow_obj = self.session.plugins.launch_flow(
            flow="ListDirectory",
            args=dict(
                path=path,
                recursive=self.plugin_args.recursive,
            )).make_flow_object()

        # Wait until the list directory is completed.
        self.launch_and_wait(flow_obj)

    # First get the VFS index.
    vfs_index = find.VFSIndex.load_from_location(
        self._config.server.vfs_index_for_server(self.client_id),
        session=self.session)

    # We use the index to get the best StatEntryCollection() which covers
    # the requested path. There are three possible cases:
    # 1) All the existing StatEntryCollection()s start at a directory deeper
    # than path. In this case we emulate the directories of all existing
    # collections' starting paths.
    # 2) The requested path begins with the starting path of one or more
    # StatEntryCollection()s. This means these collections contain it.
    # 3) path is longer than all StatEntryCollection()'s starting paths
    # plus their depth.
    # NOTE(review): _collect_one_dir compares this against list slices, so
    # filter() must return a list (Python 2 semantics); under Python 3 this
    # would need list(filter(...)).
    path_components = filter(None, path.split("/"))
    for row in self._collect_one_dir(vfs_index, path_components):
        row["Path"] = renderers.UILink("vfs", row["Path"])
        yield row
def _process_files(self, root, files):
    """Stat each file in *root* and yield one result dict per file.

    Args:
        root: The directory containing the files.
        files: Iterable of file names within root.

    Yields:
        A dict with "filename" and "dirname" keys, augmented with the
        st_* fields of os.lstat() when the stat succeeds. When lstat
        fails (vanished or unreadable file) the name-only record is
        yielded instead of aborting the whole listing.
    """
    # Fields copied verbatim from the os.lstat() result.
    stat_fields = ("st_mode", "st_ino", "st_dev", "st_nlink", "st_uid",
                   "st_gid", "st_size", "st_mtime", "st_ctime", "st_atime")
    root = utils.normpath(root)
    for f in files:
        path = os.path.join(root, f)
        result = dict(filename=f, dirname=root)
        try:
            s = os.lstat(path)
            for field in stat_fields:
                result[field] = getattr(s, field)
        # Narrowed from a bare `except Exception` so genuine programming
        # errors are no longer swallowed; stat failures stay best-effort.
        except (IOError, OSError):
            pass

        self._session.report_progress("Processing %s", path)
        yield result
def _process_files(self, root, files):
    """Stat each file in *root* and yield one result dict per file (Windows).

    When *root* carries no drive letter we are at the virtual top level:
    instead of listing files we emit one synthetic directory entry per
    local drive and return.

    Args:
        root: The directory containing the files.
        files: Iterable of file names within root.

    Yields:
        A dict with normalized "filename"/"dirname" and an "st_mode"
        default of 0, augmented with the st_* fields of os.lstat() when
        the stat succeeds; on failure the defaults-only record is yielded.
    """
    drive, path = self.splitdrive(root)
    if not drive:
        # Deferred import: this module is only importable on Windows
        # clients, so it must not load at module import time.
        from rekall.plugins.response import windows
        for drive in windows.get_drives():
            # 0o775 (== legacy 0775): mark the synthetic drive entries as
            # traversable directories. Modern octal syntax keeps this line
            # valid on Python 3 as well.
            yield dict(filename=self.normalize_path(drive + os.path.sep),
                       dirname="/",
                       st_mode=0o775)
        return

    # Fields copied verbatim from the os.lstat() result.
    stat_fields = ("st_mode", "st_ino", "st_dev", "st_nlink", "st_uid",
                   "st_gid", "st_size", "st_mtime", "st_ctime", "st_atime")
    root = utils.normpath(root)
    for f in files:
        path = os.path.join(root, f)
        result = dict(filename=self.normalize_path(f),
                      dirname=self.normalize_path(root),
                      st_mode=0)
        try:
            s = os.lstat(path)
            for field in stat_fields:
                result[field] = getattr(s, field)
        # Narrowed from a bare `except Exception` so genuine programming
        # errors are no longer swallowed; stat failures stay best-effort.
        except (IOError, OSError):
            pass

        self._session.report_progress("Processing %s", path)
        yield result
def _collect_one_dir(self, vfs_index, path_components):
    """Yield one row per entry of the directory named by *path_components*.

    Consults the VFS index to find either a StatEntryCollection whose
    starting path covers the requested path (real listing) or, failing
    that, the set of collection starting paths below it (virtual
    directories). Recurses into subdirectories when
    self.plugin_args.recursive is set.

    NOTE(review): path_components and the index dirnames are compared as
    list slices, so callers must pass a list (Python 2 filter()
    semantics) — confirm before porting to Python 3.
    """
    path = utils.normpath(utils.join_path(*path_components))
    stat_collection_path = None
    virtual_directories = set()

    # More recent collections override older collections.
    for row in vfs_index.query(order_by="timestamp asc"):
        collection_path_components = filter(None, row["dirname"].split("/"))

        # e.g. collection_path_components = /home/
        # path_components = /home/scudette/
        # The collection starts at or above the requested path: it
        # contains the directory we want. Later (newer) matches win.
        if (len(collection_path_components) <= len(path_components) and
                collection_path_components ==
                path_components[:len(collection_path_components)]):
            stat_collection_path = row["location_path"]

        # e.g. path_components = /home/
        # collection_path_components = /home/scudette/
        # The collection starts below the requested path: surface its next
        # component as a virtual directory entry.
        elif (len(collection_path_components) > len(path_components) and
              collection_path_components[:len(path_components)] ==
              path_components):
            virtual_directories.add(
                collection_path_components[len(path_components)])

    # We found a collection that contains this path.
    if stat_collection_path:
        with files.StatEntryCollection.load_from_location(
                self._config.server.location_from_path_for_server(
                    stat_collection_path),
                session=self.session) as stat_collection:
            # list() snapshots the query before we recurse below, which
            # may issue further queries.
            for row in list(
                    stat_collection.query(dirname=path, order_by="filename")):
                # st_mode may be missing/None; default to 0 permissions.
                mode = response_common.Permissions(row["st_mode"] or 0)
                result = dict(
                    Path=utils.join_path(row["dirname"], row["filename"]),
                    st_mode=mode,
                    st_size=row["st_size"],
                    st_mtime=arrow.get(row["st_mtime"]),
                    st_atime=arrow.get(row["st_atime"]),
                    st_ctime=arrow.get(row["st_ctime"]),
                )
                # Remaining stat fields are copied through unchanged.
                for field in "st_ino st_dev st_nlink st_uid st_gid".split(
                ):
                    result[field] = row[field]

                yield result

                # Depth-first recursion into real subdirectories.
                if self.plugin_args.recursive and mode.is_dir():
                    for row in self._collect_one_dir(
                            vfs_index, path_components + [row["filename"]]):
                        yield row

    else:
        # No covering collection: emulate the starting directories of the
        # deeper collections as plain 0755 directories.
        for directory in virtual_directories:
            mode = response_common.Permissions(0755)
            yield dict(Path=utils.join_path(path, directory),
                       st_mode=mode)

            if self.plugin_args.recursive:
                for row in self._collect_one_dir(
                        vfs_index, path_components + [directory]):
                    yield row