def main():
    """
    Dump the slack space of every record in an MFT file to STDOUT.

    Each record is emitted as exactly 1024 bytes: NUL padding first,
    then the record's slack data, so output offsets line up with the
    source record numbers.
    """
    filename = sys.argv[1]
    with Mmap(filename) as buf:
        enumerator = MFTEnumerator(buf)
        for record in enumerator.enumerate_records():
            slack = record.slack_data()
            padding = "\x00" * (1024 - len(slack))
            sys.stdout.write(padding)
            sys.stdout.write(slack)
def main():
    """
    Inspect a single MFT record, identified either by record number
    (inode) or by file path, and print its INDX information.
    """
    parser = argparse.ArgumentParser(description='Inspect '
                                     'a given MFT file record.')
    parser.add_argument('-a', action="store", metavar="cache_size", type=int,
                        dest="cache_size", default=1024,
                        help="Size of cache.")
    # BUGFIX: no `nargs=1` here.  With nargs=1, argparse stored a
    # one-element list for a user-supplied -p while the default stayed a
    # plain string, so `results.prefix + ...` below raised TypeError.
    parser.add_argument('-p', action="store", metavar="prefix",
                        dest="prefix", default="\\.",
                        help="Prefix paths with `prefix` rather than \\.\\")
    parser.add_argument('-v', action="store_true", dest="verbose",
                        help="Print debugging information")
    parser.add_argument('mft', action="store",
                        help="Path to MFT")
    parser.add_argument('record_or_path', action="store",
                        help="MFT record or file path to inspect")
    results = parser.parse_args()

    if results.verbose:
        logging.basicConfig(level=logging.DEBUG)

    with Mmap(results.mft) as buf:
        record_cache = Cache(results.cache_size)
        path_cache = Cache(results.cache_size)
        enum = MFTEnumerator(buf,
                             record_cache=record_cache,
                             path_cache=path_cache)

        # A numeric argument is an inode/record number; anything else is
        # treated as a file path.
        try:
            record_num = int(results.record_or_path)
            should_use_inode = True
        except ValueError:
            should_use_inode = False

        if should_use_inode:
            record = enum.get_record(record_num)
            path = results.prefix + enum.get_path(record)
            print_indx_info(record, path)
        else:
            path = results.record_or_path
            record = enum.get_record_by_path(path)
            print_indx_info(record, results.prefix + path)
def __init__(self, root, mfttree, buf):
    """
    Initialize the driver state over an already-mapped MFT buffer.

    @param root: the mount root path
    @param mfttree: the parsed MFT tree used for path lookups
    @param buf: the mmapped MFT file contents
    """
    self._root = root
    self._tree = mfttree
    self._buf = buf
    # open file handles, keyed by integer fh --> FH subclass
    self._opened_files = {}
    self._enumerator = MFTEnumerator(buf,
                                     record_cache=Cache(1024),
                                     path_cache=Cache(1024))
def main():
    """
    Parse MFT filesystem structures and print each record with the
    default output format.
    """
    parser = argparse.ArgumentParser(description='Parse MFT '
                                     'filesystem structures.')
    parser.add_argument('-f', action="store", metavar="regex",
                        nargs=1, dest="filter",
                        help="Only consider entries whose path "
                             "matches this regular expression")
    parser.add_argument('-c', action="store", metavar="cache_size", type=int,
                        dest="cache_size", default=1024,
                        help="Size of cache.")
    # BUGFIX: no `nargs=1` here.  With nargs=1, a user-supplied -p was
    # stored as a one-element list while the default stayed a plain string,
    # so output_mft_record() received two different types for the prefix.
    parser.add_argument('-p', action="store", metavar="prefix",
                        dest="prefix", default="\\.",
                        help="Prefix paths with `prefix` rather than \\.\\")
    parser.add_argument('--progress', action="store_true", dest="progress",
                        help="Update a status indicator on STDERR "
                             "if STDOUT is redirected")
    parser.add_argument('-v', action="store_true", dest="verbose",
                        help="Print debugging information")
    parser.add_argument('filename', action="store",
                        help="Input MFT file path")
    results = parser.parse_args()
    # NOTE(review): results.filter and results.progress are parsed but never
    # used in this entry point -- TODO confirm whether filtering/progress
    # support was intended here.

    if results.verbose:
        logging.basicConfig(level=logging.DEBUG)

    with Mmap(results.filename) as buf:
        record_cache = Cache(results.cache_size)
        path_cache = Cache(results.cache_size)
        enum = MFTEnumerator(buf,
                             record_cache=record_cache,
                             path_cache=path_cache)
        for record, record_path in enum.enumerate_paths():
            output_mft_record(enum, record, results.prefix)
def main():
    """
    Parse MFT filesystem structures and emit one line/object per record
    in one of several mutually-exclusive output formats: a Jinja2
    template (--format / --format_file), JSON (--json), DFXML (--dfxml),
    or the default output.
    """
    parser = argparse.ArgumentParser(description='Parse MFT '
                                     'filesystem structures.')
    parser.add_argument('-c', action="store", metavar="cache_size", type=int,
                        dest="cache_size", default=1024,
                        help="Size of cache.")
    # BUGFIX: no `nargs=1` here.  With nargs=1, a user-supplied -p was a
    # one-element list but the default stayed the plain string "\.", so
    # `results.prefix[0]` silently truncated the default prefix to "\".
    parser.add_argument('-p', action="store", metavar="prefix",
                        dest="prefix", default="\\.",
                        help="Prefix paths with `prefix` rather than \\.\\")
    parser.add_argument('-v', action="store_true", dest="verbose",
                        help="Print debugging information")
    parser.add_argument('--progress', action="store_true", dest="progress",
                        help="Update a status indicator on STDERR "
                             "if STDOUT is redirected")
    parser.add_argument('--format', action="store", metavar="format",
                        nargs=1, dest="format",
                        help="Output format specification")
    parser.add_argument('--format_file', action="store", metavar="format_file",
                        nargs=1, dest="format_file",
                        help="File containing output format specification")
    parser.add_argument('--json', action="store_true", dest="json",
                        help="Output in JSON format")
    parser.add_argument('--dfxml', action="store_true", dest="dfxml",
                        help="Output in DFXML format")
    parser.add_argument('-f', action="store", metavar="regex",
                        nargs=1, dest="filter",
                        help="Only consider entries whose path "
                             "matches this regular expression")
    parser.add_argument('filename', action="store",
                        help="Input MFT file path")
    results = parser.parse_args()

    use_default_output = True

    if results.verbose:
        logging.basicConfig(level=logging.DEBUG)

    env = Environment(trim_blocks=True, lstrip_blocks=True)
    env.filters["unixtimestampformat"] = unixtimestampformat

    # The output-mode flags are mutually exclusive: count how many were given.
    flags_count = 0
    if results.format:
        flags_count += 1
        template = env.from_string(results.format[0])
    if results.format_file:
        flags_count += 1
        # BUGFIX: open in text mode -- Jinja2's from_string() expects str,
        # and "rb" handed it bytes.
        with open(results.format_file[0], "r") as f:
            template = env.from_string(f.read())
    if results.json:
        flags_count += 1
    if results.dfxml:
        # BUGFIX: count --dfxml too; previously a lone --dfxml fell through
        # to the default output and then printed an empty DFXML document.
        flags_count += 1

    if flags_count > 1:
        sys.stderr.write(
            "Only one of --format, --format_file, --json, --dfxml may be provided.\n"
        )
        sys.exit(-1)
    elif flags_count == 1:
        use_default_output = False
    else:
        template = get_default_template(env)
        use_default_output = True

    if results.dfxml:
        # Build the DFXML document up front; records are appended per entry.
        dfxml_doc = Objects.DFXMLObject()

    if results.progress:
        progress_cls = ProgressBarProgress
    else:
        progress_cls = NullProgress

    with Mmap(results.filename) as buf:
        record_cache = Cache(results.cache_size)
        path_cache = Cache(results.cache_size)
        enum = MFTEnumerator(buf,
                             record_cache=record_cache,
                             path_cache=path_cache)
        progress = progress_cls(enum.len())
        if use_default_output:
            for record, record_path in enum.enumerate_paths():
                output_mft_record(enum, record, results.prefix)
                progress.set_current(record.inode)
        elif results.json:
            class MFTEncoder(json.JSONEncoder):
                """JSON encoder that handles datetimes and generators."""
                def default(self, obj):
                    if isinstance(obj, datetime.datetime):
                        return obj.isoformat("T") + "Z"
                    elif isinstance(obj, types.GeneratorType):
                        return [o for o in obj]
                    return json.JSONEncoder.default(self, obj)

            # BUGFIX: emit a valid JSON array.  Separators go *between*
            # items; the old code appended "," after every item, leaving a
            # trailing comma before "]".
            print("[")
            is_first = True
            for record, record_path in enum.enumerate_paths():
                if not is_first:
                    print(",")
                is_first = False
                m = make_model(record, record_path)
                print(json.dumps(m, cls=MFTEncoder, indent=2), end="")
                progress.set_current(record.inode)
            print("\n]")
        elif results.dfxml:  # BUGFIX: was `results.dfml` (AttributeError)
            for record, record_path in enum.enumerate_paths():
                dfxml_mft_record(enum, record, results.prefix, dfxml_doc)
                progress.set_current(record.inode)
        else:
            for record, record_path in enum.enumerate_paths():
                sys.stdout.write(
                    template.render(record=make_model(record, record_path),
                                    prefix=results.prefix) + "\n")
                progress.set_current(record.inode)
        progress.set_complete()

    if results.dfxml:
        dfxml_doc.print_dfxml()
class MFTFuseOperations(Operations):
    """
    MFTFuseOperations is a read-only FUSE driver for NTFS MFT files.

    All mutating operations return EROFS; reads are served from the
    parsed MFT records (or from synthetic "meta" special files).
    """
    def __init__(self, root, mfttree, buf):
        self._root = root
        self._tree = mfttree
        self._buf = buf
        self._opened_files = {}  # dict(int --> FH subclass)
        record_cache = Cache(1024)
        path_cache = Cache(1024)
        self._enumerator = MFTEnumerator(buf,
                                         record_cache=record_cache,
                                         path_cache=path_cache)

    # Helpers
    # =======
    def _get_node(self, path):
        """
        _get_node returns the MFTTreeNode associated with a path.

        @type path: str
        @rtype: MFT.MFTTreeNode
        @raises: FuseOSError(errno.ENOENT)
        """
        if path.startswith("/"):
            path = path[1:]
        current_node = self._tree.get_root()
        for component in path.split("/"):
            if component == "":
                # tolerate doubled or trailing slashes
                continue
            try:
                current_node = current_node.get_child_node(component)
            except KeyError:
                raise FuseOSError(errno.ENOENT)
        return current_node

    def _get_record(self, path):
        """
        _get_record returns the MFTRecord associated with a path.

        @type path: str
        @rtype: MFT.MFTRecord
        """
        return self._enumerator.get_record(self._get_node(path).get_record_number())

    # Filesystem methods
    # ==================
    @log
    def getattr(self, path, fh=None):
        """
        Build a stat-like dict for `path`, resolving special "meta" files
        to their synthetic size and regular files to their data size.
        """
        (uid, gid, pid) = fuse_get_context()
        working_path = path
        if is_special_file(path):
            (working_path, special) = explode_special_file(working_path)
        record = self._get_record(working_path)
        if record.is_directory():
            mode = (stat.S_IFDIR | PERMISSION_ALL_READ)
            nlink = 2
        else:
            mode = (stat.S_IFREG | PERMISSION_ALL_READ)
            nlink = 1

        # TODO(wb): fix the duplication of this code with the FH classes
        if is_special_file(path):
            size = 0
            (working_path, special) = explode_special_file(path)
            if special == "meta":
                node = self._get_node(working_path)
                record_buf = self._enumerator.get_record_buf(node.get_record_number())
                size = len(get_meta_for_file(record, working_path))
        else:
            data_attribute = record.data_attribute()
            if data_attribute is not None:
                if data_attribute.non_resident() == 0:
                    # resident data: size is the length of the raw value
                    size = len(data_attribute.value())
                else:
                    size = data_attribute.data_size()
            else:
                # no $DATA attribute: fall back to the filename attribute
                size = record.filename_information().logical_size()

        return {
            "st_atime": unixtimestamp(record.standard_information().accessed_time()),
            "st_ctime": unixtimestamp(record.standard_information().changed_time()),
            #"st_crtime": unixtimestamp(record.standard_information().created_time()),
            "st_mtime": unixtimestamp(record.standard_information().modified_time()),
            "st_size": size,
            "st_uid": uid,
            "st_gid": gid,
            "st_mode": mode,
            "st_nlink": nlink,
        }

    @log
    def readdir(self, path, fh):
        dirents = ['.', '..']
        record = self._get_node(path)
        dirents.extend(map(lambda r: r.get_filename(),
                           record.get_children_nodes()))
        for r in dirents:
            yield r

    @log
    def readlink(self, path):
        return path

    @log
    def statfs(self, path):
        # report an all-zero filesystem; nothing meaningful to expose
        return dict((key, 0) for key in ('f_bavail', 'f_bfree',
                                         'f_blocks', 'f_bsize',
                                         'f_favail', 'f_ffree',
                                         'f_files', 'f_flag',
                                         'f_frsize', 'f_namemax'))

    # Mutating operations: this filesystem is read-only.
    @log
    def chmod(self, path, mode):
        return errno.EROFS

    @log
    def chown(self, path, uid, gid):
        return errno.EROFS

    @log
    def mknod(self, path, mode, dev):
        return errno.EROFS

    @log
    def rmdir(self, path):
        return errno.EROFS

    @log
    def mkdir(self, path, mode):
        return errno.EROFS

    @log
    def unlink(self, path):
        return errno.EROFS

    @log
    def symlink(self, target, name):
        return errno.EROFS

    @log
    def rename(self, old, new):
        return errno.EROFS

    @log
    def link(self, target, name):
        return errno.EROFS

    @log
    def utimens(self, path, times=None):
        return errno.EROFS

    # File methods
    # ============
    def _get_available_fh(self):
        """
        _get_available_fh returns an unused fh
        The caller must be careful to handle race conditions.

        @rtype: int
        """
        # BUGFIX: `range`, not the Python 2-only `xrange`
        for i in range(65534):
            if i not in self._opened_files:
                return i

    @log
    def open(self, path, flags):
        if flags & os.O_WRONLY > 0:
            return errno.EROFS
        if flags & os.O_RDWR > 0:
            return errno.EROFS

        # TODO(wb): race here on fh used/unused
        fh = self._get_available_fh()
        if is_special_file(path):
            (path, special) = explode_special_file(path)
            if special == "meta":
                record = self._get_record(path)
                node = self._get_node(path)
                record_buf = self._enumerator.get_record_buf(node.get_record_number())
                self._opened_files[fh] = MetaFH(fh, record, path, record_buf)
            else:
                raise FuseOSError(errno.ENOENT)
        else:
            self._opened_files[fh] = RegularFH(fh, self._get_record(path))
        return fh

    @log
    def read(self, path, length, offset, fh):
        txt = self._opened_files[fh].get_data().encode("utf-8")
        return txt[offset:offset + length]

    @log
    def flush(self, path, fh):
        return ""

    @log
    def release(self, path, fh):
        del self._opened_files[fh]

    @log
    def create(self, path, mode, fi=None):
        return errno.EROFS

    @log
    def write(self, path, buf, offset, fh):
        return errno.EROFS

    @log
    def truncate(self, path, length, fh=None):
        return errno.EROFS

    @log
    def fsync(self, path, fdatasync, fh):
        return errno.EPERM
def main():
    """
    Parse MFT filesystem structures and emit one line/object per record
    in one of several mutually-exclusive output formats: a Jinja2
    template (--format / --format_file), JSON (--json), DFXML (--dfxml),
    or the default output.
    """
    parser = argparse.ArgumentParser(description='Parse MFT '
                                     'filesystem structures.')
    parser.add_argument('-c', action="store", metavar="cache_size", type=int,
                        dest="cache_size", default=1024,
                        help="Size of cache.")
    # BUGFIX: no `nargs=1` here.  With nargs=1, a user-supplied -p was a
    # one-element list but the default stayed the plain string "\.", so
    # `results.prefix[0]` silently truncated the default prefix to "\".
    parser.add_argument('-p', action="store", metavar="prefix",
                        dest="prefix", default="\\.",
                        help="Prefix paths with `prefix` rather than \\.\\")
    parser.add_argument('-v', action="store_true", dest="verbose",
                        help="Print debugging information")
    parser.add_argument('--progress', action="store_true", dest="progress",
                        help="Update a status indicator on STDERR "
                             "if STDOUT is redirected")
    parser.add_argument('--format', action="store", metavar="format",
                        nargs=1, dest="format",
                        help="Output format specification")
    parser.add_argument('--format_file', action="store", metavar="format_file",
                        nargs=1, dest="format_file",
                        help="File containing output format specification")
    parser.add_argument('--json', action="store_true", dest="json",
                        help="Output in JSON format")
    parser.add_argument('--dfxml', action="store_true", dest="dfxml",
                        help="Output in DFXML format")
    parser.add_argument('-f', action="store", metavar="regex",
                        nargs=1, dest="filter",
                        help="Only consider entries whose path "
                             "matches this regular expression")
    parser.add_argument('filename', action="store",
                        help="Input MFT file path")
    results = parser.parse_args()

    use_default_output = True

    if results.verbose:
        logging.basicConfig(level=logging.DEBUG)

    env = Environment(trim_blocks=True, lstrip_blocks=True)
    env.filters["unixtimestampformat"] = unixtimestampformat

    # The output-mode flags are mutually exclusive: count how many were given.
    flags_count = 0
    if results.format:
        flags_count += 1
        template = env.from_string(results.format[0])
    if results.format_file:
        flags_count += 1
        # BUGFIX: open in text mode -- Jinja2's from_string() expects str,
        # and "rb" handed it bytes.
        with open(results.format_file[0], "r") as f:
            template = env.from_string(f.read())
    if results.json:
        flags_count += 1
    if results.dfxml:
        # BUGFIX: count --dfxml too; previously a lone --dfxml fell through
        # to the default output and then printed an empty DFXML document.
        flags_count += 1

    if flags_count > 1:
        sys.stderr.write(
            "Only one of --format, --format_file, --json, --dfxml may be provided.\n"
        )
        sys.exit(-1)
    elif flags_count == 1:
        use_default_output = False
    else:
        template = get_default_template(env)
        use_default_output = True

    if results.dfxml:
        # Build the DFXML document up front; records are appended per entry.
        dfxml_doc = Objects.DFXMLObject()

    if results.progress:
        progress_cls = ProgressBarProgress
    else:
        progress_cls = NullProgress

    with Mmap(results.filename) as buf:
        record_cache = Cache(results.cache_size)
        path_cache = Cache(results.cache_size)
        enum = MFTEnumerator(buf,
                             record_cache=record_cache,
                             path_cache=path_cache)
        progress = progress_cls(enum.len())
        if use_default_output:
            for record, record_path in enum.enumerate_paths():
                output_mft_record(enum, record, results.prefix)
                progress.set_current(record.inode)
        elif results.json:
            class MFTEncoder(json.JSONEncoder):
                """JSON encoder that handles datetimes and generators."""
                def default(self, obj):
                    if isinstance(obj, datetime.datetime):
                        return obj.isoformat("T") + "Z"
                    elif isinstance(obj, types.GeneratorType):
                        return [o for o in obj]
                    return json.JSONEncoder.default(self, obj)

            # BUGFIX: emit a valid JSON array.  Separators go *between*
            # items; the old code appended "," after every item, leaving a
            # trailing comma before "]".
            print("[")
            is_first = True
            for record, record_path in enum.enumerate_paths():
                if not is_first:
                    print(",")
                is_first = False
                m = make_model(record, record_path)
                print(json.dumps(m, cls=MFTEncoder, indent=2), end="")
                progress.set_current(record.inode)
            print("\n]")
        elif results.dfxml:  # BUGFIX: was `results.dfml` (AttributeError)
            for record, record_path in enum.enumerate_paths():
                dfxml_mft_record(enum, record, results.prefix, dfxml_doc)
                progress.set_current(record.inode)
        else:
            for record, record_path in enum.enumerate_paths():
                sys.stdout.write(
                    template.render(record=make_model(record, record_path),
                                    prefix=results.prefix) + "\n")
                progress.set_current(record.inode)
        progress.set_complete()

    if results.dfxml:
        dfxml_doc.print_dfxml()