def main():
    dobj = Objects.parse(args.in_dfxml)
    assert dobj is not None
    _logger = logging.getLogger(os.path.basename(__file__))
    _logger.debug("dobj.diff_file_ignores = %r." % dobj.diff_file_ignores)
    #Confirm the parsed document records atime and crtime as ignored fields for file-difference annotation.
    assert "atime" in dobj.diff_file_ignores
    assert "crtime" in dobj.diff_file_ignores

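#A minimal sketch of inspecting the same ignore set interactively, assuming the
#dfxml.objects API used above; the function name and input path here are
#hypothetical illustrations, not part of the original check script.
import dfxml.objects as Objects

def list_diff_file_ignores(dfxml_path):
    """Print each file-difference field the document asks diff tooling to ignore."""
    dobj = Objects.parse(dfxml_path)
    for field in sorted(dobj.diff_file_ignores):
        print(field)

#Example (hypothetical path):
#  list_diff_file_ignores("example_diff.dfxml")
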
def main():
    predicates = {
        "all": (lambda x: True),
        "allocated": is_allocated,
        "new": is_new_file,
        "mod": is_mod_file,
        "newormod": is_new_or_mod_file
    }
    if args.predicate is None:
        args.predicate = "new"
    if args.predicate not in predicates:
        raise ValueError(
          "--predicate must be from this list: %r. Received: %r." %
          (predicates.keys(), args.predicate))
    if args.xml:
        d = Objects.parse(args.xml)
    else:
        d = Objects.parse(args.disk_image)
    write_sector_hashes_to_db(args.disk_image, d, is_allocated, args.db_output, args.pad)

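#A minimal standalone illustration of the predicate-dispatch pattern above
#(lookup table, default choice, then validation), using hypothetical stand-in
#predicates rather than the file-status predicates of the original script.
def _demo_select_predicate(name=None):
    predicates = {
        "all": (lambda x: True),
        "even": (lambda x: x % 2 == 0),  #Hypothetical stand-in predicate.
    }
    if name is None:
        name = "all"  #Default choice, mirroring the "new" default above.
    if name not in predicates:
        raise ValueError("Unknown predicate %r; choose from %r." % (name, sorted(predicates.keys())))
    return predicates[name]

#Example: _demo_select_predicate("even")(4) evaluates to True.
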
def file_round_trip_dfxmlobject(dobj):
    """
    Serializes the DFXMLObject (dobj) to a temporary file.  Parses that
    temporary file into a new DFXMLObject.

    For debugging review, the temporary file is left in place, and it is the
    caller's responsibility to delete this file (if OS cleanup is not expected
    to automatically handle it).

    Returns pair:
    * Path of temporary file.
    * DFXMLObject, reconstituted from parsing that temporary file.
    """
    tmp_filename = None
    dobj_reconst = None
    try:
        with tempfile.NamedTemporaryFile(mode="w", suffix=".dfxml", delete=False) as out_fh:
            tmp_filename = out_fh.name
            dobj.print_dfxml(output_fh=out_fh)
        confirm_schema_conformance(tmp_filename)
        dobj_reconst = Objects.parse(tmp_filename)
    except:
        _logger.debug("tmp_filename = %r." % tmp_filename)
        raise
    return (tmp_filename, dobj_reconst)

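#A minimal sketch of exercising file_round_trip_dfxmlobject, assuming the
#dfxml.objects API used above and the surrounding module (which supplies
#confirm_schema_conformance); the demo function name is hypothetical.  The
#cleanup step is the caller's responsibility, as the docstring notes.
import os

import dfxml.objects as Objects

def _demo_round_trip():
    dobj = Objects.DFXMLObject(version="1.2.0")
    (tmp_filename, dobj_reconst) = file_round_trip_dfxmlobject(dobj)
    try:
        assert dobj_reconst is not None
    finally:
        os.remove(tmp_filename)  #Caller deletes the temporary file.
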
def main():
    global args
    dfxmlobject = Objects.parse(args.infile)
    report(dfxmlobject, sort_by=args.sort_by, summary=args.summary)

def main():
    dobj = Objects.parse(args.in_dfxml)
    assert dobj.program == args.expected_program
    assert dobj.program_version == args.expected_program_version

logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)

tempxml1_path = __file__ + "-test1.xml"
tempxml2_path = __file__ + "-test2.xml"

_logger = logging.getLogger(os.path.basename(__file__))

_logger.info("Building iteration: 0.")
d_in_memory = make_differential_dfxml.make_differential_dfxml(
  os.path.join(SAMPLES_DIR, "difference_test_0.xml"),
  os.path.join(SAMPLES_DIR, "difference_test_1.xml"))

#Write and read the DFXML stream a couple times to ensure consistent serialization and deserialization.
with open(tempxml1_path, "w") as fh:
    d_in_memory.print_dfxml(output_fh=fh)

_logger.info("Building iteration: 1.")
d_from_disk = Objects.parse(tempxml1_path)
with open(tempxml2_path, "w") as fh:
    d_from_disk.print_dfxml(output_fh=fh)

_logger.info("Building iteration: 2.")
d_from_disk_again = Objects.parse(tempxml2_path)

for (iteration, d) in enumerate((d_in_memory, d_from_disk, d_from_disk_again)):
    _logger.info("Checking iteration: %d." % iteration)
    for o in d:
        #_logger.debug(repr(o))
        if isinstance(o, Objects.FileObject):
            if "deleted" in o.annos:
                _name = o.original_fileobject.filename
            else:
                _name = o.filename

__version__="0.1.0" import sys import logging import os sys.path.append( os.path.join(os.path.dirname(__file__), "../..")) import dfxml import dfxml.objects as Objects if __name__=="__main__": logging.basicConfig(level=logging.DEBUG) _logger = logging.getLogger(os.path.basename(__file__)) dobj = Objects.parse(sys.argv[1]) _logger.debug("dobj.creator_libraries = %r." % dobj.creator_libraries) assert Objects.LibraryObject("libfoo", "1.2.3") in dobj.creator_libraries assert Objects.LibraryObject("libbaz", "4.5") in dobj.build_libraries found = None for library in dobj.creator_libraries: if library.relaxed_eq(Objects.LibraryObject("libfoo")): found = True break assert found
__version__ = "0.1.0" import sys import logging import os sys.path.append(os.path.join(os.path.dirname(__file__), "../..")) import dfxml import dfxml.objects as Objects if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) _logger = logging.getLogger(os.path.basename(__file__)) dobj = Objects.parse(sys.argv[1]) _logger.debug("dobj.creator_libraries = %r." % dobj.creator_libraries) assert Objects.LibraryObject("libfoo", "1.2.3") in dobj.creator_libraries assert Objects.LibraryObject("libbaz", "4.5") in dobj.build_libraries found = None for library in dobj.creator_libraries: if library.relaxed_eq(Objects.LibraryObject("libfoo")): found = True break assert found
def main():
    d = Objects.DFXMLObject(version="1.2.0")
    d.program = sys.argv[0]
    d.program_version = __version__
    d.command_line = " ".join(sys.argv)
    d.dc["type"] = "File system walk concatenation"
    d.add_creator_library("Python", ".".join(map(str, sys.version_info[0:3])))  #A bit of a bend, but gets the major version information out.
    d.add_creator_library("Objects.py", Objects.__version__)
    d.add_creator_library("dfxml.py", Objects.dfxml.__version__)

    _offsets_and_pxml_paths = []
    for (lxfno, lxf) in enumerate(args.labeled_xml_file):
        lxf_parts = lxf.split(":")
        if len(lxf_parts) != 2 or not lxf_parts[0].isdigit():
            raise ValueError(
              "Malformed argument in labeled_xml_file.  Expecting space-delimited list of '<number>:<path>'.  This entry doesn't work: %r." % lxf)
        offset = int(lxf_parts[0])
        path = lxf_parts[1]
        _offsets_and_pxml_paths.append((offset, path))
    offsets_and_pxml_paths = sorted(_offsets_and_pxml_paths)

    for (pxml_path_index, (offset, pxml_path)) in enumerate(offsets_and_pxml_paths):
        _logger.debug("Running on path %r." % pxml_path)
        pdo = Objects.parse(pxml_path)

        building_volume = None
        #Fetch or build volume we'll append.
        if len(pdo.volumes) > 1:
            raise ValueError(
              "An input DFXML document has multiple volumes; this script assumes each input document only has one.  The document here has %d: %r." % (len(pdo.volumes), pxml_path))
        elif len(pdo.volumes) == 0:
            v = Objects.VolumeObject()
            building_volume = True
        else:
            v = pdo.volumes[0]
            building_volume = False

        v.partition_offset = offset

        #Accumulate namespaces.
        for (prefix, url) in pdo.iter_namespaces():
            d.add_namespace(prefix, url)

        for obj in pdo:
            #Force-update image offsets in byte runs.
            for brs_prop in ["data_brs", "name_brs", "inode_brs"]:
                if hasattr(obj, brs_prop):
                    brs = getattr(obj, brs_prop)
                    if brs is None:
                        continue
                    for br in brs:
                        if br.fs_offset is not None:
                            br.img_offset = br.fs_offset + offset

            #For files, set partition identifier and attach to partition.
            if isinstance(obj, Objects.FileObject):
                obj.partition = pxml_path_index + 1
                if building_volume:
                    v.append(obj)

        #Collect the constructed and/or updated volume.
        d.append(v)

    d.print_dfxml()

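#A minimal sketch of the argument parsing this script implies, inferred from
#the error message above ("space-delimited list of '<number>:<path>'"); the
#--debug flag is a hypothetical addition, and only labeled_xml_file is taken
#from the original code.
import argparse
import logging
import os
import sys

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")  #Hypothetical flag.
    parser.add_argument(
      "labeled_xml_file",
      nargs="+",
      help="Space-delimited list of '<partition offset in bytes>:<DFXML path>'.")
    args = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
    _logger = logging.getLogger(os.path.basename(__file__))
    main()
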