import sys
import mmap
import contextlib

from Evtx.Evtx import FileHeader
from Evtx.Views import evtx_template_readable_view


def main():
    # Open in binary mode: mmap requires a binary file object.
    with open(sys.argv[1], 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            for i, chunk in enumerate(fh.chunks()):
                for template in chunk.templates().values():
                    print("Template {%s} at chunk %d, offset %s" %
                          (template.guid(), i, hex(template.absolute_offset(0x0))))
                    print(evtx_template_readable_view(template))

import sys
import mmap
import contextlib

from Evtx.Evtx import FileHeader


def main():
    with open(sys.argv[1], 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            print('<?xml version="1.0" encoding="utf-8" standalone="yes" ?>')
            print("<Events>")
            for chunk in fh.chunks():
                for record in chunk.records():
                    # xml() returns a str under Python 3, so no encode() is needed.
                    print(record.root().xml([]))
            print("</Events>")

from Evtx.Evtx import FileHeader


def read_evtx_records(evtx_file):
    """
    Read an evtx file, extract its records, and return them as a generator.

    :param evtx_file: file object for the input evtx file, opened in binary mode
    :return: generator of records
    """
    evtx_file.seek(0)
    buf = evtx_file.read()
    fh = FileHeader(buf, 0x0)
    for chunk in fh.chunks():
        for record in chunk.records():
            yield record

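# A minimal usage sketch for read_evtx_records(), not part of the original
# code; the path "security.evtx" is illustrative.
with open("security.evtx", "rb") as f:
    for record in read_evtx_records(f):
        print(record.record_num())
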
import sys
import mmap
import contextlib

from Evtx.Evtx import FileHeader


def main():
    with open(sys.argv[1], 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            for i, chunk in enumerate(fh.chunks()):
                for template in chunk.templates().values():
                    print("Template {%s} at chunk %d, offset %s" %
                          (template.guid(), i, hex(template.absolute_offset(0x0))))
                    # strip leading newline...
                    print(template.template_format()[1:])
                    print("")

import sys
import mmap
import contextlib

from Evtx.Evtx import FileHeader
from Evtx.Views import evtx_record_xml_view


def main():
    with open(sys.argv[1], 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            for chunk in fh.chunks():
                for record in chunk.records():
                    try:
                        evtx_record_xml_view(record).encode("utf-8")
                    except Exception as e:
                        # Report the first record that fails to render, then
                        # render it once more so the failure surfaces with a
                        # full traceback, and stop.
                        print(str(e))
                        print(repr(e))
                        print(evtx_record_xml_view(record).encode("utf-8"))
                        return

def getEventCount(self, filename):
    # Pick the platform-specific log directory; log_dir_linux and
    # log_dir_windows are defined elsewhere in this module.
    if os.name == 'posix':
        log_dir = log_dir_linux
    else:
        log_dir = log_dir_windows
    with open(os.path.join(log_dir, filename), 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            # Count every record across all chunks.
            count = 0
            for chunk in fh.chunks():
                for record in chunk.records():
                    count += 1
            # print(count, "events found")
            return count

import sys
import mmap
import argparse
import contextlib

from Evtx.Evtx import FileHeader


def main():
    parser = argparse.ArgumentParser(
        description="Dump the slack space of an EVTX file.")
    parser.add_argument("evtx", type=str,
                        help="Path to the Windows EVTX event log file")
    args = parser.parse_args()

    with open(args.evtx, 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            for chunk in fh.chunks():
                chunk_start = chunk.offset()
                last_allocated_offset = chunk_start
                for record in chunk.records():
                    last_allocated_offset = record.offset() + record.size()
                # The slack is everything between the end of the last
                # allocated record and the end of the 64 KB (0x10000) chunk.
                # Write raw bytes via sys.stdout.buffer under Python 3.
                sys.stdout.buffer.write(buf[last_allocated_offset:chunk_start + 0x10000])

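# A follow-on sketch, not from the original scripts: scan a dumped slack blob
# for the EVTX record signature to locate remnants of overwritten records.
# Only the magic b'\x2a\x2a\x00\x00' is taken from the recovery script at the
# end of this section; the helper name find_record_magics is hypothetical.
def find_record_magics(slack):
    # Return the offset of every candidate record signature in the slack.
    positions = []
    pos = slack.find(b'\x2a\x2a\x00\x00')
    while pos != -1:
        positions.append(pos)
        pos = slack.find(b'\x2a\x2a\x00\x00', pos + 1)
    return positions
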
import sys
import mmap
import contextlib

from Evtx.Evtx import FileHeader


def main():
    with open(sys.argv[1], 'rb') as f:
        with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
            fh = FileHeader(buf, 0x0)
            print("Information from file header:")
            print("Format version  : %d.%d" % (fh.major_version(), fh.minor_version()))
            print("Flags           : 0x%08x" % (fh.flags()))

            dirty_string = "clean"
            if fh.is_dirty():
                dirty_string = "dirty"
            print("File is         : %s" % (dirty_string))

            full_string = "no"
            if fh.is_full():
                full_string = "yes"
            print("Log is full     : %s" % (full_string))

            print("Current chunk   : %d of %d" % (fh.current_chunk_number(),
                                                  fh.chunk_count()))
            print("Oldest chunk    : %d" % (fh.oldest_chunk() + 1))
            print("Next record#    : %d" % (fh.next_record_number()))

            checksum_string = "fail"
            if fh.calculate_checksum() == fh.checksum():
                checksum_string = "pass"
            print("Check sum       : %s" % (checksum_string))
            print("")

            if fh.is_dirty():
                # The header of a dirty log is stale; recover the likely
                # current values from the chunks that still verify.
                chunk_count = sum([1 for c in fh.chunks() if c.verify()])
                last_chunk = None
                for chunk in fh.chunks():
                    if not chunk.verify():
                        continue
                    last_chunk = chunk
                next_record_num = last_chunk.log_last_record_number() + 1
                print("Suspected updated header values (header is dirty):")
                print("Current chunk   : %d of %d" % (chunk_count, chunk_count))
                print("Next record#    : %d" % (next_record_num))
                print("")

            print("Information from chunks:")
            print("  Chunk file (first/last)      log (first/last)      Header Data")
            print("- ----- --------------------- --------------------- ------ ------")
            for i, chunk in enumerate(fh.chunks(), 1):
                note_string = " "
                if i == fh.current_chunk_number() + 1:
                    note_string = "*"
                elif i == fh.oldest_chunk() + 1:
                    note_string = ">"

                if not chunk.check_magic():
                    # An all-zero magic means the chunk was never written.
                    if chunk.magic() == b"\x00\x00\x00\x00\x00\x00\x00\x00":
                        print("%s %4d [EMPTY]" % (note_string, i))
                    else:
                        print("%s %4d [INVALID]" % (note_string, i))
                    continue

                header_checksum_string = "fail"
                if chunk.calculate_header_checksum() == chunk.header_checksum():
                    header_checksum_string = "pass"

                data_checksum_string = "fail"
                if chunk.calculate_data_checksum() == chunk.data_checksum():
                    data_checksum_string = "pass"

                print("%s %4d %8d %8d %8d %8d %s %s" %
                      (note_string, i,
                       chunk.file_first_record_number(),
                       chunk.file_last_record_number(),
                       chunk.log_first_record_number(),
                       chunk.log_last_record_number(),
                       header_checksum_string,
                       data_checksum_string))

from struct import pack, unpack
from mmap import mmap, ACCESS_READ
from contextlib import closing

from Evtx.Evtx import FileHeader
from Evtx.Views import evtx_record_xml_view


def main():
    # parse_cli_arguments(), temp_evtx_copy(), and read_evtx_records() are
    # helpers defined elsewhere in this script.
    args = parse_cli_arguments()

    print()
    print("Reading records from {}...".format(args.input_path.name))

    # try to create a temporary copy of the evtx file
    corrected_file = temp_evtx_copy(args.input_path)
    if not corrected_file:
        print("Could not create temporary file!")
        exit(1)

    offsets_restored_records = []
    # Loop through the corrected file until no changes are needed anymore
    file_ready = False
    while not file_ready:
        # No changes made (since last loop)
        corrected = False
        # loop over the records inside the evtx file
        for record in read_evtx_records(corrected_file):
            # Retrieve the complete contents of this record.
            record_data = record.data()
            # Deleted records are not actually deleted, but are still
            # present within the content of the preceding record. This is
            # easily spotted by checking for the magic value of a record
            # within the record data. We start searching at the end of the
            # header.
            start_pos = 24
            while not corrected:
                magic_pos = record_data.find(b'\x2a\x2a\x00\x00',
                                             start_pos,
                                             record.size() - 28)
                if magic_pos == -1:
                    # No magic found
                    break
                # Magic found; this could be a record. If this is in fact a
                # deleted record, a copy of the record size of the preceding
                # record is present just before the magic of the deleted
                # record, and the size of the deleted record sits just after
                # the magic. The position where we found the magic is also
                # the old size of the record.
                cur_size = record.size()
                (old_size,) = unpack('I', record_data[magic_pos - 4:magic_pos])
                (del_size,) = unpack('I', record_data[magic_pos + 4:magic_pos + 8])
                if magic_pos == old_size and \
                        (del_size + old_size) == record.size():
                    print('Found a deleted record within record number {}'
                          ' at offset 0x{:04X}'.format(record.record_num(),
                                                       magic_pos))
                    # Flag
                    corrected = True
                    # Restore original size
                    corrected_file.seek(record.offset() + 4)
                    corrected_file.write(pack('I', old_size))
                    # Restore deleted size
                    corrected_file.seek(record.offset() + cur_size - 4)
                    corrected_file.write(pack('I', del_size))
                    # Remember that we restored a record at this offset
                    offsets_restored_records.append(old_size + record.offset())
                # Find next
                start_pos = magic_pos + 1
        if not corrected:
            file_ready = True
        # Else: if a correction was made, we need to go back and start over,
        # as more records may have been deleted.

    # Dump the corrected file
    if args.output_path:
        corrected_file.seek(0)
        with closing(mmap(corrected_file.fileno(), 0, access=ACCESS_READ)) as buf:
            args.output_path.write(buf)

    # Print all records for which we found a deleted record
    if offsets_restored_records:
        corrected_file.seek(0)
        with closing(mmap(corrected_file.fileno(), 0, access=ACCESS_READ)) as buf:
            # We simply open the corrected file and enumerate the records
            # until we find a record which starts at an offset that we
            # remembered.
            fh = FileHeader(buf, 0x0)
            for chunk in fh.chunks():
                for record in chunk.records():
                    if record.offset() in offsets_restored_records:
                        xml = evtx_record_xml_view(record)
                        # Export record as XML
                        if args.export_path:
                            args.export_path.write(xml)
    print()

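# A self-contained sketch of the detection condition used above, run against a
# synthetic buffer. The layout (magic at the old size boundary, size fields on
# either side of it) follows the comments in the script; every numeric value
# here is fabricated for illustration.
from struct import pack, unpack

old_size, del_size = 0x40, 0x30
buf = bytearray(old_size + del_size)
buf[0:4] = b'\x2a\x2a\x00\x00'                        # magic of the live record
buf[4:8] = pack('I', old_size + del_size)             # inflated size after deletion
buf[old_size - 4:old_size] = pack('I', old_size)      # copy of the record's old size
buf[old_size:old_size + 4] = b'\x2a\x2a\x00\x00'      # magic of the "deleted" record
buf[old_size + 4:old_size + 8] = pack('I', del_size)  # size of the deleted record

record_size = len(buf)
magic_pos = buf.find(b'\x2a\x2a\x00\x00', 24, record_size - 28)
(o,) = unpack('I', buf[magic_pos - 4:magic_pos])
(d,) = unpack('I', buf[magic_pos + 4:magic_pos + 8])
assert magic_pos == o and o + d == record_size        # the deleted-record signature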