def read_ftr(filename, delete_keys=None):
    """Parse a Condor flight-track (.ftr) file.

    :param filename: path to the .ftr file
    :param delete_keys: optional iterable of header keys to remove from the
        returned dict (e.g. the raw ``unknown*`` padding blobs)
    :return: dict of header fields plus ``'record'``, a pandas DataFrame of
        track samples indexed by Time in seconds since the first sample
    """
    offset_size = 1859  # file offset of the record-count field
    ftr_record_struct = Struct(
        "record",
        LFloat32("Datetime"),
        LFloat32("PosX"),
        LFloat32("PosY"),
        LFloat32("Altitude"),
        LFloat32("Qx"),
        LFloat32("Qy"),
        LFloat32("Qz"),
        LFloat32("Qw"),
        LFloat32("DistUnused"),
    )
    ftr_struct = Struct(
        "ftr_header",
        String("filetype", 4),
        #Bytes("unknown00", 136),
        #String("FirstName", 17),
        Bytes("unknown00", 135),
        String("FirstName", 17),
        String("FamilyName", 17),
        String("Country", 17),
        String("RN", 8),
        String("CN", 4),
        Bytes("unknown02", 5),
        String("Landscape", 17),
        # Pad out to offset_size; the named fields above total 224 bytes.
        Bytes("unknown03", offset_size - 4 - 135 - 17 - 17 - 17 - 17 - 17),
        ULInt32("length"),  # uint32 (4 bytes) @ 1859
        Array(lambda ctx: ctx.length, ftr_record_struct),
    )
    with open(filename, "rb") as fd:
        dat = ftr_struct.parse_stream(fd)
    df_ftr = pd.DataFrame(dat['record'])
    # Datetime is stored in hours; convert to seconds.
    df_ftr['Time'] = df_ftr['Datetime'] * 3600.0
    # BUG FIX: DataFrame.irow() was deprecated in pandas 0.17 and removed in
    # 0.19 — use positional .iloc[0] instead.
    df_ftr['Time'] = df_ftr['Time'] - df_ftr['Time'].iloc[0]
    df_ftr['Datetime'] = pd.to_datetime(df_ftr['Time'], unit='s')
    df_ftr['Deltatime'] = df_ftr['Datetime'] - df_ftr['Datetime'].shift(1)
    df_ftr['Deltatime'] = df_ftr['Deltatime'] / np.timedelta64(1, 's')  # Deltatime as seconds
    #df_ftr['Vz'] = ((df_ftr['Altitude'] - df_ftr['Altitude'].shift(1)).fillna(0) / df_ftr['Deltatime']).fillna(0)
    #df_ftr = df_ftr.set_index('Datetime', verify_integrity=True)
    df_ftr = df_ftr.set_index('Time', verify_integrity=True)  # Time (s)
    dat['record'] = df_ftr
    if delete_keys is not None:
        for key in delete_keys:
            if key in dat:  # membership test on the dict directly
                del dat[key]
    # Strings are length-prefixed Pascal style: first byte is the length,
    # remainder is NUL-padded text.
    # NOTE(review): ord(dat[key][0]) assumes str (Python 2 / construct 2.5
    # String) — on Python 3 bytes this would need dat[key][0] directly.
    for key in ['FirstName', 'FamilyName', 'Country', 'Landscape', 'RN', 'CN']:
        length = ord(dat[key][0])
        s = dat[key][1:length + 1]
        dat[key] = s.replace('\x00', '')
        assert len(dat[key]) == length, \
            "Length error with %s len=%d should be %d" % (s, len(s), length)
    return dat
def aranges(self):
    """Parse and dump the DWARF ``.debug_aranges`` section (address range tables)."""
    if not '.debug_aranges' in self.sections:
        return  # section absent in this binary — nothing to do
    section = self.sections['.debug_aranges']
    image = io.BytesIO(section.image)
    length = len(section.image)
    print("ARANGES")
    # Per-set header; Tell fields record stream offsets before/after parsing
    # so the consumed size can be computed.
    Header = Struct(
        "start" / Tell,
        "unit_length" / self.u32,
        "version" / self.u16,
        "debug_info_offset" / self.u32,
        "address_size" / self.u8,
        "segment_size" / self.u8,
        "stop" / Tell,
    )
    # One (address, length) descriptor; a (0, 0) pair terminates the set.
    Entry = Struct(
        "start" / Tell,
        "length" / self.u32,
        "address" / self.u32,
        "stop" / Tell,
    )
    offset = 0  # running count of bytes consumed from the section
    finished = False
    while True:
        if finished:
            break
        header = Header.parse_stream(image)
        offset += header.stop - header.start
        print(offset, header)
        if offset >= length - 1:
            break  # reached end of section data
        while True:
            entry = Entry.parse_stream(image)
            offset += entry.stop - entry.start
            print("ENTRY", entry)
            if entry.address == 0 and entry.length == 0:
                break  # (0, 0) terminator — next aranges set follows
            # NOTE(review): this compares an absolute stream position
            # (entry.stop) against unit_length, a size — looks suspicious
            # for files with more than one set; confirm against the spec.
            if entry.stop >= header.unit_length:
                finished = True
                break
def pubnames(self):
    """Parse and dump the DWARF ``.debug_pubnames`` section (name lookup table)."""
    if not '.debug_pubnames' in self.sections:
        return  # section absent — nothing to do
    section = self.sections['.debug_pubnames']
    image = io.BytesIO(section.image)
    length = len(section.image)
    # Per-set header; Tell fields bracket the parse so consumed size is known.
    Header = Struct(
        "start" / Tell,
        "unit_length" / self.u32,
        "version" / self.u16,
        "debug_info_offset" / self.u32,
        "debug_info_length" / self.u32,
        "stop" / Tell,
    )
    # One (DIE offset, name) pair; offset 0 terminates the set.
    Entry = Struct(
        "start" / Tell,
        "offset" / self.u32,
        "name" / CString(encoding="ascii"),
        "stop" / Tell,
    )
    offset = 0  # running count of bytes consumed from the section
    finished = False
    while True:
        if finished:
            break
        header = Header.parse_stream(image)
        offset += header.stop - header.start
        print(offset, header)
        if offset >= length - 1:
            break  # end of section data
        while True:
            entry = Entry.parse_stream(image)
            offset += entry.stop - entry.start
            print("ENTRY", entry)
            if entry.offset == 0:
                break  # zero offset terminates this pubnames set
            # NOTE(review): compares an absolute stream position against
            # unit_length (a size) — likely only correct for the first set.
            if entry.stop >= header.unit_length:
                finished = True
                break
def __init__(self, stream: typ.BinaryIO, predataregion: Struct):
    """
    :param stream: filedescriptor of a FAT filesystem
    :param predataregion: Struct that represents the PreDataRegion of the
                          concrete FAT filesystem
    """
    self.stream = stream
    # Position of the filesystem start within the stream, captured before
    # any parsing consumes bytes.
    self.offset = stream.tell()
    # Keep the construct definition itself (not just the parse result) so
    # the pre-data region can be re-parsed or rebuilt later.
    self.predataregion_definition = predataregion
    self.pre = predataregion.parse_stream(stream)
    # parse_stream advanced the cursor past the pre-data region, so the
    # stream now points at the first byte of the data region.
    self.start_dataregion = stream.tell()
    # Filled in later once the FAT variant (12/16/32) is determined.
    self._fat_entry = None
    self.entries_per_fat = None
    self.fat_type = None
def do_mac_info(self):
    """Parse and dump the DWARF ``.debug_macinfo`` section (macro information)."""
    if not '.debug_macinfo' in self.sections:
        return  # section absent — nothing to do
    section = self.sections['.debug_macinfo']
    image = io.BytesIO(section.image)
    length = len(section.image)
    # One macinfo entry: a ULEB128 type code followed by type-specific
    # parameters selected by the Switch below.
    MacInfo = Struct(
        "start" / Tell,
        "macType" / Enum(
            ULEB,
            default=ident,  # pass unknown codes through unchanged
            DW_MACINFO_define=0x01,
            DW_MACINFO_undef=0x02,
            DW_MACINFO_start_file=0x03,
            DW_MACINFO_end_file=0x04,
            DW_MACINFO_vendor_ext=0xff,
        ),
        "parameters" / Switch(
            this.macType,
            {
                "DW_MACINFO_define": Struct("lineNumber" / ULEB, "value" / CString(encoding="ascii")),
                "DW_MACINFO_undef": Struct("lineNumber" / ULEB, "value" / CString(encoding="ascii")),
                "DW_MACINFO_start_file": Struct("lineNumber" / ULEB, "fileNumber" / ULEB),
                "DW_MACINFO_end_file": Pass,
                "DW_MACINFO_vendor_ext": Struct("constant" / ULEB, "value" / CString(encoding="ascii")),
            },
            default=Pass,  # unknown type: consume nothing extra
        ),
        "stop" / Tell,
    )
    offset = 0  # bytes consumed from the section so far
    # do-while shape: always parses at least one entry, then stops once the
    # running offset reaches the end of the section image.
    while True:
        macInfo = MacInfo.parse_stream(image)
        offset += macInfo.stop - macInfo.start
        print(offset, macInfo)
        if offset >= length - 1:
            break
def process_attributes(self, image, readers, size, abbrevOffset):
    """Walk the DIE tree of one compile unit, printing each abbreviation
    and its attribute values (readelf --debug-dump=info style).

    :param image: stream positioned just past the CU header
    :param readers: mapping of DW_FORM code -> construct parser
    :param size: total byte size of the compile unit
    :param abbrevOffset: key prefix into self.abbreviations for this CU
    """
    # An abbreviation code is a single ULEB128; Tell fields measure its size.
    Attribute = Struct(
        "start" / Tell,
        "attr" / ULEB,
        "stop" / Tell,
        "size" / Computed(this.stop - this.start),
    )
    level = 0        # current DIE nesting depth
    offset = 0       # bytes consumed within this CU
    lastAttr = False # set once offset reaches the CU size; exit on next null DIE
    while True:
        start = image.tell()
        attr = Attribute.parse_stream(image)
        offset += attr.size
        abbr = self.abbreviations.get((abbrevOffset, attr.attr))
        if attr.attr == 0 or not abbr.tag:
            # Abbrev code 0 is a null DIE: it closes the current nesting level.
            print("<{}><{:02x}>: {}".format(level, start, "Abbrev Number: 0"))
            level -= 1
            if lastAttr:
                break
        else:
            print("<{}><{:02x}>: Abbrev Number: {} ({})".format(
                level, start, attr.attr, abbr.tag.name))
            for enc, form in abbr.attrs:
                reader = readers.get(form)
                start = image.tell()  # NOTE: shadows the DIE start above
                # DW_FORM_flag_present encodes no data; its presence means 1.
                if form != constants.DW_FORM_flag_present:
                    value = reader.parse_stream(image)
                else:
                    value = 1
                startValue = "<{:x}>".format(start)
                print(" {:7} {:20}: {}".format(startValue, enc.name, value))
                stop = image.tell()
                offset += (stop - start)
                if offset >= size - 1:
                    lastAttr = True  # 872b
            # offset += (attr.stop - attr.start)
            pos = image.tell()
            # A DIE with children raises the nesting level for what follows.
            if hasattr(abbr, "children") and abbr.children:
                level += 1
def process_compile_unit(self, image):
    """Parse one DWARF compile-unit header and print a readelf-style summary.

    :param image: binary stream positioned at the start of a CU header;
        the stream is left positioned just past the header
    :return: the parsed CompileUnit container (start/stop offsets, size,
        unit_length, version, debug_abbrev_offset, address_size)
    """
    CompileUnit = Struct(
        "start" / Tell,
        "unit_length" / self.u32,
        "version" / self.u16,
        "debug_abbrev_offset" / self.u32,
        "address_size" / self.u8,
        "stop" / Tell,
        "size" / Computed(this.stop - this.start),
    )
    # (removed unused startPos/stopPos locals — the Tell fields already
    # capture the stream positions)
    cu = CompileUnit.parse_stream(image)
    print(" Compilation Unit @ offset 0x{:x}:".format(cu.start))
    print(" Length: 0x{:x} (32-bit)".format(cu.unit_length))
    # BUG FIX: the format string had no placeholder ("Version: 2") and
    # always printed 2 regardless of the parsed version.
    print(" Version: {}".format(cu.version))
    print(" Abbrev Offset: 0x{:x}".format(cu.debug_abbrev_offset))
    print(" Pointer Size: {}".format(cu.address_size))
    return cu
def dispatch(stream, protodef):
    """Read one opcode from *stream* and route its payload.

    Known opcodes (present in *protodef*) are parsed with their registered
    construct and handed to their handler. Unknown opcodes have their
    payload consumed — by fixed length from packet_lengths, or by a
    length-prefixed fallback — and are logged as unimplemented.
    """
    opcode = ULInt16("opcode").parse_stream(stream)
    try:
        handler, parser = protodef[opcode]
    except KeyError:
        pass  # fall through to the unknown-opcode path below
    else:
        handler(parser.parse_stream(stream))
        return
    payload = ''
    pktlen = packet_lengths.get(opcode, -1)
    if pktlen > 0:
        # Fixed-size packet: length includes the 2 opcode bytes already read.
        payload = stream.read(pktlen - 2)
    elif pktlen == -1:
        # Variable-size packet: a uint16 total length follows the opcode;
        # skip the remaining length - 4 bytes (opcode + length fields).
        fallback = Struct(
            "data",
            ULInt16("length"),
            MetaField("ignore", lambda ctx: ctx["length"] - 4))
        payload = fallback.parse_stream(stream)
    netlog.warning('UNIMPLEMENTED opcode={:04x} data={}'.format(
        opcode, payload))
def dispatch(stream, protodef):
    """Read one opcode from *stream* and route its payload.

    Known opcodes (keys of *protodef*) are parsed with their registered
    construct and passed to their handler; unknown ones have their payload
    consumed and are logged as unimplemented.
    """
    opcode = ULInt16("opcode").parse_stream(stream)
    if opcode in protodef:
        func, macro = protodef[opcode]
        data = macro.parse_stream(stream)
        func(data)
    else:
        data = ''
        pktlen = packet_lengths.get(opcode, -1)
        if pktlen > 0:
            # Fixed-size packet: pktlen includes the 2 opcode bytes already read.
            data = stream.read(pktlen - 2)
        elif pktlen == -1:
            # Variable-size packet: uint16 total length follows the opcode;
            # skip length - 4 bytes (opcode + length fields).
            datadef = Struct("data",
                             ULInt16("length"),
                             MetaField("ignore", lambda ctx: ctx["length"] - 4))
            data = datadef.parse_stream(stream)
        netlog.warning('UNIMPLEMENTED opcode={:04x} data={}'.format(
            opcode, data))
help='Verbose debugging output')
# NOTE(review): this chunk begins mid-way through a parser.add_argument()
# call whose opening lines are outside this view.
parser.add_argument('input', metavar='FILE', type=argparse.FileType('rb'),
                    help='Input file')
args = parser.parse_args()

# Log level follows the --verbose flag.
if args.verbose:
    logging.basicConfig(level=logging.DEBUG)
else:
    logging.basicConfig(level=logging.INFO)

with args.input as fd:
    # ident
    logging.info(hgIdent.parse_stream(fd))
    fd.seek(0x1400, os.SEEK_SET)
    logging.info(urladerlink.parse_stream(fd))
    fd.seek(pagesize)
    a = anchor.parse_stream(fd)
    # task root (level 1)
    fd.seek(a.taskRoot.value * pagesize)
    taskRoot = blockTable.parse_stream(fd)
    # task dataspaces(?) (level 2)
    for taskid, taskref in enumerate(taskRoot):
        # 0xffffff appears to mark an unused/invalid block reference — skip it.
        if taskref.value == 0xffffff:
            continue
        # NOTE(review): this call continues past the end of this chunk.
        logging.info(
# NOTE(review): these two statements are the tail of a plotting helper whose
# definition starts above this chunk.
pyplot.ylabel(y_label)
pyplot.show()


def parse_traj(fh):
    """Parse a single-subfile SPC file into an Nx2 array of (x, y) points.

    :param fh: file object (reopened in binary mode if needed) or path-like
        wrapper exposing .name
    :return: numpy array of shape (npoints, 2) for the first subfile
    """
    # Parser requires binary file mode
    if hasattr(fh, 'mode') and 'b' not in fh.mode:
        fh = open(fh.name, 'rb')
    data = SPCFile.parse_stream(fh)
    h = data.Header.header
    assert h.nsub == 1, 'parse_traj only supports 1 SPC subfile'
    # Only one subfile is allowed, so return on the first iteration.
    for x, y in _convert_arrays(data):
        return np.transpose((x, y))


if __name__ == '__main__':
    from optparse import OptionParser
    op = OptionParser()
    # dest='_print' avoids clashing with the builtin name 'print'.
    op.add_option('--print', action='store_true', dest='_print')
    op.add_option('--plot', action='store_true')
    opts, args = op.parse_args()
    if len(args) != 1:
        op.error('Supply exactly one filename argument.')
    if not (opts._print or opts.plot):
        op.error('Must supply either --plot or --print')
    data = SPCFile.parse_stream(open(args[0], 'rb'))
    if opts._print:
        prettyprint(data)
    if opts.plot:
        plot(data)
# NOTE(review): this chunk begins inside a larger construct definition
# (the perf.data file header) whose opening lines are outside this view.
# The bit flags below mirror the perf_header feature bitmap.
BitStruct(None,
          Flag("nrcpus"),
          Flag("arch"),
          Flag("version"),
          Flag("osrelease"),
          Flag("hostname"),
          Flag("build_id"),
          Flag("tracing_data"),
          Flag("reserved"),
          Flag("branch_stack"),
          Flag("numa_topology"),
          Flag("cpu_topology"),
          Flag("event_desc"),
          Flag("cmdline"),
          Flag("total_mem"),
          Flag("cpuid"),
          Flag("cpudesc"),
          Padding(6),
          Flag("group_desc"),
          Flag("pmu_mappings"),
          Padding(256 - 3 * 8))),
# Feature sections live immediately after the data section.
Pointer(lambda ctx: ctx.data.offset + ctx.data.size, perf_features()),
Padding(3 * 8))


def get_events(h):
    """Return the parsed event records from a parsed perf.data header *h*."""
    return h.data.perf_data


if __name__ == '__main__':
    import argparse
    args = argparse.ArgumentParser()
    args.add_argument('file', help='perf.data to read', default='perf.data',
                      nargs='?')
    p = args.parse_args()
    with open(p.file, "rb") as f:
        h = perf_file.parse_stream(f)
    print(h)
    #print(get_events(h))
def read_ftr(filename, delete_keys=None):
    """Parse a Condor flight-track (.ftr) file.

    :param filename: path to the .ftr file
    :param delete_keys: optional iterable of header keys to remove from the
        returned dict (e.g. the raw ``unknown*`` padding blobs)
    :return: dict of header fields plus ``'record'``, a pandas DataFrame of
        track samples indexed by Time in seconds since the first sample
    """
    offset_size = 1859  # file offset of the record-count field
    ftr_record_struct = Struct(
        "record",
        LFloat32("Datetime"),
        LFloat32("PosX"),
        LFloat32("PosY"),
        LFloat32("Altitude"),
        LFloat32("Qx"),
        LFloat32("Qy"),
        LFloat32("Qz"),
        LFloat32("Qw"),
        LFloat32("DistUnused"),
    )
    ftr_struct = Struct(
        "ftr_header",
        String("filetype", 4),
        #Bytes("unknown00", 136),
        #String("FirstName", 17),
        Bytes("unknown00", 135),
        String("FirstName", 17),
        String("FamilyName", 17),
        String("Country", 17),
        String("RN", 8),
        String("CN", 4),
        Bytes("unknown02", 5),
        String("Landscape", 17),
        # Pad out to offset_size; the named fields above total 224 bytes.
        Bytes("unknown03", offset_size - 4 - 135 - 17 - 17 - 17 - 17 - 17),
        ULInt32("length"),  # uint32 (4 bytes) @ 1859
        Array(lambda ctx: ctx.length, ftr_record_struct),
    )
    with open(filename, "rb") as fd:
        dat = ftr_struct.parse_stream(fd)
    df_ftr = pd.DataFrame(dat['record'])
    # Datetime is stored in hours; convert to seconds.
    df_ftr['Time'] = df_ftr['Datetime'] * 3600.0
    # BUG FIX: DataFrame.irow() was deprecated in pandas 0.17 and removed in
    # 0.19 — use positional .iloc[0] instead.
    df_ftr['Time'] = df_ftr['Time'] - df_ftr['Time'].iloc[0]
    df_ftr['Datetime'] = pd.to_datetime(df_ftr['Time'], unit='s')
    df_ftr['Deltatime'] = df_ftr['Datetime'] - df_ftr['Datetime'].shift(1)
    df_ftr['Deltatime'] = df_ftr['Deltatime'] / np.timedelta64(
        1, 's')  # Deltatime as seconds
    #df_ftr['Vz'] = ((df_ftr['Altitude'] - df_ftr['Altitude'].shift(1)).fillna(0) / df_ftr['Deltatime']).fillna(0)
    #df_ftr = df_ftr.set_index('Datetime', verify_integrity=True)
    df_ftr = df_ftr.set_index('Time', verify_integrity=True)  # Time (s)
    dat['record'] = df_ftr
    if delete_keys is not None:
        for key in delete_keys:
            if key in dat:  # membership test on the dict directly
                del dat[key]
    # Strings are length-prefixed Pascal style: first byte is the length,
    # remainder is NUL-padded text.
    # NOTE(review): ord(dat[key][0]) assumes str (Python 2 / construct 2.5
    # String) — on Python 3 bytes this would need dat[key][0] directly.
    for key in ['FirstName', 'FamilyName', 'Country', 'Landscape', 'RN', 'CN']:
        length = ord(dat[key][0])
        s = dat[key][1:length + 1]
        dat[key] = s.replace('\x00', '')
        assert len(
            dat[key]) == length, "Length error with %s len=%d should be %d" % (
                s, len(s), length)
    return dat
def do_dbg_info(self):
    """Parse and dump the DWARF ``.debug_info`` section: each compile-unit
    header followed by its DIEs' attribute values (readelf style)."""
    if not '.debug_info' in self.sections:
        return  # section absent — nothing to do
    section = self.sections['.debug_info']
    image = io.BytesIO(section.image)
    length = len(section.image)
    # Compile-unit header; Tell fields bracket the parse so the consumed
    # size can be computed.
    DbgInfo = Struct(
        "start" / Tell,
        "unit_length" / self.u32,
        "version" / self.u16,
        "debug_abbrev_offset" / self.u32,
        "address_size" / self.u8,
        "stop" / Tell,
        "size" / Computed(this.stop - this.start),
    )
    # A DIE's abbreviation code: one ULEB128.
    Attribute = Struct(
        "start" / Tell,
        "attr" / ULEB,
        "stop" / Tell,
        "size" / Computed(this.stop - this.start),
    )
    offset = 0  # bytes consumed across all compile units
    finished = False  # NOTE(review): set nowhere below — appears vestigial
    while True:
        pos = image.tell()
        if pos >= length - 1:
            break  # end of section
        dbgInfo = DbgInfo.parse_stream(image)  # CU
        print(" Compilation Unit @ offset 0x{:x}:".format(dbgInfo.start))
        print(" Start: {:08x}".format(dbgInfo.start))
        print(" Stop: {:08x}".format(dbgInfo.stop))
        print(" Size: {:08x}".format(dbgInfo.size))
        print(" Length: 0x{:x} (32-bit)".format(dbgInfo.unit_length))
        # NOTE(review): format string has no placeholder — always prints 2.
        print(" Version: 2".format(dbgInfo.version))
        print(" Abbrev Offset: 0x{:x}".format(dbgInfo.debug_abbrev_offset))
        print(" Pointer Size: {}".format(dbgInfo.address_size))
        pos = 0
        offset += dbgInfo.stop - dbgInfo.start
        # Attribute-form readers depend on the CU's pointer size.
        formReaders = self.get_form_readers(dbgInfo.address_size)
        print("Pos, Length", pos, dbgInfo.unit_length)
        if pos >= dbgInfo.unit_length:
            break  # NOTE(review): pos was just zeroed, so this never fires
        while True:
            start = image.tell()
            attr = Attribute.parse_stream(image)
            abbr = self.abbreviations.get(
                (dbgInfo.debug_abbrev_offset, attr.attr))
            if not abbr:
                # Abbrev code 0: null DIE (closes a nesting level).
                print("<{:02x}>: {}".format(start, "Abbrev Number: 0"))
            else:
                #print("<{:02x}>: Abbrev Number: {} ({})".format(start, enc.name, value))
                for enc, form in abbr.attrs:
                    reader = formReaders.get(form)
                    # NOTE(review): a None reader is reported but still
                    # dereferenced below — would raise AttributeError.
                    if reader is None:
                        print("*EF", enc, form, start, attr, abbr)
                    start = image.tell()
                    value = reader.parse_stream(image)
                    print(" <{:02x}> {}: {}".format(start, enc, value))
                    stop = image.tell()
            offset += (attr.stop - attr.start)
            pos = image.tell()
            #if pos >= 0x8727:
            #    print("chk")
            # NOTE(review): compares an absolute stream position against
            # unit_length (a size) — only valid for the first CU; the
            # seek(+1) presumably skips a trailing pad/terminator byte.
            if pos >= dbgInfo.unit_length:
                image.seek(image.tell() + 1)
                break
ULInt32("unk_zero"), ULInt32("off_burnhdr3"), Value("len_code", lambda ctx: ctx.off_burnhdr3 - ctx.off_code), #Padding(472), Pointer(lambda ctx: ctx.off_burnhdr1, burnhdr1), Pointer(lambda ctx: ctx.off_burnhdr2, burnhdr2), Pointer(lambda ctx: ctx.off_code, code), Pointer(lambda ctx: ctx.off_burnhdr3, burnhdr3), ) if __name__ == '__main__': import pprint import argparse parser = argparse.ArgumentParser( description='Parse a SUNP BURN FILE (SPHOST.BRN)') parser.add_argument("filename", type=str, help="input file, e.g. SPHOST.BRN") parser.add_argument('--debug', action='store_true', help='Wrap parser in Construct.Debugger()') args = parser.parse_args() if args.debug: sunp_file = c.Debugger(sunp_file) with open(args.filename, 'rb') as fp: sunp = sunp_file.parse_stream(fp) pprint.pprint(sunp)