def iterfiles(parser, paths):
    """Parse each file in *paths* with *parser*, yielding (filename, instance).

    Every file yields exactly one (filename, parser-instance) pair, even when
    loading raises; timing and failure details are emitted through ``log``.
    """
    parser_name = '%s.%s' % (parser.__module__, parser.__name__)
    for i, filename in enumerate(paths):
        source = ptypes.file(filename, mode='r')
        p = parser(source=source)
        log(': %d : %s : %s : parsing...', i + 1, parser_name, filename)
        t1 = time.time()
        try:
            # .l forces ptypes to load the instance from its source
            p = p.l
        except Exception:
            t2 = time.time()
            exception = traceback.format_exc()
            log(': %d : %s : %s : failure while parsing : %f : %s', i + 1, parser_name, filename, t2 - t1, exception)
            # still yield the (partially constructed) instance for inspection
            yield filename, p
            continue
        t2 = time.time()
        if p.initializedQ():
            log(': %d : %s : %s : completed : %f', i + 1, parser_name, filename, t2 - t1)
        else:
            log(': %d : %s : %s : completed partially: %f', i + 1, parser_name, filename, t2 - t1)
        yield filename, p
    return
def iterfiles(parser, paths):
    """Parse each file in *paths* with *parser*, yielding (filename, instance).

    Every file yields exactly one (filename, parser-instance) pair, even when
    loading raises; timing and failure details are emitted through ``log``.
    """
    parser_name = '%s.%s'% (parser.__module__, parser.__name__)
    for i,filename in enumerate(paths):
        source = ptypes.file(filename,mode='r')
        p = parser(source=source)
        log(': %d : %s : %s : parsing...',i+1,parser_name,filename)
        t1 = time.time()
        try:
            p=p.l
        # FIX: a bare ``except:`` also swallowed SystemExit/KeyboardInterrupt,
        # making the loop impossible to interrupt cleanly; catch Exception.
        except Exception:
            t2 = time.time()
            exception = traceback.format_exc()
            log(': %d : %s : %s : failure while parsing : %f : %s',i+1,parser_name,filename,t2-t1,exception)
            # still yield the (partially constructed) instance for inspection
            yield filename,p
            continue
        t2 = time.time()
        if p.initialized:
            log(': %d : %s : %s : completed : %f',i+1,parser_name,filename,t2-t1)
        else:
            log(': %d : %s : %s : completed partially: %f',i+1,parser_name,filename,t2-t1)
        yield filename,p
    return
# Fragment of a member-enumeration generator: the enclosing ``def`` begins
# before this chunk, so only the loop body is visible here.
        memblocksize = memberheader.alloc().size()
        for index,o in enumerate(offsets):
            o = int(o)+memblocksize
            p.setoffset(o)
            # skip members whose first dword matches this sentinel
            # NOTE(review): meaning of '\x00\x00\xff\xff' unverified -- confirm
            if p.load().serialize() == '\x00\x00\xff\xff':
                continue
            # yield self.new(Object.File, __name__='Member[%d]'% index, offset=o)
            yield self.new(Object.File, offset=o)
        return

if __name__ == '__main__':
    import Archive
    from ptypes import *
    source = ptypes.file('~/python26/libs/python26.lib')
    print 'Reading .lib header'
    # Archive.File = ptypes.debugrecurse(Archive.File)
    self = Archive.File()
    # self.source = provider.file('../../obj/test.lib')
    self.source = ptypes.file('~/python26/libs/python26.lib')
    self.load()
    # print self['SymbolNames']['Header']
    # print self['SymbolNames']['Member']
    # print self['MemberNames']['Header']
    # print self['MemberNames']['Member']
    # print self['LongNames']['Header']
    # print self['LongNames']['Member']
    # print '-'*79
# Tail of a preceding RealMedia structure's field list (cut at chunk start).
        (UINT32, 'packet_count_for_this_packet'),
    ]

###
class File(parray.terminated):
    # a RealMedia file: headers are read until num_headers+1 records exist
    _object_ = RealMedia_Header
    def isTerminator(self, value):
        # the first element's object advertises how many headers follow
        l = len(self.value)
        if l > 0:
            return l > self.value[0]['object']['num_headers'].int() + 1
        return False

if __name__ == '__main__':
    import sys
    import ptypes,rmff
    ptypes.setsource( ptypes.file(sys.argv[1], mode='rb') )
    self = rmff.File()
    z = self.l
    print len(self.value)
    # offset = 0x16f
    # print self.at(offset)
    # typespecific = self[3]['object']['type_specific_data']
    mdpr = [x for x in self.traverse(filter=lambda x: type(x) == rmff.RealMedia_Header) if x['object_id'].serialize() == 'MDPR']
    for x in mdpr:
        print x.__name__, x['object']['mime_type']
# Methods of an enclosing packet-list type (its class statement precedes
# this chunk), then the pcap File structure and a load script (Python 2).
    def summary(self):
        # human-readable element count for display
        l = len(self)
        if l == 1:
            return '..1 packet..'
        return '..%d packets..'% l

    def within(self, start, end):
        # yield packets whose header timestamp d satisfies start >= d > end
        # NOTE(review): ordering suggests a descending range -- confirm with caller
        for n in self:
            d = n['header'].now()
            if start >= d > end:
                yield n
            continue
        return

class File(pstruct.type):
    # a pcap capture: global header followed by the packet list
    _fields_ = [
        (pcap_hdr_t, 'header'),
        (List, 'packets'),
    ]

if __name__ == '__main__':
    import ptypes,libpcap,osi
    s = ptypes.file('~/work/nezzwerk/pcap/win-2008.updates.restart.pcap')
    a = libpcap.File(source=s)
    b = a.l
    c = b['packets']
    packet = osi.default
    z = [x['data'].cast(packet) for x in c]
# SWF load script (Python 3): parse the file given on the command line and
# bind the interesting sub-structures to module-level names for inspection.
if __name__ == '__main__':
    try:
        filename = sys.argv[1]
    # FIX: a missing argv[1] raises IndexError, not ValueError, so help()
    # was unreachable and the script died with a raw traceback instead.
    except IndexError:
        help()
    if not os.path.exists(filename):
        raise OSError(
            "The specified file ({:s}) does not exist.".format(filename))

    print('loading', filename)
    clock.start()
    myfile = vector.swf.File(source=ptypes.file(filename))
    myfile = myfile.l
    clock.stop()

    header = myfile['header']
    data = myfile['data'].d.l
    frameinfo = data['frameinfo']
    tags = data['tags']
    z = myfile
    print('loaded 0x%x tags' % (len(tags)))
    # for tag in myfile['data']['tags']:
    #     print(repr(tag))
# Tail of a pointer-constructor (cut at chunk start), then the rest of the
# enclosing TIFF File structure and a Python-3 inspection script.
        return dyn.clone(pointer, recurse=dict(byteorder=bo))

    def __data(self):
        # everything after header+pointer; sized only for bounded sources
        res = self['header'].li.size() + self['pointer'].li.size()
        if isinstance(self.source, ptypes.prov.bounded):
            return dyn.block(self.source.size() - res)
        return ptype.undefined

    _fields_ = [
        (Header, 'header'),
        (__pointer, 'pointer'),
        (__data, 'data'),
    ]

if __name__ == '__main__':
    import ptypes, image.tiff as tiff
    ptypes.setsource(ptypes.file('sample.tif'))
    a = tiff.File()
    a = a.l
    for n in a['pointer'].d.l.iterate():
        print(n.l)
        if not isinstance(n['value'], ptypes.ptype.undefined):
            print(n['value'])
            continue
        assert not isinstance(n['pointer'], ptypes.ptype.undefined)
        for v in n['pointer'].d.l:
            print(v)
        continue
# Tail of extension_payload's dynamic-clone expression (cut at chunk start),
# then the remainder of the enclosing ADTS element and the File entry point.
            _object_=extension_payload, attrs={'cnt': count})

    class __count(pbinary.struct):
        # 4-bit count, with an 8-bit escape field present only when cnt == 15
        _fields_ = [
            (4, 'cnt'),
            (lambda s: [0, 8][s['cnt'] == 15], 'esc_count'),
        ]

    _fields_ = [
        (__count, 'count'),
        (extension_payload, 'payload'),
    ]

@Element.define
class end_element(pbinary.array):
    # element type 7: terminator, carries no bits
    type = 7
    _object_ = length = 0

class File(adts_sequence):
    pass

if __name__ == '__main__':
    import ptypes
    ptypes.setsource(ptypes.file('poc.aac'))
    z = File()
    z = z.l
(BYTE, 'skip'), (BYTE, 'count'), (lambda s: dyn.block((int(s['count'].li)&0x80) and 1 or int(s['count'].li)&0x7f ), 'data') #XXX ] class Line(pstruct.type): _fields_ = [ (BYTE, 'numpackets'), (lambda s: dyn.array(LinePacket, int(s['numpackets'].li)), 'packets') ] class DELTA_FLI(Chunk): type = 12 _fields_ = [ (WORD, 'skip'), (WORD, 'numlines'), (lambda s: dyn.array(Line, int(s['numlines'].li)), 'lines') ] chunkLookup = dict([(cls.type, cls) for cls in globals().values() if type(cls) is type and cls is not Chunk and issubclass(cls, Chunk)]) if __name__ == '__main__': import ptypes,flic reload(flic) ptypes.setsource( ptypes.file('./test.fli') ) # ptypes.setsource( ptypes.file('./janmar90.flc') ) z = ptypes.debugrecurse(flic.File)() z = z.l print z
# Python-2 variant of the TIFF File tail: __data sizing plus the load script.
    def __data(self):
        # everything after header+pointer; sized only for file-backed sources
        res = self['header'].li.size() + self['pointer'].li.size()
        if isinstance(self.source, ptypes.prov.filebase):
            return dyn.block(self.source.size() - res)
        return ptype.undefined

    _fields_ = [
        (Header, 'header'),
        (__pointer, 'pointer'),
        (__data, 'data'),
    ]

if __name__ == '__main__':
    import ptypes,tiff
    reload(tiff)
    ptypes.setsource( ptypes.file('sample.tif') )
    a = tiff.File()
    a = a.l
    for n in a['pointer'].d.l.iterate():
        print n.l
        if not isinstance(n['value'], ptypes.ptype.undefined):
            print n['value']
            continue
        assert not isinstance(n['pointer'], ptypes.ptype.undefined)
        for v in n['pointer'].d.l:
            print v
        continue
# Methods/fields of an enclosing ADTS extension element (its class statement
# precedes this chunk), then the terminator element and entry point.
    def extension_payload(self):
        c = self['count']
        count = c['cnt'] + c['esc_count'] - 1
        # NOTE(review): count/8 truncates under Python 2; under Python 3 this
        # would yield a float length -- confirm intended division semantics
        # return dyn.clone(pbinary.array, length=count/8, _object_=extension_payload, cnt=count)
        return dyn.clone(pbinary.array, length=count/8, _object_=extension_payload, attrs={'cnt':count})

    class __count(pbinary.struct):
        # 4-bit count, with an 8-bit escape field present only when cnt == 15
        _fields_ = [
            (4, 'cnt'),
            (lambda s: [0,8][s['cnt'] == 15], 'esc_count'),
        ]

    _fields_ = [
        (__count, 'count'),
        (extension_payload, 'payload'),
    ]

@Element.define
class end_element(pbinary.array):
    # element type 7: terminator, carries no bits
    type = 7
    _object_ = length = 0

class File(adts_sequence):
    pass

if __name__ == '__main__':
    import ptypes
    ptypes.setsource( ptypes.file('poc.aac') )
    z = File()
    z = z.l
# PNG chunk definitions registered against the Chunk cache, plus a loader.
@Chunk.define
class IHDR(pstruct.type):
    # image header: dimensions and encoding parameters
    type = 'IHDR'
    _fields_ = [
        (pint.uint32_t, 'Width'),
        (pint.uint32_t, 'Height'),
        (pint.uint8_t, 'Bit depth'),
        (pint.uint8_t, 'Colour type'),
        (pint.uint8_t, 'Compression method'),
        (pint.uint8_t, 'Filter method'),
        (pint.uint8_t, 'Interlace method'),
    ]

@Chunk.define
class PLTE(parray.block):
    # palette: packed r,g,b triples
    type = 'PLTE'
    class entry(pstruct.type):
        _fields_ = [(pint.uint8_t,x) for x in 'rgb']
    _object_ = entry

@Chunk.define
class IEND(ptype.type):
    # end-of-image marker: empty body
    type = 'IEND'

if __name__ == '__main__':
    import ptypes,png
    ptypes.setsource(ptypes.file('Chimera_Laboratory.png'))
    a = png.File()
    a = a.l
# Tail of the table-entry definition (cut at chunk start), then the TTF
# directory fields and a Python-2 inspection script.
        (dyn.pointer(__table), 'offset'),
        (uint32, 'length'),
    ]

    _fields_ = [
        (Fixed, 'version'),
        (uint16, 'numTables'),
        (uint16, 'searchRange'),
        (uint16, 'entrySelector'),
        (uint16, 'rangeShift'),
        (lambda s: dyn.array(s.Entry, s['numTables'].li.int()), 'tables'),
    ]

if __name__ == '__main__':
    import ttf,ptypes
    reload(ttf)
    ptypes.setsource( ptypes.file('./cour.ttf', 'rb') )
    #t = dyn.block(ptypes.ptype.type.source.size())
    #a = t()
    #a = a.l
    b = ttf.File()
    b = b.l
    print '\n'.join(map(repr,((i,x['tag'].summary()) for i,x in enumerate(b['tables']))))
    # the 'and False' guard keeps this hexdump block permanently disabled
    if 'tables' and False:
        print b['tables'][0]['offset'].d.l.hexdump()
        print b['tables'][1]['offset'].d.l.hexdump()
        print b['tables'][8]['offset'].d.l.hexdump()
        print b['tables'][9]['offset'].d.l.hexdump()
        print b['tables'][10]['offset'].d.l
# Tail of the IHDR field list (cut at chunk start), then the remaining PNG
# chunk definitions and the ChunkType back-fill (Python 2: iteritems).
        (pint.uint8_t, 'Colour type'),
        (pint.uint8_t, 'Compression method'),
        (pint.uint8_t, 'Filter method'),
        (pint.uint8_t, 'Interlace method'),
    ]

@Chunk.define
class PLTE(parray.block):
    # palette: packed r,g,b triples
    type = 'PLTE'
    class Entry(pstruct.type):
        _fields_ = [(pint.uint8_t, x) for x in 'rgb']
    _object_ = Entry

@Chunk.define
class IEND(ptype.type):
    # end-of-image marker: empty body
    type = 'IEND'

# rebuild the enumeration from every chunk registered in the cache
ChunkType._values_[:] = [(t.__name__, intofdata(key)) for key, t in Chunk.cache.iteritems()]

if __name__ == '__main__':
    import ptypes, png
    ptypes.setsource(ptypes.file('Chimera_Laboratory.png'))
    a = png.File()
    a = a.l
pdu = property(fget=lambda s: s['data']['data']) ### entry point class Stream(parray.infinite): _object_ = TPKT File = Stream if __name__ == '__main__': import ptypes, analyze reload(analyze) ptypes.setbyteorder(ptypes.config.byteorder.littleendian) # ptypes.setsource(ptypes.file('./termdd_1.dat')) ptypes.setsource(ptypes.file('./blah.dat')) from analyze import * z = analyze.Stream() z = z.l # for x in z: # print x if False: a = TPKT() a = a.l print a['data'] b = TPDU(offset=a.getoffset() + a.size()) b = b.l
# Tail of the within() generator (cut at chunk start), then the pcap File
# structure and a Python-2 load script.
            d = n['header'].now()
            if start >= d > end:
                yield n
            continue
        return

class File(pstruct.type):
    def __packets(self):
        # propagate the header's attributes (e.g. byteorder) to the packets
        self.attributes.update(self['header'].attributes)
        return List

    def blocksize(self):
        # consume the whole file when the source is file-backed
        if isinstance(self.source, ptypes.provider.filebase):
            return self.source.size()
        return sys.maxint    # Python 2 only; py3 would need sys.maxsize

    _fields_ = [
        (pcap_hdr_t, 'header'),
        (__packets, 'packets'),
    ]

if __name__ == '__main__':
    import ptypes,libpcap,osi
    s = ptypes.file('~/work/nezzwerk/pcap/win-2008.updates.restart.pcap')
    a = libpcap.File(source=s)
    b = a.l
    c = b['packets']
    packet = osi.default
    z = [x['data'].cast(packet) for x in c]
# Fragment of a JPEG decode experiment (Python 2): scales a quantization
# table, then pokes at marker segments from a ``lookup`` built elsewhere.
table = iter(quantizationTable)
for y in range(8):
    for x in range(8):
        # zig-zag/scale factors are defined before this chunk -- not visible here
        res.append( table.next() * scalefactor[y] * scalefactor[x] )
scaledQuantizationTable = res

### decode_huffman ->
### decode AC coefficient
### decode DC coefficient

## process dht table
self = lookup['DHT']['table'][3]
print repr(self)

### process scan data
self = lookup['SOS']
print repr(self)
print self['component'][0]

self = lookup['SOF']
self = lookup['SOS']

if __name__ == '__main__':
    import sys
    import ptypes,jpeg
    ptypes.setsource( ptypes.file(sys.argv[1]) )
    z = jpeg.File()
    z = z.l
# CAB file structure plus a Python-3 inspection script.
class File(pstruct.type):
    _fields_ = [
        (CFHEADER, 'header'),
        (lambda s: dyn.array(CFFOLDER, s['header'].li['cFolders'].int()), 'folders'),
        # (lambda s: dyn.array(CFFILE, s['header']['cFiles'].int()), 'files'),
        # (lambda s: dyn.block(s['header']['cbCabinet'].int() - s['header'].size()-s['folders'].size()-s['files'].size()), 'data'),
        # (dyn.block(s['header']['cbCabinet'].int() - s['header'].size()-s['folders'].size()-s['files'].size()), 'data'),
    ]

if __name__ == '__main__':
    import sys, ptypes, archive.cab as cab
    ptypes.setsource(ptypes.file('~/shit/test/Windows6.1-KB2705219-x86.cab'))
    a = cab.File()
    a = a.l
    print(a['header']['cbCabinet'].int())
    print(a['header']['cbCabinet'])
    print(a['folders'][0]['typeCompress'].summary())
    print(a['folders'][0]['coffCabStart'].d.l)
    b = a['header']['coffFiles'].d.l
    for x in b:
        print(x['uoffFolderStart'])
    print(b[1])
    print(b[1]['uoffFolderStart'].d.l.hexdump())
# BER element entry point (big-endian) plus a tag-encoding scratch test.
class File(Element):
    byteorder = ptypes.config.byteorder.bigendian
    attributes = {'byteorder': byteorder}

# add an alias for exported objects
protocol = Protocol
packet = Packet

if __name__ == '__main__':
    import ptypes, ber
    import ptypes.bitmap as bitmap
    reload(ber)
    ptypes.setsource(ptypes.file('./test.3', 'rb'))
    a = ber.Element
    a = a()
    a = a.l

    def test_tag():
        # NOTE(review): the first assignment is dead -- immediately overwritten
        res = bitmap.new(0x1e, 5)
        res = bitmap.zero
        res = bitmap.push(res, (0x1f, 5))
        res = bitmap.push(res, (0x1, 1))
        res = bitmap.push(res, (0x10, 7))
        res = bitmap.push(res, (0x1, 0))
        res = bitmap.push(res, (0x0, 7))
        x = pbinary.new(ber.Tag, source=ptypes.prov.string(bitmap.data(res)))
n, start = cls.times.pop() t = stop - start print message % (n, t) return t if __name__ == '__main__': try: filename = sys.argv[1] except ValueError: help() print 'loading', filename clock.start() myfile = swf.File(source=ptypes.file(filename)) myfile = myfile.l clock.stop() header = myfile['header'] data = myfile['data'].d.l frameinfo = data['frameinfo'] tags = data['tags'] z = myfile print 'loaded 0x%x tags' % (len(tags)) # for tag in myfile['data']['tags']: # print repr(tag)
### class packet(pbinary.struct): _fields_ = [ (32, 'code'), (lambda s: layer.lookup(s['code']), 'data'), ] class stream(pbinary.terminatedarray): _object_ = packet def isTerminator(self, value): return type(value) == end_code class stream(pbinary.array): _object_ = packet length = 20 if __name__ == '__main__': import ptypes, mpeg # ptypes.setsource( ptypes.file('./poc-mpeg.stream') ) ptypes.setsource(ptypes.file('./poc.mov')) reload(mpeg) a = mpeg.stream(offset=0x3ba, length=20) print a.l
# TTF directory fields (black-formatted Python-2 variant) and inspection script.
    _fields_ = [
        (Fixed, "version"),
        (uint16, "numTables"),
        (uint16, "searchRange"),
        (uint16, "entrySelector"),
        (uint16, "rangeShift"),
        (lambda s: dyn.array(s.Entry, s["numTables"].li.num()), "tables"),
    ]

if __name__ == "__main__":
    import ttf, ptypes
    reload(ttf)
    ptypes.setsource(ptypes.file("./cour.ttf", "rb"))
    # t = dyn.block(ptypes.ptype.type.source.size())
    # a = t()
    # a = a.l
    b = ttf.File()
    b = b.l
    print "\n".join(map(repr, ((i, x["tag"].summary()) for i, x in enumerate(b["tables"]))))
    # the 'and False' guard keeps this hexdump block permanently disabled
    if "tables" and False:
        print b["tables"][0]["offset"].d.l.hexdump()
        print b["tables"][1]["offset"].d.l.hexdump()
        print b["tables"][8]["offset"].d.l.hexdump()
        print b["tables"][9]["offset"].d.l.hexdump()
        print b["tables"][10]["offset"].d.l
# Tail of the Directory field list (cut at chunk start), then the TIFF header.
        (lambda s: dyn.array(s.Entry, int(s['count'].li)), 'entry'),
        (dyn.pointer(Directory,type=pint.uint32_t), 'next')
    ]

class Header(pstruct.type):
    def __directory(self):
        # choose the directory-pointer flavour from the byte-order signature
        signature = self['signature'].li.serialize()
        if signature == '\x4d\x4d\x00\x2a':    # bigendian
            return dyn.pointer(Directory)
        if signature == '\x49\x49\x2a\x00':    # little-endian
            pass
        # XXX: I haven't encountered this yet
        raise NotImplementedError(signature)

    _fields_ = [
        # (pint.uint16_t, 'byteorder'),
        # (pint.uint16_t, 'id'),
        (pint.uint32_t, 'signature'),    # ('\x49\x49\x2a\x00', '\x4d\x4d\x00\x2a')
        (dyn.pointer(Directory,type=pint.uint32_t), 'directory'),
    ]

class File(Header):
    pass

if __name__ == '__main__':
    import ptypes,tiff
    ptypes.setsource( ptypes.file('./0.tif') )
    a = tiff.File()
    a = a.l
# if it's compressed then use the 'cdata' structure if int( self['header'].li['Signature'][0]) == ord('C'): #length = self.source.size() - self['header'].size() length = min(self['header']['FileLength'].num(),self.source.size()) - self['header'].size() return dyn.clone(self.cdata, _value_=dyn.block(length)) return Data _fields_ = [ (Header, 'header'), (__data, 'data') ] if __name__ == '__main__': import sys import ptypes,__init__ as swf ptypes.setsource(ptypes.file('./test.swf')) z = File # z = ptypes.debugrecurse(z) z = z() z = z.l for x in z['data']['tags']: print '-'*32 print x a = z['data']['tags'][0] print a.hexdump() print a.li.hexdump() print repr(a.l['Header'].serialize()) correct='\x44\x11\x08\x00\x00\x00'
# Tail of the table-entry definition (cut at chunk start), the TTF directory
# fields, and a Python-3 inspection script.
        (uint32, 'length'),
    ]

    _fields_ = [
        (Fixed, 'version'),
        (uint16, 'numTables'),
        (uint16, 'searchRange'),
        (uint16, 'entrySelector'),
        (uint16, 'rangeShift'),
        (lambda s: dyn.array(s.Entry, s['numTables'].li.int()), 'tables'),
    ]

if __name__ == '__main__':
    import ptypes, vector.ttf as ttf
    ptypes.setsource(ptypes.file('./cour.ttf', 'rb'))
    #t = dyn.block(ptypes.ptype.type.source.size())
    #a = t()
    #a = a.l
    b = ttf.File()
    b = b.l
    print('\n'.join( map(repr, ((i, x['tag'].summary()) for i, x in enumerate(b['tables'])))))
    # the 'and False' guard keeps this hexdump block permanently disabled
    if 'tables' and False:
        print(b['tables'][0]['offset'].d.l.hexdump())
        print(b['tables'][1]['offset'].d.l.hexdump())
        print(b['tables'][8]['offset'].d.l.hexdump())
stop = time.time() n,start = cls.times.pop() t = stop - start print message% (n,t) return t if __name__ == '__main__': try: filename = sys.argv[1] except ValueError: help() print 'loading',filename clock.start() myfile = swf.File(source=ptypes.file(filename)) myfile = myfile.l clock.stop() header = myfile['header'] data = myfile['data'].d.l frameinfo = data['frameinfo'] tags = data['tags'] z = myfile print 'loaded 0x%x tags'%( len(tags) ) # for tag in myfile['data']['tags']: # print repr(tag)
sig = header['Signature'].str() # if it's compressed then use the 'zlib' structure t = EncodedDataType.withdefault(sig, type=sig) length = min(header['FileLength'].int(), self.source.size()) - header.size() return dyn.clone(t, _value_=dyn.clone(t._value_, length=length)) _fields_ = [ (Header, 'header'), (__data, 'data') ] if __name__ == '__main__': import sys import ptypes,__init__ as swf ptypes.setsource(ptypes.file('./test.swf', mode='r')) z = File # z = ptypes.debugrecurse(z) z = z() z = z.l for x in z['data']['tags']: print '-'*32 print x a = z['data']['tags'][0] print a.hexdump() print a.li.hexdump() print repr(a.l['Header'].serialize()) correct='\x44\x11\x08\x00\x00\x00'
# Tail of the SWF __data constructor (cut at chunk start) and a Python-2
# load script.
        header = self['Header'].li
        sig = header['Signature'].str()
        # if it's compressed then use the 'zlib' structure
        t = EncodedDataType.withdefault(sig, type=sig)
        length = min(header['FileLength'].int(), self.source.size()) - header.size()
        return dyn.clone(t, _value_=dyn.clone(t._value_, length=length))

    _fields_ = [(Header, 'header'), (__data, 'data')]

if __name__ == '__main__':
    import sys
    import ptypes, __init__ as swf
    ptypes.setsource(ptypes.file('./test.swf', mode='r'))
    z = File
    # z = ptypes.debugrecurse(z)
    z = z()
    z = z.l
    for x in z['data']['tags']:
        print '-' * 32
        print x
    a = z['data']['tags'][0]
    print a.hexdump()
    print a.li.hexdump()
    print repr(a.l['Header'].serialize())
    correct = '\x44\x11\x08\x00\x00\x00'
# Tail of a system-header field list (cut at chunk start), then generic MPEG
# bit-packets and a Python-2 scratch script.
        (1, 'marker_bit'),
        (5, 'video_bound'),
        (8, 'reserved_byte'),
        (__streamarray, 'streamarray'),
    ]

###
class packet(pbinary.struct):
    # 32-bit start code followed by a code-dependent payload
    _fields_ = [
        (32, 'code'),
        (lambda s: layer.lookup(s['code']), 'data'),
    ]

class stream(pbinary.terminatedarray):
    _object_ = packet
    def isTerminator(self, value):
        return type(value) == end_code

# NOTE(review): this second definition shadows the terminated variant above;
# only the fixed-length (20 element) stream is effective.
class stream(pbinary.array):
    _object_ = packet
    length = 20

if __name__ == '__main__':
    import ptypes,mpeg
    # ptypes.setsource( ptypes.file('./poc-mpeg.stream') )
    ptypes.setsource( ptypes.file('./poc.mov') )
    reload(mpeg)
    a = mpeg.stream(offset=0x3ba, length=20)
    print a.l
# Tail of the TPKT definition (cut at chunk start), the stream entry point,
# and a Python-2 analysis script.
        return self['data']['length'].int()

    pdu = property(fget=lambda s: s['data']['data'])

### entry point
class Stream(parray.infinite):
    _object_ = TPKT

File=Stream

if __name__ == '__main__':
    import ptypes,analyze
    reload(analyze)
    ptypes.setbyteorder(ptypes.config.byteorder.littleendian)
    # ptypes.setsource(ptypes.file('./termdd_1.dat'))
    ptypes.setsource(ptypes.file('./blah.dat'))
    from analyze import *
    z = analyze.Stream()
    z = z.l
    # for x in z:
    #     print x
    if False:
        a = TPKT()
        a = a.l
        print a['data']
        b = TPDU(offset=a.getoffset()+a.size())
        b = b.l
(BYTE, 'skip'), (BYTE, 'count'), (lambda s: dyn.block( (s['count'].li.int() & 0x80) and 1 or s['count'].li.int() & 0x7f), 'data') #XXX ] class Line(pstruct.type): _fields_ = [(BYTE, 'numpackets'), (lambda s: dyn.array(LinePacket, s['numpackets'].li.int()), 'packets')] @ChunkType.define class DELTA_FLI(pstruct.type): type = 12 _fields_ = [(WORD, 'skip'), (WORD, 'numlines'), (lambda s: dyn.array(Line, s['numlines'].li.int()), 'lines')] if __name__ == '__main__': import ptypes, flic reload(flic) ptypes.setsource(ptypes.file('./test.fli')) # ptypes.setsource( ptypes.file('./janmar90.flc') ) z = ptypes.debugrecurse(flic.File)() z = z.l print z
# PE/COFF symbol-table inspection script (Python 2).
if __name__ == '__main__':
    import ptypes,pecoff
    from ptypes import *
    ptypes.setsource(ptypes.file('./chewbacca.exe.infected'))
    a = pecoff.Executable.File()
    a=a.l
    b = a['next']['header']
    #print b['header']
    c = b['FileHeader']['pointertosymboltable'].d
    c = c.l
    print c['Symbols'][1]
    print c['Symbols'][1].details()
    print c['Symbols']
    print c.names()
    print c.walk().next()
    print c.getSymbol('_main')
    print c.getAuxiliary('_main')
    print c.fetch('_main')
# Tail of the CFHEADER field list (cut at chunk start), the CAB File
# structure, and a Python-2 inspection script.
        (lambda s: pstr.szstring if s['flags'].li['NEXT_CABINET'] else pstr.string, 'szDiskNext'),
    ]

class File(pstruct.type):
    _fields_ = [
        (CFHEADER, 'header'),
        (lambda s: dyn.array(CFFOLDER, s['header'].li['cFolders'].int()), 'folders'),
        # (lambda s: dyn.array(CFFILE, s['header']['cFiles'].int()), 'files'),
        # (lambda s: dyn.block(s['header']['cbCabinet'].int() - s['header'].size()-s['folders'].size()-s['files'].size()), 'data'),
        # (dyn.block(s['header']['cbCabinet'].int() - s['header'].size()-s['folders'].size()-s['files'].size()), 'data'),
    ]

if __name__ == '__main__':
    import sys,ptypes,cab
    reload(cab)
    ptypes.setsource(ptypes.file('~/shit/test/Windows6.1-KB2705219-x86.cab'))
    a = cab.File()
    a = a.l
    print a['header']['cbCabinet'].int()
    print a['header']['cbCabinet']
    print a['folders'][0]['typeCompress'].summary()
    print a['folders'][0]['coffCabStart'].d.l
    b = a['header']['coffFiles'].d.l
    for x in b:
        print x['uoffFolderStart']
    print b[1]
    print b[1]['uoffFolderStart'].d.l.hexdump()