def dissect_packet(stream_fd):
    """ Return the dissected packet at stream_fd's current read pointer.

    The stream has a parallel ".pkt" map URN which translates stream
    offsets back into offsets inside the original pcap file; the pcap
    parser is then positioned there and asked to dissect.

    Raises RuntimeError if stream_fd does not have a valid ".pkt" map.
    Returns None when there is nothing to read at the current offset or
    dissection fails (best effort).
    """
    vfs = FileSystem.DBFS(stream_fd.case)

    ## The packet map lives under a parallel ".pkt" URN:
    pkt_urn = pyaff4.RDFURN()
    pkt_urn.set(stream_fd.urn.value + ".pkt")

    map_fd = vfs.open(urn=pkt_urn)

    ## Note short circuit - resolve_value is only consulted when the
    ## map could be opened at all:
    if not map_fd or \
           not oracle.resolve_value(stream_fd.urn, pyaff4.AFF4_TARGET,
                                    pkt_urn):
        raise RuntimeError("%s is not a stream" % stream_fd.urn)

    ## Get the parser from the cache, creating and caching it on a miss:
    try:
        pcap_file = PCAP_FILE_CACHE.get(pkt_urn.value)
    except KeyError:
        pcap_file = pypcap.PyPCAP(vfs.open(urn=pkt_urn))
        PCAP_FILE_CACHE.add(pkt_urn.value, pcap_file)

    ## Translate our stream offset into a pcap file offset:
    stream_offset = stream_fd.tell()
    target_offset_at_point, available_to_read = pcap_file and \
                            map_fd.get_range(stream_offset, None) or (0, 0)

    if available_to_read:
        ## Go to the packet and dissect it:
        pcap_file.seek(target_offset_at_point)
        try:
            return pcap_file.dissect()
        except:
            ## Best effort - fall through and return None.
            pass
def next(self):
    """ Return the next packet from this file, resuming from the last
    read position.

    The pypcap parser is cached in the global store keyed by filename
    so repeated calls do not reopen the file. Side effects: updates
    self.offset (resume point) and self.timestamp (packet time as a
    float of seconds).
    """
    try:
        parser = store.get(self.filename)
    except KeyError:
        ## Parser not cached (first use, or evicted) - reopen the file:
        fd = open(self.filename)
        if config.output:
            ## Force the configured output endianess:
            parser = pypcap.PyPCAP(fd, output=config.output)
        else:
            parser = pypcap.PyPCAP(fd)

    ## Resume from our saved position. self.offset may still be unset
    ## (None) before the first read - seek then raises TypeError and we
    ## simply read from the start:
    try:
        parser.seek(self.offset)
    except TypeError:
        pass

    ## (Re)register the parser in the store:
    store.put(parser, key=self.filename)

    packet = parser.next()

    ## Remember where the next packet starts:
    self.offset = parser.offset()

    ## Packet timestamp in fractional seconds:
    self.timestamp = packet.ts_sec + packet.ts_usec / 1.0e6

    return packet
def load_file(urn, processor, pcap_dispatch): """ Loads the urn into the processor """ ## Get a unique id for the urn dbfs = FileSystem.DBFS(config.case) fd = dbfs.open(urn=urn) id = len(pcap_dispatch)+1 pcap_dispatch[id] = urn try: input_file = pypcap.PyPCAP(fd, file_id = id) except IOError,e: pyflaglog.log(pyflaglog.INFO, "Error reading %s: %s" % (urn, e)) return
def load_file(filename, processor, pcap_dbh): global offset pyflaglog.log(pyflaglog.INFO, "%s: Processing %s" % (time.ctime(),filename)) pcap_dbh.execute("select max(id) as m from pcap") row = pcap_dbh.fetch() pcap_id = row['m'] or 0 try: input_file = pypcap.PyPCAP(open(filename), output='little') except IOError,e: pyflaglog.log(pyflaglog.INFO, "Error reading %s: %s" % (filename, e)) return
def scan(self, fd, scanners, type, mime, cookie, **args):
    """ Scan a PCAP file: dissect each packet in turn and feed it to a
    freshly made stream processor.

    Files whose magic type does not mention PCAP are ignored. The
    parser is cached in PCAP_FILE_CACHE keyed by the file's URN so
    later code can re-dissect individual packets.
    """
    ## Only interested in pcap files:
    if "PCAP" not in type:
        return

    dispatcher = {1: fd.urn}
    processor = make_processor(fd.case, scanners, dispatcher, cookie)

    ## Now process the file
    try:
        parser = pypcap.PyPCAP(fd, file_id=1)
        PCAP_FILE_CACHE.add(fd.urn, parser)
    except IOError:
        pyflaglog.log(
            pyflaglog.WARNING,
            DB.expand("%s does not appear to be a pcap file", fd.urn))
        return

    ## Dissect packets until the file is exhausted:
    while True:
        try:
            processor.process(parser.dissect())
        except StopIteration:
            break

    ## Destroying the processor flushes any pending streams:
    del processor
def display(self, query, result):
    """ Render a single packet (query['id']) as an expandable dissection
    tree with a hex dump pane, plus previous/next toolbar navigation.
    """
    ## Look up the packet's location in its io source:
    dbh = DB.DBO(query['case'])
    dbh.execute("select * from pcap where id=%r limit 1", query['id'])
    row = dbh.fetch()

    ## Re-dissect the packet directly from the io source:
    io = IO.open(query['case'], row['iosource'])
    packet = pypcap.PyPCAP(io)
    packet.seek(row['offset'])
    dissected_packet = packet.dissect()

    id = int(query['id'])

    def get_node(branch):
        """ Locate the node specified by the branch. branch is a list of attribute names. """
        result = dissected_packet
        for b in branch:
            try:
                result = getattr(result, b)
            except:
                ## Unresolvable components are skipped silently:
                pass

        return result

    def tree_cb(path):
        ## Tree widget callback: yield (name, label, kind) children of
        ## the node at path.
        branch = FlagFramework.splitpath(path)
        node = get_node(branch)
        try:
            for field in node.list():
                ## Private fields are not shown:
                if field.startswith("_"): continue

                child = getattr(node, field)
                try:
                    ## Nodes with a get_name() are expandable branches:
                    yield (field, child.get_name(), 'branch')
                except AttributeError:
                    yield (field, field, 'leaf')
        except AttributeError:
            ## Leaf nodes have no list() method:
            pass

        return

    def pane_cb(path, result):
        ## Pane callback: show the selected node and highlight its
        ## bytes in a hex dump of the whole packet.
        branch = FlagFramework.splitpath(path)
        node = get_node(branch)

        result.heading("Packet %s" % id)
        data = dissected_packet.serialise()
        h = FlagFramework.HexDump(data, result)
        try:
            ## A structured node knows its own name and byte range:
            result.text("%s" % node.get_name(), font='bold')
            result.text('', style='black', font='normal')
            start, length = node.get_range()
        except AttributeError:
            ## A plain value: print it, then ask the parent for the
            ## byte range of this field:
            result.text("%s\n" % node, style='red', wrap='full',
                        font='typewriter', sanitise='full')
            result.text('', style='black', font='normal')

            node = get_node(branch[:-1])
            start, length = node.get_range(branch[-1])

        h.dump(highlight=[ [start, length, 'highlight'], ])

        return

    result.tree(tree_cb=tree_cb, pane_cb=pane_cb, branch=[''])

    ## We add forward and back toolbar buttons to let people move
    ## to next or previous packet:
    dbh.execute("select min(id) as id from pcap")
    row = dbh.fetch()

    new_query = query.clone()
    if id > row['id']:
        del new_query['id']
        new_query['id'] = id - 1
        result.toolbar(text="Previous Packet", icon="stock_left.png",
                       link=new_query)
    else:
        result.toolbar(text="Previous Packet", icon="stock_left_gray.png")

    dbh.execute("select max(id) as id from pcap")
    row = dbh.fetch()
    if id < row['id']:
        del new_query['id']
        new_query['id'] = id + 1
        result.toolbar(text="Next Packet", icon="stock_right.png",
                       link=new_query)
    else:
        result.toolbar(text="Next Packet", icon="stock_right_gray.png")
#!/usr/bin/env python import pypcap fd = pypcap.PyPCAP(open("/var/tmp/uploads/stdcapture_0.3.pcap")) h = fd.file_header() print h.list() print h.get_field("linktype") print h.linktype def print_tree(packet, depth=0): for i in packet.list(): print " " * depth + "%s: %s" % (i, packet.get_field(i)) try: print_tree(packet.get_field(i), depth + 1) except: pass offset = fd.offset() packet = fd.next() print_tree(packet) fd.seek(offset) packet = fd.next() print_tree(packet) for p in fd:
## Stores the next timestamp: self.put(f) return p except StopIteration: return self.next() f = FileList(args, sort=not config.dont_sort) ## Force endianess if necessary: if config.output: for file in f.files: try: fd = pypcap.PyPCAP(open(file.filename), output=config.output) except IOError: continue break else: fd = pypcap.PyPCAP(open(f.firstValid)) ## ## Split by hours? ## if config.split_by_hours: print "Earliest time is ", f.times[0] print "Abs Starting date is ", datetime.datetime.utcfromtimestamp( f.times[0])
def load(self, mount_point, iosource_name,scanners = None): DBFS.load(self, mount_point, iosource_name) ## Open the file descriptor self.fd = IO.open(self.case, iosource_name) ## Use the C implementation to read the pcap files: pcap_file = pypcap.PyPCAP(self.fd) ## Build our streams: pyflaglog.log(pyflaglog.DEBUG, "Reassembling streams, this might take a while") pcap_dbh = DB.DBO(self.case) pcap_dbh.mass_insert_start("pcap") pcap_dbh.execute("select max(id) as m from pcap") max_id = pcap_dbh.fetch()['m'] or 0 cookie, processor = self.make_processor(iosource_name, scanners) ## Process the file with it: while 1: try: packet = pcap_file.dissect() max_id += 1 ## FIXME - this is a bottleneck. For now we use mass ## insert but this will break when we have multiple ## concurrent loaders. Record the packet in the pcap ## table: args = dict( iosource = iosource_name, offset = packet.offset, length = packet.caplen, _ts_sec = "from_unixtime('%s')" % packet.ts_sec, ts_usec = packet.ts_usec, ) ## Try to insert the ipid field try: args['ipid']= packet.root.eth.payload.id except: pass pcap_dbh.mass_insert(**args) #pcap_id = pcap_dbh.autoincrement() pcap_id = max_id pcap_file.set_id(pcap_id) ## Some progress reporting if pcap_id % 10000 == 0: pyflaglog.log(pyflaglog.DEBUG, "processed %s packets (%s bytes)" % (pcap_id, packet.offset)) processor.process(packet) except StopIteration: break processor.flush() pcap_dbh.check_index("connection_details",'src_ip') pcap_dbh.check_index("connection_details",'src_port') pcap_dbh.check_index("connection_details",'dest_ip') pcap_dbh.check_index("connection_details",'dest_port') pcap_dbh.check_index('connection_details','inode_id')
## Test script: open a pcap image through the AFF4 resolver (with
## memory-leak debugging enabled) and prepare an AFF4 ZIP volume for
## output.
import pypcap, reassembler
import time
import pyaff4, os, pdb

time.sleep(1)

import gc
gc.set_debug(gc.DEBUG_LEAK)

## Resolver in memory-debug mode to track reference leaks:
oracle = pyaff4.Resolver(pyaff4.RESOLVER_MODE_DEBUG_MEMORY)

image_urn = pyaff4.RDFURN()
image_urn.set("/var/tmp/uploads/testimages/stdcapture_0.4.pcap")
## NOTE: this second set() overrides the path above - only the
## a5912_01_03.pcap image is actually opened:
image_urn.set("/var/tmp/uploads/a5912_01_03.pcap")

image = oracle.open(image_urn, 'r')
pcap_file = pypcap.PyPCAP(image)

## Create a new volume on the output file
outfile = pyaff4.RDFURN()
outfile.set("/tmp/output.aff4")

## Remove any stale output from a previous run (best effort):
try:
    os.unlink(outfile.parser.query)
except: pass

volume = oracle.create(pyaff4.AFF4_ZIP_VOLUME)
volume.set(pyaff4.AFF4_STORED, outfile)
volume = volume.finish()
volume_urn = volume.urn
oracle.cache_return(volume)
socket.inet_ntoa(struct.pack( ">L", connection['src_ip'])), connection['src_port'], socket.inet_ntoa(struct.pack(">L", connection['dest_ip'])), connection['dest_port'], connection['l']) if options.stats: stats_fd = open(options.stats, 'ab') stats_fd.write(stat) stats_fd.close() #for i in range(len(connection['packets'])): # tmp.append("%s" % ((connection['packets'][i], connection['offset'][i], # connection['length'][i]),)) #stats_fd.write("%s\n" % ','.join(tmp)) processor = reassembler.Reassembler( packet_callback=FlagFramework.Curry(Callback, options=options)) for f in args: try: pcap_file = pypcap.PyPCAP(open(f, "rb")) except IOError: continue while 1: try: packet = pcap_file.dissect() processor.process(packet) except StopIteration: break
except: pass volume_urn = volume_fd.urn oracle.add(volume_urn, AFF4_STORED, output) volume_fd.finish() oracle.cache_return(volume_fd) for image in args: try: ## Make sure its a fully qualified url if "://" not in image: image = "file://%s" % image in_fd = oracle.open(image) pcap_file = pypcap.PyPCAP(in_fd) except IOError: print "%s does not appear to be a pcap file" % image continue ## Make an image image_fd = Image(None, "w") oracle.add(image_fd.urn, AFF4_STORED, volume_urn) image_urn = image_fd.urn image_fd.finish() ## Write a pcap header on: image_fd.write(pcap_file.file_header().serialise()) def Callback(mode, packet, connection): if mode == 'est':