def UserInput(): print('Choose the Netflow file to anaylyze:') print('1. flowd_capture_1') print ('2. flowd_capture_2') flowFile = raw_input('Enter your choice [1-2]: ') if flowFile == '1': try: log = flowd.FlowLog('flowd_capture_1') TopRemotePorts(log, 'flowd_capture_1') log = flowd.FlowLog('flowd_capture_1') TopRemoteHosts(log, 'flowd_capture_1') log = flowd.FlowLog('flowd_capture_1') TopLocalToRemote(log, 'flowd_capture_1') log = flowd.FlowLog('flowd_capture_1') TopLocalHostLocalPort(log, 'flowd_capture_1') except IOError: print 'flowd_capture_1 not found.' return UserInput() elif flowFile == '2': try: log = flowd.FlowLog('flowd_capture_2') TopRemotePorts(log, 'flowd_capture_2') log = flowd.FlowLog('flowd_capture_2') TopRemoteHosts(log, 'flowd_capture_2') log = flowd.FlowLog('flowd_capture_2') TopLocalToRemote(log, 'flowd_capture_2') log = flowd.FlowLog('flowd_capture_2') TopLocalHostLocalPort(log, 'flowd_capture_2') except IOError: print 'flowd_capture_2 not found.' return UserInput()
def process_from_file(logpath, rrdpath):
    """Replay the flowd log at *logpath* into the RRD at *rrdpath*.

    Flows are accumulated into FlowTrack buckets; whenever the gap since
    the current bucket's start exceeds UPDATE_RATE seconds, the bucket is
    flushed to the RRD and a new one is started.

    :param logpath: path to a flowd binary log file
    :param rrdpath: path to the RRD database to update (created on demand)
    """
    flowlog = flowd.FlowLog(logpath)
    start_time = None      # quantised start of the current bucket; None until first usable flow
    flows = FlowTrack()
    for flow in flowlog:
        # Flows without a receive timestamp cannot be bucketed — skip them.
        if not flow.has_field(flowd.FIELD_RECV_TIME):
            continue
        if start_time is None or \
                (flow.recv_sec - start_time) > UPDATE_RATE:
            if start_time is not None:
                # Bucket boundary crossed: flush the completed bucket.
                flows.store_in_rrd(rrdpath, start_time)
            elif not os.access(rrdpath, os.R_OK | os.W_OK):
                # Very first flow and no writable RRD yet: create it,
                # back-dated one step (300 s) so the first update fits.
                create_rrd(rrdpath, quantise(flow.recv_sec) - 300)
            # Start a fresh bucket aligned to the quantised timestamp.
            flows = FlowTrack()
            start_time = quantise(flow.recv_sec)
        flows.update(flow)
    # NOTE(review): the final partial bucket is never flushed to the RRD
    # here — presumably intentional (it is incomplete), but worth confirming.
def parse_flow(recv_stamp):
    """ parse flowd logs and yield records (dict type)

    Walks all files matching FLOWD_LOG_FILES in sorted (oldest-first name)
    order and yields one dict per flow newer than *recv_stamp*.  A final
    ``None`` is yielded as an end-of-stream marker.

    :param recv_stamp: last receive timestamp (recv); flows at or before
        this timestamp are skipped and stop descent into older archives
    :return: iterator flow details
    """
    interfaces = Interfaces()
    parse_done = False
    for filename in sorted(glob.glob(FLOWD_LOG_FILES)):
        if parse_done:
            # log file contains older data (recv_stamp), break
            break
        flog = flowd.FlowLog(filename)
        for flow in flog:
            flow_record = dict()
            if flow.has_field(flowd.FIELD_RECV_TIME):
                # receive timestamp
                flow_record['recv'] = flow.recv_sec
                if flow_record['recv'] <= recv_stamp:
                    # do not parse next flow archive (oldest reached)
                    parse_done = True
                    continue
            if flow.has_field(flowd.FIELD_FLOW_TIMES):
                # calculate flow start, end, duration in ms
                # flow_finish/flow_start are router uptime ticks; anchor the
                # flow end to the collector's receive time.
                flow_record['flow_end'] = flow.recv_sec - (
                    flow.sys_uptime_ms - flow.flow_finish) / 1000.0
                flow_record['duration_ms'] = (flow.flow_finish - flow.flow_start)
                flow_record['flow_start'] = flow_record[
                    'flow_end'] - flow_record['duration_ms'] / 1000.0
            # handle source data
            # Missing fields become explicit None values so consumers can
            # rely on every target key existing.
            for flow_field in PARSE_FLOW_FIELDS:
                if flow.has_field(flow_field['check']):
                    flow_record[flow_field['target']] = getattr(
                        flow, flow_field['target'])
                else:
                    flow_record[flow_field['target']] = None
            # map interface indexes to actual interface names
            # NOTE(review): assumes 'if_ndx_in'/'if_ndx_out' are among the
            # PARSE_FLOW_FIELDS targets (so the keys always exist) — confirm.
            flow_record['if_in'] = interfaces.if_device(
                flow_record['if_ndx_in'])
            flow_record['if_out'] = interfaces.if_device(
                flow_record['if_ndx_out'])
            yield flow_record
    # send None to mark last record
    yield None
def main(): verbose = 0 utc = 0 try: opts, args = getopt.getopt(sys.argv[1:], 'huv') except getopt.GetoptError: print >> sys.stderr, "Invalid commandline arguments" usage() for o, a in opts: if o in ('-h', '--help'): usage() sys.exit(0) if o in ('-v', '--verbose'): verbose = 1 continue if o in ('-u', '--utc'): utc = 1 continue if len(args) == 0: print >> sys.stderr, "No logfiles specified" usage() if verbose: mask = flowd.DISPLAY_ALL else: mask = flowd.DISPLAY_BRIEF for ffile in args: flog = flowd.FlowLog(ffile) try: print "LOGFILE " + ffile except IOError: break; for flow in flog: print flow.format(mask = mask, utc = utc)
def main():
    """Accumulate statistics over flowd logs and print a report.

    Flags: -h usage, -p FILE pickle the statistics object to FILE.
    Arguments are flowd log files; "-" reads a log from stdin.  Progress is
    shown on stderr when it is a terminal.
    """
    stats = flow_statistics()
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'p:hu')
    except getopt.GetoptError:
        print >> sys.stderr, "Invalid commandline arguments"
        usage()
    pickle_file = None
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        if o in ('-p', '--pickle'):
            pickle_file = a
    # Get clear-to-line-end sequence for progress display
    # (terminfo "el" capability; empty string disables progress output,
    # e.g. when stderr is not a tty or curses is unavailable).
    ceol = None
    if sys.stderr.isatty() and not no_curses:
        curses.setupterm()
        ceol = curses.tigetstr("el")
        if ceol is not None:
            ceol = curses.tparm(ceol, 0, 0)
    if ceol is None:
        ceol = ""
    if len(args) == 0:
        print >> sys.stderr, "No logfiles specified"
        usage()
    i = 0  # total flows across all files
    for ffile in args:
        j = 0  # flows in the current file
        if ffile == "-":
            flog = flowd.FlowLog_fromfile(sys.stdin)
        else:
            flog = flowd.FlowLog(ffile, "rb")
        for flow in flog:
            stats.update(flow)
            # Progress line every 1000 flows; trailing commas suppress the
            # newline so "\r" keeps rewriting the same line.
            # NOTE(review): `i >= 0` is always true (i starts at 0) — dead
            # condition, kept as-is.
            if ceol != "" and i >= 0 and j % 1000 == 0:
                print >> sys.stderr, "\r%s: %d flows" % \
                    (ffile, j),
                if i != j:
                    print >> sys.stderr, " total %d" % i,
                print >> sys.stderr, ceol,
                sys.stderr.flush()
            i += 1
            j += 1
        # Final per-file summary line (with newline this time).
        print >> sys.stderr, "\r%s: %d flows (total %d)%s\n" % \
            (ffile, j, i, ceol)
        sys.stderr.flush()
    stats.crop()
    print stats.report()
    if pickle_file is not None:
        out = open(pickle_file, "wb")
        pickle.dump(stats, out)
        out.close()
        print >> sys.stderr, "Statistics pickled to \"%s\"" % \
            pickle_file
#!/usr/bin/env python print("testing python import of flowd") import flowd flow_log = flowd.FlowLog("flows.log", "rb") for flow in flow_log: print flow.format()