def removeUseless(net, addLengths=False, ucFlow=None, removeReals=False):
    '''
    Removes sites from the network that have exactly two neighbors
    (one upstream, one downstream) and merges their two flows into one,
    keeping the length of one of the deleted flows (or the sum of both
    lengths when addLengths is True). This is done to resolve
    MultiLineString entries in geoJSON files.

    net [Network]: Network to operate on.

    Returns [Flow]: the tracked flow (ucFlow), remapped to the merged
    flow if one of its endpoint sites was deleted, or None if no flow
    was passed in.

    Notes: Do NOT run this method if you want to take loops into account,
    as this method will break the runtime if loops are present!
    '''
    i = 0
    sf = ucFlow
    while i < len(net.siteTable):
        sit = net.siteTable[i]
        if sit.isReal and not removeReals:
            i += 1
            continue
        cs = sit.connectedSites()
        if len(cs) == 2 and cs[0][2].reachCode == cs[1][2].reachCode:
            # This site is deletable
            coni0 = cs[0]
            coni1 = cs[1]
            if addLengths:
                newLen = coni0[2].length + coni1[2].length
            else:
                assert coni0[2].length == coni1[2].length
                newLen = coni0[2].length
            if coni0[1] == DOWNSTREAM_CON:
                # coni0 is downstream of the deletable site ('sit');
                # coni1 is upstream
                fl2Add = Flow(coni0[2].id, coni1[0], coni0[0], newLen,
                              coni0[2].reachCode)
            else:
                # coni0 is upstream of 'sit'; coni1 is downstream
                fl2Add = Flow(coni1[2].id, coni0[0], coni1[0], newLen,
                              coni1[2].reachCode)
            # Keep the tracked flow pointing at a live Flow object
            if sf is not None:
                if sf.downstreamSite == sit or sf.upstreamSite == sit:
                    sf = fl2Add
            net.removeInvolvedFlows(sit)
            coni0[0].removeInvolvedFlows(sit)
            coni1[0].removeInvolvedFlows(sit)
            net.siteTable.remove(sit)
            coni0[0].flowsCon.append(fl2Add)
            coni1[0].flowsCon.append(fl2Add)
            net.flowTable.append(fl2Add)
            # step back one slot so the shifted table is rescanned
            if i > 0:
                i -= 1
        else:
            i += 1
    return sf
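# A minimal, self-contained sketch of the same degree-2 collapse that
# removeUseless performs, on a plain dict of edge lists instead of the
# project-specific Network/Flow classes (collapse_pass_through and its
# edges format are illustrative stand-ins, not part of the code above).
# Like removeUseless, this assumes the graph contains no loops.
def collapse_pass_through(edges):
    """edges: dict node -> list of (neighbor, length) pairs; modified in place."""
    changed = True
    while changed:
        changed = False
        for node, nbrs in list(edges.items()):
            if len(nbrs) == 2:  # exactly one neighbor on each side
                (a, la), (b, lb) = nbrs
                # splice a--node--b into a single a--b edge, summing lengths
                edges[a] = [(n, l) for (n, l) in edges[a] if n != node]
                edges[b] = [(n, l) for (n, l) in edges[b] if n != node]
                edges[a].append((b, la + lb))
                edges[b].append((a, la + lb))
                del edges[node]
                changed = True
                break

# e.g. collapse_pass_through({'A': [('B', 1)], 'B': [('A', 1), ('C', 2)], 'C': [('B', 2)]})
# leaves {'A': [('C', 3)], 'C': [('A', 3)]}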
def initRobotFromCfg(runtimeID):
    index = 1
    robots = []
    thisRobot = None
    srcDict = CFGReader.ReadConfig("swarm.cfg", "src")
    dstDict = CFGReader.ReadConfig("swarm.cfg", "dst")
    if not srcDict:
        sys.exit("No [src] found in swarm.cfg")
    if not dstDict:
        sys.exit("No [dst] found in swarm.cfg")
    srcFID = srcDict["fid"]
    srcLoc = Location(float(srcDict["x"]), float(srcDict["y"]), float(srcDict["z"]))
    srcNode = Robot("-1", srcLoc, srcFID, None, 6666, 6666, isSrc=True)
    robots.append(srcNode)
    dstFID = dstDict["fid"]
    dstLoc = Location(float(dstDict["x"]), float(dstDict["y"]), float(dstDict["z"]))
    dstNode = Robot("-2", dstLoc, dstFID, None, 6666, 6666, isDest=True)
    robots.append(dstNode)
    # read numbered [R1], [R2], ... sections until one is missing
    while True:
        section = "R" + str(index)
        robotDict = CFGReader.ReadConfig("swarm.cfg", section)
        if not robotDict:
            break
        rID = robotDict['rid']
        fID = robotDict['fid']
        loc = Location(float(robotDict['x']), float(robotDict['y']),
                       float(robotDict['z']))
        newRobot = Robot(rID, loc, fID, None, 6666, 6666)
        robots.append(newRobot)
        if rID == runtimeID:
            thisRobot = newRobot
        index += 1
    if thisRobot is None:
        sys.exit("No [R%s] found in swarm.cfg" % runtimeID)
    thisRobot.all_robots["-1"] = srcLoc
    thisRobot.all_robots["-2"] = dstLoc
    currFlow = Flow.Flow(thisRobot.fid, robots, srcNode, dstNode)
    thisRobot.setFlowAndGraph(currFlow, robots)
    thisRobot.establishConnections()
    return robots
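# swarm.cfg layout implied by initRobotFromCfg (section and key names are
# taken from the reads above; the values are purely illustrative):
#
# [src]
# fid = f1
# x = 0.0
# y = 0.0
# z = 0.0
#
# [dst]
# fid = f1
# x = 10.0
# y = 0.0
# z = 0.0
#
# [R1]
# rid = 1
# fid = f1
# x = 5.0
# y = 2.0
# z = 0.0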
def fetchFlow(self):
    # read each config section once instead of re-parsing the file per key
    flowInfo = Helper.ReadConfig("model/flow.cfg", "Flow Information")
    fid = flowInfo['id']
    sid = flowInfo['src']
    did = flowInfo['dest']
    # config values are strings; convert coordinates to float as the
    # other Location call sites do
    sLocDict = Helper.ReadConfig("model/location_e.cfg", sid)
    s_loc = Location(float(sLocDict['x']), float(sLocDict['y']),
                     float(sLocDict['z']))
    src = Host.Host(sid, s_loc)
    dLocDict = Helper.ReadConfig("model/location_e.cfg", did)
    d_loc = Location(float(dLocDict['x']), float(dLocDict['y']),
                     float(dLocDict['z']))
    dest = Host.Host(did, d_loc)
    flow = Flow.Flow(fid, src, dest)
    self.setFlow(flow)
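# Config layout implied by fetchFlow (section and key names are taken from
# the reads above; values are illustrative):
#
# model/flow.cfg:
#   [Flow Information]
#   id = f1
#   src = h1
#   dest = h2
#
# model/location_e.cfg:
#   [h1]
#   x = 0.0
#   y = 0.0
#   z = 0.0
#   [h2]
#   x = 10.0
#   y = 0.0
#   z = 0.0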
def SetFlows(self, flowNums, flowInfoList, startTime):
    """
    flowInfoList contains <flowNums> elements; each element is a tuple
    of the form (startId, endId, flowSize).
    """
    self.flowNums = flowNums
    self.jobStartTime = startTime
    self.jobFinishTime = startTime
    self.flows = [None]  # index 0 is a placeholder so flow ids are 1-based
    # add each flow to the flow list
    for i in range(flowNums):
        f = Flow()
        f.startId = flowInfoList[i][0]
        f.endId = flowInfoList[i][1]
        f.remainSize = flowInfoList[i][2]
        f.startTime = startTime
        f.finishTime = startTime
        self.flows.append(f)
    self.curFlowId = 0
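# Illustrative call for SetFlows (the owning class isn't shown here, so
# `job` is a stand-in instance): three flows, each (startId, endId, flowSize).
#
#   job.SetFlows(3, [(0, 1, 100.0), (1, 2, 50.0), (0, 2, 75.0)], startTime=0.0)
#
# Afterwards job.flows[1..3] hold the Flow objects; index 0 is the None
# placeholder, so flow ids can be used as 1-based indices.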
def AddFlow(self, attri, sz):
    f = Flow(attri, sz, self.IdxCnt)
    self.FlowList.append(f)
    self.IdxCnt += 1
def ProcessPcap(filename, output_folder="", flow_length=10, label=None,
                category=None, Timeout=30, ActivityTimeout=5, TorDetect=False,
                TorPickle=None, output_sizes_stat=False, pickle_flows=False,
                filter_packets_eq_less_than=-1):
    df_sizes = pandas.DataFrame()
    list_sizes = []
    try:
        input_pcap = PacketCapture.PacketCapture(filename)
    except (OSError, FileNotFoundError, FileExistsError) as e:
        print(e)
        print("Error while accessing file %s." % filename)
        exit(1)
    Flows = []
    time.sleep(1)
    if TorDetect:
        tornodes = NodeGuard.NodeGuard()
        if TorPickle:
            tornodes.loadNodesFromPickle(TorPickle)
        else:
            tornodes.loadNodes()
        tor_traffic = detectTorIP(input_pcap.streamslist, tornodes)
        for flow in tor_traffic:
            subflows = detectFlows(input_pcap.streams[(flow[0], flow[1],
                                                       flow[2], flow[3])],
                                   Timeout=Timeout)
            # set up destination (Tor entry node) and source
            flow_dest = flow[4]
            # from tcpdump's point of view the dest is the Tor node
            if flow_dest == flow[2]:
                flow_source = flow[0]
            else:
                # tcpdump saw the Tor node as the source, so swap
                flow_source = flow[2]
            for f in subflows:
                # only process flows with more than flow_length packets
                if len(f) > flow_length:
                    ff = Flow.Flow(FlowTimeout=Timeout,
                                   ActivityTimeout=ActivityTimeout,
                                   ipsrc=flow_source, ipdst=flow_dest)
                    ff.loadPackets(f)
                    if filter_packets_eq_less_than != -1:
                        ff.filter_packets_eq_less_than(filter_packets_eq_less_than)
                    Flows.append(ff)
                    if output_sizes_stat:
                        list_sizes.append(ff.packets_size_to_pandas())
    else:
        # no Tor detection
        for flow in input_pcap.streams:
            subflows = detectFlows(input_pcap.streams[flow], Timeout=Timeout)
            for f in subflows:
                if len(f) > flow_length:
                    ff = Flow.Flow(FlowTimeout=Timeout,
                                   ActivityTimeout=ActivityTimeout)
                    ff.loadPackets(f)
                    if filter_packets_eq_less_than != -1:
                        ff.filter_packets_eq_less_than(filter_packets_eq_less_than)
                    Flows.append(ff)
                    if output_sizes_stat:
                        list_sizes.append(ff.packets_size_to_pandas())
    print("Flows extracted: %d" % len(Flows))
    name = os.path.basename(filename)
    exportflowsToCsv(Flows, output_folder + name + "_flows_" + str(Timeout) +
                     "_" + str(ActivityTimeout) + ".csv",
                     label=label, category=category)
    if output_sizes_stat and len(list_sizes) > 0:
        df_sizes = reduce(partial(pandas.DataFrame.add, fill_value=0), list_sizes)
        df_sizes.to_csv(output_folder + name + "_flows_" + str(Timeout) + "_" +
                        str(ActivityTimeout) + "_size_stats.csv",
                        sep=",", encoding='utf-8', index=True)
    if pickle_flows:
        with open(output_folder + name + "_flows_" + str(Timeout) + "_" +
                  str(ActivityTimeout) + ".pkl", 'wb') as f:
            pickle.dump(Flows, f, 0)
    return Flows
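# Hypothetical invocation (the file path, label, and category are
# placeholders, not values from the code above):
#
#   flows = ProcessPcap("capture.pcap", output_folder="out/",
#                       flow_length=10, label="tor", category="browsing",
#                       TorDetect=True, pickle_flows=True)
#
# With the default Timeout=30 and ActivityTimeout=5 this writes
# out/capture.pcap_flows_30_5.csv and, because pickle_flows is set,
# out/capture.pcap_flows_30_5.pkl.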
def __init__(self, jsonFile):
    self.params = Params.Params()
    self.params.load(jsonFile)
    self.db = MagicalDB.MagicalDB(self.params)  # the flow database
    self.db.parse()  # parse the input files
    self.flow = Flow.Flow(self.db)
    params.printHelp()
    exit()
elif len(sys.argv) != 2:
    print("[E] One input parameter in JSON format is required")
    params.printHelp()
    exit()

# load parameters
params.load(sys.argv[1])
print("[I] parameters = %s" % (params))

db = MagicalDB.MagicalDB(params)  # the flow database
db.parse()  # parse the input files

flow = Flow.Flow(db)
flow.run()

# Workaround for PyInstaller.
# ref: https://github.com/pyinstaller/pyinstaller/issues/2820
if 0:
    import UserList
    import UserString
    import UserDict
    import itertools
    import collections
    import future.backports.misc
    import commands
    import base64
    import __buildin__
    import math
    import reprlib
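# Typical invocation implied by the argv handling above (the script name
# is a placeholder): python main.py params.json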