Exemple #1
0
    def allocate_flow(self, node1, node2):
        """Allocate a flow along the shortest path between two nodes.

        Args:
            node1: source node (must not be None).
            node2: destination node (must not be None).

        Returns:
            The newly created Flow on success; implicitly None when
            allocation fails partway (matching the original behaviour).

        Raises:
            TypeError: if either node is None.
            NoPathError: if the topology has no path between the nodes.
        """
        if node1 is None or node2 is None:
            raise TypeError
        # Fix: let NoPathError propagate as-is instead of catching it only
        # to raise a fresh, message-less instance.
        nodes = self.topology.shortest_path(node1, node2)
        edges = [
            self.topology.get_edge_object(nodes[i], nodes[i + 1])
            for i in xrange(len(nodes) - 1)
        ]
        flow = flow_module.Flow(nodes)

        # Fix: track the current edge index with enumerate (removes the
        # redundant edges.index() the TODO flagged) and pre-initialise it so
        # the except handler cannot hit an unbound name.
        i = 0
        try:
            for i, edge in enumerate(edges):
                ret = edge.allocate_flow(flow)
                if ret == edge.get_const_value('LAST_FLOW_AVAILABLE'):
                    # Edge is now saturated: stop routing new flows over it.
                    self.topology.set_edge_unavailable(nodes[i], nodes[i + 1])

        # If we except, the edge should be unavailable and we need to free
        # the allocations already made on the preceding edges.
        except RessourceAllocationError:
            # Fixes vs. original: 'nodes' was misspelled 'node' (NameError),
            # and edges[:i - 1] left one already-allocated edge un-freed.
            self.topology.set_edge_unavailable(nodes[i], nodes[i + 1])
            for edge in edges[:i]:
                edge.free_flow()
        else:
            self.flows.append(flow)
            return flow
Exemple #2
0
 def add(self, pkt):
     '''
     Drop packet types we do not handle, and file every remaining packet
     into the matching tcp.Flow in self.flowdict (creating one if needed).
     '''
     src, dst = pkt.socket
     srcip, srcport = src
     dstip, dstport = dst
     # filter out weird packets, LSONG
     if 5223 in (srcport, dstport):
         log.warning('hpvirgtrp packets are ignored')
         return
     if 5228 in (srcport, dstport):
         log.warning('hpvroom packets are ignored')
         return
     if 443 in (srcport, dstport):
         log.warning('https packets are ignored')
         return
     # route the packet to its flow, trying both key orientations
     forward, backward = (src, dst), (dst, src)
     if forward in self.flowdict:
         self.flowdict[forward].add(pkt)
     elif backward in self.flowdict:
         self.flowdict[backward].add(pkt)
     else:
         started = tcp.Flow()
         started.add(pkt)
         self.flowdict[forward] = started
Exemple #3
0
def parse_apps_config(appConfigFile):
    """Parse a tab-separated application config file into a list of dicts.

    Each non-blank line is expected to hold at least 8 tab-separated
    fields: indices 1-3 feed flow.Flow.parse(), 4 is packet size, 5 the
    sending rate, 6 the start time and 7 the duration (both possibly
    fractional seconds, truncated to int).

    Args:
        appConfigFile: path of the config file to read.

    Returns:
        List of dicts with keys 'src', 'dst', 'sport', 'dport',
        'flowType', 'pktSize', 'sendingRate', 'startTime', 'duration',
        'endTime'.
    """
    apps = []
    # Fix: the original never closed the file handle; 'with' releases it
    # even on parse errors.
    with open(appConfigFile, 'r') as f:
        for line in f:
            # Robustness: skip blank lines instead of raising IndexError.
            if not line.strip():
                continue
            items = line.split('\t')
            ac_flow = flow.Flow()
            ac_flow.parse('{} {} {}'.format(items[1], items[2], items[3]))
            pktSize = int(items[4])
            sendingRate = int(items[5])
            startTime = int(float(items[6]))
            duration = int(float(items[7]))
            endTime = startTime + duration
            apps.append({
                'src': int(ac_flow.src),
                'dst': int(ac_flow.dst),
                'sport': ac_flow.sport,
                'dport': ac_flow.dport,
                'flowType': ac_flow.flowType,
                'pktSize': pktSize,
                'sendingRate': sendingRate,
                'startTime': startTime,
                'duration': duration,
                'endTime': endTime,
            })
            #print(ac_flow.src, ac_flow.dst, startTime, endTime)
    return apps
Exemple #4
0
    def __init__(self,
                 title,
                 screen_width=800,
                 screen_height=600,
                 framerate=30,
                 size=200):
        """Set up the game window, state flags, map, flow and renderer.

        Args:
            title: game title stored on the instance (note the pygame
                window caption itself is hard-coded to "Plumber" below).
            screen_width: window width in pixels.
            screen_height: window height in pixels.
            framerate: target frames per second for the game clock.
            size: tile size in pixels; also sets the map's rows/cols.
        """
        self.screen_width = screen_width
        self.screen_height = screen_height
        self.title = title
        self.running = True
        self.english = True
        # Game state stack; the game starts on the intro screen.
        self.stance = [GAME.INTRO]

        self.intro = True
        self.option = False
        self.sound = True

        # Window must be created (set_mode) before the caption is set.
        self.clock = pygame.time.Clock()
        self.framerate = framerate
        self.screen = pygame.display.set_mode([screen_width, screen_height])

        pygame.display.set_caption("Plumber")

        # Map grid: (screen_height // size) rows by (screen_width // size)
        # columns of tiles of edge length 'size'.
        self.map = GameObjects.Map([], self.screen_height // size,
                                   self.screen_width // size, size)

        # NOTE(review): the meaning of Flow(0, 0, 1000) arguments is not
        # visible here — confirm against the flow module.
        self.flow = flow.Flow(0, 0, 1000)

        print("game object has been created")
        self.size = size
        self.renderer = GameObjects.Renderer(size)
        pipe_list = mapgenerator.generate_map(self.map.rows, self.map.cols)

        self.map.full_map(pipe_list)
def run(interface="eth1"):
    """Capture packets live on *interface*, group them into flows, and
    print classified analysis lines at each detected burst boundary.

    Runs indefinitely (sniff_continuously never returns normally).

    Args:
        interface: network interface name to capture on.
    """
    global burst_strings
    flow_dict = {}
    capture = pyshark.LiveCapture(interface=interface,
                                  display_filter=DISPLAY_FILTER)
    prev = None
    for pkt in capture.sniff_continuously():  # for each packet received...
        # NOTE(review): 'local_ip' is read from module scope — confirm it
        # is defined before run() is called.
        pkt_dict = fl.packet_extract(
            pkt, local_ip) if local_ip else fl.packet_extract(pkt)

        flow_id = fl.packet_serialize(pkt_dict)

        # Idiom fix: membership test on the dict itself, not on .keys().
        if flow_id not in flow_dict:
            flow_dict[flow_id] = fl.Flow()

        flow_dict[flow_id].add_pkt(pkt_dict)

        # A burst boundary flushes the lines accumulated so far.
        if check_burst(prev, pkt):
            for line in burst_strings:
                print(line)
            burst_strings = []

        flow_string = flow_dict[flow_id].get_analysis(pkt_dict)
        flow_string += " " + classifyFlows.get_label(
            [flow_dict[flow_id].get_feature_vector()])
        burst_strings.append(flow_string)

        prev = pkt
Exemple #6
0
 def __init__(self, config):
     import flow
     # Work on a private copy so caller-side mutations have no effect.
     self.config = config.copy()
     tag = 'OtaChamber|{}'.format(self.config['ip_address'])
     self.log = logger.create_tagged_trace_logger(tag)
     self.chamber = ChamberAutoConnect(flow.Flow(), self.config)
     self.stirrer_ids = list(range(3))
     self.current_mode = None
Exemple #7
0
	def test_init(self):
		"""Flow constructor stores id/source/dest and converts the units."""
		flow_id, src, dst = "F1", "H1", "H2"
		size_mb, start_s = "20", "1"

		f = flow.Flow(flow_id, src, dst, size_mb, start_s)
		self.assertEqual(f.get_id(), flow_id)
		self.assertEqual(f.get_source(), src)
		self.assertEqual(f.get_dest(), dst)
		# MByte -> KBit
		self.assertEqual(f.get_size(), int(size_mb) * 8.0 * 1000.0)
		# s to ms
		self.assertEqual(f.get_start(), int(start_s) * 1000)
def extract_pkt(path, src_ip=None):
    """Extract per-packet flow feature vectors and burst analyses from a
    pcap file.

    Args:
        path: path of the capture file to parse.
        src_ip: IP to treat as the local endpoint; when falsy, the most
            frequently occurring address in the capture is used.

    Returns:
        (meta, bursts): meta is an np.array of per-packet flow feature
        vectors; bursts is a list of lists of analysis strings, one inner
        list per detected burst.
    """
    # Silence stdout/stderr while the parsing libraries run; the
    # try/finally (a fix vs. the original) guarantees the saved
    # descriptors are restored even if parsing raises.
    os.dup2(null_fds[0], 1)
    os.dup2(null_fds[1], 2)
    try:
        flow_dict = dict()
        bursts = []
        cap = pyshark.FileCapture(path, display_filter=DISPLAY_FILTER)
        meta = []
        burst_index = 0
        if not src_ip:
            ips = []
            for pkt in cap:
                ips.append(str(pkt.ip.src))
                ips.append(str(pkt.ip.dst))

            # Most frequently occurring IP address in the capture file.
            # (Fix: loop variable no longer shadows the builtin 'chr'.)
            src_ip = max([(ips.count(ip), ip) for ip in set(ips)])[1]
        prev = cap[0]
        for pkt in cap:
            pkt_dict = fl.packet_extract(pkt, src_ip) if src_ip else fl.packet_extract(pkt)

            flow_id = fl.packet_serialize(pkt_dict)

            # Idiom fix: test membership on the dict, not on .keys().
            if flow_id not in flow_dict:
                flow_dict[flow_id] = fl.Flow()

            flow_dict[flow_id].add_pkt(pkt_dict)

            if not bursts:
                bursts.append([flow_dict[flow_id].get_analysis(pkt_dict)])

            if check_burst(prev, pkt):
                burst_index += 1
                bursts.append([flow_dict[flow_id].get_analysis(pkt_dict)])
            else:
                bursts[burst_index].append(flow_dict[flow_id].get_analysis(pkt_dict))
            prev = pkt

            meta.append(flow_dict[flow_id].get_feature_vector())
    finally:
        # Restore the original stdout/stderr descriptors.
        os.dup2(save[0], 1)
        os.dup2(save[1], 2)

    return np.array(meta), bursts


# test = extract_pkt('../GatewayVM/recorded_traffic/browser.pcap')
# print(np.shape(test))
# print(test[:,1])
    def new_flow(self, socket, packet):
        '''
        Create a fresh tcp.Flow for *packet* and register it under *socket*.

        The socket key must either already map to a list in flowdict or be
        absent entirely — i.e. if (src, dst) is passed in, (dst, src) must
        not be present.

        Args:
        * socket: ((ip, port), (ip, port))
        * packet: tcp.Packet
        '''
        started = tcp.Flow()
        started.add(packet)
        # setdefault creates the per-socket list on first use, then the
        # new flow is appended either way.
        self.flowdict.setdefault(socket, []).append(started)
Exemple #10
0
def main():
    """Read the input video, accumulate per-frame optical flow, and save
    the uncertainty of the accumulated flow to args["raw_file"].
    """
    #Now we have the appropriate video file retrieval information under cap
    t0 = time.time()
    cap = cv2.VideoCapture(args["input"])
    initialized = False

    # initialize the FourCC and video writer (writer is currently unused;
    # kept for the commented-out release logic below)
    fourcc = cv2.VideoWriter_fourcc(*args["codec"])
    writer = None

    while cap.isOpened():
        try:
            ret, frame = cap.read()
            print('Ret: {} and frame: {}'.format(ret, frame))
            if not initialized:
                #Builds the appropriate optical flow
                of = flow.Flow(frame)  #dense_hsv')
                initialized = True
                # Read again so the first flow step has a proper frame pair.
                ret, frame = cap.read()
                iterations = 0
                flowSum = []

            if ret:
                flowSum.append(of.calc(frame))
            else:
                break
        except KeyboardInterrupt:
            cv2.destroyAllWindows()
            print("Process was Terminated.")
            # Fix: actually leave the capture loop on Ctrl-C; the original
            # swallowed the interrupt and kept looping on cap.read().
            break

    #if args["polar"]: writer.release()
    cv2.destroyAllWindows()

    t = str(datetime.now())
    X = ucert.uncert(np.array(flowSum))

    arrayStorage = np.array([t, X], dtype=object)
    np.save(args["raw_file"], arrayStorage)
Exemple #11
0
def parse_flows(f, h_map):
    """Read flow definitions from *f* and build the Flow objects.

    Args:
        f: input stream, consumed via next_line().
        h_map: mapping from host ID to Host object.

    Returns:
        (flows, f_map): the list of Flow objects and a dict keyed by
        flow ID. The map is also published as flow_class.Flow.f_map.
    """
    flows = []
    f_map = {}

    num_flows = int(next_line(f))

    for _ in xrange(num_flows):
        # Each record: flow id, source host id, destination host id,
        # amount of data to send, and the flow start time.
        flow_id = next_line(f)
        src_host = h_map[next_line(f)]
        dest_host = h_map[next_line(f)]
        data_amount = next_line(f, 'i')
        flow_start_time = next_line(f, 'f')

        new_flow = flow_class.Flow(flow_id, src_host, dest_host,
                                   data_amount, flow_start_time)
        f_map[flow_id] = new_flow
        flows.append(new_flow)

    # Set the Flow class map
    flow_class.Flow.f_map = f_map

    return (flows, f_map)
Exemple #12
0
import time

import requests
from raven import Client
import boto3

import flow as flow_module

from config import BOTNAME, BOTPW, ORG_ID, SENTRY_ACCESS_TOKEN, CHANNEL_MAP

# Authenticate as the configured bot account; Flow(BOTNAME) raises
# FlowError when this machine has no device registered for the bot yet.
try:
    flow = flow_module.Flow(BOTNAME)
except flow_module.Flow.FlowError:
    # First run on this machine: register a new device for the bot.
    flow = flow_module.Flow()
    flow.create_device(BOTNAME, BOTPW)
    print('Device for bot {} created'.format(BOTNAME))


def print_channels():
    """Print every channel the bot can see in ORG_ID, ANSI-coloured."""
    header = '\033[1mYour bot "{}" has access to these channels:\033[0m\n'
    print(header.format(BOTNAME))

    row = '\033[91m\033[1m"{name}":\033[0m \033[94m{id}\033[0m'
    for channel in flow.enumerate_channels(ORG_ID):
        print(row.format(**channel))


def test_sentry(app_name):
    url = 'https://sentry.io/api/0/projects/datamade/{}/keys/'.format(app_name)
    header = {'Authorization': 'Bearer {}'.format(SENTRY_ACCESS_TOKEN)}
Exemple #13
0

if __name__ == "__main__":
    # Read flow configuration file: 'Xapps_Ypkts.txt'.
    # X: number of app flows
    # Y: packet sending rate (pkts/s)
    appConfigFile = sys.argv[1]
    acf = open(appConfigFile, 'r')
    output_str_qos = '{}\t{}'.format(avg_thp.get_app_config(appConfigFile),
                                     'qos_violation')

    # Check the flowLog of each destination node in appConfigFile.
    lines = acf.readlines()
    for line in lines:
        items = line.split('\t')
        ac_flow = flow.Flow()
        ac_flow.parse('{} {} {}'.format(items[1], items[2], items[3]))
        dstId = avg_thp.get_nodeid_from_idport(items[2])
        startTime = int(float(items[6])) + 2
        duration = int(float(items[7]))
        bw_requirement = float(int(items[4]) * int(items[5]) * 8)

        # One flowlog file may contain multiple columns.
        lineNo = 0
        flowInfoFile = 'flowinfo_{}.txt'.format(dstId)
        fif = open(flowInfoFile, 'r')
        fifLines = fif.readlines()
        for fifLine in fifLines:
            lineNo += 1
            fitems = re.split(r'[\s :]', fifLine.strip())
            if not (len(fitems) == 6):
Exemple #14
0
def test_flow():
    """
    Exercise flow_class.Flow.ingest_packet() across five canned TCP flows,
    checking per-packet state, TCP flag accessors, payload extraction and
    the client/server direction-verification logic.

    Note: no testing of max_interpacket_interval and
    min_interpacket_interval as they become imprecise due
    to floating point and when tried using decimal module
    found that would not serialise into Pymongo db.

    To create test packet data, capture packet in Wireshark and:

      For the packet summary:
        Right-click packet in top pane, Copy -> Summary (text).
        Edit pasted text as appropriate

      For the packet hex:
        Right-click packet in top pane, Copy -> Bytes -> Hex Stream

      For the packet timestamp:
        Expand 'Frame' in the middle pane,
        right-click 'Epoch Time' Copy -> Value
    """

    #*** Flow 1 TCP handshake packet 1
    # 10.1.0.1 10.1.0.2 TCP 74 43297 > http [SYN] Seq=0 Win=29200 Len=0 MSS=1460 SACK_PERM=1 TSval=5982511 TSecr=0 WS=64
    flow1_pkt1 = binascii.unhexlify(
        "080027c8db910800272ad6dd08004510003c19fd400040060cab0a0100010a010002a9210050c37250d200000000a002721014330000020405b40402080a005b492f0000000001030306"
    )
    flow1_pkt1_timestamp = 1458782847.829442000

    #*** Flow 1 TCP handshake packet 2
    # 10.1.0.2 10.1.0.1 TCP 74 http > 43297 [SYN, ACK] Seq=0 Ack=1 Win=28960 Len=0 MSS=1460 SACK_PERM=1 TSval=5977583 TSecr=5982511 WS=64
    flow1_pkt2 = binascii.unhexlify(
        "0800272ad6dd080027c8db9108004500003c00004000400626b80a0100020a0100010050a9219e5c9d99c37250d3a0127120494a0000020405b40402080a005b35ef005b492f01030306"
    )
    flow1_pkt2_timestamp = 1458782847.830399000

    #*** Flow 1 TCP handshake packet 3
    # 10.1.0.1 10.1.0.2 TCP 66 43297 > http [ACK] Seq=1 Ack=1 Win=29248 Len=0 TSval=5982512 TSecr=5977583
    flow1_pkt3 = binascii.unhexlify(
        "080027c8db910800272ad6dd08004510003419fe400040060cb20a0100010a010002a9210050c37250d39e5c9d9a801001c9142b00000101080a005b4930005b35ef"
    )
    flow1_pkt3_timestamp = 1458782847.830426000

    #*** Flow 1 client to server payload 1
    #  10.1.0.1 10.1.0.2 TCP 71 [TCP segment of a reassembled PDU] [PSH + ACK]
    flow1_pkt4 = binascii.unhexlify(
        "080027c8db910800272ad6dd08004510003919ff400040060cac0a0100010a010002a9210050c37250d39e5c9d9a801801c9143000000101080a005b4d59005b35ef4745540d0a"
    )
    flow1_pkt4_timestamp = 1458782852.090698000

    #*** Flow 1 TCP ACK server to client
    # 10.1.0.2 10.1.0.1 TCP 66 http > 43297 [ACK] Seq=1 Ack=6 Win=28992 Len=0 TSval=5978648 TSecr=5983577
    flow1_pkt5 = binascii.unhexlify(
        "0800272ad6dd080027c8db91080045000034a875400040067e4a0a0100020a0100010050a9219e5c9d9ac37250d8801001c5df1800000101080a005b3a18005b4d59"
    )
    flow1_pkt5_timestamp = 1458782852.091542000

    #*** Flow 1 server to client response
    # 10.1.0.2 10.1.0.1 HTTP 162 HTTP/1.1 400 Bad Request  (text/plain)  [PSH + ACK]
    flow1_pkt6 = binascii.unhexlify(
        "0800272ad6dd080027c8db91080045000094a876400040067de90a0100020a0100010050a9219e5c9d9ac37250d8801801c5792f00000101080a005b3a18005b4d59485454502f312e31203430302042616420526571756573740d0a436f6e74656e742d4c656e6774683a2032320d0a436f6e74656e742d547970653a20746578742f706c61696e0d0a0d0a4d616c666f726d656420526571756573742d4c696e65"
    )
    flow1_pkt6_timestamp = 1458782852.091692000

    #*** Flow 1 client to server ACK
    # 10.1.0.1 10.1.0.2 TCP 66 43297 > http [ACK] Seq=6 Ack=97 Win=29248 Len=0 TSval=5983577 TSecr=5978648
    flow1_pkt7 = binascii.unhexlify(
        "080027c8db910800272ad6dd0800451000341a00400040060cb00a0100010a010002a9210050c37250d89e5c9dfa801001c9142b00000101080a005b4d59005b3a18"
    )
    flow1_pkt7_timestamp = 1458782852.091702000

    #*** Flow 2 TCP SYN used to test flow separation:
    # 10.1.0.1 10.1.0.2 TCP 74 43300 > http [SYN] Seq=0 Win=29200 Len=0 MSS=1460 SACK_PERM=1 TSval=7498808 TSecr=0 WS=64
    flow2_pkt1 = binascii.unhexlify(
        "080027c8db910800272ad6dd08004510003c23df4000400602c90a0100010a010002a9240050ab094fe700000000a002721014330000020405b40402080a00726c380000000001030306"
    )
    flow2_pkt1_timestamp = 1458788913.014564000

    #*** Flow 3 TCP FIN + ACK used to test flags:
    # 10.1.0.2 10.1.0.1 TCP 66 http > 43302 [FIN, ACK] Seq=733 Ack=20 Win=28992 Len=0 TSval=9412661 TSecr=9417590
    flow3_pkt1 = binascii.unhexlify(
        "0800272ad6dd080027c8db910800450000349e9a4000400688250a0100020a0100010050a92674c00c0659c96b07801101c51d1b00000101080a008fa035008fb376"
    )
    flow3_pkt1_timestamp = 1458796588.143693000

    #*** Flow 4 TCP RST + ACK used to test flags:
    # 10.1.0.2 10.1.0.1 TCP 60 81 > 38331 [RST, ACK] Seq=1 Ack=1 Win=0 Len=0
    flow4_pkt1 = binascii.unhexlify(
        "0800272ad6dd080027c8db91080045100028f819400040062ea20a0100020a010001005195bb0000000051a9e82350140000cbf20000000000000000"
    )
    flow4_pkt1_timestamp = 1458797058.605055000

    #*** Flow 5 TCP SYN + ACK used to test client-server directionality logic
    #*** (first packet in flow seen is reverse direction):
    # 10.1.0.2 10.1.0.1 TCP 74 http > 37335 [SYN, ACK] Seq=0 Ack=1 Win=28960 Len=0
    flow5_pkt1 = binascii.unhexlify(
        "0800272ad6dd080027c8db9108004500003c00004000400626b80a0100020a010001005091d717d5f5e6d965a329a01271205b930000020405b40402080a00231c6300232e1d01030306"
    )
    flow5_pkt1_timestamp = 1475053750.061161000

    #*** Packet lengths for flow 1 on the wire (null value for index 0):
    pkt_len = [0, 74, 74, 66, 71, 66, 162, 66]

    #*** Sanity check can read into dpkt:
    eth = dpkt.ethernet.Ethernet(flow1_pkt1)
    eth_src = mac_addr(eth.src)
    assert eth_src == '08:00:27:2a:d6:dd'

    #*** Instantiate a flow object:
    #*** NOTE(review): logger, _mongo_addr and _mongo_port are read from
    #*** module scope — they must be set up before this test runs.
    flow = flow_class.Flow(logger, _mongo_addr, _mongo_port)

    #*** Test Flow 1 Packet 1:
    flow.ingest_packet(flow1_pkt1, flow1_pkt1_timestamp)
    assert flow.packet_count == 1
    assert flow.packet_length == pkt_len[1]
    assert flow.ip_src == '10.1.0.1'
    assert flow.ip_dst == '10.1.0.2'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 43297
    assert flow.tcp_dst == 80
    assert flow.tcp_seq == 3279048914
    assert flow.tcp_acq == 0
    assert flow.tcp_syn() == 1
    assert flow.tcp_fin() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 0
    assert flow.payload == ""
    assert flow.packet_direction == 'c2s'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len[0:2])

    #*** Test Flow 1 Packet 2:
    flow.ingest_packet(flow1_pkt2, flow1_pkt2_timestamp)
    assert flow.packet_count == 2
    assert flow.packet_length == pkt_len[2]
    assert flow.ip_src == '10.1.0.2'
    assert flow.ip_dst == '10.1.0.1'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 80
    assert flow.tcp_dst == 43297
    assert flow.tcp_seq == 2656869785
    assert flow.tcp_acq == 3279048915
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 1
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.payload == ""
    assert flow.packet_direction == 's2c'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len[0:3])

    #*** Test Flow 1 Packet 3:
    flow.ingest_packet(flow1_pkt3, flow1_pkt3_timestamp)
    assert flow.packet_count == 3
    assert flow.packet_length == pkt_len[3]
    assert flow.ip_src == '10.1.0.1'
    assert flow.ip_dst == '10.1.0.2'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 43297
    assert flow.tcp_dst == 80
    assert flow.tcp_seq == 3279048915
    assert flow.tcp_acq == 2656869786
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.payload == ""
    assert flow.packet_direction == 'c2s'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len[0:4])

    #*** Random packet to ensure it doesn't count against flow 1:
    flow.ingest_packet(flow2_pkt1, flow2_pkt1_timestamp)

    #*** Test Flow 1 Packet 4:
    flow.ingest_packet(flow1_pkt4, flow1_pkt4_timestamp)
    assert flow.packet_count == 4
    assert flow.packet_length == pkt_len[4]
    assert flow.ip_src == '10.1.0.1'
    assert flow.ip_dst == '10.1.0.2'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 43297
    assert flow.tcp_dst == 80
    assert flow.tcp_seq == 3279048915
    assert flow.tcp_acq == 2656869786
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 1
    assert flow.tcp_ack() == 1
    assert flow.payload == "GET\r\n"
    assert flow.packet_direction == 'c2s'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len[0:5])

    #*** Test Flow 1 Packet 5:
    flow.ingest_packet(flow1_pkt5, flow1_pkt5_timestamp)
    assert flow.packet_count == 5
    assert flow.packet_length == pkt_len[5]
    assert flow.ip_src == '10.1.0.2'
    assert flow.ip_dst == '10.1.0.1'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 80
    assert flow.tcp_dst == 43297
    assert flow.tcp_seq == 2656869786
    assert flow.tcp_acq == 3279048920
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.payload == ""
    assert flow.packet_direction == 's2c'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len[0:6])

    #*** Test Flow 1 Packet 6:
    flow.ingest_packet(flow1_pkt6, flow1_pkt6_timestamp)
    assert flow.packet_count == 6
    assert flow.packet_length == pkt_len[6]
    assert flow.ip_src == '10.1.0.2'
    assert flow.ip_dst == '10.1.0.1'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 80
    assert flow.tcp_dst == 43297
    assert flow.tcp_seq == 2656869786
    assert flow.tcp_acq == 3279048920
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 1
    assert flow.tcp_ack() == 1
    #*** Convert payload back to hex for comparison:
    #*** NOTE(review): str.encode("hex") is Python 2 only; this test module
    #*** appears to target Python 2 (binascii.hexlify would be the py3 form).
    assert flow.payload.encode(
        "hex"
    ) == "485454502f312e31203430302042616420526571756573740d0a436f6e74656e742d4c656e6774683a2032320d0a436f6e74656e742d547970653a20746578742f706c61696e0d0a0d0a4d616c666f726d656420526571756573742d4c696e65"
    assert flow.packet_direction == 's2c'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len[0:7])

    #*** Test Flow 1 Packet 7:
    flow.ingest_packet(flow1_pkt7, flow1_pkt7_timestamp)
    assert flow.packet_count == 7
    assert flow.packet_length == pkt_len[7]
    assert flow.ip_src == '10.1.0.1'
    assert flow.ip_dst == '10.1.0.2'
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.tcp_src == 43297
    assert flow.tcp_dst == 80
    assert flow.tcp_seq == 3279048920
    assert flow.tcp_acq == 2656869882
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.payload == ""
    assert flow.packet_direction == 'c2s'
    assert flow.verified_direction == 'verified-SYN'
    assert flow.max_packet_size() == max(pkt_len)

    #*** Test Flow 3 packet for TCP FIN flag:
    flow.ingest_packet(flow3_pkt1, flow3_pkt1_timestamp)
    assert flow.tcp_fin() == 1
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.verified_direction == 0

    #*** Test Flow 4 packet for TCP RST flag:
    flow.ingest_packet(flow4_pkt1, flow4_pkt1_timestamp)
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 0
    assert flow.tcp_rst() == 1
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.client == '10.1.0.2'
    assert flow.server == '10.1.0.1'
    assert flow.packet_direction == 'c2s'
    assert flow.verified_direction == 0

    #*** Test Flow 5 packet for reversed client-server direction
    #*** This is the first packet seen in flow and is a SYN+ACK from server
    flow.ingest_packet(flow5_pkt1, flow5_pkt1_timestamp)
    assert flow.tcp_fin() == 0
    assert flow.tcp_syn() == 1
    assert flow.tcp_rst() == 0
    assert flow.tcp_psh() == 0
    assert flow.tcp_ack() == 1
    assert flow.client == '10.1.0.1'
    assert flow.server == '10.1.0.2'
    assert flow.packet_direction == 's2c'
    assert flow.verified_direction == 'verified-SYNACK'
Exemple #15
0
        i += 1
        if i > 10000:
            return xi, yi, e, ki

        ki_prev = ki[:]


class PRSolution:
    """Empty placeholder result type (PR presumably stands for
    Peng-Robinson, per the surrounding equilibrium code — confirm)."""
    pass


if __name__ == '__main__':
    import flow

    # Build a feed stream at fixed conditions. Units of temperature and
    # pressure are whatever flow.Flow expects — confirm against the flow
    # module before reusing these numbers.
    f = flow.Flow(mass_flows=const.mass_flows,
                  temperature=39.99,
                  pressure=12000)

    # Run the Peng-Robinson equilibrium calculation defined above and
    # print the returned e and xi values.
    xi, yi, e, *_ = calculate_equilibrium_by_pr(f.mole_fractions,
                                                f.temperature, f.pressure,
                                                [tc for tc in const.TC],
                                                [pc for pc in const.PC],
                                                const.OMEGA, const.PR_Kij)

    print(e)
    print(xi)

    # import matplotlib.pyplot as plt
    #
    # plt.style.use('seaborn-whitegrid')
    #
Exemple #16
0
import json

import boto.swf.layer2 as swf

import flow

# Initial workflow state, passed to SWF as a JSON-encoded input payload.
initial_context = {'foo': 'bar'}

# Handle to the 'dev' flow at version 1.0; its attributes drive the
# SWF workflow-type registration below.
hello_world_flow = flow.Flow('dev', '1.0')

# The workflow type reuses the flow's name as its task list name.
workflow_type = swf.WorkflowType(
    name=hello_world_flow.name,
    domain=hello_world_flow.domain,
    version=hello_world_flow.version,
    task_list=hello_world_flow.name)
workflow_execution = workflow_type.start(input=json.dumps(initial_context))

print("Launching SWF for flow '{}' with WorkflowId '{}' and RunId '{}'".format(
    workflow_execution.name, workflow_execution.workflowId,
    workflow_execution.runId))
if __name__ == "__main__":
    # Read flow configuration file: 'Xapps_Ypkts.txt'.
    # X: number of app flows
    # Y: packet sending rate (pkts/s)
    appConfigFile = sys.argv[1]
    acf = open(appConfigFile, 'r')
    output_str = '{}\t{}'.format(avg_thp.get_app_config(appConfigFile),
                                 'rt_dscv_count')

    # Generate a set of source node IDs from appConfigFile.
    src_set = set([])
    lines = acf.readlines()
    for line in lines:
        items = line.split('\t')
        ac_flow = flow.Flow()
        ac_flow.parse('{} {} {}'.format(items[1], items[2], items[3]))
        ac_src_id = avg_thp.get_nodeid_from_idport(items[1])
        src_set.add(ac_src_id)

    log_file = open('nodestat_src_rt.txt', 'w')
    total_rt_dscv_count = 0
    total_rt_dscv_fail_count = 0
    for src_id in src_set:
        src_rtlog_file = 'srcrtlog_{}.txt'.format(src_id)
        srf = open(src_rtlog_file, 'r')
        srf_lines = srf.readlines()

        rt_dscv_count = 0
        rt_dscv_fail_count = 0
        for srf_line in srf_lines:
Exemple #18
0
    def processAlgorithm(self, parameters, context, feedback):
        """Run the 2D flow (and optional tracer) simulation on a contour grid.

        Builds a Cartesian porepy grid matching the input layer's extent,
        maps the per-cell permeability tensor components (XX, XY, YY) onto
        it, solves a steady-state pressure problem between the high- and
        low-pressure boundaries, optionally advects a tracer over `steps`
        time steps, and writes pressure / flux / azimuth (/ tracer) values
        into a new polygon sink layer.

        Returns ``{self.outGrid: dest_id}`` on success, or ``{}`` after
        reporting a validation/solver error through ``feedback``.
        """
        try:
            # Lazy import so a missing optional dependency surfaces as a
            # user-facing error message instead of breaking plugin load.
            dir_path = os.path.dirname(os.path.realpath(__file__))
            sys.path.insert(0, dir_path)
            import porepy as pp
            import numpy as np
            import scipy.sparse as sps
            import flow as f
            from fcts import read_cart_grid, bc_flag, argsort_cart_grid
            from tracer import Tracer
        except Exception as e:
            feedback.reportError(
                QCoreApplication.translate('Error', '%s' % (e)))
            feedback.reportError(QCoreApplication.translate('Error', ''))
            feedback.reportError(
                QCoreApplication.translate(
                    'Error', 'Please install porepy dependencies'))
            return {}

        #Parameters
        layer = self.parameterAsLayer(parameters, self.Grid, context)
        # xx/xy/yy are the names of the layer fields holding the
        # permeability tensor components.
        xx = self.parameterAsString(parameters, self.xx, context)
        xy = self.parameterAsString(parameters, self.xy, context)
        yy = self.parameterAsString(parameters, self.yy, context)
        steps = self.parameterAsInt(parameters, self.steps, context)
        end = self.parameterAsDouble(parameters, self.end, context) * pp.SECOND
        dV = self.parameterAsInt(parameters, self.Direction, context)
        tol = 1e-4 * pp.METER
        lP = parameters[self.lowPressure] * pp.PASCAL
        hP = parameters[self.highPressure] * pp.PASCAL
        mu = 1e-3 * pp.PASCAL * pp.SECOND  #Define here the dynamic viscosity of the liquid phase in [Pa s]

        # Map the Direction combo-box index onto porepy's flow direction.
        if dV == 0:
            direction = "left_to_right"
        elif dV == 1:
            direction = "right_to_left"
        elif dV == 2:
            direction = "bottom_to_top"
        else:
            direction = "top_to_bottom"

        try:
            # A 'Rotation' field is added by the contour grid tool; its
            # absence means the prerequisite tool was not run.
            field_check = layer.fields().indexFromName('Rotation')
            if field_check == -1:
                feedback.reportError(
                    QCoreApplication.translate(
                        'Error',
                        'Invalid Contour Grid layer - please run the contour grid tool prior to the 2D Flow tool'
                    ))
                return {}

        except Exception:
            feedback.reportError(
                QCoreApplication.translate(
                    'Error',
                    'No attribute table found. Do not use the "Selected features only" option'
                ))
            return {}

        if lP > hP:
            feedback.reportError(
                QCoreApplication.translate(
                    'Error',
                    'Low pressure value is higher than high pressure value.'))
            return {}
        # Output fields appended to the copied input schema.
        newFields = [
            'Pressure', 'Flux', 'Azimuth', 'Tracer', 'StartTime', 'EndTime',
            'Step'
        ]

        fields = QgsFields()
        for field in layer.fields():
            if field.name() not in newFields:
                fields.append(QgsField(field.name(), field.type()))

        # First four output fields are doubles; the time-stamp/step fields
        # are added explicitly below.
        for field in newFields[:-3]:
            fields.append(QgsField(field, QVariant.Double))
        fields.append(QgsField('StartTime', QVariant.DateTime))
        fields.append(QgsField('EndTime', QVariant.DateTime))
        fields.append(QgsField('Step', QVariant.Double))

        (writer, dest_id) = self.parameterAsSink(parameters, self.outGrid,
                                                 context, fields,
                                                 QgsWkbTypes.Polygon,
                                                 layer.sourceCrs())

        #Define xx,yy and xy perm
        kxx = []
        kyy = []
        kxy = []

        #Get dictionary of features
        # Keyed by the 'Sample_No_' attribute; selection takes precedence
        # over the full layer.
        features = {
            feature['Sample_No_']: feature
            for feature in layer.selectedFeatures()
        }
        if len(features) == 0:
            features = {
                feature['Sample_No_']: feature
                for feature in layer.getFeatures()
            }
            extent = layer.extent()
        else:
            extent = layer.boundingBoxOfSelected()
        total = len(features)

        if total == 0:
            feedback.reportError(
                QCoreApplication.translate(
                    'Error', 'No grid cells found in the input dataset'))
            return {}

        c = 0

        # Sort data by Sample No
        features = collections.OrderedDict(sorted(features.items()))
        W = False
        for FID, feature in features.items():
            c += 1
            # NOTE(review): total is never -1 here (zero case returned
            # above), and c * total is not a percentage -- confirm the
            # intended progress scaling.
            if total != -1:
                feedback.setProgress(int(c * total))

            xxV, yyV, xyV = feature[xx], feature[yy], feature[xy]
            if xxV == 0 and yyV == 0 and xyV == 0:
                feedback.reportError(
                    QCoreApplication.translate(
                        'Info',
                        'Warning: Grid sample no. %s contains a pereambility of 0 for XX, XY and YY'
                        % (FID)))
                W = True

            kxx.append(xxV * pp.MILLIDARCY)
            kyy.append(yyV * pp.MILLIDARCY)
            kxy.append(xyV * pp.MILLIDARCY)

            if type(xxV) != float or type(xyV) != float or type(yyV) != float:
                feedback.reportError(
                    QCoreApplication.translate(
                        'Info',
                        'Warning: Grid sample no. %s contains non-float values for pereambility measurements'
                        % (FID)))
                W = True
        # Any zero or non-float permeability aborts the run after all
        # cells have been checked (so every offending cell is reported).
        if W:
            feedback.reportError(
                QCoreApplication.translate(
                    'Info',
                    'Invalid permeability measurements created an empty 2D flow grid!'
                ))
            return {}

        kxx, kyy, kxy = np.array(kxx), np.array(kyy), np.array(kxy)

        # NOTE(review): rotation/spacing are taken from the last feature
        # iterated above -- assumes they are uniform across the grid.
        rotation = feature['Rotation']
        spacing = feature['Spacing']

        P = 10  #Precision
        #Read grid geometry

        extentGeom = QgsGeometry.fromRect(extent)
        extentGeom = extentGeom.orientedMinimumBoundingBox()

        # NOTE(review): verify the width/height index order of
        # orientedMinimumBoundingBox() against the QGIS API version in use.
        dWidth = round(extentGeom[4], P)
        dHeight = round(extentGeom[3], P)  #Domain width and height

        Ny = round(dHeight / spacing)
        Nx = round(dWidth / spacing)

        # A non-rectangular selection makes cell count disagree with Nx*Ny;
        # warn but continue.
        count = Nx * Ny
        if count != c:
            feedback.reportError(
                QCoreApplication.translate(
                    'Warning',
                    'Warning: Selected contour grid does not appear to be a rectangle.'
                ))
            feedback.reportError(QCoreApplication.translate('Warning', ''))

        # Read the grid
        gb = read_cart_grid(Nx, Ny, dWidth, dHeight)

        feedback.pushInfo(
            QCoreApplication.translate(
                'Output',
                'Constructing grid with %s columns and %s rows a domain size (width x height) of %s x %s.'
                % (Nx, Ny, dWidth, dHeight)))

        # mask that map the permeability from qgis to pp, and vice-versa
        mask, inv_mask = argsort_cart_grid(Nx, Ny)

        # Permeabilities divided by viscosity: porepy expects mobility k/mu.
        param_flow = {
            "tol": tol,
            "kxx": kxx[mask] / mu,
            "kyy": kyy[mask] / mu,
            "kxy": kxy[mask] / mu,
            "flow_direction": direction,
            "low_value": lP,
            "high_value": hP,
            "north": np.array([0, 1, 0]),
        }
        try:
            flow = f.Flow(gb)
            flow.set_data(param_flow, bc_flag)
            flow.solve()
        except Exception as e:
            feedback.reportError(QCoreApplication.translate('Error', str(e)))
            return {}

        # Tracer transport is only run when more than one time step was
        # requested.
        if steps > 1:
            param_tracer = {
                "tol": tol,
                "num_steps": steps,
                "end_time": end,
                "flow_direction": direction,
                "low_value": lP,
                "high_value": hP
            }

            tracer = Tracer(gb)
            tracer.set_data(param_tracer, bc_flag)
            tracer.solve()

        # Collect per-cell solution arrays from the grid-bucket state.
        t = []
        for g, d in gb:
            p = d[pp.STATE][flow.pressure]
            v = d[pp.STATE][flow.norm_flux]
            a = d[pp.STATE][flow.azimuth]
            if steps > 1:
                for time_step, current_time in enumerate(tracer.all_time):
                    var_name = tracer.variable + "_" + str(time_step)
                    traceD = d[pp.STATE][var_name]
                    traceD = traceD[inv_mask]
                    t.append(traceD)

        #Reshape the output data
        # Map porepy cell ordering back to the QGIS feature ordering.
        p = p[inv_mask]
        v = v[inv_mask]
        a = a[inv_mask]

        feedback.pushInfo(
            QCoreApplication.translate('Output', 'Updating Feature Layer'))

        #Update the dataset
        fet = QgsFeature()
        for enum, FID in enumerate(features):
            feature = features[FID]
            FID = feature.id()
            geom = feature.geometry()
            if total != -1:
                feedback.setProgress(int(enum * total))

            # Copy across the original (non-output) attribute values.
            rows = []
            for field in layer.fields():
                if field.name() not in newFields:
                    rows.append(feature[field.name()])

            # Azimuth in degrees, corrected by the grid rotation and
            # normalised to [0, 360).
            aV = math.degrees(float(a[enum])) + rotation
            if type(aV) == float:
                aV %= 360
            # For horizontal flow directions, flip the azimuth 180 degrees.
            if dV < 2:
                if aV < 180:
                    aV += 180
                else:
                    aV -= 180

            rows.extend([
                round(float(p[enum]), P),
                round(float(v[enum]), P),
                round(float(aV), 2)
            ])

            if steps > 1:

                # One output feature per tracer time step, stamped with a
                # synthetic start/end time window.
                time = datetime.datetime(1, 1, 1, 0, 0, 0)
                deltaTime = datetime.timedelta(seconds=end / steps)

                for n in range(len(t)):
                    newRows = rows.copy()
                    newRows.append(round(float(t[n][enum]), P))
                    newRows.append(str(time))
                    time += deltaTime
                    newRows.append(str(time))
                    newRows.append(int(n))

                    fet.setGeometry(geom)
                    fet.setAttributes(newRows)
                    writer.addFeature(fet, QgsFeatureSink.FastInsert)
            else:
                fet.setGeometry(geom)
                fet.setAttributes(rows)
                writer.addFeature(fet, QgsFeatureSink.FastInsert)

        return {self.outGrid: dest_id}
Exemple #19
0
    def __init__(self, _config):
        """Initialise traffic-classification state from a config object.

        Sets up logging (syslog and/or console, per config), initialises
        the identity-harvest flags and the empty classifier list, reads
        the elephant-flow suppression thresholds, and instantiates the
        MongoDB-backed flow object that classifiers operate on.
        """
        #*** Get logging config values from config class:
        _logging_level_s = _config.get_value \
                                    ('tc_logging_level_s')
        _logging_level_c = _config.get_value \
                                    ('tc_logging_level_c')
        _syslog_enabled = _config.get_value('syslog_enabled')
        _loghost = _config.get_value('loghost')
        _logport = _config.get_value('logport')
        _logfacility = _config.get_value('logfacility')
        _syslog_format = _config.get_value('syslog_format')
        _console_log_enabled = _config.get_value('console_log_enabled')
        _coloredlogs_enabled = _config.get_value('coloredlogs_enabled')
        _console_format = _config.get_value('console_format')
        #*** Set up Logging:
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)
        #*** Don't pass records up to the root logger (handlers below
        #*** control output; avoids duplicate log lines):
        self.logger.propagate = False

        #*** Syslog:
        if _syslog_enabled:
            #*** Log to syslog on host specified in config.yaml:
            self.syslog_handler = logging.handlers.SysLogHandler(
                address=(_loghost, _logport), facility=_logfacility)
            syslog_formatter = logging.Formatter(_syslog_format)
            self.syslog_handler.setFormatter(syslog_formatter)
            self.syslog_handler.setLevel(_logging_level_s)
            #*** Add syslog log handler to logger:
            self.logger.addHandler(self.syslog_handler)
        #*** Console logging:
        if _console_log_enabled:
            #*** Log to the console:
            if _coloredlogs_enabled:
                #*** Colourise the logs to make them easier to understand:
                coloredlogs.install(level=_logging_level_c,
                                    logger=self.logger,
                                    fmt=_console_format,
                                    datefmt='%H:%M:%S')
            else:
                #*** Add console log handler to logger:
                self.console_handler = logging.StreamHandler()
                console_formatter = logging.Formatter(_console_format)
                self.console_handler.setFormatter(console_formatter)
                self.console_handler.setLevel(_logging_level_c)
                self.logger.addHandler(self.console_handler)

        #*** Initialise Identity Harvest flags (they get set at DPAE join time)
        self.id_arp = 0
        self.id_lldp = 0
        self.id_dns = 0
        self.id_dhcp = 0
        #*** Initialise list for TC classifiers to run:
        self.classifiers = []

        #*** Retrieve config values for elephant flow suppression:
        self.suppress_flow_pkt_count_initial = \
                           _config.get_value("suppress_flow_pkt_count_initial")
        self.suppress_flow_pkt_count_backoff = \
                           _config.get_value("suppress_flow_pkt_count_backoff")

        #*** Retrieve config values for flow class db connection to use:
        _mongo_addr = _config.get_value("mongo_addr")
        _mongo_port = _config.get_value("mongo_port")
        #*** Instantiate a flow object for classifiers to work with:
        self.flow = flow.Flow(self.logger, _mongo_addr, _mongo_port)
Exemple #20
0
from werkzeug.urls import url_parse
from flask_login import current_user
from flask_login import login_user
from flask_login import logout_user
from flask_login import login_required

from webapp import webapp
from webapp import socketio
from webapp import db
from webapp.models import User
from webapp.forms import LoginForm
# from webapp.forms import RegistrationForm

# Make the parent directory importable so the top-level 'flow' module is found.
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
import flow as Flow
# NOTE: this rebinding shadows the module object -- from here on, 'Flow' is
# the singleton flow.Flow() instance shared by the route handlers below.
Flow = Flow.Flow()


@webapp.route("/")
@webapp.route("/index")
@login_required
def index():
    door = Flow.read_door()
    lamp = Flow.LAMP_state
    uvb = Flow.UVB_state
    heater = Flow.HEATER_state
    rain = Flow.RAIN_state
    new_temp_sp_val = Flow.new_temperature_setpoint
    battery_percentage = Flow.read_battery_percentage()
    battery_voltage = Flow.read_battery_voltage()
    temperature = Flow.temperature1