def __init__(self, switch_socket, controller_host, controller_port):
    """Proxy thread sitting between an OpenFlow switch and its controller.

    Opens a TCP connection to the controller at (controller_host,
    controller_port) and pairs it with the already-accepted
    switch_socket.  Both sockets are set non-blocking with Nagle's
    algorithm disabled (TCP_NODELAY) so messages are relayed with
    minimal latency.  Also connects to the local MongoDB instance used
    to record observed messages.

    :param switch_socket: connected socket to the OpenFlow switch
    :param controller_host: controller hostname/IP to connect to
    :param controller_port: controller TCP port to connect to
    """
    super(MessageWatcherAgentThread, self).__init__()
    # Upstream (controller-facing) socket: connect, then make it
    # non-blocking and latency-friendly.
    self.controller_socket = socket.socket()
    self.controller_socket.connect((controller_host, controller_port))
    self.controller_socket.setblocking(0)
    self.controller_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    # Downstream (switch-facing) socket, accepted by the caller;
    # configured the same way as the controller side.
    self.switch_socket = switch_socket
    self.switch_socket.setblocking(0)
    self.switch_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    self.is_alive = True
    # version=0x01 is the OpenFlow 1.0 wire version; presumably updated
    # once version negotiation completes — TODO confirm against run().
    self.datapath = ofproto_protocol.ProtocolDesc(version=0x01)
    self.id = None  # NOTE(review): looks like the datapath id, filled in later — confirm
    # Separate relay buffers for each direction of the proxy.
    self.downstream_buf = bytearray()
    self.upstream_buf = bytearray()
    self.timeloop = time.time()
    # Connect to database.  Alternative endpoints kept for reference:
    # client = MongoClient('localhost', 27017)
    # self.db = client['netspec']
    # client = MongoClient('sd-lemon.naist.jp', 9999)
    # client = MongoClient('mongodb://*****:*****@opimon.documents.azure.com:10250/?ssl=true&ssl_cert_reqs=CERT_NONE')
    client = MongoClient('127.0.0.1', 27017)
    self.db = client.opimon
def test_record_removal():
    """
    Verify that an idle-timeout OFPFlowRemoved message from a switch is
    written into the flow_rems database collection, for both the forward
    and the reverse leg of the flow.  Messages are synthesised from JSON
    fixtures rather than captured from a live switch.
    """
    #*** OpenFlow 1.3 is the supported version:
    ofp_version = ofproto_v1_3.OFP_VERSION

    #*** Instantiate Flow class:
    flow = flows_module.Flow(config)

    def _load_fixture(path):
        """Read a JSON fixture file and return it as a dict."""
        with open(path, 'r') as json_file:
            return json.loads(json_file.read())

    #*** Load JSON representations of flow removed messages:
    json_dict_tx = _load_fixture('OFPMsgs/OFPFlowRemoved_1.json')
    json_dict_rx = _load_fixture('OFPMsgs/OFPFlowRemoved_2.json')

    #*** Fake datapath and synthesised messages:
    datapath = ofproto_protocol.ProtocolDesc(version=ofp_version)
    datapath.id = 1
    msg_tx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_tx)
    msg_rx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_rx)
    logger.debug("msg_tx=%s", msg_tx)

    #*** Exercise the method under test:
    flow.record_removal(msg_tx)
    flow.record_removal(msg_rx)

    #*** Forward leg should be the newest record matching this query:
    cursor = flow.flow_rems.find(
        {'ip_A': '10.1.0.1', 'tp_B': 80}).sort('$natural', -1).limit(1)
    forward = list(cursor)[0]
    logger.debug("result=%s", forward)
    assert forward['table_id'] == 1
    assert forward['ip_B'] == '10.1.0.2'
    assert forward['tp_A'] == 43297
    assert forward['packet_count'] == 10
    assert forward['flow_hash'] == nethash.hash_flow(
        ('10.1.0.1', '10.1.0.2', 43297, 80, 6))
    assert forward['cookie'] == 23
    assert forward['direction'] == 'forward'

    #*** Return leg of flow:
    cursor = flow.flow_rems.find(
        {'ip_B': '10.1.0.1', 'tp_A': 80}).sort('$natural', -1).limit(1)
    reverse = list(cursor)[0]
    logger.debug("result=%s", reverse)
    assert reverse['table_id'] == 1
    assert reverse['ip_A'] == '10.1.0.2'
    assert reverse['tp_B'] == 43297
    assert reverse['packet_count'] == 9
    assert reverse['flow_hash'] == nethash.hash_flow(
        ('10.1.0.2', '10.1.0.1', 80, 43297, 6))
    assert reverse['cookie'] == 1000000023
    assert reverse['direction'] == 'reverse'
def handle(self):
    """Minimal fake OpenFlow switch request handler.

    Reads OpenFlow messages from the connected controller socket,
    answers HELLO with a HELLO, records FLOW_MODs, and answers a
    BARRIER_REQUEST with a BARRIER_REPLY before terminating.
    """
    desc = ofproto_protocol.ProtocolDesc()
    # Bytes left over after the previous message (recv may return more
    # than one OpenFlow message per read).
    residue = b''
    while True:
        if residue:
            data = residue
            residue = b''
        else:
            data = self.request.recv(1024)
            if data == b'':
                # Peer closed the connection.
                break
        if self.verbose:
            print(data)
        # Parse the fixed OpenFlow header: (version, type, length, xid).
        h = ofproto_parser.header(data)
        if self.verbose:
            print(h)
        version, msg_type, msg_len, xid = h
        # Anything past this message's length belongs to the next one.
        residue = data[msg_len:]
        desc.set_version(version=version)
        if msg_type == desc.ofproto.OFPT_HELLO:
            hello = desc.ofproto_parser.OFPHello(desc)
            hello.serialize()
            self.request.send(hello.buf)
        elif msg_type == desc.ofproto.OFPT_FLOW_MOD:
            # HACK: Clear xid into zero
            # NOTE(review): `buf` is not defined anywhere in this method's
            # scope — this branch raises NameError when a FLOW_MOD arrives.
            # The sibling handler in this file uses
            # self._add_msg_to_buf(data, msg_len) instead; confirm which
            # buffer this was meant to append to.
            buf.append(data[:4] + b'\x00\x00\x00\x00' + data[8:msg_len])
        elif msg_type == desc.ofproto.OFPT_BARRIER_REQUEST:
            # Echo the request's xid back in the reply, then stop serving.
            brep = desc.ofproto_parser.OFPBarrierReply(desc)
            brep.xid = xid
            brep.serialize()
            self.request.send(brep.buf)
            break
def test_flows_removed_stats_count():
    """
    Check that the flows_removed stats-count API reports 0, 1 and then 2
    as synthesised flow-removal messages are recorded one at a time.
    """
    #*** Start api_external as separate process:
    logger.info("Starting api_external")
    api_ps = multiprocessing.Process(target=api.run, args=())
    api_ps.start()

    #*** Instantiate Flow class (OpenFlow 1.3 supported):
    flow = flows_module.Flow(config)

    #*** Load JSON representations of flow removed messages:
    with open('OFPMsgs/OFPFlowRemoved_1.json', 'r') as json_file:
        json_dict_tx = json.loads(json_file.read())
    with open('OFPMsgs/OFPFlowRemoved_2.json', 'r') as json_file:
        json_dict_rx = json.loads(json_file.read())

    #*** Fake datapath and synthesised messages:
    datapath = ofproto_protocol.ProtocolDesc(version=ofproto_v1_3.OFP_VERSION)
    datapath.id = 1
    msg_tx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_tx)
    msg_rx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_rx)

    def _check_count(expected):
        """Call the external API and assert the flows_removed count."""
        api_result = get_api_result(URL_TEST_FLOWS_REMOVED_STATS_COUNT)
        logger.debug("api_result=%s", api_result)
        assert api_result['flows_removed'] == expected

    #*** Count climbs as each removal is recorded:
    _check_count(0)
    flow.record_removal(msg_tx)
    _check_count(1)
    flow.record_removal(msg_rx)
    _check_count(2)

    #*** Stop api_external sub-process:
    api_ps.terminate()
def test_get_flow_data_xfer():
    """
    Exercise api.get_flow_data_xfer with synthesised flow-removal
    messages and check the tx/rx byte and packet totals it returns.
    """
    #*** Instantiate Flow class (OpenFlow 1.3 supported) and ingest
    #*** packets for both directions of the flow:
    flow = flows_module.Flow(config)
    flow.ingest_packet(DPID1, INPORT1, pkts.RAW[0], datetime.datetime.now())
    flow.ingest_packet(DPID1, INPORT2, pkts.RAW[1], datetime.datetime.now())

    #*** Load JSON representations of flow removed messages:
    with open('OFPMsgs/OFPFlowRemoved_1.json', 'r') as json_file:
        json_dict_tx = json.loads(json_file.read())
    with open('OFPMsgs/OFPFlowRemoved_2.json', 'r') as json_file:
        json_dict_rx = json.loads(json_file.read())

    #*** Fake datapath and synthesised messages:
    datapath = ofproto_protocol.ProtocolDesc(version=ofproto_v1_3.OFP_VERSION)
    datapath.id = 1
    msg_tx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_tx)
    msg_rx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_rx)
    logger.debug("msg_tx=%s", msg_tx)

    #*** Record both flow removals:
    flow.record_removal(msg_tx)
    flow.record_removal(msg_rx)

    #*** Now, test the get_flow_data_xfer method:
    record = {
        'ip_src': '10.1.0.1',
        'ip_dst': '10.1.0.2',
        'tp_src': 43297,
        'tp_dst': 80,
        'proto': 6,
        'flow_hash': '9822b2867652ee0957892482b9f004c3'
    }
    xfer = api.get_flow_data_xfer(record)
    logger.debug("xfer=%s", xfer)

    #*** Expected transfer stats in both directions:
    expected = {
        'tx_found': 1,
        'tx_bytes': 744,
        'tx_pkts': 10,
        'rx_found': 1,
        'rx_bytes': 6644,
        'rx_pkts': 9,
    }
    for key, value in expected.items():
        assert xfer[key] == value
def test_to_actions_pop_mpls(self):
    """Translate a POP_MPLS action dict and verify the parsed action."""
    datapath = ofproto_protocol.ProtocolDesc(version=ofproto_v1_3.OFP_VERSION)
    action_dicts = [
        {
            'type': 'POP_MPLS',
            'ethertype': 0x0800
        }
    ]
    instructions = ofctl_v1_3.to_actions(datapath, action_dicts)
    first_action = instructions[0].actions[0]
    ok_(isinstance(first_action, OFPActionPopMpls))
    eq_(first_action.ethertype, 0x0800)
def _test_msg(self, name, wire_msg, json_str):
    """Round-trip one OpenFlow message between wire and JSON forms.

    Checks on-wire -> OFPxxx -> json against the expected json, then
    json -> OFPxxx -> wire, and finally that the optional "len"/"length"
    fields may be omitted from the json input.

    :param name: fixture name (used for the /tmp debug dump)
    :param wire_msg: raw on-wire message bytes
    :param json_str: expected JSON representation as a string
    """
    json_dict = json.loads(json_str)
    # on-wire -> OFPxxx -> json
    (version, msg_type, msg_len, xid) = ofproto_parser.header(wire_msg)
    try:
        has_parser, has_serializer = implemented[version][msg_type]
    except KeyError:
        # Unlisted messages are assumed to support both directions.
        has_parser = True
        has_serializer = True

    dp = ofproto_protocol.ProtocolDesc(version=version)
    if has_parser:
        msg = ofproto_parser.msg(dp, version, msg_type, msg_len, xid,
                                 wire_msg)
        json_dict2 = self._msg_to_jsondict(msg)
        # XXXdebug code.  json.dumps returns str, so the file must be
        # opened in text mode ('w', not 'wb'); the context manager also
        # fixes the original's leaked file handle.
        with open('/tmp/%s.json' % name, 'w') as f:
            f.write(json.dumps(json_dict2))
        eq_(json_dict, json_dict2)

    # json -> OFPxxx -> json
    msg2 = self._jsondict_to_msg(dp, json_dict)
    if has_serializer:
        msg2.serialize()
        eq_(self._msg_to_jsondict(msg2), json_dict)
        eq_(wire_msg, msg2.buf)

        # check if "len" "length" fields can be omitted
        def _remove(d, names):
            """Return a copy of d with the given dict keys removed."""
            f = lambda x: _remove(x, names)
            if isinstance(d, list):
                # list(...) so the result is a real list on Python 3,
                # where map() returns a lazy iterator.
                return list(map(f, d))
            if isinstance(d, dict):
                # .items() replaces the Python-2-only .iteritems().
                return {k: f(v) for k, v in d.items() if k not in names}
            return d

        json_dict3 = _remove(json_dict, ['len', 'length'])
        msg3 = self._jsondict_to_msg(dp, json_dict3)
        msg3.serialize()
        eq_(wire_msg, msg3.buf)

        # Re-serializing must be idempotent.
        msg2.serialize()
        eq_(wire_msg, msg2.buf)
def _test_match(self, attrs, test):
    """Round-trip a match dict: str -> match object -> str."""
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)

    # str -> match
    match = test.to_match(dp, attrs)
    for key, value in attrs.items():
        conv = self._conv_key(test, key, attrs)
        self._equal_str_to_match(conv, value, match, test)

    # match -> str
    match_str = test.match_to_str(match)
    for key, value in attrs.items():
        # Map OF1.2-style field names back to their OF1.0 equivalents
        # where a mapping exists.
        key_old = conv_of12_to_of10_dict.get(key, key)
        self._equal_match_to_str(key_old, value, match_str, test)
def handle(self):
    """Fake OpenFlow switch request handler with OF1.3 bundle support.

    Reads OpenFlow messages from the connected controller, answers
    HELLO, records FLOW_MODs via self._add_msg_to_buf, handles the
    ONF bundle-control experimenter extension (Ext-230) for OF1.3, and
    answers BARRIER_REQUEST with a BARRIER_REPLY.
    """
    desc = ofproto_protocol.ProtocolDesc()
    # Bytes left over after the previous message (recv may deliver more
    # than one OpenFlow message per read).
    residue = b''
    while True:
        if residue:
            data = residue
            residue = b''
        else:
            data = self.request.recv(1024)
            if data == b'':
                # Peer closed the connection.
                break
        if self.verbose:
            print(data)
        # Fixed OpenFlow header: (version, type, length, xid).
        h = ofproto_parser.header(data)
        if self.verbose:
            print(h)
        version, msg_type, msg_len, xid = h
        # Anything beyond this message's length belongs to the next one.
        residue = data[msg_len:]
        desc.set_version(version=version)
        if msg_type == desc.ofproto.OFPT_HELLO:
            hello = desc.ofproto_parser.OFPHello(desc)
            hello.serialize()
            self.request.send(hello.buf)
        elif msg_type == desc.ofproto.OFPT_FLOW_MOD:
            self._add_msg_to_buf(data, msg_len)
        elif version == 4 and msg_type == desc.ofproto.OFPT_EXPERIMENTER:
            # This is for OF13 Ext-230 bundle
            # TODO: support bundle for OF>1.3
            exp = desc.ofproto_parser.OFPExperimenter.parser(
                object(), version, msg_type, msg_len, xid, data)
            self._add_msg_to_buf(data, msg_len)
            if isinstance(exp, desc.ofproto_parser.ONFBundleCtrlMsg):
                # Reply with the corresponding *_REPLY control type
                # (request type + 1) and echo the request's xid.
                ctrlrep = desc.ofproto_parser.ONFBundleCtrlMsg(
                    desc, exp.bundle_id, exp.type + 1, 0, [])
                ctrlrep.xid = xid
                ctrlrep.serialize()
                self.request.send(ctrlrep.buf)
        elif msg_type == desc.ofproto.OFPT_BARRIER_REQUEST:
            brep = desc.ofproto_parser.OFPBarrierReply(desc)
            brep.xid = xid
            brep.serialize()
            self.request.send(brep.buf)
def _test_actions(self, act, test):
    """Round-trip one action dict: str -> action/instruction -> str.

    OF1.0 produces bare actions; later versions wrap them in
    instructions, so the comparison helper differs by version.
    """
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)
    act_type = act["type"]
    is_of10 = test.ver == ofproto_v1_0.OFP_VERSION

    # str -> action
    insts = test.to_actions(dp, [act])
    if is_of10:
        self._equal_str_to_act(insts[0], act, act_type, test)
    else:
        self._equal_str_to_inst(insts[0], act, act_type, test)

    # action -> str
    inst_str = test.actions_to_str(insts)
    if is_of10:
        self._equal_act_to_str(inst_str, act, act_type, test)
    else:
        self._equal_inst_to_str(inst_str, act, act_type, test)
def parser(cls, buf):
    """Parse one OpenFlow message from the head of *buf*.

    Returns a 3-tuple of (parsed instance wrapping the message, this
    class, remaining bytes after the message).  Messages with an
    unknown wire version, or whose body fails to parse, are wrapped in
    OFPUnparseableMsg rather than raising.

    :param buf: bytes beginning with an OpenFlow header
    """
    # Local imports avoid a circular dependency at module import time.
    from ryu.ofproto import ofproto_parser
    from ryu.ofproto import ofproto_protocol

    (version, msg_type, msg_len, xid) = ofproto_parser.header(buf)

    msg_parser = ofproto_parser._MSG_PARSERS.get(version)
    if msg_parser is None:
        # Unknown wire version: no datapath/protocol description can be
        # built, so record the payload as unparseable.
        msg = OFPUnparseableMsg(None, version, msg_type, msg_len, xid,
                                buf[cls._MIN_LEN:msg_len])
        return cls(msg), cls, buf[msg_len:]

    datapath = ofproto_protocol.ProtocolDesc(version=version)

    try:
        msg = msg_parser(datapath, version, msg_type, msg_len, xid,
                         buf[:msg_len])
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit propagate; any genuine parse failure still degrades
        # gracefully to an unparseable-message wrapper.
        msg = OFPUnparseableMsg(
            datapath, version, msg_type, msg_len, xid,
            buf[datapath.ofproto.OFP_HEADER_SIZE:msg_len])

    return cls(msg), cls, buf[msg_len:]
def test_flows_removed():
    """
    Ingest two synthesised flow-removal messages and check that the
    flows_removed API lists both, with the forward leg first and the
    reverse leg second.
    """
    #*** Start api_external as separate process:
    logger.info("Starting api_external")
    api_ps = multiprocessing.Process(target=api.run, args=())
    api_ps.start()

    #*** Instantiate Flow class (OpenFlow 1.3 supported):
    flow = flows_module.Flow(config)

    #*** Load JSON representations of flow removed messages:
    with open('OFPMsgs/OFPFlowRemoved_1.json', 'r') as json_file:
        json_dict_tx = json.loads(json_file.read())
    with open('OFPMsgs/OFPFlowRemoved_2.json', 'r') as json_file:
        json_dict_rx = json.loads(json_file.read())

    #*** Fake datapath; synthesise and record both removals:
    datapath = ofproto_protocol.ProtocolDesc(version=ofproto_v1_3.OFP_VERSION)
    datapath.id = 1
    msg_tx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_tx)
    msg_rx = ofproto_parser.ofp_msg_from_jsondict(datapath, json_dict_rx)
    flow.record_removal(msg_tx)
    flow.record_removal(msg_rx)

    #*** Call the external API:
    api_result = get_api_result(URL_TEST_FLOWS_REMOVED)
    logger.debug("api_result=%s", api_result)

    #*** Note: can't easily test 'removal_time' as is dynamic, so skipping.
    #*** Expected fields for the forward leg (item 0):
    expected_forward = {
        'dpid': 1, 'cookie': 23, 'priority': 1, 'reason': 0,
        'table_id': 1, 'duration_sec': 5, 'idle_timeout': 5,
        'hard_timeout': 0, 'packet_count': 10, 'byte_count': 744,
        'eth_A': '', 'eth_B': '', 'eth_type': 2048,
        'ip_A': '10.1.0.1', 'ip_B': '10.1.0.2', 'ip_proto': 6,
        'tp_A': 43297, 'tp_B': 80,
        'flow_hash': '9822b2867652ee0957892482b9f004c3',
        'direction': 'forward',
    }
    #*** Expected fields for the reverse leg (item 1):
    expected_reverse = {
        'dpid': 1, 'cookie': 1000000023, 'priority': 1, 'reason': 0,
        'table_id': 1, 'duration_sec': 5, 'idle_timeout': 5,
        'hard_timeout': 0, 'packet_count': 9, 'byte_count': 6644,
        'eth_A': '', 'eth_B': '', 'eth_type': 2048,
        'ip_A': '10.1.0.2', 'ip_B': '10.1.0.1', 'ip_proto': 6,
        'tp_A': 80, 'tp_B': 43297,
        'flow_hash': '9822b2867652ee0957892482b9f004c3',
        'direction': 'reverse',
    }
    for key, value in expected_forward.items():
        assert api_result['_items'][0][key] == value
    for key, value in expected_reverse.items():
        assert api_result['_items'][1][key] == value

    #*** Stop api_external sub-process:
    api_ps.terminate()
def _test_actions(self, act, test):
    """Round-trip one action dict: str -> action object -> str.

    Dispatches on the action's "type" to check the type-specific fields
    in both directions.  GOTO_TABLE / WRITE_METADATA / METER map to
    whole instructions; everything else maps to an action (bare for
    OF1.0, wrapped in an instruction otherwise).
    """
    act_type = act["type"]
    to_actions = test.to_actions
    actions_to_str = test.actions_to_str
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)
    act_list = []
    act_list.append(act)
    # str -> action
    result = to_actions(dp, act_list)
    insts = result[0]
    # Expected class for this action type, if the version supports it.
    if act_type in test.supported_action:
        cls = test.supported_action[act_type]
    else:
        cls = None
    if act_type == 'GOTO_TABLE':
        ok_(isinstance(insts, cls))
        eq_(insts.table_id, act["table_id"])
    elif act_type == 'WRITE_METADATA':
        ok_(isinstance(insts, cls))
        eq_(insts.metadata, act["metadata"])
        eq_(insts.metadata_mask, act["metadata_mask"])
    elif act_type == 'METER':
        ok_(isinstance(insts, cls))
        eq_(insts.meter_id, act["meter_id"])
    else:
        # OF1.0 returns a bare action; later versions wrap it in an
        # instruction whose .actions list holds the action.
        if test.ver == ofproto_v1_0.OFP_VERSION:
            action = insts
        else:
            action = insts.actions[0]
        ok_(isinstance(action, cls))
        if act_type == 'OUTPUT':
            eq_(action.port, act["port"])
        elif act_type == 'SET_VLAN_VID':
            eq_(action.vlan_vid, act["vlan_vid"])
        elif act_type == 'SET_VLAN_PCP':
            eq_(action.vlan_pcp, act["vlan_pcp"])
        elif act_type == 'SET_DL_SRC':
            eq_(addrconv.mac.bin_to_text(action.dl_addr), act["dl_src"])
        elif act_type == 'SET_DL_DST':
            eq_(addrconv.mac.bin_to_text(action.dl_addr), act["dl_dst"])
        elif act_type == 'SET_NW_SRC':
            ip = netaddr.ip.IPAddress(action.nw_addr)
            eq_(str(ip), act["nw_src"])
        elif act_type == 'SET_NW_DST':
            ip = netaddr.ip.IPAddress(action.nw_addr)
            eq_(str(ip), act["nw_dst"])
        elif act_type == 'SET_NW_TOS':
            eq_(action.tos, act["nw_tos"])
        elif act_type == 'SET_TP_SRC':
            eq_(action.tp, act["tp_src"])
        elif act_type == 'SET_TP_DST':
            eq_(action.tp, act["tp_dst"])
        elif act_type == 'ENQUEUE':
            eq_(action.queue_id, act["queue_id"])
            eq_(action.port, act["port"])
        elif act_type == 'SET_MPLS_TTL':
            eq_(action.mpls_ttl, act["mpls_ttl"])
        elif act_type in ['PUSH_VLAN', 'PUSH_MPLS', 'POP_MPLS',
                          'PUSH_PBB']:
            eq_(action.ethertype, act["ethertype"])
        elif act_type == 'SET_QUEUE':
            eq_(action.queue_id, act["queue_id"])
        elif act_type == 'GROUP':
            eq_(action.group_id, act["group_id"])
        elif act_type == 'SET_NW_TTL':
            eq_(action.nw_ttl, act["nw_ttl"])
        elif act_type in ['STRIP_VLAN', 'COPY_TTL_OUT', 'COPY_TTL_IN',
                          'DEC_MPLS_TTL', 'POP_VLAN', 'DEC_NW_TTL',
                          'POP_PBB']:
            # Parameterless actions: nothing beyond the class check.
            pass
        else:
            assert False
    # action -> str
    action_str = actions_to_str(result)
    # Split only on the first ':' so values containing ':' survive.
    action_str_list = action_str[0].split(':', 1)
    eq_(action_str_list[0], act_type)
    if act_type == 'GOTO_TABLE':
        eq_(int(action_str_list[1]), act["table_id"])
    elif act_type == 'WRITE_METADATA':
        # Rendered as metadata/mask, both hexadecimal.
        met = action_str_list[1].split('/')
        eq_(int(met[0], 16), act["metadata"])
        eq_(int(met[1], 16), act["metadata_mask"])
    elif act_type == 'METER':
        eq_(int(action_str_list[1]), act["meter_id"])
    else:
        if act_type == 'OUTPUT':
            eq_(int(action_str_list[1]), act["port"])
        elif act_type == 'SET_VLAN_VID':
            eq_(int(action_str_list[1]), act["vlan_vid"])
        elif act_type == 'SET_VLAN_PCP':
            eq_(int(action_str_list[1]), act["vlan_pcp"])
        elif act_type == 'SET_DL_SRC':
            eq_(action_str_list[1], act["dl_src"])
        elif act_type == 'SET_DL_DST':
            eq_(action_str_list[1], act["dl_dst"])
        elif act_type == 'SET_NW_SRC':
            eq_(action_str_list[1], act["nw_src"])
        elif act_type == 'SET_NW_DST':
            eq_(action_str_list[1], act["nw_dst"])
        elif act_type == 'SET_NW_TOS':
            eq_(int(action_str_list[1]), act["nw_tos"])
        elif act_type == 'SET_TP_SRC':
            eq_(int(action_str_list[1]), act["tp_src"])
        elif act_type == 'SET_TP_DST':
            eq_(int(action_str_list[1]), act["tp_dst"])
        elif act_type == 'ENQUEUE':
            # Rendered as port:queue_id.
            enq = action_str_list[1].split(':')
            eq_(int(enq[0], 10), act["port"])
            eq_(int(enq[1], 10), act["queue_id"])
        elif act_type == 'SET_MPLS_TTL':
            eq_(int(action_str_list[1]), act["mpls_ttl"])
        elif act_type == 'PUSH_VLAN':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type == 'PUSH_MPLS':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type == 'POP_MPLS':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type == 'SET_QUEUE':
            eq_(int(action_str_list[1]), act["queue_id"])
        elif act_type == 'GROUP':
            eq_(int(action_str_list[1]), act["group_id"])
        elif act_type == 'SET_NW_TTL':
            eq_(int(action_str_list[1]), act["nw_ttl"])
        elif act_type == 'SET_FIELD':
            # NOTE(review): action_str_list came from split(':', 1) and
            # therefore has at most two elements, so index [2] would
            # raise IndexError — presumably this branch is never reached
            # by the current test data; confirm before relying on it.
            eq_(action_str_list[1].strip(' {'), act["field"])
            eq_(action_str_list[2].strip('} '), act["value"])
        elif act_type == 'PUSH_PBB':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type in ['STRIP_VLAN', 'COPY_TTL_OUT', 'COPY_TTL_IN',
                          'DEC_MPLS_TTL', 'POP_VLAN', 'DEC_NW_TTL',
                          'POP_PBB']:
            # Parameterless actions render as the bare type name.
            pass
        else:
            assert False
import socket from struct import * from nose.tools import * from nose.plugins.skip import Skip, SkipTest from ryu.ofproto.ofproto_v1_3_parser import * from ryu.ofproto import ofproto_v1_3_parser from ryu.ofproto import ofproto_v1_3 from ryu.ofproto import ofproto_protocol from ryu.ofproto import ether from ryu.ofproto.ofproto_parser import MsgBase from ryu import utils from ryu.lib import addrconv LOG = logging.getLogger('test_ofproto_v13') _Datapath = ofproto_protocol.ProtocolDesc(version=ofproto_v1_3.OFP_VERSION) class TestOFPMatch(unittest.TestCase): """ Test case for ofproto_v1_3_parser.OFPMatch """ def test_init(self): res = OFPMatch() # wc check eq_(res._wc.vlan_vid_mask, 0) # flow check eq_(res._flow.vlan_vid, 0) def _test_serialize_and_parser(self, match, header, value, mask=None):
def _test_msg(self, name, wire_msg, json_str):
    """Round-trip one OpenFlow message between wire and JSON forms.

    Checks on-wire -> OFPxxx -> json (tolerating OFPTruncatedMessage),
    then json -> OFPxxx -> wire, and finally that optional
    "len"/"length" fields may be omitted from the json input.

    :param name: fixture name (used for the /tmp debug dump)
    :param wire_msg: raw on-wire message bytes
    :param json_str: expected JSON representation as a string
    """
    def bytes_eq(buf1, buf2):
        # Like eq_ for byte strings, but reports the first differing
        # offset to make wire-format mismatches debuggable.
        if buf1 != buf2:
            msg = 'EOF in either data'
            for i in range(0, min(len(buf1), len(buf2))):
                c1 = six.indexbytes(six.binary_type(buf1), i)
                c2 = six.indexbytes(six.binary_type(buf2), i)
                if c1 != c2:
                    msg = 'differs at chr %d, %d != %d' % (i, c1, c2)
                    break
            assert buf1 == buf2, "%r != %r, %s" % (buf1, buf2, msg)

    json_dict = json.loads(json_str)
    # on-wire -> OFPxxx -> json
    (version, msg_type, msg_len, xid) = ofproto_parser.header(wire_msg)
    try:
        has_parser, has_serializer = implemented[version][msg_type]
    except KeyError:
        # Unlisted messages are assumed to support both directions.
        has_parser = True
        has_serializer = True

    dp = ofproto_protocol.ProtocolDesc(version=version)
    if has_parser:
        try:
            msg = ofproto_parser.msg(dp, version, msg_type, msg_len,
                                     xid, wire_msg)
            json_dict2 = self._msg_to_jsondict(msg)
        except exception.OFPTruncatedMessage as e:
            json_dict2 = {
                'OFPTruncatedMessage': self._msg_to_jsondict(e.ofpmsg)
            }
        # XXXdebug code.  Context manager fixes the original's leaked
        # file handle.
        with open('/tmp/%s.json' % name, 'w') as f:
            f.write(json.dumps(json_dict2))
        eq_(json_dict, json_dict2)

        if 'OFPTruncatedMessage' in json_dict2:
            # Truncated messages cannot be re-serialized; stop here.
            return

    # json -> OFPxxx -> json
    # xid is compared above in str2json; strip it here and restore it
    # via set_xid so both serializations line up.
    xid = json_dict[list(json_dict.keys())[0]].pop('xid', None)
    msg2 = self._jsondict_to_msg(dp, json_dict)
    msg2.set_xid(xid)
    if has_serializer:
        msg2.serialize()
        eq_(self._msg_to_jsondict(msg2), json_dict)
        bytes_eq(wire_msg, msg2.buf)

        # check if "len" "length" fields can be omitted
        def _remove(d, names):
            # Return a copy of d with the given dict keys removed.
            f = lambda x: _remove(x, names)
            if isinstance(d, list):
                return list(map(f, d))
            if isinstance(d, dict):
                d2 = {}
                for k, v in d.items():
                    if k in names:
                        continue
                    d2[k] = f(v)
                return d2
            return d

        json_dict3 = _remove(json_dict, ['len', 'length'])
        msg3 = self._jsondict_to_msg(dp, json_dict3)
        msg3.set_xid(xid)
        msg3.serialize()
        bytes_eq(wire_msg, msg3.buf)

        # Re-serializing must be idempotent.
        msg2.serialize()
        bytes_eq(wire_msg, msg2.buf)
def _test_actions(self, act, test):
    """Round-trip one action dict: str -> action/instruction -> str.

    OF1.2+ variant: GOTO_TABLE / WRITE_METADATA / METER map to whole
    instructions; every other type maps to an apply-actions instruction
    whose first action carries the type-specific fields.
    """
    act_type = act["type"]
    to_actions = test.to_actions
    actions_to_str = test.actions_to_str
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)
    act_list = []
    act_list.append(act)
    # str -> action
    result = to_actions(dp, act_list)
    insts = result[0]
    # Expected class for this action type, if the version supports it.
    if act_type in test.supported_action:
        cls = test.supported_action[act_type]
    else:
        cls = None
    if act_type == 'GOTO_TABLE':
        ok_(isinstance(insts, cls))
        eq_(insts.table_id, act["table_id"])
    elif act_type == 'WRITE_METADATA':
        ok_(isinstance(insts, cls))
        eq_(insts.metadata, act["metadata"])
        eq_(insts.metadata_mask, act["metadata_mask"])
    elif act_type == 'METER':
        ok_(isinstance(insts, cls))
        eq_(insts.meter_id, act["meter_id"])
    else:
        ok_(isinstance(insts.actions[0], cls))
        if act_type == 'OUTPUT':
            eq_(insts.actions[0].port, act["port"])
        elif act_type == 'SET_MPLS_TTL':
            eq_(insts.actions[0].mpls_ttl, act["mpls_ttl"])
        elif act_type in [
                'PUSH_VLAN', 'PUSH_MPLS', 'POP_MPLS', 'PUSH_PBB'
        ]:
            eq_(insts.actions[0].ethertype, act["ethertype"])
        elif act_type == 'SET_QUEUE':
            eq_(insts.actions[0].queue_id, act["queue_id"])
        elif act_type == 'GROUP':
            eq_(insts.actions[0].group_id, act["group_id"])
        elif act_type == 'SET_NW_TTL':
            eq_(insts.actions[0].nw_ttl, act["nw_ttl"])
    # action -> str
    action_str = actions_to_str(result)
    action_str_list = action_str[0].split(':')
    eq_(action_str_list[0], act_type)
    if act_type == 'GOTO_TABLE':
        eq_(int(action_str_list[1]), act["table_id"])
    elif act_type == 'WRITE_METADATA':
        # Rendered as metadata/mask, both hexadecimal.
        met = action_str_list[1].split('/')
        eq_(int(met[0], 16), act["metadata"])
        eq_(int(met[1], 16), act["metadata_mask"])
    elif act_type == 'METER':
        eq_(int(action_str_list[1]), act["meter_id"])
    else:
        if act_type == 'OUTPUT':
            eq_(int(action_str_list[1]), act["port"])
        elif act_type == 'SET_MPLS_TTL':
            eq_(int(action_str_list[1]), act["mpls_ttl"])
        elif act_type == 'PUSH_VLAN':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type == 'PUSH_MPLS':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type == 'POP_MPLS':
            eq_(int(action_str_list[1]), act["ethertype"])
        elif act_type == 'SET_QUEUE':
            eq_(int(action_str_list[1]), act["queue_id"])
        elif act_type == 'GROUP':
            eq_(int(action_str_list[1]), act["group_id"])
        elif act_type == 'SET_NW_TTL':
            eq_(int(action_str_list[1]), act["nw_ttl"])
        elif act_type == 'SET_FIELD':
            # SET_FIELD renders as "SET_FIELD: {field: value}", so the
            # unbounded split(':') yields three pieces here.
            eq_(action_str_list[1].strip(' {'), act["field"])
            eq_(action_str_list[2].strip('} '), act["value"])
        elif act_type == 'PUSH_PBB':
            eq_(int(action_str_list[1]), act["ethertype"])
def _test_to_match(self, attrs, test):
    """Round-trip a match dict: str attrs -> match object -> str dict.

    For every attribute, the nested equal_match helper checks the value
    stored in the built match object, and equal_str checks the value
    rendered back by match_to_str.  Both helpers special-case MAC,
    IPv4/IPv6, VLAN, and masked-integer (metadata / ipv6_exthdr)
    fields, falling through to plain equality for everything else.
    """
    to_match = test.to_match
    match_to_str = test.match_to_str
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)
    # NOTE(review): `ofproto` appears unused in this method — confirm.
    ofproto = dp.ofproto

    # str -> match
    match = to_match(dp, attrs)

    def equal_match(key, value, match):
        # Normalise the key for this OF version, then fetch the value
        # actually stored in the match object.
        key = self._conv_key(test, key, attrs)
        field_value = self._get_field_value(test, key, match)
        if key in ['eth_src', 'eth_dst', 'arp_sha', 'arp_tha']:
            # MAC address
            eth, mask = _to_match_eth(value)
            if mask is not None:
                # with mask: compare only the nibbles the mask keeps.
                for i in range(0, len(mask)):
                    if mask[i] == 'f':
                        eq_(eth[i], field_value[0][i])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(eth, field_value)
            return
        elif key in ['dl_src', 'dl_dst']:
            # OF1.0-style MAC: stored as binary, compare as text.
            eth, mask = _to_match_eth(value)
            field_value = addrconv.mac.bin_to_text(field_value)
            eq_(eth, field_value)
            return
        elif key in ['ipv4_src', 'ipv4_dst', 'arp_spa', 'arp_tpa']:
            # IPv4 address
            ipv4, mask = _to_match_ip(value)
            if mask is not None:
                # with mask
                eq_(ipv4, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv4, field_value)
            return
        elif key in ['nw_src', 'nw_dst']:
            # IPv4 address (OF1.0 naming); stored value needs the same
            # normalisation as the input.
            ipv4, mask = _to_match_ip(value)
            field_value = _to_match_ip(field_value)
            if mask is not None:
                # with mask
                eq_(ipv4, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv4, field_value[0])
            return
        elif key in ['ipv6_src', 'ipv6_dst']:
            # IPv6 address
            ipv6, mask = _to_match_ip(value)
            if mask is not None:
                # with mask
                eq_(ipv6, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv6, field_value)
            return
        elif key == 'vlan_vid':
            # OF1.2+ encodes VLAN VIDs specially; expected encodings are
            # table-driven via test.expected_value.
            if test.ver == ofproto_v1_0.OFP_VERSION:
                eq_(value, field_value)
            else:
                eq_(test.expected_value['vlan_vid'][
                    value]['to_match'], field_value)
            return
        elif key == 'metadata' or key == 'ipv6_exthdr':
            # Metadata or IPv6 Extension Header pseudo-field
            value, mask = _to_match_masked_int(value)
            if mask is not None:
                # with mask: only masked-in bits are significant.
                value &= mask
                eq_(value, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(value, field_value)
            return
        else:
            eq_(value, field_value)
            return

    for key, value in attrs.items():
        equal_match(key, value, match)

    # match -> str
    match_str = match_to_str(match)

    def equal_str(key, value, match_str):
        field_value = match_str[key]
        if key in ['dl_src', 'dl_dst', 'arp_sha', 'arp_tha']:
            # MAC address; masked values render as "addr/mask".
            eth, mask = _to_match_eth(value)
            if mask is not None:
                # with mask
                field_value = field_value.split('/')
                for i in range(0, len(mask)):
                    if mask[i] == 'f':
                        eq_(eth[i], field_value[0][i])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(eth, field_value)
            return
        elif key in ['nw_src', 'nw_dst', 'arp_spa', 'arp_tpa']:
            # IPv4 address; OF1.0 re-normalises, later versions render
            # masked values as "addr/mask".
            if test.ver == ofproto_v1_0.OFP_VERSION:
                ipv4, mask = _to_match_ip(value)
                field_value = _to_match_ip(field_value)
                if mask is not None:
                    # with mask
                    eq_(ipv4, field_value[0])
                    eq_(mask, field_value[1])
                else:
                    # without mask
                    eq_(ipv4, field_value[0])
            else:
                ipv4, mask = _to_match_ip(value)
                if mask is not None:
                    # with mask
                    field_value = field_value.split('/')
                    eq_(ipv4, field_value[0])
                    eq_(mask, field_value[1])
                else:
                    # without mask
                    eq_(ipv4, field_value)
            return
        elif key in ['ipv6_src', 'ipv6_dst']:
            # IPv6 address
            ipv6, mask = _to_match_ip(value)
            if mask is not None:
                # with mask
                field_value = field_value.split('/')
                eq_(ipv6, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv6, field_value)
            return
        elif key == 'dl_vlan':
            # VLAN id: OF1.2+ expected string form is table-driven.
            if test.ver == ofproto_v1_0.OFP_VERSION:
                eq_(value, field_value)
            else:
                eq_(test.expected_value['vlan_vid'][
                    value]['to_str'], field_value)
            return
        elif key == 'metadata' or key == 'ipv6_exthdr':
            # Metadata or IPv6 Extension Header pseudo-field; rendered
            # as decimal strings, masked form as "value/mask".
            value, mask = _to_match_masked_int(value)
            if mask is not None:
                # with mask
                field_value = field_value.split('/')
                value &= mask
                eq_(str(value), field_value[0])
                eq_(str(mask), field_value[1])
            else:
                # without mask
                eq_(str(value), field_value)
            return
        else:
            eq_(value, field_value)
            return

    for key, value in attrs.items():
        # match_to_str emits OF1.0-style names; map keys back where a
        # translation exists.
        if key in conv_of12_to_of10_dict:
            key_old = conv_of12_to_of10_dict[key]
        else:
            key_old = key
        equal_str(key_old, value, match_str)
def test_response_flows_removed_FLOWDIR_bytes_TXRX():
    """
    Test the flows_removed API various flavours of src/dst bytes
    sent/received by ingesting flow removal messages then checking
    that the API response correctly specifies appropriate stats for
    bytes sent, including identity enrichment.

    Fixes in this revision:
      - Switch 2's third TX/RX message pair was synthesised against
        datapath1 (copy-paste error), so the multi-switch dedup check
        never actually exercised switch 2 for those flows; it now uses
        datapath2.
      - The api_external sub-process is terminated in a finally block,
        so a failing assertion no longer leaks the process.
      - The comment on the last API section wrongly said
        "src_bytes_received"; it tests dst_bytes_received.
    """
    #*** Start api_external as separate process:
    logger.info("Starting api_external")
    api_ps = multiprocessing.Process(target=api.run, args=())
    api_ps.start()
    try:
        #*** Supports OpenFlow version 1.3:
        OFP_VERSION = ofproto_v1_3.OFP_VERSION

        #*** Instantiate supporting classes:
        flow = flows_module.Flow(config)
        policy = policy_module.Policy(config)
        identities = identities_module.Identities(config, policy)

        #*** Client to Server DHCP Request:
        flow.ingest_packet(DPID1, INPORT1, pkts_dhcp.RAW[2],
                           datetime.datetime.now())
        identities.harvest(pkts_dhcp.RAW[2], flow.packet)

        #*** Server to Client DHCP ACK:
        flow.ingest_packet(DPID1, INPORT2, pkts_dhcp.RAW[3],
                           datetime.datetime.now())
        identities.harvest(pkts_dhcp.RAW[3], flow.packet)

        #*** Load JSON representations of flow removed messages
        #*** (odd-numbered files are TX legs, even-numbered RX legs):
        with open('OFPMsgs/OFPFlowRemoved_1.json', 'r') as json_file:
            json_dict_tx_1 = json.loads(json_file.read())
        with open('OFPMsgs/OFPFlowRemoved_2.json', 'r') as json_file:
            json_dict_rx_1 = json.loads(json_file.read())
        with open('OFPMsgs/OFPFlowRemoved_3.json', 'r') as json_file:
            json_dict_tx_2 = json.loads(json_file.read())
        with open('OFPMsgs/OFPFlowRemoved_4.json', 'r') as json_file:
            json_dict_rx_2 = json.loads(json_file.read())
        with open('OFPMsgs/OFPFlowRemoved_5.json', 'r') as json_file:
            json_dict_tx_3 = json.loads(json_file.read())
        with open('OFPMsgs/OFPFlowRemoved_6.json', 'r') as json_file:
            json_dict_rx_3 = json.loads(json_file.read())

        #*** Switch 1:
        #*** Set up fake datapath and synthesise messages:
        datapath1 = ofproto_protocol.ProtocolDesc(version=OFP_VERSION)
        datapath1.id = 1
        msg_tx_1_sw1 = ofproto_parser.ofp_msg_from_jsondict(
            datapath1, json_dict_tx_1)
        msg_rx_1_sw1 = ofproto_parser.ofp_msg_from_jsondict(
            datapath1, json_dict_rx_1)
        msg_tx_2_sw1 = ofproto_parser.ofp_msg_from_jsondict(
            datapath1, json_dict_tx_2)
        msg_rx_2_sw1 = ofproto_parser.ofp_msg_from_jsondict(
            datapath1, json_dict_rx_2)
        msg_tx_3_sw1 = ofproto_parser.ofp_msg_from_jsondict(
            datapath1, json_dict_tx_3)
        msg_rx_3_sw1 = ofproto_parser.ofp_msg_from_jsondict(
            datapath1, json_dict_rx_3)

        #*** Record flow removals to flow_rems database collection:
        flow.record_removal(msg_tx_1_sw1)
        flow.record_removal(msg_rx_1_sw1)
        flow.record_removal(msg_tx_2_sw1)
        flow.record_removal(msg_rx_2_sw1)
        flow.record_removal(msg_tx_3_sw1)
        flow.record_removal(msg_rx_3_sw1)

        #*** Switch 2 (same flows to check dedup for multiple switches
        #*** works):
        #*** Set up fake datapaths and synthesise messages:
        datapath2 = ofproto_protocol.ProtocolDesc(version=OFP_VERSION)
        datapath2.id = 2
        msg_tx_1_sw2 = ofproto_parser.ofp_msg_from_jsondict(
            datapath2, json_dict_tx_1)
        msg_rx_1_sw2 = ofproto_parser.ofp_msg_from_jsondict(
            datapath2, json_dict_rx_1)
        msg_tx_2_sw2 = ofproto_parser.ofp_msg_from_jsondict(
            datapath2, json_dict_tx_2)
        msg_rx_2_sw2 = ofproto_parser.ofp_msg_from_jsondict(
            datapath2, json_dict_rx_2)
        #*** Fixed: the next two previously used datapath1 by mistake:
        msg_tx_3_sw2 = ofproto_parser.ofp_msg_from_jsondict(
            datapath2, json_dict_tx_3)
        msg_rx_3_sw2 = ofproto_parser.ofp_msg_from_jsondict(
            datapath2, json_dict_rx_3)

        #*** Record flow removals to flow_rems database collection:
        flow.record_removal(msg_tx_1_sw2)
        flow.record_removal(msg_rx_1_sw2)
        flow.record_removal(msg_tx_2_sw2)
        flow.record_removal(msg_rx_2_sw2)
        flow.record_removal(msg_tx_3_sw2)
        flow.record_removal(msg_rx_3_sw2)

        #*** Test flows_removed_src_bytes_sent API:
        #*** Call the external API:
        api_result = get_api_result(URL_TEST_FLOWS_REMOVED_SRC_BYTES_SENT)
        logger.debug("api_result=%s", api_result)
        #*** Validate API Response parameters:
        assert api_result['_items'][0]['_id'] == '10.1.0.2'
        assert api_result['_items'][0]['total_bytes_sent'] == 12345
        assert api_result['_items'][0]['identity'] == '10.1.0.2'
        assert api_result['_items'][1]['_id'] == '10.1.0.1'
        assert api_result['_items'][1]['total_bytes_sent'] == 5533
        assert api_result['_items'][1]['identity'] == 'pc1'

        #*** Test flows_removed_src_bytes_received API:
        #*** Call the external API:
        api_result = get_api_result(URL_TEST_FLOWS_REMOVED_SRC_BYTES_RECEIVED)
        logger.debug("api_result=%s", api_result)
        #*** Validate API Response parameters:
        assert api_result['_items'][0]['_id'] == '10.1.0.1'
        assert api_result['_items'][0]['total_bytes_received'] == 8628
        assert api_result['_items'][0]['identity'] == 'pc1'
        assert api_result['_items'][1]['_id'] == '10.1.0.2'
        assert api_result['_items'][1]['total_bytes_received'] == 543
        assert api_result['_items'][1]['identity'] == '10.1.0.2'

        #*** Test flows_removed_dst_bytes_sent API:
        #*** Call the external API:
        api_result = get_api_result(URL_TEST_FLOWS_REMOVED_DST_BYTES_SENT)
        logger.debug("api_result=%s", api_result)
        #*** Validate API Response parameters:
        assert api_result['_items'][0]['_id'] == '10.1.0.2'
        assert api_result['_items'][0]['total_bytes_sent'] == 8628
        assert api_result['_items'][0]['identity'] == '10.1.0.2'
        assert api_result['_items'][1]['_id'] == '10.1.0.1'
        assert api_result['_items'][1]['total_bytes_sent'] == 543
        assert api_result['_items'][1]['identity'] == 'pc1'

        #*** Test flows_removed_dst_bytes_received API:
        #*** Call the external API:
        api_result = get_api_result(URL_TEST_FLOWS_REMOVED_DST_BYTES_RECEIVED)
        logger.debug("api_result=%s", api_result)
        #*** Validate API Response parameters:
        assert api_result['_items'][0]['_id'] == '10.1.0.1'
        assert api_result['_items'][0]['total_bytes_received'] == 12345
        assert api_result['_items'][0]['identity'] == 'pc1'
        assert api_result['_items'][1]['_id'] == '10.1.0.2'
        assert api_result['_items'][1]['total_bytes_received'] == 5533
        assert api_result['_items'][1]['identity'] == '10.1.0.2'
    finally:
        #*** Stop api_external sub-process even if an assertion failed:
        api_ps.terminate()
def _test_to_match(self, attrs, test):
    """
    Round-trip *attrs* through the match helpers of *test* and assert
    both conversion directions are correct:

    1. str -> match: build a match via test.to_match(dp, attrs) and
       check every field landed in the match with the expected value.
    2. match -> str: render the match back via test.match_to_str() and
       check every field stringifies to the expected text.

    attrs: dict of match-field name -> value (may use OF1.0-style names,
        which are translated via conv_of10_to_of12_dict /
        conv_of12_to_of10_dict before comparison).
    test: fixture object supplying to_match, match_to_str and ver
        (OpenFlow protocol version) — defined elsewhere in this file.
    """
    to_match = test.to_match
    match_to_str = test.match_to_str
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)
    # NOTE(review): `ofproto` is assigned but never used below.
    ofproto = dp.ofproto
    vid_present = dp.ofproto.OFPVID_PRESENT
    # Expected results for every vlan_vid input form the test data may
    # use: plain ints, decimal strings, hex strings (raw, no present
    # bit), and value/mask pairs in decimal or hex.
    expected_value = {
        "vlan_vid": {
            0: {"to_match": 0 | vid_present, "to_str": "0"},
            3: {"to_match": 3 | vid_present, "to_str": "3"},
            4095: {"to_match": 4095 | vid_present, "to_str": "4095"},
            "0": {"to_match": 0 | vid_present, "to_str": "0"},
            "3": {"to_match": 3 | vid_present, "to_str": "3"},
            "4095": {"to_match": 4095 | vid_present, "to_str": "4095"},
            "0x0000": {"to_match": 0x0000, "to_str": "0x0000"},
            "0x0003": {"to_match": 0x0003, "to_str": "0x0003"},
            "0x0fff": {"to_match": 0x0fff, "to_str": "0x0fff"},
            "0x1000": {"to_match": 0x1000, "to_str": "0"},
            "0x1003": {"to_match": 0x1003, "to_str": "3"},
            "0x1fff": {"to_match": 0x1fff, "to_str": "4095"},
            "4096/4096": {"to_match": (4096, 4096),
                          "to_str": "0x1000/0x1000"},
            "4096/4097": {"to_match": (4096, 4097),
                          "to_str": "0x1000/0x1001"},
            "2744/2748": {"to_match": (2744, 2748),
                          "to_str": "0x0ab8/0x0abc"},
            "2748/2748": {"to_match": (2748, 2748),
                          "to_str": "0x0abc/0x0abc"},
            "2748/2749": {"to_match": (2748, 2749),
                          "to_str": "0x0abc/0x0abd"},
            "0x1000/0x1000": {"to_match": (0x1000, 0x1000),
                              "to_str": "0x1000/0x1000"},
            "0x1000/0x1001": {"to_match": (0x1000, 0x1001),
                              "to_str": "0x1000/0x1001"},
            "0x0ab8/0x0abc": {"to_match": (0x0ab8, 0x0abc),
                              "to_str": "0x0ab8/0x0abc"},
            "0x0abc/0x0abc": {"to_match": (0x0abc, 0x0abc),
                              "to_str": "0x0abc/0x0abc"},
            "0x0abc/0x0abd": {"to_match": (0x0abc, 0x0abd),
                              "to_str": "0x0abc/0x0abd"}
        }
    }

    # str -> match
    match = to_match(dp, attrs)

    def equal_match(key, value, match):
        """Assert match[key] equals the parsed form of *value*."""
        field_value = match[key]
        if key in ['eth_src', 'eth_dst', 'arp_sha', 'arp_tha']:
            # MAC address
            eth, mask = _to_match_eth(value)
            if mask is not None:
                # with mask: compare only nibbles whose mask digit is 'f'
                for i in range(0, len(mask)):
                    if mask[i] == 'f':
                        eq_(eth[i], field_value[0][i])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(eth, field_value)
            return
        elif key in ['ipv4_src', 'ipv4_dst', 'arp_spa', 'arp_tpa']:
            # IPv4 address
            ipv4, mask = _to_match_ip(value)
            if mask is not None:
                # with mask
                eq_(ipv4, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv4, field_value)
            return
        elif key in ['ipv6_src', 'ipv6_dst']:
            # IPv6 address
            ipv6, mask = _to_match_ip(value)
            if mask is not None:
                # with mask
                eq_(ipv6, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv6, field_value)
            return
        elif key == 'vlan_vid':
            # Compare against the precomputed expectation table above.
            eq_(expected_value['vlan_vid'][value]['to_match'],
                field_value)
            return
        elif key == 'metadata' or key == 'ipv6_exthdr':
            # Metadata or IPv6 Extension Header pseudo-field
            value, mask = _to_match_masked_int(value)
            if mask is not None:
                # with mask: masked-out bits are canonicalised to zero
                value &= mask
                eq_(value, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(value, field_value)
            return
        else:
            # Everything else compares verbatim.
            eq_(value, field_value)
            return

    for key, value in attrs.items():
        if key in conv_of10_to_of12_dict:
            # For old field name
            key_new = conv_of10_to_of12_dict[key]
        elif key == 'tp_src' or key == 'tp_dst':
            # TCP/UDP port: pick the protocol-specific field name.
            conv = {inet.IPPROTO_TCP: {'tp_src': 'tcp_src',
                                       'tp_dst': 'tcp_dst'},
                    inet.IPPROTO_UDP: {'tp_src': 'udp_src',
                                       'tp_dst': 'udp_dst'}}
            ip_proto = attrs.get('nw_proto', attrs.get('ip_proto', 0))
            key_new = conv[ip_proto][key]
        else:
            key_new = key
        equal_match(key_new, value, match)

    # match -> str
    match_str = match_to_str(match)

    def equal_str(key, value, match_str):
        """Assert match_str[key] equals the string form of *value*."""
        field_value = match_str[key]
        if key in ['dl_src', 'dl_dst', 'arp_sha', 'arp_tha']:
            # MAC address
            eth, mask = _to_match_eth(value)
            if mask is not None:
                # with mask: rendered as "addr/mask"
                field_value = field_value.split('/')
                for i in range(0, len(mask)):
                    if mask[i] == 'f':
                        eq_(eth[i], field_value[0][i])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(eth, field_value)
            return
        elif key in ['nw_src', 'nw_dst', 'arp_spa', 'arp_tpa']:
            # IPv4 address
            ipv4, mask = _to_match_ip(value)
            if mask is not None:
                # with mask: rendered as "addr/mask"
                field_value = field_value.split('/')
                eq_(ipv4, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv4, field_value)
            return
        elif key in ['ipv6_src', 'ipv6_dst']:
            # IPv6 address
            ipv6, mask = _to_match_ip(value)
            if mask is not None:
                # with mask: rendered as "addr/mask"
                field_value = field_value.split('/')
                eq_(ipv6, field_value[0])
                eq_(mask, field_value[1])
            else:
                # without mask
                eq_(ipv6, field_value)
            return
        elif key == 'dl_vlan':
            # Compare against the precomputed expectation table above.
            eq_(expected_value['vlan_vid'][value]['to_str'],
                field_value)
            return
        elif key == 'metadata' or key == 'ipv6_exthdr':
            # Metadata or IPv6 Extension Header pseudo-field
            value, mask = _to_match_masked_int(value)
            if mask is not None:
                # with mask: rendered as "value/mask" after masking
                field_value = field_value.split('/')
                value &= mask
                eq_(str(value), field_value[0])
                eq_(str(mask), field_value[1])
            else:
                # without mask
                eq_(str(value), field_value)
            return
        else:
            # Everything else compares verbatim.
            eq_(value, field_value)
            return

    for key, value in attrs.items():
        # String rendering uses the OF1.0-style name where one exists.
        if key in conv_of12_to_of10_dict:
            key_old = conv_of12_to_of10_dict[key]
        else:
            key_old = key
        equal_str(key_old, value, match_str)
def _test_to_match(self, attrs, test):
    """
    Round-trip *attrs* through a match, its wire encoding, and back:

    1. str -> match: build a match via test.to_match(dp, attrs),
       serialize it to a buffer and re-parse it, then assert each field
       of the re-parsed match carries the expected value/mask.
    2. match -> str: render the match via test.match_to_str() and
       assert each field's string form matches the input.

    attrs: dict of match-field name -> value (OF1.0-style names are
        translated via conv_dict before the string comparison).
    test: fixture object supplying to_match, match_to_str, ver and
        supported_match (field name -> OXM field class, with TCP/UDP
        port entries further keyed by ip_proto) — defined elsewhere in
        this file.

    Fix in this revision: the re-parse used str(buf) on a bytearray,
    which only yields the raw bytes on Python 2; on Python 3 it yields
    the repr "bytearray(b'...')". bytes(buf) is byte-identical on
    Python 2 (bytes is str there) and correct on Python 3.
    """
    to_match = test.to_match
    match_to_str = test.match_to_str
    dp = ofproto_protocol.ProtocolDesc(version=test.ver)

    # str -> match: serialize then re-parse so the wire format is also
    # exercised, not just the in-memory construction.
    match = to_match(dp, attrs)
    buf = bytearray()
    match.serialize(buf, 0)
    match = match.__class__.parser(bytes(buf), 0)

    def equal_match(key, value, cls_name, fields):
        """Find the field of class *cls_name* and assert its value."""
        for field in fields:
            if cls_name in str(field):
                if key in ['dl_src', 'dl_dst', 'arp_sha', 'arp_tha',
                           'eth_src', 'eth_dst']:
                    # MAC address: compare only nibbles whose mask
                    # digit is 'f'.
                    eth, mask = _to_match_eth(value)
                    str_eth = mac.haddr_to_str(eth)
                    str_mask = mac.haddr_to_str(mask)
                    str_value = mac.haddr_to_str(field.value)
                    for i in range(0, 17):
                        if str_mask[i] == 'f':
                            eq_(str_eth[i], str_value[i])
                        else:
                            continue
                    eq_(mask, field.mask)
                    return
                elif key in ['nw_src', 'nw_dst', 'ipv4_src', 'ipv4_dst',
                             'arp_spa', 'arp_tpa']:
                    # IPv4 address with optional mask.
                    ipv4, mask = _to_match_ip(value)
                    eq_(ipv4, field.value)
                    eq_(mask, field.mask)
                    return
                elif key in ['ipv6_src', 'ipv6_dst']:
                    # IPv6: compare only fully-masked 16-bit groups.
                    ipv6, mask = _to_match_ipv6(value)
                    for i in range(0, 8):
                        if mask[i] == 65535:
                            eq_(ipv6[i], field.value[i])
                        else:
                            continue
                    eq_(list(mask), field.mask)
                    return
                elif key == 'ipv6_nd_target':
                    # IPv6 ND target has no stored mask to compare.
                    ipv6, mask = _to_match_ipv6(value)
                    for i in range(0, 8):
                        if mask[i] == 65535:
                            eq_(ipv6[i], field.value[i])
                        else:
                            continue
                    return
                elif key == 'ipv6_nd_sll' or key == 'ipv6_nd_tll':
                    eq_(mac.haddr_to_bin(value), field.value)
                    return
                elif key == 'metadata':
                    # Metadata is supplied as a hex string.
                    eq_(int(value, 16), field.value)
                    return
                else:
                    eq_(value, field.value)
                    return
        # No field of the expected class was present in the re-parsed
        # match: fail the test.
        assert False

    for key, value in attrs.items():
        if key.startswith('tp_'):
            # TCP/UDP ports resolve via the ip_proto sub-table.
            cls = test.supported_match[key][attrs["ip_proto"]]
        elif key in test.supported_match:
            cls = test.supported_match[key]
        else:
            # NOTE(review): cls stays None here and cls.__name__ below
            # raises AttributeError — presumably the test data only uses
            # supported keys; verify before relying on this path.
            cls = None
        equal_match(key, value, cls.__name__, match.fields)

    # match -> str
    match_str = match_to_str(match)

    def equal_str(key, value, match_str):
        """Assert match_str[key] round-trips to the same parsed value."""
        if key in ['dl_src', 'dl_dst', 'arp_sha', 'arp_tha']:
            # MAC address: compare only nibbles whose mask digit is 'f'.
            eth_1, mask_1 = _to_match_eth(value)
            eth_2, mask_2 = _to_match_eth(match_str[key])
            str_eth_1 = mac.haddr_to_str(eth_1)
            str_mask_1 = mac.haddr_to_str(mask_1)
            str_eth_2 = mac.haddr_to_str(eth_2)
            for i in range(0, 17):
                if str_mask_1[i] == 'f':
                    eq_(str_eth_1[i], str_eth_2[i])
                else:
                    continue
            eq_(mask_1, mask_2)
            return
        elif key in ['nw_src', 'nw_dst', 'arp_spa', 'arp_tpa']:
            # IPv4 address with optional mask.
            ipv4_1, ip_mask_1 = _to_match_ip(value)
            ipv4_2, ip_mask_2 = _to_match_ip(match_str[key])
            eq_(ipv4_1, ipv4_2)
            eq_(ip_mask_1, ip_mask_2)
            return
        elif key in ['ipv6_src', 'ipv6_dst']:
            # IPv6: compare only fully-masked 16-bit groups.
            ipv6_1, netmask_1 = _to_match_ipv6(value)
            ipv6_2, netmask_2 = _to_match_ipv6(match_str[key])
            for i in range(0, 8):
                if netmask_1[i] == 65535:
                    eq_(ipv6_1[i], ipv6_2[i])
                else:
                    continue
            eq_(netmask_1, netmask_2)
            return
        elif key == 'ipv6_nd_target':
            # IPv6 ND target has no mask to compare.
            ipv6_1, netmask_1 = _to_match_ipv6(value)
            ipv6_2, netmask_2 = _to_match_ipv6(match_str[key])
            for i in range(0, 8):
                if netmask_1[i] == 65535:
                    eq_(ipv6_1[i], ipv6_2[i])
                else:
                    continue
            return
        elif key == 'metadata':
            # Hex-string metadata is rendered in decimal.
            eq_(str(int(value, 16)), match_str[key])
            return
        # Fall-through: everything else compares verbatim.
        eq_(value, match_str[key])

    for key, value in attrs.items():
        # String rendering uses the translated field name where one
        # exists.
        if key in conv_dict:
            key = conv_dict[key]
        equal_str(key, value, match_str)