def testMinus(self):
    h1 = headerspace(1)
    h1.add_hs(wildcard_create_from_string("1001xxxx"))
    h2 = headerspace(1)
    h2.add_hs(wildcard_create_from_string("100xx000"))
    h1.minus(h2)
    self.assertEqual(h1.count(), 3)
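# A hedged reading of the expected count of 3: h2's "100xx000" overlaps h1's
# "1001xxxx" in "1001x000", and subtracting that overlap appears to leave the
# three wildcards "1001x1xx", "1001xx1x" and "1001xxx1" (one per concrete
# 0-bit of the overlap that falls on an x position of h1).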
def testContainedIn(self):
    h1 = headerspace(1)
    h1.add_hs(wildcard_create_from_string("1001xxxx"))
    h1.diff_hs(wildcard_create_from_string("1xxxx111"))
    h2 = headerspace(1)
    h2.add_hs(wildcard_create_from_string("1001xxxx"))
    h2.add_hs(wildcard_create_from_string("11xxxx11"))
    h2.diff_hs(wildcard_create_from_string("100xxx00"))
    h2.diff_hs(wildcard_create_from_string("1xxxx111"))
    self.assertTrue(h1.is_contained_in(h2))
    self.assertFalse(h2.is_contained_in(h1))
def testIntersect2(self):
    ''' Test intersect with a headerspace '''
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    h.diff_hs(wildcard_create_from_string("100xx000"))
    h.diff_hs(wildcard_create_from_string("1xxx1x11"))
    other = headerspace(1)
    other.add_hs(wildcard_create_from_string("10xxxxx1"))
    other.diff_hs(wildcard_create_from_string("10010xxx"))
    h.intersect(other)
    self.assertEqual(other.count(), 1)
    self.assertEqual(other.count_diff(), 1)
    self.assertEqual(h.count(), 1)
    self.assertEqual(h.count_diff(), 2)
def testComplement(self):
    ''' Test if complement correctly handles diffs '''
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.diff_hs(wildcard_create_from_string("100xx000"))
    h.complement()
    self.assertEqual(h.count(), 5)
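# A hedged reading of the expected count of 5: complementing
# "1001xxxx" minus "100xx000" appears to yield the four wildcards that violate
# a concrete bit of "1001xxxx" (0xxxxxxx, x1xxxxxx, xx1xxxxx, xxx0xxxx) plus
# the carved-out overlap "1001x000", i.e. 4 + 1 = 5 entries.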
def testCreate(self):
    '''
    Test if creating a headerspace object creates the correct number of
    bytearrays inside.
    '''
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    self.assertEqual(h.count(), 2)
def testSelfDiff(self):
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    h.diff_hs(wildcard_create_from_string("100xxx00"))
    h.diff_hs(wildcard_create_from_string("1xxxx111"))
    h.self_diff()
    self.assertEqual(h.count(), 5)
    self.assertEqual(h.count_diff(), 0)
def testRW1(self):
    tf = TF(1)
    tf.add_rewrite_rule(TF.create_standard_rule([1], "10xxxxxx", [2],
                                                "10011111", "01100000"))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("1001xxxx"))
    result = tf.T(hs, 1)
    self.assertEqual(len(result), 1)
    self.assertTrue(wildcard_is_equal(result[0][0].hs_list[0],
                                      wildcard_create_from_string("1111xxxx")))
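# A note on the rewrite semantics implied by testRW1: the expected output
# "1111xxxx" is consistent with new_header = (header AND mask) OR rewrite,
# i.e. bits whose mask bit is 0 are forced to the corresponding rewrite bit:
#   header  1 0 0 1 x x x x
#   mask    1 0 0 1 1 1 1 1   (0 marks the rewritten positions 1 and 2)
#   rewrite 0 1 1 0 0 0 0 0
#   result  1 1 1 1 x x x x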
def detect_loop(NTF, TTF, ports, test_packet=None, out_port_offset=0):
    loops = []
    for port in ports:
        print "port %d is being checked" % port
        propagation = []
        # put all-x test packet in propagation graph
        test_pkt = test_packet
        if test_pkt is None:
            all_x = wildcard_create_bit_repeat(NTF.length, 0x3)
            test_pkt = headerspace(NTF.length)
            test_pkt.add_hs(all_x)
        p_node = {}
        p_node["hdr"] = test_pkt
        p_node["port"] = port
        p_node["visits"] = []
        p_node["hs_history"] = []
        propagation.append(p_node)
        while len(propagation) > 0:
            # get the next node in propagation graph and apply it to NTF and TTF
            print "Propagation has length: %d" % len(propagation)
            tmp_propag = []
            for p_node in propagation:
                next_hp = NTF.T(p_node["hdr"], p_node["port"])
                for (next_h, next_ps) in next_hp:
                    for next_p in next_ps:
                        linked = TTF.T(next_h, next_p)
                        for (linked_h, linked_ports) in linked:
                            for linked_p in linked_ports:
                                new_p_node = {}
                                new_p_node["hdr"] = linked_h
                                new_p_node["port"] = linked_p
                                new_p_node["visits"] = list(p_node["visits"])
                                new_p_node["visits"].append(p_node["port"])
                                #new_p_node["visits"].append(next_p)
                                new_p_node["hs_history"] = list(p_node["hs_history"])
                                new_p_node["hs_history"].append(p_node["hdr"])
                                #print new_p_node
                                if len(new_p_node["visits"]) > 0 and \
                                        new_p_node["visits"][0] == linked_p:
                                    loops.append(new_p_node)
                                    print "loop detected"
                                elif linked_p in new_p_node["visits"] or \
                                        (linked_p + out_port_offset) in new_p_node["visits"]:
                                    pass
                                else:
                                    tmp_propag.append(new_p_node)
            propagation = tmp_propag
    return loops
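# Illustrative driver for detect_loop (not part of the original code). It
# mirrors the loading pattern used in the setup script further below: TF(1)
# followed by load_object_from_file, assuming the loaded object carries its
# own length. The .tf file names and port list are hypothetical placeholders.
def run_loop_check_sketch(ntf_file, ttf_file, ports):
    NTF = TF(1)
    NTF.load_object_from_file(ntf_file)
    TTF = TF(1)
    TTF.load_object_from_file(ttf_file)
    return detect_loop(NTF, TTF, ports)

# Example (hypothetical inputs):
# loops = run_loop_check_sketch("ntf.tf", "ttf.tf", [1, 2, 3])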
def testIntersect1(self):
    ''' Test intersect with a bytearray '''
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    h.diff_hs(wildcard_create_from_string("100xx000"))
    h.diff_hs(wildcard_create_from_string("1xxx1x11"))
    h.intersect(wildcard_create_from_string("xxxxx011"))
    self.assertEqual(h.count(), 2)
    self.assertEqual(h.count_diff(), 2)
def testDependency(self):
    tf = TF(1)
    tf.add_fwd_rule(TF.create_standard_rule([1], "10xxxxxx", [2],
                                            None, None))
    tf.add_rewrite_rule(TF.create_standard_rule([1], "1xxxxxxx", [3],
                                                "00111111", "10000000",
                                                "", []))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("xxxxxxxx"))
    result = tf.T(hs, 1)
    self.assertEqual(len(result), 2, "Expecting both rules to be matched")
    self.assertTrue(wildcard_is_equal(result[1][0].hs_list[0],
                                      wildcard_create_from_string("10xxxxxx")),
                    "unexpected second byte array")
def testDiffHS(self):
    '''
    Test the diff (lazy subtraction):
    1) adding a diff before having any hs entry doesn't add any diff to the hs.
    2) adding a diff adds the correct number of diff bytearrays.
    3) adding a new bytearray that intersects a previously added diff doesn't
       attach that diff to the new bytearray.
    '''
    h = headerspace(1)
    h.diff_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    self.assertEqual(h.count_diff(), 0)
    h.diff_hs(wildcard_create_from_string("1xxx1111"))
    self.assertEqual(h.count_diff(), 2)
    h.add_hs(wildcard_create_from_string("xxxxxx11"))
    self.assertEqual(h.count_diff(), 2)
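# Illustrative addition (not part of the original suite): a minimal sketch of
# the lazy-subtraction model exercised by testDiffHS and testSelfDiff above:
# diff_hs only records a subtraction against intersecting entries, and
# self_diff materializes the recorded diffs, leaving count_diff at 0.
def testLazyDiffSketch(self):
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.diff_hs(wildcard_create_from_string("1001x111"))  # recorded, not applied
    self.assertEqual(h.count(), 1)
    self.assertEqual(h.count_diff(), 1)
    h.self_diff()  # subtraction is actually performed here
    self.assertEqual(h.count_diff(), 0)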
def testInverse(self):
    tf = TF(1)
    tf.add_rewrite_rule(TF.create_standard_rule([1], "10xxxxxx", [2],
                                                "10011111", "01100000"))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("111xxxxx"))
    hs.diff_hs(wildcard_create_from_string("1110xxxx"))
    result = tf.T_inv(hs, 2)
    self.assertEqual(len(result), 1)
    self.assertEqual(result[0][0].count(), 1)
    self.assertEqual(result[0][0].count_diff(), 1)
    self.assertTrue(wildcard_is_equal(result[0][0].hs_list[0],
                                      wildcard_create_from_string("10xxxxxx")))
    self.assertTrue(wildcard_is_equal(result[0][0].hs_diff[0][0],
                                      wildcard_create_from_string("10x0xxxx")))
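# A hedged reading of the expected T_inv results: positions 1 and 2 (where the
# mask "10011111" is 0) were overwritten by the rule, so inverting the rewrite
# on "111xxxxx" wildcards them back to "1xxxxxxx"; intersecting with the
# rule's match "10xxxxxx" gives hs_list[0] == "10xxxxxx". Applying the same
# inversion to the diff "1110xxxx" gives "1xx0xxxx", which intersected with
# the match yields hs_diff[0][0] == "10x0xxxx".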
def testCopy(self):
    '''
    Test if copy works correctly: adding new entries to the original hs
    doesn't affect the copied hs.
    '''
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    h.diff_hs(wildcard_create_from_string("100x0000"))
    h.diff_hs(wildcard_create_from_string("1xxx1111"))
    hcpy = h.copy()
    self.assertEqual(h.count(), hcpy.count())
    self.assertEqual(h.count_diff(), hcpy.count_diff())
    h.add_hs(wildcard_create_from_string("100100xx"))
    self.assertEqual(h.count(), 3)
    self.assertEqual(h.count_diff(), 3)
    self.assertEqual(hcpy.count(), 2)
    self.assertEqual(hcpy.count_diff(), 3)
for line in f:
    if (not line.startswith("$")) and line != "":
        tokens = line.strip().split(":")
        port = int(tokens[1]) + settings["port_type_multiplier"] * \
            settings["mid_port_type_const"]
        N.add_link(port, port)

# add link for forward engine port
for i in range(len(settings["rtr_names"])):
    fwd_link = (i + 1) * settings["switch_id_multiplier"]
    N.add_link(fwd_link, fwd_link)

# add a source node at yoza-te1/4
src_port_id = map["yoza_rtr"]["te1/4"]
N.add_link(1, src_port_id)
hs = headerspace(N.length)
hs.add_hs(wildcard_create_bit_repeat(N.length, 0x3))
N.add_source("yoza-source", hs, [1])

rule_ids = []
for rtr_name in settings["rtr_names"]:
    f = TF(1)
    f.load_object_from_file("%s/%s.tf" % (settings["input_path"], rtr_name))
    for rule in f.rules:
        in_ports = rule["in_ports"]
        out_ports = rule["out_ports"]
        match = rule["match"]
        mask = rule["mask"]
        rewrite = rule["rewrite"]
        st = time()
        rule_ids.append(\