def testMinus(self):
    """Subtracting one headerspace from another should leave 3 wildcards."""
    minuend = headerspace(1)
    minuend.add_hs(wildcard_create_from_string("1001xxxx"))
    subtrahend = headerspace(1)
    subtrahend.add_hs(wildcard_create_from_string("100xx000"))
    minuend.minus(subtrahend)
    self.assertEqual(minuend.count(), 3)
def testContainedIn(self):
    """Containment should hold one way (inner in outer) but not the other."""
    inner = headerspace(1)
    inner.add_hs(wildcard_create_from_string("1001xxxx"))
    inner.diff_hs(wildcard_create_from_string("1xxxx111"))

    outer = headerspace(1)
    for w in ("1001xxxx", "11xxxx11"):
        outer.add_hs(wildcard_create_from_string(w))
    for w in ("100xxx00", "1xxxx111"):
        outer.diff_hs(wildcard_create_from_string(w))

    self.assertTrue(inner.is_contained_in(outer))
    self.assertFalse(outer.is_contained_in(inner))
def testRW1(self):
    """A rewrite rule applied via T() should produce the rewritten header."""
    transfer = TF(1)
    rule = TF.create_standard_rule([1], "10xxxxxx", [2], "10011111", "01100000")
    transfer.add_rewrite_rule(rule)
    packet = headerspace(1)
    packet.add_hs(wildcard_create_from_string("1001xxxx"))
    out = transfer.T(packet, 1)
    self.assertEqual(len(out), 1)
    expected = wildcard_create_from_string("1111xxxx")
    self.assert_(wildcard_is_equal(out[0][0].hs_list[0], expected))
def testIntersect2(self):
    '''
    Intersecting two headerspace objects should keep the relevant
    diffs on each side.
    '''
    lhs = headerspace(1)
    for w in ("1001xxxx", "11xxxx11"):
        lhs.add_hs(wildcard_create_from_string(w))
    for w in ("100xx000", "1xxx1x11"):
        lhs.diff_hs(wildcard_create_from_string(w))

    rhs = headerspace(1)
    rhs.add_hs(wildcard_create_from_string("10xxxxx1"))
    rhs.diff_hs(wildcard_create_from_string("10010xxx"))

    lhs.intersect(rhs)

    # The right-hand side must be left untouched.
    self.assertEqual(rhs.count(), 1)
    self.assertEqual(rhs.count_diff(), 1)
    # The left-hand side now holds the intersection.
    self.assertEqual(lhs.count(), 1)
    self.assertEqual(lhs.count_diff(), 2)
def testCreate(self):
    '''
    Each add_hs call should add exactly one wildcard entry to a freshly
    created headerspace object.
    '''
    space = headerspace(1)
    space.add_hs(wildcard_create_from_string("1001xxxx"))
    space.add_hs(wildcard_create_from_string("11xxxx11"))
    self.assertEqual(space.count(), 2)
def testComplement(self):
    '''
    Complementing a headerspace that carries a diff should expand into
    5 wildcard entries.
    '''
    space = headerspace(1)
    space.add_hs(wildcard_create_from_string("1001xxxx"))
    space.diff_hs(wildcard_create_from_string("100xx000"))
    space.complement()
    self.assertEqual(space.count(), 5)
def testSelfDiff(self):
    """self_diff should fold the lazy diffs into the hs list, leaving none."""
    space = headerspace(1)
    for w in ("1001xxxx", "11xxxx11"):
        space.add_hs(wildcard_create_from_string(w))
    for w in ("100xxx00", "1xxxx111"):
        space.diff_hs(wildcard_create_from_string(w))
    space.self_diff()
    self.assertEqual(space.count(), 5)
    self.assertEqual(space.count_diff(), 0)
def testAddSource(self):
    """Adding a source node should produce the expected flows and pipelines."""
    self.N.add_link(1, 100)
    self.N.add_link(100, 1)
    src_hs = headerspace(1)
    src_hs.add_hs(wildcard_create_from_string("1xxxxxxx"))
    self.N.add_source("client", src_hs, [100])
    expected_pipes = [(1, 1), (1, 1), (1, 1), (1, 0), (1, 2), (0, 1), (0, 2)]
    expected_flows = [(1, 0), (1, 0), (1, 2), (0, 0), (2, 0), (1, 1), (2, 0)]
    self._checkPipelines(expected_pipes)
    self._checkSourceFlow(expected_flows)
def testRW1(self):
    """The matching input hs should come out rewritten by T()."""
    tf = TF(1)
    tf.add_rewrite_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2], "10011111", "01100000"))
    in_hs = headerspace(1)
    in_hs.add_hs(wildcard_create_from_string("1001xxxx"))
    res = tf.T(in_hs, 1)
    self.assertEqual(len(res), 1)
    self.assert_(
        wildcard_is_equal(res[0][0].hs_list[0],
                          wildcard_create_from_string("1111xxxx")))
def testIntersect1(self):
    '''
    Intersecting with a single wildcard should prune entries and diffs
    that do not overlap it.
    '''
    added = ["1001xxxx", "11xxxx11"]
    removed = ["100xx000", "1xxx1x11"]
    space = headerspace(1)
    for w in added:
        space.add_hs(wildcard_create_from_string(w))
    for w in removed:
        space.diff_hs(wildcard_create_from_string(w))
    space.intersect(wildcard_create_from_string("xxxxx011"))
    self.assertEqual(space.count(), 2)
    self.assertEqual(space.count_diff(), 2)
def testDependency(self):
    """An all-x packet should be matched by both of two overlapping rules."""
    tf = TF(1)
    fwd = TF.create_standard_rule([1], "10xxxxxx", [2], None, None)
    tf.add_fwd_rule(fwd)
    rw = TF.create_standard_rule(
        [1], "1xxxxxxx", [3], "00111111", "10000000", "", [])
    tf.add_rewrite_rule(rw)
    pkt = headerspace(1)
    pkt.add_hs(wildcard_create_from_string("xxxxxxxx"))
    res = tf.T(pkt, 1)
    self.assertEqual(len(res), 2, "Expecting both rules to be matched")
    self.assertTrue(
        wildcard_is_equal(res[1][0].hs_list[0],
                          wildcard_create_from_string("10xxxxxx")),
        "unexpected second byte array")
def testInverse(self):
    """T_inv should map the output hs (and its diff) back through the rule."""
    tf = TF(1)
    tf.add_rewrite_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2], "10011111", "01100000"))
    out_hs = headerspace(1)
    out_hs.add_hs(wildcard_create_from_string("111xxxxx"))
    out_hs.diff_hs(wildcard_create_from_string("1110xxxx"))
    res = tf.T_inv(out_hs, 2)
    self.assertEqual(len(res), 1)
    inverted = res[0][0]
    self.assertEqual(inverted.count(), 1)
    self.assertEqual(inverted.count_diff(), 1)
    self.assert_(wildcard_is_equal(
        inverted.hs_list[0], wildcard_create_from_string("10xxxxxx")))
    self.assert_(wildcard_is_equal(
        inverted.hs_diff[0][0], wildcard_create_from_string("10x0xxxx")))
def detect_loop(NTF, TTF, ports, test_packet=None, out_port_offset=0): loops = [] for port in ports: print "port %d is being checked" % port propagation = [] # put all-x test packet in propagation graph test_pkt = test_packet if test_pkt == None: all_x = wildcard_create_bit_repeat(NTF.length, 0x3) test_pkt = headerspace(NTF.length) test_pkt.add_hs(all_x) p_node = {} p_node["hdr"] = test_pkt p_node["port"] = port p_node["visits"] = [] p_node["hs_history"] = [] propagation.append(p_node) while len(propagation) > 0: # get the next node in propagation graph and apply it to NTF and TTF print "Propagation has length: %d" % len(propagation) tmp_propag = [] for p_node in propagation: next_hp = NTF.T(p_node["hdr"], p_node["port"]) for (next_h, next_ps) in next_hp: for next_p in next_ps: linked = TTF.T(next_h, next_p) for (linked_h, linked_ports) in linked: for linked_p in linked_ports: new_p_node = {} new_p_node["hdr"] = linked_h new_p_node["port"] = linked_p new_p_node["visits"] = list(p_node["visits"]) new_p_node["visits"].append(p_node["port"]) # new_p_node["visits"].append(next_p) new_p_node["hs_history"] = list(p_node["hs_history"]) new_p_node["hs_history"].append(p_node["hdr"]) # print new_p_node if len(new_p_node["visits"]) > 0 and new_p_node["visits"][0] == linked_p: loops.append(new_p_node) print "loop detected" elif ( linked_p in new_p_node["visits"] or (linked_p + out_port_offset) in new_p_node["visits"] ): pass else: tmp_propag.append(new_p_node) propagation = tmp_propag return loops
def testDependency(self):
    """Two overlapping rules should split an all-x input packet."""
    transfer = TF(1)
    transfer.add_fwd_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2], None, None))
    transfer.add_rewrite_rule(
        TF.create_standard_rule(
            [1], "1xxxxxxx", [3], "00111111", "10000000", "", []))
    packet = headerspace(1)
    packet.add_hs(wildcard_create_from_string("xxxxxxxx"))
    outcome = transfer.T(packet, 1)
    self.assertEqual(len(outcome), 2, "Expecting both rules to be matched")
    second = outcome[1][0].hs_list[0]
    self.assertTrue(
        wildcard_is_equal(second, wildcard_create_from_string("10xxxxxx")),
        "unexpected second byte array")
def detect_loop(NTF, TTF, ports, test_packet = None, out_port_offset = 0):
    '''
    Detect forwarding loops by propagating a test packet from each port
    through the network transfer function (NTF) and the topology transfer
    function (TTF).  Returns the list of propagation-graph nodes (dicts
    with keys "hdr", "port", "visits", "hs_history") at which a loop back
    to the starting port was found.  out_port_offset is added to a port id
    when checking whether it was already visited.
    '''
    loops = []
    for port in ports:
        print "port %d is being checked"%port
        propagation = []
        # put all-x test packet in propagation graph (unless the caller
        # supplied one)
        test_pkt = test_packet
        if test_pkt == None:
            all_x = wildcard_create_bit_repeat(NTF.length,0x3)
            test_pkt = headerspace(NTF.length)
            test_pkt.add_hs(all_x)
        p_node = {}
        p_node["hdr"] = test_pkt
        p_node["port"] = port
        p_node["visits"] = []
        p_node["hs_history"] = []
        propagation.append(p_node)
        while len(propagation)>0:
            # get the next node in propagation graph and apply it to NTF
            # and TTF
            print "Propagation has length: %d"%len(propagation)
            tmp_propag = []
            for p_node in propagation:
                next_hp = NTF.T(p_node["hdr"],p_node["port"])
                for (next_h,next_ps) in next_hp:
                    for next_p in next_ps:
                        linked = TTF.T(next_h,next_p)
                        for (linked_h,linked_ports) in linked:
                            for linked_p in linked_ports:
                                # child node: carries the new header plus
                                # the history of ports/headers on this path
                                new_p_node = {}
                                new_p_node["hdr"] = linked_h
                                new_p_node["port"] = linked_p
                                new_p_node["visits"] = list(p_node["visits"])
                                new_p_node["visits"].append(p_node["port"])
                                #new_p_node["visits"].append(next_p)
                                new_p_node["hs_history"] = list(p_node["hs_history"])
                                new_p_node["hs_history"].append(p_node["hdr"])
                                #print new_p_node
                                if len(new_p_node["visits"]) > 0 and new_p_node["visits"][0] == linked_p:
                                    # reached the starting port again: loop
                                    loops.append(new_p_node)
                                    print "loop detected"
                                elif linked_p in new_p_node["visits"] or (linked_p + out_port_offset) in new_p_node["visits"]:
                                    # port already visited mid-path (possibly
                                    # as an output-side id): prune this branch
                                    pass
                                else:
                                    tmp_propag.append(new_p_node)
            propagation = tmp_propag
    return loops
def testSourceReachabilityProbe(self):
    """A reachability probe should report two probe states; removing the
    probe and the source should restore the baseline plumbing."""
    self.N.add_link(1, 100)
    self.N.add_link(100, 1)
    client_hs = headerspace(1)
    client_hs.add_hs(wildcard_create_from_string("1xxxxxxx"))
    self.N.add_source("client", client_hs, [100])
    self.N.add_link(12, 200)
    self.N.add_link(200, 12)
    self.N.add_source_reachability_probe(
        "no-flow-from-client", [1], [200],
        wildcard_create_from_string("xxxxxxxx"))
    probe_state = self.N.get_source_probe_state("no-flow-from-client")
    self.assertEqual(len(probe_state), 2)
    self.N.remove_source_reachability_probe("no-flow-from-client")
    self.N.remove_source("client")
    # After tearing everything down, the original plumbing checks must pass.
    self.testSetupPlumbing()
def testInverse(self):
    """Inverting T over the rewritten output recovers the matching input."""
    transfer = TF(1)
    transfer.add_rewrite_rule(TF.create_standard_rule(
        [1], "10xxxxxx", [2], "10011111", "01100000"))
    observed = headerspace(1)
    observed.add_hs(wildcard_create_from_string("111xxxxx"))
    observed.diff_hs(wildcard_create_from_string("1110xxxx"))
    results = transfer.T_inv(observed, 2)
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0][0].count(), 1)
    self.assertEqual(results[0][0].count_diff(), 1)
    expected_hs = wildcard_create_from_string("10xxxxxx")
    expected_diff = wildcard_create_from_string("10x0xxxx")
    self.assert_(wildcard_is_equal(results[0][0].hs_list[0], expected_hs))
    self.assert_(wildcard_is_equal(results[0][0].hs_diff[0][0], expected_diff))
def testDiffHS(self):
    '''
    Lazy subtraction (diff) semantics:
    1) a diff added before any hs entry is dropped;
    2) a diff attaches to every intersecting hs entry;
    3) a later add_hs call does not inherit earlier diffs.
    '''
    space = headerspace(1)
    # Diff before any hs: nothing to attach it to.
    space.diff_hs(wildcard_create_from_string("1001xxxx"))
    space.add_hs(wildcard_create_from_string("1001xxxx"))
    space.add_hs(wildcard_create_from_string("11xxxx11"))
    self.assertEqual(space.count_diff(), 0)
    # This diff overlaps both hs entries.
    space.diff_hs(wildcard_create_from_string("1xxx1111"))
    self.assertEqual(space.count_diff(), 2)
    # A new hs entry must not pick up the existing diffs.
    space.add_hs(wildcard_create_from_string("xxxxxx11"))
    self.assertEqual(space.count_diff(), 2)
def testCopy(self):
    '''
    Test that copy() makes an independent copy:
    adding new stuff to the original hs must not affect the copied hs.
    '''
    h = headerspace(1)
    h.add_hs(wildcard_create_from_string("1001xxxx"))
    h.add_hs(wildcard_create_from_string("11xxxx11"))
    h.diff_hs(wildcard_create_from_string("100x0000"))
    h.diff_hs(wildcard_create_from_string("1xxx1111"))
    hcpy = h.copy()
    self.assertEqual(h.count(), hcpy.count())
    self.assertEqual(h.count_diff(), hcpy.count_diff())
    h.add_hs(wildcard_create_from_string("100100xx"))
    self.assertEqual(h.count(), 3)
    self.assertEqual(h.count_diff(), 3)
    # The copy must be unaffected by the add_hs above.
    self.assertEqual(hcpy.count(), 2)
    # Bug fix: this previously re-asserted h.count_diff() (a duplicate of
    # the line above); the copy's diff count is what must stay unchanged.
    self.assertEqual(hcpy.count_diff(), 3)
def testAddRemoveRule(self):
    """Removing rule 4, attaching a source, then re-adding the rule should
    produce the expected pipelines and source flows at each step."""
    # Remove rule 4, add a source, and verify everything is correct.
    removed = self.rule_ids[4]
    self.N.remove_rule(removed)
    self.rule_ids.remove(removed)
    self.N.add_link(1, 100)
    self.N.add_link(100, 1)
    src = headerspace(1)
    src.add_hs(wildcard_create_from_string("1xxxxxxx"))
    self.N.add_source("client", src, [100])
    self._checkPipelines([(0, 1), (0, 1), (1, 1), (1, 0), (0, 1), (0, 1)])
    self._checkSourceFlow([(1, 0), (1, 0), (1, 2), (0, 0), (1, 1), (0, 0)])
    # Now add back the same rule and verify things are correct again.
    self.rule_ids.append(self.N.add_rule(
        "B2", -1, [4], [5],
        wildcard_create_from_string("10xxxxxx"),
        wildcard_create_from_string("10011111"),
        wildcard_create_from_string("01100000")))
    self._checkPipelines(
        [(1, 1), (1, 1), (1, 1), (1, 0), (0, 1), (0, 2), (1, 2)])
    self._checkSourceFlow(
        [(1, 0), (1, 0), (1, 2), (0, 0), (1, 1), (2, 0), (2, 0)])
"fwd_engine_layer":2, "input_path":"tf_files", "switch_id_multiplier":cisco_router.SWITCH_ID_MULTIPLIER, "port_type_multiplier":cisco_router.PORT_TYPE_MULTIPLIER, "out_port_type_const":cisco_router.OUTPUT_PORT_TYPE_CONST, "remove_duplicates":True, } (ntf,ttf,name_to_id,id_to_name) = load_network(settings) # create all-x packet as input headerspace. all_x = wildcard_create_bit_repeat(ntf.length,0x3) # uncomment to set some field #set_header_field(cisco_router.HS_FORMAT(), all_x, "field", value, right_mask) #set_header_field(cisco_router.HS_FORMAT(), all_x, "vlan", 92, 0) test_pkt = headerspace(ntf.length) test_pkt.add_hs(all_x) #set some input/output ports output_port_addition = cisco_router.PORT_TYPE_MULTIPLIER * \ cisco_router.OUTPUT_PORT_TYPE_CONST #TODO: CHANGE THIS IF YOU WANT TO RUN IT FROM/TO DIFFERENT PORTS src_port_id = name_to_id["ROUTER NAME"]["PORT NAME"] dst_port_ids = [name_to_id["ROUTER NAME"]["PORT NAME"]+output_port_addition] #start reachability test and print results st = time() paths = find_reachability(ntf, ttf, src_port_id, dst_port_ids, test_pkt) en = time() print_paths(paths, id_to_name)
if __name__ == '__main__':
    # Build a small NetPlumber instance over 1-byte headers.
    N = NetPlumber(1)
    rule_ids = []
    # Topology: bidirectional links between the box ports.
    N.add_link(2, 4)
    N.add_link(4, 2)
    N.add_link(3, 6)
    N.add_link(6, 3)
    N.add_link(5, 8)
    N.add_link(8, 5)
    N.add_link(7, 9)
    N.add_link(9, 7)
    N.add_link(1, 100)
    N.add_link(100, 1)
    # Source "client" injecting headers matching 1xxxxxxx at port 100.
    s = headerspace(1)
    s.add_hs(wildcard_create_from_string("1xxxxxxx"))
    N.add_source("client", s, [100])
    # Time each rule insertion.
    st = time()
    rule_ids.append(N.add_rule("B1", -1, [1], [2], \
        wildcard_create_from_string("1010xxxx"), \
        None, \
        None))
    en = time()
    print "time ", en - st
    st = time()
    rule_ids.append(N.add_rule("B1", -1, [1], [2], \
        wildcard_create_from_string("10001xxx"), \
        None, \
        None))
    en = time()
for line in f: if (not line.startswith("$")) and line != "": tokens = line.strip().split(":") port = int(tokens[1]) + settings["port_type_multiplier"] * \ settings["mid_port_type_const"] N.add_link(port,port) # add link for forward engine port for i in range(len(settings["rtr_names"])): fwd_link = (i+1) * settings["switch_id_multiplier"] N.add_link(fwd_link,fwd_link) # add a source node at yoza-te1/4 src_port_id = map["yoza_rtr"]["te1/4"] N.add_link(1,src_port_id) hs = headerspace(N.length) hs.add_hs(wildcard_create_bit_repeat(N.length,0x3)) N.add_source("yoza-source", hs, [1]) rule_ids = [] for rtr_name in settings["rtr_names"]: f = TF(1) f.load_object_from_file("%s/%s.tf"%(settings["input_path"],rtr_name)) for rule in f.rules: in_ports = rule["in_ports"] out_ports = rule["out_ports"] match = rule["match"] mask = rule["mask"] rewrite = rule["rewrite"] st = time() rule_ids.append(\
return result if __name__ == '__main__': N = NetPlumber(1) rule_ids = [] N.add_link(2,4) N.add_link(4, 2) N.add_link(3, 6) N.add_link(6, 3) N.add_link(5, 8) N.add_link(8, 5) N.add_link(7, 9) N.add_link(9, 7) N.add_link(1, 100) N.add_link(100, 1) s = headerspace(1) s.add_hs(wildcard_create_from_string("1xxxxxxx")) N.add_source("client", s, [100]) st = time() rule_ids.append(N.add_rule("B1", -1, [1], [2], \ wildcard_create_from_string("1010xxxx"), \ None, \ None)) en = time() print "time ",en-st st = time() rule_ids.append(N.add_rule("B1", -1, [1], [2], \ wildcard_create_from_string("10001xxx"), \ None, \ None)) en = time()
#!/usr/bin/env python # coding=utf-8 ''' Copyright 2012, Stanford University. This file is licensed under GPL v2 plus a special exception, as described in included LICENSE_EXCEPTION.txt. Created on Jun 7, 2012 @author: Peyman Kazemian ''' import headerspace.hs as hs import utils.wildcard as uw # Creating a header space object of length 8 bits (1 byte) hsl = hs.headerspace(4) # Adding some wildcard expressions to the headerspace object hsl.add_hs(uw.wildcard_create_from_string("00001010100000100111111100000011")) hsl.add_hs(uw.wildcard_create_from_int(175668993, 4)) # 10.120.127.1 hsl.add_hs(uw.wildcard_create_from_string("000010101000001001111111xxxxxxxx")) print "original HS is\n", hsl, "\n---------" # Removing some wildcard expressions from the headerspace object #hsl.diff_hs(uw.wildcard_create_from_string("1010011x")) #hsl.diff_hs(uw.wildcard_create_from_string("1010xxx0")) #print "New HS is\n",hsl,"\n---------" # Intersecting this headerspace with some wildcard expression hsl.intersect( uw.wildcard_create_from_string("00001010100000101000000000000001"))
def __init__(self):
    """Initialize empty 4-byte source and destination header spaces."""
    # self.src: header space seen at the source side
    self.src = hs.headerspace(4)
    # self.dst: header space seen at the destination side
    self.dst = hs.headerspace(4)
for line in f: if (not line.startswith("$")) and line != "": tokens = line.strip().split(":") port = int(tokens[1]) + settings["port_type_multiplier"] * \ settings["mid_port_type_const"] N.add_link(port, port) # add link for forward engine port for i in range(len(settings["rtr_names"])): fwd_link = (i + 1) * settings["switch_id_multiplier"] N.add_link(fwd_link, fwd_link) # add a source node at yoza-te1/4 src_port_id = map["yoza_rtr"]["te1/4"] N.add_link(1, src_port_id) hs = headerspace(N.length) hs.add_hs(wildcard_create_bit_repeat(N.length, 0x3)) N.add_source("yoza-source", hs, [1]) rule_ids = [] for rtr_name in settings["rtr_names"]: f = TF(1) f.load_object_from_file("%s/%s.tf" % (settings["input_path"], rtr_name)) for rule in f.rules: in_ports = rule["in_ports"] out_ports = rule["out_ports"] match = rule["match"] mask = rule["mask"] rewrite = rule["rewrite"] st = time() rule_ids.append(\