def testRW1(self):
    """A rewrite rule transforms a matching headerspace on forward transfer.

    Installs one rewrite rule (in-port 1, match 10xxxxxx, out-port 2;
    mask/rewrite semantics per TF.create_standard_rule) and checks that
    T() on a matching input yields exactly one rewritten headerspace.
    """
    tf = TF(1)
    tf.add_rewrite_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2], "10011111", "01100000"))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("1001xxxx"))
    result = tf.T(hs, 1)
    self.assertEqual(len(result), 1)
    # assertTrue replaces the deprecated TestCase.assert_ alias
    # (removed in Python 3.12); behavior is identical.
    self.assertTrue(wildcard_is_equal(
        result[0][0].hs_list[0],
        wildcard_create_from_string("1111xxxx")))
def testRW1(self):
    """Forward transfer applies the rewrite rule to a matching header.

    One rewrite rule is installed (in-port 1, match 10xxxxxx, out-port 2);
    a matching input headerspace must produce exactly one result whose
    wildcard equals the expected rewritten value.
    """
    tf = TF(1)
    tf.add_rewrite_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2],
                                "10011111", "01100000"))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("1001xxxx"))
    result = tf.T(hs, 1)
    self.assertEqual(len(result), 1)
    # assertTrue replaces the deprecated TestCase.assert_ alias
    # (removed in Python 3.12); behavior is identical.
    self.assertTrue(wildcard_is_equal(
        result[0][0].hs_list[0],
        wildcard_create_from_string("1111xxxx")))
def testInverse(self):
    """T_inv maps an output headerspace (including its diff) back through a rewrite rule.

    The input headerspace carries one positive wildcard and one subtracted
    (diff) wildcard; the inverse transfer must preserve both, mapping each
    back to the pre-rewrite header patterns.
    """
    tf = TF(1)
    tf.add_rewrite_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2], "10011111", "01100000"))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("111xxxxx"))
    hs.diff_hs(wildcard_create_from_string("1110xxxx"))
    result = tf.T_inv(hs, 2)
    self.assertEqual(len(result), 1)
    self.assertEqual(result[0][0].count(), 1)
    self.assertEqual(result[0][0].count_diff(), 1)
    # assertTrue replaces the deprecated TestCase.assert_ alias
    # (removed in Python 3.12); behavior is identical.
    self.assertTrue(wildcard_is_equal(
        result[0][0].hs_list[0],
        wildcard_create_from_string("10xxxxxx")))
    self.assertTrue(wildcard_is_equal(
        result[0][0].hs_diff[0][0],
        wildcard_create_from_string("10x0xxxx")))
def testDependency(self):
    """Overlapping rules: a narrow fwd rule and a broader rewrite rule both match.

    The fwd rule matches 10xxxxxx and the rewrite rule matches 1xxxxxxx;
    a fully-wildcarded input must be matched by both, and the second
    result must carry the fwd rule's narrower match pattern.
    """
    transfer = TF(1)
    transfer.add_fwd_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2], None, None))
    transfer.add_rewrite_rule(
        TF.create_standard_rule(
            [1], "1xxxxxxx", [3], "00111111", "10000000", "", []))
    header = headerspace(1)
    header.add_hs(wildcard_create_from_string("xxxxxxxx"))
    outputs = transfer.T(header, 1)
    self.assertEqual(len(outputs), 2, "Expecting both rules to be matched")
    expected = wildcard_create_from_string("10xxxxxx")
    self.assertTrue(
        wildcard_is_equal(outputs[1][0].hs_list[0], expected),
        "unexpected second byte array")
def testDependency(self):
    """Both installed rules must match a fully-wildcarded input header.

    Installs a forwarding rule (match 10xxxxxx) and a wider rewrite rule
    (match 1xxxxxxx), feeds an all-wildcard header through T(), and
    verifies two results come back with the expected second match.
    """
    table = TF(1)
    fwd = TF.create_standard_rule([1], "10xxxxxx", [2], None, None)
    table.add_fwd_rule(fwd)
    rewrite = TF.create_standard_rule(
        [1], "1xxxxxxx", [3], "00111111", "10000000", "", [])
    table.add_rewrite_rule(rewrite)
    space = headerspace(1)
    space.add_hs(wildcard_create_from_string("xxxxxxxx"))
    matched = table.T(space, 1)
    self.assertEqual(len(matched), 2, "Expecting both rules to be matched")
    self.assertTrue(
        wildcard_is_equal(matched[1][0].hs_list[0],
                          wildcard_create_from_string("10xxxxxx")),
        "unexpected second byte array")
def testInverse(self):
    """Inverse transfer preserves both the positive and the diff wildcard.

    Builds a headerspace with one added and one subtracted wildcard,
    runs it backwards through a rewrite rule via T_inv, and checks the
    counts and the recovered pre-rewrite patterns.
    """
    tf = TF(1)
    tf.add_rewrite_rule(
        TF.create_standard_rule([1], "10xxxxxx", [2],
                                "10011111", "01100000"))
    hs = headerspace(1)
    hs.add_hs(wildcard_create_from_string("111xxxxx"))
    hs.diff_hs(wildcard_create_from_string("1110xxxx"))
    result = tf.T_inv(hs, 2)
    self.assertEqual(len(result), 1)
    self.assertEqual(result[0][0].count(), 1)
    self.assertEqual(result[0][0].count_diff(), 1)
    # assertTrue replaces the deprecated TestCase.assert_ alias
    # (removed in Python 3.12); behavior is identical.
    self.assertTrue(wildcard_is_equal(
        result[0][0].hs_list[0],
        wildcard_create_from_string("10xxxxxx")))
    self.assertTrue(wildcard_is_equal(
        result[0][0].hs_diff[0][0],
        wildcard_create_from_string("10x0xxxx")))
if args.view: stage = args.view[0] i = 1 for rule in f.rules: if stage == get_stage(rule): print i,":",get_openflow_rule(rule,inv_mapf) i = i + 1; if args.rm: indices = args.rm[0].split(",") indices = [int(i) for i in indices] indices.sort(reverse=True) for index in indices: f.remove_rule(index-1) if args.add: positions = (args.add[0]).split(",") rules = (args.add[1]).split(":") if len(rules) != len(positions): sys.stderr.write("Number of positions and number of rules should be the same") for i in range(len(positions)): position = int(positions[i])-1 tokens = rules[i].split(";") rule = parse_new_rule_tokens(tokens,mapf_extended,args.rtr_name) if rule["mask"] == None: f.add_fwd_rule(rule,position) elif rule["mask"] != None: f.add_rewrite_rule(rule,position) f.save_object_to_file("%s/%s.tf"%(args.data_path,args.rtr_name)) f.save_as_json("%s/%s.tf.json"%(args.data_path,args.rtr_name))
stage = args.view[0] i = 1 for rule in f.rules: if stage == get_stage(rule): print i, ":", get_openflow_rule(rule, inv_mapf) i = i + 1 if args.rm: indices = args.rm[0].split(",") indices = [int(i) for i in indices] indices.sort(reverse=True) for index in indices: f.remove_rule(index - 1) if args.add: positions = (args.add[0]).split(",") rules = (args.add[1]).split(":") if len(rules) != len(positions): sys.stderr.write( "Number of positions and number of rules should be the same") for i in range(len(positions)): position = int(positions[i]) - 1 tokens = rules[i].split(";") rule = parse_new_rule_tokens(tokens, mapf_extended, args.rtr_name) if rule["mask"] == None: f.add_fwd_rule(rule, position) elif rule["mask"] != None: f.add_rewrite_rule(rule, position) f.save_object_to_file("%s/%s.tf" % (args.data_path, args.rtr_name)) f.save_as_json("%s/%s.tf.json" % (args.data_path, args.rtr_name))