def generate_mesh_on_flows(topo, traffic):
    """Build a pipeline with rules for the site pairs covered by a random
    sample of 100 flows, and dump the sampled flows as JSON.

    For every (src_site, dst_site) pair touched by the sample, a
    priority-20 DROP rule for dst_port 8444 and a priority-10 forwarding
    rule are inserted for each distinct (src, dst) prefix combination.
    The sampled flows are written to the file named by ``sys.argv[-2]``.

    :param topo: path to the topology file (consumed by ``read_topo``)
    :param traffic: path to the flow file (consumed by ``read_flows``)
    :return: the populated ``Pipeline``
    """
    G = read_topo(topo)
    pl = Pipeline()
    tbl0 = pl.tables[0]
    flows = read_flows(traffic)
    # random.sample already returns a fresh list; no copy wrapper needed.
    rand100_flows = random.sample(flows, 100)
    # Distinct site pairs touched by the sampled flows.
    pairs = {
        (G.ip_prefixes[f['src_ip']], G.ip_prefixes[f['dst_ip']])
        for f in rand100_flows
    }
    for src_site, dst_site in pairs:
        for src in G.node[src_site]['ip-prefixes']:
            for dst in G.node[dst_site]['ip-prefixes']:
                if src == dst:
                    continue
                # High-priority drop for traffic to port 8444.
                tbl0.insert(
                    Rule(priority=20,
                         match=Match(src_ip=src, dst_ip=dst, dst_port=8444),
                         action=Action(action=ACTION_TYPE.DROP))
                )
                # Lower-priority catch-all forwarding rule for the pair.
                tbl0.insert(
                    Rule(priority=10,
                         match=Match(src_ip=src, dst_ip=dst),
                         action=Action(action=['A', 'B', 'C']))
                )
    with open(sys.argv[-2], 'w') as f:
        json.dump(rand100_flows, f, indent=2, sort_keys=True)
    return pl
def generate_dest_based_pipeline(filename):
    """Build a destination-based pipeline from the topology in *filename*.

    For every IP prefix in the topology, insert ten priority-20 DROP
    rules on randomly sampled destination ports in [50000, 50100), plus
    one priority-10 forwarding rule matching the destination alone.

    :param filename: topology file path, consumed by ``read_topo``
    :return: the populated ``Pipeline``
    """
    graph = read_topo(filename)
    pipeline = Pipeline()
    table = pipeline.tables[0]
    for prefix in graph.ip_prefixes:
        sampled_ports = random.sample(range(50000, 50100), 10)
        for port in sampled_ports:
            drop_rule = Rule(
                priority=20,
                match=Match(dst_ip=prefix, dst_port=port),
                action=Action(action=ACTION_TYPE.DROP),
            )
            table.insert(drop_rule)
        forward_rule = Rule(
            priority=10,
            match=Match(dst_ip=prefix),
            action=Action(action=['A', 'B', 'C']),
        )
        table.insert(forward_rule)
    return pipeline
def generate_full_mesh_pipeline(filename):
    """Build a full-mesh pipeline over every ordered prefix pair.

    For each distinct (src, dst) prefix pair in the topology, insert a
    priority-20 DROP rule for dst_port 8444 and a priority-10 forwarding
    rule for the pair.

    :param filename: topology file path, consumed by ``read_topo``
    :return: the populated ``Pipeline``
    """
    graph = read_topo(filename)
    pipeline = Pipeline()
    table = pipeline.tables[0]
    prefixes = graph.ip_prefixes
    for src_prefix in prefixes:
        for dst_prefix in prefixes:
            if src_prefix == dst_prefix:
                continue
            table.insert(Rule(
                priority=20,
                match=Match(src_ip=src_prefix, dst_ip=dst_prefix,
                            dst_port=8444),
                action=Action(action=ACTION_TYPE.DROP),
            ))
            table.insert(Rule(
                priority=10,
                match=Match(src_ip=src_prefix, dst_ip=dst_prefix),
                action=Action(action=['A', 'B', 'C']),
            ))
    return pipeline
# Per-SDX membership predicates, keyed by SDX node id. First predicate
# selects 'srcs' members, second selects 'dests'. These bands were
# duplicated verbatim across every scenario in the original code.
_SDX_RANGES = {
    20: (lambda d: 42 <= d <= 44 or 60 <= d <= 66,
         lambda d: d < 42 or 44 < d < 60 or d > 66),
    23: (lambda d: 44 <= d <= 50 or 60 <= d <= 73,
         lambda d: d < 44 or 50 < d < 60 or d > 73),
    24: (lambda d: 44 <= d <= 56 or 60 <= d <= 79,
         lambda d: d < 44 or 56 < d < 60),
    27: (lambda d: 51 <= d <= 58 or 74 <= d <= 79,
         lambda d: d < 51 or 58 < d < 74),
    29: (lambda d: 57 <= d <= 59,
         lambda d: d < 57 or d > 59),
}


def _build_sdx(G, peering):
    """Build one SDX scenario dict from a {node_id: peers} spec.

    Fresh lists are built per call so scenarios never share mutable state.

    :param G: topology graph exposing ``node``
    :param peering: mapping from SDX node id to its peer list
    :return: ``{node_id: {'srcs': [...], 'dests': [...], 'peers': [...]}}``
    """
    sdx = {}
    for node_id, peers in peering.items():
        is_src, is_dest = _SDX_RANGES[node_id]
        sdx[node_id] = {
            'srcs': [d for d in G.node if is_src(d)],
            'dests': [d for d in G.node if is_dest(d)],
            'peers': list(peers),
        }
    return sdx


def main(topo_filepath, flow_filepath):
    """Run the SIDR deflection simulation for the 2/3/4/5-SDX scenarios.

    :param topo_filepath: topology file path, consumed by ``read_topo``
    :param flow_filepath: flow file path, consumed by ``read_flows``
    """
    G = read_topo(topo_filepath)
    flows = read_flows(flow_filepath)
    # Scenarios differ only in which SDXes participate and who peers
    # with whom; the src/dest bands per SDX are fixed (_SDX_RANGES).
    scenarios = [
        ('2 SDX', {23: [24], 24: [23]}),
        ('3 SDX', {20: [23, 24], 23: [20, 24], 24: [20, 23]}),
        ('4 SDX', {20: [23, 24], 23: [20, 24], 24: [20, 23, 27],
                   27: [24]}),
        ('5 SDX', {20: [23, 24], 23: [20, 24], 24: [20, 23, 27],
                   27: [24, 29], 29: [27]}),
    ]
    for label, peering in scenarios:
        print('====== %s =======' % label)
        sidr_deflection_sim(G, flows, _build_sdx(G, peering))
def session_start(topo_filepath, flow_filepath, algorithm_type='1', triangle=None, **kwargs):
    """Run one reachability-evaluation session over a topology and flow set.

    Each digit present in *algorithm_type* selects an advertisement
    algorithm, evaluated on a fresh copy of the topology:
      '1' - FP-BGP  (fp_bgp_advertise)
      '2' - CGC-BGP (correct_bgp_advertise); also records the reachable
            flow set R_F for use by branch '4'
      '3' - SFP     (sfp_advertise); dumps tables to 'sfp-bgp-tables.json'
      '4' - SFP evaluated only on the flows CGC-BGP found reachable

    NOTE(review): branch '4' reads R_F, which is bound only by branch '2'.
    Calling with '4' in algorithm_type but without '2' raises NameError —
    confirm callers always include '2' when requesting '4'.

    :param topo_filepath: path consumed by ``read_topo``
    :param flow_filepath: path consumed by ``read_flows``
    :param algorithm_type: string of algorithm digits, e.g. '123'
    :param triangle: optional path parsed by ``read_triangle``
    :param kwargs: forwarded to ``generate_local_policies``
    """
    if triangle:
        triangle = read_triangle(triangle)
    G = read_topo(topo_filepath)
    F = read_flows(flow_filepath)
    # print(len(F))
    # ASRelationsReader(relationship_filepath).augment_to_topology(G)
    # Install per-node local policies on G before any copies are taken.
    generate_local_policies(G, triangle=triangle, **kwargs)
    # manual_policy(G)
    # dump_topo(G, 'results.yaml')
    # Find common reachable flows
    # H = G.copy()
    # H.ip_prefixes = G.ip_prefixes
    # initiate_ribs(H)
    # for i in range(10):
    #     fp_bgp_advertise(H)
    # F, _ = check_reachability(H, F, display=False)
    # H = G.copy()
    # H.ip_prefixes = G.ip_prefixes
    # initiate_ribs(H)
    # for i in range(10):
    #     correct_bgp_advertise(H)
    # F, _ = check_reachability(H, F, display=False)
    # H = G.copy()
    # H.ip_prefixes = G.ip_prefixes
    # initiate_ribs(H)
    # print('\t'.join(['hops'] + [str(i) for i in range(2, 11)] + ['loop', 'drop', 'transferred/bytes', 'failed/bytes']))
    if '1' in algorithm_type:
        # print("CGFP-BGP Evaluation")
        # cgfp_bgp_eval(G.copy(), F)
        # Work on a copy so each branch starts from the pristine topology.
        # NOTE(review): Graph.copy() appears not to carry ip_prefixes, so
        # it is reattached manually — same pattern in every branch.
        H = G.copy()
        H.ip_prefixes = G.ip_prefixes
        initiate_ribs(H)
        # Ten advertisement rounds; presumably enough for convergence —
        # TODO confirm against the topology diameter.
        for i in range(10):
            fp_bgp_advertise(H)
            # correct_bgp_advertise(H)
        # print('============================== RIB ============================')
        # report_rib(H, 23)
        # print('============================== LOCAL ============================')
        # report_local_policy(H, 29)
        # report_local_policy(H, 30)
        # print('CGFP', end='\t')
        check_reachability(H, F)
        # dump_tables(H, 'cgfp-bgp-tables.json')
    if '2' in algorithm_type:
        # print("CGC-BGP Evaluation")
        # cgc_bgp_eval(G.copy(), F)
        # manual_policy(G)
        H = G.copy()
        H.ip_prefixes = G.ip_prefixes
        initiate_ribs(H)
        for i in range(10):
            correct_bgp_advertise(H)
        # print('============================== RIB ============================')
        # report_rib(H, 23)
        # print('CGC', end='\t')
        # R_F (reachable flows) is consumed later by branch '4'.
        R_F, UR_F = check_reachability(H, F)
        # dump_tables(H, 'cgc-bgp-tables.json')
        # for f in UR_F[:20]:
        #     src = H.ip_prefixes[f['src_ip']]
        #     dst = H.ip_prefixes[f['dst_ip']]
        #     print(f, src, dst)
    if '3' in algorithm_type:
        # print("SFP Evaluation")
        # fg_sfp_eval(G.copy(), F)
        H = G.copy()
        H.ip_prefixes = G.ip_prefixes
        # report_rib(G, 29)
        initiate_ribs(H)
        # report_rib(H, 29)
        for i in range(10):
            sfp_advertise(H)
        # print('============================== RIB ============================')
        # report_rib(H, 23)
        # report_rib(H, 45)
        # report_rib(H, 68)
        # print('============================== Adj-RIBs-In ============================')
        # report_rib(H, 23, table='adj-ribs-in', neigh=48)
        # report_rib(H, 23, table='adj-ribs-in', neigh=45)
        # report_rib(H, 45, table='adj-ribs-in', neigh=68)
        # report_rib(H, 68, table='adj-ribs-in', neigh=45)
        # report_rib(H, 30, table='adj-ribs-in', neigh=29)
        # print('============================== AS 29 Read Local ============================')
        # print(read_local_rib(H, 29, '128.211.128.0/19', 80))
        # print('============================== AS 30 Read Local ============================')
        # print(read_local_rib(H, 30, '128.211.128.0/19', 80))
        # print('============================== LOCAL ============================')
        # report_local_policy(H)
        # print('SFP', end='\t')
        _, UR_F = check_reachability(H, F)
        dump_tables(H, 'sfp-bgp-tables.json')
        # for f in UR_F[:20]:
        #     src = H.ip_prefixes[f['src_ip']]
        #     dst = H.ip_prefixes[f['dst_ip']]
        #     print(f, src, dst)
    if '4' in algorithm_type:
        # print("SFP Evaluation on CGC-BGP Reachable Flows")
        H = G.copy()
        H.ip_prefixes = G.ip_prefixes
        initiate_ribs(H)
        for i in range(10):
            sfp_advertise(H)
        # Evaluates SFP against R_F from branch '2' (see NOTE in docstring).
        _, UR_F = check_reachability(H, R_F)
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # networkx is referenced only in the type comment below; importing it
    # lazily keeps this module importable when networkx is not installed.
    import networkx


def class_flows_src(flows, topo):
    # type: (list[dict[str, str | int]], networkx.Graph) -> dict[int, list[dict[str, str | int]]]
    """Group *flows* by the topology node that owns each flow's source IP.

    :param flows: flow records, each carrying a 'src_ip' key
    :param topo: graph exposing ``nodes`` and an ``ip_prefixes`` mapping
        from IP prefix to node id
    :return: mapping from every node id to the (possibly empty) list of
        flows originating at that node
    """
    src_flows = {node: [] for node in topo.nodes}
    for flow in flows:
        owner = topo.ip_prefixes[flow["src_ip"]]
        src_flows[owner].append(flow)
    return src_flows


if __name__ == '__main__':
    flows_filepath = sys.argv[1]
    topo_filepath = sys.argv[2]
    output_folder = sys.argv[3]
    flows = read_flows(flows_filepath)
    topo = read_topo(topo_filepath)
    src_flows = class_flows_src(flows, topo)
    for node, node_flows in src_flows.items():
        out_path = os.path.join(output_folder, 'flows-%d.json' % node)
        # Close the output file deterministically; the original leaked the
        # handle by passing open(...) straight into json.dump.
        with open(out_path, 'w') as out:
            json.dump(node_flows, out, indent=2, sort_keys=True)