def time_failures_latex():
    for i in range(len(networks)):
        print(
            "______________________________________________________________________________________\n"
        )
        print(" -------- ", networks[i][0], " --------")
        for perc_f in percentages_failure:
            val_t_rt = []
            n_fail = round((networks[i][1].number_of_nodes() / 100.0) * perc_f)
            count = 0
            for _ in range(nbr_exp_failure):
                net_copy = deepcopy(networks[i][1])
                count += 1
                print(f"\t{count}")
                # Failures
                list_fail = generate_random_dst(n_fail, 0, net_copy.number_of_nodes() - 1)
                _, time_rt = failure_list(net_copy, list_fail)
                val_t_rt.append(time_rt)
            avg_rt = round(mean(val_t_rt), 5)
            st_dev_rt = round(std(val_t_rt), 5)
            print(f"""\multirow{{2}}{{*}}{{{n_fail} ({perc_f}\%)}} &\\vc Average &\\vc {avg_rt} \\\ \cline{{2-3}}
                  &\\bc Stand. Dev. &\\bc {st_dev_rt} \\\ \hline""")
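# NOTE (sketch, assumption): generate_random_dst() is used throughout this file
# but is defined elsewhere in the project. The stand-in below only illustrates
# the behaviour its callers assume, i.e. drawing n distinct random node IDs from
# the inclusive range [low, high]; keep the project's own implementation.
def _generate_random_dst_sketch(n, low, high):
    """Illustrative stand-in for generate_random_dst(); not the original code."""
    return random.sample(range(low, high + 1), n)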
def protocol_measure_in_data_file():
    start_time = time.perf_counter()
    f = open(data_file, 'w')
    f.write("Network %Dest #Dest Protocol #Nodes #Leaves Height AvgBranchFact\n")
    f.close()
    count_pim = 0
    for network in networks:
        print(f"TIME: {round(time.perf_counter()-start_time, 3)} sec")
        print("______________________________________________________________________________________\n")
        print(" -------- ", network[0], " --------")
        for perc in percentagesV2:
            n_dest = round((network[1]/100.0)*perc)
            exp = []
            for _ in range(number_of_experiments):
                src = random.randint(0, network[1]-1)
                dst = generate_random_dst(n_dest, 0, network[1]-1)
                exp.append((src, dst))
            for protocol in protocols:
                if protocol == "s":
                    m_StatelessV2(data_file, exp, network[0], perc, n_dest)
                elif protocol == "p":
                    count_pim = m_pim_ssmV2(count_pim, data_file, exp, network[0], perc, n_dest)
                elif protocol == "b":
                    m_bierV2(data_file, exp, network[0], perc, n_dest)
                else:
                    print("unknown protocol")
                    exit(1)
    print(f"TOTAL TIME: {round(time.perf_counter()-start_time, 3)} sec")
def time_SL_measure_in_data_file():
    f = open(data_file, 'w')
    f.write("Network %Dest #Dest Time_SL(msec)\n")
    for network in networks:
        print(
            "______________________________________________________________________________________\n"
        )
        print(" -------- ", network[0], " --------")
        for perc in percentages_SL:
            n_dest = round((network[1].number_of_nodes() / 100.0) * perc)
            worst_network, worst_dst = create_worst_network(n_dest)
            for _ in range(nbr_exp_SL):
                # Compute current network case times
                src = random.randint(0, network[1].number_of_nodes() - 1)
                dst = generate_random_dst(n_dest, 0, network[1].number_of_nodes() - 1)
                _, times = encapsulate_pkt(network[1], src, "payload", dst)
                times *= 1000
                f.write(f"{network[0]} {perc} {n_dest} {times}\n")
                # Compute worst-case time for the same number of destinations
                _, worst_time = encapsulate_pkt(worst_network, 0, "payload", worst_dst)
                worst_time *= 1000
                f.write(f"Worst_case X {n_dest} {worst_time}\n")
    f.close()
def header_measure_in_data_file():
    f = open(data_file, 'w')
    f.write("Network %Dest #Dest 1st_header Avg_header\n")
    for network in networks:
        for perc in percentagesV2:
            n_dest = round((network[1].number_of_nodes() / 100.0) * perc)
            for _ in range(number_experiments):
                open(Log_file, 'w').close()  # clean log file
                src = random.randint(0, network[1].number_of_nodes() - 1)
                dst = generate_random_dst(n_dest, 0, network[1].number_of_nodes() - 1)
                ingress_process(network[1], src, "payload", dst, log=True)
                values = compute_valV2(Log_file)
                # segments are encoded on 16 bytes (1 segment == 16 bytes) + 8-byte header flags
                f.write(
                    f"{network[0]} {perc} {n_dest} {values['1st_header']*16+8} {values['Avg_header']*16+8}\n"
                )
    f.close()
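# Worked example of the header-size formula above (also reused by the
# minimisation and plotting routines below): a header carrying 5 segments
# occupies 5 * 16 + 8 = 88 bytes (16 bytes per segment plus the 8-byte header flags).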
def failure_measure_in_data_file():
    f = open(data_file_failure, 'w')
    f.write("Network %Failure #Failures Time_comp_RT(sec)\n")
    for i in range(len(networks)):
        print(
            "______________________________________________________________________________________\n"
        )
        print(" -------- ", networks[i][0], " --------")
        for perc_f in percentages_failure:
            n_fail = round((networks[i][1].number_of_nodes() / 100.0) * perc_f)
            for _ in range(nbr_exp_failure):
                net_copy = deepcopy(networks[i][1])
                # Failures
                list_fail = generate_random_dst(n_fail, 0, net_copy.number_of_nodes() - 1)
                _, time_rt = failure_list(net_copy, list_fail)
                f.write(f"{networks[i][0]} {perc_f} {n_fail} {time_rt}\n")
    f.close()
def header_min_measure_in_data_file():
    f = open(data_file_min, 'w')
    f.write("Network %Dest #Dest Min_strat 1st_header Avg_header\n")
    for network in networks:
        # using a prefix /48
        if network[0] == "ISP1":
            val_min_prefix = 6.75  # 1 segment = 6.75 bytes
            val_min_id = 2  # 1 segment = 2 bytes
        elif network[0] == "rf1239":
            val_min_prefix = 6.75
            val_min_id = 1.875
        elif network[0] == "Cogentco":
            val_min_prefix = 6.875
            val_min_id = 1.875
        else:
            print("unknown network")
            val_min_id = 0
            val_min_prefix = 0
        for perc in percentagesV2:
            n_dest = round((network[1].number_of_nodes() / 100.0) * perc)
            for _ in range(number_experiments):
                open(Log_file, 'w').close()  # clean log file
                src = random.randint(0, network[1].number_of_nodes() - 1)
                dst = generate_random_dst(n_dest, 0, network[1].number_of_nodes() - 1)
                ingress_process(network[1], src, "payload", dst, log=True)
                values = compute_valV2(Log_file)
                for min_strat in minimisation:
                    if min_strat == "No_minimisation":
                        f.write(
                            f"{network[0]} {perc} {n_dest} {min_strat} {values['1st_header']*16+8} {values['Avg_header']*16+8}\n"
                        )
                    elif min_strat == "rm_prefix":
                        f.write(
                            f"{network[0]} {perc} {n_dest} {min_strat} {values['1st_header']*val_min_prefix+8} {values['Avg_header']*val_min_prefix+8}\n"
                        )
                    elif min_strat == "id_minimisation":
                        f.write(
                            f"{network[0]} {perc} {n_dest} {min_strat} {values['1st_header']*val_min_id+8} {values['Avg_header']*val_min_id+8}\n"
                        )
                    else:
                        print("unknown minimisation strategy")
    f.close()
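# For comparison with the 88-byte example above, the same 5-segment header under
# the per-network minimisation factors used in this function would cost roughly
# 5 * 6.75 + 8 = 41.75 bytes with "rm_prefix" on ISP1, and 5 * 2 + 8 = 18 bytes
# with "id_minimisation".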
def strat_measure_in_data_file():
    start_time = time.perf_counter()
    f = open(data_file, 'w')
    f.write("Network %Dest #Dest Strategy #Nodes #Leaves Height Avg_branch_factor\n")
    f.close()
    for network in networks:
        print(f"TIME: {round(time.perf_counter()-start_time, 3)} sec")
        print("______________________________________________________________________________________\n")
        print(" -------- ", network[0], " --------")
        for perc in percentages:
            n_dest = round((network[1]/100.0)*perc)
            exp = []
            for _ in range(number_of_experiments):
                src = random.randint(0, network[1]-1)
                dst = generate_random_dst(n_dest, 0, network[1]-1)
                exp.append((src, dst))
            for strat in strategies:
                if strat == "table_phys" and network[0] != "Cogentco":
                    # only Cogentco supports the physical-distance strategy
                    continue
                strategy_measurementsV2(data_file, exp, network[0], perc, n_dest, strat)
    print(f"TOTAL TIME: {round(time.perf_counter()-start_time, 3)} sec")
def time_compute_seg_list_latex():
    for network in networks:
        print(
            "______________________________________________________________________________________\n"
        )
        print(" -------- ", network[0], " --------")
        for perc in percentages:
            n_dest = round((network[1].number_of_nodes() / 100.0) * perc)
            val = []
            for _ in range(nbr_exp_SL):
                src = random.randint(0, network[1].number_of_nodes() - 1)
                dst = generate_random_dst(n_dest, 0, network[1].number_of_nodes() - 1)
                _, times = encapsulate_pkt(network[1], src, "", dst)
                times *= 1000
                val.append(times)
            avg = round(mean(val), 5)
            st_dev = round(std(val), 5)
            print(f"""\multirow{{2}}{{*}}{{{n_dest} ({perc}\%)}} &\\vc Average &\\vc {avg} \\\ \cline{{2-3}}
                  &\\bc Stand. Dev. &\\bc {st_dev} \\\ \hline""")
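# For reference, each (perc, n_dest) iteration above prints a two-row LaTeX
# table fragment of the following shape (the numbers are illustrative only):
#   \multirow{2}{*}{38 (5\%)} &\vc Average &\vc 0.01234 \\ \cline{2-3}
#   &\bc Stand. Dev. &\bc 0.00089 \\ \hline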
def format_plot_data():
    for network in networks:
        _file = "plot/" + network[0] + "_Header_Size.data"
        f = open(_file, 'w')
        f.write(
            "# 1[number of dest] 2[avg first header size (byte)] 3[std first header size] 4[avg header size (byte)] 5[std header size]\n"
        )
        for perc in percentages:
            open(Log_file, 'w').close()  # clean log file
            n_dest = round((network[1].number_of_nodes() / 100.0) * perc)
            for _ in range(number_experiments):
                src = random.randint(0, network[1].number_of_nodes() - 1)
                dst = generate_random_dst(n_dest, 0, network[1].number_of_nodes() - 1)
                ingress_process(network[1], src, "", dst, log=True)
            values = compute_val(Log_file)
            # segments are encoded on 16 bytes (1 segment == 16 bytes) + 8-byte header flags
            f.write(
                f"{n_dest} {values['avg_first']*16+8} {values['std_first']*16} {values['avg']*16+8} {values['std']*16}\n"
            )
            print(f"Finished with {perc}%")
        f.close()
def protocol_measure_latexTab():
    count_pim = 0
    for network in networks:
        print("______________________________________________________________________________________\n")
        print(" -------- ", network[0], " --------")
        for perc in percentages:
            n_dest = round((network[1]/100.0)*perc)
            exp = []
            for _ in range(number_of_experiments):
                src = random.randint(0, network[1]-1)
                dst = generate_random_dst(n_dest, 0, network[1]-1)
                exp.append((src, dst))
            for protocol in protocols:
                if protocol == "s":
                    m_Stateless(perc, network[0], exp)
                elif protocol == "p":
                    count_pim = m_pim_ssm(count_pim, network[0], exp)
                elif protocol == "b":
                    m_bier(network[0], exp)
                else:
                    print("unknown protocol")
                    exit(1)
def strat_measure_latexTab():
    for network in networks:
        print("______________________________________________________________________________________\n")
        print(" -------- ", network[0], " --------")
        for perc in percentages:
            n_dest = round((network[1]/100.0)*perc)
            n_row = 10 if network[0] == "Cogentco" else 8
            print(f"\multirow{{{n_row}}}{{*}}{{{n_dest} ({perc}\%)}}")
            exp = []
            for _ in range(number_of_experiments):
                src = random.randint(0, network[1]-1)
                dst = generate_random_dst(n_dest, 0, network[1]-1)
                exp.append((src, dst))
            i = 0
            for strat in strategies:
                i += 1
                if strat == "table_phys" and network[0] != "Cogentco":
                    # only Cogentco supports the physical-distance strategy
                    continue
                strategy_measurements(perc, network[0], exp, strat)
                if i < len(strategies):
                    print("\cline{2-7}")
            print("\n\hline")
    # continuation of the loop over topology files (loop header defined above):
    # import each network module and register its graph under the file name
    if filename[-3:] != ".py":
        continue
    full_module_name = folder + "." + filename[:-3]
    mymodule = importlib.import_module(full_module_name)
    networks.append((mymodule.network, filename[:-3]))

print("Network Max_count_end Diameter")
for net in networks:
    # shortest-path lengths (in hops) from every node, then the per-node
    # eccentricities used by nx.diameter() below
    spl = {}
    list_nodes = list(net[0].nodes)
    for src in list_nodes:
        path = nx.single_source_dijkstra_path(net[0], src)
        paths_length = {}
        for e in path:
            paths_length[e] = len(path[e]) - 1
        spl[src] = paths_length
    e = nx.eccentricity(net[0], sp=spl)
    max_h = 0
    n_dest = round((net[0].number_of_nodes() / 100.0) * 80)
    for _ in range(100):
        src = random.randint(0, net[0].number_of_nodes() - 1)
        dst = generate_random_dst(n_dest, 0, net[0].number_of_nodes() - 1)
        tree = ingress_process(net[0], src, "", dst, return_tree=True)
        h = tree_height(tree)
        if h > max_h:
            max_h = h
    net_diameter = nx.diameter(net[0], e)
    print(f"{net[1]} {max_h-1} {net_diameter}")
    # sanity check: a multicast tree should never be deeper than the network diameter
    if max_h - 1 > net_diameter:
        print("property violated")
def header_measure_strategies_in_data_file():
    f = open(data_stategy_file, 'w')
    f.write(
        "Network %Dest #Dest Strategy Max_1st_header Avg_1st_header Avg_header Used_bytes\n"
    )
    for network in networks:
        if "groupNear" in strategies or "Near_SRHsLimit" in strategies:
            # both grouping strategies need the per-hop distances
            near_limit = math.ceil(nx.diameter(network[1]) / 5)
            distances = dict(
                nx.all_pairs_shortest_path_length(
                    network[1], cutoff=near_limit))  # distance per hop
        for perc in percentagesV2:
            n_dest = round((network[1].number_of_nodes() / 100.0) * perc)
            for _ in range(number_experiments):
                src = random.randint(0, network[1].number_of_nodes() - 1)
                dst = generate_random_dst(n_dest, 0, network[1].number_of_nodes() - 1)
                for strat in strategies:
                    open(Log_file, 'w').close()  # clean log file
                    if strat == "normal":
                        # no strategy -> one multicast tree
                        ingress_process(network[1], src, "payload", dst, log=True)
                    elif strat == "SRH_limit":
                        # one limited multicast tree + unicast forwarding
                        ingress_process_SRH_limit(network[1], src, "payload", dst, log=True, limit=SRH_limit)
                    elif strat == "SRHs_limit":
                        # limited multicast trees
                        ingress_process_SRHs_limit(network[1], src, "payload", dst, log=True, limit=SRHs_limit)
                    elif strat == "groupNear":
                        # group near destinations in a multicast tree
                        ingress_process_groupNear(network[1], src, "payload", dst, distances, log=True)
                    elif strat == "Near_SRHsLimit":
                        # group near destinations in a limited multicast tree
                        ingress_process_SRHs_limit_groupNear(network[1], src, "payload", dst, distances, log=True, limit=SRHs_limit)
                    else:
                        print(f"Unknown strategy: '{strat}'.")
                    values = compute_valV2_strategy(Log_file)
                    f.write(
                        f"{network[0]} {perc} {n_dest} {strat} {values['Max_1st_header']} {values['Avg_1st_header']} {values['Avg_header']} {values['Used_bytes']}\n"
                    )
    f.close()