Code Example #1
File: table2.py Project: minicz/spider
def process_network_AMPL_model(net_name,out_q):
	filename_res=topology_dir+"/results.txt."+net_name
	filename_net=topology_dir+"/network.xml."+net_name
	(G, pos, hosts, switches, mapping) = f_t_parser.parse_network_xml(filename=filename_net)
	(requests,faults) = f_t_parser.parse_ampl_results(filename=filename_res)
	print len(requests), 'requests loaded'
	print len(faults), 'faults loaded'
	print 'Network has', len(switches), 'switches,', G.number_of_edges()-len(hosts), 'links and', len(hosts), 'hosts'
	mn_topo = f_t_parser.networkx_to_mininet_topo(G, hosts, switches, mapping)
	ports_dict = f_t_parser.adapt_mn_topo_ports_to_old_API(mn_topo.ports)

	print "\n# Smart instance "+net_name+" with results from AMPL model..."
	(fault_ID, flow_entries_dict, flow_entries_with_timeout_dict, flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(requests,faults,ports_dict,match_flow=f_t_parser.get_mac_match_mininet,check_cache=False,filename=filename_res,confirm_cache_loading=False,dpctl_script=False)
	
	flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
	tot_flows = [flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys() if node!='global']
	'''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''

	D = len(requests)
	F = len(faults)
	print 'O(D*F) = %d*%d = %d'%(D,F,D*F)
	stats = [net_name+" AMPL model",{'min' : min(tot_flows) ,'avg' : sum(tot_flows)/float(len((tot_flows))) , 'max' : max(tot_flows)}]
	out_q.put(stats)
	with open("tmp/"+str(net_name)+" AMPL model.txt", "a+") as out_file:
		out_file.write(str(stats)+"\n")
	return stats
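
Each of these workers takes an out_q and reports via out_q.put(stats), which suggests a parallel driver built on multiprocessing. The caller is not shown in these excerpts; the following is a minimal sketch under that assumption (the run_all_AMPL name is illustrative, not from the source):

from multiprocessing import Process, Queue

def run_all_AMPL(net_names):
    # one worker per topology; each reports its stats through the queue
    # (sketch only: the real driver in table2.py is not shown here)
    out_q = Queue()
    procs = [Process(target=process_network_AMPL_model, args=(name, out_q))
             for name in net_names]
    for p in procs:
        p.start()
    # drain the queue before joining: joining first can deadlock if a
    # worker is still blocked putting its result
    results = [out_q.get() for _ in procs]
    for p in procs:
        p.join()
    return results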
Code Example #2
File: table2.py Project: minicz/spider
def process_network_E2E_PP(net_name,out_q):
	filename_res=topology_dir+"/results.txt."+net_name
	filename_net=topology_dir+"/network.xml."+net_name
	(G, pos, hosts, switches, mapping) = f_t_parser.parse_network_xml(filename=filename_net)
	(requests,faults) = f_t_parser.parse_ampl_results(filename=filename_res)
	print len(requests), 'requests loaded'
	print len(faults), 'faults loaded'
	print 'Network has', len(switches), 'switches,', G.number_of_edges()-len(hosts), 'links and', len(hosts), 'hosts'
	mn_topo = f_t_parser.networkx_to_mininet_topo(G, hosts, switches, mapping)
	ports_dict = f_t_parser.adapt_mn_topo_ports_to_old_API(mn_topo.ports)

	print "\n# Dumb instance "+net_name+" with end-to-end path protection (bp_node_disj=True...)"
	# we use requests only for its keys; primary/backup paths are calculated by execute_instance()
	demands = {dem : 1 for dem in requests.keys() }
	N = G.number_of_edges()-len(hosts)
	G_dir = G.to_directed()
	for e in G_dir.edges():
		G_dir.edge[e[0]][e[1]] = {'capacity': N*N*10}
	fc = execute_instance(G_dir, demands, bp_node_disj=True)
	(requests_E2E,faults_E2E) = create_requests_faults_dict(fc.pps,fc.bps)
	(fault_ID, flow_entries_dict, flow_entries_with_timeout_dict, flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(requests_E2E,faults_E2E,ports_dict,match_flow=f_t_parser.get_mac_match_mininet,check_cache=False,filename=filename_res+"E2E",confirm_cache_loading=False,dpctl_script=False)

	flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
	tot_flows = [flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys() if node!='global']
	'''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''
	stats = [net_name+" E2E PP",{'min' : min(tot_flows) ,'avg' : sum(tot_flows)/float(len((tot_flows))) , 'max' : max(tot_flows)}]
	out_q.put(stats)
	with open("tmp/"+str(net_name)+" E2E PP.txt", "a+") as out_file:
		out_file.write(str(stats)+"\n")
	return stats
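
Note that the G_dir.edge[u][v] indexer above is NetworkX 1.x API, removed in NetworkX 2.0. A sketch of the same uniform capacity assignment under NetworkX 2.x, which also avoids overwriting other edge attributes the way assigning a whole data dict does:

import networkx as nx

G_dir = G.to_directed()
# set a uniform 'capacity' on every arc (NetworkX 2.x style)
nx.set_edge_attributes(G_dir, N * N * 10, 'capacity')
# equivalently, arc by arc:
for u, v in G_dir.edges():
    G_dir.edges[u, v]['capacity'] = N * N * 10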
Code Example #3
File: table2.py Project: rubiruchi/spider
def process_network_E2E_PP(net_name, out_q):
    filename_res = topology_dir + "/results.txt." + net_name
    filename_net = topology_dir + "/network.xml." + net_name
    (G, pos, hosts, switches,
     mapping) = f_t_parser.parse_network_xml(filename=filename_net)
    (requests, faults) = f_t_parser.parse_ampl_results(filename=filename_res)
    print len(requests), 'requests loaded'
    print len(faults), 'faults loaded'
    print 'Network has', len(switches), 'switches,', G.number_of_edges() - len(
        hosts), 'links and', len(hosts), 'hosts'
    mn_topo = f_t_parser.networkx_to_mininet_topo(G, hosts, switches, mapping)
    ports_dict = f_t_parser.adapt_mn_topo_ports_to_old_API(mn_topo.ports)

    print "\n# Dumb instance " + net_name + " with end-to-end path protection (bp_node_disj=True...)"
    # we use requests only for its keys; primary/backup paths are calculated by execute_instance()
    demands = {dem: 1 for dem in requests.keys()}
    N = G.number_of_edges() - len(hosts)
    G_dir = G.to_directed()
    for e in G_dir.edges():
        G_dir.edge[e[0]][e[1]] = {'capacity': N * N * 10}
    fc = execute_instance(G_dir, demands, bp_node_disj=True)
    (requests_E2E, faults_E2E) = create_requests_faults_dict(fc.pps, fc.bps)
    (fault_ID, flow_entries_dict, flow_entries_with_timeout_dict,
     flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(
         requests_E2E,
         faults_E2E,
         ports_dict,
         match_flow=f_t_parser.get_mac_match_mininet,
         check_cache=False,
         filename=filename_res + "E2E",
         confirm_cache_loading=False,
         dpctl_script=False)

    flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
    tot_flows = [
        flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys()
        if node != 'global'
    ]
    '''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''
    stats = [
        net_name + " E2E PP", {
            'min': min(tot_flows),
            'avg': sum(tot_flows) / float(len((tot_flows))),
            'max': max(tot_flows)
        }
    ]
    out_q.put(stats)
    with open("tmp/" + str(net_name) + " E2E PP.txt", "a+") as out_file:
        out_file.write(str(stats) + "\n")
    return stats
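
execute_instance() itself is not shown in these excerpts. Per the comment above, with bp_node_disj=True it computes a primary path plus a node-disjoint backup path per demand; a minimal sketch of that computation (the function name and the shortest-path choice are assumptions, not the project's actual algorithm):

import networkx as nx

def disjoint_path_pair(G, src, dst):
    # sketch only: primary path first, then a backup that avoids the
    # primary's interior nodes
    primary = nx.shortest_path(G, src, dst)
    H = G.copy()
    H.remove_nodes_from(primary[1:-1])
    # raises networkx.NetworkXNoPath if no node-disjoint backup exists
    backup = nx.shortest_path(H, src, dst)
    return primary, backup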
Code Example #4
File: table2.py Project: rubiruchi/spider
def process_network_AMPL_model(net_name, out_q):
    filename_res = topology_dir + "/results.txt." + net_name
    filename_net = topology_dir + "/network.xml." + net_name
    (G, pos, hosts, switches,
     mapping) = f_t_parser.parse_network_xml(filename=filename_net)
    (requests, faults) = f_t_parser.parse_ampl_results(filename=filename_res)
    print len(requests), 'requests loaded'
    print len(faults), 'faults loaded'
    print 'Network has', len(switches), 'switches,', G.number_of_edges() - len(
        hosts), 'links and', len(hosts), 'hosts'
    mn_topo = f_t_parser.networkx_to_mininet_topo(G, hosts, switches, mapping)
    ports_dict = f_t_parser.adapt_mn_topo_ports_to_old_API(mn_topo.ports)

    print "\n# Smart instance " + net_name + " with results from AMPL model..."
    (fault_ID, flow_entries_dict, flow_entries_with_timeout_dict,
     flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(
         requests,
         faults,
         ports_dict,
         match_flow=f_t_parser.get_mac_match_mininet,
         check_cache=False,
         filename=filename_res,
         confirm_cache_loading=False,
         dpctl_script=False)

    flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
    tot_flows = [
        flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys()
        if node != 'global'
    ]
    '''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''

    D = len(requests)
    F = len(faults)
    print 'O(D*F) = %d*%d = %d' % (D, F, D * F)
    stats = [
        net_name + " AMPL model", {
            'min': min(tot_flows),
            'avg': sum(tot_flows) / float(len((tot_flows))),
            'max': max(tot_flows)
        }
    ]
    out_q.put(stats)
    with open("tmp/" + str(net_name) + " AMPL model.txt", "a+") as out_file:
        out_file.write(str(stats) + "\n")
    return stats
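
The per-switch min/avg/max expression recurs verbatim in every worker; a small helper that factors it out, with the same semantics (the synthetic 'global' aggregate entry excluded):

def summarize_tot_flows(flow_stats_dict):
    # per-switch flow-entry counts, excluding the aggregate 'global' entry
    tot_flows = [node_stats['tot_flows']
                 for node, node_stats in flow_stats_dict.items()
                 if node != 'global']
    return {'min': min(tot_flows),
            'avg': sum(tot_flows) / float(len(tot_flows)),
            'max': max(tot_flows)}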
Code Example #5
File: table2.py Project: rubiruchi/spider
def process_NxN_greedy(N, out_q):
    G, demands = create_square_network(N,
                                       link_capacity=N * N * 10,
                                       demand_volume=1)
    print "\n# Smart instance " + str(N) + "x" + str(
        N) + " with link cost function and bp_node_disj=False..."
    fc = execute_instance(G, demands, cost_func=cost_func_inv)
    ports_dict = create_ports_dict(G, demands)
    (requests, faults) = create_requests_faults_dict(fc.pps, fc.bps)
    # fictitious filename, used only for caching purposes
    filename = str(N) + 'X' + str(N) + 'greedy.txt'
    (fault_ID, flow_entries_dict, flow_entries_with_timeout_dict,
     flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(
         requests,
         faults,
         ports_dict,
         match_flow=f_t_parser.get_mac_match_mininet,
         check_cache=False,
         filename=filename,
         confirm_cache_loading=False,
         dpctl_script=False)

    flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
    tot_flows = [
        flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys()
        if node != 'global'
    ]
    '''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''

    D = len(demands)
    F = len(G.edges()) / 2 if isinstance(G, nx.DiGraph) else len(G.edges())
    print 'O(D*F) = %d*%d = %d' % (D, F, D * F)
    stats = [
        str(N) + "x" + str(N) + " greedy", {
            'min': min(tot_flows),
            'avg': sum(tot_flows) / float(len((tot_flows))),
            'max': max(tot_flows)
        }
    ]
    out_q.put(stats)
    with open("tmp/" + str(N) + "x" + str(N) + " greedy.txt",
              "a+") as out_file:
        out_file.write(str(stats) + "\n")
    return stats
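
create_square_network() is defined elsewhere in the project. A hypothetical sketch of what an N x N builder with uniform link capacity and unit-volume demands could look like; the all-pairs demand layout is an assumption, not the project's actual one:

import itertools
import networkx as nx

def create_square_network_sketch(N, link_capacity, demand_volume):
    # hypothetical reimplementation: an N x N grid with a uniform
    # 'capacity' on every link and one demand per node pair
    G = nx.grid_2d_graph(N, N)
    for u, v in G.edges():
        G[u][v]['capacity'] = link_capacity
    demands = {(s, d): demand_volume
               for s, d in itertools.combinations(G.nodes(), 2)}
    return G, demands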
Code Example #6
File: table2.py Project: rubiruchi/spider
def process_NxN_E2E_PP(N, out_q):
    G, demands = create_square_network(N,
                                       link_capacity=N * N * 10,
                                       demand_volume=1)
    print "\n# Dumb instance " + str(N) + "x" + str(
        N) + " with end-to-end path protection (bp_node_disj=True...)"
    fc = execute_instance(G, demands, bp_node_disj=True)
    ports_dict = create_ports_dict(G, demands)
    (requests, faults) = create_requests_faults_dict(fc.pps, fc.bps)
    # fictitious filename, used only for caching purposes
    filename = str(N) + 'X' + str(N) + 'E2E.txt'
    (fault_ID, flow_entries_dict, flow_entries_with_timeout_dict,
     flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(
         requests,
         faults,
         ports_dict,
         match_flow=f_t_parser.get_mac_match_mininet,
         check_cache=False,
         filename=filename,
         confirm_cache_loading=False,
         dpctl_script=False)

    flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
    tot_flows = [
        flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys()
        if node != 'global'
    ]
    '''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''
    stats = [
        str(N) + "x" + str(N) + " E2E PP", {
            'min': min(tot_flows),
            'avg': sum(tot_flows) / float(len((tot_flows))),
            'max': max(tot_flows)
        }
    ]
    out_q.put(stats)
    with open("tmp/" + str(N) + "x" + str(N) + " E2E PP.txt",
              "a+") as out_file:
        out_file.write(str(stats) + "\n")
    return stats
Code Example #7
File: table2.py Project: minicz/spider
def process_NxN_E2E_PP(N,out_q):
	G, demands = create_square_network(N, link_capacity=N*N*10, demand_volume=1)
	print "\n# Dumb instance "+str(N)+"x"+str(N)+" with end-to-end path protection (bp_node_disj=True...)"
	fc = execute_instance(G, demands, bp_node_disj=True)
	ports_dict = create_ports_dict(G, demands)
	(requests,faults) = create_requests_faults_dict(fc.pps,fc.bps)
	# fictitious filename, used only for caching purposes
	filename=str(N)+'X'+str(N)+'E2E.txt'
	(fault_ID, flow_entries_dict, flow_entries_with_timeout_dict, flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(requests,faults,ports_dict,match_flow=f_t_parser.get_mac_match_mininet,check_cache=False,filename=filename,confirm_cache_loading=False,dpctl_script=False)
	
	flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
	tot_flows = [flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys() if node!='global']
	'''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''
	stats = [str(N)+"x"+str(N)+" E2E PP",{'min' : min(tot_flows) ,'avg' : sum(tot_flows)/float(len((tot_flows))) , 'max' : max(tot_flows)}]
	out_q.put(stats)
	with open("tmp/"+str(N)+"x"+str(N)+" E2E PP.txt", "a+") as out_file:
		out_file.write(str(stats)+"\n")
	return stats
Code Example #8
File: table2.py Project: minicz/spider
def process_NxN_greedy(N,out_q):
	G, demands = create_square_network(N, link_capacity=N*N*10, demand_volume=1)	
	print "\n# Smart instance "+str(N)+"x"+str(N)+" with link cost function and bp_node_disj=False..."
	fc = execute_instance(G, demands, cost_func=cost_func_inv)
	ports_dict = create_ports_dict(G, demands)
	(requests,faults) = create_requests_faults_dict(fc.pps,fc.bps)
	# fictitious filename, used only for caching purposes
	filename=str(N)+'X'+str(N)+'greedy.txt'
	(fault_ID, flow_entries_dict, flow_entries_with_timeout_dict, flow_entries_with_burst_dict) = f_t_parser.generate_flow_entries_dict(requests,faults,ports_dict,match_flow=f_t_parser.get_mac_match_mininet,check_cache=False,filename=filename,confirm_cache_loading=False,dpctl_script=False)

	flow_stats_dict = f_t_parser.get_flow_stats_dict(flow_entries_dict)
	tot_flows = [flow_stats_dict[node]['tot_flows'] for node in flow_stats_dict.keys() if node!='global']
	'''print 'min',min(tot_flows)
	print 'avg',sum(tot_flows)/float(len((tot_flows)))
	print 'max',max(tot_flows)'''
	
	D = len(demands)
	F = len(G.edges())/2 if isinstance(G,nx.DiGraph) else len(G.edges())
	print 'O(D*F) = %d*%d = %d'%(D,F,D*F)
	stats = [str(N)+"x"+str(N)+" greedy",{'min' : min(tot_flows) ,'avg' : sum(tot_flows)/float(len((tot_flows))) , 'max' : max(tot_flows)}]
	out_q.put(stats)
	with open("tmp/"+str(N)+"x"+str(N)+" greedy.txt", "a+") as out_file:
		out_file.write(str(stats)+"\n")
	return stats
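
A porting note on the fault count above: len(G.edges())/2 relies on Python 2 integer division (each undirected link appears as two opposite arcs once the graph is a DiGraph); under Python 3 the same count needs //. A version-neutral sketch:

def count_links(G):
    # physical links: a DiGraph stores each link as two opposite arcs
    m = G.number_of_edges()
    return m // 2 if G.is_directed() else m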