Example #1
def do_metrics(options, topo, g, mylist):
    temp = []
    FlowSpace_domain = []  # FlowSpace rules table for domain-wide slicing
    FlowSpace_switch = []  # FlowSpace rules table for switch-wide slicing
    FlowSpace_port = []    # FlowSpace rules table for port-wide slicing
    query = []
    dst = []         # stores connection points as multiple destinations
    paths_temp = []  # stores disjoint paths from src to the connection points
    print "computing metrics for topo: %s" % topo
    options = parse_args()
    if options.weights_from_file:
        filename_weights = get_filename_weights(options)
        read_weights_from_file(g, filename_weights)
    #graph_util.construct_array(g, mylist)  # testing elements - please ignore
    #lookup_process.init_lookup(mylist)
    #lookup_process.read_query(query)
    #lookup_process.lookup_table(mylist, query)
    filename = get_filename(topo, options)
    filename_domain, filename_switch, filename_port = get_tablefilename(topo, options)
    data = {}  # data to be computed --> acceptance ratio, flowspace rules and number of flowspace rules
    bandwidth = options.bandwidth
    if bandwidth:
        data['Weights used'] = 'Bandwidth'
    else:
        data['Weights used'] = 'Propagation delay'
    apsp = nx.all_pairs_dijkstra_path_length(g)  # all-pairs path costs (Dijkstra)
    apsp_paths = nx.all_pairs_dijkstra_path(g)   # all pairs of Dijkstra paths
    if options.disjoint_paths:
        dst = []
        paths_temp = []
        connection_points = options.connection_points  # for future implementation of multi-domain environments
        if options.src in g:
            src = options.src
        else:
            src = options.dst
        graph_util.parse_points(g, dst, connection_points)
        #print dst
        counter = 0
        '''Disjoint paths computation'''
        for i in range(len(dst)):
            #print apsp[src][dst]
            temp1, temp2 = paths.vertex_disjoint_shortest_pair(g, src, dst[i])
            if temp1 is not None and temp2 is not None:
                length1 = get_length(apsp, temp1)
                paths_temp.append((temp1, length1, dst[i]))
                length2 = get_length(apsp, temp2)
                paths_temp.append((temp2, length2, dst[i]))
                counter = counter + 2
            elif temp1 is not None and temp2 is None:
                length = get_length(apsp, temp1)
                paths_temp.append((temp1, length, dst[i]))
                counter = counter + 1
        if counter == 0 or counter == 1:
            print "Could not find two or more paths to check if they are disjoint"
            print "The execution will now be stopped"
            raise SystemExit
        #print apsp[src][dst]
        paths_temp = sorted(paths_temp, key=itemgetter(1))
        path1, path2 = get_disjoint(g, paths_temp)
        if path1 is None or path2 is None:
            print "Could not establish a set of disjoint paths between the requested source and destination nodes"
            print "The execution will now be stopped"
            raise SystemExit

        print path1, path2
        path_temp1, cost1, dst1 = path1
        path_temp2, cost2, dst2 = path2
        apsp[src][dst1] = cost1
        apsp_paths[src][dst1] = path_temp1
        apsp[src][dst2] = cost2
        apsp_paths[src][dst2] = path_temp2

    '''Precomputations for metrics computation'''

    if options.reuse:
        star_path_counter = options.star_paths
        iters = options.simple_paths
        disjoints = options.disjoint
        dis_counter = 0
        unbound_ratio = options.unbound
        dst = []
        dis_paths = []
        paths_temp = []
        connection_points = options.connection_points
        graph_util.parse_points(g, dst, connection_points)
        for node in g.nodes():
            if dis_counter >= disjoints:
                break
            src = node
            counter = 0
            for i in range(len(dst)):
                #print apsp[src][dst]
                temp1, temp2 = paths.vertex_disjoint_shortest_pair(g, src, dst[i])
                if temp1 is not None and temp2 is not None:
                    length1 = get_length(apsp, temp1)
                    if length1 == -1:
                        break
                    paths_temp.append((temp1, length1, dst[i]))
                    length2 = get_length(apsp, temp2)
                    if length2 == -1:
                        break
                    paths_temp.append((temp2, length2, dst[i]))
                    counter = counter + 2
                elif temp1 is not None and temp2 is None:
                    length = get_length(apsp, temp1)
                    if length == -1:
                        break
                    paths_temp.append((temp1, length, dst[i]))
                    counter = counter + 1
            if counter == 0 or counter == 1:
                continue
            paths_temp = sorted(paths_temp, key=itemgetter(1))
            path1, path2 = get_disjoint(g, paths_temp)
            if path1 is not None and path2 is not None:
                dis_counter = dis_counter + 2
                dis_paths.append(path1[0])
                dis_paths.append(path2[0])

        if dis_counter == disjoints:
            print("-------Found %d disjoint paths" % dis_counter)
        else:
            print("-------Found %d disjoint paths out of %d that were requested" % (dis_counter, disjoints))
        # this call computes the acceptance ratio and generates non-overlapping flowspace rules
        evaluation.compute_metrics(FlowSpace_domain, FlowSpace_switch, FlowSpace_port, g, data, iters, dis_counter, dis_paths, star_path_counter, unbound_ratio)

        '''Creation of files containing flowspace rules in the /tables folder. One file is created for each slicing method.'''

        dirname = os.path.dirname(filename_domain)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_domain + "Flowspace table", FlowSpace_domain)
        dirname = os.path.dirname(filename_switch)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_switch + "Flowspace table", FlowSpace_switch)
        dirname = os.path.dirname(filename_port)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_port + "Flowspace table", FlowSpace_port)

    '''Creation of a file containing acceptance ratio results and the number of flowspace rules for each slicing method in the /acceptance_ratio folder.'''

    if options.use_prior:
        data = read_json_file(filename)
    else:
        if options.write:
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.mkdir(dirname)
            write_json_file(filename + 'acceptance_ratio', data)

    return data, filename
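
The function above leans on two networkx precomputations, all_pairs_dijkstra_path_length() and all_pairs_dijkstra_path(), and then ranks candidate (path, cost, destination) tuples by cost before testing them for disjointness. A minimal, self-contained sketch of that pattern (illustrative only; networkx 2.x returns iterators from these calls, while the example above targets an older release that returned dicts directly):

import networkx as nx
from operator import itemgetter

g = nx.Graph()
g.add_weighted_edges_from([(1, 2, 1.0), (2, 3, 2.0), (1, 3, 5.0)])

# dict() restores the {src: {dst: ...}} shape indexed as apsp[src][dst] above.
apsp = dict(nx.all_pairs_dijkstra_path_length(g))
apsp_paths = dict(nx.all_pairs_dijkstra_path(g))

# Rank candidate (path, cost, dst) tuples by cost, as do_metrics() does.
candidates = [(apsp_paths[1][d], apsp[1][d], d) for d in (2, 3)]
candidates = sorted(candidates, key=itemgetter(1))
print(candidates)  # [([1, 2], 1.0, 2), ([1, 2, 3], 3.0, 3)]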
Example #2
        # (Fragment: the tail of do_metrics() from Example #1.)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_domain + "Flowspace table", FlowSpace_domain)
        dirname = os.path.dirname(filename_switch)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_switch + "Flowspace table", FlowSpace_switch)
        dirname = os.path.dirname(filename_port)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_port + "Flowspace table", FlowSpace_port)

    '''Creation of a file containing acceptance ratio results and the number of flowspace rules for each slicing method in the /acceptance_ratio folder.'''

    if options.use_prior:
        data = read_json_file(filename)
    else:
        if options.write:
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.mkdir(dirname)
            write_json_file(filename + 'acceptance_ratio', data)

    return data, filename

if __name__ == '__main__':
    options = parse_args()
    for topo in options.topos:
        g = get_topo_graph(topo)
        # note: do_metrics() as shown in Example #1 also expects a mylist argument
        do_metrics(options, topo, g)
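
The write_json_file() and read_json_file() helpers called throughout these examples are not shown; a minimal sketch consistent with how they are called might look like the following (hypothetical implementations, not the project's own):

import json

def write_json_file(filename, data):
    # Serialize data to filename as human-readable JSON.
    with open(filename, 'w') as f:
        json.dump(data, f, indent=4)

def read_json_file(filename):
    # Load and return the JSON object stored in filename.
    with open(filename) as f:
        return json.load(f)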
Example #3
                            aspect_colors,
                            aspect_fcns,
                            "linear",
                            "linear",
                            None,
                            None,
                            filepath,
                            options.write,
                            ext=options.ext,
                            xlabel=xlabel,
                            ylabel=ylabel,
                            min_x=min_x,
                            max_x=max_x,
                            min_y=min_y,
                            max_y=max_y,
                            ylabel2=ylabel2,
                            y2_scale_factor=y2_scale_factor,
                            hlines=hlines)
            else:
                raise Exception("undefined ptype: %s" % ptype)

    if not options.write:
        plot.show()


if __name__ == "__main__":
    options = parse_args()
    print "loading JSON data..."
    stats = plot.load_stats(options)
    topo_name = options.input.split('/')[1]
    do_ranges(options, stats, None, topo_name)
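
Example #3 ends with the usual save-or-show switch: figures go to disk when options.write is set and are displayed interactively otherwise. A minimal matplotlib sketch of that pattern (the project's plot module is assumed to wrap something similar; finish_figure is a hypothetical name):

import matplotlib.pyplot as plt

def finish_figure(filepath, write, ext='png'):
    # Write the current figure to disk, or show it interactively.
    if write:
        plt.savefig('%s.%s' % (filepath, ext))
    else:
        plt.show()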
Example #4
def import_zoo_graph(topo):
    '''Given the name of a Topology Zoo graph, return (graph, usable, note).

    @return g: NetworkX Graph, or None on error
    @return usable: boolean: locations on all nodes and connected?
    @return note: error message or note about modifications
    '''
    filename = 'zoo/' + topo + '.gml'
    if not os.path.exists(filename):
        raise Exception("invalid topo path: %s" % filename)

    # Ignore old graphs
    if old_version(topo):
        return None, False, "Old version"
    if blacklisted(topo):
        return None, False, "Blacklisted topology"

    # Convert multigraphs to regular graphs; multiple edges don't affect
    # latency, but add complications when debugging.
    g = nx.Graph(nx.read_gml(filename, 'UTF-8', True))
    cc = nx.connected_components(g)
    if len(cc) != 1:
        if ok_disconn(topo):
            # Remove disconnected components and continue
            # Do a pass to find the largest CC
            max_component_size = 0
            for comp in cc:
                if len(comp) >= max_component_size:
                    max_component_size = len(comp)
            # Do a pass to remove all nodes in non-largest CCs.
            for comp in cc:
                if len(comp) != max_component_size:
                    for n in comp:
                        g.remove_node(n)
        elif known_disconn(topo):
            return None, False, "Known disconnected topology"
        else:
            return None, False, "Unknown disconnected topology"
    cc = nx.connected_components(g)
    assert len(cc) == 1

    if not has_a_location(g):
        if known_no_loc(topo):
            return None, False, "Known no-weight topo"
        else:
            return None, False, "Unknown no-weight topo"

    if g.number_of_nodes() <= 9:
        print "********%s: %s" % (topo, g.number_of_nodes())

    # Append weights
    if has_all_locs(g):
        options = parse_args()
        bandwidth = options.bandwidth
        if bandwidth:
            print("Weights used: Bandwidth")
            attach_bandwidth_weights(g)
        else:
            print("Weights used: Propagation delays")
            attach_weights(g)
        #print "dist between %s and %s is %s" % (src, dst, g[src][dst]["weight"])
        return g, True, None
    elif missing_locs_are_external(g):
        remove_external_nodes(g)
        attach_weights(g)
        return g, True, "OK - removed external nodes"
    elif missing_locs_are_hyperedges(g):
        return g, False, "OK - missing locs are only hyperedges"
    elif missing_locs_are_external_or_hyperedges(g):
        return g, False, "OK - missing locs are hyperedges or external"
    else:
        return g, False, "Missing location(s)"
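
The disconnected-topology branch above assumes the older networkx API, where connected_components() returned a list of node lists. With current networkx, which yields sets of nodes, the same largest-component trimming can be sketched as (illustrative, not project code):

import networkx as nx

def keep_largest_component(g):
    # Drop every node outside the largest connected component.
    components = list(nx.connected_components(g))
    largest = max(components, key=len)
    for comp in components:
        if comp is not largest:
            g.remove_nodes_from(comp)
    return g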
Example #5
def import_zoo_graph(topo):
    '''Given the name of a Topology Zoo graph, return (graph, usable, note).

    @return g: NetworkX Graph, or None on error
    @return usable: boolean: locations on all nodes and connected?
    @return note: error message or note about modifications
    '''
    filename = 'zoo/' + topo + '.gml'
    if not os.path.exists(filename):
        raise Exception("invalid topo path: %s" % filename)

    # Ignore old graphs
    if old_version(topo):
        return None, False, "Old version"
    if blacklisted(topo):
        return None, False, "Blacklisted topology"

    # Convert multigraphs to regular graphs; multiple edges don't affect
    # latency, but add complications when debugging.
    g = nx.Graph(nx.read_gml(filename, 'UTF-8', True))
    cc = nx.connected_components(g)
    if len(cc) != 1:
        if ok_disconn(topo):
            # Remove disconnected components and continue
            # Do a pass to find the largest CC
            max_component_size = 0
            for comp in cc:
                if len(comp) >= max_component_size:
                    max_component_size = len(comp)
            # Do a pass to remove all nodes in non-largest CCs.
            for comp in cc:
                if len(comp) != max_component_size:
                    for n in comp:
                        g.remove_node(n)
        elif known_disconn(topo):
            return None, False, "Known disconnected topology"
        else:
            return None, False, "Unknown disconnected topology"
    cc = nx.connected_components(g)
    assert len(cc) == 1

    if not has_a_location(g):
        if known_no_loc(topo):
            return None, False, "Known no-weight topo"
        else:
            return None, False, "Unknown no-weight topo"

    if g.number_of_nodes() <= 9:
        print "********%s: %s" % (topo, g.number_of_nodes())

    # Append weights
    if has_all_locs(g):
        options = parse_args()
        bandwidth = options.bandwidth
        if bandwidth:
            print("Weights used: Bandwidth")
            attach_bandwidth_weights(g)
        else:
            print("Weights used: Propagation delays")
            attach_weights(g)
        #print "dist between %s and %s is %s" % (src, dst, g[src][dst]["weight"])
        return g, True, None
    elif missing_locs_are_external(g):
        remove_external_nodes(g)
        attach_weights(g)
        return g, True, "OK - removed external nodes"
    elif missing_locs_are_hyperedges(g):
        return g, False, "OK - missing locs are only hyperedges"
    elif missing_locs_are_external_or_hyperedges(g):
        return g, False, "OK - missing locs are hyperedges or external"
    else:
        return g, False, "Missing location(s)"
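
For reference, any caller of import_zoo_graph() has to unpack the (graph, usable, note) triple. A hypothetical driver ('Aarnet' is only an illustrative Topology Zoo name):

g, usable, note = import_zoo_graph('Aarnet')
if usable:
    print('%d nodes, %d edges' % (g.number_of_nodes(), g.number_of_edges()))
else:
    print('skipped: %s' % note)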
Example #6
def do_metrics(options, topo, g, mylist):
    temp = []
    FlowSpace_domain = []  #FlowSpace Rules Table for domain wide slicing
    FlowSpace_switch = []  #FlowSpace Rules Table for switch wide slicing
    FlowSpace_port = []  #FlowSpace Rules Table for port wide
    query = []
    dst = []  #stores connection points as  multiple destinations
    paths_temp = []  #stores disjoint paths from src to connection points
    print "computing metricss for topo: %s" % topo
    options = parse_args()
    if options.weights_from_file:
        filename_weights = get_filename_weights(options)
        read_weights_from_file(g, filename_weights)
    #graph_util.construct_array(g,mylist) #testing elements-please ignore
    #lookup_process.init_lookup(mylist)
    #lookup_process.read_query(query)
    #lookup_process.lookup_table(mylist,query)
    filename = get_filename(topo, options)
    filename_domain, filename_switch, filename_port = get_tablefilename(topo, options)
    data = {}  # data to be computed --> acceptance ratio, flowspace rules and number of flowspace rules
    bandwidth = options.bandwidth
    if bandwidth:
        data['Weights used'] = 'Bandwidth'
    else:
        data['Weights used'] = 'Propagation delay'
    apsp = nx.all_pairs_dijkstra_path_length(g)  # all-pairs path costs (Dijkstra)
    apsp_paths = nx.all_pairs_dijkstra_path(g)  # all pairs of Dijkstra paths
    if options.disjoint_paths:
        dst = []
        paths_temp = []
        connection_points = []
        connection_points = options.connection_points  #for future implementation of multi-domain environments
        if options.src in g:
            src = options.src
        else:
            src = options.dst
        graph_util.parse_points(g, dst, connection_points)
        #print dst
        counter = 0
        '''Disjoint paths computation'''
        for i in range(len(dst)):
            #print apsp[src][dst]
            temp1, temp2 = paths.vertex_disjoint_shortest_pair(g, src, dst[i])
            if temp1 != None and temp2 != None:
                length1 = get_length(apsp, temp1)
                paths_temp.append((temp1, length1, dst[i]))
                length2 = get_length(apsp, temp2)
                paths_temp.append((temp2, length2, dst[i]))
                counter = counter + 2
            elif temp1 != None and temp2 == None:
                length = get_length(apsp, temp1)
                paths_temp.append((temp1, length, dst[i]))
                counter = counter + 1
        if counter == 0 or counter == 1:
            print "Could not find two or more paths to check if they are disjoint"
            print "The execution will know be stopped"
            raise SystemExit
        #print apsp[src][dst]
        paths_temp = sorted(paths_temp, key=itemgetter(1))
        path1, path2 = get_disjoint(g, paths_temp)
        if path1 == None or path2 == None:
            print("Could not establish a set of disjoint paths between the requested source and destination nodes")
            print "The execution will now be stopped"
            raise SystemExit

        print path1, path2
        path_temp1, cost1, dst1 = path1
        path_temp2, cost2, dst2 = path2
        apsp[src][dst1] = cost1
        apsp_paths[src][dst1] = path_temp1
        apsp[src][dst2] = cost2
        apsp_paths[src][dst2] = path_temp2
    '''Precomputations for metrics computation'''

    if options.reuse:
        star_path_counter = options.star_paths
        iters = options.simple_paths
        disjoints = options.disjoint
        dis_counter = 0
        unbound_ratio = options.unbound
        dst = []
        dis_paths = []
        paths_temp = []
        connection_points = []
        connection_points = options.connection_points
        graph_util.parse_points(g, dst, connection_points)
        for node in g.nodes():
            if dis_counter >= disjoints:
                break
            src = node
            counter = 0
            for i in range(len(dst)):
                #print apsp[src][dst]
                temp1, temp2 = paths.vertex_disjoint_shortest_pair(
                    g, src, dst[i])
                if temp1 != None and temp2 != None:
                    length1 = get_length(apsp, temp1)
                    if length1 == -1:
                        break
                    paths_temp.append((temp1, length1, dst[i]))
                    length2 = get_length(apsp, temp2)
                    if length2 == -1:
                        break
                    paths_temp.append((temp2, length2, dst[i]))
                    counter = counter + 2
                elif temp1 != None and temp2 == None:
                    length = get_length(apsp, temp1)
                    if length == -1:
                        break
                    paths_temp.append((temp1, length, dst[i]))
                    counter = counter + 1
            if counter == 0 or counter == 1:
                continue
            paths_temp = sorted(paths_temp, key=itemgetter(1))
            path1, path2 = get_disjoint(g, paths_temp)
            if path1 != None and path2 != None:
                dis_counter = dis_counter + 2
                dis_paths.append(path1[0])
                dis_paths.append(path2[0])

        if dis_counter == disjoints:
            print("-------Found %d disjoint paths" % dis_counter)
        else:
            print("-------Found %d disjoint paths out of %d that were requested" % (dis_counter, disjoints))
        # this call computes the acceptance ratio and generates non-overlapping flowspace rules
        evaluation.compute_metrics(FlowSpace_domain, FlowSpace_switch, FlowSpace_port, g, data, iters, dis_counter, dis_paths, star_path_counter, unbound_ratio)
        '''Creation of files containing flowspace rules in the /tables folder. One file is created for each slicing method.'''

        dirname = os.path.dirname(filename_domain)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_domain + "Flowspace table", FlowSpace_domain)
        dirname = os.path.dirname(filename_switch)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_switch + "Flowspace table", FlowSpace_switch)
        dirname = os.path.dirname(filename_port)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_port + "Flowspace table", FlowSpace_port)
    '''Creation of a file containing acceptance ratio results and the number of flowspace rules for each slicing method in the /acceptance_ratio folder.'''

    if options.use_prior:
        data = read_json_file(filename)
    else:
        if options.write:
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.mkdir(dirname)
            write_json_file(filename + 'acceptance_ratio', data)

    return data, filename
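
get_disjoint() and paths.vertex_disjoint_shortest_pair() are project helpers not shown in these examples. The disjointness test they presumably rely on is simple to state: two paths with the same endpoints are vertex-disjoint when they share no interior nodes. An illustrative check (an assumption about the helpers' semantics, not their actual code):

def vertex_disjoint(p1, p2):
    # The paths share their endpoints, so compare only interior nodes.
    return set(p1[1:-1]).isdisjoint(p2[1:-1])

print(vertex_disjoint([1, 2, 4], [1, 3, 4]))  # True
print(vertex_disjoint([1, 2, 4], [1, 2, 4]))  # False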