Example #1
def OS3EWeightedGraph():

    data = {}
    g = OS3EGraph()
    longit = {}
    lat = {}
    # Get locations
    if os.path.isfile(LATLONG_FILE):
        print "Using existing lat/long file"
        data = read_json_file(LATLONG_FILE)
    else:
        print "Generating new lat/long file"
        for n in g.nodes():
            data[n] = get_lat_long(n)
        write_json_file(LATLONG_FILE, data)

    for node in g.nodes():
        latit = float(data[node]["Latitude"])
        lon = float(data[node]["Longitude"])
        lat[node] = latit
        longit[node] = lon
    nx.set_node_attributes(g, "Latitude", lat)
    nx.set_node_attributes(g, "Longitude", longit)

    # Append weights
    for src, dst in g.edges():
        g[src][dst]["weight"] = dist_in_miles(data, src, dst)
        # print "%s to %s: %s" % (src, dst, g[src][dst]["weight"])

    return g
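
A minimal usage sketch (assuming OS3EWeightedGraph is importable from the project's os3e_weighted module; the import path is an assumption, not shown in the example):

from os3e_weighted import OS3EWeightedGraph

g = OS3EWeightedGraph()
for src, dst in list(g.edges())[:3]:
    # each edge now carries a great-circle distance in miles as its weight
    print("%s -> %s: %s" % (src, dst, g[src][dst]["weight"]))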
Example #2
def do_plot(options, stats, g, write_filepath):
    city_data = None
    if os.path.isfile(LATLONG_FILE):
        city_data = read_json_file(LATLONG_FILE)

    data = {}
    if not write_filepath:
        write_filepath = get_output_filepath(options.input)
    write_filepath += "_map_"

    print "reformatting data & doing plot..."
    for i, c in enumerate(stats["group"]):
        if options.max and i >= options.max:
            break
        data[str(c)] = stats["data"][str(c)]
        metric_data = []
        for metric in options.metrics:
            metric_data.append(data[str(c)][metric]["lowest_combo"])
        write_map(
            g,
            city_data,
            options.metrics,
            metric_data,
            write_filepath + str(c),
            options.write,
            options.ext,
            options.labels,
            color=COLORS[i],
        )
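
Note that write_map receives color=COLORS[i], so a stats file with more groups than COLORS has entries will raise an IndexError unless options.max caps the loop. A defensive variant (an assumption, not part of the original) would cycle the palette instead:

            color=COLORS[i % len(COLORS)],  # wrap around rather than overrun the palette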
Example #3
def OS3EWeightedGraph():

    data = {}
    g = OS3EGraph()
    longit = {}
    lat = {}
    # Get locations
    if os.path.isfile(LATLONG_FILE):
        print "Using existing lat/long file"
        data = read_json_file(LATLONG_FILE)
    else:
        print "Generating new lat/long file"
        for n in g.nodes():
            data[n] = get_lat_long(n)
        write_json_file(LATLONG_FILE, data)

    for node in g.nodes():
        latit = float(data[node]["Latitude"])
        lon = float(data[node]["Longitude"])
        lat[node] = latit
        longit[node] = lon
    nx.set_node_attributes(g, 'Latitude', lat)
    nx.set_node_attributes(g, 'Longitude', longit)

    # Append weights
    for src, dst in g.edges():
        g[src][dst]["weight"] = dist_in_miles(data, src, dst)
        #print "%s to %s: %s" % (src, dst, g[src][dst]["weight"])

    return g
Example #4
def do_metrics(options, topo, g):
    '''Compute the metrics for a single topology.'''

    print("==========options")
    print(options)
    print("computing metricss for topo: %s" % topo)
    controllers = get_controllers(g, options)
    filename = get_filename(topo, options, controllers)

    data = {}  # See top for data schema details.
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)

    extra_params = get_extra_params(g)
    if options.use_prior:
        data = read_json_file(filename)
    else:
        start = time.time()
        weighted = True
        metrics.run_all_combos(options.metrics, g, controllers, data, apsp,
                               apsp_paths, weighted, options.write_dist,
                               options.write_combos, extra_params,
                               options.processes, options.multiprocess,
                               options.chunksize, options.median)
        total_duration = time.time() - start
        print("%0.6f" % total_duration)

    if not options.dist_only:
        metrics.run_greedy_informed(data, g, apsp, options.weighted)
        metrics.run_greedy_alg_dict(
            data, g, 'greedy-cc', 'latency',
            nx.closeness_centrality(g, weighted_edges=options.weighted), apsp,
            options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-dc', 'latency',
                                    nx.degree_centrality(g), apsp,
                                    options.weighted)
        for i in [10, 100, 1000]:
            metrics.run_best_n(data, g, apsp, i, options.weighted)
            metrics.run_worst_n(data, g, apsp, i, options.weighted)

    print(
        "*******************************************************************")

    # Ignore the actual combinations in CSV outputs as well as single points.
    exclude = ["distribution", "metric", "group", "id"]
    if not options.write_combos:
        exclude += ['highest_combo', 'lowest_combo']

    if options.write:
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename + '.json', data)
        if options.write_csv:
            write_csv_file(filename, data["data"], exclude=exclude)
            if options.write_dist:
                write_dist_csv_file(filename + '_dist', data["data"], exclude)

    return data, filename
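
These examples index apsp[src][dst] directly, which matches NetworkX 1.x, where all_pairs_dijkstra_path_length and all_pairs_dijkstra_path return dicts. On NetworkX 2.0 and later both calls return iterators, so a port would need to materialize them first; a compatibility sketch, not part of the original:

apsp = dict(nx.all_pairs_dijkstra_path_length(g))   # {source: {target: length}}
apsp_paths = dict(nx.all_pairs_dijkstra_path(g))    # {source: {target: [path]}}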
Example #5
File: metrics.py Project: NKSG/cpp
def do_metrics(options, topo, g):
    '''Compute the metrics for a single topology.'''

    print "computing metricss for topo: %s" % topo
    controllers = get_controllers(g, options)
    filename = get_filename(topo, options, controllers)

    data = {}  # See top for data schema details.
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)

    extra_params = get_extra_params(g)
    if options.use_prior:
        data = read_json_file(filename)
    else:
        start = time.time()
        weighted = True
        metrics.run_all_combos(options.metrics, g, controllers, data, apsp,
                               apsp_paths, weighted, options.write_dist,
                               options.write_combos, extra_params, options.processes,
                               options.multiprocess, options.chunksize, options.median)
        total_duration = time.time() - start
        print "%0.6f" % total_duration

    if not options.dist_only:
        metrics.run_greedy_informed(data, g, apsp, options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-cc', 'latency', nx.closeness_centrality(g, weighted_edges = options.weighted), apsp, options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-dc', 'latency', nx.degree_centrality(g), apsp, options.weighted)
        for i in [10, 100, 1000]:
            metrics.run_best_n(data, g, apsp, i, options.weighted)
            metrics.run_worst_n(data, g, apsp, i, options.weighted)

    print "*******************************************************************"

    # Ignore the actual combinations in CSV outputs as well as single points.
    exclude = ["distribution", "metric", "group", "id"]
    if not options.write_combos:
        exclude += ['highest_combo', 'lowest_combo']

    if options.write:
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename + '.json', data)
        if options.write_csv:
            write_csv_file(filename, data["data"], exclude = exclude)
            if options.write_dist:
                write_dist_csv_file(filename + '_dist', data["data"], exclude)

    return data, filename
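
The weighted_edges keyword passed to nx.closeness_centrality here was removed in later NetworkX releases; on a modern version the equivalent behavior is selected through the distance argument (a sketch assuming edge weights are stored under the 'weight' key):

cc = nx.closeness_centrality(g, distance='weight' if options.weighted else None)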
Example #6
def read_weights_from_file(g, filename):
    weights = {}
    weights = read_json_file(filename)
    for src, dst in g.edges():
        tuples = [weights.get(src)]
        if tuples[0] != None:
            try:
                index = tuples[0].index(dst)
            except ValueError:
                continue
            else:
                g[src][dst]['weight'] = tuples[0][index + 1]
        tuples = [weights.get(dst)]
        if tuples[0] != None:
            try:
                index = tuples[0].index(src)
            except ValueError:
                continue
            g[src][dst]['weight'] = tuples[0][index + 1]
    return
Example #7
def do_plot(options, stats, g, write_filepath):
    city_data = None
    if os.path.isfile(LATLONG_FILE):
        city_data = read_json_file(LATLONG_FILE)

    data = {}
    if not write_filepath:
        write_filepath = get_output_filepath(options.input)
    write_filepath += '_map_'

    print "reformatting data & doing plot..."
    for i, c in enumerate(stats['group']):
        if options.max and i >= options.max:
            break
        data[str(c)] = stats['data'][str(c)]
        metric_data = []
        for metric in options.metrics:
            metric_data.append(data[str(c)][metric]['lowest_combo'])
        write_map(g, city_data, options.metrics, metric_data, write_filepath + str(c), options.write,
                  options.ext, options.labels, color = COLORS[i])
Example #8
def read_weights_from_file(g, filename):
    weights = {}
    weights = read_json_file(filename)
    for src, dst in g.edges():
        tuples = [weights.get(src)]
        if tuples[0] != None:
            try:
                index = tuples[0].index(dst)
            except ValueError:
                continue
            else:
                g[src][dst]['weight'] = tuples[0][index + 1]
        tuples = [weights.get(dst)]
        if tuples[0] != None:
            try:
                index = tuples[0].index(src)
            except ValueError:
                continue
            g[src][dst]['weight'] = tuples[0][index + 1]
    return
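
The tuples[0].index(dst) / tuples[0][index + 1] lookup implies a weights file keyed by node name, where each value is a flat list alternating neighbor and weight, so the weight sits immediately after the matched neighbor. An inferred illustration of the expected JSON (the node names and numbers are invented):

{
    "Chicago": ["Kansas City", 523.0, "Indianapolis", 182.7],
    "Sunnyvale": ["Denver", 1254.1]
}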
Example #9
File: os3e_weighted.py Project: NKSG/cpp
def OS3EWeightedGraph():

    data = {}
    g = OS3EGraph()

    # Get locations
    if os.path.isfile(LATLONG_FILE):
        print "Using existing lat/long file"
        data = read_json_file(LATLONG_FILE)
    else:
        print "Generating new lat/long file"
        for n in g.nodes():
            data[n] = get_lat_long(n)
        write_json_file(LATLONG_FILE, data)

    # Append weights
    for src, dst in g.edges():
        g[src][dst]["weight"] = dist_in_miles(data, src, dst)
        #print "%s to %s: %s" % (src, dst, g[src][dst]["weight"])

    return g
Example #10
def OS3EWeightedGraph():

    data = {}
    g = OS3EGraph()

    # Get locations
    if os.path.isfile(LATLONG_FILE):
        print("Using existing lat/long file")
        data = read_json_file(LATLONG_FILE)
    else:
        print("Generating new lat/long file")
        for n in g.nodes():
            data[n] = get_lat_long(n)
        write_json_file(LATLONG_FILE, data)

    # Append weights
    for src, dst in g.edges():
        g[src][dst]["weight"] = dist_in_miles(data, src, dst)
        #print "%s to %s: %s" % (src, dst, g[src][dst]["weight"])

    return g
Example #11
def do_metrics(options, topo, g, mylist):
    temp = []
    FlowSpace_domain = []  # FlowSpace rules table for domain-wide slicing
    FlowSpace_switch = []  # FlowSpace rules table for switch-wide slicing
    FlowSpace_port = []  # FlowSpace rules table for port-wide slicing
    query = []
    dst = []  # stores connection points as multiple destinations
    paths_temp = []  # stores disjoint paths from src to connection points
    print "computing metrics for topo: %s" % topo
    options = parse_args()
    if options.weights_from_file:
        filename_weights = get_filename_weights(options)
        read_weights_from_file(g, filename_weights)
    #graph_util.construct_array(g,mylist) #testing elements-please ignore
    #lookup_process.init_lookup(mylist)
    #lookup_process.read_query(query)
    #lookup_process.lookup_table(mylist,query)
    filename = get_filename(topo, options)
    filename_domain, filename_switch, filename_port = get_tablefilename(topo, options)
    data = {}  # Data to be computed --> acceptance ratio, flowspace rules and number of flowspace rules
    bandwidth = options.bandwidth
    if bandwidth:
        data['Weights used'] = 'Bandwidth'
    else:
        data['Weights used'] = 'Propagation delay'
    apsp = nx.all_pairs_dijkstra_path_length(g)  # weight computation according to Dijkstra's algorithm
    apsp_paths = nx.all_pairs_dijkstra_path(g)  # all pairs of Dijkstra paths
    if options.disjoint_paths:
        dst = []
        paths_temp = []
        connection_points = options.connection_points  # for future implementation of multi-domain environments
        if options.src in g:
            src = options.src
        else:
            src = options.dst
        graph_util.parse_points(g, dst, connection_points)
        #print dst
        counter = 0
        '''Disjoint paths computation'''
        for i in range(len(dst)):
            #print apsp[src][dst]
            temp1, temp2 = paths.vertex_disjoint_shortest_pair(g, src, dst[i])
            if temp1 != None and temp2 != None:
                length1 = get_length(apsp, temp1)
                paths_temp.append((temp1, length1, dst[i]))
                length2 = get_length(apsp, temp2)
                paths_temp.append((temp2, length2, dst[i]))
                counter = counter + 2
            elif temp1 != None and temp2 == None:
                length = get_length(apsp, temp1)
                paths_temp.append((temp1, length, dst[i]))
                counter = counter + 1
        if counter == 0 or counter == 1:
            print "Could not find two or more paths to check if they are disjoint"
            print "The execution will now be stopped"
            raise SystemExit
        #print apsp[src][dst]
        paths_temp = sorted(paths_temp, key=itemgetter(1))
        path1, path2 = get_disjoint(g, paths_temp)
        if path1 == None or path2 == None:
            print "Could not establish a set of disjoint paths between the requested source and destination nodes"
            print "The execution will now be stopped"
            raise SystemExit

        print path1, path2
        path_temp1, cost1, dst1 = path1
        path_temp2, cost2, dst2 = path2
        apsp[src][dst1] = cost1
        apsp_paths[src][dst1] = path_temp1
        apsp[src][dst2] = cost2
        apsp_paths[src][dst2] = path_temp2

    '''Precomputations for metrics computation'''

    if options.reuse:
        star_path_counter = options.star_paths
        iters = options.simple_paths
        disjoints = options.disjoint
        dis_counter = 0
        unbound_ratio = options.unbound
        dst = []
        dis_paths = []
        paths_temp = []
        connection_points = options.connection_points
        graph_util.parse_points(g, dst, connection_points)
        for node in g.nodes():
            if dis_counter >= disjoints:
                break
            src = node
            counter = 0
            for i in range(len(dst)):
                #print apsp[src][dst]
                temp1, temp2 = paths.vertex_disjoint_shortest_pair(g, src, dst[i])
                if temp1 != None and temp2 != None:
                    length1 = get_length(apsp, temp1)
                    if length1 == -1:
                        break
                    paths_temp.append((temp1, length1, dst[i]))
                    length2 = get_length(apsp, temp2)
                    if length2 == -1:
                        break
                    paths_temp.append((temp2, length2, dst[i]))
                    counter = counter + 2
                elif temp1 != None and temp2 == None:
                    length = get_length(apsp, temp1)
                    if length == -1:
                        break
                    paths_temp.append((temp1, length, dst[i]))
                    counter = counter + 1
            if counter == 0 or counter == 1:
                continue
            paths_temp = sorted(paths_temp, key=itemgetter(1))
            path1, path2 = get_disjoint(g, paths_temp)
            if path1 != None and path2 != None:
                dis_counter = dis_counter + 2
                dis_paths.append(path1[0])
                dis_paths.append(path2[0])

        if dis_counter == disjoints:
            print("-------Found %d disjoint paths" % dis_counter)
        else:
            print("-------Found %d disjoint paths out of %d that were requested" % (dis_counter, disjoints))
        # computes the acceptance ratio and generates non-overlapping flowspace rules
        evaluation.compute_metrics(FlowSpace_domain, FlowSpace_switch, FlowSpace_port, g, data, iters,
                                   dis_counter, dis_paths, star_path_counter, unbound_ratio)

        '''Creation of files containing flowspace rules in the /tables folder. One file is created for each slicing method.'''

        dirname = os.path.dirname(filename_domain)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_domain + "Flowspace table", FlowSpace_domain)
        dirname = os.path.dirname(filename_switch)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_switch + "Flowspace table", FlowSpace_switch)
        dirname = os.path.dirname(filename_port)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_port + "Flowspace table", FlowSpace_port)

    '''Creation of a file containing acceptance ratio results and the number of flowspace rules for each slicing method in the /acceptance_ratio folder.'''

    if options.use_prior:
        data = read_json_file(filename)
    else:
        if options.write:
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.mkdir(dirname)
            write_json_file(filename + 'acceptance_ratio', data)

    return data, filename
Example #12
if COMPUTE_START:
    controllers += range(1, NUM_FROM_START + 1)

if COMPUTE_END:
    controllers += (range(g.number_of_nodes() - NUM_FROM_END + 1,
                          g.number_of_nodes() + 1))

if WEIGHTED:
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)
else:
    apsp = nx.all_pairs_shortest_path_length(g)
    apsp_paths = nx.all_pairs_shortest_path(g)

if USE_PRIOR_OPTS:
    data = read_json_file(PRIOR_OPTS_FILENAME)
else:
    all_data = {}  # data, keyed by # failures
    for failures in range(1, MAX_FAILURES + 1):
        # data['data'][num controllers] = [latency:latency, nodes:[best-pos node(s)]]
        # data['metrics'] = [list of metrics included]
        # latency is also equal to 1/closeness centrality.
        all_data[failures] = {}
        extra_params['max_failures'] = failures
        metrics.run_all_combos(METRICS, g, controllers, all_data[failures],
                               apsp, apsp_paths, WEIGHTED, WRITE_DIST,
                               WRITE_COMBOS, extra_params)
    # extract ordering of availability
    extract = {}  # extract[1] = data for 1 failure
    failures = range(1, MAX_FAILURES + 1)
    for j in failures:
        extract[j] = []
Example #13
def do_metrics(options, topo, g, mylist):
    temp = []
    FlowSpace_domain = []  #FlowSpace Rules Table for domain wide slicing
    FlowSpace_switch = []  #FlowSpace Rules Table for switch wide slicing
    FlowSpace_port = []  #FlowSpace Rules Table for port wide
    query = []
    dst = []  #stores connection points as  multiple destinations
    paths_temp = []  #stores disjoint paths from src to connection points
    print "computing metricss for topo: %s" % topo
    options = parse_args()
    if options.weights_from_file:
        filename_weights = get_filename_weights(options)
        read_weights_from_file(g, filename_weights)
    #graph_util.construct_array(g,mylist) #testing elements-please ignore
    #lookup_process.init_lookup(mylist)
    #lookup_process.read_query(query)
    #lookup_process.lookup_table(mylist,query)
    filename = get_filename(topo, options)
    filename_domain, filename_switch, filename_port = get_tablefilename(
        topo, options)
    data = {}  # Data to be computed --> acceptance ratio, flowspace rules and number of flowspace rules
    bandwidth = options.bandwidth
    if bandwidth:
        data['Weights used'] = 'Bandwidth'
    else:
        data['Weights used'] = 'Propagation delay'
    apsp = nx.all_pairs_dijkstra_path_length(
        g)  # weight computation according to dijkstra algorithm
    apsp_paths = nx.all_pairs_dijkstra_path(g)  # all pair of dijkstra paths
    if options.disjoint_paths:
        dst = []
        paths_temp = []
        connection_points = []
        connection_points = options.connection_points  #for future implementation of multi-domain environments
        if options.src in g:
            src = options.src
        else:
            src = options.dst
        graph_util.parse_points(g, dst, connection_points)
        #print dst
        counter = 0
        '''Disjoint paths computation'''
        for i in range(len(dst)):
            #print apsp[src][dst]
            temp1, temp2 = paths.vertex_disjoint_shortest_pair(g, src, dst[i])
            if temp1 != None and temp2 != None:
                length1 = get_length(apsp, temp1)
                paths_temp.append((temp1, length1, dst[i]))
                length2 = get_length(apsp, temp2)
                paths_temp.append((temp2, length2, dst[i]))
                counter = counter + 2
            elif temp1 != None and temp2 == None:
                length = get_length(apsp, temp1)
                paths_temp.append((temp1, length, dst[i]))
                counter = counter + 1
        if counter == 0 or counter == 1:
            print "Could not find two or more paths to check if they are disjoint"
            print "The execution will know be stopped"
            raise SystemExit
        #print apsp[src][dst]
        paths_temp = sorted(paths_temp, key=itemgetter(1))
        path1, path2 = get_disjoint(g, paths_temp)
        if path1 == None or path2 == None:
            print(
                "Could not establish a set of disjoint paths between the requsted source and destination nodes"
            )
            print "The execution will now be stopped"
            raise SystemExit

        print path1, path2
        path_temp1, cost1, dst1 = path1
        path_temp2, cost2, dst2 = path2
        apsp[src][dst1] = cost1
        apsp_paths[src][dst1] = path_temp1
        apsp[src][dst2] = cost2
        apsp_paths[src][dst2] = path_temp2
    '''Precomputations for metrics computation'''

    if options.reuse:
        star_path_counter = options.star_paths
        iters = options.simple_paths
        disjoints = options.disjoint
        dis_counter = 0
        unbound_ratio = options.unbound
        dst = []
        dis_paths = []
        paths_temp = []
        connection_points = []
        connection_points = options.connection_points
        graph_util.parse_points(g, dst, connection_points)
        for node in g.nodes():
            if dis_counter >= disjoints:
                break
            src = node
            counter = 0
            for i in range(len(dst)):
                #print apsp[src][dst]
                temp1, temp2 = paths.vertex_disjoint_shortest_pair(
                    g, src, dst[i])
                if temp1 != None and temp2 != None:
                    length1 = get_length(apsp, temp1)
                    if length1 == -1:
                        break
                    paths_temp.append((temp1, length1, dst[i]))
                    length2 = get_length(apsp, temp2)
                    if length2 == -1:
                        break
                    paths_temp.append((temp2, length2, dst[i]))
                    counter = counter + 2
                elif temp1 != None and temp2 == None:
                    length = get_length(apsp, temp1)
                    if length == -1:
                        break
                    paths_temp.append((temp1, length, dst[i]))
                    counter = counter + 1
            if counter == 0 or counter == 1:
                continue
            paths_temp = sorted(paths_temp, key=itemgetter(1))
            path1, path2 = get_disjoint(g, paths_temp)
            if path1 != None and path2 != None:
                dis_counter = dis_counter + 2
                dis_paths.append(path1[0])
                dis_paths.append(path2[0])

        if dis_counter == disjoints:
            print("-------Found %d disjoint paths" % dis_counter)
        else:
            print(
                "-------Found %d disjoint paths out of %d that was requested" %
                (dis_counter, disjoints))
        evaluation.compute_metrics(
            FlowSpace_domain, FlowSpace_switch, FlowSpace_port, g, data, iters,
            dis_counter, dis_paths, star_path_counter, unbound_ratio
        )  #this function actually computes acceptance ratio and generate non-overlapping flowspace rules
        '''Creation of files containing flowspace rules in the /tables folder. One file is created for each slicing method.'''

        dirname = os.path.dirname(filename_domain)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_domain + "Flowspace table", FlowSpace_domain)
        dirname = os.path.dirname(filename_switch)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_switch + "Flowspace table", FlowSpace_switch)
        dirname = os.path.dirname(filename_port)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename_port + "Flowspace table", FlowSpace_port)
    '''Creation of a file containing acceptance ratio results and the number of flowspace rules for each slicing method in the /acceptance_ratio folder.'''

    if options.use_prior:
        data = read_json_file(filename)
    else:
        if options.write:
            dirname = os.path.dirname(filename)
            if not os.path.exists(dirname):
                os.mkdir(dirname)
            write_json_file(filename + 'acceptance_ratio', data)

    return data, filename
Example #14
# Eventually expand this to n.
if COMPUTE_START:
    controllers += range(1, NUM_FROM_START + 1)

if COMPUTE_END:
    controllers += (range(g.number_of_nodes() - NUM_FROM_END + 1, g.number_of_nodes() + 1))

if WEIGHTED:
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)
else:
    apsp = nx.all_pairs_shortest_path_length(g)
    apsp_paths = nx.all_pairs_shortest_path(g)

if USE_PRIOR_OPTS:
    data = read_json_file(PRIOR_OPTS_FILENAME)
else:
    all_data = {}  # data, keyed by # failures
    for failures in range(1, MAX_FAILURES + 1):
        # data['data'][num controllers] = [latency:latency, nodes:[best-pos node(s)]]
        # data['metrics'] = [list of metrics included]
        # latency is also equal to 1/closeness centrality.
        all_data[failures] = {}
        extra_params['max_failures'] = failures
        metrics.run_all_combos(METRICS, g, controllers, all_data[failures], apsp,
                           apsp_paths, WEIGHTED, WRITE_DIST, WRITE_COMBOS, extra_params)
    # extract ordering of availability
    extract = {}  # extract[1] = data for 1 failure
    failures = range(1, MAX_FAILURES + 1)
    for j in failures:
        extract[j] = []