Example #1
0
def do_metrics(options, topo, g):
    '''Compute the metrics for a single topology.

    Args:
        options: parsed option namespace; fields read here include
            use_prior, metrics, write_dist, write_combos, processes,
            multiprocess, chunksize, median, dist_only, weighted,
            write, write_csv.
        topo: topology name (used for logging and output file naming).
        g: NetworkX graph of the topology.

    Returns:
        (data, filename): the computed metrics dict (schema documented at
        the top of the file) and the base output filename (no extension).
    '''
    print("==========options")
    print(options)
    print("computing metrics for topo: %s" % topo)
    controllers = get_controllers(g, options)
    filename = get_filename(topo, options, controllers)

    data = {}  # See top for data schema details.
    # All-pairs shortest-path lengths and paths (weighted Dijkstra variant).
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)

    extra_params = get_extra_params(g)
    if options.use_prior:
        # Reuse results from a prior run instead of recomputing.
        data = read_json_file(filename)
    else:
        start = time.time()
        weighted = True
        metrics.run_all_combos(options.metrics, g, controllers, data, apsp,
                               apsp_paths, weighted, options.write_dist,
                               options.write_combos, extra_params,
                               options.processes, options.multiprocess,
                               options.chunksize, options.median)
        total_duration = time.time() - start
        print("%0.6f" % total_duration)

    if not options.dist_only:
        # Heuristic placement algorithms, for comparison against the
        # exhaustive combination sweep above.
        metrics.run_greedy_informed(data, g, apsp, options.weighted)
        metrics.run_greedy_alg_dict(
            data, g, 'greedy-cc', 'latency',
            nx.closeness_centrality(g, weighted_edges=options.weighted), apsp,
            options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-dc', 'latency',
                                    nx.degree_centrality(g), apsp,
                                    options.weighted)
        for i in [10, 100, 1000]:
            metrics.run_best_n(data, g, apsp, i, options.weighted)
            metrics.run_worst_n(data, g, apsp, i, options.weighted)

    print(
        "*******************************************************************")

    # Ignore the actual combinations in CSV outputs as well as single points.
    exclude = ["distribution", "metric", "group", "id"]
    if not options.write_combos:
        exclude += ['highest_combo', 'lowest_combo']

    if options.write:
        dirname = os.path.dirname(filename)
        # makedirs(exist_ok=True) avoids the exists()/mkdir() race and also
        # creates intermediate directories; guard against an empty dirname
        # (filename with no directory component).
        if dirname:
            os.makedirs(dirname, exist_ok=True)
        write_json_file(filename + '.json', data)
        if options.write_csv:
            write_csv_file(filename, data["data"], exclude=exclude)
            if options.write_dist:
                write_dist_csv_file(filename + '_dist', data["data"], exclude)

    return data, filename
Example #2
0
File: metrics.py  Project: NKSG/cpp
def do_metrics(options, topo, g):
    '''Compute the metrics for a single topology.

    Args:
        options: parsed option namespace; fields read here include
            use_prior, metrics, write_dist, write_combos, processes,
            multiprocess, chunksize, median, dist_only, weighted,
            write, write_csv.
        topo: topology name (used for logging and output file naming).
        g: NetworkX graph of the topology.

    Returns:
        (data, filename): the computed metrics dict (schema documented at
        the top of the file) and the base output filename (no extension).
    '''
    # Single-argument print(...) behaves identically under Python 2 and is
    # required syntax under Python 3.
    print("computing metrics for topo: %s" % topo)
    controllers = get_controllers(g, options)
    filename = get_filename(topo, options, controllers)

    data = {}  # See top for data schema details.
    # All-pairs shortest-path lengths and paths (weighted Dijkstra variant).
    apsp = nx.all_pairs_dijkstra_path_length(g)
    apsp_paths = nx.all_pairs_dijkstra_path(g)

    extra_params = get_extra_params(g)
    if options.use_prior:
        # Reuse results from a prior run instead of recomputing.
        data = read_json_file(filename)
    else:
        start = time.time()
        weighted = True
        metrics.run_all_combos(options.metrics, g, controllers, data, apsp,
                               apsp_paths, weighted, options.write_dist,
                               options.write_combos, extra_params,
                               options.processes, options.multiprocess,
                               options.chunksize, options.median)
        total_duration = time.time() - start
        print("%0.6f" % total_duration)

    if not options.dist_only:
        # Heuristic placement algorithms, for comparison against the
        # exhaustive combination sweep above.
        metrics.run_greedy_informed(data, g, apsp, options.weighted)
        metrics.run_greedy_alg_dict(
            data, g, 'greedy-cc', 'latency',
            nx.closeness_centrality(g, weighted_edges=options.weighted),
            apsp, options.weighted)
        metrics.run_greedy_alg_dict(data, g, 'greedy-dc', 'latency',
                                    nx.degree_centrality(g), apsp,
                                    options.weighted)
        for i in [10, 100, 1000]:
            metrics.run_best_n(data, g, apsp, i, options.weighted)
            metrics.run_worst_n(data, g, apsp, i, options.weighted)

    print("*******************************************************************")

    # Ignore the actual combinations in CSV outputs as well as single points.
    exclude = ["distribution", "metric", "group", "id"]
    if not options.write_combos:
        exclude += ['highest_combo', 'lowest_combo']

    if options.write:
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.mkdir(dirname)
        write_json_file(filename + '.json', data)
        if options.write_csv:
            write_csv_file(filename, data["data"], exclude=exclude)
            if options.write_dist:
                write_dist_csv_file(filename + '_dist', data["data"], exclude)

    return data, filename
Example #3
0
else:
    # Unweighted fallback: plain hop-count shortest paths (the matching
    # weighted branch of this if/else lies above this excerpt).
    apsp = nx.all_pairs_shortest_path_length(g)
    apsp_paths = nx.all_pairs_shortest_path(g)

if USE_PRIOR_OPTS:
    # Load previously computed results from disk rather than recomputing.
    data = read_json_file(PRIOR_OPTS_FILENAME)
else:
    all_data = {}  # data, keyed by # failures
    # Sweep the failure count and compute the full metric set at each level.
    for failures in range(1, MAX_FAILURES + 1):
        # data['data'][num controllers] = [latency:latency, nodes:[best-pos node(s)]]
        # data['metrics'] = [list of metrics included]
        # latency is also equal to 1/closeness centrality.
        all_data[failures] = {}
        extra_params['max_failures'] = failures
        metrics.run_all_combos(METRICS, g, controllers, all_data[failures],
                               apsp, apsp_paths, WEIGHTED, WRITE_DIST,
                               WRITE_COMBOS, extra_params)
    # extract ordering of availability
    extract = {}  # extract[1] = data for 1 failure
    # NOTE: rebinds `failures` from the loop variable above to a range.
    failures = range(1, MAX_FAILURES + 1)
    for j in failures:
        extract[j] = []
        print "getting data for %i failure" % j
        # NOTE(review): assumes the 1-controller entry carries a
        # 'distribution' list of {id, combo, availability} points —
        # confirm against the data schema comment above.
        for i, point in enumerate(all_data[j]['data'][str(1)]['distribution']):
            id = point['id']
            combo = point['combo']
            a = point['availability']
            extract[j].append([id, a, combo])
        # Order each failure level's points by availability, highest first.
        extract[j] = sorted(extract[j], key=itemgetter(1), reverse=True)
    # Compare the availability orderings of every pair of failure levels.
    # (Loop body continues past this excerpt.)
    for combo in combinations(failures, 2):
        print "comparing %s and %s:" % (combo[0], combo[1])
Example #4
0
    apsp_paths = nx.all_pairs_dijkstra_path(g)
else:
    # Unweighted fallback: plain hop-count shortest paths (the matching
    # weighted branch of this if/else begins above this excerpt).
    apsp = nx.all_pairs_shortest_path_length(g)
    apsp_paths = nx.all_pairs_shortest_path(g)

if USE_PRIOR_OPTS:
    # Load previously computed results from disk rather than recomputing.
    data = read_json_file(PRIOR_OPTS_FILENAME)
else:
    all_data = {}  # data, keyed by # failures
    # Sweep the failure count and compute the full metric set at each level.
    for failures in range(1, MAX_FAILURES + 1):
        # data['data'][num controllers] = [latency:latency, nodes:[best-pos node(s)]]
        # data['metrics'] = [list of metrics included]
        # latency is also equal to 1/closeness centrality.
        all_data[failures] = {}
        extra_params['max_failures'] = failures
        metrics.run_all_combos(METRICS, g, controllers, all_data[failures], apsp,
                           apsp_paths, WEIGHTED, WRITE_DIST, WRITE_COMBOS, extra_params)
    # extract ordering of availability
    extract = {}  # extract[1] = data for 1 failure
    # NOTE: rebinds `failures` from the loop variable above to a range.
    failures = range(1, MAX_FAILURES + 1)
    for j in failures:
        extract[j] = []
        print "getting data for %i failure" % j
        # NOTE(review): assumes the 1-controller entry carries a
        # 'distribution' list of {id, combo, availability} points —
        # confirm against the data schema comment above.
        for i, point in enumerate(all_data[j]['data'][str(1)]['distribution']):
            id = point['id']
            combo = point['combo']
            a = point['availability']
            extract[j].append([id, a, combo])
        # Order each failure level's points by availability, highest first.
        extract[j] =  sorted(extract[j], key = itemgetter(1), reverse = True)
    # Compare the availability orderings of every pair of failure levels.
    # (Loop body continues past this excerpt.)
    for combo in combinations(failures, 2):
        print "comparing %s and %s:" % (combo[0], combo[1])
        left = combo[0]