Example #1
import csv
import os
import pickle

# extract_capacity_from_traffic, extract_old_capacity_from_traffic, plot_scatter,
# plot_quantiles and plot_hist are helpers defined or imported elsewhere in the
# surrounding module.


def compare_capacities(paras, rep="", save_to_csv=True):
    """
    Compare the new and old definitions of capacities.
    """

    G = paras["G"]
    paras_real = G.paras_real
    capacities_new = extract_capacity_from_traffic(G, paras_real)
    capacities_old = extract_old_capacity_from_traffic(G, paras_real)

    with open("/home/earendil/Documents/ELSA/Modules/All_areas_volumes_" + str(paras_real["airac"]) + ".pic", "r") as f:
        real_areas, volumes = pickle.load(f)

    sectors = sorted(capacities_new.keys())

    capacities_new, capacities_old, areas, volumes = (
        [capacities_new[v] for v in sectors],
        [capacities_old[v] for v in sectors],
        [real_areas[G.node[v]["name"]] for v in sectors],
        [volumes[G.node[v]["name"]] for v in sectors],
    )

    # Make sure the output directory exists before writing anything into it.
    os.system("mkdir -p " + rep)

    if save_to_csv:
        for met, met_name in [
            (capacities_new, "cap_new"),
            (capacities_old, "cap_old"),
            (areas, "areas"),
            (volumes, "vol"),
        ]:
            with open(rep + "/" + met_name + "_dist.csv", "wb") as csvfile:
                writer = csv.writer(csvfile, delimiter=";")
                writer.writerow([met_name])
                for d in met:
                    writer.writerow([d])

    plot_scatter(
        capacities_old, capacities_new, rep=rep, suffix="capacities", xlabel="Old capacities", ylabel="New capacities"
    )
    plot_scatter(areas, capacities_new, rep=rep, suffix="area_new_capacities", xlabel="area", ylabel="New capacities")
    plot_quantiles(
        capacities_old, capacities_new, rep=rep, suffix="capacities", xlabel="Old capacities", ylabel="New capacities"
    )
    plot_quantiles(areas, capacities_new, rep=rep, suffix="area_new_capacities", xlabel="area", ylabel="New capacities")

    plot_hist(capacities_new, rep=rep, suffix="capacities_new", xlabel="capacities_new")
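
The core of compare_capacities is aligning several per-sector dictionaries on one sorted key list and then dumping each metric to a one-column CSV. The following is a minimal, self-contained sketch of that pattern with toy dictionaries in place of the project's graph and pickle file; the sector names and values are purely illustrative.

import csv

# Toy per-sector values standing in for the capacities/areas/volumes dictionaries.
capacities_new = {"S1": 42, "S2": 37, "S3": 51}
capacities_old = {"S1": 40, "S2": 35, "S3": 49}

# Align both metrics on the same sorted sector order, as in the function above.
sectors = sorted(capacities_new.keys())
new_aligned = [capacities_new[s] for s in sectors]
old_aligned = [capacities_old[s] for s in sectors]

# Same CSV layout as above: a one-cell header row, then one value per line.
for met, met_name in [(new_aligned, "cap_new"), (old_aligned, "cap_old")]:
    with open(met_name + "_dist.csv", "wb") as csvfile:
        writer = csv.writer(csvfile, delimiter=";")
        writer.writerow([met_name])
        for d in met:
            writer.writerow([d])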
Example #2
import csv
import os

import numpy as np

# plot_hist is a plotting helper defined or imported elsewhere in the surrounding module.


def compute_basic_stats(paras, rep="", save_to_csv=False):
    """
    Simple statistics on the navpoint network.
    Beware: here the edge weights are not given by the traffic; they are the crossing times.
    """
    G = paras["G"].G_nav

    os.system("mkdir -p " + rep)

    print "Number of nodes, number of edges:", len(G.nodes()), len(G.edges())

    # ----- Distribution of degree ----- #
    deg = [G.degree(n) for n in G.nodes()]
    bins = max(deg) - min(deg)
    if save_to_csv:
        with open(rep + "/deg_dist.csv", "wb") as csvfile:
            writer = csv.writer(csvfile, delimiter=";")
            writer.writerow(["Degree"])
            for d in deg:
                writer.writerow([d])

    print ""
    print "Min/Mean/Std/Max degree:", min(deg), np.mean(deg), np.std(deg), max(deg)

    plot_hist(deg, xlabel="Degree", title="Distribution of degree", bins=bins, rep=rep, suffix="deg")

    # ----- Distribution of strength ----- #
    strr = [G.degree(n, weight="weight") for n in G.nodes()]
    bins = int(max(strr) - min(strr))  # strengths are floats, so cast the bin count to an integer
    if save_to_csv:
        with open(rep + "/str_dist.csv", "wb") as csvfile:
            writer = csv.writer(csvfile, delimiter=";")
            writer.writerow(["Strength"])
            for s in strr:
                writer.writerow([s])

    print ""
    print "Min/Mean/Std/Max stregth:", min(strr), np.mean(strr), np.std(strr), max(strr)

    plot_hist(strr, xlabel="Strength", title="Distribution of strength", bins=bins, rep=rep, suffix="str")

    # ----- Distribution of weights ----- #
    wei = [G[e[0]][e[1]]["weight"] for e in G.edges()]
    bins = 30

    if save_to_csv:
        with open(rep + "/wei_dist.csv", "wb") as csvfile:
            writer = csv.writer(csvfile, delimiter=";")
            writer.writerow(["Weights"])
            for w in wei:
                writer.writerow([w])

    print ""
    print "Min/Mean/Std/Max weight:", min(wei), np.mean(wei), np.std(wei), max(wei)
    print "Total weight:", sum(wei)

    plot_hist(wei, xlabel="Time of travel between navpoints", title="", bins=bins, rep=rep, suffix="wei")
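
For reference, the three quantities profiled above (degree, strength, and edge weight) can be computed on any weighted networkx graph. The following minimal sketch uses a toy graph in place of the project's G_nav, with edge weights standing in for crossing times; the node names and values are purely illustrative.

import networkx as nx
import numpy as np

# Toy weighted graph standing in for G_nav; weights play the role of crossing times.
H = nx.Graph()
H.add_weighted_edges_from([("A", "B", 2.0), ("B", "C", 1.5), ("A", "C", 3.0), ("C", "D", 0.5)])

deg = [H.degree(n) for n in H.nodes()]                    # degree of each navpoint
strr = [H.degree(n, weight="weight") for n in H.nodes()]  # strength = sum of incident edge weights
wei = [H[u][v]["weight"] for u, v in H.edges()]           # edge weights (crossing times)

# Min/Mean/Std/Max summaries, mirroring the prints in compute_basic_stats.
summary = {
    "degree": (min(deg), np.mean(deg), np.std(deg), max(deg)),
    "strength": (min(strr), np.mean(strr), np.std(strr), max(strr)),
    "weight": (min(wei), np.mean(wei), np.std(wei), max(wei)),
}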