def _relative_change_hist(values, title, path, hist_range=None):
    """Plot a 20-bin histogram of relative-change values and save it.

    values     -- iterable of relative changes (dimensionless)
    title      -- figure title
    path       -- output PDF path
    hist_range -- optional (lo, hi) range passed to pylab.hist
    """
    latex_figure()
    if hist_range is None:
        pylab.hist(values, bins=20)
    else:
        pylab.hist(values, bins=20, range=hist_range)
    pylab.title(title)
    pylab.xlabel("Relative change (-)")
    pylab.ylabel("Count (-)")
    pylab.savefig(path)


def difference_stuff():
    """Compare link flow/density and route travel times between the
    pre-fix network (jdr_with_state.json) and the fixed network
    (data_fixed_again.json), saving three histogram figures."""
    prev = CTMStaticProblem.load("data/jdr_with_state.json")
    final = CTMStaticProblem.load("data/data_fixed_again.json")
    # Pair each link of the previous network with its counterpart in the
    # fixed network, keyed by link name.  Computed once and reused for
    # both the flow and the density histograms.
    pairs = [
        (prev.link_by_name(lid), final.link_by_name(lid))
        for lid in set(link.name for link in prev.get_links())
    ]
    _relative_change_hist(
        [abs(p.state.flow - f.state.flow) / p.state.flow for p, f in pairs],
        "Relative change in link flow input",
        "figures/data_fixer_difference_flow.pdf",
    )
    _relative_change_hist(
        [abs(p.state.density - f.state.density) / p.state.density for p, f in pairs],
        "Relative change in link density input",
        "figures/data_fixer_difference_density.pdf",
    )
    routes = [route.name() for route in prev.all_routes()]
    _relative_change_hist(
        [
            abs(prev.route_travel_time(route) - final.route_travel_time(route))
            / prev.route_travel_time(route)
            for route in routes
        ],
        "Relative change in route travel times after data preparation",
        "figures/data_fixer_difference_route.pdf",
        hist_range=(0, 0.3),
    )
def network_stats(): net = CTMStaticProblem.load("data/data_fixed_again.json") net.cache_props() print "n_links", len(net.get_links()) print "sources", len(net.sources) print "sinks", len(net.sinks) print "routes", len(net.all_routes()) print "length", sum(link.l for link in net.get_links()), "meters"
def nash_comparison(): net = CTMStaticProblem.load("data/data_fixed.json") net.cache_props() for routes in net.od_routes.itervalues(): if len(routes) > 1: print "" tts = sorted(net.route_travel_time(route) for route in routes) print tts[1] - tts[0], tts[:2]
def congestion_level():
    """Plot a normalized histogram of per-link congestion levels of the
    input data and save it as a PDF figure."""
    net = CTMStaticProblem.load("data/data_fixed_again.json")
    net.cache_props()
    levels = [link.congestion_level() for link in net.get_links()]
    latex_figure()
    pylab.hist(levels, bins=50, range=(0, 1), normed=True)
    pylab.title("Congestion levels of input data")
    pylab.xlabel("Congestion level (-)")
    pylab.ylabel("Normalized counts (-)")
    pylab.savefig("figures/congestion_level.pdf")
def n_routes():
    """Plot a histogram of the number of available routes per o-d pair
    and save it as a PDF figure."""
    net = CTMStaticProblem.load("data/data_fixed.json")
    net.cache_props()
    route_counts = [len(routes) for routes in net.od_routes.itervalues()]
    latex_figure()
    # Half-integer range centers each integer count on its own bin.
    pylab.hist(route_counts, bins=7, align="mid", range=(-0.5, 6.5))
    pylab.title("Number of available routes between o-d pairs")
    pylab.xlabel("Number of routes between o-d pair (-)")
    pylab.ylabel("Count (-)")
    pylab.savefig("figures/available_routes.pdf")
def combine_data(): with open("data/asdf16", "r") as fn: data = json.load(fn) net = CTMStaticProblem.load("data/jdr_peninsula_fixed.json") lids_data = set(map(int, data.keys())) lids_net = set([int(link.name) for link in net.get_links()]) print len(lids_data.intersection(lids_net)) for lid, rec in data.iteritems(): link = net.link_by_name(lid) link.state.flow = rec["flow"] link.state.density = rec["density"] net.dump("data/jdr_with_state.json")
def congested_region(): net = CTMStaticProblem.load("data/data_fixed_again.json") print float(len([link for link in net.get_links() if link.state.density > link.fd.rho_crit()])) / len( net.get_links() )
def checker(): net = CTMStaticProblem.load("data/jdr_with_state.json") for link in net.get_links(): print link.state