def main(): usage = """usage: python gdb_import_gtfs.py [options] <graphdb_filename> <gtfsdb_filename> <agency_id>""" parser = OptionParser(usage=usage) parser.add_option("-n", "--namespace", dest="namespace", default="0", help="agency namespace") parser.add_option("-m", "--maxtrips", dest="maxtrips", default=None, help="maximum number of trips to load") (options, args) = parser.parse_args() if len(args) != 3: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb_filename = args[1] agency_id = args[2] print "importing agency '%s' from gtfsdb '%s' into graphdb '%s'"%(agency_id, gtfsdb_filename, graphdb_filename) gtfsdb = GTFSDatabase( gtfsdb_filename ) gdb = GraphDatabase( graphdb_filename, overwrite=False ) maxtrips = int(options.maxtrips) if options.maxtrips else None gdb_load_gtfsdb_to_boardalight(gdb, options.namespace, gtfsdb, agency_id, gdb.get_cursor(), maxtrips=maxtrips) gdb.commit() print "done"
def main(): usage = """usage: python gdb_link_osm_gtfs.py <graphdb_filename> <osmdb_filename> <gtfsdb_filename>""" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 3: parser.print_help() exit(-1) graphdb_filename = args[0] osmdb_filename = args[1] gtfsdb_filename = args[2] gtfsdb = GTFSDatabase( gtfsdb_filename ) osmdb = OSMDB( osmdb_filename ) gdb = GraphDatabase( graphdb_filename ) n_stops = gtfsdb.count_stops() for i, (stop_id, stop_name, stop_lat, stop_lon) in enumerate( gtfsdb.stops() ): print "%d/%d"%(i,n_stops) nd_id, nd_lat, nd_lon, nd_dist = osmdb.nearest_node( stop_lat, stop_lon ) station_vertex_id = "sta-%s"%stop_id osm_vertex_id = "osm-%s"%nd_id print station_vertex_id, osm_vertex_id gdb.add_edge( station_vertex_id, osm_vertex_id, Link() ) gdb.add_edge( osm_vertex_id, station_vertex_id, Link() )
def main(): usage = """usage: python gdb_link_gtfs_gtfs.py <graphdb_filename> <gtfsdb_filename> <range>""" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 3: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb_filename = args[1] range = float(args[2]) gtfsdb = GTFSDatabase( gtfsdb_filename ) gdb = GraphDatabase( graphdb_filename ) n_stops = gtfsdb.count_stops() for i, (stop_id, stop_name, stop_lat, stop_lon) in enumerate( gtfsdb.stops() ): print "%d/%d %s"%(i,n_stops,stop_id), station_vertex_id = "sta-%s"%stop_id for link_stop_id, link_stop_name, link_stop_lat, link_stop_lon in gtfsdb.nearby_stops( stop_lat, stop_lon, range ): if link_stop_id == stop_id: continue print ".", link_length = vincenty( stop_lat, stop_lon, link_stop_lat, link_stop_lon) link_station_vertex_id = "sta-%s"%link_stop_id gdb.add_edge( station_vertex_id, link_station_vertex_id, Street("link", link_length) ) print ""
def main(): usage = """usage: python gdb_import_gtfs.py [options] <graphdb_filename> <gtfsdb_filename> [<agency_id>]""" parser = OptionParser(usage=usage) parser.add_option("-n", "--namespace", dest="namespace", default="0", help="agency namespace") parser.add_option("-m", "--maxtrips", dest="maxtrips", default=None, help="maximum number of trips to load") parser.add_option("-d", "--date", dest="sample_date", default=None, help="only load transit running on a given day. YYYYMMDD" ) (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb_filename = args[1] agency_id = args[2] if len(args)==3 else None print "importing from gtfsdb '%s' into graphdb '%s'"%(gtfsdb_filename, graphdb_filename) gtfsdb = GTFSDatabase( gtfsdb_filename ) gdb = GraphDatabase( graphdb_filename, overwrite=False ) maxtrips = int(options.maxtrips) if options.maxtrips else None gdb_load_gtfsdb( gdb, options.namespace, gtfsdb, gdb.get_cursor(), agency_id, maxtrips=maxtrips, sample_date=options.sample_date) gdb.commit() print "done"
def process_transit_graph(graphdb_filename, gtfsdb_filenames, osmdb_filename=None, profiledb_filename=None, agency_id=None, link_stations=False, slogs={}):
    # Build a routable graph from one or more GTFS databases, optionally merged
    # with an OSM street network and elevation profiles, then persist it to
    # graphdb_filename (overwriting any existing database).
    # NOTE(review): `slogs={}` is a mutable default argument -- harmless only if
    # no callee mutates it; confirm.
    g = Graph()
    if profiledb_filename:
        print( "Opening ProfileDB '%s'"%profiledb_filename )
        profiledb = ProfileDB( profiledb_filename )
    else:
        print( "No ProfileDB supplied" )
        profiledb = None
    if osmdb_filename:
        # Load osmdb ===============================
        print( "Opening OSM-DB '%s'"%osmdb_filename )
        osmdb = OSMDB( osmdb_filename )
        compiler.load_streets_to_graph( g, osmdb, profiledb, slogs, reporter=sys.stdout )
    # Load gtfsdb ==============================
    for i, gtfsdb_filename in enumerate(gtfsdb_filenames):
        gtfsdb = GTFSDatabase( gtfsdb_filename )
        # the loader wants plain ascii bytestrings, not unicode, for service ids
        service_ids = [x.encode("ascii") for x in gtfsdb.service_ids()]
        # each feed gets its own namespace: the feed's index as a string
        compiler.load_gtfsdb_to_boardalight_graph(g, str(i), gtfsdb, agency_id=agency_id, service_ids=service_ids)
        if osmdb_filename:
            compiler.load_transit_street_links_to_graph( g, osmdb, gtfsdb, reporter=sys.stdout )
        if link_stations:
            compiler.link_nearby_stops( g, gtfsdb )
    # Export to graphdb ========================
    graphdb = GraphDatabase( graphdb_filename, overwrite=True )
    graphdb.populate( g, reporter=sys.stdout )
def handle(self, *args, **options):
    # Django management-command entry point: route every stored trip between
    # stations over the Boston street network, collecting geometries and
    # flushing them to disk in batches of options['number'].
    self.stdout.write('Loading boston.osmdb\n')
    self.nodedb = osmdb.OSMDB(settings.DATA_DIR + '/boston.osmdb')
    self.stdout.write('Importing Boston street network...\n')
    gdb = GraphDatabase(settings.DATA_DIR + '/boston.gdb')
    self.graph = gdb.incarnate()
    self.stdout.write('Importing trip network...\n')
    tripdb = osmdb.OSMDB(settings.DATA_DIR + '/trip_data.db')
    batchgeom = []
    count = 0
    # For each station
    for tnode in tripdb.nodes():
        lat1 = float(tnode[2])
        lng1 = float(tnode[3])
        # get all trips departing this station
        cursor = tripdb.get_cursor()
        tedges = cursor.execute("select * from edges where start_nd = ?", [tnode[0]])
        # For each trip
        for tedge in tedges:
            if tedge[3] == '':
                # no destination node recorded for this trip; skip it
                if int(options['verbosity']) > 1:
                    self.stdout.write( 'Start and end nodes are the same.\n')
                continue
            dnode = tripdb.node(tedge[3])
            lat2 = float(dnode[2])
            lng2 = float(dnode[3])
            dnode = None
            if lat2 == lat1 and lng2 == lng1:
                # origin and destination coincide; nothing to route
                continue
            geom = self._spt(lat1, lng1, lat2, lng2)
            count += 1
            batchgeom.append(( count, geom, ))
            # flush a full batch to disk, then start a fresh one
            if len(batchgeom) >= options['number']:
                self.dropfile(batchgeom, count, **options)
                batchgeom = []
            geom = None
        # Don't keep a history of the execute tranactions
        tedges = None
        cursor = None
def importGtfsWrapper(gtfs_filename, db_conn_string):
    # Load a GTFS feed into the database behind db_conn_string, then register
    # its board/alight data in the graph stored at the same connection.
    # NOTE(review): both handles target the same connection string; the GTFS
    # tables are recreated (overwrite=True) while the graph tables are kept
    # (overwrite=False) -- order of these two constructor calls matters.
    gdb = GraphDatabase( db_conn_string, overwrite=False )
    gtfsdb = GTFSDatabase( db_conn_string, overwrite=True )
    gtfsdb.load_gtfs( gtfs_filename )
    # second argument is presumably the namespace (cf. the other loaders) -- confirm
    gdb_load_gtfsdb( gdb, 1, gtfsdb, gdb.get_cursor())
def __init__(self, graphdb_filename, pgosmdb_handle, pggtfsdb_handle, event_dispatch):
    """Incarnate the stored graph and keep handles to the postgres OSM/GTFS databases."""
    db = GraphDatabase( graphdb_filename )
    self.graph = db.incarnate()
    # keep both attribute spellings in sync
    self.graph.num_agencies = 3
    self.graph.numagencies = self.graph.num_agencies
    self.event_dispatch = event_dispatch
    self.pgosmdb = pgosmdb_handle
    self.pggtfsdb = pggtfsdb_handle
    self.two_way_routing = True
def handle(self, *args, **options):
    # Django management-command entry point: route every stored trip between
    # stations over the Boston street network, collecting geometries and
    # flushing them to disk in batches of options['number'].
    self.stdout.write('Loading boston.osmdb\n')
    self.nodedb = osmdb.OSMDB(settings.DATA_DIR+'/boston.osmdb')
    self.stdout.write('Importing Boston street network...\n')
    gdb = GraphDatabase(settings.DATA_DIR+'/boston.gdb')
    self.graph = gdb.incarnate()
    self.stdout.write('Importing trip network...\n')
    tripdb = osmdb.OSMDB(settings.DATA_DIR+'/trip_data.db')
    batchgeom = []
    count = 0
    # For each station
    for tnode in tripdb.nodes():
        lat1 = float(tnode[2])
        lng1 = float(tnode[3])
        # get all trips departing this station
        cursor = tripdb.get_cursor()
        tedges = cursor.execute("select * from edges where start_nd = ?", [tnode[0]])
        # For each trip
        for tedge in tedges:
            if tedge[3] == '':
                # no destination node recorded for this trip; skip it
                if int(options['verbosity']) > 1:
                    self.stdout.write('Start and end nodes are the same.\n')
                continue
            dnode = tripdb.node(tedge[3])
            lat2 = float(dnode[2])
            lng2 = float(dnode[3])
            dnode = None
            if lat2 == lat1 and lng2 == lng1:
                # origin and destination coincide; nothing to route
                continue
            geom = self._spt(lat1, lng1, lat2, lng2)
            count += 1
            batchgeom.append((count,geom,))
            # flush a full batch to disk, then start a fresh one
            if len(batchgeom) >= options['number']:
                self.dropfile(batchgeom, count, **options)
                batchgeom = []
            geom = None
        # Don't keep a history of the execute tranactions
        tedges = None
        cursor = None
def __init__(self, graphdb_filename, vertex_events, edge_events, vertex_reverse_geocoders):
    """Load the graph, preferring a serialized binary image when one exists on disk."""
    binary_image = "%s.gbin" % graphdb_filename
    if os.path.exists(binary_image):
        self.graph = Graph()
        # the flag tells deserialize whether a companion .gmm memmap file exists
        self.graph.deserialize(graphdb_filename, os.path.exists("%s.gmm" % graphdb_filename))
    else:
        # no binary image; incarnate from the sqlite graph database instead
        self.graph = GraphDatabase( graphdb_filename ).incarnate()
    self.vertex_events = vertex_events
    self.edge_events = edge_events
    self.vertex_reverse_geocoders = vertex_reverse_geocoders
def __init__(self, graphdb_filename, vertex_events, edge_events, vertex_reverse_geocoders, config={}):
    """Incarnate the graph from its database and store the event/geocoder hooks.

    `config` is accepted for interface compatibility but is not used here.
    """
    self.graph = GraphDatabase(graphdb_filename).incarnate()
    self.vertex_events = vertex_events
    self.edge_events = edge_events
    self.vertex_reverse_geocoders = vertex_reverse_geocoders
def process_transit_graph(gtfsdb_filename, agency_id, graphdb_filename, link=False):
    """Compile a transit-only graph from a GTFS database and write it to a graph database."""
    gtfs = GTFSDatabase( gtfsdb_filename )

    graph = Graph()
    # the loader wants plain ascii bytestrings for service ids
    service_ids = [sid.encode("ascii") for sid in gtfs.service_ids()]
    compiler.load_gtfsdb_to_boardalight_graph(graph, gtfs, agency_id=agency_id, service_ids=service_ids)
    if link:
        # optionally add walking links between stops that are close together
        compiler.link_nearby_stops( graph, gtfs )

    out_db = GraphDatabase( graphdb_filename, overwrite=True )
    out_db.populate( graph, reporter=sys.stdout )
def process_street_graph():
    """Compile the hard-coded BART-area OSM extract into a street graph database."""
    OSMDB_FILENAME = "ext/osm/bartarea.sqlite"
    GRAPHDB_FILENAME = "bartstreets.db"

    print( "Opening OSM-DB '%s'"%OSMDB_FILENAME )
    streets = OSMDB( OSMDB_FILENAME )

    graph = Graph()
    compiler.load_streets_to_graph( graph, streets, sys.stdout )

    out_db = GraphDatabase( GRAPHDB_FILENAME, overwrite=True )
    out_db.populate( graph, reporter=sys.stdout )
def main(graph_db, gtfs_db): graphdb = GraphDatabase( graph_db ) gtfsdb = GTFSDatabase( gtfs_db ) print "loading existing graph" full_graph = graphdb.incarnate() print "copying relevant vertices and edges from full graph" min_graph = Graph() copy_relevant_elements( min_graph, full_graph ) print "adding minimum-time transit trip edges" add_min_transit( min_graph, gtfsdb ) print "writing out new graph to database" min_graphdb = GraphDatabase( 'min_graph.gdb', overwrite=True ) min_graphdb.populate(min_graph) print "DONE." sys.exit(0)
def __init__(self, settings_filename):
    """Load server settings, index OSM node positions in an R-tree, and incarnate the graph.

    Fix: the settings file handle is now closed deterministically via a
    context manager (the original passed an anonymous open() to yaml.load
    and leaked the handle).
    """
    # SECURITY NOTE: yaml.load can construct arbitrary Python objects; switch
    # to yaml.safe_load if the settings file is not fully trusted.
    with open( settings_filename ) as settings_file:
        settings = yaml.load( settings_file )
    self.home_point = settings['center']

    # create cache of osm-node positions
    self.osmdb = OSMDB( settings['osmdb_filename'] )
    self.gtfsdb = GTFSDatabase( settings['gtfsdb_filename'] )
    self.port = settings['port']
    self.node_positions = {}
    self.index = Rtree()
    for node_id, tags, lat, lon in self.osmdb.nodes():
        self.node_positions[node_id] = (lon,lat)
        # Rtree keys must be ints; a point is stored as a degenerate bounding box
        self.index.add( int(node_id), (lon,lat,lon,lat) )

    # incarnate graph from graphdb
    graphdb = GraphDatabase( settings['graphdb_filename'] )
    self.graph = graphdb.incarnate()
def main(): usage = """usage: python gs_serialize.py [options] <basename> <graphdb_filename> """ parser = OptionParser(usage=usage) parser.add_option("-m", "--memmap", action="store_true", dest="memmap", default=False, help="Create a memmap serialized file.") (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit(-1) basename, graphdb_filename = args db = GraphDatabase(graphdb_filename) g = db.incarnate() g.serialize(basename, options.memmap) print "done"
def process_street_graph(osmdb_filename, graphdb_filename, profiledb_filename, slogs={}):
    """Compile an OSM street network (optionally elevation-annotated) into a graph database.

    Fix: removed the leftover hard-coded OSMDB_FILENAME / GRAPHDB_FILENAME
    constants, which were never read and only obscured the real parameters.
    NOTE(review): `slogs={}` is a mutable default; safe only if never mutated.
    """
    print( "Opening OSM-DB '%s'"%osmdb_filename )
    osmdb = OSMDB( osmdb_filename )

    if profiledb_filename:
        print( "Opening ProfileDB '%s'"%profiledb_filename )
        profiledb = ProfileDB( profiledb_filename )
    else:
        print( "No ProfileDB supplied" )
        profiledb = None

    g = Graph()
    compiler.load_streets_to_graph( g, osmdb, profiledb, slogs, reporter=sys.stdout )

    graphdb = GraphDatabase( graphdb_filename, overwrite=True )
    graphdb.populate( g, reporter=sys.stdout )
def process_transit_street_graph(graphdb_filename, gtfsdb_filename, osmdb_filename, agency_id=None):
    """Compile a combined transit + street graph and write it to a graph database.

    Fix: the original iterated over the undefined name `gtfsdb_filenames`,
    raising NameError at runtime.  The single `gtfsdb_filename` parameter is
    now wrapped in a list, preserving the loop structure (and the str(i)
    namespace scheme) while keeping the signature unchanged.
    """
    g = Graph()

    # Load osmdb ===============================
    print( "Opening OSM-DB '%s'"%osmdb_filename )
    osmdb = OSMDB( osmdb_filename )
    compiler.load_streets_to_graph( g, osmdb, sys.stdout )

    # Load gtfsdb ==============================
    gtfsdb_filenames = [gtfsdb_filename]
    for i, gtfsdb_filename in enumerate(gtfsdb_filenames):
        gtfsdb = GTFSDatabase( gtfsdb_filename )
        # the loader wants plain ascii bytestrings for service ids
        service_ids = [x.encode("ascii") for x in gtfsdb.service_ids()]
        compiler.load_gtfsdb_to_boardalight_graph(g, str(i), gtfsdb, agency_id=agency_id, service_ids=service_ids)
        compiler.load_transit_street_links_to_graph( g, osmdb, gtfsdb, reporter=sys.stdout )

    # Export to graphdb ========================
    graphdb = GraphDatabase( graphdb_filename, overwrite=True )
    graphdb.populate( g, reporter=sys.stdout )
def make_native_ch(basename):
    """Build contraction hierarchies for <basename>.gdb and store the down/up graphs."""
    source = GraphDatabase( basename+".gdb" )
    graph = source.incarnate()

    opts = WalkOptions()
    opts.hill_reluctance = 20
    hierarchy = graph.get_contraction_hierarchies( opts )

    # persist each half of the hierarchy in its own database
    down_db = GraphDatabase( basename+".down.gdb", overwrite=True )
    down_db.populate( hierarchy.downgraph, reporter=sys.stdout )
    up_db = GraphDatabase( basename+".up.gdb", overwrite=True )
    up_db.populate( hierarchy.upgraph, reporter=sys.stdout )
def main(): usage = """usage: python gdb_link_gtfs_gtfs.py <graphdb_filename> <gtfsdb_filename> <range>""" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 3: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb_filename = args[1] range = float(args[2]) gtfsdb = GTFSDatabase(gtfsdb_filename) gdb = GraphDatabase(graphdb_filename) n_stops = gtfsdb.count_stops() for i, (stop_id, stop_name, stop_lat, stop_lon) in enumerate(gtfsdb.stops()): print "%d/%d %s" % (i, n_stops, stop_id), station_vertex_id = "sta-%s" % stop_id for link_stop_id, link_stop_name, link_stop_lat, link_stop_lon in gtfsdb.nearby_stops( stop_lat, stop_lon, range): if link_stop_id == stop_id: continue print ".", link_length = vincenty(stop_lat, stop_lon, link_stop_lat, link_stop_lon) link_station_vertex_id = "sta-%s" % link_stop_id gdb.add_edge(station_vertex_id, link_station_vertex_id, Street("link", link_length)) print ""
def main(): usage = """usage: python gdb_import_osm.py <graphdb_filename> <osmdb_filename>""" parser = OptionParser(usage=usage) parser.add_option( "-n", "--namespace", dest="namespace", default="osm", help="prefix all imported vertices with namespace string") parser.add_option( "-s", "--slog", action="append", dest="slog_strings", default=[], help= "specify slog for highway type, in highway_type:slog form. For example, 'motorway:10.5'" ) parser.add_option( "-p", "--profiledb", dest="profiledb_filename", default=None, help="specify profiledb to annotate streets with rise/fall data") (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit(-1) slogs = {} for slog_string in options.slog_strings: highway_type, slog_penalty = slog_string.split(":") slogs[highway_type] = float(slog_penalty) print "slog values: %s" % slogs graphdb_filename = args[0] osmdb_filename = args[1] print "importing osmdb '%s' into graphdb '%s'" % (osmdb_filename, graphdb_filename) profiledb = ProfileDB( options.profiledb_filename) if options.profiledb_filename else None osmdb = OSMDB(osmdb_filename) gdb = GraphDatabase(graphdb_filename, overwrite=False) gdb_import_osm(gdb, osmdb, options.namespace, slogs, profiledb) print "done"
def reincarnate_ch(basename):
    """Rebuild a ContractionHierarchy from its stored up/down graph databases."""
    up_graph = GraphDatabase( basename+".up.gdb" ).incarnate()
    down_graph = GraphDatabase( basename+".down.gdb" ).incarnate()
    return ContractionHierarchy(up_graph, down_graph)
def test_ch(self):
    """Round-trip a contraction hierarchy's upgraph through a GraphDatabase.

    Fixes: the scratch-db path is built with os.path.join (the original
    string concatenation dropped the path separator, creating the file
    outside the test directory), and any stale file is removed first so a
    previous failed run cannot pollute this one.
    """
    g = Graph()
    g.add_vertex( "A" )
    g.add_vertex( "B" )
    g.add_vertex( "C" )
    g.add_edge( "A", "B", Street( "foo", 10 ) )
    g.add_edge( "B", "C", Street( "bar", 10 ) )
    g.add_edge( "C", "A", Street( "baz", 10 ) )
    wo = WalkOptions()
    ch = g.get_contraction_hierarchies(wo)

    gdb_file = os.path.join(os.path.dirname(__file__), "unit_test.db")
    if os.path.exists(gdb_file):
        os.remove(gdb_file)
    gdb = GraphDatabase( gdb_file )
    gdb.populate( ch.upgraph )

    # reload from disk and check the contracted edge preserved its components
    laz = gdb.incarnate()
    combo = laz.edges[1]
    self.assertEqual( combo.payload.get(0).name, "baz" )
    self.assertEqual( combo.payload.get(1).name, "foo" )
    os.remove( gdb_file )
def test_ch(self):
    """Round-trip a contraction hierarchy's upgraph through a GraphDatabase.

    Fixes: the scratch-db path is built with os.path.join (the original
    string concatenation dropped the path separator, creating the file
    outside the test directory), and any stale file is removed first so a
    previous failed run cannot pollute this one.
    """
    g = Graph()
    g.add_vertex("A")
    g.add_vertex("B")
    g.add_vertex("C")
    g.add_edge("A", "B", Street("foo", 10))
    g.add_edge("B", "C", Street("bar", 10))
    g.add_edge("C", "A", Street("baz", 10))
    wo = WalkOptions()
    ch = g.get_contraction_hierarchies(wo)

    gdb_file = os.path.join(os.path.dirname(__file__), "unit_test.db")
    if os.path.exists(gdb_file):
        os.remove(gdb_file)
    gdb = GraphDatabase(gdb_file)
    gdb.populate(ch.upgraph)

    # reload from disk and check the contracted edge preserved its components
    laz = gdb.incarnate()
    combo = laz.edges[1]
    self.assertEqual(combo.payload.get(0).name, "baz")
    self.assertEqual(combo.payload.get(1).name, "foo")
    os.remove(gdb_file)
def link_osm_gtfs(db_conn_string, max_link_dist=150):
    # Connect every GTFS stop to nearby OSM intersection nodes with
    # bidirectional 'gtfs-osm link' Street edges.  A stop with no node within
    # max_link_dist falls back to its single closest candidate; a stop with no
    # candidates at all is reported in yellow.
    # NOTE(review): units of distance() are presumed metres -- confirm.
    conn = psycopg2.connect(db_conn_string)
    cursor = conn.cursor()
    gdb = GraphDatabase(db_conn_string)
    cursor.execute('SELECT stop_id, stop_lat, stop_lon FROM gtfs_stops')
    for i, (s_label, s_lat, s_lon) in enumerate(cursor.fetchall()):
        j = False  # becomes True once at least one in-range link is made
        range = 0.05 # might not be the best number
        # candidate nodes: intersections (endnode_refs > 1) inside a bounding box
        cursor.execute('''SELECT id, lat, lon FROM osm_nodes WHERE endnode_refs > 1 AND lat > %s AND lat < %s AND lon > %s AND lon < %s''',
                       ( s_lat-range, s_lat+range, s_lon-range, s_lon+range ))
        nodes = cursor.fetchall()
        dists = []
        for n_label, n_lat, n_lon in nodes:
            dists.append( distance(s_lat, s_lon, n_lat, n_lon) )
        # link every candidate that lies within max_link_dist, both directions
        for d in dists:
            if d < max_link_dist:
                j = True
                n_label, n_lat, n_lon = nodes[dists.index(d)]
                gdb.add_edge('sta-'+s_label, 'osm-'+n_label, Street('gtfs-osm link', d))
                gdb.add_edge('osm-'+n_label, 'sta-'+s_label, Street('gtfs-osm link', d))
        if not j and dists:
            # fallback mode
            d = min(dists)
            n_label, n_lat, n_lon = nodes[dists.index(d)]
            gdb.add_edge('sta-'+s_label, 'osm-'+n_label, Street('gtfs-osm link', d))
            gdb.add_edge('osm-'+n_label, 'sta-'+s_label, Street('gtfs-osm link', d))
        if not dists:
            print(colored('WARNING: failed linking %s! (%s, %s)' % (s_label, s_lat, s_lon), 'yellow'))
    gdb.commit()
    conn.commit()
    cursor.close()
def main(): if len(argv) < 2: print "usage: python import_ned.py graphdb_filename profiledb_filename" return graphdb_filename = argv[1] profiledb_filename = argv[2] gdb = GraphDatabase(graphdb_filename) profiledb = ProfileDB(profiledb_filename) n = gdb.num_edges() for i, (oid, vertex1, vertex2, edge) in enumerate(list(gdb.all_edges(include_oid=True))): if i % 500 == 0: print "%s/%s" % (i, n) if isinstance(edge, Street): rise, fall = get_rise_and_fall(profiledb.get(edge.name)) edge.rise = rise edge.fall = fall gdb.remove_edge(oid) gdb.add_edge(vertex1, vertex2, edge)
def main(): usage = """usage: python new_gdb.py [options] <graphdb_filename> """ parser = OptionParser(usage=usage) parser.add_option("-o", "--overwrite", action="store_true", dest="overwrite", default=False, help="overwrite any existing database") (options, args) = parser.parse_args() if len(args) != 1: parser.print_help() exit(-1) graphdb_filename = args[0] if not os.path.exists(graphdb_filename) or options.overwrite: print "Creating graph database '%s'" % graphdb_filename graphdb = GraphDatabase(graphdb_filename, overwrite=options.overwrite) else: print "Graph database '%s' already exists. Use -o to overwrite" % graphdb_filename
def main(): if len(argv) < 2: print "usage: python import_ned.py graphdb_filename profiledb_filename" return graphdb_filename = argv[1] profiledb_filename = argv[2] gdb = GraphDatabase( graphdb_filename ) profiledb = ProfileDB( profiledb_filename ) n = gdb.num_edges() for i, (oid, vertex1, vertex2, edge) in enumerate( list(gdb.all_edges(include_oid=True)) ): if i%500==0: print "%s/%s"%(i,n) if isinstance( edge, Street ): rise, fall = get_rise_and_fall( profiledb.get( edge.name ) ) edge.rise = rise edge.fall = fall gdb.remove_edge( oid ) gdb.add_edge( vertex1, vertex2, edge )
def __init__(self, graphdb_filename):
    """Open the backing graph database; the graph itself is not incarnated here."""
    self.graphdb = GraphDatabase( graphdb_filename )
class GraphCrawler(Servable):
    """HTTP-browsable view of a graph database: pages listing vertices, showing a
    single vertex with its walkable incoming/outgoing edges, and expounding
    individual edges.

    Fix: the vertex-link format strings were corrupted -- a bare "%s" inside
    them terminated the string literal, which is a syntax error.  The label is
    re-embedded as &quot;%s&quot; so the query parameter stays a quoted string
    (Servable-style handlers parse string parameters with quotes).
    """

    def __init__(self, graphdb_filename):
        self.graphdb = GraphDatabase( graphdb_filename )

    def vertices(self, like=None):
        """HTML list of vertex labels, optionally filtered with a SQL LIKE pattern."""
        if like:
            return "\n".join( ["<a href=\"/vertex?label=&quot;%s&quot;\">%s</a><br>"%(vl[0], vl[0])
                               for vl in self.graphdb.execute("SELECT label from vertices where label like ? order by label", (like,)) ])
        else:
            return "\n".join( ["<a href=\"/vertex?label=&quot;%s&quot;\">%s</a><br>"%(vl[0], vl[0])
                               for vl in self.graphdb.execute("SELECT label from vertices order by label") ])
    vertices.mime = "text/html"

    def vertex(self, label, currtime=None, hill_reluctance=1.5, walking_speed=0.85):
        """HTML page for one vertex: the default walk options plus every incoming and
        outgoing edge, walked with the requested hill reluctance and walking speed."""
        currtime = currtime or int(time.time())

        ret = []
        ret.append( "<h1>%s</h1>"%label )

        wo = WalkOptions()
        ret.append( "<h3>walk options</h3>" )
        ret.append( "<li>transfer_penalty: %s</li>"%wo.transfer_penalty )
        ret.append( "<li>turn_penalty: %s</li>"%wo.turn_penalty )
        ret.append( "<li>walking_speed: %s</li>"%wo.walking_speed )
        ret.append( "<li>walking_reluctance: %s</li>"%wo.walking_reluctance )
        ret.append( "<li>uphill_slowness: %s</li>"%wo.uphill_slowness )
        ret.append( "<li>downhill_fastness: %s</li>"%wo.downhill_fastness )
        ret.append( "<li>hill_reluctance: %s</li>"%wo.hill_reluctance )
        ret.append( "<li>max_walk: %s</li>"%wo.max_walk )
        ret.append( "<li>walking_overage: %s</li>"%wo.walking_overage )

        ret.append( "<h3>incoming from:</h3>" )
        for i, (vertex1, vertex2, edgetype) in enumerate( self.graphdb.all_incoming( label ) ):
            s1 = State(1,int(currtime))
            wo = WalkOptions()
            wo.hill_reluctance=hill_reluctance
            wo.walking_speed=walking_speed

            # walk the edge backwards to find the departure state at vertex1
            s0 = edgetype.walk_back( s1, wo )

            if s0:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;&currtime=%d\">%s@%d</a>"%(vertex1, s0.time, vertex1, s1.time)
            else:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;\">%s</a>"%(vertex1, vertex1)

            ret.append( "%s<br><pre> via %s (<a href=\"/incoming?label=&quot;%s&quot;&edgenum=%d\">details</a>)</pre>"%(toterm, cgi.escape(repr(edgetype)), vertex2, i) )

            if s0:
                ret.append( "<pre> %s</pre>"%cgi.escape(str(s0)) )

        ret.append( "<h3>outgoing to:</h3>" )
        for i, (vertex1, vertex2, edgetype) in enumerate( self.graphdb.all_outgoing( label ) ):
            s0 = State(1,int(currtime))
            wo = WalkOptions()
            wo.hill_reluctance=hill_reluctance
            wo.walking_speed=walking_speed

            # walk the edge forwards to find the arrival state at vertex2
            s1 = edgetype.walk( s0, wo )

            if s1:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;&currtime=%d\">%s@%d</a>"%(vertex2, s1.time, vertex2, s1.time)
            else:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;\">%s</a>"%(vertex2, vertex2)

            ret.append( "%s<br><pre> via %s (<a href=\"/outgoing?label=&quot;%s&quot;&edgenum=%d\">details</a>)</pre>"%(toterm, cgi.escape(repr(edgetype)), vertex1, i) )

            if s1:
                ret.append( "<pre> %s</pre>"%cgi.escape(str(s1)) )

        wo.destroy()
        return "".join(ret)
    vertex.mime = "text/html"

    def outgoing(self, label, edgenum):
        """Detailed expansion of the edgenum'th outgoing edge of a vertex."""
        all_outgoing = list( self.graphdb.all_outgoing( label ) )
        fromv, tov, edge = all_outgoing[edgenum]
        return edge.expound()

    def incoming(self, label, edgenum):
        """Detailed expansion of the edgenum'th incoming edge of a vertex."""
        all_incoming = list( self.graphdb.all_incoming( label ) )
        fromv, tov, edge = all_incoming[edgenum]
        return edge.expound()

    def str(self):
        return str(self.graphdb)
from graphserver.core import Graph, Street, State, WalkOptions
from PIL import Image
from multiprocessing import Pool

# Run searches in US/Pacific local time so schedule lookups line up with TriMet's feed.
os.environ['TZ'] = 'US/Pacific'
time.tzset()

# fixed departure time for this experiment
t0s = "Mon May 17 08:50:00 2010"
t0t = time.strptime(t0s)
d0s = time.strftime('%a %b %d %Y', t0t)
t0 = time.mktime(t0t)
print 'search date: ', d0s
print 'search time: ', time.ctime(t0), t0

# open the transit, graph, and street databases and incarnate the graph
gtfsdb = GTFSDatabase ('./trimet.gtfsdb')
gdb = GraphDatabase ('./test.gdb' )
osmdb = OSMDB ('./testgrid.osmdb' )
g = gdb.incarnate ()

# FOOT - would be better if i could specify 0 boardings not 0 transfers
wo = WalkOptions()
wo.max_walk = 2000
wo.walking_overage = 0.0
wo.walking_speed = 1.0 # trimet uses 0.03 miles / 1 minute - but it uses straight line distance as well
wo.transfer_penalty = 99999
wo.walking_reluctance = 1
wo.max_transfers = 0 # make much higher?
wo.transfer_slack = 60 * 5
wo_foot = wo
def __init__(self, graphdb_filename, vertex_events, edge_events, vertex_reverse_geocoders):
    """Incarnate the routing graph from its database and keep the event/geocoder hooks."""
    self.graph = GraphDatabase( graphdb_filename ).incarnate()
    self.vertex_events = vertex_events
    self.edge_events = edge_events
    self.vertex_reverse_geocoders = vertex_reverse_geocoders
def __init__(self, graphdb_filename, event_dispatch):
    """Load the graph into memory and remember the event-dispatch table."""
    db = GraphDatabase( graphdb_filename )
    self.graph = db.incarnate()
    self.event_dispatch = event_dispatch
def main(count):
    # Route up to `count` station-to-station trips over the Boston street
    # network and print the resulting geometry of each routed path.
    print 'Loading boston.osmdb'
    nodedb = osmdb.OSMDB(DATA_DIR + 'boston.osmdb')
    print 'Importing Boston street network...'
    gdb = GraphDatabase(DATA_DIR + 'boston.gdb')
    graph = gdb.incarnate()
    print 'Importing trip network...'
    tripdb = osmdb.OSMDB(DATA_DIR + 'trip_data.db')
    stime = time()
    wo = WalkOptions()
    cursor = tripdb.get_cursor()
    tripcount = 0
    # For each station
    for tnode in tripdb.nodes():
        lat1 = float(tnode[2])
        lng1 = float(tnode[3])
        # find origin node on the street network
        orig = nodedb.nearest_node(lat1, lng1)
        # get all trips departing this station
        tedges = cursor.execute("select * from edges where start_nd = ?", [tnode[0]])
        # For each trip
        for tedge in tedges:
            dnode = tripdb.node(tedge[3])
            lat2 = float(dnode[2])
            lng2 = float(dnode[3])
            if lat2 == lat1 and lng2 == lng1:
                # Do not route something that ends where it begins
                print 'Begin and end node are the same.'
            else:
                # find the destination node on the street network
                dest = nodedb.nearest_node(lat2, lng2)
                # route!
                spt = graph.shortest_path_tree('osm-' + orig[0], 'osm-' + dest[0], State(1, stime), wo)
                # get the path vertices and edges
                pvert, pedges = spt.path('osm-' + dest[0])
                # convert the results to geometries
                allgeom = []
                for e in pedges:
                    dbedge = nodedb.edge(e.payload.name)
                    # edges traversed against their source direction get reversed geometry
                    if e.payload.reverse_of_source:
                        allgeom.extend(reversed(dbedge[5]))
                    else:
                        allgeom.extend(dbedge[5])
                print allgeom
                tripcount += 1
                # NOTE(review): indentation reconstructed -- it appears only
                # successfully routed trips count toward the limit; confirm.
                if tripcount >= count:
                    break
        if tripcount >= count:
            break
def __init__(self, graphdb_filename):
    """Keep a handle to the graph database; the graph itself is not incarnated here."""
    self.graphdb = GraphDatabase(graphdb_filename)
def make_native_ch(basename):
    """Build contraction hierarchies for <basename>.gdb and store the down/up graphs."""
    graph = GraphDatabase(basename + ".gdb").incarnate()

    walk_opts = WalkOptions()
    walk_opts.hill_reluctance = 20
    hierarchy = graph.get_contraction_hierarchies(walk_opts)

    # persist each half of the hierarchy in its own database
    down_db = GraphDatabase(basename + ".down.gdb", overwrite=True)
    down_db.populate(hierarchy.downgraph, reporter=sys.stdout)
    up_db = GraphDatabase(basename + ".up.gdb", overwrite=True)
    up_db.populate(hierarchy.upgraph, reporter=sys.stdout)
def test_basic(self):
    """Populate a GraphDatabase from a two-vertex graph and verify its contents
    survive a round trip through incarnate().

    Fix: the scratch-db path is built with os.path.join; the original string
    concatenation dropped the path separator and created the file outside the
    test directory.
    """
    g = Graph()
    g.add_vertex("A")
    g.add_vertex("B")
    g.add_edge("A", "B", Link())
    g.add_edge("A", "B", Street("foo", 20.0))

    gdb_file = os.path.join(os.path.dirname(__file__), "unit_test.db")
    if os.path.exists(gdb_file):
        os.remove(gdb_file)
    gdb = GraphDatabase(gdb_file)
    gdb.populate(g)

    list(gdb.execute("select * from resources"))
    assert "A" in list(gdb.all_vertex_labels())
    assert "B" in list(gdb.all_vertex_labels())
    assert glen(gdb.all_edges()) == 2
    assert glen(gdb.all_outgoing("A")) == 2
    assert glen(gdb.all_outgoing("B")) == 0
    assert glen(gdb.all_incoming("A")) == 0
    assert glen(gdb.all_incoming("B")) == 2
    assert glen(gdb.resources()) == 0
    assert gdb.num_vertices() == 2
    assert gdb.num_edges() == 2

    g.destroy()
    # round trip: rebuild the graph from the database and re-check everything
    g = gdb.incarnate()

    list(gdb.execute("select * from resources"))
    assert "A" in list(gdb.all_vertex_labels())
    assert "B" in list(gdb.all_vertex_labels())
    assert glen(gdb.all_edges()) == 2
    assert glen(gdb.all_outgoing("A")) == 2
    assert glen(gdb.all_outgoing("B")) == 0
    assert glen(gdb.all_incoming("A")) == 0
    assert glen(gdb.all_incoming("B")) == 2
    assert glen(gdb.resources()) == 0
    assert gdb.num_vertices() == 2
    assert gdb.num_edges() == 2

    os.remove(gdb_file)
class GraphCrawler(Servable):
    """HTTP-browsable view of a graph database: pages listing vertices, showing a
    single vertex with its walkable incoming/outgoing edges, and expounding
    individual edges.

    Fix: the vertex-link format strings were corrupted -- a bare "%s" inside
    them terminated the string literal, which is a syntax error.  The label is
    re-embedded as &quot;%s&quot; so the query parameter stays a quoted string
    (Servable-style handlers parse string parameters with quotes).
    """

    def __init__(self, graphdb_filename):
        self.graphdb = GraphDatabase(graphdb_filename)

    def vertices(self, like=None):
        """HTML list of vertex labels, optionally filtered with a SQL LIKE pattern."""
        if like:
            return "\n".join([
                "<a href=\"/vertex?label=&quot;%s&quot;\">%s</a><br>" % (vl[0], vl[0])
                for vl in self.graphdb.execute(
                    "SELECT label from vertices where label like ? order by label",
                    (like, ))
            ])
        else:
            return "\n".join([
                "<a href=\"/vertex?label=&quot;%s&quot;\">%s</a><br>" % (vl[0], vl[0])
                for vl in self.graphdb.execute(
                    "SELECT label from vertices order by label")
            ])
    vertices.mime = "text/html"

    def vertex(self, label, currtime=None, hill_reluctance=1.5, walking_speed=0.85):
        """HTML page for one vertex: the default walk options plus every incoming and
        outgoing edge, walked with the requested hill reluctance and walking speed."""
        currtime = currtime or int(time.time())

        ret = []
        ret.append("<h1>%s</h1>" % label)

        wo = WalkOptions()
        ret.append("<h3>walk options</h3>")
        ret.append("<li>transfer_penalty: %s</li>" % wo.transfer_penalty)
        ret.append("<li>turn_penalty: %s</li>" % wo.turn_penalty)
        ret.append("<li>walking_speed: %s</li>" % wo.walking_speed)
        ret.append("<li>walking_reluctance: %s</li>" % wo.walking_reluctance)
        ret.append("<li>uphill_slowness: %s</li>" % wo.uphill_slowness)
        ret.append("<li>downhill_fastness: %s</li>" % wo.downhill_fastness)
        ret.append("<li>hill_reluctance: %s</li>" % wo.hill_reluctance)
        ret.append("<li>max_walk: %s</li>" % wo.max_walk)
        ret.append("<li>walking_overage: %s</li>" % wo.walking_overage)

        ret.append("<h3>incoming from:</h3>")
        for i, (vertex1, vertex2, edgetype) in enumerate(self.graphdb.all_incoming(label)):
            s1 = State(1, int(currtime))
            wo = WalkOptions()
            wo.hill_reluctance = hill_reluctance
            wo.walking_speed = walking_speed

            # walk the edge backwards to find the departure state at vertex1
            s0 = edgetype.walk_back(s1, wo)

            if s0:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;&currtime=%d\">%s@%d</a>" % (
                    vertex1, s0.time, vertex1, s1.time)
            else:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;\">%s</a>" % (
                    vertex1, vertex1)

            ret.append(
                "%s<br><pre> via %s (<a href=\"/incoming?label=&quot;%s&quot;&edgenum=%d\">details</a>)</pre>"
                % (toterm, cgi.escape(repr(edgetype)), vertex2, i))

            if s0:
                ret.append("<pre> %s</pre>" % cgi.escape(str(s0)))

        ret.append("<h3>outgoing to:</h3>")
        for i, (vertex1, vertex2, edgetype) in enumerate(self.graphdb.all_outgoing(label)):
            s0 = State(1, int(currtime))
            wo = WalkOptions()
            wo.hill_reluctance = hill_reluctance
            wo.walking_speed = walking_speed

            # walk the edge forwards to find the arrival state at vertex2
            s1 = edgetype.walk(s0, wo)

            if s1:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;&currtime=%d\">%s@%d</a>" % (
                    vertex2, s1.time, vertex2, s1.time)
            else:
                toterm = "<a href=\"/vertex?label=&quot;%s&quot;\">%s</a>" % (
                    vertex2, vertex2)

            ret.append(
                "%s<br><pre> via %s (<a href=\"/outgoing?label=&quot;%s&quot;&edgenum=%d\">details</a>)</pre>"
                % (toterm, cgi.escape(repr(edgetype)), vertex1, i))

            if s1:
                ret.append("<pre> %s</pre>" % cgi.escape(str(s1)))

        wo.destroy()
        return "".join(ret)
    vertex.mime = "text/html"

    def outgoing(self, label, edgenum):
        """Detailed expansion of the edgenum'th outgoing edge of a vertex."""
        all_outgoing = list(self.graphdb.all_outgoing(label))
        fromv, tov, edge = all_outgoing[edgenum]
        return edge.expound()

    def incoming(self, label, edgenum):
        """Detailed expansion of the edgenum'th incoming edge of a vertex."""
        all_incoming = list(self.graphdb.all_incoming(label))
        fromv, tov, edge = all_incoming[edgenum]
        return edge.expound()

    def str(self):
        return str(self.graphdb)
import json except ImportError: import simplejson as json graphdb_filename = "sanfrancisco.gdb" osmdb_filename = "sanfrancisco.osmdb" munistops_filename = "munistops.json" munistopsdb_filename = "munistops.db" starttime = 0 lat_adj_tolerance = 0.002 lon_adj_tolerance = 0.002 walk_adj_tolerance = 180 # 3 minutes munistops = json.load(file(munistops_filename)) graphdb = GraphDatabase( graphdb_filename ) graph = graphdb.incarnate() osmdb = OSMDB( osmdb_filename ) try: os.remove(munistopsdb_filename) except OSError: pass munistopsdb = sqlite3.connect(munistopsdb_filename) def main(): set_up_munistopsdb_schema() pairs = 0 for start_stop_id, end_stop_id in adjacent_muni_stops_by_coords():
def main():
    """Command-line entry point for gst_process.

    Parses the options, reads the configuration file named in the single
    positional argument, validates it, and then runs whichever stages were
    requested: base-data import, routing-data import, shortest-path
    calculation, and CSV export.
    """
    from optparse import OptionParser
    usage = """Usage: python gst_process <configuration file>

See the documentation for layout of the config file."""
    parser = OptionParser(usage=usage)
    parser.add_option("-b", "--import-base", action="store_true",
                      help="imports GTFS and OSM data into the database",
                      dest="import_base", default=False)
    parser.add_option("-r", "--import-routes", action="store_true",
                      help="imports routing data into the database",
                      dest="import_routes", default=False)
    parser.add_option("-i", "--import-all", action="store_true",
                      help="imports GTFS, OSM and routing data into the database",
                      dest="import_all", default=False)
    parser.add_option("-c", "--calculate", action="store_true",
                      help="calculates shortest paths",
                      dest="calculate", default=False)
    parser.add_option("-e", "--export", action="store_true",
                      help="exports the calculated paths as CSV-files",
                      dest="export", default=False)

    (options, args) = parser.parse_args()

    if DEBUG:
        print(options)

    if len(args) != 1:
        parser.print_help()
        exit(-1)

    try:
        configuration, psql_connect_string = read_config(args[0])
    except Exception:  # narrowed from a bare except; re-raised below in DEBUG mode
        print(colored('ERROR: failed reading the configuration file', 'red'))
        if DEBUG:
            raise
        parser.print_help()
        exit(-1)

    valide = validate_input(configuration, psql_connect_string, options)
    if not valide:
        parser.print_help()
        exit(-1)

    graph = None

    if options.import_base or options.import_all:
        print('Importing base data...')
        build_base_data(psql_connect_string, configuration['osm-data'],
                        configuration['transit-feed'])

    if options.import_routes or options.import_all:
        print('Importing routing data...')
        graph = GraphDatabase(psql_connect_string).incarnate()
        build_route_data(graph, psql_connect_string, configuration['times'],
                         configuration['points'], configuration['routes'])

    if options.calculate:
        print('Calculating shortest paths...')
        # only create tables if some importing was done
        # NOTE(review): create_tables is computed but never used in this
        # function -- confirm whether it should be passed to calculate_routes.
        create_tables = options.import_all or options.import_base or options.import_routes
        if not graph:
            graph = GraphDatabase(psql_connect_string).incarnate()
        start = time.time()
        calculate_routes(graph, psql_connect_string, configuration,
                         num_processes=configuration['parallel-calculations'])
        cprint('total calculation time: %s'
               % utils.seconds_time_string(time.time() - start), attrs=['bold'])
        try:
            graph.destroy()
        except Exception:
            # best effort: freeing the native graph must not abort the run
            pass

    if options.export:
        print('Exporting paths...')
        export_results(psql_connect_string, configuration['results'],
                       configuration['result-details'])

    print('DONE')
def __init__(self, ch_basename, osmdb_filename, profiledb_filename): graphdb = GraphDatabase(graphdb_filename) self.osmdb = OSMDB(osmdb_filename) self.profiledb = ProfileDB(profiledb_filename) self.ch = reincarnate_ch(ch_basename) self.shortcut_cache = ShortcutCache(ch_basename + ".scc")
print "Increments lat %f, lon %f" % (lat_increment, lon_increment) print "Geocoder range", geocoder_range print "Using time", dt.ctime() print "Bounding box tlbr: %f, %f, %f, %f" % (lat_start, lon_start, lat_stop, lon_stop) print "Checking %d routes" % (lat_grid * lon_grid) minimum = sys.maxint maximum = -sys.maxint - 1 min_lon = sys.maxint max_lon = -sys.maxint - 1 min_lat = sys.maxint max_lat = -sys.maxint - 1 gdb = GraphDatabase('king.highway.gdb') graph = gdb.incarnate() rows = [] #missing_value = None missing_value = 50000 for i in xrange(0, lat_grid): row = [] print "Row %d" % i lat = lat_start + (i * lat_increment) if lat < min_lat:
def main(): usage = """usage: python gdb_import_osm.py <graphdb_filename> <osmdb_filename>""" parser = OptionParser(usage=usage) parser.add_option( "-n", "--namespace", dest="namespace", default="osm", help="prefix all imported vertices with namespace string") parser.add_option( "-s", "--slog", action="append", dest="slog_strings", default=[], help= "specify slog for highway type, in highway_type:slog form. For example, 'motorway:10.5'" ) parser.add_option( "-p", "--profiledb", dest="profiledb_filename", default=None, help="specify profiledb to annotate streets with rise/fall data") parser.add_option( "-c", "--slog_config", dest="slog_config", default=None, metavar="CONFIG.yaml", help="file containing slog parameters for highways, cycleways, etc") (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit(-1) slogs = {} slog_config = {} if options.slog_config: slog_config = yaml.load(open(options.slog_config).read()) for highway_type, slog_penalty in slog_config.get('slogs', {}).items(): slogs[highway_type] = float(slog_penalty) for slog_string in options.slog_strings: highway_type, slog_penalty = slog_string.split(":") slogs[highway_type] = float(slog_penalty) print "slog values: %s" % slogs slog_config['slogs'] = slogs if slog_config.get('slog_function'): slog_config['slog_function'] = import_object( slog_config['slog_function']) graphdb_filename = args[0] osmdb_filename = args[1] print "importing osmdb '%s' into graphdb '%s'" % (osmdb_filename, graphdb_filename) profiledb = ProfileDB( options.profiledb_filename) if options.profiledb_filename else None osmdb = OSMDB(osmdb_filename) gdb = GraphDatabase(graphdb_filename, overwrite=False) gdb_import_osm(gdb, osmdb, options.namespace, slog_config, profiledb) print "done"
print "Geocoder range", geocoder_range print "Using time", dt.ctime() print "Bounding box tlbr: %f, %f, %f, %f" % (lat_start, lon_start, lat_stop, lon_stop) print "Checking %d routes" % (lat_grid * lon_grid) minimum = sys.maxint maximum = -sys.maxint - 1 min_lon = sys.maxint max_lon = -sys.maxint - 1 min_lat = sys.maxint max_lat = -sys.maxint - 1 gdb = GraphDatabase('king.highway.gdb') graph = gdb.incarnate() rows = [] #missing_value = None missing_value = 50000 for i in xrange(0, lat_grid): row = [] print "Row %d" % i lat = lat_start + (i * lat_increment) if lat < min_lat:
def test_basic(self): g = Graph() g.add_vertex("A") g.add_vertex("B") g.add_edge("A", "B", Link()) g.add_edge("A", "B", Street("foo", 20.0)) gdb_file = os.path.dirname(__file__) + "unit_test.db" if os.path.exists(gdb_file): os.remove(gdb_file) gdb = GraphDatabase(gdb_file) gdb.populate(g) list(gdb.execute("select * from resources")) assert "A" in list(gdb.all_vertex_labels()) assert "B" in list(gdb.all_vertex_labels()) assert glen(gdb.all_edges()) == 2 assert glen(gdb.all_outgoing("A")) == 2 assert glen(gdb.all_outgoing("B")) == 0 assert glen(gdb.all_incoming("A")) == 0 assert glen(gdb.all_incoming("B")) == 2 assert glen(gdb.resources()) == 0 assert gdb.num_vertices() == 2 assert gdb.num_edges() == 2 g.destroy() g = gdb.incarnate() list(gdb.execute("select * from resources")) assert "A" in list(gdb.all_vertex_labels()) assert "B" in list(gdb.all_vertex_labels()) assert glen(gdb.all_edges()) == 2 assert glen(gdb.all_outgoing("A")) == 2 assert glen(gdb.all_outgoing("B")) == 0 assert glen(gdb.all_incoming("A")) == 0 assert glen(gdb.all_incoming("B")) == 2 assert glen(gdb.resources()) == 0 assert gdb.num_vertices() == 2 assert gdb.num_edges() == 2 os.remove(gdb_file)