def main(): usage = """usage: python gdb_link_gtfs_gtfs.py <graphdb_filename> <gtfsdb_filename> <range>""" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 3: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb_filename = args[1] range = float(args[2]) gtfsdb = GTFSDatabase( gtfsdb_filename ) gdb = GraphDatabase( graphdb_filename ) n_stops = gtfsdb.count_stops() for i, (stop_id, stop_name, stop_lat, stop_lon) in enumerate( gtfsdb.stops() ): print "%d/%d %s"%(i,n_stops,stop_id), station_vertex_id = "sta-%s"%stop_id for link_stop_id, link_stop_name, link_stop_lat, link_stop_lon in gtfsdb.nearby_stops( stop_lat, stop_lon, range ): if link_stop_id == stop_id: continue print ".", link_length = vincenty( stop_lat, stop_lon, link_stop_lat, link_stop_lon) link_station_vertex_id = "sta-%s"%link_stop_id gdb.add_edge( station_vertex_id, link_station_vertex_id, Street("link", link_length) ) print ""
def main(): usage = """usage: python gdb_link_osm_gtfs.py <graphdb_filename> <osmdb_filename> <gtfsdb_filename>""" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 3: parser.print_help() exit(-1) graphdb_filename = args[0] osmdb_filename = args[1] gtfsdb_filename = args[2] gtfsdb = GTFSDatabase( gtfsdb_filename ) osmdb = OSMDB( osmdb_filename ) gdb = GraphDatabase( graphdb_filename ) n_stops = gtfsdb.count_stops() for i, (stop_id, stop_name, stop_lat, stop_lon) in enumerate( gtfsdb.stops() ): print "%d/%d"%(i,n_stops) nd_id, nd_lat, nd_lon, nd_dist = osmdb.nearest_node( stop_lat, stop_lon ) station_vertex_id = "sta-%s"%stop_id osm_vertex_id = "osm-%s"%nd_id print station_vertex_id, osm_vertex_id gdb.add_edge( station_vertex_id, osm_vertex_id, Link() ) gdb.add_edge( osm_vertex_id, station_vertex_id, Link() )
def main(): usage = """usage: python dedupe.py <graphdb_filename>""" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 1: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb = GTFSDatabase( graphdb_filename ) query = """ SELECT count(*), monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date FROM calendar GROUP BY monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date""" duped_periods = gtfsdb.execute( query ) equivilants = [] for count, m,t,w,th,f,s,su,start_date,end_date in duped_periods: # no need to check for dupes if there's only one if count==1: continue #print count, m, t, w, th, f, s, su, start_date, end_date # get service_ids for this dow/start_date/end_date combination service_ids = [x[0] for x in list( gtfsdb.execute( "SELECT service_id FROM calendar where monday=? and tuesday=? and wednesday=? and thursday=? and friday=? and saturday=? and sunday=? and start_date=? and end_date=?", (m,t,w,th,f,s,su,start_date,end_date) ) ) ] # group by service periods with the same set of exceptions exception_set_grouper = {} for service_id in service_ids: exception_set = list(gtfsdb.execute( "SELECT date, exception_type FROM calendar_dates WHERE service_id=?", (service_id,) ) ) exception_set.sort() exception_set = tuple(exception_set) exception_set_grouper[exception_set] = exception_set_grouper.get(exception_set,[]) exception_set_grouper[exception_set].append( service_id ) # extend list of equivilants for i, exception_set_group in enumerate( exception_set_grouper.values() ): equivilants.append( ("%d%d%d%d%d%d%d-%s-%s-%d"%(m,t,w,th,f,s,su,start_date,end_date,i), exception_set_group) ) for new_name, old_names in equivilants: for old_name in old_names: print old_name, new_name c = gtfsdb.conn.cursor() c.execute( "UPDATE calendar SET service_id=? WHERE service_id=?", (new_name, old_name) ) c.execute( "UPDATE calendar_dates SET service_id=? WHERE service_id=?", (new_name, old_name) ) c.execute( "UPDATE trips SET service_id=? WHERE service_id=?", (new_name, old_name) ) gtfsdb.conn.commit() c.close()
class HeadwayBoardEvent:
    def __init__(self, gtfsdb_filename, timezone_name="America/Los_Angeles"):
        self.gtfsdb = GTFSDatabase(gtfsdb_filename)
        self.timezone_name = timezone_name

    @staticmethod
    def applies_to(vertex1, edge, vertex2):
        return edge is not None and isinstance(edge.payload, graphserver.core.HeadwayBoard)

    def __call__(self, vertex1, edge, vertex2, context):
        event_time = vertex2.state.time
        trip_id = vertex2.state.trip_id
        stop_id = vertex1.label.split("-")[-1]

        route_desc = "-".join(list( self.gtfsdb.execute( "SELECT routes.route_short_name, routes.route_long_name FROM routes, trips WHERE routes.route_id=trips.route_id AND trip_id=?", (trip_id,) ) )[0])
        stop_desc = list( self.gtfsdb.execute( "SELECT stop_name FROM stops WHERE stop_id = ?", (stop_id,) ) )[0][0]
        lat, lon = list( self.gtfsdb.execute( "SELECT stop_lat, stop_lon FROM stops WHERE stop_id = ?", (stop_id,) ) )[0]

        what = "Board the %s" % route_desc
        where = stop_desc
        when = "about %s" % str( TimeHelpers.unix_to_localtime(event_time, self.timezone_name) )
        geom = (lon, lat)
        return NarrativeEvent(what, where, when, geom)
class AlightEvent:
    def __init__(self, gtfsdb_filename, timezone_name="America/Los_Angeles"):
        self.gtfsdb = GTFSDatabase(gtfsdb_filename)
        self.timezone_name = timezone_name

    @staticmethod
    def applies_to(vertex1, edge, vertex2):
        return edge is not None and isinstance(edge.payload, graphserver.core.TripAlight)

    def __call__(self, vertex1, edge, vertex2, context):
        event_time = vertex1.state.time
        stop_id = vertex2.label.split("-")[-1]

        stop_desc = list( self.gtfsdb.execute( "SELECT stop_name FROM stops WHERE stop_id = ?", (stop_id,) ) )[0][0]
        lat, lon = list( self.gtfsdb.execute( "SELECT stop_lat, stop_lon FROM stops WHERE stop_id = ?", (stop_id,) ) )[0]

        what = "Alight"
        where = stop_desc
        when = str( TimeHelpers.unix_to_localtime(event_time, self.timezone_name) )
        geom = (lon, lat)
        return NarrativeEvent(what, where, when, geom)
def process_transit_graph(graphdb_filename, gtfsdb_filenames, osmdb_filename=None, profiledb_filename=None, agency_id=None, link_stations=False, slogs={}):
    g = Graph()

    if profiledb_filename:
        print( "Opening ProfileDB '%s'"%profiledb_filename )
        profiledb = ProfileDB( profiledb_filename )
    else:
        print( "No ProfileDB supplied" )
        profiledb = None

    if osmdb_filename:
        # Load osmdb ===============================
        print( "Opening OSM-DB '%s'"%osmdb_filename )
        osmdb = OSMDB( osmdb_filename )
        compiler.load_streets_to_graph( g, osmdb, profiledb, slogs, reporter=sys.stdout )

    # Load gtfsdb ==============================
    for i, gtfsdb_filename in enumerate(gtfsdb_filenames):
        gtfsdb = GTFSDatabase( gtfsdb_filename )
        service_ids = [x.encode("ascii") for x in gtfsdb.service_ids()]
        compiler.load_gtfsdb_to_boardalight_graph(g, str(i), gtfsdb, agency_id=agency_id, service_ids=service_ids)
        if osmdb_filename:
            compiler.load_transit_street_links_to_graph( g, osmdb, gtfsdb, reporter=sys.stdout )
        if link_stations:
            compiler.link_nearby_stops( g, gtfsdb )

    # Export to graphdb ========================
    graphdb = GraphDatabase( graphdb_filename, overwrite=True )
    graphdb.populate( g, reporter=sys.stdout )
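# Hedged usage sketch for process_transit_graph() above, following its own signature;
# the file names below are hypothetical placeholders, not files shipped with graphserver.
if __name__ == '__main__':
    process_transit_graph("combined.gdb", ["agency.gtfsdb"],
                          osmdb_filename="streets.osmdb",
                          link_stations=True)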
def importGtfsWrapper(gtfs_filename, db_conn_string):
    gdb = GraphDatabase( db_conn_string, overwrite=False )
    gtfsdb = GTFSDatabase( db_conn_string, overwrite=True )
    gtfsdb.load_gtfs( gtfs_filename )
    gdb_load_gtfsdb( gdb, 1, gtfsdb, gdb.get_cursor() )
def process_transit_graph(gtfsdb_filename, agency_id, graphdb_filename, link=False):
    gtfsdb = GTFSDatabase( gtfsdb_filename )

    g = Graph()
    service_ids = [x.encode("ascii") for x in gtfsdb.service_ids()]
    compiler.load_gtfsdb_to_boardalight_graph(g, gtfsdb, agency_id=agency_id, service_ids=service_ids)

    if link:
        compiler.link_nearby_stops( g, gtfsdb )

    graphdb = GraphDatabase( graphdb_filename, overwrite=True )
    graphdb.populate( g, reporter=sys.stdout )
class DescribeCrossingAtAlightEvent:
    def __init__(self, gtfsdb_filename, timezone_name="America/Los_Angeles"):
        self.gtfsdb = GTFSDatabase( gtfsdb_filename )
        self.timezone_name = timezone_name

    @staticmethod
    def applies_to(vertex1, edge, vertex2):
        # If the stop_sequence is the same before and after the TripAlight was crossed, it means the
        # algorithm crossed in the forward direction, because the stop_sequence doesn't get set on a
        # forward alight. If this is true then this is the appropriate time to describe the transit
        # trip that led to this alighting.
        return edge is not None \
               and isinstance(edge.payload, graphserver.core.TripAlight) \
               and vertex1.state.stop_sequence == vertex2.state.stop_sequence

    def __call__(self, vertex1, edge, vertex2, context):
        stop_sequence_of_boarding = vertex1.state.stop_sequence
        trip_id = vertex1.state.trip_id
        alighting_trip_id, alighting_time, alighting_stop_sequences = edge.payload.get_alighting_by_trip_id( trip_id )

        what = "Ride trip %s from stop_seq %s to stop_seq %s"%(trip_id, vertex1.state.stop_sequence, alighting_stop_sequences)
        where = None
        when = None
        geom = self.gtfsdb.shape_between( trip_id, vertex1.state.stop_sequence, alighting_stop_sequences )
        return NarrativeEvent(what, where, when, geom)
def main(): usage = """usage: python gdb_import_gtfs.py [options] <graphdb_filename> <gtfsdb_filename> [<agency_id>]""" parser = OptionParser(usage=usage) parser.add_option("-n", "--namespace", dest="namespace", default="0", help="agency namespace") parser.add_option("-m", "--maxtrips", dest="maxtrips", default=None, help="maximum number of trips to load") parser.add_option("-d", "--date", dest="sample_date", default=None, help="only load transit running on a given day. YYYYMMDD" ) (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit(-1) graphdb_filename = args[0] gtfsdb_filename = args[1] agency_id = args[2] if len(args)==3 else None print "importing from gtfsdb '%s' into graphdb '%s'"%(gtfsdb_filename, graphdb_filename) gtfsdb = GTFSDatabase( gtfsdb_filename ) gdb = GraphDatabase( graphdb_filename, overwrite=False ) maxtrips = int(options.maxtrips) if options.maxtrips else None gdb_load_gtfsdb( gdb, options.namespace, gtfsdb, gdb.get_cursor(), agency_id, maxtrips=maxtrips, sample_date=options.sample_date) gdb.commit() print "done"
def process_transit_street_graph(graphdb_filename, gtfsdb_filename, osmdb_filename, agency_id=None):
    g = Graph()

    # Load osmdb ===============================
    print( "Opening OSM-DB '%s'"%osmdb_filename )
    osmdb = OSMDB( osmdb_filename )
    compiler.load_streets_to_graph( g, osmdb, sys.stdout )

    # Load gtfsdb ==============================
    # the loop below expects a list of gtfsdb filenames; wrap the single argument
    gtfsdb_filenames = [gtfsdb_filename]
    for i, gtfsdb_filename in enumerate(gtfsdb_filenames):
        gtfsdb = GTFSDatabase( gtfsdb_filename )
        service_ids = [x.encode("ascii") for x in gtfsdb.service_ids()]
        compiler.load_gtfsdb_to_boardalight_graph(g, str(i), gtfsdb, agency_id=agency_id, service_ids=service_ids)
        compiler.load_transit_street_links_to_graph( g, osmdb, gtfsdb, reporter=sys.stdout )

    # Export to graphdb ========================
    graphdb = GraphDatabase( graphdb_filename, overwrite=True )
    graphdb.populate( g, reporter=sys.stdout )
def main(): gtfsdb = GTFSDatabase( "data/washingtondc.gtfsdb" ) osmdb = OSMDB( "data/washingtondc.osmdb" ) ll,bb,rr,tt = list(gtfsdb.execute( "SELECT min(stop_lon), min(stop_lat), max(stop_lon), max(stop_lat) FROM stops" ))[0] from prender import processing mr = processing.MapRenderer() mr.start(ll,bb,rr,tt,4000) #left,bottom,right,top,width mr.smooth() mr.strokeWeight(0.000001) mr.background(255,255,255) mr.stroke(128,128,128) render_osmdb(osmdb, mr) mr.stroke(0,0,0) render_gtfsdb(gtfsdb, mr) mr.saveLocal("map.png") mr.stop()
def ccp_save_cache(self):
    time_0 = time.time()
    log.debug('ccp_save_cache: loading the transit database')
    db_transit = GTFSDatabase(conf.transitdb_filename)
    # NOTE: Cannot cache edges, since they are C-objects. See usages of
    #       compiler.gtfsdb_to_edges(maxtrips). We can, however, at least
    #       count the edges....
    self.cache_edges(db_transit)
    log.debug('ccp_save_cache: making the transit graph link cache')
    self.cache_links(db_transit)
    log.debug('ccp_save_cache: done: %s'
              % (misc.time_format_elapsed(time_0),))
def load_make_graph_add_transit(self, qb):
    # Not calling base class fcn.
    # FIXME: What happens here on update? We reload all, don't we?
    # FIXME: For p2, only do this on load, not on update.
    # BUG nnnn: For p2, start new instance of route finder and then
    #           just change routed_ports to use that one, then kill
    #           the existing one.
    time_0 = time.time()
    usage_0 = None
    if conf.debug_mem_usage:
        usage_0 = mem_usage.get_usage_mb()
    log.debug('load: adding transit...')
    loaded = False
    # Load the transit network, maybe (if we have data for it).
    if conf.transitdb_filename:
        self.cache_reg = self.links_get_cache_reg(qb)
        log.debug('load: loading the transit database')
        db_transit = GTFSDatabase(conf.transitdb_filename)
        log.debug('load: making the transit graph')
        self.load_transit(qb, db_transit)
        # Link the two graphs
        log.debug('load: linking the two graphs')
        self.link_graphs(qb, db_transit)
        loaded = True
    # else, using Graphserver, but no public transit data to load.
    if loaded:
        log.info('load: added transit: in %s'
                 % (misc.time_format_elapsed(time_0),))
    else:
        # MAYBE: Let devs test without loading transit.
        raise GWIS_Error(
            'Unable to load route finder: no transit info found.')
    conf.debug_log_mem_usage(log, usage_0, 'tgraph.load / transit')
    return loaded
#!/usr/bin/python
import sys

# requires graphserver to be installed
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase

verbose = False

RADIUS = 2000       # meters
OBSTRUCTION = 1.3   # factor to expand straight-line distance
range_lat = RADIUS / 111111.111

if len(sys.argv) < 2:
    print 'usage: transfers.py infile.gtfsdb [verbose]'
    exit(1)

gtfsdb_file = sys.argv[1]
try:
    with open(gtfsdb_file) as f:
        db = GTFSDatabase(gtfsdb_file)
except IOError as e:
    print 'gtfsdb file "%s" cannot be opened' % gtfsdb_file
    exit(1)

if len(sys.argv) > 2 and sys.argv[2] == "verbose":
    verbose = True

# we are interested in all routes available at each stop
all_query = """select stops.stop_id, stops.stop_name, stops.stop_lat, stops.stop_lon, routes
from (select stop_id, group_concat(route_id, ',') as routes
      from (select distinct route_id, stop_id from trips, stop_times where trips.trip_id = stop_times.trip_id) as x
      group by stop_id) as y, stops
where y.stop_id = stops.stop_id;"""

near_query = """
select stop_id, stop_name, stop_lat, stop_lon from stops where
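# Illustrative sketch, an assumption rather than part of transfers.py: range_lat above
# converts the search RADIUS from meters to degrees of latitude; a matching longitude
# window would normally shrink by cos(latitude), since a degree of longitude spans fewer
# meters away from the equator. The function name is hypothetical.
import math

def lon_range_at(stop_lat, radius_m=2000.0):
    return radius_m / (111111.111 * math.cos(math.radians(stop_lat)))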
import os
import time
import random

from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from graphserver.graphdb import GraphDatabase
from graphserver.core import Graph, Street, State, WalkOptions

SAMPLE_SIZE = 200
SHOW_GS_ROUTE = True

os.environ['TZ'] = 'US/Pacific'
time.tzset()
t0s = "Mon May 17 08:50:00 2010"
t0t = time.strptime(t0s)
d0s = time.strftime('%a %b %d %Y', t0t)
t0 = time.mktime(t0t)
print 'search date: ', d0s
print 'search time: ', time.ctime(t0), t0

gtfsdb = GTFSDatabase('/Users/andrew/devel/data/trimet.gtfsdb')
gdb = GraphDatabase('/Users/andrew/devel/data/trimet.gdb')
g = gdb.incarnate()

station_labels = [s[0] for s in gtfsdb.stops()]

origins = station_labels[:]
destinations = station_labels[:]
random.shuffle(origins)
random.shuffle(destinations)
pairs = zip(origins, destinations)[:SAMPLE_SIZE]

wo = WalkOptions()
wo.max_walk = 2000
wo.walking_overage = 0.0
wo.walking_speed = 1.0  # trimet uses 0.03 miles / 1 minute - but it uses straight line distance as well
#!/usr/bin/python
import sys, struct

# requires graphserver to be installed
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase

if len(sys.argv) != 2:
    print 'usage: timetable.py inputfile.gtfsdb'
    exit(1)
else:
    gtfsdb_file = sys.argv[1]
    try:
        with open(gtfsdb_file) as f:
            db = GTFSDatabase(gtfsdb_file)
    except IOError as e:
        print 'gtfsdb file %s cannot be opened' % gtfsdb_file
        exit(1)

def stop_name(stop_id):
    result = list(db.execute('select stop_name from stops where stop_id = ?', (stop_id,)))
    return result[0][0]

def trip_info(trip_id):
    if trip_id == 'walk':
        return 'walk'
import numpy as np
import random
import time
import httplib

from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from graphserver.graphdb import GraphDatabase

TRIP_TIME = '08:00AM'
TRIP_DATE = '01-29-2010'
URL_FORMAT = '/ws/V1/trips/tripplanner/maxIntineraries/1/fromcoord/%s/tocoord/%s/date/%s/time/%s/walk/0.999/appId/6AC697CF5EB8719DB6F3AEF0B'

print 'search date: ', TRIP_DATE
print 'search time: ', TRIP_TIME

gtfsdb = GTFSDatabase('../data/pdx/trimet-20100117.gtfsdb')
npz = np.load('../data/pdx/trimet-20100117.od_matrix.npz')

station_labels = npz['station_labels']
matrix = npz['matrix'].astype(np.int32)

station_idx = dict(zip(station_labels, range(len(station_labels))))

origins = list(station_labels)
destinations = origins[:]  # copy 1 level
random.shuffle(origins)
random.shuffle(destinations)
pairs = zip(origins, destinations)

errors = []
def run(self):
    self.gtfsdb = GTFSDatabase(self.gtfsdb)
    self.gdb = GraphDatabase(self.gdb)

    # Calculate an origin-destination matrix for the graph's stations
    print "Loading Graphserver DB..."
    self.emit(QtCore.SIGNAL("say(QString)"), QtCore.QString("Loading SQLite Graphserver graph..."))
    g = self.gdb.incarnate()

    # Set up distance-preserving projection system
    # Make a grid over the study area and save its geographic coordinates
    MARGIN = 8000  # meters beyond all stations, diagonally
    min_lon, min_lat, max_lon, max_lat = self.gtfsdb.extent()
    geod = pyproj.Geod(ellps="WGS84")
    min_lon, min_lat, arc_dist = geod.fwd(min_lon, min_lat, 180 + 45, MARGIN)
    max_lon, max_lat, arc_dist = geod.fwd(max_lon, max_lat, 45, MARGIN)
    proj = pyproj.Proj(proj="sinu", ellps="WGS84")
    min_x, min_y = proj(min_lon, min_lat)
    proj = pyproj.Proj(proj="sinu", ellps="WGS84", lon_0=min_lon, y_0=-min_y)  # why doesn't m parameter work for scaling by 100?
    grid_dim = array(proj(max_lon, max_lat), dtype=int32) / 100
    max_x, max_y = grid_dim
    print "\nMaking grid with dimensions: ", max_x, max_y
    self.emit(QtCore.SIGNAL("say(QString)"), QtCore.QString("Making %i by %i grid..." % (max_x, max_y)))

    # later, use reshape/flat to switch between 1d and 2d array representation
    grid_latlon = empty((max_x, max_y, 2), dtype=float32)
    for y in range(0, max_y):
        self.emit(QtCore.SIGNAL("progress(int, int)"), y, max_y)
        for x in range(0, max_x):
            # inverse project meters to lat/lon
            grid_latlon[x, y] = proj(x * 100, y * 100, inverse=True)

    station_vertices = [v for v in g.vertices if v.label[0:4] == "sta-"]
    station_labels = [v.label for v in station_vertices]
    n_stations = len(station_vertices)

    print "Finding station coordinates..."
    self.emit(QtCore.SIGNAL("say(QString)"), QtCore.QString("Projecting station coordinates..."))
    station_coords = empty((n_stations, 2), dtype=float32)
    for i, label in enumerate(station_labels):
        stop_id, stop_name, lat, lon = self.gtfsdb.stop(label[4:])
        station_coords[i] = proj(lon, lat)
        if i % 20 == 0:
            self.emit(QtCore.SIGNAL("progress(int, int)"), i, n_stations)
    station_coords /= 100

    # ELIMINATE STATIONS WITH SAME INTEGRAL COORDINATES
    # self.emit( QtCore.SIGNAL( 'say(QString)' ), QtCore.QString( 'Eliminating equivalent stations...' ) )
    # while len(station_coords) > 0 :
    #     coord =
    #     mask = station_coords != station_coords[i]
    #     station_coords = station_coords[mask]

    # newer version follows
    # self.emit( QtCore.SIGNAL( 'say(QString)' ), QtCore.QString( 'Eliminating equivalent stations...' ) )
    # station_labels = np.array(station_labels)
    # station_coords_new = []
    # station_labels_new = []
    # while len(station_coords) > 0 :
    #     coord = np.round(station_coords[0])
    #     minIdx = np.argmin(np.sum(np.abs(station_coords - coord), axis=1))
    #     station_labels_new.append(station_labels[minIdx])
    #     station_coords_new.append(station_coords[minIdx])
    #     mask = np.any(np.round(station_coords) != coord, axis=1)
    #     #print mask
    #     #print len(station_coords)
    #     #print coord
    #     #print station_coords[np.logical_not(mask)]
    #     station_coords = station_coords[mask][:]
    #     station_labels = station_labels[mask][:]
    #     self.emit( QtCore.SIGNAL( 'progress(int, int)' ), n_stations - len(station_coords_new), n_stations )
    #
    # station_labels = station_labels_new
    # station_coords = station_coords_new
    # station_vertices = [g.get_vertex(slabel) for slabel in station_labels_new]
    # n_stations = len(station_labels)
    # print len(station_labels), len(station_coords), len(station_vertices)

    print "Making OD matrix..."
os.environ["TZ"] = "US/Pacific" time.tzset() t0s = "Tue Mar 09 08:00:00 2010" t0t = time.strptime(t0s) d0s = time.strftime("%a %b %d %Y", t0t) t0 = int(time.mktime(t0t)) print "search date: ", d0s print "search time: ", time.ctime(t0), t0 wo = WalkOptions() wo.max_walk = 20000 wo.walking_overage = 0.1 wo.walking_speed = 1 # trimet uses 0.03 miles / 1 minute wo.transfer_penalty = 60 * 10 wo.walking_reluctance = 2 wo.max_transfers = 40 wo.transfer_slack = 60 * 4 matrix = zeros( (n_stations, n_stations), dtype=float ) # dtype could be uint16 except that there are inf's ---- why? colortable = [QtGui.QColor(i, i, i).rgb() for i in range(256)] colortable[254] = QtGui.QColor(050, 128, 050).rgb() colortable[255] = QtGui.QColor(255, 050, 050).rgb() matrixImage = QtGui.QImage(max_x, max_y, QtGui.QImage.Format_Indexed8) matrixImage.fill(0) matrixImage.setColorTable(colortable) for origin_idx in range(n_stations): sys.stdout.write("\rProcessing %i / %i ..." % (origin_idx, n_stations)) sys.stdout.flush() self.emit( QtCore.SIGNAL("say(QString)"), QtCore.QString("Making OD matrix (station %i/%i)..." % (origin_idx, n_stations)), ) self.emit(QtCore.SIGNAL("progress(int, int)"), origin_idx, n_stations) origin_label = station_labels[origin_idx] # g.spt_in_place(origin_label, None, State(1, t0), wo) spt = g.shortest_path_tree(origin_label, None, State(1, t0), wo) for dest_idx in range(n_stations): dest_label = station_labels[dest_idx] dest_vertex = spt.get_vertex(dest_label) # first board time should be subtracted here # if dest_vertex.payload is None : if dest_vertex is None: print "Unreachable vertex. Set to infinity.", dest_idx, dest_label delta_t = inf else: # delta_t = dest_vertex.best_state.time - t0 bs = dest_vertex.best_state delta_t = bs.time - t0 - bs.initial_wait if delta_t < 0: print "Negative trip time; set to 0." delta_t = 0 matrix[origin_idx, dest_idx] = delta_t # sys.stdout.write( '%i %i\n' % (delta_t, dest_vertex.payload.initial_wait) ) # sys.stdout.flush() # time.sleep(0.5) if dest_idx == origin_idx - 1: color = 254 elif dest_idx == origin_idx: color = 255 else: color = 253 - delta_t * 3 / 60 if color < 0: color = 0 coord = station_coords[dest_idx] x = coord[0] y = coord[1] if color >= 254: for x2 in range(x - 1, x + 2): for y2 in range(y - 1, y + 2): matrixImage.setPixel(x2, y2, color) else: matrixImage.setPixel(x, y, color) self.emit(QtCore.SIGNAL("display(QImage)"), matrixImage) spt.destroy() # time.sleep(1) print x * y, "points, done." self.emit(QtCore.SIGNAL("say(QString)"), QtCore.QString("Saving as gzipped numpy ndarrays...")) savez( "od_matrix.npz", station_labels=station_labels, station_coords=station_coords, grid_dim=grid_dim, grid_latlon=grid_latlon, matrix=matrix, )
def __init__(self, gtfsdb_filename, timezone_name="America/Los_Angeles"):
    self.gtfsdb = GTFSDatabase(gtfsdb_filename)
    self.timezone_name = timezone_name
import os
import time

from graphserver.core import Graph, Street, State, WalkOptions
from graphserver.graphdb import GraphDatabase
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from graphserver.ext.osm.osmdb import OSMDB  # assumed import path for graphserver's OSM extension
from PIL import Image

SAMPLE_SIZE = 40
SHOW_GS_ROUTE = True

os.environ['TZ'] = 'US/Pacific'
time.tzset()
t0s = "Mon May 17 08:50:00 2010"
t0t = time.strptime(t0s)
d0s = time.strftime('%a %b %d %Y', t0t)
t0 = time.mktime(t0t)
print 'search date: ', d0s
print 'search time: ', time.ctime(t0), t0

gtfsdb = GTFSDatabase('/Users/andrew/devel/data/trimet.gtfsdb')
gdb = GraphDatabase('/Users/andrew/devel/data/test.gdb')
osmdb = OSMDB('/Users/andrew/devel/data/test.osmdb')
g = gdb.incarnate()

wo = WalkOptions()
wo.max_walk = 2000
wo.walking_overage = 0.0
wo.walking_speed = 1.0  # trimet uses 0.03 miles / 1 minute - but it uses straight line distance as well
wo.transfer_penalty = 99999
wo.walking_reluctance = 1
wo.max_transfers = 0
wo.transfer_slack = 60 * 5
wo_foot = wo

wo = WalkOptions()
#!/usr/bin/python
import sys, struct
from struct import Struct

# requires graphserver to be installed
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from datetime import timedelta, date

gtfsdb_file = sys.argv[1]
try:
    with open(gtfsdb_file) as f:
        db = GTFSDatabase(gtfsdb_file)
except IOError as e:
    print 'gtfsdb file %s cannot be opened' % gtfsdb_file
    exit(1)

# display number of active services to spot the usable period for this feed
dfrom, dto = db.date_range()
d = dfrom
while (d <= dto):
    active_sids = db.service_periods(d)
    print d, len(active_sids)
    d += timedelta(days=1)
import numpy as np
import pylab as pl
import random
import time
import httplib

from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from graphserver.graphdb import GraphDatabase

TRIP_TIME = '08:00AM'
TRIP_DATE = '01-29-2010'
URL_FORMAT = '/ws/V1/trips/tripplanner/maxIntineraries/1/fromcoord/%s/tocoord/%s/date/%s/time/%s/appId/6AC697CF5EB8719DB6F3AEF0B'

gtfsdb = GTFSDatabase('../data/pdx/trimet-20100117.gtfsdb')
npz = np.load('../data/pdx/trimet-20100117.od_matrix.npz')

station_labels = npz['station_labels']
station_coords = npz['station_coords']
grid_dim = npz['grid_dim']
matrix = npz['matrix'].astype(np.int32)
matrix = (matrix + matrix.T) / 2

r = np.load('results/pdx-5d-1000i/result.npy')

station_idx = dict(zip(station_labels, range(len(station_labels))))

origins = list(zip(station_labels, np.round(station_coords).astype(np.int32)))
destinations = origins[:]  # copy 1 level
#!/usr/bin/python
import sys, struct

# requires graphserver to be installed
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase

if len(sys.argv) != 2:
    print "usage: datecheck.py inputfile.gtfsdb"
    exit(1)
else:
    gtfsdb_file = sys.argv[1]
    try:
        with open(gtfsdb_file) as f:
            db = GTFSDatabase(gtfsdb_file)
    except IOError as e:
        print "gtfsdb file %s cannot be opened" % gtfsdb_file
        exit(1)

# check that all routes gs reports running are actually running on each day
for line in sys.stdin.readlines():
    trip_id, fromid, fromtime, toid, totime = line.split()
    if trip_id == "walk":
        continue
    service_id = list(db.execute("select service_id from trips where trip_id = ?", (trip_id,)))[0][0]
    print trip_id, "___", service_id
    # and date > 20130415 and date < 20130420
    for line in db.execute(
        "select date from calendar_dates where service_id = ? and date = 20130417 order by date",
        (service_id,)
    ):
#!/usr/bin/python
import sys

# requires graphserver to be installed
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase

verbose = False

RADIUS = 4000       # meters
OBSTRUCTION = 1.3   # factor to expand straight-line distance
range_lat = RADIUS / 111111.111

if len(sys.argv) < 2:
    print 'usage: transfers.py infile.gtfsdb [verbose]'
    exit(1)

gtfsdb_file = sys.argv[1]
try:
    with open(gtfsdb_file) as f:
        db = GTFSDatabase(gtfsdb_file)
except IOError as e:
    print 'gtfsdb file "%s" cannot be opened' % gtfsdb_file
    exit(1)

if len(sys.argv) > 2 and sys.argv[2] == "verbose":
    verbose = True

# we are interested in all routes available at each stop
all_query = """select stops.stop_id, stops.stop_name, stops.stop_lat, stops.stop_lon, routes
from (select stop_id, group_concat(route_id, ',') as routes
      from (select distinct route_id, stop_id from trips, stop_times where trips.trip_id = stop_times.trip_id) as x
      group by stop_id) as y, stops
where y.stop_id = stops.stop_id;"""

near_query = """
select stop_id, stop_name, stop_lat, stop_lon from stops where
import os
import time

from graphserver.graphdb import GraphDatabase
from graphserver.core import Graph, Street, State, WalkOptions
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase
from graphserver.ext.osm.osmdb import OSMDB  # assumed import path for graphserver's OSM extension
from PIL import Image
from multiprocessing import Pool

os.environ['TZ'] = 'US/Pacific'
time.tzset()
t0s = "Mon May 17 08:50:00 2010"
t0t = time.strptime(t0s)
d0s = time.strftime('%a %b %d %Y', t0t)
t0 = time.mktime(t0t)
print 'search date: ', d0s
print 'search time: ', time.ctime(t0), t0

gtfsdb = GTFSDatabase('./trimet.gtfsdb')
gdb = GraphDatabase('./test.gdb')
osmdb = OSMDB('./testgrid.osmdb')
g = gdb.incarnate()

# FOOT - would be better if i could specify 0 boardings not 0 transfers
wo = WalkOptions()
wo.max_walk = 2000
wo.walking_overage = 0.0
wo.walking_speed = 1.0  # trimet uses 0.03 miles / 1 minute - but it uses straight line distance as well
wo.transfer_penalty = 99999
wo.walking_reluctance = 1
wo.max_transfers = 0  # make much higher?
wo.transfer_slack = 60 * 5
wo_foot = wo
if __name__ == '__main__':
    usage = """usage: python zzzz.py <graph_database> <assist_graph_database> <osm_database> <gtfs_database>"""
    parser = OptionParser(usage=usage)
    (options, args) = parser.parse_args()

    if len(args) != 4:
        parser.print_help()
        exit(-1)

    graph_db = args[0]
    assist_graph_db = args[1]
    osm_db = args[2]
    gtfs_db = args[3]

    graphdb = GraphDatabase( graph_db )
    assistgraphdb = GraphDatabase( assist_graph_db )
    osmdb = OSMDB( osm_db )
    gtfsdb = GTFSDatabase( gtfs_db )

    g = graphdb.incarnate()
    ag = assistgraphdb.incarnate()

    nodes = {}
    for id, tags, lat, lon, endnode_refs in osmdb.nodes():
        nodes['osm-' + id] = (lat, lon)

    for id, name, lat, lon in gtfsdb.stops():
        nodes['sta-' + id] = (lat, lon)

    os.environ['TZ'] = 'US/Pacific'
    time.tzset()
    t0s = "Tue Nov 16 07:50:30 2010"
    t0t = time.strptime(t0s)
    d0s = time.strftime('%a %b %d %Y', t0t)
    t0 = time.mktime(t0t)