def build_route_data(graph, psql_connect_string, times_filename, points_filename, routes_filename):
    """Import route CSV data (times, points, routes) and prepare calculation tables.

    Opens its own database connection, loads the three CSV inputs, recreates all
    calculation tables, commits, and finally maps route points to graph vertices.

    :param graph: routing graph used to find vertices corresponding to points
    :param psql_connect_string: libpq/psycopg2 connection string
    :param times_filename: path to times.csv
    :param points_filename: path to points.csv
    :param routes_filename: path to routes.csv
    """
    conn = psycopg2.connect(psql_connect_string)
    try:
        import_route_data.read_times(times_filename, conn)
        import_route_data.read_points(points_filename, conn)
        import_route_data.read_routes(routes_filename, conn)
        # recreate all calculation tables (True -> drop and recreate)
        process_routes.create_db_tables(conn, True)
        conn.commit()
    finally:
        conn.close()  # fix: connection was previously leaked
    # opens its own connection internally, hence the connect string, not conn
    import_route_data.calc_corresponding_vertices(graph, psql_connect_string)
def build_base_data(db_conn_string, osm_xml_filename, gtfs_filename):
    """Import OSM and GTFS base data and recreate the calculation tables.

    :param db_conn_string: libpq/psycopg2 connection string
    :param osm_xml_filename: path to the OSM XML extract
    :param gtfs_filename: path to the GTFS transit feed
    """
    # NOTE: 'create_gs_datbases' is the project's actual (misspelled) API name.
    import_base_data.create_gs_datbases(osm_xml_filename, gtfs_filename, db_conn_string)
    import_base_data.add_missing_stops(db_conn_string)
    print('Deleting orphan nodes...')
    import_base_data.delete_orphan_nodes(db_conn_string)
    print('Linking transit to osm data...')
    import_base_data.link_osm_gtfs(db_conn_string)
    # recreate all calculation tables (True -> drop and recreate)
    conn = psycopg2.connect(db_conn_string)
    try:
        process_routes.create_db_tables(conn, True)
        conn.commit()
    finally:
        conn.close()  # fix: connection was previously leaked
def calculate_routes(graph, psql_connect_string, options, num_processes=4):
    """Run shortest-path calculations in parallel worker processes.

    Spawns ``num_processes`` workers running ``process_routes.Proccessing``
    (project spelling), each tagged with a unique hostname+prefix id, plus one
    status-printer process, then waits for all of them to finish.

    :param graph: routing graph handed to each worker
    :param psql_connect_string: libpq/psycopg2 connection string
    :param options: mapping with 'time-step', 'walking-speed', 'max-walk'
                    and 'walking-reluctance' entries
    :param num_processes: number of worker processes (max 52, one per prefix)
    """
    # unique per-worker suffixes: 'A'..'Z' then 'AA'..'ZZ' (same values as before)
    letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    prefixes = tuple(letters) + tuple(ch * 2 for ch in letters)
    # fix: log file handle was opened but never closed
    with open('log.txt', 'w') as logfile:
        conn = psycopg2.connect(psql_connect_string)
        process_routes.create_db_tables(conn, False)
        conn.commit()
        sys.stdout.write('created db_tables\n')
        processes = []
        for i in range(int(num_processes)):
            p = multiprocessing.Process(
                target=process_routes.Proccessing,
                args=(graph, psql_connect_string,
                      int(options['time-step']),
                      float(options['walking-speed']),
                      int(options['max-walk']),
                      int(options['walking-reluctance']),
                      socket.gethostname() + prefixes[i],
                      logfile))
            p.start()
            sys.stdout.write('started thread %s \n' % i)
            time.sleep(10)  # workaround for duplicate calculations - should be temporary
            processes.append(p)
        status_printer = multiprocessing.Process(
            target=process_routes.print_status, args=(conn, logfile))
        status_printer.start()
        processes.append(status_printer)
        for p in processes:
            p.join()
        conn.close()  # fix: parent's connection copy was previously leaked
def validate_input(configuration, psql_connect_string, options):
    """Validate input files and database state for the requested actions.

    Checks that the configured input files exist for the chosen import steps
    and that the database already contains the tables a step depends on.
    If all routes are already calculated, interactively asks whether to
    recalculate (may set ``options.calculate = False``).

    :param configuration: mapping of config keys to file paths
    :param psql_connect_string: libpq/psycopg2 connection string
    :param options: parsed command-line options (import_base, import_routes,
                    import_all, export, calculate attributes)
    :return: True when everything needed is present, else False
    """
    valid = True  # fix: renamed from misspelled 'valide' (local only)

    def _file_exists(key, label):
        """Report a missing configured input file; return False when absent."""
        if os.path.exists(configuration[key]):
            return True
        print(colored('ERROR: could not find %s' % label, 'red'))
        print('looked at: %s' % configuration[key])
        return False

    # check input files
    if options.import_base or options.import_all:
        valid = _file_exists('osm-data', 'osm-data') and valid
        valid = _file_exists('transit-feed', 'transit-feed') and valid
    if options.import_routes or options.import_all:
        valid = _file_exists('routes', 'routes.csv') and valid
        valid = _file_exists('times', 'times.csv') and valid
        valid = _file_exists('points', 'points.csv') and valid

    # check database
    base_tables = (
        'graph_vertices', 'graph_payloads', 'graph_edges', 'graph_resources',
        'osm_nodes', 'osm_ways', 'osm_edges',
        'gtfs_agency', 'gtfs_calendar', 'gtfs_calendar_dates',
        'gtfs_frequencies', 'gtfs_routes', 'gtfs_shapes', 'gtfs_stop_times',
        'gtfs_stops', 'gtfs_transfers', 'gtfs_trips')
    route_tables = ('cal_corres_vertices', 'cal_points', 'cal_routes', 'cal_times')
    path_tables = ('cal_paths', 'cal_paths_details')
    try:
        conn = psycopg2.connect(psql_connect_string)
        c = conn.cursor()
    except Exception:  # fix: bare except also swallowed KeyboardInterrupt/SystemExit
        print(colored('ERROR: could not connect to database', 'red'))
        if DEBUG:
            raise
        valid = False
    else:
        c.execute("select tablename from pg_tables where schemaname='public'")
        tables = c.fetchall()  # list of 1-tuples: [(tablename,), ...]

        def _any_missing(needed):
            """True when any of the needed tables is absent from the schema."""
            return any((nt,) not in tables for nt in needed)

        if not options.import_base and not options.import_all:
            if _any_missing(base_tables):
                valid = False
                print(colored('ERROR: base data not in database - please import base data first', 'red'))
        if not options.import_routes and not options.import_all:
            if _any_missing(route_tables):
                valid = False
                print(colored('ERROR: route data not in database - please import route data first', 'red'))
        if options.export and not options.calculate:
            if _any_missing(path_tables):
                valid = False
                print(colored('ERROR: path data not in database - please calculate shortest paths first', 'red'))
        if options.calculate and ((not options.import_all) and (not options.import_routes)):
            c.execute('SELECT id FROM cal_routes WHERE done=false')
            if len(c.fetchall()) == 0:
                print(colored('It looks like all routes have already been calculated. Do you want to start the calculation again? [ y/n ]', 'yellow'))
                answer = sys.stdin.read(1)  # fix: no longer shadows builtin 'input'
                if answer == 'y' or answer == 'Y':
                    c.execute('UPDATE cal_routes SET done=false')
                    process_routes.create_db_tables(conn, True)
                else:
                    options.calculate = False
        c.close()
        conn.commit()
    return valid