def current_tables_load(**kwargs):
    """ current table loader """
    from gtfsdb import Database, CurrentRoutes, CurrentStops, CurrentRouteStops
    db = Database(**kwargs)
    for cls in [CurrentRoutes, CurrentRouteStops, CurrentStops]:
        db.create_table(cls)
        cls.post_process(db, **kwargs)
def main():
    # process command line args
    args = init_parser()

    # create database
    db = Database(args.database_url, args.schema, args.is_geospatial)
    db.create()

    # load GTFS into database
    gtfs = GTFS(args.file)
    gtfs.load(db)
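# A hypothetical sketch of the argparse-based init_parser() that main() above
# relies on. It covers only the attributes main() actually reads (file,
# database_url, schema, is_geospatial); the flag defaults and help strings are
# assumptions, not the project's actual CLI definition.
import argparse

def init_parser():
    parser = argparse.ArgumentParser(description='load a GTFS feed into a database')
    parser.add_argument('file', help='URL or local path to a GTFS zip file')
    parser.add_argument('--database_url', default='sqlite:///gtfs.db',
                        help='SQLAlchemy database url')
    parser.add_argument('--schema', default=None, help='database schema name')
    parser.add_argument('--is_geospatial', action='store_true',
                        help='enable geometry columns / geo functions')
    return parser.parse_args()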
def loadToDatabase(writedir, name, gtfsfile):
    db_type = "sqlite:///"
    suffix = ".sqlite"
    dbpath = writedir + "/" + name + suffix
    dbname = db_type + dbpath
    print("storing into database as " + dbname)
    db = Database(dbname, None, False)
    db.create()
    gtfs = GTFS(gtfsfile)
    gtfs.load(db)
    return dbpath
def bip_spot_load(directory):
    from . import Database, BipSpot
    kwargs = {'gtfs_directory': directory, 'is_geospatial': True}
    db = Database(**kwargs)
    BipSpot.add_geometry_column()
    BipSpot.__table__.create(bind=db.engine)
    BipSpot.load(db, **kwargs)
def route_stop_load():
    """ written as a test / debug method for the RouteStop table loader """
    from gtfsdb import Database, RouteStop
    kwargs = get_args()[1]
    db = Database(**kwargs)
    RouteStop.load(db, **kwargs)
def database_load_versioned(feed_file, db_url, **kwargs):
    # note: FeedFile, log, sys, and traceback are assumed to come from module-level imports
    db = Database(url=db_url)
    session = db.get_session()

    # skip feeds that have already been fully loaded
    existing_file = session.query(FeedFile).get(feed_file.md5sum)
    if existing_file and existing_file.completed:
        log.debug("FeedFile: {} already at its newest.".format(feed_file.file_url))
        return

    session.merge(feed_file)
    session.commit()
    try:
        database_load(filename=feed_file.file_url, db_url=db_url, file_id=feed_file.md5sum, **kwargs)
        feed_file.completed = True
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        log.error('Error processing: {} Message: {}'.format(feed_file.file_url, e))
def database_load(filename, **kwargs):
    '''Basic API to load a GTFS zip file into a database

    arguments:
        filename: URL or local path to GTFS zip file

    keyword arguments:
        batch_size: record batch size for memory management
        is_geospatial: whether the database supports geo functions
        schema: database schema name
        tables: limited list of tables to load
        url: SQLAlchemy database url
    '''
    db = Database(**kwargs)
    db.create()
    gtfs = GTFS(filename)
    gtfs.load(db, **kwargs)
    return db
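# Usage sketch for database_load() above; the feed path, database url, and
# batch size are example values (assumptions), not taken from the source.
db = database_load(
    'gtfs.zip',               # URL or local path to a GTFS zip file
    url='sqlite:///gtfs.db',  # SQLAlchemy database url
    batch_size=10000,         # record batch size for memory management
    is_geospatial=False,      # plain sqlite: no geo functions
)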
def db_connect_tester():
    """ simple routine to connect to an existing database and list a few stops
        (no GTFS zip needed):

        bin/connect-tester --database_url sqlite:///gtfs.db
    """
    from gtfsdb import Database, Stop, Route, StopTime
    args, kwargs = get_args()
    db = Database(**kwargs)

    for s in db.session.query(Stop).limit(2):
        print(s.stop_name)

    for r in db.session.query(Route).limit(2):
        print(r.route_name)

    stop_times = StopTime.get_departure_schedule(db.session, stop_id='11411')
    for st in stop_times:
        print(st.get_direction_name())
        break
def main(global_config, **ini_settings):
    """ this function is the main entry point for pserve / Pyramid; it returns a
        Pyramid WSGI application

        see setup.py entry points + config/*.ini [app:main] ala pserve
        (e.g., bin/pserve config/development.ini)
    """
    app = AppConfig(**ini_settings)

    from gtfsdb import Database
    kw = app.gtfsdb_param_from_config()
    db = Database(**kw)
    app.set_db(db)

    from . import views
    app.config_include_scan(views)

    return app.make_wsgi_app()
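# For reference, a sketch of how main() above gets wired up as the pserve app
# factory; the distribution and module names ("gtfsdb_api") are assumptions,
# only the [paste.app_factory] / [app:main] plumbing is standard Pyramid usage.
#
#   setup.py:
#       entry_points="""
#       [paste.app_factory]
#       main = gtfsdb_api:main
#       """
#
#   config/development.ini:
#       [app:main]
#       use = egg:gtfsdb_api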
def create_shapes_geom(db_url, shape_id):
    db = Database(url=db_url, is_geospatial=True)
    session = db.get_session()
    session.merge(ShapeGeom.create_shape_geom(shape_id, session))
    session.commit()
# fragment: "agency" and "source_zips" come from an enclosing loop / earlier
# code in the original script (not shown here)
feed = agency["feed_baseurl"]
if ".zip" in feed:
    source_zips.append(feed)

print("Found {} Feeds".format(len(source_zips)))

# sources = get_sources()
sources = []
# sources = ['/Users/rhunter/Desktop/action_20150129_0101.zip', '/Users/rhunter/Desktop/abq-ride_20150802_0107.zip']

# db_string = 'sqlite:///gtfs.db'
# db_string = 'postgresql://*****:*****@test-gtfs.cvklf6ftrsse.us-east-1.rds.amazonaws.com:5432/gtfs_data'
db_string = "postgresql://*****:*****@localhost:5432/gtfs_data"

db = Database(url=db_string, is_geospatial=True)
db.create()
try:
    GTFS.bootstrab_db(db)
except IntegrityError:
    pass

meta = {
    "dataexchange_id": "action",
    "file_url": "/Users/rhunter/Desktop/action_20150129_0101.zip",
    "file_name": "action_20150129_0101.zip",
    "file_checksum": "MD5123123123123",
    "date_added": 1213154234.0,
}
database_load(meta, db_url=db_string)