def process_source(source):
    """Load the GTFS feed at *source* into a geospatial database.

    Relies on a module-level ``db_string`` for the database URL.
    Errors are printed but not re-raised (best-effort load) — this
    preserves the original deliberate-swallow behavior.
    """
    try:
        gtfs = GTFS(filename=source)
        p_db = Database(url=db_string, is_geospatial=True)
        gtfs.load(p_db, filename=source)
    except Exception as e:
        # FIX: original used `except Exception, e` / `print e`, which is
        # Python-2-only syntax; this form runs on both Python 2.6+ and 3.
        print(e)
def main():
    """Command-line entry point: parse arguments, create the database,
    then load the GTFS feed into it."""
    # Parse the command-line arguments.
    options = init_parser()

    # Create the target database (schema / geospatial support per args).
    database = Database(options.database_url, options.schema, options.is_geospatial)
    database.create()

    # Load the GTFS feed into the freshly created database.
    feed = GTFS(options.file)
    feed.load(database)
def loadToDatabase(writedir, name, gtfsfile):
    """Load *gtfsfile* into a new SQLite database file under *writedir*.

    Arguments:
        writedir: directory in which the SQLite file is created
        name:     base name of the database file (``.sqlite`` is appended)
        gtfsfile: path or URL of the GTFS feed to load

    Returns the filesystem path of the created SQLite file.
    """
    db_type = "sqlite:///"
    suffix = ".sqlite"
    # Plain concatenation kept on purpose: callers receive this exact
    # string, so switching to os.path.join could change the return value.
    dbpath = writedir + "/" + name + suffix
    dbname = db_type + dbpath
    # FIX: original used the Python-2-only `print "..."` statement; the
    # function call form prints identically on py2 and is valid on py3.
    print("storing into database as " + dbname)
    db = Database(dbname, None, False)
    db.create()
    gtfs = GTFS(gtfsfile)
    gtfs.load(db)
    return dbpath
def loadToDatabase(writedir, name, gtfsfile):
    """Load *gtfsfile* into a new SQLite database file under *writedir*.

    Arguments:
        writedir: directory in which the SQLite file is created
        name:     base name of the database file (``.sqlite`` is appended)
        gtfsfile: path or URL of the GTFS feed to load

    Returns the filesystem path of the created SQLite file.

    NOTE(review): this is a byte-for-byte duplicate of an earlier
    ``loadToDatabase`` definition in this file; the later definition
    shadows the earlier one at import time — consider removing one.
    """
    db_type = "sqlite:///"
    suffix = ".sqlite"
    # Plain concatenation kept on purpose: callers receive this exact
    # string, so switching to os.path.join could change the return value.
    dbpath = writedir + "/" + name + suffix
    dbname = db_type + dbpath
    # FIX: original used the Python-2-only `print "..."` statement; the
    # function call form prints identically on py2 and is valid on py3.
    print("storing into database as " + dbname)
    db = Database(dbname, None, False)
    db.create()
    gtfs = GTFS(gtfsfile)
    gtfs.load(db)
    return dbpath
def database_load(filename, **kwargs):
    """Basic API: load a GTFS zip file into a database.

    Arguments:
        filename: URL or local path to a GTFS zip file.

    Keyword arguments (forwarded to ``Database`` and ``GTFS.load``):
        batch_size:    record batch size for memory management
        is_geospatial: if database is support geo functions
        schema:        database schema name
        tables:        limited list of tables to load
        url:           SQLAlchemy database url

    Returns the ``Database`` instance the feed was loaded into.
    """
    target = Database(**kwargs)
    target.create()

    feed = GTFS(filename)
    feed.load(target, **kwargs)

    return target
def database_load(filename, db_url, **kwargs):
    """Load the GTFS feed *filename* into the geospatial database at *db_url*.

    Arguments:
        filename: URL or local path to a GTFS zip file
        db_url:   SQLAlchemy database url

    Keyword arguments:
        file_id: optional identifier forwarded to ``GTFS``

    Returns the ``Database`` instance the feed was loaded into.

    NOTE(review): unlike the sibling loaders in this file, this variant
    never calls ``db.create()`` — presumably the schema already exists;
    confirm before relying on it against a fresh database.
    """
    db = Database(url=db_url, is_geospatial=True)
    gtfs = GTFS(filename=filename, file_id=kwargs.get('file_id'))
    gtfs.load(db)
    # FIX: return the database handle for consistency with the other
    # database_load variant; callers that ignored the (None) return
    # value are unaffected.
    return db