def init_engine(uri, clear_db=False):
    global sessionmaker, engine, db_session
    engine = create_engine(uri)
    db_session.remove()
    sessionmaker.configure(autocommit=False, autoflush=False, bind=engine)
    if clear_db:
        # create tables if they do not exist
        from app_server.models import User, Log, DataFit
        myBase.metadata.create_all(bind=engine)
        # and clear the tables if they had data in them
        for table in reversed(myBase.metadata.sorted_tables):
            db_session.execute(table.delete())
        db_session.commit()
        print('Database cleared.')
    else:
        # create tables if they do not exist
        from app_server.models import User, Log, DataFit
        myBase.metadata.create_all(bind=engine)
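# Hedged sketch (not from the original project) of the module-level objects
# init_engine appears to rely on: a sessionmaker factory, a scoped_session
# registry (db_session) and a declarative base (myBase). Names and the SQLite
# URI below are illustrative assumptions.
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, scoped_session
from sqlalchemy.orm import sessionmaker as make_session_factory

myBase = declarative_base()
sessionmaker = make_session_factory(autoflush=False)  # factory; bound to an engine later
db_session = scoped_session(sessionmaker)             # thread-local session registry
engine = None

# Example call against a throwaway in-memory database (commented out because
# init_engine imports app_server.models, which only exists in the real project):
# init_engine("sqlite://", clear_db=True)
# Note: the autocommit flag passed to configure() above is SQLAlchemy 1.x-style;
# it was removed in SQLAlchemy 2.0.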
def init_engine(new_engine):
    global sessionmaker, engine, session
    engine = new_engine
    session.remove()
    sessionmaker.configure(bind=engine)
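# Usage note (hedged): sessionmaker.configure(bind=...) only affects Session
# objects created after the call, which is presumably why the scoped registry
# is cleared with session.remove() first. Illustrative call, assuming `session`
# is a scoped_session created from the same factory:
from sqlalchemy import create_engine

init_engine(create_engine("sqlite://"))  # in-memory engine for demonstration
db = session()   # the next session handed out is bound to the new engine
db.close()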
import csv

from sqlalchemy import (create_engine, Column, Integer, BIGINT, TEXT, REAL,
                        TIMESTAMP)
from sqlalchemy.orm import sessionmaker

# Here we define columns for the table person
# Notice that each column is also a normal Python instance attribute.
id = Column(Integer, primary_key=True)
UnitId = Column(BIGINT)
Type = Column(TEXT)
Min = Column(REAL)
Max = Column(REAL)
Sum = Column(TEXT)
BeginTime = Column(TIMESTAMP)
EndTime = Column(TIMESTAMP)

print("creating engine!!!")
engine = create_engine("postgresql://*****:*****@localhost/project")
engine.connect()
# Rebinds the name `sessionmaker` to the factory instance returned by
# sessionmaker(); the factory's configure() is then used to attach the engine.
sessionmaker = sessionmaker()
sessionmaker.configure(bind=engine)
# `Base` is the project's declarative base, defined elsewhere in the original file.
Base.metadata.create_all(engine)


def splitConnectionsData():
    with open('Connections.csv') as csvfile:
        readCSV = csv.reader(csvfile, delimiter=';')
        next(readCSV)  # skip the header row
        print("Reading Connections...")
        count = 0
        session = sessionmaker()
        for row in readCSV:
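# Hedged sketch: in the original file the Column attributes above presumably
# sit inside a declarative model class, so that Base.metadata.create_all()
# has a table to create. A self-contained illustration (class and table names
# are hypothetical, not taken from the source):
from sqlalchemy import Column, Integer, BIGINT, TEXT, REAL, TIMESTAMP
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Connection(Base):
    __tablename__ = 'connections'
    id = Column(Integer, primary_key=True)
    UnitId = Column(BIGINT)
    Type = Column(TEXT)
    Min = Column(REAL)
    Max = Column(REAL)
    Sum = Column(TEXT)
    BeginTime = Column(TIMESTAMP)
    EndTime = Column(TIMESTAMP)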
def configure_session(db_uri, sessionmaker=Session, debug=False):
    engine = create_engine(db_uri, echo=debug)
    sessionmaker.configure(bind=engine)
    return sessionmaker()
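# Illustrative usage (the URI is a placeholder and `Session` is assumed to be
# a module-level sessionmaker() factory, matching the default argument above):
from sqlalchemy import text

db = configure_session("sqlite://", debug=True)
print(db.execute(text("SELECT 1")).scalar())
db.close()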