def global_init(db_file: str):
    global __factory, __async_engine

    if __factory:
        return

    if not db_file or not db_file.strip():
        raise Exception("You must specify a db file.")

    folder = Path(db_file).parent
    folder.mkdir(parents=True, exist_ok=True)

    conn_str = 'sqlite:///' + db_file.strip()
    # The async engine requires a truly async driver; a plain "sqlite://" URL
    # is rejected by create_async_engine, so use aiosqlite for the async URL.
    async_conn_str = 'sqlite+aiosqlite:///' + db_file.strip()
    print("Connecting to DB with {}".format(conn_str))

    # Adding check_same_thread=False after the recording. This can be an issue with
    # the creating/owner thread when cleaning up sessions, etc. This is a SQLite
    # restriction that we probably don't care about in this example.
    engine = sa.create_engine(conn_str, echo=False,
                              connect_args={"check_same_thread": False})
    __async_engine = create_async_engine(async_conn_str, echo=False,
                                         connect_args={"check_same_thread": False})

    __factory = orm.sessionmaker(bind=engine)

    # noinspection PyUnresolvedReferences
    import data.__all_models

    SqlAlchemyBase.metadata.create_all(engine)

def __init__(self, app: FastAPI):
    self.app = app
    self.engines["main"] = create_async_engine(
        url=get_sqlalchemy_core_url(),
        future=True,
        echo=self.echo,
    )

async def async_main(): engine = create_async_engine( "postgresql+asyncpg://scott:tiger@localhost/test", echo=True, ) async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.create_all) async_session = async_sessionmaker(engine, expire_on_commit=False) async with async_session() as session, session.begin(): session.add_all([A(data="a_%d" % i) for i in range(100)]) statements = [ select(A).where(A.data == "a_%d" % random.choice(range(100))) for i in range(30) ] results = await asyncio.gather( *(run_out_of_band(async_session, session, statement) for statement in statements)) print(f"results: {[r.all() for r in results]}")
def __init__(self, engine_url: str) -> None:
    if 'sqlite' in engine_url:
        engine_url += '?check_same_thread=False'
    sync_engine_url = engine_url.replace('+asyncpg', '')

    # Create sync backend
    self._db_sync_engine = create_engine(sync_engine_url)
    Base.metadata.create_all(self._db_sync_engine)
    self._session_sync_factory = sessionmaker(bind=self._db_sync_engine,
                                              autocommit=False,
                                              autoflush=True,
                                              expire_on_commit=False)

    # Create async backend
    if 'sqlite' in engine_url:
        self._wrap_sync = True
    else:
        self._db_async_engine = create_async_engine(engine_url)
        self._session_async_factory = sessionmaker(
            bind=self._db_async_engine,
            autocommit=False,
            autoflush=True,
            class_=AsyncSession,
            expire_on_commit=False)
        self._wrap_sync = False

    self._sync_session = None
    self._async_session = None

async def drop_database():
    url = get_db_maintenance_url()
    engine = create_async_engine(str(url), isolation_level="AUTOCOMMIT")

    async with engine.connect() as conn:
        # Disconnect all users from the database we are dropping.
        version = conn.dialect.server_version_info
        pid_column = "pid" if (version >= (9, 2)) else "procpid"
        text = sa.text("""
            SELECT pg_terminate_backend(pg_stat_activity.%(pid_column)s)
            FROM pg_stat_activity
            WHERE pg_stat_activity.datname = '%(db_name)s'
              AND %(pid_column)s <> pg_backend_pid();
        """ % {
            "pid_column": pid_column,
            "db_name": DATABASE_URL.database,
        })
        await conn.execute(text)

        # Drop the database.
        text = sa.text("DROP DATABASE %(db_name)s" % {"db_name": DATABASE_URL.database})
        await conn.execute(text)

    await engine.dispose()

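# Hedged usage sketch: drop_database() is a coroutine, so a synchronous entry
# point would drive it with asyncio.run(). The __main__ guard is illustrative,
# not part of the snippet above.
if __name__ == "__main__":
    asyncio.run(drop_database())
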
async def db_connect(self) -> AsyncEngine:
    """
    Establish a connection with the database and return the asynchronous engine.

    Functions which interact with the database will then be able to use this
    engine to create their `AsyncSession` instances, which will in turn be
    used to perform any operations on the database.

    We return `AsyncEngine` instead of directly using `AsyncSession`, because
    reusing the same session isn't thread-safe and, when multiple calls happen
    concurrently, it causes issues that asyncpg can't handle.
    """
    # Load all DB tables, in order to bring them into the metadata of DbBase.
    load_tables()

    engine = create_async_engine(config.DATABASE_ENGINE_STRING)
    try:
        async with engine.begin() as conn:
            # Create all database tables from the discovered models.
            await conn.run_sync(DbBase.metadata.create_all)
    except ConnectionRefusedError:
        # Keep recursively trying to connect to the database; sleep
        # asynchronously so the event loop isn't blocked while waiting.
        logger.error("Unable to connect to database, retrying in 5s")
        await asyncio.sleep(5)
        return await self.db_connect()
    except InvalidPasswordError as exc:
        logger.critical("Invalid database password.")
        raise exc

    return engine

def get_db_engine(config, echo=False):
    return create_async_engine(
        'postgresql+asyncpg://{username}:{password}@{host}:{port}/{name}'.format(**config),
        echo=echo,
        future=True,
        isolation_level='SERIALIZABLE')

async def check_database(url: str, logger: BoundLogger) -> None:
    """Check that the database is accessible.

    Parameters
    ----------
    url : `str`
        Database connection URL.
    logger : `structlog.stdlib.BoundLogger`
        Logger used to report problems
    """
    engine = create_async_engine(url, future=True)
    factory = _create_session_factory(engine)
    for _ in range(5):
        try:
            async with factory() as session:
                async with session.begin():
                    await session.execute(select(SQLAdmin).limit(1))
                    return
        except (ConnectionRefusedError, OperationalError):
            logger.info("database not ready, waiting two seconds")
            await asyncio.sleep(2)  # async sleep so the event loop isn't blocked
            continue

    # If we got here, we failed five times. Try one last time without
    # catching exceptions so that we raise the appropriate exception to our
    # caller.
    async with factory() as session:
        async with session.begin():
            await session.execute(select(SQLAdmin).limit(1))

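# check_database() above assumes a _create_session_factory() helper. A minimal
# sketch of one plausible implementation (the exact options are assumptions,
# not taken from the snippet):
def _create_session_factory(engine: AsyncEngine) -> sessionmaker:
    return sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
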
async def async_main(): engine = create_async_engine( "postgresql+asyncpg://postgres:example@localhost/hw3", echo=True, ) async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.create_all) # expire_on_commit=False will prevent attributes from being expired # after commit. async_session = sessionmaker( engine, expire_on_commit=False, class_=AsyncSession ) async with async_session() as session: data = json.loads(requests.get(DB_URL).text) async with session.begin(): for post in data['posts']: session.add(Posts(id=post['id'], data=post['title'])) for comment in data['comments']: session.add(Comments(id=comment['id'], data=comment['body'], post_id=comment['postId'])) await session.commit() await session.close()
def get_db_service() -> DbService:
    settings = config.get_settings()
    engine = create_async_engine(settings.SQLALCHEMY_DATABASE_URI, echo=settings.DEBUG)
    metadata = MetaData()
    Base = declarative_base(metadata=metadata)
    return DbService(metadata=metadata, engine=engine, Model=Base)

def __init__(self, uri, query=None, name=None, frequency=DEFAULT_FREQUENCY,
             repeat=None, wait=None, loop=None):
    # Local import
    import sqlalchemy as db
    from sqlalchemy.ext.asyncio import create_async_engine

    self._db = db  # Make module accessible to other methods

    try:
        self._engine = create_async_engine(uri)
    except Exception as ex:
        raise RuntimeError(
            f'Unable to initialize database using uri: {uri}: {ex}')

    super().__init__(loop=loop)

    if query is not None:
        self.add(query, name=name, frequency=frequency, repeat=repeat, wait=wait)

async def get_engine_pool(echo=False):
    async_engine = create_async_engine(
        f'mysql+aiomysql://{os.getenv("sql_user")}:{os.getenv("sql_password")}'
        f'@{os.getenv("sql_host")}/{os.getenv("sql_database")}',
        echo=echo,
        pool_size=20)
    # async_session = sessionmaker(async_engine, expire_on_commit=False, class_=AsyncSession)
    return async_engine

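# Hedged usage sketch, following the commented-out line above: build a session
# factory from the returned engine. The query and the sqlalchemy.text import
# are assumptions.
async def example_query():
    engine = await get_engine_pool()
    async_session = sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)
    async with async_session() as session:
        result = await session.execute(text("SELECT 1"))
        print(result.scalar())
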
async def connect_to_db(app: FastAPI) -> None:
    logger.debug("Connecting db ...")

    cfg: PostgresSettings = app.state.settings.CATALOG_POSTGRES
    engine: AsyncEngine = create_async_engine(
        cfg.dsn_with_async_sqlalchemy,
        pool_size=cfg.POSTGRES_MINSIZE,
        max_overflow=cfg.POSTGRES_MAXSIZE - cfg.POSTGRES_MINSIZE,
        connect_args={
            "server_settings": {"application_name": cfg.POSTGRES_CLIENT_NAME}
        },
        # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects
        pool_pre_ping=True,
    )
    logger.debug("Connected to %s", engine.url)  # pylint: disable=no-member

    logger.debug("Checking db migration...")
    try:
        await raise_if_migration_not_ready(engine)
    except Exception:
        # NOTE: engine must be closed because retry will create a new engine
        await engine.dispose()
        raise
    logger.debug("Migration up-to-date")

    app.state.engine = engine
    logger.debug(
        "Setup engine: %s",
        await get_pg_engine_stateinfo(engine),
    )

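# A connect_to_db() like the one above typically has a shutdown counterpart
# that disposes the pooled connections. A minimal sketch; the function name is
# an assumption, not taken from the snippet:
async def close_db_connection(app: FastAPI) -> None:
    engine = getattr(app.state, "engine", None)
    if engine is not None:
        await engine.dispose()
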
async def _create_db_client(self) -> None:
    """Create a database client and store it on the object for later use."""
    if self._db_engine:
        return None
    self._db_engine = create_async_engine(self._conn_str, echo=True)

async def setup(cls, app: Application):
    # URL objects are immutable in SQLAlchemy 1.4+; build them via URL.create().
    cls.__engine = create_async_engine(URL.create(**config['postgres']))
    cls.__sessionmaker = sessionmaker(bind=cls.__engine,
                                      autoflush=False,
                                      autocommit=False,
                                      expire_on_commit=False,
                                      class_=AsyncSession)

def database(monkeypatch, tmpdir):
    """Yield database connection with empty tables created."""
    dbfile = f"sqlite+aiosqlite:///{tmpdir}/test.db"
    engine = create_async_engine(dbfile)
    asyncio.run(db._init(engine))
    monkeypatch.setattr(db, "get_engine", lambda: engine)
    yield engine

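# Hedged usage sketch: a test consuming the fixture above. It assumes
# sqlalchemy.text is imported; the query is illustrative only.
def test_engine_connects(database):
    async def check():
        async with database.connect() as conn:
            await conn.execute(text("SELECT 1"))
    asyncio.run(check())
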
async def async_main():
    URLDB = "postgresql+asyncpg://postgres:[email protected]:5432/postgres"
    engine = create_async_engine(
        URLDB,
        echo=True,
    )

    async with engine.connect() as conn:
        # Select a Result, which will be delivered with buffered results.
        t = text("SELECT * FROM bc250_2019.lml_unidade_federacao_a")
        print("----------------------------------------------------")
        print(t)
        print("----------------------------------------------------")
        result = await conn.execute(t)
        # print(result.fetchall())
        # async_result = await conn.stream()

        sub_query = "SELECT * FROM bc250_2019.lml_unidade_federacao_a"
        query = f"SELECT ST_AsGeobuf(q, 'geom') FROM ({sub_query}) AS q"
        t = text(query)
        print("----------------------------------------------------")
        print(t)
        print("----------------------------------------------------")
        result = await conn.execute(t)
        fetch_all = result.fetchall()
        print(fetch_all[0][0])

    # For an AsyncEngine created in function scope, close and
    # clean up pooled connections.
    await engine.dispose()

def global_init(db_file: str): global __factory, __async_engine if __factory: return if not db_file or not db_file.strip(): raise Exception("You must specify a db file.") folder = Path(db_file).parent folder.mkdir(parents=True, exist_ok=True) # Post-recording update: # SQLAlchemy started enforcing the underlying Python DB API was truly async # We don't really get that with SQLite but when you switch something like Postgres # It would "light up" with async. Since recording, SQLAlchemy throws and error # if this would be the case. We need to explicitly switch to aiosqlite as below. conn_str = 'sqlite+pysqlite:///' + db_file.strip() async_conn_str = 'sqlite+aiosqlite:///' + db_file.strip() print("Connecting to DB with {}".format(async_conn_str)) # Adding check_same_thread = False after the recording. This can be an issue about # creating / owner thread when cleaning up sessions, etc. This is a sqlite restriction # that we probably don't care about in this example. engine = sa.create_engine(conn_str, echo=False, connect_args={"check_same_thread": False}) __async_engine = create_async_engine( async_conn_str, echo=False, connect_args={"check_same_thread": False}) __factory = orm.sessionmaker(bind=engine) # noinspection PyUnresolvedReferences import data.__all_models SqlAlchemyBase.metadata.create_all(engine)
async def db_ctx(app: web.Application):
    # number of gunicorn workers = multiprocessing.cpu_count() as per gunicorn_conf.py
    # max_connections = 2000 as per toolset/setup/linux/databases/postgresql/postgresql.conf:64
    # give 10% leeway
    max_size = min(1800 / multiprocessing.cpu_count(), 160)
    max_size = max(int(max_size), 1)
    min_size = max(int(max_size / 2), 1)
    print(f'connection pool: min size: {min_size}, max size: {max_size}, orm: {CONNECTION_ORM}')

    if CONNECTION_ORM:
        dsn = pg_dsn('asyncpg')
        engine = create_async_engine(dsn, future=True, pool_size=max_size)
        app['db_session'] = sessionmaker(engine, class_=AsyncSession)
    else:
        dsn = pg_dsn()
        app['pg'] = await asyncpg.create_pool(dsn=dsn, min_size=min_size,
                                              max_size=max_size, loop=app.loop)

    yield

    if not CONNECTION_ORM:
        await app['pg'].close()

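# Hedged usage sketch: how an aiohttp handler might consume the session
# factory stored by db_ctx() above when CONNECTION_ORM is set. The route,
# query, and sqlalchemy.text import are assumptions.
async def example_handler(request: web.Request) -> web.Response:
    async with request.app['db_session']() as session:
        result = await session.execute(text("SELECT 1"))
        return web.json_response({"db": result.scalar()})
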
async def setup(connectionstring: str, network_name: str):
    '''
    Factory for the DnsDb class

    :param connectionstring: connection string for the Postgres DB
    :param network_name: domain for the network, i.e. 'byoda.net'
    :returns: DnsDb instance
    :raises:
    '''
    dnsdb = DnsDb(network_name)

    dnsdb._engine = create_async_engine(
        connectionstring, echo=False, isolation_level='AUTOCOMMIT',
        future=True, poolclass=NullPool
    )

    # Base = declarative_base()

    dnsdb.async_session = sessionmaker(
        dnsdb._engine, class_=AsyncSession, expire_on_commit=False
    )

    # Ensure the 'accounts' subdomain for the network exists
    subdomain = f'accounts.{network_name}'
    async with AsyncSession(dnsdb._engine) as db_session:
        domain_id = await dnsdb._get_domain_id(db_session, subdomain)
        dnsdb._domain_ids[subdomain] = domain_id

    return dnsdb

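# Hedged usage sketch: consuming the factory above. Only setup() is shown in
# the snippet, so the connection string and the query body are illustrative.
async def example():
    dnsdb = await setup('postgresql+asyncpg://user:pass@host/dns', 'byoda.net')
    async with dnsdb.async_session() as session:
        ...  # run DNS record queries against the dnsdb engine
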
async def async_main() -> None:
    """Main program function."""
    engine = create_async_engine(
        "postgresql+asyncpg://scott:tiger@localhost/test",
        echo=True,
    )

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    async_session = async_sessionmaker(engine, expire_on_commit=False)

    async with async_session.begin() as session:
        await session.run_sync(work_with_a_session_one)
        await session.run_sync(work_with_a_session_two, param="foo")
        session.add_all([
            A(bs=[B(), B()], data="a1"),
            A(bs=[B()], data="a2"),
            A(bs=[B(), B()], data="a3"),
        ])

    async with async_session() as session:
        result = await session.execute(select(A).order_by(A.id))
        r: ScalarResult[A] = result.scalars()
        a1 = r.one()
        a1.data = "new data"
        await session.commit()

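# async_main() above assumes two sync-style callables handed to
# AsyncSession.run_sync(). A minimal sketch of what they might look like:
# each receives a regular orm.Session and can use traditional ORM patterns.
# The bodies (and the Session import) are assumptions, not from the snippet.
def work_with_a_session_one(session: Session) -> None:
    for a in session.scalars(select(A)):
        print("got:", a.data)


def work_with_a_session_two(session: Session, param: str) -> None:
    print("param:", param)
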
def connect(self):
    engine = create_async_engine(self.connection_details.get("url"))
    async_session = sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)
    self.session = async_session
    return async_session

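# Hedged usage sketch: the factory returned by connect() still has to be
# called to open an actual session. The caller name is illustrative.
async def example(repository):
    async_session = repository.connect()
    async with async_session() as session:
        ...  # issue queries via the AsyncSession
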
async def async_main(): """Main program function.""" engine = create_async_engine( "postgresql+asyncpg://scott:tiger@localhost/test", echo=True, ) async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) await conn.run_sync(Base.metadata.create_all) async with AsyncSession(engine) as session: async with session.begin(): session.add_all( [ A(bs=[B(), B()], data="a1"), A(bs=[B()], data="a2"), A(bs=[B(), B()], data="a3"), ] ) # we have the option to run a function written in sync style # within the AsyncSession.run_sync() method. The function will # be passed a synchronous-style Session object and the function # can use traditional ORM patterns. await session.run_sync(run_queries) await session.commit()
def create_app(connection_str, sqla_models) -> Starlette:
    """Create the Starlette app"""
    async_connection = connection_str.replace("postgresql://", "postgresql+asyncpg://")
    engine = create_async_engine(async_connection)

    # Convert sqla models to graphql schema
    gql_schema = sqla_models_to_graphql_schema(sqla_models)

    # Build the Starlette GraphQL Route
    graphql_route = get_graphql_route(gql_schema=gql_schema, engine=engine)

    # Build the Starlette GraphiQL Route and StaticFiles
    graphiql_route = get_graphiql_route()

    # Instantiate the Starlette app
    _app = Starlette(
        routes=[graphql_route, graphiql_route],
        exception_handlers={HTTPException: http_exception},
        on_startup=[engine.connect],
        on_shutdown=[engine.dispose],
    )
    return _app

def create_app():
    fernet_key = fernet.Fernet.generate_key()
    SECRET_KEY = base64.urlsafe_b64decode(fernet_key)

    app = web.Application()

    # Initialize the DB
    engine = create_async_engine(DATABASE_URL, future=True, echo=True)
    app['DB ENGINE'] = engine
    app['DB SESSION'] = sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)

    app['USERS'] = {}  # Initialize with an empty dict
    # Add the rooms under the ROOMS key
    app['ROOMS'] = {
        '1': [],
        '2': [],
        '3': [],
    }

    setup(app, EncryptedCookieStorage(SECRET_KEY))

    # Parametrization for the websockets goes here - these will be the rooms
    app.add_routes([
        web.get('/ws/{room_id}', websocket_handler),
        web.post('/signin', sign_in),
        web.get('/rooms', room_list)
    ])
    return app

async def async_main(config):
    engine = create_async_engine(
        CONNECTION_URI,
        echo=False,
    )

    async with engine.connect() as conn:
        async for diff in watch_source_dest(
            conn, config.get('spec').get('mappings')
        ):
            print('- source {:70} ({:10} ~{:10} rows)'.format(
                diff.source_table,
                humanize.naturalsize(
                    abs(diff.source_stats.table_bytes), False, True, "%.3f"
                ),
                int(diff.source_stats.row_estimate),
            ))
            print('  dest   {:70} ({:10} ~{:10} rows)'.format(
                diff.destination_table,
                humanize.naturalsize(
                    abs(diff.destination_stats.table_bytes), False, True, "%.3f"
                ),
                int(diff.destination_stats.row_estimate),
            ))
            print('')
            print('---------------------------------------------------------------')

async def on_startup():
    engine = create_async_engine(config.POSTGRES_URI, echo=False)
    async with engine.begin() as conn:
        # await conn.run_sync(BaseModel.metadata.drop_all)
        await conn.run_sync(BaseModel.metadata.create_all)

def make_async_engine(url: str, **kwargs) -> AsyncEngine:
    """
    Create and return an asyncio database engine for the given URL, with
    tuned pool defaults that can be overridden via keyword arguments.
    """
    # pool parameters
    kwargs["pool_recycle"] = kwargs.get("pool_recycle", 200)
    kwargs["pool_timeout"] = kwargs.get("pool_timeout", 30)
    kwargs["pool_size"] = kwargs.get("pool_size", 5)
    kwargs["max_overflow"] = kwargs.get("max_overflow", 10)
    kwargs["pool_reset_on_return"] = "rollback"
    kwargs["pool_pre_ping"] = True

    # use orjson for faster json (de)serializing
    kwargs["json_serializer"] = orjson.dumps
    kwargs["json_deserializer"] = orjson.loads

    kwargs["execution_options"] = kwargs.get(
        "execution_options", {"schema_translate_map": {None: "public"}})
    kwargs["future"] = True
    kwargs["echo"] = kwargs.get("echo", True)

    return create_async_engine(url, **kwargs)

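# Hedged usage sketch: callers can rely on the defaults set above or override
# any of them per call. The URL is illustrative only.
engine = make_async_engine(
    "postgresql+asyncpg://scott:tiger@localhost/test",
    pool_size=10,  # overrides the default of 5
    echo=False,    # overrides the default of True
)
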
def __init__(self, output_file: str):
    # to_browse can contain GET and POST resources
    self.to_browse = []
    # browsed_links contains only GET resources
    self.browsed_links = []
    # browsed_forms contains only POST resources
    self.browsed_forms = []
    self.uploads = []
    self.headers = {}
    self.root_url = ""
    self.tag = ""
    self.array = None
    self.method = ""
    self.path = ""
    self.encoding = ""
    self.enctype = "application/x-www-form-urlencoded"
    self.referer = ""
    self.get_params = []
    self.post_params = []
    self.file_params = []
    self.depth = 0
    self.output_file = output_file

    self._must_create = not os.path.exists(self.output_file)
    self._engine = create_async_engine(f'sqlite+aiosqlite:///{self.output_file}')

async def start(self):
    if self._exists and not self._wrapped_engine:
        self._wrapped_engine = create_async_engine(self._db_uri, pool_pre_ping=True)
        async with self._wrapped_engine.begin() as conn:
            await conn.run_sync(self.meta.create_all)