async def setup_db(app: Application, args: Namespace):
    """
    Connect to and disconnect from the database.

    Everything before the ``yield`` initialises the database connection
    (creates the connection pool, etc.); everything after it closes all
    connections and releases the resources.

    :param app: application instance
    :param args: command-line arguments
    """
    db = PG()
    app["db"] = db
    await db.init(
        str(args.pg_url),
        min_size=args.pg_pool_min_size,
        max_size=args.pg_pool_max_size,
    )
    # Cheap round-trip to make sure the pool can actually run queries.
    await db.fetchval("SELECT 1")
    log.info("Connected to database")

    try:
        yield
    finally:
        log.info("Disconnecting from database")
        await db.pool.close()
        log.info("Disconnected from database")
async def partially_update_citizen(db: PG, import_id: int, citizen_id: int,
                                   updated_data: dict) -> dict:
    """
    Partially update a citizen.

    :param db: database accessor object
    :param import_id: identifier of the import
    :param citizen_id: identifier of the citizen
    :param updated_data: new data to apply to the citizen
    :return: updated state of the citizen
    """
    async with db.transaction() as conn:
        # The lock prevents a race condition between concurrent requests
        # that modify relatives.
        await acquire_lock(conn=conn, import_id=import_id)

        citizen = await get_citizen(conn=conn, import_id=import_id,
                                    citizen_id=citizen_id)
        if citizen:
            return await update_citizen(conn=conn, import_id=import_id,
                                        citizen=citizen,
                                        updated_data=updated_data)
        raise HTTPNotFound
async def migrated_postgres_conn(
        migrated_postgres: str) -> AsyncGenerator[PG, None]:
    """Yield a PG accessor connected to the migrated database, closing its pool afterwards."""
    database = PG()
    await database.init(dsn=migrated_postgres)
    try:
        yield database
    finally:
        # Always release the pool, even if the consumer raised.
        await database.pool.close()
def __init__(self, *, driver="postgresql", host, login, pwd, db, loop_=None):
    """
    Build the DSN, pick up (or accept) an event loop and synchronously
    initialise the asyncpgsa connection pool.
    """
    dsn = f"{driver}://{login}:{pwd}@{host}/{db}"
    # Fall back to the running thread's event loop when none is supplied.
    self.loop = loop_ or asyncio.get_event_loop()
    self.pg = PG()
    self.loop.run_until_complete(
        self.pg.init(dsn, min_size=5, max_size=100))
async def init_pg(app, pg_url):
    """Initialise the asyncpgsa driver (asyncpg + sqlalchemy); close its pool on teardown."""
    pg = PG()
    app['pg'] = pg
    await pg.init(pg_url)
    try:
        yield
    finally:
        await pg.pool.close()
def get_citizens_cursor(db: PG, import_id: int) -> AsyncPGCursor:
    """
    Return a cursor for asynchronously fetching citizen data belonging
    to a particular import.

    :param db: database accessor object
    :param import_id: identifier of the import
    :return: cursor object
    """
    filtered_query = CITIZENS_QUERY.where(
        citizens_table.c.import_id == import_id)
    return AsyncPGCursor(query=filtered_query,
                         transaction_ctx=db.transaction())
async def setup_pg(app: Application):
    """
    aiohttp cleanup context: connect to PostgreSQL on startup and close
    the pool on shutdown.

    NOTE(review): the original signature was annotated ``-> PG``, but this
    is an async generator (it yields ``None`` for aiohttp's ``cleanup_ctx``
    and never returns a value), so the misleading annotation was dropped.

    :param app: aiohttp application instance
    """
    log.info('Connecting to database: %s', DEFAULT_PG_URL)

    app['pg'] = PG()
    await app['pg'].init(str(DEFAULT_PG_URL),
                         min_size=pg_pool_min_size,
                         max_size=pg_pool_max_size)
    # Fail fast if the pool cannot actually execute queries.
    await app['pg'].fetchval('SELECT 1')
    log.info('Connected to database %s', DEFAULT_PG_URL)

    try:
        yield
    finally:
        log.info('Disconnecting from database %s', DEFAULT_PG_URL)
        await app['pg'].pool.close()
        log.info('Disconnected from database %s', DEFAULT_PG_URL)
async def setup_pg(app: Application, args: Namespace):
    """
    aiohttp cleanup context: connect to PostgreSQL on startup and close
    the pool on shutdown. The password is censored before logging.

    NOTE(review): the original signature was annotated ``-> PG``, but this
    is an async generator (it yields ``None`` for aiohttp's ``cleanup_ctx``
    and never returns a value), so the misleading annotation was dropped.

    :param app: aiohttp application instance
    :param args: command-line arguments with DSN and pool-size settings
    """
    # Never log the real password.
    db_info = args.pg_url.with_password(CENSORED)
    log.info('Connecting to database: %s', db_info)

    app['pg'] = PG()
    await app['pg'].init(str(args.pg_url),
                         min_size=args.pg_pool_min_size,
                         max_size=args.pg_pool_max_size)
    # Fail fast if the pool cannot actually execute queries.
    await app['pg'].fetchval('SELECT 1')
    log.info('Connected to database %s', db_info)

    try:
        yield
    finally:
        log.info('Disconnecting from database %s', db_info)
        await app['pg'].pool.close()
        log.info('Disconnected from database %s', db_info)
async def setup_app_pg(app: Application, args: cfg):
    """
    aiohttp cleanup context: connect to PostgreSQL on startup and close
    the pool on shutdown.

    NOTE(review): the original signature was annotated ``-> PG``, but this
    is an async generator (it yields ``None`` for aiohttp's ``cleanup_ctx``
    and never returns a value), so the misleading annotation was dropped.

    :param app: aiohttp application instance
    :param args: configuration object with DSN and pool-size settings
    """
    db_info = args.DEFAULT_PG_URL
    log.info('Connecting to database: %s', db_info)

    app['pg'] = PG()
    await app['pg'].init(str(args.DEFAULT_PG_URL),
                         min_size=args.PG_POOL_MIN_SIZE,
                         max_size=args.PG_POOL_MAX_SIZE)
    # Fail fast if the pool cannot actually execute queries.
    await app['pg'].fetchval('SELECT 1')
    log.info('Connected to database %s', db_info)

    try:
        yield
    finally:
        log.info('Disconnecting from database %s', db_info)
        await app['pg'].pool.close()
        log.info('Disconnected from database %s', db_info)
async def create_import(db: PG, citizens: List[dict]) -> int:
    """
    Create a new import and store its citizens and their relations.

    Rows are inserted in chunks so a single INSERT never exceeds the
    per-query limits (MAX_CITIZENS_PER_INSERT / MAX_RELATIONS_PER_INSERT).

    :param db: database accessor object
    :param citizens: citizen records to store
    :return: identifier of the newly created import
    """
    async with db.transaction() as conn:
        # Allocate the import id first — every row below references it.
        import_id = await conn.fetchval(
            query=imports_table.insert().returning(imports_table.c.import_id))

        citizen_rows = make_citizen_rows(import_id=import_id,
                                         citizens=citizens)
        relation_rows = make_relation_rows(import_id=import_id,
                                           citizens=citizens)

        insert_citizens = citizens_table.insert()
        for chunk in chunk_list(iterable=citizen_rows,
                                size=MAX_CITIZENS_PER_INSERT):
            await conn.execute(insert_citizens.values(list(chunk)))

        insert_relations = relations_table.insert()
        for chunk in chunk_list(iterable=relation_rows,
                                size=MAX_RELATIONS_PER_INSERT):
            await conn.execute(insert_relations.values(list(chunk)))

        return import_id
async def setup_pg(app):
    """
    aiohttp cleanup context: connect to the PostgreSQL "devices" database
    on startup and close the pool on shutdown. Credentials come from the
    POSTGRESUSER / POSTGRESPASS / POSTGRESHOST environment variables.

    NOTE(review): the original signature was annotated ``-> PG``, but this
    is an async generator (it yields ``None`` for aiohttp's ``cleanup_ctx``
    and never returns a value), so the misleading annotation was dropped.

    :param app: aiohttp application instance
    """
    db_url = 'postgresql://{}:{}@{}/devices'.format(
        os.environ['POSTGRESUSER'],
        os.environ['POSTGRESPASS'],
        os.environ['POSTGRESHOST'])
    logging.info('Connecting to POSTGRES database')

    app['pg'] = PG()
    await app['pg'].init(
        str(db_url),
        min_size=1,
        max_size=30
    )
    # Fail fast if the pool cannot actually execute queries.
    await app['pg'].fetchval('SELECT 1')
    logging.info('Connected to POSTGRES database')

    try:
        yield
    finally:
        logging.info('Disconnecting from POSTGRES database')
        await app['pg'].pool.close()
        logging.info('Disconnected from POSTGRES database')
async def init_pg(app, db_url):
    """Initialise the asyncpgsa driver (asyncpg + sqlalchemy) and store it on the app."""
    pg = PG()
    app['pg'] = pg
    await pg.init(db_url)
async def init_pg(app):
    """Connect asyncpgsa to the local 'staff' database (credentials redacted in the DSN)."""
    pg = PG()
    app['pg'] = pg
    await pg.init('postgresql://*****:*****@0.0.0.0/staff')
async def setup_pg(args):
    """
    Create and return a PG accessor configured from the settings object.

    :param args: configuration with DSN and pool-size attributes
    :return: initialised PG instance
    """
    database = PG()
    await database.init(str(args.DEFAULT_PG_URL),
                        min_size=args.PG_POOL_MIN_SIZE,
                        max_size=args.PG_POOL_MAX_SIZE)
    return database
class DataBase:
    # Thin synchronous facade over an asyncpgsa pool: the constructor builds
    # the pool on an event loop, the async methods save/fetch crawled URLs.

    def __init__(self, *, driver="postgresql", host, login, pwd, db, loop_=None):
        """
        Build the DSN, pick up (or accept) an event loop and synchronously
        initialise the asyncpgsa connection pool.
        """
        sql_params = f"{driver}://{login}:{pwd}@{host}/{db}"
        if not loop_:
            loop_ = asyncio.get_event_loop()
        self.loop = loop_
        self.pg = PG()
        # Block until the pool is ready so the instance is usable right away.
        self.loop.run_until_complete(
            self.pg.init(sql_params, min_size=5, max_size=100))

    async def save_to_db(self, url, title, html_body, *, parent):
        """
        Upsert a crawled URL: insert a new row, or — on a URL conflict —
        replace the stored title/html/parent.

        NOTE(review): both ``save_html`` calls below run even though only
        one branch (insert vs. update) takes effect; confirm this double
        save is intended before restructuring.
        """
        print(f"Save url: {url}")
        async with self.pg.transaction() as conn:
            insert_query = insert(urls_table).values({
                "url": str(url),
                "title": title,
                # Persist the HTML body to a file; store the file reference.
                "html": await files.save_html(url, html_body),
                "parent": parent
            })
            update_query = insert_query.on_conflict_do_update(
                constraint="urls_url_key",
                set_={
                    "title": title,
                    # On conflict: delete the previously stored HTML file
                    # and save the new body (see _update_html).
                    "html": await self._update_html(url, html_body, conn),
                    "parent": parent
                })
            await conn.execute(update_query)

    async def get_from_db(self, *, parent, limit=10):
        """Print up to ``limit`` stored (url, title) pairs for the given parent."""
        async with self.pg.transaction() as conn:
            query = select([
                urls_table.c.url,
                urls_table.c.title
            ]).where(urls_table.c.parent == parent).limit(limit)
            for res in await conn.fetch(query):
                print(f"{res[0]}: \"{res[1]}\"")

    @staticmethod
    async def _update_html(url: URL, html_file_body: str, connection):
        """
        Delete the HTML file whose reference is being replaced in the DB,
        then save the new body and return the new file name.
        """
        query = select([
            urls_table.c.html,
        ]).where(urls_table.c.url == str(url))
        old_html = await connection.fetchval(query)
        if old_html:
            # Remove the stale file so orphaned HTML does not accumulate.
            files.del_html(old_html)
            print("Delete file: ", old_html)
        html_file_name = await files.save_html(url, html_file_body)
        return html_file_name