def go(*, use_loop=True, **kwargs):
    pg_params.update(kwargs)
    if use_loop:
        engine = yield from sa.create_engine(loop=loop, **pg_params)
    else:
        engine = yield from sa.create_engine(**pg_params)
    return engine
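A minimal sketch of driving this legacy generator-style coroutine (the module-level `loop` and `pg_params` it closes over are assumptions carried from the snippet; `asyncio.coroutine` existed through Python 3.10):

import asyncio

loop = asyncio.get_event_loop()
# Wrap the plain generator so the event loop accepts it as a coroutine.
engine = loop.run_until_complete(asyncio.coroutine(go)(use_loop=False))
engine.close()
loop.run_until_complete(engine.wait_closed())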
async def get(self, request: Request, petition_id: str = None):
    async with create_engine(dsn) as engine:
        async with engine.acquire() as conn:
            if petition_id:
                result = await Petition.select(conn, petition_id)
            else:
                result = await Petition.select(conn)
            if result:
                return sanic_json(result, 200)
            return sanic_json(None, 400)
def make_engine(self, use_loop=True):
    if use_loop:
        return (yield from sa.create_engine(database='aiopg',
                                            user='******',
                                            password='******',
                                            host='127.0.0.1',
                                            loop=self.loop))
    return (yield from sa.create_engine(database='aiopg',
                                        user='******',
                                        password='******',
                                        host='127.0.0.1'))
def make_engine(self, use_loop=True, **kwargs):
    if use_loop:
        return (yield from sa.create_engine(database='aiopg',
                                            user='******',
                                            password='******',
                                            host='127.0.0.1',
                                            loop=self.loop,
                                            **kwargs))
    return (yield from sa.create_engine(database='aiopg',
                                        user='******',
                                        password='******',
                                        host='127.0.0.1',
                                        **kwargs))
async def amain():
    # init database
    import argparse

    from aiopg.sa import create_engine

    import ywsd.settings

    parser = argparse.ArgumentParser(description="Yate Routing Engine")
    parser.add_argument("--config", type=str, help="Config file to use.",
                        default="routing_engine.yaml")
    parser.add_argument("--stage2", help="Only setup tables for stage2 routing",
                        action="store_true")
    parser.add_argument("--stage1", help="Only setup tables for stage1 routing",
                        action="store_true")
    parser.add_argument("--regenerate", help="Drop tables if they already exist",
                        action="store_true")
    args = parser.parse_args()

    settings = ywsd.settings.Settings(args.config)
    async with create_engine(**settings.DB_CONFIG) as engine:
        async with engine.acquire() as conn:
            if args.regenerate:
                await regenerate_database_objects(conn, args.stage2, args.stage1)
            else:
                await initialize_database(conn, args.stage2, args.stage1)
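A plausible entry point for `amain()` (an assumption; the original module may wire this up differently):

if __name__ == "__main__":
    import asyncio
    asyncio.run(amain())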
def configure(self):
    self.engine = yield from create_engine(
        self.config.get('SQLALCHEMY_DSN'),
        loop=self.loop
    )
    with add_route_ctx(self, core, name_prefix='core') as add_route:
        add_route('GET', '/', 'index')
def go():
    engine = yield from create_engine(user='******',
                                      database='test',
                                      host='127.0.0.1',
                                      password='******')
    yield from create_tables(engine)
async def upload_images():
    async with create_engine(dsn) as engine:
        async with engine.acquire() as conn:
            comics = await models.list_comics(
                conn, (models.table_comic.c.cdn == ''), limit=1000
            )
            for comic in comics:
                image_url = comic['image']
                try:
                    image_data = await fetch_url(image_url, binary=True)
                    assert image_data
                except Exception as e:
                    logger.exception('download %s failed! \n%s', image_url, e)
                    continue
                try:
                    cdn_url = await upload(image_data, comic)
                except Exception as e:
                    logger.exception('upload %s failed! \n%s', image_url, e)
                    continue
                uuid = str(comic['uuid'])
                await models.update_comics(
                    conn, (models.table_comic.c.uuid == uuid), cdn=cdn_url
                )
                logger.info('download %s then upload to %s success!',
                            image_url, cdn_url)
            await asyncio.sleep(60 * 1)
async def alert_ten(self):
    day_10 = datetime.date.today() - timedelta(days=10)
    data = []
    count = 0
    total = 0
    async with create_engine(connection) as engine:
        async with engine.acquire() as conn:
            last_row = await (await conn.execute(
                self.tb_name.select().order_by(
                    self.tb_name.select().columns['id'].desc())
            )).fetchone()
            async for row in conn.execute(
                    self.tb_name.select().where(self.tb_name.c.date > day_10)):
                date = str(row.date)
                time = str(row.time)
                product_stock = abs(row.raw_milk)
                total += product_stock
                result = {"date": date, "time": time, "Quantity": total}
                data.append(result)
                count += 1
            calc = (total / count) * .1
            if calc > last_row.stock:
                return json({"alert": "product stock is less than 10%"})
            return json({"total milk": "amazing"})
async def handle(request):
    async with create_engine(connection) as engine:
        async with engine.acquire() as conn:
            result = []
            async for row in conn.execute(polls.select()):
                result.append({"question": row.question,
                               "pub_date": row.pub_date})
            return json({"polls": result})
def drop_db_engine(postgres_dsn: Dict) -> sa.engine.Engine:
    # Make a copy so the caller's parameters are not mutated.
    postgres_dsn_copy = postgres_dsn.copy()
    postgres_dsn_copy["database"] = "postgres"
    dsn = "postgresql://{user}:{password}@{host}:{port}/{database}".format(
        **postgres_dsn_copy)
    return sa.create_engine(dsn, isolation_level="AUTOCOMMIT")
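DROP DATABASE / CREATE DATABASE cannot run inside a transaction block, which is why the helper above asks for AUTOCOMMIT isolation. A hypothetical usage sketch with placeholder credentials, using SQLAlchemy 1.x-style string execution:

engine = drop_db_engine({"user": "postgres", "password": "secret",
                         "host": "localhost", "port": 5432,
                         "database": "myapp"})
with engine.connect() as conn:
    conn.execute("DROP DATABASE IF EXISTS myapp_tests")
engine.dispose()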
async def moz_parser(urls_q):
    async with aiohttp.ClientSession() as session:
        while urls_q.qsize() > 0:
            url = await urls_q.get()
            try:
                async with session.get(url) as response:
                    html_code = await response.text()
            except Exception as e:
                print(type(e), e)
                await urls_q.put(url)
                continue
            dom_tree = await loop.run_in_executor(executor, html.fromstring,
                                                  html_code)
            links = dom_tree.xpath('//h2/a')
            async with create_engine(**db_config) as engine:
                async with engine.acquire() as conn:
                    for link in links:
                        href = link.attrib['href']
                        name = link.text
                        await conn.execute(
                            Links.insert().values(name=name, url=href))
                        with open('results.txt', 'a', encoding='utf-8') as f:
                            f.write(f'{href}\t{name}\n')
            print(f'SUCCESS | {url}')
def init(loop):
    adminMainController = AdminMainController()
    engine = yield from create_engine(**config['db'])
    with (yield from engine) as connection:
        adminUserController = AdminUserController(
            adminMainController, users, connection)
    redis = yield from aioredis.create_pool(('localhost', 6379))
    storage = RedisStorage(redis)
    app = web.Application(middlewares=[session_middleware(storage)])
    aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates'))
    app.router.add_route('GET', '/', main_page_view)
    app.router.add_route('GET', '/admin', adminMainController.get)
    app.router.add_route(
        'GET', '/admin/{table}', adminMainController.get_table)
    app.router.add_route(
        'POST', '/admin/{table}', adminMainController.create_record)
    app.router.add_static('/dist', 'dist')
    server = yield from loop.create_server(app.make_handler(),
                                           '127.0.0.1', 8080)
    return server
async def update_db(data):
    parsed = json.loads(data)
    print(parsed['text'])
    vote, coin = is_vote(parsed["text"])
    if None in [vote, coin]:
        return
    print(f'New vote detected : {coin}')
    async with create_engine(
        user="******",
        database="deviant",
        host="127.0.0.1",
        password="******",
        port=5432,
    ) as engine:
        mgr = SQLModelManager.instance()
        mgr.database = engine
        user, created = await User.objects.get_or_create(id=int(parsed['id']))
        if created:
            user.name = parsed["user"]["name"]
            user.screen_name = parsed["user"]["screen_name"]
            user.followers_count = parsed["user"]["followers_count"]
            user.statuses_count = parsed["user"]["statuses_count"]
            await user.save()
        new_record = Tweet()
        new_record.id = parsed["id"]
        new_record.text = parsed["text"]
        new_record.created_at = tdate_to_timestamp(parsed["created_at"])
        new_record.user = user
        new_record.coin = coin.upper()
        await new_record.save(force_insert=True)
async def error_coro(queues):
    session_1 = aiobotocore.get_session(loop=asyncio.get_running_loop())
    session_2 = aiobotocore.get_session(loop=asyncio.get_running_loop())
    config = AioConfig(max_pool_connections=AIOBOTO_CONCURRENCY)
    async with aiohttp.ClientSession() as download_session, \
            session_1.create_client('s3', config=config,
                                    **UPLOAD_CREDENTIALS_1) as upload_session_1, \
            session_2.create_client('s3', config=config,
                                    **UPLOAD_CREDENTIALS_2) as upload_session_2, \
            create_engine(**POSTGRESQL_CONNECT_DICT) as database_session:
        sessions = dict(
            upload=dict(
                session=upload_session_1,
                host=UPLOAD_CREDENTIALS_1['endpoint_url'],
                bucket=BUCKET_NAME_1,
            ),
            upload_2=dict(
                session=upload_session_2,
                host=UPLOAD_CREDENTIALS_2['endpoint_url'],
                bucket=BUCKET_NAME_2,
            ),
            download=download_session,
            database=database_session,
        )
        await image_error_manager(
            queues=queues,
            sessions=sessions,
        )
async def list_comics(request):
    async with create_engine(dsn) as engine:
        async with engine.acquire() as conn:
            limit = 10
            offset = 0
            per_page = request.query.get('per_page', '8')
            page = request.query.get('page', '1')
            if per_page.isdigit() and int(per_page) > 0:
                if int(per_page) < config.MAX_LIMIT:
                    limit = int(per_page)
            if page.isdigit() and int(page) > 0:
                if int(page) < config.MAX_OFFSET:
                    offset = (int(page) - 1) * limit
                    if offset > 0:
                        offset += 1
            where = []
            site = request.query.get('site')
            if site:
                where.append(models.table_comic.c.site == site)
            random = request.query.get('random')
            if random:
                order_by = 'random()'
            else:
                order_by = models.table_comic.c.posted_at.desc()
            comics = await models.list_comics(conn, *where, limit=limit,
                                              offset=offset, order_by=order_by)
            return json_response(comics)
async def delete(self, request: Request, petition_id: str = None):
    headers = request.headers
    if not petition_id:
        return sanic_json("No id provided", 404)
    if not headers.get("X-User", None):
        return sanic_json("No user in headers", 401)
    if request.app.TOKEN_CACHE.get(headers.get("X-User"), None):
        user_id = headers.get("X-User")
    else:
        return sanic_json("No active user found", 401)
    try:
        async with create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                await Petition.delete(conn, petition_id)
                return sanic_json("Petition deleted or not found", 200)
    except Exception:
        return sanic_json("Something went wrong", 500)
async def post(self, request: Request):
    headers = request.headers
    if not headers.get("X-User", None):
        return sanic_json("No user in headers", 401)
    if request.app.TOKEN_CACHE.get(headers.get("X-User"), None):
        user_id = headers.get("X-User")
    else:
        return sanic_json("No active user found", 401)
    try:
        args = self.post_schema.load(request.json)
    except ValidationError:
        return sanic_json("Incorrect arguments", 400)
    try:
        async with create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                args["user_id"] = request.app.TOKEN_CACHE.get(
                    headers.get("X-User"))
                result = await Petition.insert(conn, args)
                if result:
                    return sanic_json(result, 200)
                return sanic_json("Something went wrong", 500)
    except IntegrityError:
        return sanic_json("Such petition already exists", 406)
async def go():
    sql = 'SELECT generate_series(1, 5);'
    result = []
    async with create_engine(host=self.host,
                             user=self.user,
                             database=self.database,
                             password=self.password,
                             loop=self.loop) as engine:
        async with engine.acquire() as conn:
            async with conn.begin() as tr:
                async with conn.execute(sql) as cursor:
                    async for v in cursor:
                        result.append(v)
                    assert tr.is_active
                assert result == [(1,), (2,), (3,), (4,), (5,)]
                assert cursor.closed
            assert not tr.is_active

            tr2 = await conn.begin()
            async with tr2:
                assert tr2.is_active
                async with conn.execute('SELECT 1;') as cursor:
                    rec = await cursor.scalar()
                    assert rec == 1
                    cursor.close()
            assert not tr2.is_active
    assert conn.closed
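Inner coroutines like this `go()` are presumably driven synchronously by the enclosing test method; a sketch of the assumed harness:

self.loop.run_until_complete(go())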
async def create_db(self):
    async with create_engine(**BASIC_DB_CONFIG) as engine:
        async with engine.acquire() as conn:
            conn.autocommit = True
            await conn.execute("DROP DATABASE IF EXISTS whreports")
            exists = await conn.execute(
                f"SELECT 1 FROM pg_database WHERE datname = '{self.db_name}'")
            if not exists.rowcount:
                role_exists = await conn.execute(
                    "SELECT 1 FROM pg_roles WHERE rolname='admin'")
                if not role_exists.rowcount:
                    await conn.execute(
                        "CREATE ROLE admin WITH LOGIN "
                        "ENCRYPTED PASSWORD 'admin';")
                await conn.execute(
                    "CREATE DATABASE {} WITH OWNER = admin;".format(
                        self.db_name))

    # Automatic migration
    script_dir = os.path.dirname(__file__)
    LIQUIBASE_COMMAND = """
    {} {} --driver={} --classpath={} --changeLogFile={} --url={} --username={} --password={} --logLevel=info {}
    """
    liquibase_command = LIQUIBASE_COMMAND.format(
        'sudo' if get_pg_host() == 'localhost' else '',
        os.path.join(script_dir, "./migrations/liquibase"),
        "org.postgresql.Driver",
        os.path.join(script_dir,
                     "./migrations/jdbcdrivers/postgresql-42.2.5.jar"),
        os.path.join(script_dir, "./migrations/changelog.xml"),
        f"jdbc:postgresql://{get_pg_host()}/{self.db_name}",
        'postgres',
        'admin',
        "migrate")
    os.system(liquibase_command)
def create_engine(): """ Create new (synchronous) connection to DB. If test is True, then create engine with the testing DB. """ engine = sa.create_engine(config.postgres.dsn) return engine
async def update_project(project_id, user_id, date):
    async with create_engine(connection_url_ss_train) as engine:
        async with engine.acquire() as conn:
            query = projects.update().where(
                projects.c.id == project_id).values(user_id=user_id, date=date)
            await conn.execute(query)
def create_engine(self):
    engine = yield from create_engine(user=self.user,
                                      database=self.database,
                                      host=self.host,
                                      password=self.password,
                                      echo=True)
    return engine
async def test_conversation_insert_raw(timestamp, loop, db, dsn):
    async with create_engine(dsn, loop=loop) as engine:
        async with engine.acquire() as conn:
            async with conn.begin() as tr:
                conversation = dict(
                    conv_id="x",
                    creator="*****@*****.**",
                    subject="testing",
                    ref="testing",
                    timestamp=timestamp,
                    status="draft",
                )
                await conn.execute(
                    sa_conversations.insert().values(**conversation))
                con_count = await conn.scalar(sa_conversations.count())
                assert con_count == 1

                data = None
                async for row in conn.execute(sa_conversations.select()):
                    data = row
                assert data.conv_id == "x"
                assert data.creator == "*****@*****.**"
                assert data.subject == "testing"
                assert data.timestamp.isoformat() == timestamp.isoformat()
                assert data.status == "draft"
                await tr.rollback()
async def insert_user(login, password: str):
    password_hash = hashlib.md5(bytes(password, 'utf-8')).hexdigest()
    async with create_engine(connection_url_ss_train) as engine:
        async with engine.acquire() as conn:
            query = users.insert().values(login=login,
                                          password_hash=password_hash)
            await conn.execute(query)
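A hedged aside: bare md5 is weak for password storage; if hardening were wanted, a stdlib salted-PBKDF2 sketch (names illustrative, not part of the original code) could look like:

import hashlib
import os

def hash_password(password: str) -> bytes:
    # 16-byte random salt prepended to a PBKDF2-HMAC-SHA256 digest.
    salt = os.urandom(16)
    digest = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'),
                                 salt, 100_000)
    return salt + digest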
async def prepare(self):
    async with create_engine(user="******",
                             database="deviant",
                             host="127.0.0.1",
                             password="******",
                             port=5432) as engine:
        SQLModelManager.instance().database = engine
        users = await User.objects.all()
        for u in users:
            tmp_dict = {}
            tmp_dict["Name"] = u.screen_name
            tmp_dict["Tweets"] = u.statuses_count
            tmp_dict["Followers"] = u.followers_count
            tmp_dict["id"] = str(u.id)
            tmp_dict["Follow"] = u.follower
            u.tweets = await Tweet.objects.filter(user=u.id)
            if True in [i.retweet for i in u.tweets]:
                tmp_dict["Retweet"] = True
            tmp = [i.coin for i in u.tweets if i.coin != '']
            log.debug(f'tmp : {len(tmp)}')
            if len(tmp) == 1:
                tmp_dict["Coin"] = tmp[0].upper()
            self.datas["data"].append(tmp_dict)
async def go(urls: list):
    async with create_engine(user='******',
                             database='webproject',
                             host='127.0.0.1',
                             password='******') as engine:
        await create_table_page(engine)
        await create_table_relation(engine)
        tasks = [
            asyncio.create_task(insert_to_table_page(url, engine))
            for url in urls
        ]
        # Second batch: relation rows, one task per crawled url.
        tasks2 = [
            asyncio.create_task(insert_to_table_relation(engine))
            for url in urls
        ]
        await asyncio.gather(*tasks2)
        await asyncio.gather(*tasks)
        async with engine.acquire() as connect:
            async for row in connect.execute(page.select()):
                print(row.id, row.val)
async def upload_images():
    async with create_engine(dsn) as engine:
        async with engine.acquire() as conn:
            comics = await models.list_comics(
                conn, (models.table_comic.c.cdn == ''), limit=1000
            )
            for comic in comics:
                image_url = comic['image']
                # Repair malformed double-scheme URLs ('http://https://...').
                if image_url.startswith('http://https://'):
                    image_url = image_url[len('http://'):]
                try:
                    image_data = await fetch_url(image_url, binary=True)
                    assert image_data
                except Exception as e:
                    logger.exception('download %s failed! \n%s', image_url, e)
                    continue
                try:
                    cdn_url = await upload(image_data, comic)
                except Exception as e:
                    logger.exception('upload %s failed! \n%s', image_url, e)
                    continue
                uuid = str(comic['uuid'])
                await models.update_comics(
                    conn, (models.table_comic.c.uuid == uuid), cdn=cdn_url
                )
                logger.info('download %s then upload to %s success!',
                            image_url, cdn_url)
            await asyncio.sleep(60 * 1)
async def update_user(user_id, login, password):
    password_hash = hashlib.md5(bytes(password, 'utf-8')).hexdigest()
    async with create_engine(connection_url_ss_train) as engine:
        async with engine.acquire() as conn:
            query = users.update().where(users.c.id == user_id).values(
                login=login, password_hash=password_hash)
            await conn.execute(query)
async def main_coro(queues):
    session = aiobotocore.get_session(loop=asyncio.get_running_loop())
    config = AioConfig(max_pool_connections=AIOBOTO_CONCURRENCY)
    async with aiohttp.ClientSession() as download_session, \
            session.create_client('s3', config=config,
                                  **UPLOAD_CREDENTIALS_1) as upload_session_1, \
            session.create_client('s3', config=config,
                                  **UPLOAD_CREDENTIALS_2) as upload_session_2, \
            create_engine(**POSTGRESQL_CONNECT_DICT) as database_session:
        sessions = dict(
            upload=dict(
                session=upload_session_1,
                host=UPLOAD_CREDENTIALS_1['endpoint_url'],
                bucket=BUCKET_NAME_1,
            ),
            upload_2=dict(
                session=upload_session_2,
                host=UPLOAD_CREDENTIALS_2['endpoint_url'],
                bucket=BUCKET_NAME_2,
            ),
            download=download_session,
            database=database_session,
        )
        await publish_msg_to_download_queue(
            queues=queues,
            sessions=sessions,
            page_size=AMOUNT_UPDATE_IMAGES,
        )
def connect(self, **kwargs):
    engine = yield from sa.create_engine(database='aiopg',
                                         user='******',
                                         password='******',
                                         host='127.0.0.1',
                                         loop=self.loop,
                                         **kwargs)
    with (yield from engine) as conn:
        try:
            yield from conn.execute(DropTable(tbl))
        except psycopg2.ProgrammingError:
            pass
        try:
            yield from conn.execute(DropTable(tbl2))
        except psycopg2.ProgrammingError:
            pass
        yield from conn.execute("DROP TYPE IF EXISTS simple_enum CASCADE;")
        yield from conn.execute("""CREATE TYPE simple_enum AS ENUM
                                   ('first', 'second');""")
        try:
            yield from conn.execute(CreateTable(tbl))
            self.tbl = tbl
            self.has_hstore = True
        except psycopg2.ProgrammingError:
            yield from conn.execute(CreateTable(tbl2))
            self.tbl = tbl2
            self.has_hstore = False
    return engine
async def handle(request):
    """The response object needs to be a StreamResponse."""
    response = web.StreamResponse(
        status=200,
        reason='OK',
        headers={
            'Content-Type': 'text/plain',
            'X-Accel-Buffering': 'no'
        },
    )
    response.enable_chunked_encoding()
    await response.prepare(request)
    async with create_engine(user=user_name,
                             database=database_name,
                             host=host_name) as engine:
        meta = sa.MetaData()
        meta.bind = engine
        async with engine.acquire() as conn:
            data = await conn.execute(query)
            while True:
                fetch_data = await data.fetchmany(1000)
                if not fetch_data:  # result exhausted; stop streaming
                    break
                sum_data = str()
                for fetch in fetch_data:
                    sum_data += 'object_id:{} \n'.format(fetch[0])
                await response.write(sum_data.encode())
    await response.write_eof()
    return response
async def update_invoice(invoice_id, project_id, description):
    async with create_engine(connection_url_ss_train) as engine:
        async with engine.acquire() as conn:
            query = invoices.update().where(
                invoices.c.id == invoice_id).values(
                    project_id=project_id, description=description)
            await conn.execute(query)
async def moz_parser(urls_q):
    async with aiohttp.ClientSession() as session:
        async with sa.create_engine(dsn) as db_pool:
            while urls_q.qsize() > 0:
                url = await urls_q.get()
                print(f"Start processing {url}")
                try:
                    async with session.get(url) as response:
                        html_code = await response.text()
                except Exception as e:
                    print(type(e), e)
                    await urls_q.put(url)
                    continue
                dom_tree = await loop.run_in_executor(executor,
                                                      html.fromstring,
                                                      html_code)
                links = dom_tree.xpath("//h2/a")
                async with db_pool.acquire() as conn:
                    for link in links:
                        data = {"href": link.attrib['href'], "name": link.text}
                        await conn.execute(Link.insert().values(**data))
                print(f"SUCCESS | {url}")
def middleware(request):
    if app.get('engine') is None:
        engine = yield from create_engine('dbname=default user=root',
                                          minsize=1, maxsize=8)
        app['engine'] = engine
    return (yield from handler(request))
async def get_user(from_user):
    if not hasattr(from_user, 'id'):
        return None
    telegram_id = from_user.id
    try:
        async with create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                t_user = await find_user_by_telegram_id(telegram_id, conn)
                if t_user is None:
                    insert = user.insert().values(
                        telegram_id=telegram_id,
                        username=from_user.username,
                        first_name=from_user.first_name,
                        language_code=from_user.language_code,
                        is_bot=from_user.is_bot,
                    )
                    await conn.scalar(insert)
                    t_user = await find_user_by_telegram_id(telegram_id, conn)
                else:
                    await conn.execute(user.update().where(
                        user.c.id == t_user.id).values(login_date=func.now()))
                return t_user
    except Exception as message:
        logging.warning(message)
async def create_db(db_name):
    async with create_engine('user={user} '
                             'host={host} '
                             'password={password}'.format(**db)) as engine:
        async with engine.acquire() as connection:
            await connection.execute('CREATE DATABASE {}'.format(db_name))
    await engine.wait_closed()
async def url_is_exists(url):
    async with create_engine(dsn) as engine:
        async with engine.acquire() as conn:
            comic = await get_comic(
                conn, table_comic.c.source == url
            )
            if comic is not None:
                return True
async def go():
    async with create_engine(host=self.host,
                             user=self.user,
                             database=self.database,
                             password=self.password,
                             loop=self.loop) as engine:
        async with engine.acquire() as conn:
            assert isinstance(conn, SAConnection)
    assert engine.closed
def setup(app):
    engine = yield from create_engine(user=DATABASE_USERNAME,
                                      database=DATABASE_NAME,
                                      host=DATABASE_HOST,
                                      password=DATABASE_PASSWORD)
    app['db_engine'] = engine
    app['db_declarative_base'] = Base
    metadata.bind = engine
def get_engine():
    engine = yield from create_engine(user='******',
                                      database='http_framework_probe',
                                      host='127.0.0.1',
                                      password='')
    return engine
async def go():
    async with create_engine(**settings.DATABASE) as engine:
        async with engine.acquire() as conn:
            await create_table(engine)
            await conn.execute(tbl.insert().values(val='abc'))
            async for row in conn.execute(tbl.select()):
                print(row.id, row.val)
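Several snippets here insert into and iterate over a module-level `tbl`; a minimal definition consistent with the `row.id` / `row.val` accesses (column types are assumptions) would be:

import sqlalchemy as sa

metadata = sa.MetaData()
tbl = sa.Table('tbl', metadata,
               sa.Column('id', sa.Integer, primary_key=True),
               sa.Column('val', sa.String(255)))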
def create_engine(cluster):
    return aiopg_sa.create_engine(
        user=ClusterControl.username,
        password=ClusterControl.password,
        host=cluster['Address'],
        port=cluster['Port'],
        dbname='dev',
        client_encoding='utf8',
        enable_hstore=False,
    )
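`aiopg_sa.create_engine(...)` returns an awaitable, and this factory hands it back un-awaited; a caller sketch (assuming an async caller that also owns cleanup):

async def ping(cluster):
    engine = await create_engine(cluster)
    try:
        async with engine.acquire() as conn:
            await conn.execute("SELECT 1")
    finally:
        engine.close()
        await engine.wait_closed()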
def _get_pg(self):
    database = "test"
    self.pg_pool = yield from create_engine(
        database=database,
        user="******",
        password="******",
        host="localhost",
        loop=self.loop
    )
def connect(dsn):
    """
    Create the tables and return a ready engine, blocking until done.

    :param dsn: database connection string
    :type dsn: DSN
    :returns: aiopg.sa.Engine
    """
    _create_tables(dsn)
    return asyncio.get_event_loop() \
        .run_until_complete(create_engine(dsn))
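Call sites are then fully synchronous (a sketch; the shutdown sequence is an assumption):

engine = connect(dsn)
# ... hand the engine to the async application ...
engine.close()
asyncio.get_event_loop().run_until_complete(engine.wait_closed())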
def go():
    engine = yield from create_engine(user='******',
                                      database='aiopg',
                                      host='127.0.0.1',
                                      password='******')
    yield from create_tables(engine)
    yield from fill_data(engine)
    yield from count(engine)
    yield from show_julia(engine)
    yield from ave_age(engine)
async def test_datastore_setup(loop, empty_db, dsn):
    async with create_engine(dsn, loop=loop, timeout=5) as engine:
        ds = PostgresDataStore()
        ds.engine = engine
        controller = Controller(ds)
        async with ds.connection() as conn:
            action = Action("*****@*****.**", None, Verbs.ADD)
            conv_id = await controller.act(action, subject="the subject")
            cds = ds.new_conv_ds(conv_id, conn)
            props = await cds.get_core_properties()
            assert props["subject"] == "the subject"
async def go():
    sql = 'SELECT generate_series(1, 5);'
    result = []
    async with create_engine(host=self.host,
                             user=self.user,
                             database=self.database,
                             password=self.password,
                             loop=self.loop) as engine:
        async with engine.acquire() as conn:
            async for value in conn.execute(sql):
                result.append(value)
            assert result == [(1,), (2,), (3,), (4,), (5,)]
    assert conn.closed
async def go():
    async with create_engine(user='******',
                             database='aiopg',
                             host='127.0.0.1',
                             password='******') as engine:
        await create_table(engine)
        async with engine.acquire() as conn:
            await conn.execute(tbl.insert().values(val='abc'))
            async for row in conn.execute(tbl.select()):
                print(row.id, row.val)
async def go():
    async with create_engine(host=self.host,
                             user=self.user,
                             database=self.database,
                             password=self.password,
                             loop=self.loop) as engine:
        async with engine.acquire() as conn:
            with pytest.raises(RuntimeError) as ctx:
                async with conn.begin() as tr:
                    assert tr.is_active
                    raise RuntimeError('boom')
            assert str(ctx.value) == 'boom'
            assert not tr.is_active
    assert conn.closed
async def wshandler(request):
    ws = web.WebSocketResponse()
    await ws.prepare(request)
    async with create_engine(user=db["USER"],
                             database=db["NAME"],
                             password=db["PASSWORD"]) as engine:
        async with engine.acquire() as conn:
            async for msg in ws:
                if msg.tp == web.MsgType.text:
                    json_data = json.loads(msg.data)
                    await conn.execute(circle.insert().values(**json_data))
                elif msg.tp == web.MsgType.close:
                    break
    return ws
async def get_comic(request):
    async with create_engine(dsn) as engine:
        async with engine.acquire() as conn:
            uuid = request.match_info['uuid']
            if not validate_uuid(uuid):
                return json_response({}, status=404)
            where = (models.table_comic.c.uuid == uuid)
            comic = await models.get_comic(conn, where)
            if comic is not None:
                return json_response(comic)
            return json_response({}, status=404)
def go():
    engine = yield from create_engine(user='******',
                                      database='aiopg',
                                      host='127.0.0.1',
                                      password='******')
    yield from create_table(engine)
    with (yield from engine) as conn:
        yield from conn.execute(tbl.insert().values(val='abc'))
        res = yield from conn.execute(tbl.select())
        for row in res:
            print(row.id, row.val)
async def prepare_db(app, loop):
    """Add some seed data."""
    async with create_engine(connection) as engine:
        async with engine.acquire() as conn:
            await conn.execute('DROP TABLE IF EXISTS sanic_polls')
            await conn.execute("""CREATE TABLE sanic_polls (
                                      id serial primary key,
                                      question varchar(50),
                                      pub_date timestamp
                                  );""")
            for i in range(0, 100):
                await conn.execute(
                    polls.insert().values(question=i,
                                          pub_date=datetime.datetime.now())
                )
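A sketch of wiring this listener into a Sanic app (the registration style is an assumption):

app = Sanic(__name__)
app.register_listener(prepare_db, 'before_server_start')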
def init(loop):
    redis_pool = yield from create_pool(('localhost', 6379))
    dbengine = yield from create_engine(user='******',
                                        password='******',
                                        database='aiohttp_security',
                                        host='127.0.0.1')
    app = web.Application(loop=loop)
    setup_session(app, RedisStorage(redis_pool))
    setup_security(app,
                   SessionIdentityPolicy(),
                   DBAuthorizationPolicy(dbengine))
    web_handlers = Web()
    yield from web_handlers.configure(app)
    handler = app.make_handler()
    srv = yield from loop.create_server(handler, '127.0.0.1', 8080)
    print("Server started at http://127.0.0.1:8080")
    return srv, app, handler
async def go():
    async with create_engine(host=self.host,
                             user=self.user,
                             database=self.database,
                             password=self.password,
                             loop=self.loop) as engine:
        async with engine.acquire() as conn:
            async with conn.begin() as tr:
                # Check that the context manager does not issue a second
                # commit; two commits in a row raise InvalidRequestError.
                await tr.commit()
                assert not tr.is_active

            tr2 = await conn.begin()
            async with tr2:
                assert tr2.is_active
                # Check the double-commit guard one more time.
                await tr2.commit()
                assert not tr2.is_active
    assert conn.closed