def test_get_spatial_dialect(self):
    """Each core dialect maps to its spatial counterpart, and lookups are cached."""
    first = DialectManager.get_spatial_dialect(PGDialect_psycopg2())
    ok_(isinstance(first, PGSpatialDialect))
    # Table-driven check of the remaining supported dialect pairings.
    pairings = [
        (MySQLDialect, MySQLSpatialDialect),
        (SQLiteDialect, SQLiteSpatialDialect),
        (OracleDialect, OracleSpatialDialect),
        (MSDialect, MSSpatialDialect),
    ]
    for core_cls, spatial_cls in pairings:
        ok_(isinstance(DialectManager.get_spatial_dialect(core_cls()), spatial_cls))
    # A second lookup for the same dialect class must hand back the cached object.
    second = DialectManager.get_spatial_dialect(PGDialect_psycopg2())
    ok_(first is second, "only one instance per dialect should be created")
def get_dialect(json_serializer=json.dumps, json_deserializer=lambda x: x):
    """Build a PGDialect_psycopg2 pre-configured for async psycopg2 usage.

    The capability flags are set statically instead of being probed from a
    live connection, since no real DB-API connection is involved here.
    """
    dialect = PGDialect_psycopg2(
        json_serializer=json_serializer,
        json_deserializer=json_deserializer,
    )
    dialect.statement_compiler = APGCompiler_psycopg2
    # Static capability flags (PostgreSQL 9.2+ / psycopg 2.0.9+ features).
    capabilities = {
        'implicit_returning': True,
        'supports_native_enum': True,
        'supports_smallserial': True,
        '_backslash_escapes': False,
        'supports_sane_multi_rowcount': True,
        '_has_native_hstore': True,
    }
    for attr, flag in capabilities.items():
        setattr(dialect, attr, flag)
    return dialect
def adapt_composite(value):
    """Render *value*'s composite columns as an AsIs ``(v1, v2, ...)::type`` literal."""
    def _bind(column):
        # Run TypeDecorator bind processing before psycopg2 adaptation,
        # mirroring what SQLAlchemy would do on a real execute.
        raw = getattr(value, column.name)
        if isinstance(column.type, TypeDecorator):
            raw = column.type.process_bind_param(raw, PGDialect_psycopg2())
        return adapt(raw)

    adapted = [_bind(column) for column in composite.columns]
    # Some adapters (e.g. quoted strings) need the connection before quoting.
    for item in adapted:
        if hasattr(item, 'prepare'):
            item.prepare(dbapi_connection)
    if six.PY3:
        quoted = [item.getquoted().decode(dbapi_connection.encoding)
                  for item in adapted]
    else:
        quoted = [item.getquoted() for item in adapted]
    return AsIs("(%s)::%s" % (', '.join(quoted), composite.name))
class SAConn:
    """Thin asyncpg pool wrapper that compiles SQLAlchemy queries to SQL text."""

    _pool = None
    # Shared dialect used only for query compilation — never bound to a live
    # connection, so capability flags are supplied up front.
    _dialect = PGDialect_psycopg2(
        json_serializer=json.dumps,
        json_deserializer=lambda x: x,
        implicit_returning=True,
        supports_native_enum=True,
        supports_smallserial=True,
        supports_sane_multi_rowcount=True,
    )

    @classmethod
    async def init_db_connect(cls, loop):
        """Create the asyncpg pool from the 'postgres' section of configs/db.yaml."""
        config = read_and_validate('configs/db.yaml', TRAFARET)
        cls._pool = await create_pool(loop=loop, **config['postgres'])

    @classmethod
    def compile_query(cls, query):
        """Render a SQLAlchemy statement to a literal SQL string."""
        compiled = query.compile(
            dialect=cls._dialect, compile_kwargs={"literal_binds": True})
        return str(compiled)

    @classmethod
    async def execute(cls, query):
        """Run *query* inside a transaction and return the command status."""
        async with cls._pool.acquire() as connection, connection.transaction():
            return await connection.execute(cls.compile_query(query))

    @classmethod
    async def prepare(cls, query):
        """Yield result records one at a time via a server-side cursor."""
        async with cls._pool.acquire() as connection, connection.transaction():
            stmt = await connection.prepare(cls.compile_query(query))
            async for record in stmt.cursor():
                yield record

    @classmethod
    async def fetchrow(cls, query):
        """Run *query* inside a transaction and return the first row (or None)."""
        async with cls._pool.acquire() as connection, connection.transaction():
            return await connection.fetchrow(cls.compile_query(query))

    @classmethod
    async def close(cls):
        """Gracefully close the pool."""
        await cls._pool.close()
import asyncio
import json

import aiopg
from .connection import SAConnection
from .exc import InvalidRequestError
from aiopg.connection import TIMEOUT

# sqlalchemy is an optional dependency of aiopg.sa; fail with a clear
# message instead of a bare ImportError from the deep module path.
try:
    from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
except ImportError:  # pragma: no cover
    raise ImportError('aiopg.sa requires sqlalchemy')

# Module-level dialect shared by all engines created here. Capability flags
# are set statically (rather than probed from a live connection) because no
# real DB-API connection is available at compile time.
_dialect = PGDialect_psycopg2(json_serializer=json.dumps,
                              json_deserializer=lambda x: x)
_dialect.implicit_returning = True
_dialect.supports_native_enum = True
_dialect.supports_smallserial = True  # 9.2+
_dialect._backslash_escapes = False
_dialect.supports_sane_multi_rowcount = True  # psycopg 2.0.9+
_dialect._has_native_hstore = True


# NOTE(review): the create_engine definition is truncated in this view; its
# signature (and body) continue beyond what is shown here.
@asyncio.coroutine
def create_engine(dsn=None, *, minsize=10, maxsize=10, loop=None,
                  dialect=_dialect, timeout=TIMEOUT,
def __init__(self, *args, **kwargs):
    """Initialize the underlying psycopg2 dialect, then disable isolation-level support.

    All positional and keyword arguments are forwarded unchanged to the
    base dialect's constructor.
    """
    # Use super() instead of hard-coding PGDialect_psycopg2.__init__ so the
    # MRO is respected and cooperative multiple inheritance keeps working.
    super().__init__(*args, **kwargs)
    self.supports_isolation_level = False
def setUp(self):
    """Create fresh PostgreSQL and SQLite dialect instances for each test."""
    self.pg_sql_dialect, self.sqlite_dialect = (
        PGDialect_psycopg2(),
        SQLiteDialect_pysqlite(),
    )