def _create_tables(self, schema):
    """Register the main, auxiliary and (optional) files tables for *schema*.

    Tables are attached to ``self.metadata`` and cached on
    ``self.tables`` / ``self.aux_tables`` / ``self.files_tables``
    keyed by the schema's resource type.
    """
    latest = schema['versions'][-1]
    subpaths = latest.get('subpaths', {})
    resource_type = schema['type']
    has_files = latest.get('files', [])

    # Main table: one row per resource, plus one JSONB column per subpath.
    main_table = sa.Table(
        chop_long_name(resource_type),
        self.metadata,
        sa.Column('id', sa.String(46), primary_key=True),
        sa.Column('revision', sa.String(46)),
        sa.Column('search', JSONB, nullable=False),
        sa.Column('data', JSONB, nullable=False),
        *(
            sa.Column('data_' + subpath, JSONB, nullable=True)
            for subpath in sorted(subpaths)
        ),
    )
    self.tables[resource_type] = main_table

    # GIN index over `search` for EXACT searches.
    self._add_index(
        chop_long_name('gin_idx_' + resource_type),
        main_table.name,
        main_table.c.search,
    )

    # Auxiliary table for all nested lists; rows cascade with the parent.
    aux_table = sa.Table(
        chop_long_name(resource_type + '__aux'),
        self.metadata,
        sa.Column('id',
                  sa.ForeignKey(main_table.c.id, ondelete='CASCADE'),
                  index=True),
        sa.Column('data', JSONB, nullable=False),
    )
    self.aux_tables[resource_type] = aux_table

    # Files table only when this schema version declares file subpaths.
    if has_files:
        files_table = sa.Table(
            chop_long_name(resource_type + '__files'),
            self.metadata,
            sa.Column('id',
                      sa.ForeignKey(main_table.c.id, ondelete='CASCADE'),
                      index=True),
            sa.Column('subpath', sa.String(128), nullable=False),
            sa.Column('blob', sa.LargeBinary()),
            sa.UniqueConstraint(
                'id', 'subpath',
                name=self._get_file_unique_idx_name(resource_type)),
        )
        self.files_tables[resource_type] = files_table
async def accounts(request, role_ids=None):
    """Asynchronously yield rows from ``AccountRoles``.

    When *role_ids* is given, only accounts whose ``role_ids`` array
    contains every requested id are yielded.
    """
    table = metadata().tables['AccountRoles']
    query = sa.select([table])
    if role_ids is not None:
        wanted = sa.cast(role_ids, postgresql.ARRAY(sa.String(32)))
        query = query.where(table.c.role_ids.contains(wanted))
    async with request.app['engine'].acquire() as conn:
        async for row in conn.execute(query):
            yield row
def metadata() -> sa.MetaData:
    """Build and return the metadata for the account-role tables.

    ``AccountRolesLog`` is an append-only audit log; ``AccountRoles``
    holds the current role set per account and points at the log entry
    that produced it.
    """
    result = sa.MetaData()
    sa.Table(
        'AccountRolesLog', result,
        sa.Column('id', sa.Integer, index=True, nullable=False,
                  primary_key=True),
        sa.Column('created_at', sa.DateTime,
                  server_default=sa_functions.now(),
                  index=True, nullable=False),
        sa.Column('created_by', sa.Unicode, index=True, nullable=False),
        # BUG FIX: was nullable=None (a typo). None is falsy, so SQLAlchemy
        # already rendered NOT NULL; nullable=False states the intent
        # explicitly and matches the sibling columns.
        sa.Column('request_info', sa.UnicodeText, nullable=False),
        sa.Column('account_id', sa.String, index=True, nullable=False),
        sa.Column('action', sa.String(1), index=True, nullable=False),
        sa.Column('role_ids', postgresql.ARRAY(sa.String(32)),
                  nullable=False),
        # GIN index so array-containment queries on role_ids are fast.
        sa.Index('idx_arl_role_ids', 'role_ids', postgresql_using='gin'))
    sa.Table(
        'AccountRoles', result,
        sa.Column('account_id', sa.String, index=True, nullable=False,
                  primary_key=True),
        sa.Column('role_ids', postgresql.ARRAY(sa.String(32)),
                  nullable=False),
        sa.Column('log_id', sa.Integer,
                  sa.ForeignKey('AccountRolesLog.id'),
                  index=True, nullable=False, unique=True),
        sa.Index('idx_ar_role_ids', 'role_ids', postgresql_using='gin'))
    return result
def test_5():
    """Truncate sensor_values_t, then insert ROWS_COUNT random-keyed rows."""
    meta = sa.MetaData()
    sensor_values = sa.Table(
        'sensor_values_t', meta,
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('key', sa.String(50)))

    async def populate():
        async with aiopg.sa.create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                await conn.execute("TRUNCATE TABLE foglamp.sensor_values_t;")
                for _ in range(ROWS_COUNT):
                    await conn.execute(
                        sensor_values.insert().values(key=id_generator(10)))

    asyncio.get_event_loop().run_until_complete(populate())
def test_2():
    """Insert one random-keyed row, then select and print a fixed-key row."""
    meta = sa.MetaData()
    sensor_values = sa.Table(
        'sensor_values_t', meta,
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('key', sa.String(50)))

    async def go():
        async with aiopg.sa.create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                await conn.execute(
                    sensor_values.insert().values(key=id_generator(10)))
                print("DBG 1")
                query = sensor_values.select().where(
                    sensor_values.c.key == '6b110c8ca02')
                async for row in conn.execute(query):
                    print(row.id, row.key)
                print("DBG 2")

    asyncio.get_event_loop().run_until_complete(go())
import aiopg.sa import sqlalchemy as sa from aiohttp.web import Application from .settings import config METADATA = sa.MetaData() image = sa.Table( "image", METADATA, sa.Column("id", sa.Integer(), primary_key=True), sa.Column("origin", sa.String(), nullable=False, unique=True), sa.Column("url_big", sa.String(), nullable=False), sa.Column("url_thumb", sa.String(), nullable=False), sa.Column( "indexed_at", sa.DateTime(), nullable=False, server_default=sa.text("(now() at time zone 'utc')"), ), ) image_color = sa.Table( "image_color", METADATA, sa.Column("id", sa.Integer(), primary_key=True), sa.Column("image_id", sa.Integer(), sa.ForeignKey("image.id"), nullable=False),
import aiopg.sa import sqlalchemy as sa from sanic_study.main import app meta = sa.MetaData() question = sa.Table( 'question', meta, sa.Column('id', sa.Integer, nullable=False), sa.Column('question_text', sa.String(200), nullable=False), sa.Column('pub_date', sa.Date, nullable=False), sa.PrimaryKeyConstraint('id', name='question_id_pkey') ) choice = sa.Table( 'choice', meta, sa.Column('id', sa.Integer, nullable=False), sa.Column('question_id', sa.Integer, nullable=False), sa.Column('choice_text', sa.String(200), nullable=False), sa.Column('votes', sa.Integer, server_default="0", nullable=False), sa.PrimaryKeyConstraint('id', name='choice_id_pkey'), sa.ForeignKeyConstraint(['question_id'], [question.c.id], name='choice_question_id_fkey', ondelete='CASCADE') ) async def init_pg(app):
DatabaseError, PostgresRetryPolicyUponOperation, init_pg_tables, is_pg_responsive, retry_pg_api, ) from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent metadata = sa.MetaData() tbl = sa.Table( "tbl", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("val", sa.String(255)), ) @pytest.fixture async def postgres_service_with_fake_data( request, loop, postgres_service: DataSourceName ) -> DataSourceName: async def _create_table(engine: aiopg.sa.Engine): async with engine.acquire() as conn: await conn.execute(f"DROP TABLE IF EXISTS {tbl.name}") await conn.execute( f"""CREATE TABLE {tbl.name} ( id serial PRIMARY KEY, val varchar(255))""" )
import aiopg.sa
import sqlalchemy as sa

meta = sa.MetaData()

# Stored documents: name plus created/updated dates, all optional.
table_documents = sa.Table(
    'documents', meta,
    sa.Column('id', sa.Integer, primary_key=True, unique=True,
              autoincrement=True),
    sa.Column('name', sa.String(200), nullable=True),
    sa.Column('created', sa.Date, nullable=True),
    sa.Column('updated', sa.Date, nullable=True))


async def init_pg(app):
    """Create the aiopg engine from app config and store it on the app."""
    cfg = app['config']['postgres']
    engine = await aiopg.sa.create_engine(
        database=cfg['database'],
        user=cfg['user'],
        password=cfg['password'],
        host=cfg['host'],
        port=cfg['port'],
        minsize=cfg['minsize'],
        maxsize=cfg['maxsize'],
    )
    app['documents'] = engine
import asyncio import aiopg.sa import sqlalchemy as sa from aio_crud_store.aiopg_store import AiopgStore DB_URI = 'postgresql:///aio_crud_store' # create table metadata = sa.MetaData() metadata.bind = sa.create_engine(DB_URI) table = sa.Table('aiopg_store', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('foo', sa.String()), sa.Column('spam', sa.Integer()), ) metadata.drop_all() metadata.create_all() async def main(): engine = await aiopg.sa.create_engine(DB_URI) # initialize store store = AiopgStore(table, engine) # create id = await store.create({'foo': 'bar'}) print(id) # 1
from byn.datatypes import BcseData, ExternalRateData from byn.predict.predictor import PredictionRecord from byn.utils import ( EnumAwareEncoder, anext, atuple, ) logger = logging.getLogger(__name__) metadata = sa.MetaData() external_rate = sa.Table( 'external_rate', metadata, sa.Column('currency', sa.String(3), primary_key=True), sa.Column('timestamp', sa.Integer, primary_key=True), sa.Column('timestamp_close', sa.Integer), sa.Column('open', sa.DECIMAL(12, 6)), sa.Column('close', sa.DECIMAL(12, 6)), sa.Column('low', sa.DECIMAL(12, 6)), sa.Column('high', sa.DECIMAL(12, 6)), sa.Column('volume', sa.SMALLINT), ) external_rate_live = sa.Table( 'external_rate_live', metadata, sa.Column('currency', sa.String(3), primary_key=True), sa.Column('timestamp', sa.Integer, primary_key=True), sa.Column('volume', sa.SMALLINT, primary_key=True),
'users', 'projects', 'projects_staff', ) class Role(Enum): SENIOR = 'senior' ASSISTANT = 'assistant' metadata = sa.MetaData() users = sa.Table( 'users', metadata, sa.Column('id', sa.Integer(), primary_key=True), sa.Column('name', sa.String(255), unique=True), sa.Column('role', sa.Enum(Role), default=Role.SENIOR), sa.Column('password', sa.String(255), nullable=False), sa.Column('created_on', sa.DateTime(), default=datetime.now), sa.Column('updated_on', sa.DateTime(), default=datetime.now, onupdate=datetime.now), sa.Column('deleted_on', sa.DateTime(), nullable=True)) projects = sa.Table( 'projects', metadata, sa.Column('id', sa.Integer(), primary_key=True), sa.Column('name', sa.String(255), unique=True), sa.Column('user_id', sa.ForeignKey('users.id')), sa.Column('created_on', sa.DateTime(), default=datetime.now), sa.Column('updated_on',
import aiopg.sa
import sqlalchemy as sa

__all__ = ['users', 'karma']

meta = sa.MetaData()

# Registered users.
users = sa.Table(
    'users', meta,
    sa.Column('id', sa.Integer, nullable=False),
    sa.Column('email', sa.String(256), nullable=False),
    sa.Column('password_hash', sa.String(512), nullable=False),
    sa.Column('registration_date', sa.Date, nullable=False),
    # Indexes
    sa.PrimaryKeyConstraint('id', name='users_id_pkey'),
)

# Per-user karma counter; rows are removed with their owning user.
karma = sa.Table(
    'karma', meta,
    sa.Column('id', sa.Integer, nullable=False),
    sa.Column('users_id', sa.Integer, nullable=False),
    sa.Column('karma', sa.Integer, server_default="0", nullable=False),
    # Indexes
    sa.PrimaryKeyConstraint('id', name='karma_id_pkey'),
    sa.ForeignKeyConstraint(['users_id'], [users.c.id],
                            name='karma_users_id_fkey',
                            ondelete='CASCADE'),
)
from sanic import exceptions
import aiopg.sa
import sqlalchemy as sa
from discode_server.utils import baseconv
from discode_server.utils import highlight

# BUG FIX: was logging.getLogger(__file__), which names the logger after
# the file *path* and breaks the dotted logger hierarchy; __name__ is the
# convention and lets handlers/levels be configured per package.
log = logging.getLogger(__name__)

meta = sa.MetaData()

# A stored paste: raw contents plus lexer metadata for highlighting.
paste = sa.Table(
    'pastes', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('contents', sa.Text(), nullable=False),
    sa.Column('created_on', sa.DateTime, default=datetime.datetime.utcnow),
    sa.Column('sha', sa.String(64), nullable=False),
    sa.Column('lexer', sa.String(60), nullable=True),
    sa.Column('lexer_guessed', sa.Boolean, default=False),
)

# A per-line comment on a paste; deleted along with the paste (CASCADE).
comment = sa.Table(
    'comments', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('paste_id', sa.Integer,
              sa.ForeignKey("pastes.id", ondelete="CASCADE"),
              nullable=False),
    sa.Column('line', sa.Integer, nullable=False),
    sa.Column('contents', sa.Text(), nullable=False),
    sa.Column('created_on', sa.DateTime, default=datetime.datetime.utcnow),
)
import aiopg
import aiopg.sa
import sqlalchemy as sa
from attrdict import AttrDict

metadata = sa.MetaData()

# Bid/ask quote history keyed by timestamp.
table_trade_history = sa.Table(
    'trade_history', metadata,
    sa.Column('time', sa.Integer, primary_key=True),
    sa.Column('exchange', sa.String(255)),
    sa.Column('pair', sa.String(255)),
    sa.Column('bid', sa.Float),
    sa.Column('ask', sa.Float),
    sa.Column('bid_size', sa.Float),
    sa.Column('ask_size', sa.Float),
)


async def configure(env: AttrDict):
    """Attach an aiopg engine built from ``env.cfg.dsn`` and return *env*."""
    env.db = await aiopg.sa.create_engine(env.cfg.dsn)
    return env
import aiopg.sa
import sqlalchemy as sa

__all__ = ['question', 'choice']

meta = sa.MetaData()

# A poll question with its publication date.
question = sa.Table(
    "question", meta,
    sa.Column("id", sa.Integer, nullable=False),
    sa.Column("text", sa.String(200), nullable=False),
    sa.Column("pub_date", sa.Date, nullable=False),
    # Indexes
    sa.PrimaryKeyConstraint("id", name="question_id_pkey"),
)

# An answer option with its vote count; removed with its question.
choice = sa.Table(
    "choice", meta,
    sa.Column("id", sa.Integer, nullable=False),
    sa.Column("question_id", sa.Integer, nullable=False),
    sa.Column("choice_text", sa.String(200), nullable=False),
    sa.Column("votes", sa.Integer, server_default="0", nullable=False),
    # Indexes
    sa.PrimaryKeyConstraint('id', name='choice_id_pkey'),
    sa.ForeignKeyConstraint(["question_id"], [question.c.id],
                            name="choice_question_id_fkey",
                            ondelete="CASCADE"),
)
import aiopg.sa import sqlalchemy as sa from chat.settings import config meta = sa.MetaData() chat_user = sa.Table( 'chat_user', meta, sa.Column('id', sa.Integer, primary_key=True), sa.Column('username', sa.String(64), nullable=False), sa.Column('password', sa.String(200), nullable=False), sa.PrimaryKeyConstraint('id', name='chat_user_id_pkey') ) chat = sa.Table( 'chat', meta, sa.Column('id', sa.Integer, primary_key=True, autoincrement=True), sa.Column('name', sa.String(64), nullable=False, unique=True), sa.PrimaryKeyConstraint('id', name='chat_id_pkey') ) message = sa.Table( 'message', meta, sa.Column('id', sa.Integer, primary_key=True), sa.Column('chat_id', sa.Integer, nullable=False), sa.Column('message', sa.String(256), nullable=False), sa.PrimaryKeyConstraint('id', name='message_id_pkey'),