def _create_tables(self, schema):
    version = schema['versions'][-1]
    subpaths = version.get('subpaths', {})
    resource_type = schema['type']
    files = version.get('files', [])

    # Define main table
    main_table = sa.Table(
        chop_long_name(resource_type), self.metadata,
        sa.Column('id', sa.String(46), primary_key=True),
        sa.Column('revision', sa.String(46)),
        sa.Column('search', JSONB, nullable=False),
        sa.Column('data', JSONB, nullable=False),
        *(sa.Column('data_' + subpath, JSONB, nullable=True)
          for subpath in sorted(subpaths.keys())))
    self.tables[resource_type] = main_table

    # Define gin index for EXACT searches
    self._add_index(chop_long_name('gin_idx_' + resource_type),
                    main_table.name, main_table.c.search)

    # Define auxiliary tables and gin indexes for all nested lists.
    aux_table = sa.Table(
        chop_long_name(resource_type + '__aux'), self.metadata,
        sa.Column('id', sa.ForeignKey(main_table.c.id, ondelete='CASCADE'), index=True),
        sa.Column('data', JSONB, nullable=False),
    )
    self.aux_tables[resource_type] = aux_table

    # Define files table if needed.
    if files:
        files_table = sa.Table(
            chop_long_name(resource_type + '__files'), self.metadata,
            sa.Column('id', sa.ForeignKey(main_table.c.id, ondelete='CASCADE'), index=True),
            sa.Column('subpath', sa.String(128), nullable=False),
            sa.Column('blob', sa.LargeBinary()),
            sa.UniqueConstraint(
                'id', 'subpath',
                name=self._get_file_unique_idx_name(resource_type)))
        self.files_tables[resource_type] = files_table
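# Hedged sketch (not from the original source): chop_long_name and
# self._add_index are referenced above but not shown in this excerpt.
# One plausible shape, assuming chop_long_name keeps identifiers under
# PostgreSQL's 63-character limit and _add_index registers a GIN index
# over the JSONB search column; both bodies are guesses, not the
# author's implementation.
import hashlib


def chop_long_name(name, limit=63):
    # Hash the overflow so truncated names stay unique (assumed behaviour).
    if len(name) <= limit:
        return name
    digest = hashlib.sha1(name.encode()).hexdigest()[:8]
    return name[:limit - 9] + '_' + digest


def _add_index(self, index_name, table_name, column):
    # Would live on the same class as _create_tables; a GIN index over JSONB
    # supports exact containment (@>) lookups.
    return sa.Index(index_name, column, postgresql_using='gin')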
def metadata() -> sa.MetaData: result = sa.MetaData() sa.Table( 'AccountRolesLog', result, sa.Column('id', sa.Integer, index=True, nullable=False, primary_key=True), sa.Column('created_at', sa.DateTime, server_default=sa_functions.now(), index=True, nullable=False), sa.Column('created_by', sa.Unicode, index=True, nullable=False), sa.Column('request_info', sa.UnicodeText, nullable=None), sa.Column('account_id', sa.String, index=True, nullable=False), sa.Column('action', sa.String(1), index=True, nullable=False), sa.Column('role_ids', postgresql.ARRAY(sa.String(32)), nullable=False), sa.Index('idx_arl_role_ids', 'role_ids', postgresql_using='gin')) sa.Table( 'AccountRoles', result, sa.Column('account_id', sa.String, index=True, nullable=False, primary_key=True), sa.Column('role_ids', postgresql.ARRAY(sa.String(32)), nullable=False), sa.Column('log_id', sa.Integer, sa.ForeignKey('AccountRolesLog.id'), index=True, nullable=False, unique=True), sa.Index('idx_ar_role_ids', 'role_ids', postgresql_using='gin')) return result
async def get_db(dsn: str, **engine_kwargs) -> aiopg.sa.Engine:
    # Reflect the existing tables with a throwaway synchronous engine,
    # then hand the metadata over to the aiopg engine.
    engine = sa.create_engine(dsn)
    meta = sa.MetaData()
    order_table = sa.Table('orders', meta, autoload=True, autoload_with=engine)
    trade_history_table = sa.Table('trade_history', meta, autoload=True,
                                   autoload_with=engine)
    order_pairs = sa.Table('order_pairs', meta, autoload=True, autoload_with=engine)
    engine.dispose()

    async_engine = await aiopg.sa.create_engine(dsn=dsn, **engine_kwargs)
    async_engine.meta = meta
    async_engine.tables = {
        'orders': order_table,
        'trade_history': trade_history_table,
        'order_pairs': order_pairs,
    }
    return async_engine
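# Hedged usage sketch (not part of the original source): querying through the
# engine returned by get_db. The DSN is a placeholder.
import asyncio


async def main():
    engine = await get_db('postgresql://user:password@localhost/exchange')
    orders = engine.tables['orders']
    try:
        async with engine.acquire() as conn:
            async for row in conn.execute(orders.select().limit(10)):
                print(dict(row))
    finally:
        engine.close()
        await engine.wait_closed()


asyncio.get_event_loop().run_until_complete(main())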
def test_5():
    metadata = sa.MetaData()
    tbl = sa.Table('sensor_values_t', metadata,
                   sa.Column('id', sa.BigInteger, primary_key=True),
                   sa.Column('key', sa.String(50)))

    async def go():
        async with aiopg.sa.create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                await conn.execute("TRUNCATE TABLE foglamp.sensor_values_t;")
                for idx in range(0, ROWS_COUNT):
                    await conn.execute(
                        tbl.insert().values(key=id_generator(10)))

    loop = asyncio.get_event_loop()
    loop.run_until_complete(go())
def test_2():
    metadata = sa.MetaData()
    tbl = sa.Table('sensor_values_t', metadata,
                   sa.Column('id', sa.BigInteger, primary_key=True),
                   sa.Column('key', sa.String(50)))

    async def go():
        async with aiopg.sa.create_engine(dsn) as engine:
            async with engine.acquire() as conn:
                await conn.execute(tbl.insert().values(key=id_generator(10)))  # !F!
                print("DBG 1")
                # async for row in conn.execute(tbl.select().where(tbl.c.key == '9f2acad687df4ae7b7faeef8affdd0a9')):
                async for row in conn.execute(
                        tbl.select().where(tbl.c.key == '6b110c8ca02')):
                    print(row.id, row.key)
                print("DBG 2")

    loop = asyncio.get_event_loop()
    loop.run_until_complete(go())
import sys
from pathlib import Path

import aiopg.sa
import pytest
import sqlalchemy as sa
from aiohttp import web

from servicelib.aiohttp.aiopg_utils import (
    DatabaseError,
    PostgresRetryPolicyUponOperation,
    init_pg_tables,
    is_pg_responsive,
    retry_pg_api,
)
from servicelib.common_aiopg_utils import DataSourceName, create_pg_engine

current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent

metadata = sa.MetaData()
tbl = sa.Table(
    "tbl",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("val", sa.String(255)),
)


@pytest.fixture
async def postgres_service_with_fake_data(
    request, loop, postgres_service: DataSourceName
) -> DataSourceName:
    async def _create_table(engine: aiopg.sa.Engine):
        async with engine.acquire() as conn:
            await conn.execute(f"DROP TABLE IF EXISTS {tbl.name}")
            await conn.execute(
                f"""CREATE TABLE {tbl.name} (
                        id serial PRIMARY KEY,
                        val varchar(255))"""
            )
# import asyncio

import aiopg.sa
import sqlalchemy as sa

meta = sa.MetaData()

table_documents = sa.Table(
    'documents', meta,
    sa.Column('id', sa.Integer, primary_key=True, unique=True, autoincrement=True),
    sa.Column('name', sa.String(200), nullable=True),
    sa.Column('created', sa.Date, nullable=True),
    sa.Column('updated', sa.Date, nullable=True))


async def init_pg(app):
    conf = app['config']['postgres']
    engine = await aiopg.sa.create_engine(
        database=conf['database'],
        user=conf['user'],
        password=conf['password'],
        host=conf['host'],
        port=conf['port'],
        minsize=conf['minsize'],
        maxsize=conf['maxsize'],
    )
    app['documents'] = engine
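# Hedged companion sketch (not part of the original source): teardown and a
# minimal read helper, assuming the engine is stored under app['documents']
# as in init_pg above.
async def close_pg(app):
    # Close the pool created in init_pg and wait for connections to return.
    app['documents'].close()
    await app['documents'].wait_closed()


async def list_documents(app):
    # Minimal read sketch: fetch every row from the documents table.
    async with app['documents'].acquire() as conn:
        result = await conn.execute(table_documents.select())
        return await result.fetchall()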
import aiopg.sa
import sqlalchemy as sa


async def init_pg(app):
    conf = app['config']['db']
    engine = await aiopg.sa.create_engine(database=conf['database'],
                                          user=conf['user'],
                                          password=conf['password'],
                                          host=conf['host'],
                                          loop=app.loop)
    app['db'] = engine


async def close_pg(app):
    app['db'].close()
    await app['db'].wait_closed()


meta = sa.MetaData()

note = sa.Table('notes', meta,
                sa.Column('id', sa.Integer, nullable=False),
                sa.Column('created_at', sa.Date, nullable=False),
                sa.Column('updated_at', sa.Date, nullable=False),
                sa.Column('content', sa.Text, nullable=False),
                sa.PrimaryKeyConstraint('id', name='question_id_pkey'))
import asyncio

import aiopg.sa
import sqlalchemy as sa

from aio_crud_store.aiopg_store import AiopgStore

DB_URI = 'postgresql:///aio_crud_store'

# create table
metadata = sa.MetaData()
metadata.bind = sa.create_engine(DB_URI)
table = sa.Table('aiopg_store', metadata,
                 sa.Column('id', sa.Integer, primary_key=True),
                 sa.Column('foo', sa.String()),
                 sa.Column('spam', sa.Integer()),
                 )
metadata.drop_all()
metadata.create_all()


async def main():
    engine = await aiopg.sa.create_engine(DB_URI)

    # initialize store
    store = AiopgStore(table, engine)

    # create
    id = await store.create({'foo': 'bar'})
    print(id)  # 1
from datetime import datetime
from enum import Enum

import sqlalchemy as sa


class Role(Enum):
    SENIOR = 'senior'
    ASSISTANT = 'assistant'


metadata = sa.MetaData()

users = sa.Table(
    'users', metadata,
    sa.Column('id', sa.Integer(), primary_key=True),
    sa.Column('name', sa.String(255), unique=True),
    sa.Column('role', sa.Enum(Role), default=Role.SENIOR),
    sa.Column('password', sa.String(255), nullable=False),
    sa.Column('created_on', sa.DateTime(), default=datetime.now),
    sa.Column('updated_on', sa.DateTime(), default=datetime.now, onupdate=datetime.now),
    sa.Column('deleted_on', sa.DateTime(), nullable=True))

projects = sa.Table(
    'projects', metadata,
    sa.Column('id', sa.Integer(), primary_key=True),
    sa.Column('name', sa.String(255), unique=True),
    sa.Column('user_id', sa.ForeignKey('users.id')),
    sa.Column('created_on', sa.DateTime(), default=datetime.now),
    sa.Column('updated_on', sa.DateTime(), default=datetime.now, onupdate=datetime.now),
)
import logging

import sqlalchemy as sa

from byn.predict.predictor import PredictionRecord
from byn.utils import (
    EnumAwareEncoder,
    anext,
    atuple,
)

logger = logging.getLogger(__name__)

metadata = sa.MetaData()

external_rate = sa.Table(
    'external_rate', metadata,
    sa.Column('currency', sa.String(3), primary_key=True),
    sa.Column('timestamp', sa.Integer, primary_key=True),
    sa.Column('timestamp_close', sa.Integer),
    sa.Column('open', sa.DECIMAL(12, 6)),
    sa.Column('close', sa.DECIMAL(12, 6)),
    sa.Column('low', sa.DECIMAL(12, 6)),
    sa.Column('high', sa.DECIMAL(12, 6)),
    sa.Column('volume', sa.SMALLINT),
)

external_rate_live = sa.Table(
    'external_rate_live', metadata,
    sa.Column('currency', sa.String(3), primary_key=True),
    sa.Column('timestamp', sa.Integer, primary_key=True),
    sa.Column('volume', sa.SMALLINT, primary_key=True),
    sa.Column('timestamp_received', sa.INTEGER),
    sa.Column('rate', sa.DECIMAL(12, 6)),
)
import aiopg.sa
import sqlalchemy as sa

from chat.settings import config

meta = sa.MetaData()

chat_user = sa.Table(
    'chat_user', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('username', sa.String(64), nullable=False),
    sa.Column('password', sa.String(200), nullable=False),
    sa.PrimaryKeyConstraint('id', name='chat_user_id_pkey')
)

chat = sa.Table(
    'chat', meta,
    sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
    sa.Column('name', sa.String(64), nullable=False, unique=True),
    sa.PrimaryKeyConstraint('id', name='chat_id_pkey')
)

message = sa.Table(
    'message', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('chat_id', sa.Integer, nullable=False),
    sa.Column('message', sa.String(256), nullable=False),
    sa.PrimaryKeyConstraint('id', name='message_id_pkey'),
)
def init_tables(metadata):
    tables = {}

    tables["batch_checkpoints"] = sa.Table(
        "batch_checkpoints",
        metadata,
        sa.Column("job_name", sa.String, primary_key=True),
        sa.Column("time", sa.DateTime(timezone=True), primary_key=True),
        sa.Column("checkpoint", sa.String, nullable=False),
    )

    tables["realtime_raw"] = sa.Table(
        "realtime_raw",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("feed_id", sa.String, primary_key=True),
        sa.Column("time", sa.DateTime(timezone=True), primary_key=True),
        sa.Column("json", postgresql.JSONB, nullable=False),
        sa.Column("raw", sa.LargeBinary, nullable=False),
        sa.Column("update_time", sa.DateTime(timezone=True), nullable=False, index=True),
    )

    tables["agency"] = sa.Table(
        "agency",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("agency_id", sa.String, primary_key=True),
        sa.Column("agency_name", sa.String, nullable=False),
        sa.Column("agency_url", sa.String, nullable=False),
        sa.Column("agency_timezone", sa.String, nullable=False),
        sa.Column("agency_lang", sa.String),
        sa.Column("agency_phone", sa.String),
    )

    tables["stops"] = sa.Table(
        "stops",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("stop_id", sa.String, primary_key=True),
        sa.Column("stop_code", sa.String),
        sa.Column("stop_name", sa.String),
        sa.Column("stop_desc", sa.String),
        sa.Column("stop_loc", ga.Geometry("POINT")),
        sa.Column("zone_id", sa.String),
        sa.Column("stop_url", sa.String),
        sa.Column("location_type", sa.Enum(gtfs.LocationType)),
        sa.Column("parent_station", sa.String),
    )

    tables["routes"] = sa.Table(
        "routes",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("route_id", sa.String, primary_key=True),
        sa.Column("agency_id", sa.String),
        sa.Column("route_short_name", sa.String),
        sa.Column("route_long_name", sa.String),
        sa.Column("route_desc", sa.String),
        sa.Column("route_type", sa.Integer),
        sa.Column("route_url", sa.String),
        sa.Column("route_color", sa.String),
        sa.Column("route_text_color", sa.String),
    )

    tables["trips"] = sa.Table(
        "trips",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("trip_id", sa.String, primary_key=True),
        sa.Column("route_id", sa.String, nullable=False),
        sa.Column("service_id", sa.String, nullable=False),
        sa.Column("trip_headsign", sa.String),
        sa.Column("direction_id", sa.Integer),
        sa.Column("block_id", sa.String),
        sa.Column("shape_id", sa.String),
        sa.ForeignKeyConstraint(["system", "route_id"], ["routes.system", "routes.route_id"]),
        # When we import shape_id, add ForeignKeyConstraint here
    )

    # Table for MTA trip_id lookups in realtime data. Realtime trip_ids are a
    # substring of the full trip_id. It looks like it is based on origin time,
    # route, and direction, and it is unique for Weekday/Saturday/Sunday.
tables["mta_trip_id"] = sa.Table( "mta_trip_id", metadata, # (system, alternate_trip_id, service_day) is not unique and cannot # be the primary key sa.Column("system", sa.String, primary_key=True), sa.Column("alternate_trip_id", sa.String, primary_key=True), sa.Column("service_day", sa.Enum(nyc.ServiceDay), primary_key=True), sa.Column("trip_id", sa.String, primary_key=True), sa.Index("idx_lookup_mta_trip_id", "system", "alternate_trip_id", "service_day"), sa.ForeignKeyConstraint(["system", "trip_id"], ["trips.system", "trips.trip_id"]), ) # Scheduled stops tables["stop_times"] = sa.Table( "stop_times", metadata, sa.Column("system", sa.String, primary_key=True), sa.Column("trip_id", sa.String, primary_key=True), sa.Column("stop_sequence", sa.Integer, primary_key=True), sa.Column("stop_id", sa.String, nullable=False), # We cannot store these as Time, because these can have a value # >= 24:00:00 for trips starting/ending after midnight. # Instead, store as an interval. # For GTFS, times are measured relatively to "noon - 12h" which is # midnight except for days with daylight savings time changes. sa.Column("arrival_time", sa.Interval), sa.Column("departure_time", sa.Interval), sa.Column("stop_headsign", sa.String), sa.Column("pickup_type", sa.Integer), sa.Column("drop_off_type", sa.Integer), sa.Column("shape_dist_traveled", sa.Float), sa.ForeignKeyConstraint(["system", "trip_id"], ["trips.system", "trips.trip_id"]), sa.ForeignKeyConstraint(["system", "stop_id"], ["stops.system", "stops.stop_id"]), ) tables["trip_paths"] = sa.Table( "trip_paths", metadata, sa.Column("system", sa.String, primary_key=True), sa.Column("shape_id", sa.String, primary_key=True), sa.Column("routes", sa.ARRAY(sa.String)), sa.Column("shape", ga.Geometry("LINESTRING")), ) # Actual stops, based on realtime data. Can be past or future. tables["realtime_stop_times"] = sa.Table( "realtime_stop_times", metadata, sa.Column("system", sa.String, nullable=False), sa.Column("route_id", sa.String, nullable=False), sa.Column("stop_id", sa.String, nullable=False), sa.Column("start_date", sa.Date, nullable=False), sa.Column("trip_id", sa.String, nullable=False), sa.Column("arrival", sa.DateTime(timezone=True), index=True), sa.Column("departure", sa.DateTime(timezone=True), index=True), sa.Column("update_time", sa.DateTime(timezone=True), nullable=False), sa.CheckConstraint("arrival IS NOT NULL OR departure IS NOT NULL"), # Index for lookups. Not primary key because we might need to add seq # in the future to support trips that reuse stops. 
        sa.Index(
            "idx_lookup_realtime_stop_times",
            "system",
            "route_id",
            "stop_id",
            "start_date",
            "trip_id",
            unique=True,
        ),
        sa.Index(
            "ix_realtime_stop_times_lookup_trip",
            "system",
            "route_id",
            "start_date",
            "trip_id",
        ),
        sa.ForeignKeyConstraint(
            ["system", "route_id"],
            [tables["routes"].c.system, tables["routes"].c.route_id],
        ),
        sa.ForeignKeyConstraint(
            ["system", "stop_id"],
            [tables["stops"].c.system, tables["stops"].c.stop_id],
        ),
    )

    tables["realtime_vehicle_positions"] = sa.Table(
        "realtime_vehicle_positions",
        metadata,
        sa.Column("system", sa.String, nullable=False),
        sa.Column("route_id", sa.String, nullable=False),
        sa.Column("stop_id", sa.String, nullable=False),
        sa.Column("start_date", sa.Date, nullable=False),
        sa.Column("trip_id", sa.String, nullable=False),
        sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False),
        sa.Column("status", sa.Enum(gtfs.VehicleStopStatus), nullable=False),
        sa.Column("update_time", sa.DateTime(timezone=True), nullable=False),
        # Index for unique vehicle positions. Not primary key because we
        # might need to add seq in the future to support trips that reuse stops.
        sa.Index(
            "idx_unique_realtime_vehicle_positions",
            "system",
            "route_id",
            "stop_id",
            "start_date",
            "trip_id",
            "timestamp",
            unique=True,
        ),
        sa.ForeignKeyConstraint(
            ["system", "route_id"],
            [tables["routes"].c.system, tables["routes"].c.route_id],
        ),
        sa.ForeignKeyConstraint(
            ["system", "stop_id"],
            [tables["stops"].c.system, tables["stops"].c.stop_id],
        ),
    )

    # Temporary table for processing realtime data
    tables["realtime_raw_stop_times"] = sa.Table(
        "realtime_raw_stop_times",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("route_id", sa.String, primary_key=True),
        sa.Column("start_date", sa.Date, primary_key=True),
        sa.Column("trip_id", sa.String, primary_key=True),
        sa.Column("train_id", sa.String, primary_key=True),
        sa.Column("time", sa.DateTime(timezone=True), primary_key=True),
        sa.Column("stop_times", postgresql.JSONB, nullable=False),
        sa.Column("update_time", sa.DateTime(timezone=True), nullable=False, index=True),
    )

    # Actual stops, based on realtime data. Can be past or future.
    tables["realtime_stop_times2"] = sa.Table(
        "realtime_stop_times2",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("route_id", sa.String, primary_key=True),
        sa.Column("start_date", sa.Date, primary_key=True),
        sa.Column("trip_id", sa.String, primary_key=True),
        sa.Column("train_id", sa.String, primary_key=True),
        sa.Column("stop_id", sa.String, primary_key=True),
        sa.Column("arrival", sa.DateTime(timezone=True)),
        sa.Column("departure", sa.DateTime(timezone=True)),
        sa.Column(
            "departure_or_arrival",
            sa.DateTime(timezone=True),
            nullable=False,
            index=True,
        ),
        sa.Column("time", sa.DateTime(timezone=True), nullable=False),
        sa.CheckConstraint("arrival IS NOT NULL OR departure IS NOT NULL"),
        # Index for lookups. Not primary key because we might need to add seq
        # in the future to support trips that reuse stops.
        sa.Index(
            "ix_realtime_stop_times2__lookup_stop",
            "system",
            "stop_id",
            "departure_or_arrival",
        ),
        sa.ForeignKeyConstraint(
            ["system", "route_id"],
            [tables["routes"].c.system, tables["routes"].c.route_id],
        ),
        sa.ForeignKeyConstraint(
            ["system", "stop_id"],
            [tables["stops"].c.system, tables["stops"].c.stop_id],
        ),
    )

    tables["nyc_subway_stations"] = sa.Table(
        "nyc_subway_stations",
        metadata,
        sa.Column("objectid", sa.String, primary_key=True),
        sa.Column("name", sa.String, nullable=False),
        sa.Column("notes", sa.String, nullable=False),
        sa.Column("lines", sa.ARRAY(sa.String), nullable=False),
        sa.Column("loc", ga.Geometry("POINT"), nullable=False),
    )

    tables["nyc_subway_lines"] = sa.Table(
        "nyc_subway_lines",
        metadata,
        sa.Column("objectid", sa.String, primary_key=True),
        sa.Column("lines", sa.ARRAY(sa.String), nullable=False),
        sa.Column("shape_len", sa.Float, nullable=False),
        sa.Column("path", ga.Geometry("LINESTRING"), nullable=False),
    )

    # Stops that we render on a map. This does not need to be all stops in
    # the stops table.
    tables["map_stops"] = sa.Table(
        "map_stops",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("stop_id", sa.String, primary_key=True),
        # This does not need to be the same location as stops.stop_loc,
        # e.g. if we are gluing different datasets together
        sa.Column("loc", ga.Geometry("POINT"), nullable=False),
        sa.ForeignKeyConstraint(
            ["system", "stop_id"],
            [tables["stops"].c.system, tables["stops"].c.stop_id],
        ),
    )

    tables["map_nodes"] = sa.Table(
        "map_nodes",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("edge_ids", sa.ARRAY(sa.Integer), nullable=False),
        sa.Column("loc", ga.Geometry("POINT"), nullable=False),
        sa.Column("stop_ids", sa.ARRAY(sa.String)),
    )

    tables["map_edges"] = sa.Table(
        "map_edges",
        metadata,
        sa.Column("system", sa.String, primary_key=True),
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("node_id1", sa.Integer, nullable=False),
        sa.Column("node_id2", sa.Integer, nullable=False),
        sa.Column("routes", sa.ARRAY(sa.String), nullable=False),
        sa.Column("path", ga.Geometry("LINESTRING"), nullable=False),
    )

    return tables
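# Hedged usage sketch (not part of the original source): creating the GTFS
# schema defined by init_tables. The DSN is a placeholder, and the Geometry
# columns assume the PostGIS extension is installed.
def create_schema():
    engine = sa.create_engine("postgresql://user:password@localhost/transit")
    metadata = sa.MetaData()
    tables = init_tables(metadata)
    # Emits CREATE TABLE statements for every table registered above.
    metadata.create_all(engine)
    return tables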
import aiopg.sa
import sqlalchemy as sa

metadata = sa.MetaData()

clients = sa.Table(
    'clients', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('login', sa.String)
)

wallets = sa.Table(
    'wallets', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('client_id', sa.Integer),
    sa.Column('currency', sa.String),
    sa.Column('amount', sa.DECIMAL)
)

history = sa.Table(
    'history', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('client_id', sa.Integer),
    sa.Column('amount', sa.DECIMAL)
)


async def get_client(db: aiopg.sa.SAConnection, client_id: int):
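    # The body of get_client is not included in this excerpt; the lines below
    # are a hedged sketch of one plausible implementation (fetch the client
    # row by primary key), not the author's original code.
    result = await db.execute(
        clients.select().where(clients.c.id == client_id))
    return await result.fetchone()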
import datetime
import logging

import aiopg.sa
import sqlalchemy as sa
from sanic import exceptions

from discode_server.utils import baseconv
from discode_server.utils import highlight

log = logging.getLogger(__file__)

meta = sa.MetaData()

paste = sa.Table(
    'pastes', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('contents', sa.Text(), nullable=False),
    sa.Column('created_on', sa.DateTime, default=datetime.datetime.utcnow),
    sa.Column('sha', sa.String(64), nullable=False),
    sa.Column('lexer', sa.String(60), nullable=True),
    sa.Column('lexer_guessed', sa.Boolean, default=False),
)

comment = sa.Table(
    'comments', meta,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('paste_id', sa.Integer,
              sa.ForeignKey("pastes.id", ondelete="CASCADE"),
              nullable=False),
    sa.Column('line', sa.Integer, nullable=False),
    sa.Column('contents', sa.Text(), nullable=False),
    sa.Column('created_on', sa.DateTime, default=datetime.datetime.utcnow),
)
import aiopg
import aiopg.sa
import sqlalchemy as sa
from attrdict import AttrDict

metadata = sa.MetaData()

table_trade_history = sa.Table(
    'trade_history', metadata,
    sa.Column('time', sa.Integer, primary_key=True),
    sa.Column('exchange', sa.String(255)),
    sa.Column('pair', sa.String(255)),
    sa.Column('bid', sa.Float),
    sa.Column('ask', sa.Float),
    sa.Column('bid_size', sa.Float),
    sa.Column('ask_size', sa.Float),
)


async def configure(env: AttrDict):
    dsn = env.cfg.dsn
    env.db = await aiopg.sa.create_engine(dsn)
    return env
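# Hedged usage sketch (not part of the original source): writing one ticker
# row through the engine created by configure(). All field values are
# placeholders.
async def record_trade(env: AttrDict):
    async with env.db.acquire() as conn:
        await conn.execute(table_trade_history.insert().values(
            time=1546300800,
            exchange='binance',
            pair='BTC/USDT',
            bid=3690.5,
            ask=3691.0,
            bid_size=1.2,
            ask_size=0.8,
        ))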
import aiopg.sa
import sqlalchemy as sa

__all__ = ['question', 'choice']

meta = sa.MetaData()

question = sa.Table(
    "question", meta,
    sa.Column("id", sa.Integer, nullable=False),
    sa.Column("text", sa.String(200), nullable=False),
    sa.Column("pub_date", sa.Date, nullable=False),

    # Indexes
    sa.PrimaryKeyConstraint("id", name="question_id_pkey"))

choice = sa.Table(
    "choice", meta,
    sa.Column("id", sa.Integer, nullable=False),
    sa.Column("question_id", sa.Integer, nullable=False),
    sa.Column("choice_text", sa.String(200), nullable=False),
    sa.Column("votes", sa.Integer, server_default="0", nullable=False),

    # Indexes
    sa.PrimaryKeyConstraint('id', name='choice_id_pkey'),
    sa.ForeignKeyConstraint(["question_id"], [question.c.id],
                            name="choice_question_id_fkey",
                            ondelete="CASCADE"))
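# Hedged usage sketch (not part of the original source): reading a question
# and its choices over aiopg.sa. The DSN is a placeholder.
async def show_question(question_id: int):
    async with aiopg.sa.create_engine('postgresql://user:password@localhost/polls') as engine:
        async with engine.acquire() as conn:
            result = await conn.execute(
                question.select().where(question.c.id == question_id))
            q = await result.fetchone()
            async for row in conn.execute(
                    choice.select().where(choice.c.question_id == question_id)):
                print(q.text, '->', row.choice_text, row.votes)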
import aiopg.sa
import sqlalchemy as sa

from sanic_study.main import app

meta = sa.MetaData()

question = sa.Table(
    'question', meta,
    sa.Column('id', sa.Integer, nullable=False),
    sa.Column('question_text', sa.String(200), nullable=False),
    sa.Column('pub_date', sa.Date, nullable=False),
    sa.PrimaryKeyConstraint('id', name='question_id_pkey')
)

choice = sa.Table(
    'choice', meta,
    sa.Column('id', sa.Integer, nullable=False),
    sa.Column('question_id', sa.Integer, nullable=False),
    sa.Column('choice_text', sa.String(200), nullable=False),
    sa.Column('votes', sa.Integer, server_default="0", nullable=False),
    sa.PrimaryKeyConstraint('id', name='choice_id_pkey'),
    sa.ForeignKeyConstraint(['question_id'], [question.c.id],
                            name='choice_question_id_fkey',
                            ondelete='CASCADE')
)


async def init_pg(app):
import enum
from uuid import uuid4

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID

meta = sa.MetaData()


class DrawSourceResource(enum.Enum):
    empty = 'empty'
    low = 'low'
    half = 'half'
    full = 'full'


user_draw_source_relationship = sa.Table(
    'users_draw_sources', meta,
    sa.Column('user_id', UUID(as_uuid=True), sa.ForeignKey('users.id'),
              primary_key=True),
    sa.Column('draw_source_id', UUID(as_uuid=True),
              sa.ForeignKey('draw_sources.id'), primary_key=True),
    sa.Column('resource', sa.Enum(DrawSourceResource, name='draw_source_resource'),
              nullable=True, default=DrawSourceResource.full),
    sa.Column('quantity', sa.Integer, nullable=False, default=1),
)
udsr = user_draw_source_relationship

user = sa.Table(
    'users', meta,
    sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid4),
)
import aiopg.sa
import sqlalchemy as sa
from aiohttp.web import Application

from .settings import config

METADATA = sa.MetaData()

image = sa.Table(
    "image",
    METADATA,
    sa.Column("id", sa.Integer(), primary_key=True),
    sa.Column("origin", sa.String(), nullable=False, unique=True),
    sa.Column("url_big", sa.String(), nullable=False),
    sa.Column("url_thumb", sa.String(), nullable=False),
    sa.Column(
        "indexed_at",
        sa.DateTime(),
        nullable=False,
        server_default=sa.text("(now() at time zone 'utc')"),
    ),
)

image_color = sa.Table(
    "image_color",
    METADATA,
    sa.Column("id", sa.Integer(), primary_key=True),
    sa.Column("image_id", sa.Integer(), sa.ForeignKey("image.id"), nullable=False),
)
import aiopg.sa
import sqlalchemy as sa

__all__ = ['users', 'karma']

meta = sa.MetaData()

users = sa.Table(
    'users', meta,
    sa.Column('id', sa.Integer, nullable=False),
    sa.Column('email', sa.String(256), nullable=False),
    sa.Column('password_hash', sa.String(512), nullable=False),
    sa.Column('registration_date', sa.Date, nullable=False),

    # Indexes
    #
    sa.PrimaryKeyConstraint('id', name='users_id_pkey'))

karma = sa.Table(
    'karma', meta,
    sa.Column('id', sa.Integer, nullable=False),
    sa.Column('users_id', sa.Integer, nullable=False),
    sa.Column('karma', sa.Integer, server_default="0", nullable=False),

    # Indexes
    #
    sa.PrimaryKeyConstraint('id', name='karma_id_pkey'),
    sa.ForeignKeyConstraint(['users_id'], [users.c.id],
                            name='karma_users_id_fkey',
                            ondelete='CASCADE'),
)
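# Hedged usage sketch (not part of the original source): summing a user's
# karma by filtering the karma table defined above.
async def get_user_karma(engine: aiopg.sa.Engine, user_id: int) -> int:
    async with engine.acquire() as conn:
        result = await conn.execute(
            sa.select([sa.func.coalesce(sa.func.sum(karma.c.karma), 0)])
            .where(karma.c.users_id == user_id))
        return await result.scalar()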