def upgrade():
    """Make time_entries.duration mandatory and drop the superseded amount column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'time_entries', 'duration',
        existing_type=postgresql.INTERVAL(),
        nullable=False,
    )
    op.drop_column('time_entries', 'amount')
def downgrade():
    """Restore dropped columns and relax the NOT NULL constraints of the upgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "vote_ban",
        sa.Column("old_tags", sa.VARCHAR(), autoincrement=False, nullable=True),
    )
    # These VARCHAR columns all go back to nullable; same call, four targets.
    for table, column in [
        ("sticker", "sticker_set_name"),
        ("sticker", "original_emojis"),
        ("inline_query_request", "offset"),
        ("inline_query_request", "next_offset"),
    ]:
        op.alter_column(table, column, existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column(
        "inline_query_request", "duration",
        existing_type=postgresql.INTERVAL(),
        nullable=True,
    )
    op.add_column(
        "inline_query",
        sa.Column("bot", sa.VARCHAR(), autoincrement=False, nullable=True),
    )
    op.alter_column("inline_query", "query", existing_type=sa.VARCHAR(), nullable=True)
def downgrade():
    """Re-tighten TOUR/USER constraints and drop the TOUR_DATE table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): op.drop_constraint(None, ...) raises in Alembic at runtime;
    # the auto-generated unique-constraint names must be filled in here.
    op.drop_constraint(None, 'USER', type_='unique')
    op.alter_column('USER', 'password',
                    existing_type=sa.VARCHAR(length=60), nullable=True)
    op.drop_constraint(None, 'TOUR', type_='unique')
    op.alter_column('TOUR', 'upload_time',
                    existing_type=postgresql.TIMESTAMP(),
                    nullable=False,
                    existing_server_default=sa.text('now()'))
    op.alter_column('TOUR', 'title',
                    existing_type=sa.VARCHAR(length=32), nullable=False)
    op.alter_column('TOUR', 'rating', existing_type=sa.REAL(), nullable=False)
    op.alter_column('TOUR', 'price', existing_type=sa.REAL(), nullable=False)
    op.alter_column('TOUR', 'guide_id', existing_type=sa.INTEGER(), nullable=False)
    op.alter_column('TOUR', 'duration',
                    existing_type=postgresql.INTERVAL(), nullable=False)
    op.create_index('LANGUAGE_pkey', 'LANGUAGE', ['id'], unique=True)
    op.drop_table('TOUR_DATE')
def upgrade():
    """Create the metric table and give data_set a unique UUID column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'metric',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('job_id', sa.Integer(), nullable=False),
        sa.Column(
            'type',
            sa.Enum('ed2nav', 'fusio2ed', 'gtfs2ed', 'osm2ed',
                    'geopal2ed', 'synonym2ed', 'poi2ed',
                    name='metric_type'),
            nullable=False),
        sa.Column('dataset_id', sa.Integer(), nullable=True),
        sa.Column('duration', postgresql.INTERVAL(), nullable=True),
        sa.ForeignKeyConstraint(['dataset_id'], ['data_set.id']),
        sa.ForeignKeyConstraint(['job_id'], ['job.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.add_column('data_set', sa.Column('uid', postgresql.UUID(), nullable=True))
    op.create_unique_constraint('data_set_uid_idx', 'data_set', ['uid'])
def downgrade():
    """Remove telephony gateway support and make program duration required again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'radio_station', 'gateway_id')
    op.alter_column(u'radio_program', 'duration',
                    existing_type=postgresql.INTERVAL(),
                    nullable=False)
    op.drop_table('telephony_gateway')
def downgrade():
    """Move the duration column back from recordings to audio_files."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'recordings', 'duration')
    # NOTE(review): adding a NOT NULL column with no default fails on a
    # populated table - confirm audio_files is empty when this runs.
    op.add_column(
        u'audio_files',
        sa.Column('duration', postgresql.INTERVAL(),
                  autoincrement=False, nullable=False))
def downgrade():
    """Swap crontab/timezone scheduling back to the old interval column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'job',
        sa.Column('interval', postgresql.INTERVAL(),
                  autoincrement=False, nullable=True))
    op.drop_column('job', 'timezone')
    op.drop_column('job', 'crontab')
def downgrade():
    """Re-apply NOT NULL to two inline_query_request columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    for column, existing_type in (
        ('next_offset', sa.VARCHAR()),
        ('duration', postgresql.INTERVAL()),
    ):
        op.alter_column('inline_query_request', column,
                        existing_type=existing_type, nullable=False)
def upgrade():
    """General cleanup and constraint stuff.

    Deletes incomplete inline query requests, tightens several NOT NULL
    constraints, drops obsolete columns, and installs CHECK constraints
    on chat, sticker_set and task.
    """
    session = Session(bind=op.get_bind())

    # Delete incomplete inline query requests so the NOT NULL constraints
    # below can be applied. (The original issued this identical DELETE
    # twice back to back; once is sufficient.)
    session.query(InlineQueryRequest) \
        .filter(or_(
            InlineQueryRequest.duration.is_(None),
            InlineQueryRequest.offset.is_(None),
            InlineQueryRequest.next_offset.is_(None),
        )) \
        .delete()

    op.alter_column('inline_query', 'query',
                    existing_type=sa.VARCHAR(), nullable=False)
    op.drop_column('inline_query', 'bot')
    op.alter_column('inline_query_request', 'duration',
                    existing_type=postgresql.INTERVAL(), nullable=False)
    op.alter_column('inline_query_request', 'next_offset',
                    existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('inline_query_request', 'offset',
                    existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('sticker', 'original_emojis',
                    existing_type=sa.VARCHAR(), nullable=False)
    op.drop_column('vote_ban', 'old_tags')

    # Chat check constraint: at most one of the four mode flags may be TRUE.
    op.create_check_constraint("only_one_action_check", "chat", """ (expecting_sticker_set IS TRUE AND tagging_random_sticker IS FALSE AND fix_single_sticker IS FALSE AND full_sticker_set IS FALSE) OR \ (tagging_random_sticker IS TRUE AND expecting_sticker_set IS FALSE AND fix_single_sticker IS FALSE AND full_sticker_set IS FALSE) OR \ (fix_single_sticker IS TRUE AND tagging_random_sticker IS FALSE AND expecting_sticker_set IS FALSE AND full_sticker_set IS FALSE) OR \ (full_sticker_set IS TRUE AND tagging_random_sticker IS FALSE AND fix_single_sticker IS FALSE AND expecting_sticker_set IS FALSE) OR \ (full_sticker_set IS FALSE AND tagging_random_sticker IS FALSE AND fix_single_sticker IS FALSE AND expecting_sticker_set IS FALSE) """)

    # Sticker set check constraints: a set may not be reviewed but incomplete.
    op.create_check_constraint("reviewed_and_complete_check", "sticker_set",
                               "NOT (reviewed AND NOT complete)")

    # Task check constraints: each task type must carry its required fields.
    op.create_check_constraint("check_user_tags_check", "task", "(type = 'check_user_tags' AND is_default_language IS NOT NULL AND \ user_id IS NOT NULL) OR type != 'check_user_tags'")
    op.create_check_constraint("vote_ban_check", "task", "(type = 'vote_ban' AND user_id IS NOT NULL) OR type != 'vote_ban'")
    # BUG FIX: the exemption clause previously read "type != 'vote_ban'"
    # (copy-paste from the constraint above), which rejected every vote_ban
    # row outright and never actually enforced anything for scan_set rows.
    # It must exempt non-scan_set rows instead.
    op.create_check_constraint("scan_set_check", "task", "(type = 'scan_set' AND sticker_set_name IS NOT NULL and chat_id IS NOT NULL) OR type != 'scan_set'")
def downgrade():
    """Restore membership_fee.grace_period and drop the booking window columns.

    The column is backfilled with a default grace period of 14 days.
    """
    # BUG FIX: the original added the column with nullable=False and no
    # server default *before* backfilling it, which fails on any populated
    # table in PostgreSQL. Add it nullable, backfill, then tighten.
    op.add_column('membership_fee',
                  sa.Column('grace_period', postgresql.INTERVAL(),
                            autoincrement=False, nullable=True))
    # Lightweight table construct so op.execute can emit a plain UPDATE.
    membership_fee = sa.table('membership_fee',
                              sa.Column('grace_period', sa.Interval(),
                                        nullable=False))
    op.execute(membership_fee.update()
               .values({'grace_period': timedelta(days=14)}))
    op.alter_column('membership_fee', 'grace_period',
                    existing_type=postgresql.INTERVAL(), nullable=False)
    op.drop_column('membership_fee', 'booking_end')
    op.drop_column('membership_fee', 'booking_begin')
def upgrade():
    """Relax NOT NULL on inline_query_request.duration and .next_offset."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "inline_query_request",
        "duration",
        existing_type=postgresql.INTERVAL(),
        nullable=True,
    )
    op.alter_column(
        "inline_query_request",
        "next_offset",
        existing_type=sa.VARCHAR(),
        nullable=True,
    )
def downgrade():
    """Restore time_entries.amount and make duration optional again."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'time_entries',
        sa.Column('amount', sa.NUMERIC(precision=4, scale=2),
                  autoincrement=False, nullable=True))
    op.alter_column('time_entries', 'duration',
                    existing_type=postgresql.INTERVAL(),
                    nullable=True)
def upgrade():
    """Enable the pgcrypto extension and create the tasks table."""
    op.execute("create extension pgcrypto;")
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'tasks',
        sa.Column('id', postgresql.UUID(), nullable=False),
        sa.Column(
            'status',
            sa.Enum('in_queue', 'running', 'completed', name='taskstatusenum'),
            nullable=False),
        sa.Column('create_time', sa.DateTime(), nullable=False),
        sa.Column('start_time', sa.DateTime(), nullable=True),
        sa.Column('execution_time', postgresql.INTERVAL(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
def upgrade():
    """Convert aggregated_metrics.period to an enum; add package metadata columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('aggregated_metrics', schema=None) as batch_op:
        # NOTE(review): converting INTERVAL to an enum on PostgreSQL may need
        # an explicit postgresql_using cast - confirm against the target DB.
        batch_op.alter_column(
            'period',
            existing_type=postgresql.INTERVAL(),
            type_=sa.Enum('hour', 'day', 'month', 'year', name='intervaltype'),
            existing_nullable=True,
        )
    with op.batch_alter_table('packages', schema=None) as batch_op:
        batch_op.add_column(sa.Column('platforms', sa.String(), nullable=True))
        batch_op.add_column(sa.Column('url', sa.String(), nullable=True))
def upgrade():
    """Create TOUR_DATE, relax TOUR/USER NOT NULLs, add uuid unique constraints."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'TOUR_DATE',
        sa.Column('id', sa.Integer(),
                  server_default=FetchedValue(), nullable=False),
        sa.Column('tour_id', sa.Integer(), nullable=True),
        sa.Column('tour_date', sa.DateTime(),
                  server_default=FetchedValue(), nullable=True),
        sa.ForeignKeyConstraint(['tour_id'], ['TOUR.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.drop_index('LANGUAGE_pkey', table_name='LANGUAGE')
    op.alter_column('TOUR', 'duration',
                    existing_type=postgresql.INTERVAL(), nullable=True)
    op.alter_column('TOUR', 'guide_id', existing_type=sa.INTEGER(), nullable=True)
    op.alter_column('TOUR', 'price', existing_type=sa.REAL(), nullable=True)
    op.alter_column('TOUR', 'rating', existing_type=sa.REAL(), nullable=True)
    op.alter_column('TOUR', 'title',
                    existing_type=sa.VARCHAR(length=32), nullable=True)
    op.alter_column('TOUR', 'upload_time',
                    existing_type=postgresql.TIMESTAMP(), nullable=True,
                    existing_server_default=sa.text('now()'))
    op.create_unique_constraint(None, 'TOUR', ['uuid'])
    op.alter_column('USER', 'email',
                    existing_type=sa.VARCHAR(length=60), nullable=True)
    op.alter_column('USER', 'firstname',
                    existing_type=sa.VARCHAR(length=32), nullable=True)
    op.alter_column('USER', 'lastname',
                    existing_type=sa.VARCHAR(length=32), nullable=True)
    op.create_unique_constraint(None, 'USER', ['uuid'])
def upgrade():
    """Create telephony_gateway and attach radio_station to it."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'telephony_gateway',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('number_top', sa.Integer(), nullable=True),
        sa.Column('number_bottom', sa.Integer(), nullable=True),
        sa.Column('sofia_string', sa.String(length=160), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.alter_column(u'radio_program', 'duration',
                    existing_type=postgresql.INTERVAL(),
                    nullable=True)
    op.add_column(u'radio_station',
                  sa.Column('gateway_id', sa.Integer(), nullable=True))
class Sales(Base):
    """Declarative model for the ``sales`` table."""

    __tablename__ = 'sales'

    # Auto-incrementing BIGINT surrogate primary key.
    id = sa.Column(
        postgresql_types.BIGINT(),
        primary_key=True,
        unique=True,
        nullable=False,
        autoincrement=True,
        doc="testtest",
    )
    # UUID foreign key to book.id; rows cascade-delete with the book.
    book_id = sa.Column(
        postgresql_types.UUID(),
        sa.ForeignKey(column="book.id", ondelete="CASCADE"),
        primary_key=False,
        unique=False,
        nullable=False,
        doc="testtest",
    )
    # Required timestamp column.
    sold = sa.Column(
        postgresql_types.TIMESTAMP(),
        primary_key=False,
        unique=False,
        nullable=False,
        doc="testtest",
    )
    # Optional interval column.
    reservation = sa.Column(
        postgresql_types.INTERVAL(),
        primary_key=False,
        unique=False,
        nullable=True,
        doc="testtest",
    )
    # Optional network-address column.
    source = sa.Column(
        postgresql_types.INET(),
        primary_key=False,
        unique=False,
        nullable=True,
        doc="testtest",
    )
    # Relationship to Book with a reverse "sales" backref.
    book = sa.orm.relationship(
        'Book',
        foreign_keys="[sales.c.book_id]",
        remote_side=None,
        backref="sales",
    )
def downgrade():
    """Undo the cleanup migration: restore columns, relax NOT NULLs."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'vote_ban',
        sa.Column('old_tags', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.alter_column('sticker', 'sticker_set_name',
                    existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('sticker', 'original_emojis',
                    existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('inline_query_request', 'offset',
                    existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('inline_query_request', 'next_offset',
                    existing_type=sa.VARCHAR(), nullable=True)
    op.alter_column('inline_query_request', 'duration',
                    existing_type=postgresql.INTERVAL(), nullable=True)
    op.add_column(
        'inline_query',
        sa.Column('bot', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.alter_column('inline_query', 'query',
                    existing_type=sa.VARCHAR(), nullable=True)
def downgrade():
    """Recreate the traffic accounting schema: tables, balance view and the
    traffic_history() SQL function (replacing its older signature)."""
    # traffic_group must exist first: building references it below.
    op.create_table('traffic_group',
        sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('credit_limit', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column('credit_interval', postgresql.INTERVAL(), autoincrement=False, nullable=False),
        sa.Column('credit_amount', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column('initial_credit_amount', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(['id'], ['group.id'], name='traffic_group_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='traffic_group_pkey'),
        postgresql_ignore_search_path=False)
    # Re-attach buildings to a default traffic group (FK + lookup index).
    op.add_column(
        'building',
        sa.Column('default_traffic_group_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key('building_default_traffic_group_id_fkey', 'building',
                          'traffic_group', ['default_traffic_group_id'], ['id'])
    op.create_index('ix_building_default_traffic_group_id', 'building',
                    ['default_traffic_group_id'], unique=False)
    # One balance snapshot row per user; removed together with the user.
    op.create_table(
        'traffic_balance',
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('amount', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column('timestamp', postgresql.TIMESTAMP(timezone=True),
                  server_default=sa.text('CURRENT_TIMESTAMP'),
                  autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'],
                                name='traffic_balance_user_id_fkey', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('user_id', name='traffic_balance_pkey'))
    # Credit bookings; amounts are constrained to be non-negative.
    op.create_table(
        'traffic_credit',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('timestamp', postgresql.TIMESTAMP(timezone=True),
                  server_default=sa.text('CURRENT_TIMESTAMP'),
                  autoincrement=False, nullable=False),
        sa.Column('amount', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.CheckConstraint('amount >= 0', name='traffic_credit_amount_check'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'],
                                name='traffic_credit_user_id_fkey', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name='traffic_credit_pkey'))
    op.create_index('ix_traffic_credit_user_id', 'traffic_credit', ['user_id'],
                    unique=False)
    # View: per-user balance = snapshot + credits since snapshot - volumes since snapshot.
    op.execute(''' CREATE OR REPLACE VIEW current_traffic_balance AS SELECT "user".id AS user_id, (((COALESCE(traffic_balance.amount, (0)::bigint))::numeric + COALESCE(recent_credit.amount, (0)::numeric)) - COALESCE(recent_volume.amount, (0)::numeric)) AS amount FROM ((("user" LEFT JOIN traffic_balance ON (("user".id = traffic_balance.user_id))) LEFT JOIN LATERAL ( SELECT sum(traffic_credit.amount) AS amount FROM traffic_credit WHERE (("user".id = traffic_credit.user_id) AND ((traffic_balance.user_id IS NULL) OR (traffic_balance."timestamp" <= traffic_credit."timestamp")))) recent_credit ON (true)) LEFT JOIN LATERAL ( SELECT sum(traffic_volume.amount) AS amount FROM traffic_volume WHERE (("user".id = traffic_volume.user_id) AND ((traffic_balance.user_id IS NULL) OR (traffic_balance."timestamp" <= traffic_volume."timestamp")))) recent_volume ON (true)); ''')
    # Drop the old three-argument signature before installing the new
    # four-argument traffic_history (PostgreSQL overloads by signature).
    op.execute(
        "DROP FUNCTION IF EXISTS traffic_history (arg_user_id int, arg_start timestamptz, arg_end timestamptz)"
    )
    # Function: bucketed credit/ingress/egress/balance history per user.
    op.execute(''' CREATE OR REPLACE FUNCTION traffic_history (arg_user_id int, arg_start timestamptz, arg_interval interval, arg_step interval) RETURNS TABLE ("timestamp" timestamptz, credit numeric, ingress numeric, egress numeric, balance numeric) STABLE LANGUAGE sql AS $$ WITH balance AS (SELECT traffic_balance.amount AS amount, traffic_balance.timestamp AS timestamp FROM "user" LEFT OUTER JOIN traffic_balance ON "user".id = traffic_balance.user_id WHERE "user".id = arg_user_id), traffic_events AS (SELECT traffic_credit.amount AS amount, traffic_credit.timestamp AS timestamp, 'Credit' AS type FROM traffic_credit WHERE traffic_credit.user_id = arg_user_id UNION ALL SELECT -traffic_volume.amount AS amount, traffic_volume.timestamp AS timestamp, CAST(traffic_volume.type AS TEXT) AS type FROM 
traffic_volume WHERE traffic_volume.user_id = arg_user_id), buckets AS (SELECT bucket, row_number() OVER (ORDER BY bucket) - 1 AS index FROM generate_series(CAST(to_timestamp(trunc(EXTRACT(epoch FROM CAST(arg_start AS TIMESTAMP WITH TIME ZONE)) / EXTRACT(epoch FROM arg_step)) * EXTRACT(epoch FROM arg_step)) AS TIMESTAMP WITH TIME ZONE) - arg_step, CAST(to_timestamp(trunc(EXTRACT(epoch FROM CAST(arg_start AS TIMESTAMP WITH TIME ZONE) + arg_interval) / EXTRACT(epoch FROM arg_step)) * EXTRACT(epoch FROM arg_step)) AS TIMESTAMP WITH TIME ZONE), arg_step) AS bucket ORDER BY bucket), traffic_hist AS (SELECT buckets.bucket, sum(CASE WHEN (traffic_events.type = 'Credit') THEN traffic_events.amount END) AS credit, sum(CASE WHEN (traffic_events.type = 'Ingress') THEN -traffic_events.amount END) AS ingress, sum(CASE WHEN (traffic_events.type = 'Egress') THEN -traffic_events.amount END) AS egress, sum(traffic_events.amount) AS amount, sum(CASE WHEN ((SELECT balance.timestamp FROM balance) IS NOT NULL AND traffic_events.timestamp < (SELECT balance.timestamp FROM balance)) THEN traffic_events.amount END) AS before_balance, sum(CASE WHEN ((SELECT balance.timestamp FROM balance) IS NULL OR traffic_events.timestamp >= (SELECT balance.timestamp FROM balance)) THEN traffic_events.amount END) AS after_balance FROM buckets LEFT OUTER JOIN traffic_events ON width_bucket(traffic_events.timestamp, (SELECT array((SELECT buckets.bucket FROM buckets WHERE buckets.index != 0)) AS array_1)) = buckets.index WHERE buckets.index < (SELECT max(buckets.index) AS max_1 FROM buckets) GROUP BY buckets.bucket ORDER BY buckets.bucket) SELECT agg_hist.bucket, agg_hist.credit, agg_hist.ingress, agg_hist.egress, agg_hist.balance FROM (SELECT traffic_hist.bucket, traffic_hist.credit AS credit, traffic_hist.ingress AS ingress, traffic_hist.egress AS egress, CASE WHEN ((SELECT balance.timestamp FROM balance) IS NOT NULL AND traffic_hist.bucket < (SELECT balance.timestamp FROM balance) AND ((SELECT 
min(traffic_events.timestamp) AS min_1 FROM traffic_events) IS NULL OR traffic_hist.bucket < (SELECT min(traffic_events.timestamp) AS min_1 FROM traffic_events))) THEN NULL WHEN ((SELECT balance.timestamp FROM balance) IS NULL OR traffic_hist.bucket >= (SELECT balance.timestamp FROM balance)) THEN coalesce((SELECT balance.amount FROM balance), 0) + coalesce(sum(traffic_hist.after_balance) OVER (ORDER BY traffic_hist.bucket ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 0) ELSE (coalesce((SELECT balance.amount FROM balance), 0) + coalesce(traffic_hist.after_balance, 0)) - coalesce(sum(traffic_hist.before_balance) OVER (ORDER BY traffic_hist.bucket DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 1 PRECEDING), 0) END AS balance FROM traffic_hist) AS agg_hist ORDER BY agg_hist.bucket LIMIT ALL OFFSET 1 $$ ''')
def upgrade():
    """Create the transcription-workflow tables (payments, progress caches,
    streams, subtasks, task reports/supervisors) with their indexes."""
    ### commands auto generated by Alembic - please adjust! ###
    # Per-task/payroll utterance cost snapshot.
    op.create_table(
        'costperutterance',
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('cutofftime', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('itemsdone', sa.INTEGER(), nullable=False),
        sa.Column('unitsdone', sa.INTEGER(), nullable=False),
        sa.Column('payrollid', sa.INTEGER(), nullable=False),
        sa.Column('amount', postgresql.DOUBLE_PRECISION(), nullable=False),
        sa.ForeignKeyConstraint(['payrollid'], [u'payrolls.payrollid'],
                                name=u'costperutterance_payrollid_fkey'),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'costperutterance_taskid_fkey'),
        sa.PrimaryKeyConstraint(u'taskid', u'payrollid',
                                name=op.f('pk_costperutterance')))
    op.create_index('costperutterancebytaskid', 'costperutterance', ['taskid'],
                    unique=False)
    # Data loads, recording who created them and for which task.
    op.create_table(
        'loads',
        sa.Column('loadid', sa.INTEGER(), nullable=False),
        sa.Column('createdby', sa.INTEGER(), nullable=False),
        sa.Column('createdat', postgresql.TIMESTAMP(timezone=True),
                  server_default=sa.text(u'now()'), nullable=False),
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.ForeignKeyConstraint(['createdby'], [u'users.userid'],
                                name=op.f('fk_loads_createdby_users')),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=op.f('fk_loads_taskid_tasks')),
        sa.PrimaryKeyConstraint(u'loadid', name=op.f('pk_loads')))
    op.create_index('ix_loads_taskid_loadid', 'loads', ['loadid', 'taskid'],
                    unique=True)
    # Miscellaneous payments tied to a payroll, task and user.
    op.create_table(
        'otherpayments',
        sa.Column('otherpaymentid', sa.INTEGER(), nullable=False),
        sa.Column('payrollid', sa.INTEGER(), nullable=False),
        sa.Column('identifier', sa.TEXT(), nullable=False),
        sa.Column('paymenttypeid', sa.INTEGER(), nullable=False),
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('userid', sa.INTEGER(), nullable=False),
        sa.Column('amount', sa.INTEGER(), nullable=False),
        sa.Column('added', postgresql.TIMESTAMP(timezone=True),
                  server_default=sa.text(u'now()'), nullable=False),
        sa.ForeignKeyConstraint(['paymenttypeid'], [u'paymenttypes.paymenttypeid'],
                                name=u'otherpayments_paymenttypeid_fkey'),
        sa.ForeignKeyConstraint(['payrollid'], [u'payrolls.payrollid'],
                                name=u'otherpayments_payrollid_fkey'),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'otherpayments_taskid_fkey'),
        sa.ForeignKeyConstraint(['userid'], [u'users.userid'],
                                name=u'otherpayments_userid_fkey'),
        sa.PrimaryKeyConstraint(u'otherpaymentid', name=op.f('pk_otherpayments')))
    op.create_index('otherpayments_identifier_key', 'otherpayments',
                    ['identifier'], unique=True)
    op.create_index('otherpaymentsbypayrollid', 'otherpayments', ['payrollid'],
                    unique=False)
    op.create_index('otherpaymentsbytaskid', 'otherpayments', ['taskid'],
                    unique=False)
    # Cached QA progress per task.
    op.create_table(
        'overallqaprogresscache',
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('endtime', postgresql.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('remaining', sa.INTEGER(), nullable=False),
        sa.Column('lastupdated', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'overallqaprogresscache_taskid_fkey'),
        sa.PrimaryKeyConstraint(u'taskid', name=op.f('pk_overallqaprogresscache')))
    # Cached transcription progress per task.
    # NOTE(review): this table is created without a primary key - confirm intended.
    op.create_table(
        'overalltrprogresscache',
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('itemcount', sa.INTEGER(), nullable=False),
        sa.Column('wordcount', sa.INTEGER(), nullable=False),
        sa.Column('newitems', sa.INTEGER(), nullable=False),
        sa.Column('finished', sa.INTEGER(), nullable=False),
        sa.Column('finishedlastweek', sa.INTEGER(), nullable=False),
        sa.Column('lastupdated', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('overallaccuracy', postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'overalltrprogresscache_taskid_fkey'))
    # Cached overall work progress per task.
    op.create_table(
        'overallworkprogresscache',
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('total', sa.INTEGER(), nullable=False),
        sa.Column('newutts', sa.INTEGER(), nullable=False),
        sa.Column('transcribed', sa.INTEGER(), nullable=False),
        sa.Column('transcribedlastweek', sa.INTEGER(), nullable=False),
        sa.Column('lastupdated', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'overallworkprogresscache_taskid_fkey'),
        sa.PrimaryKeyConstraint(u'taskid', name=op.f('pk_overallworkprogresscache')))
    # Named work streams within a task; (taskid, name) is unique.
    op.create_table(
        'streams',
        sa.Column('streamid', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('created', postgresql.TIMESTAMP(timezone=True),
                  server_default=sa.text(u'now()'), nullable=False),
        sa.Column('open', sa.BOOLEAN(), server_default=sa.text(u'true'),
                  nullable=False),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'streams_taskid_fkey'),
        sa.PrimaryKeyConstraint(u'streamid', name=op.f('pk_streams')))
    op.create_index('streams_taskid_key', 'streams', ['taskid', 'name'],
                    unique=True)
    # Subtasks carry the bulk of per-worktype configuration; most settings
    # have server-side defaults.
    op.create_table(
        'subtasks',
        sa.Column('subtaskid', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('worktypeid', sa.INTEGER(), nullable=False),
        sa.Column('maximumpagesize', sa.INTEGER(),
                  server_default=sa.text(u'20'), nullable=False),
        sa.Column('dst_dir', sa.CHAR(length=3),
                  server_default=sa.text(u"'ltr'::bpchar"), nullable=False),
        sa.Column('modeid', sa.INTEGER(), server_default=sa.text(u'1'),
                  nullable=False),
        sa.Column('getpolicy', sa.TEXT(),
                  server_default=sa.text(u"'nolimit'::text"), nullable=True),
        sa.Column('expirypolicy', sa.TEXT(),
                  server_default=sa.text(u"'noextend'::text"), nullable=True),
        sa.Column('allowpageskip', sa.BOOLEAN(),
                  server_default=sa.text(u'true'), nullable=True),
        sa.Column('needitemcontext', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=True),
        sa.Column('allowediting', sa.BOOLEAN(),
                  server_default=sa.text(u'true'), nullable=True),
        sa.Column('allowabandon', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=True),
        sa.Column('lookahead', sa.INTEGER(), server_default=sa.text(u'0'),
                  nullable=False),
        sa.Column('lookbehind', sa.INTEGER(), server_default=sa.text(u'0'),
                  nullable=False),
        sa.Column('allowcheckout', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=False),
        sa.Column('is2ndpassqa', sa.BOOLEAN(), nullable=True),
        sa.Column('defaultleaselife', postgresql.INTERVAL(),
                  server_default=sa.text(u"'7 days'::interval"), nullable=True),
        sa.Column('needdynamictagset', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=True),
        sa.Column('instructionspage', sa.TEXT(), nullable=True),
        sa.Column('useqahistory', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=False),
        sa.Column('meanamount', postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.Column('maxamount', sa.INTEGER(), nullable=True),
        sa.Column('accuracy', postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.Column('maxworkrate', postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.Column('medianworkrate', postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.Column('hidelabels', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=True),
        sa.Column('validators', sa.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['modeid'], [u'batchingmodes.modeid'],
                                name=u'subtasks_modeid_fkey'),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'subtasks_taskid_fkey'),
        sa.ForeignKeyConstraint(['worktypeid'], [u'worktypes.worktypeid'],
                                name=u'subtasks_worktypeid_fkey'),
        sa.PrimaryKeyConstraint(u'subtaskid', name=op.f('pk_subtasks')),
        sa.CheckConstraint("dst_dir=ANY(ARRAY['ltr','rtl'])"),
        sa.CheckConstraint("getpolicy=ANY(ARRAY['nolimit','oneonly'])"),
    )
    op.create_index('ix_subtasks_taskid', 'subtasks', ['taskid'], unique=False)
    op.create_index(op.f('ix_subtasks_taskid_worktypeid_name'), 'subtasks',
                    ['taskid', 'worktypeid', 'name'], unique=True)
    op.create_index('ix_subtasks_taskid_subtaskid', 'subtasks',
                    ['taskid', 'subtaskid'], unique=True)
    # Error types enabled per task; severity is bounded to [0, 1].
    op.create_table(
        'taskerrortypes',
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('errortypeid', sa.INTEGER(), nullable=False),
        sa.Column('severity', postgresql.DOUBLE_PRECISION(),
                  server_default=sa.text(u'1'), nullable=False),
        sa.Column('disabled', sa.BOOLEAN(), server_default=sa.text(u'false'),
                  nullable=False),
        sa.ForeignKeyConstraint(['errortypeid'], [u'errortypes.errortypeid'],
                                name=u'taskerrortypes_errortypeid_fkey'),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'taskerrortypes_taskid_fkey'),
        sa.PrimaryKeyConstraint(u'taskid', u'errortypeid',
                                name=op.f('pk_taskerrortypes')),
        sa.CheckConstraint('severity>=0 AND severity<=1'),
    )
    # Generated reports attached to a task.
    op.create_table(
        'taskreports',
        sa.Column('taskreportid', sa.INTEGER(), nullable=False),
        sa.Column('reporttypeid', sa.INTEGER(), nullable=False),
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('filename', sa.TEXT(), nullable=True),
        sa.Column('title', sa.TEXT(), nullable=True),
        sa.Column('usergroupid', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['reporttypeid'], [u'taskreporttypes.reporttypeid'],
                                name=u'taskreports_reporttypeid_fkey'),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'taskreports_taskid_fkey'),
        sa.PrimaryKeyConstraint(u'taskreportid', name=op.f('pk_taskreports')))
    op.create_index('taskreportsbytaskid', 'taskreports', ['taskid'],
                    unique=False)
    # Supervisors per task with notification flags.
    op.create_table(
        'tasksupervisors',
        sa.Column('taskid', sa.INTEGER(), nullable=False),
        sa.Column('userid', sa.INTEGER(), nullable=False),
        sa.Column('receivesfeedback', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=False),
        sa.Column('informloads', sa.BOOLEAN(),
                  server_default=sa.text(u'false'), nullable=False),
        sa.ForeignKeyConstraint(['taskid'], [u'tasks.taskid'],
                                name=u'tasksupervisors_taskid_fkey'),
        sa.ForeignKeyConstraint(['userid'], [u'users.userid'],
                                name=u'tasksupervisors_userid_fkey'),
        sa.PrimaryKeyConstraint(u'taskid', u'userid',
                                name=op.f('pk_tasksupervisors')))
def upgrade():
    """Add an optional duration interval column to crontabber_log."""
    op.add_column(
        u'crontabber_log',
        sa.Column(u'duration', postgresql.INTERVAL(), nullable=True),
    )
def downgrade():
    """Re-tighten display_name columns and recreate the forecasting_job table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The four display_name columns share an identical definition.
    for table in ("weather_sensor_type", "market_type", "market", "asset_type"):
        op.alter_column(
            table,
            "display_name",
            existing_type=sa.VARCHAR(length=80),
            nullable=False,
            existing_server_default=sa.text("''::character varying"),
        )
    op.create_table(
        "forecasting_job",
        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column("timed_value_type", sa.VARCHAR(length=30),
                  autoincrement=False, nullable=False),
        sa.Column("asset_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column("start", postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=False),
        sa.Column("end", postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=False),
        sa.Column("horizon", postgresql.INTERVAL(),
                  autoincrement=False, nullable=False),
        sa.Column("in_progress_since", postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint("id", name="forecasting_job_pkey"),
    )
def downgrade():
    """Revert the scheduling/review schema changes.

    WARNING: destructive - drops the review tables, rewrites task statuses
    and deletes the review-related status rows.
    """
    op.add_column(
        'Vacations',
        sa.Column('timing_resolution', postgresql.INTERVAL(), nullable=True))
    #op.drop_constraint(None, 'Users')
    op.add_column(
        'TimeLogs',
        sa.Column('timing_resolution', postgresql.INTERVAL(), nullable=True))
    op.add_column('Tasks', sa.Column('responsible_id', sa.INTEGER(), nullable=True))
    # Restore data: copy responsibles back from the association table
    # before it is dropped below.
    op.execute(""" UPDATE "Tasks" SET responsible_id = t2.responsible_id FROM ( SELECT task_id, responsible_id FROM "Task_Responsible" ) as t2 WHERE "Tasks".id = t2.task_id """)
    op.add_column(
        'Tasks',
        sa.Column('timing_resolution', postgresql.INTERVAL(), nullable=True))
    op.drop_column('Tasks', 'persistent_allocation')
    op.drop_column('Tasks', 'allocation_strategy')
    op.drop_column('Tasks', 'review_number')
    op.alter_column('Task_Dependencies', 'depends_to_id',
                    new_column_name='depends_to_task_id')
    op.drop_column('Task_Dependencies', 'gap_unit')
    op.drop_column('Task_Dependencies', 'gap_timing')
    op.drop_column('Task_Dependencies', 'gap_model')
    op.drop_column('Task_Dependencies', 'gap_constraint')
    op.drop_column('Task_Dependencies', 'dependency_target')
    op.add_column(
        'Studios',
        sa.Column('daily_working_hours', sa.INTEGER(), nullable=True))
    op.drop_column('Studios', 'scheduling_started_at')
    op.drop_column('Studios', 'last_scheduled_by_id')
    op.drop_column('Studios', 'last_scheduled_at')
    op.drop_column('Studios', 'last_schedule_message')
    op.drop_column('Studios', 'is_scheduling_by_id')
    op.drop_column('Studios', 'is_scheduling')
    op.add_column(
        'Projects',
        sa.Column('timing_resolution', postgresql.INTERVAL(), nullable=True))
    op.drop_column('EntityTypes', 'dateable')
    op.drop_table('Task_Alternative_Resources')
    op.drop_table('Task_Computed_Resources')
    op.drop_table('Reviews')
    # will loose all the responsible data, change if you care!
    op.drop_table('Task_Responsible')
    # Update all WFD Tasks to NEW
    op.execute("""update "Tasks" set status_id = (select id from "Statuses" where code='NEW') where status_id = (select id from "Statuses" where code='WFD')""")
    # Update all OH Tasks to WIP
    op.execute("""update "Tasks" set status_id = (select id from "Statuses" where code='WIP') where status_id = (select id from "Statuses" where code='OH')""")
    # NOTE(review): the original comment said "STOP or DREV Tasks to CMPL",
    # but the SQL sets them to WIP - confirm which is intended.
    op.execute("""update "Tasks" set status_id = (select id from "Statuses" where code='WIP') where status_id in (select id from "Statuses" where code in ('STOP', 'DREV'))""" )
    # NOTE(review): redundant - STOP tasks were already covered by the
    # statement above.
    op.execute("""update "Tasks" set status_id = (select id from "Statuses" where code='WIP') where status_id = (select id from "Statuses" where code='STOP')""")
    # Delete the review-workflow Statuses (and their entity rows).
    op.execute("""DELETE FROM "StatusList_Statuses" WHERE status_id IN ( select id FROM "SimpleEntities" WHERE name IN ('Waiting For Dependency', 'Dependency Has Revision', 'On Hold', 'Stopped', 'Requested Revision', 'Approved')); DELETE FROM "Statuses" WHERE id IN (select id FROM "SimpleEntities" WHERE name IN ('Waiting For Dependency', 'Dependency Has Revision', 'On Hold', 'Stopped', 'Requested Revision', 'Approved')); DELETE FROM "Entities" WHERE id IN (select id FROM "SimpleEntities" WHERE name IN ('Waiting For Dependency', 'Dependency Has Revision', 'On Hold', 'Stopped', 'Requested Revision', 'Approved')); DELETE FROM "SimpleEntities" WHERE name IN ('Waiting For Dependency', 'Dependency Has Revision', 'On Hold', 'Stopped', 'Requested Revision', 'Approved'); """)
    # Delete the Review Status List (and its entity rows).
    op.execute(""" DELETE FROM "StatusList_Statuses" WHERE status_list_id=(SELECT id FROM "SimpleEntities" WHERE name='Review Status List'); DELETE FROM "StatusLists" WHERE id=(SELECT id FROM "SimpleEntities" WHERE name='Review Status List'); DELETE FROM "Entities" WHERE id=(SELECT id FROM "SimpleEntities" WHERE name='Review Status List'); DELETE FROM "SimpleEntities" WHERE name = 'Review Status List'; 
""")
    # Column name changes: restore the pre-upgrade underscore-prefixed
    # names (reverses the upgrade's _cut_in -> cut_in etc. renames).
    op.alter_column('Shots', 'cut_in', new_column_name='_cut_in')
    op.alter_column('Shots', 'cut_out', new_column_name='_cut_out')
    op.alter_column('Tasks', 'schedule_seconds',
                    new_column_name='_schedule_seconds')
    op.alter_column('Tasks', 'total_logged_seconds',
                    new_column_name='_total_logged_seconds')
def upgrade() -> None:
    """Create the initial charge-point / EV-charging schema.

    Tables are created parents-first so every foreign-key target already
    exists: core entities (chargepoint, tokengroup, user,
    vehiclemanufacturer), then dependents (connector, token, vehiclemodel),
    then transactional/event tables (authorizationrequest,
    chargepointerror, inprogresstransaction, transaction, vehicle,
    vehicle_users).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- core entities (no foreign keys) ---
    op.create_table(
        'chargepoint',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('model', sa.String(length=20), nullable=False),
        sa.Column('vendor', sa.String(length=20), nullable=False),
        sa.Column('serial_number', sa.String(length=25), nullable=True),
        sa.Column('firmware', sa.String(length=50), nullable=True),
        sa.Column('type', sa.String(length=25), nullable=True),
        sa.Column('last_heartbeat', sa.DateTime(timezone=True), nullable=True),
        sa.Column('boot_timestamp', sa.DateTime(timezone=True), nullable=True),
        # 'identity' is the unique external identifier of the charge point
        sa.Column('identity', sa.String(length=50), nullable=False),
        sa.Column('connected', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('identity'))
    op.create_index(op.f('ix_chargepoint_id'), 'chargepoint', ['id'], unique=False)
    op.create_table('tokengroup',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('token', sa.String(length=20), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('token'))
    op.create_index(op.f('ix_tokengroup_id'), 'tokengroup', ['id'], unique=False)
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('password', sa.String(length=128), nullable=False),
        sa.Column('last_login', sa.DateTime(timezone=True), nullable=True),
        sa.Column('is_superuser', sa.Boolean(), nullable=False),
        sa.Column('first_name', sa.String(length=150), nullable=False),
        sa.Column('last_name', sa.String(length=150), nullable=False),
        sa.Column('email', sa.String(length=254), nullable=False),
        sa.Column('is_staff', sa.Boolean(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('date_joined', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))
    op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
    op.create_table('vehiclemanufacturer',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_vehiclemanufacturer_id'), 'vehiclemanufacturer', ['id'], unique=False)
    # --- first-level dependents ---
    op.create_table(
        'connector',
        sa.Column('id', sa.Integer(), nullable=False),
        # 'connector_id' is the connector's number on its charge point;
        # unique only per charge point (see the composite constraint below)
        sa.Column('connector_id', sa.Integer(), nullable=False),
        sa.Column('chargepoint_id', sa.Integer(), nullable=False),
        sa.Column('available', sa.Boolean(), nullable=False),
        sa.Column('in_use', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['chargepoint_id'], ['chargepoint.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('connector_id', 'chargepoint_id'))
    op.create_index(op.f('ix_connector_chargepoint_id'), 'connector', ['chargepoint_id'], unique=False)
    op.create_index(op.f('ix_connector_id'), 'connector', ['id'], unique=False)
    op.create_table(
        'token',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('token', sa.String(length=20), nullable=False),
        sa.Column('token_group_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['token_group_id'], ['tokengroup.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('token'))
    op.create_index(op.f('ix_token_id'), 'token', ['id'], unique=False)
    op.create_index(op.f('ix_token_token_group_id'), 'token', ['token_group_id'], unique=False)
    op.create_index(op.f('ix_token_user_id'), 'token', ['user_id'], unique=False)
    op.create_table(
        'vehiclemodel',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('manufacturer_id', sa.Integer(), nullable=True),
        sa.Column('model', sa.String(length=50), nullable=True),
        sa.ForeignKeyConstraint(['manufacturer_id'], ['vehiclemanufacturer.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_vehiclemodel_id'), 'vehiclemodel', ['id'], unique=False)
    op.create_index(op.f('ix_vehiclemodel_manufacturer_id'), 'vehiclemodel', ['manufacturer_id'], unique=False)
    # --- event / transactional tables ---
    op.create_table(
        'authorizationrequest',
        sa.Column('id', sa.Integer(), nullable=False),
        # token_id is nullable: the presented token string may not match a
        # known token, so the raw string is also stored in 'token_string'
        sa.Column('token_id', sa.Integer(), nullable=True),
        sa.Column('chargepoint_id', sa.Integer(), nullable=False),
        sa.Column('token_string', sa.String(length=20), nullable=True),
        sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['chargepoint_id'], ['chargepoint.id'], ),
        sa.ForeignKeyConstraint(['token_id'], ['token.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_authorizationrequest_chargepoint_id'), 'authorizationrequest', ['chargepoint_id'], unique=False)
    op.create_index(op.f('ix_authorizationrequest_id'), 'authorizationrequest', ['id'], unique=False)
    op.create_index(op.f('ix_authorizationrequest_token_id'), 'authorizationrequest', ['token_id'], unique=False)
    op.create_table(
        'chargepointerror',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('connector_id', sa.Integer(), nullable=True),
        sa.Column('error_code', sa.String(length=50), nullable=True),
        sa.Column('info', sa.String(length=50), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
        sa.Column('vendor_id', sa.String(length=255), nullable=True),
        sa.Column('vendor_error_code', sa.String(length=50), nullable=True),
        sa.ForeignKeyConstraint(['connector_id'], ['connector.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_chargepointerror_connector_id'), 'chargepointerror', ['connector_id'], unique=False)
    op.create_index(op.f('ix_chargepointerror_id'), 'chargepointerror', ['id'], unique=False)
    # Open charging session: only the start-side data is known yet.
    op.create_table(
        'inprogresstransaction',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('start_timestamp', sa.DateTime(timezone=True), nullable=False),
        sa.Column('meter_start', sa.Integer(), nullable=False),
        sa.Column('reservation_id', sa.Integer(), nullable=True),
        sa.Column('connector_id', sa.Integer(), nullable=False),
        sa.Column('start_token_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['connector_id'], ['connector.id'], ),
        sa.ForeignKeyConstraint(['start_token_id'], ['token.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_inprogresstransaction_connector_id'), 'inprogresstransaction', ['connector_id'], unique=False)
    op.create_index(op.f('ix_inprogresstransaction_id'), 'inprogresstransaction', ['id'], unique=False)
    op.create_index(op.f('ix_inprogresstransaction_start_token_id'), 'inprogresstransaction', ['start_token_id'], unique=False)
    # Completed charging session with both start and end readings.
    op.create_table(
        'transaction',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('reservation_id', sa.Integer(), nullable=True),
        sa.Column('start_timestamp', sa.DateTime(timezone=True), nullable=False),
        sa.Column('end_timestamp', sa.DateTime(timezone=True), nullable=False),
        sa.Column('duration', postgresql.INTERVAL(), nullable=False),
        sa.Column('meter_start', sa.Integer(), nullable=False),
        sa.Column('meter_stop', sa.Integer(), nullable=False),
        sa.Column('meter_used', sa.Integer(), nullable=False),
        sa.Column('stop_reason', sa.Text(), nullable=True),
        sa.Column('connector_id', sa.Integer(), nullable=True),
        sa.Column('end_token_id', sa.Integer(), nullable=True),
        sa.Column('start_token_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['connector_id'], ['connector.id'], ),
        sa.ForeignKeyConstraint(['end_token_id'], ['token.id'], ),
        sa.ForeignKeyConstraint(['start_token_id'], ['token.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_transaction_connector_id'), 'transaction', ['connector_id'], unique=False)
    op.create_index(op.f('ix_transaction_end_token_id'), 'transaction', ['end_token_id'], unique=False)
    op.create_index(op.f('ix_transaction_id'), 'transaction', ['id'], unique=False)
    op.create_index(op.f('ix_transaction_start_token_id'), 'transaction', ['start_token_id'], unique=False)
    op.create_table(
        'vehicle',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('model_id', sa.Integer(), nullable=True),
        sa.Column('registration', sa.String(length=20), nullable=False),
        # 'owner' references user.id despite the non-suffixed column name
        sa.Column('owner', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['model_id'], ['vehiclemodel.id'], ),
        sa.ForeignKeyConstraint(['owner'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_vehicle_id'), 'vehicle', ['id'], unique=False)
    op.create_index(op.f('ix_vehicle_model_id'), 'vehicle', ['model_id'], unique=False)
    # Many-to-many association between users and vehicles; note it has no
    # primary key or uniqueness constraint as generated.
    op.create_table('vehicle_users',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('vehicle_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['vehicle_id'], ['vehicle.id'], ))
def upgrade() -> None:
    """Create the audio-collection schema.

    Creation order is parents-first: lookup tables (statuses, importers,
    change methods, platform types), then audio_collections and its
    per-collection configuration tables (meta categories, recording
    platforms, speakers, albums, corpus codes, tracks), then the data and
    audit tables (performances, recordings, audio_files, *_meta_values,
    change/status logs). Foreign keys to `projects.projectid` and
    `users.userid` reference pre-existing tables outside this migration.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # --- lookup tables (no dependencies) ---
    op.create_table(
        'audio_collection_statuses',
        sa.Column('audio_collection_status_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.PrimaryKeyConstraint('audio_collection_status_id'),
        sa.UniqueConstraint('name'))
    # Association table; no FK constraints declared, only indexes.
    op.create_table(
        'audio_collection_supervisors',
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('user_id', sa.INTEGER(), nullable=False),
        sa.PrimaryKeyConstraint('audio_collection_id', 'user_id'))
    op.create_index('audio_collection_supervisors_by_audio_collection_id', 'audio_collection_supervisors', ['audio_collection_id'], unique=False)
    op.create_index('audio_collection_supervisors_by_user_id', 'audio_collection_supervisors', ['user_id'], unique=False)
    op.create_table(
        'audio_importers',
        sa.Column('audio_importer_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('all_performances_incomplete', sa.BOOLEAN(), server_default=sa.text(u'FALSE'), nullable=False),
        sa.PrimaryKeyConstraint('audio_importer_id'),
        sa.UniqueConstraint('name'))
    op.create_table(
        'meta_data_change_methods',
        sa.Column('meta_data_change_method_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.PrimaryKeyConstraint('meta_data_change_method_id'),
        sa.UniqueConstraint('name'))
    op.create_table(
        'recording_platform_types',
        sa.Column('recording_platform_type_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.PrimaryKeyConstraint('recording_platform_type_id'),
        sa.UniqueConstraint('name'))
    # --- root entity: one audio collection per (project, name) ---
    op.create_table(
        'audio_collections',
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('project_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('key', sa.VARCHAR(length=6), nullable=True),
        sa.Column('audio_collection_status_id', sa.INTEGER(), nullable=False),
        sa.Column('archive_file', sa.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['audio_collection_status_id'], ['audio_collection_statuses.audio_collection_status_id'], ),
        sa.ForeignKeyConstraint(['project_id'], ['projects.projectid'], ),
        sa.PrimaryKeyConstraint('audio_collection_id'),
        sa.UniqueConstraint('project_id', 'name'))
    op.create_index('audio_collections_by_project_id', 'audio_collections', ['project_id'], unique=False)
    op.create_table(
        'album_meta_categories',
        sa.Column('album_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('validator', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.PrimaryKeyConstraint('album_meta_category_id'),
        sa.UniqueConstraint('audio_collection_id', 'name'))
    op.create_index('album_meta_categories_by_audio_collection_id', 'album_meta_categories', ['audio_collection_id'], unique=False)
    # Audit trail of status transitions (from/to + who/when).
    op.create_table(
        'audio_collection_status_log',
        sa.Column('log_entry_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('from_audio_collection_status_id', sa.INTEGER(), nullable=False),
        sa.Column('to_audio_collection_status_id', sa.INTEGER(), nullable=False),
        sa.Column('changed_by', sa.INTEGER(), nullable=False),
        sa.Column('changed_at', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['changed_by'], ['users.userid'], ),
        sa.ForeignKeyConstraint(['from_audio_collection_status_id'], ['audio_collection_statuses.audio_collection_status_id'], ),
        sa.ForeignKeyConstraint(['to_audio_collection_status_id'], ['audio_collection_statuses.audio_collection_status_id'], ),
        sa.PrimaryKeyConstraint('log_entry_id'))
    op.create_index('audio_collections_status_log_audio_collection_id', 'audio_collection_status_log', ['audio_collection_id'], unique=False)
    op.create_table(
        'meta_data_change_log',
        sa.Column('log_entry_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('meta_data_change_method_id', sa.INTEGER(), nullable=False),
        sa.Column('info', postgresql.JSONB(), nullable=False),
        sa.Column('changed_by', sa.INTEGER(), nullable=False),
        sa.Column('changed_at', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['meta_data_change_method_id'], ['meta_data_change_methods.meta_data_change_method_id'], ),
        sa.PrimaryKeyConstraint('log_entry_id'))
    op.create_index('meta_data_change_log_by_audio_collection_id', 'meta_data_change_log', ['audio_collection_id'], unique=False)
    op.create_table(
        'meta_data_change_requests',
        sa.Column('meta_data_change_request_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('meta_data_change_method_id', sa.INTEGER(), nullable=False),
        sa.Column('info', postgresql.JSONB(), nullable=False),
        sa.Column('requested_by', sa.INTEGER(), nullable=False),
        sa.Column('requested_at', postgresql.TIMESTAMP(timezone=True), nullable=False),
        # 'status' is constrained to the three workflow states below
        sa.Column('status', sa.TEXT(), nullable=False),
        sa.Column('checked_by', sa.INTEGER(), nullable=False),
        sa.Column('checked_at', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.CheckConstraint(u"status IN ('Pending', 'Rejected', 'Accepted')"),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['checked_by'], ['users.userid'], ),
        sa.ForeignKeyConstraint(['meta_data_change_method_id'], ['meta_data_change_methods.meta_data_change_method_id'], ),
        sa.ForeignKeyConstraint(['requested_by'], ['users.userid'], ),
        sa.PrimaryKeyConstraint('meta_data_change_request_id'))
    op.create_index('meta_data_change_requests_by_audio_collection_id', 'meta_data_change_requests', ['audio_collection_id'], unique=False)
    op.create_table(
        'recording_platforms',
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_type_id', sa.INTEGER(), nullable=False),
        sa.Column('storage_location', sa.TEXT(), nullable=True),
        # 'index' orders platforms within a collection; quoted in the check
        # constraint because it is a reserved word
        sa.Column('index', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('audio_importer_id', sa.INTEGER(), nullable=True),
        sa.Column('default_audio_spec', postgresql.JSONB(), nullable=True),
        sa.Column('master_script_file', postgresql.JSONB(), nullable=True),
        sa.Column('master_hypothesis_file', postgresql.JSONB(), nullable=True),
        sa.Column('config', postgresql.JSONB(), nullable=True),
        sa.CheckConstraint(u'"index" >= 0'),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['audio_importer_id'], ['audio_importers.audio_importer_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_type_id'], ['recording_platform_types.recording_platform_type_id'], ),
        sa.PrimaryKeyConstraint('recording_platform_id'),
        sa.UniqueConstraint('audio_collection_id', 'index'),
        sa.UniqueConstraint('audio_collection_id', 'name'))
    op.create_index('recording_platforms_by_audio_collection_id', 'recording_platforms', ['audio_collection_id'], unique=False)
    op.create_table(
        'speaker_meta_categories',
        sa.Column('speaker_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('validator', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.PrimaryKeyConstraint('speaker_meta_category_id'),
        sa.UniqueConstraint('audio_collection_id', 'name'))
    op.create_index('speaker_meta_categories_by_audio_collection_id', 'speaker_meta_categories', ['audio_collection_id'], unique=False)
    op.create_table(
        'speakers',
        sa.Column('speaker_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('identifier', sa.TEXT(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.PrimaryKeyConstraint('speaker_id'),
        sa.UniqueConstraint('audio_collection_id', 'identifier'))
    op.create_index('speakers_by_audio_collection_id', 'speakers', ['audio_collection_id'], unique=False)
    op.create_table(
        'albums',
        sa.Column('album_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('speaker_id', sa.INTEGER(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['speaker_id'], ['speakers.speaker_id'], ),
        sa.PrimaryKeyConstraint('album_id'))
    op.create_index('albums_by_audio_collection_id', 'albums', ['audio_collection_id'], unique=False)
    op.create_index('albums_by_speaker_id', 'albums', ['speaker_id'], unique=False)
    op.create_table(
        'corpus_codes',
        sa.Column('corpus_code_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('code', sa.TEXT(), nullable=False),
        sa.Column('requires_cutup', sa.BOOLEAN(), nullable=False),
        sa.Column('included', sa.BOOLEAN(), nullable=True),
        sa.Column('regex', sa.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.PrimaryKeyConstraint('corpus_code_id'),
        sa.UniqueConstraint('audio_collection_id', 'code'))
    op.create_index('corpus_codes_by_audio_collection_id', 'corpus_codes', ['audio_collection_id'], unique=False)
    op.create_table(
        'performance_meta_categories',
        sa.Column('performance_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        # 'extractor' is nullable (optional) unlike 'validator'
        sa.Column('extractor', postgresql.JSONB(), nullable=True),
        sa.Column('validator', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.PrimaryKeyConstraint('performance_meta_category_id'),
        sa.UniqueConstraint('audio_collection_id', 'name'))
    op.create_index('performance_meta_categories_by_audio_collection_id', 'performance_meta_categories', ['audio_collection_id'], unique=False)
    op.create_table(
        'recording_meta_categories',
        sa.Column('recording_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('validator', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.PrimaryKeyConstraint('recording_meta_category_id'),
        sa.UniqueConstraint('audio_collection_id', 'name'))
    op.create_index('recording_meta_categories_by_audio_collection_id', 'recording_meta_categories', ['audio_collection_id'], unique=False)
    # --- metadata value tables: one value per (category, entity) pair ---
    op.create_table(
        'speaker_meta_values',
        sa.Column('speaker_meta_value_id', sa.INTEGER(), nullable=False),
        sa.Column('speaker_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('speaker_id', sa.INTEGER(), nullable=False),
        sa.Column('value', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['speaker_id'], ['speakers.speaker_id'], ),
        sa.ForeignKeyConstraint(['speaker_meta_category_id'], ['speaker_meta_categories.speaker_meta_category_id'], ),
        sa.PrimaryKeyConstraint('speaker_meta_value_id'),
        sa.UniqueConstraint('speaker_meta_category_id', 'speaker_id'))
    op.create_index('speaker_meta_values_by_speaker_id', 'speaker_meta_values', ['speaker_id'], unique=False)
    op.create_index('speaker_meta_values_by_speaker_meta_category_id', 'speaker_meta_values', ['speaker_meta_category_id'], unique=False)
    op.create_table(
        'tracks',
        sa.Column('track_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('track_index', sa.INTEGER(), nullable=False),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.PrimaryKeyConstraint('track_id'),
        sa.UniqueConstraint('recording_platform_id', 'name'),
        sa.UniqueConstraint('recording_platform_id', 'track_index'))
    op.create_index('tracks_by_recording_platform_id', 'tracks', ['recording_platform_id'], unique=False)
    op.create_table(
        'album_meta_values',
        sa.Column('album_meta_value_id', sa.INTEGER(), nullable=False),
        sa.Column('album_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('album_id', sa.INTEGER(), nullable=False),
        sa.Column('value', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['album_id'], ['albums.album_id'], ),
        sa.ForeignKeyConstraint(['album_meta_category_id'], ['album_meta_categories.album_meta_category_id'], ),
        sa.PrimaryKeyConstraint('album_meta_value_id'),
        sa.UniqueConstraint('album_meta_category_id', 'album_id'))
    op.create_index('album_meta_values_by_album_id', 'album_meta_values', ['album_id'], unique=False)
    op.create_index('album_meta_values_by_album_meta_category_id', 'album_meta_values', ['album_meta_category_id'], unique=False)
    op.create_table(
        'performances',
        sa.Column('performance_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('album_id', sa.INTEGER(), nullable=True),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('script_id', sa.TEXT(), nullable=True),
        sa.Column('name', sa.TEXT(), nullable=True),
        sa.Column('data', postgresql.JSONB(), nullable=True),
        sa.Column('key', sa.VARCHAR(length=8), nullable=True),
        sa.Column('imported_at', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['album_id'], ['albums.album_id'], ),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.PrimaryKeyConstraint('performance_id'))
    op.create_index('performances_by_album_id', 'performances', ['album_id'], unique=False)
    op.create_index('performances_by_audio_collection_id', 'performances', ['audio_collection_id'], unique=False)
    op.create_index('performances_by_recording_platform_id', 'performances', ['recording_platform_id'], unique=False)
    op.create_table(
        'performance_meta_values',
        sa.Column('performance_meta_value_id', sa.INTEGER(), nullable=False),
        sa.Column('performance_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('performance_id', sa.INTEGER(), nullable=False),
        sa.Column('value', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['performance_id'], ['performances.performance_id'], ),
        sa.ForeignKeyConstraint(['performance_meta_category_id'], ['performance_meta_categories.performance_meta_category_id'], ),
        sa.PrimaryKeyConstraint('performance_meta_value_id'),
        sa.UniqueConstraint('performance_meta_category_id', 'performance_id'))
    op.create_index('performance_meta_values_by_performance_id', 'performance_meta_values', ['performance_id'], unique=False)
    op.create_index('performance_meta_values_by_performance_meta_category_id', 'performance_meta_values', ['performance_meta_category_id'], unique=False)
    op.create_table(
        'recordings',
        sa.Column('recording_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('performance_id', sa.INTEGER(), nullable=False),
        sa.Column('corpus_code_id', sa.INTEGER(), nullable=False),
        sa.Column('prompt', sa.TEXT(), nullable=True),
        sa.Column('hypothesis', sa.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['corpus_code_id'], ['corpus_codes.corpus_code_id'], ),
        sa.ForeignKeyConstraint(['performance_id'], ['performances.performance_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.PrimaryKeyConstraint('recording_id'))
    op.create_index('recordings_by_audio_collection_id', 'recordings', ['audio_collection_id'], unique=False)
    op.create_index('recordings_by_performance_id', 'recordings', ['performance_id'], unique=False)
    # One physical audio file per (recording, track); path is globally unique.
    op.create_table(
        'audio_files',
        sa.Column('audio_file_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_id', sa.INTEGER(), nullable=False),
        sa.Column('audio_collection_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_platform_id', sa.INTEGER(), nullable=False),
        sa.Column('track_id', sa.INTEGER(), nullable=False),
        sa.Column('file_path', sa.TEXT(), nullable=False),
        sa.Column('audio_spec', postgresql.JSONB(), nullable=False),
        sa.Column('audio_data_location', postgresql.JSONB(), nullable=False),
        sa.Column('duration', postgresql.INTERVAL(), nullable=False),
        sa.Column('stats', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['audio_collection_id'], ['audio_collections.audio_collection_id'], ),
        sa.ForeignKeyConstraint(['recording_id'], ['recordings.recording_id'], ),
        sa.ForeignKeyConstraint(['recording_platform_id'], ['recording_platforms.recording_platform_id'], ),
        sa.ForeignKeyConstraint(['track_id'], ['tracks.track_id'], ),
        sa.PrimaryKeyConstraint('audio_file_id'),
        sa.UniqueConstraint('file_path'))
    op.create_index('audio_files_by_recording_id', 'audio_files', ['recording_id'], unique=False)
    op.create_table(
        'recording_meta_values',
        sa.Column('recording_meta_value_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_meta_category_id', sa.INTEGER(), nullable=False),
        sa.Column('recording_id', sa.INTEGER(), nullable=False),
        sa.Column('value', postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(['recording_id'], ['recordings.recording_id'], ),
        sa.ForeignKeyConstraint(['recording_meta_category_id'], ['recording_meta_categories.recording_meta_category_id'], ),
        sa.PrimaryKeyConstraint('recording_meta_value_id'),
        sa.UniqueConstraint('recording_meta_category_id', 'recording_id'))
    op.create_index('recording_meta_values_by_recording_id', 'recording_meta_values', ['recording_id'], unique=False)
    op.create_index('recording_meta_values_by_recording_meta_category_id', 'recording_meta_values', ['recording_meta_category_id'], unique=False)
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Build out the "q" schema: question pools, questions, tests, answer
    # sheets, per-sheet entries, submitted answers, and scorer markings.
    # NOTE(review): some constraints use op.f() naming-convention names while
    # others carry explicit legacy names (e.g. u'q_tests_poolid_fkey') —
    # presumably to match the pre-existing database; do not normalize.

    # A pool of questions, owned by a task type; optionally tied to a tag set.
    op.create_table(
        'pools',
        sa.Column('pool_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('meta', sa.TEXT(), nullable=False),
        sa.Column('task_type_id', sa.INTEGER(), nullable=False),
        sa.Column('auto_scoring', sa.BOOLEAN(), server_default=sa.text(u'FALSE'), nullable=False),
        sa.Column('tag_set_id', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['tag_set_id'], [u'tagsets.tagsetid'], name=op.f('fk_pools_tag_set_id_tagsets')),
        sa.ForeignKeyConstraint(['task_type_id'], [u'tasktypes.tasktypeid'], name=op.f('fk_pools_task_type_id_tasktypes')),
        sa.PrimaryKeyConstraint('pool_id', name=op.f('pk_pools')),
        sa.UniqueConstraint('name', name=op.f('uq_pools_name')),
        schema='q',
    )

    # Individual questions; respondent/scorer payloads default to empty-JSON text.
    op.create_table(
        'questions',
        sa.Column('question_id', sa.INTEGER(), nullable=False),
        sa.Column('pool_id', sa.INTEGER(), nullable=False),
        sa.Column('respondent_data', sa.TEXT(), server_default=sa.text(u"'{}'::text"), nullable=False),
        sa.Column('scorer_data', sa.TEXT(), server_default=sa.text(u"'{}'::text"), nullable=False),
        sa.Column('auto_scoring', sa.BOOLEAN(), server_default=sa.text(u'FALSE'), nullable=False),
        sa.Column('point', postgresql.DOUBLE_PRECISION(), server_default=sa.text(u'1.0'), nullable=False),
        sa.Column('type', sa.TEXT(), server_default=sa.text(u"'text'::bpchar"), nullable=False),
        sa.ForeignKeyConstraint(['pool_id'], [u'q.pools.pool_id'], name=op.f('fk_questions_pool_id_pools')),
        sa.PrimaryKeyConstraint(u'question_id', name=op.f('pk_questions')),
        schema='q',
    )

    # A test draws on one pool; default time limit is two hours.
    op.create_table(
        'tests',
        sa.Column('test_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('description', sa.TEXT(), nullable=True),
        sa.Column('instruction_page', sa.TEXT(), nullable=True),
        sa.Column('requirement', sa.TEXT(), server_default=sa.text(u"'{}'::text"), nullable=False),
        sa.Column('time_limit', postgresql.INTERVAL(), server_default=sa.text(u"'02:00:00'::interval"), nullable=False),
        sa.Column('tag_set_id', sa.INTEGER(), nullable=True),
        sa.Column('passing_score', postgresql.DOUBLE_PRECISION(), nullable=False),
        sa.Column('test_type', sa.TEXT(), server_default=sa.text(u"'static'::bpchar"), nullable=True),
        sa.Column('size', sa.INTEGER(), nullable=True),
        sa.Column('enabled', sa.BOOLEAN(), server_default=sa.text(u'TRUE'), nullable=False),
        sa.Column('task_type_id', sa.INTEGER(), nullable=False),
        sa.Column('pool_id', sa.INTEGER(), nullable=False),
        sa.Column('message_success', sa.TEXT(), nullable=True),
        sa.Column('message_failure', sa.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['pool_id'], [u'q.pools.pool_id'], name=u'q_tests_poolid_fkey'),
        sa.ForeignKeyConstraint(['tag_set_id'], [u'tagsets.tagsetid'], name=u'q_tests_tagsetid_fkey'),
        sa.ForeignKeyConstraint(['task_type_id'], [u'tasktypes.tasktypeid'], name=u'q_tests__tasktypeid_fkey'),
        sa.PrimaryKeyConstraint(u'test_id', name=op.f('pk_tests')),
        schema='q',
    )

    # One user's attempt at a test, with start/expiry/finish timestamps and score.
    op.create_table(
        'answer_sheets',
        sa.Column('sheet_id', sa.INTEGER(), nullable=False),
        sa.Column('test_id', sa.INTEGER(), nullable=False),
        sa.Column('userid', sa.INTEGER(), nullable=False),
        sa.Column('n_times', sa.INTEGER(), nullable=False),
        sa.Column('t_started_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text(u'now()'), nullable=False),
        sa.Column('t_expires_by', postgresql.TIMESTAMP(timezone=True), nullable=False),
        sa.Column('t_expired_at', postgresql.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('t_finished_at', postgresql.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('score', postgresql.DOUBLE_PRECISION(), nullable=True),
        sa.Column('comment', sa.TEXT(), nullable=True),
        sa.Column('more_attempts', sa.BOOLEAN(), server_default=sa.text(u'false'), nullable=False),
        sa.ForeignKeyConstraint(['test_id'], [u'q.tests.test_id'], name=u'q_answer_sheets_testid_fkey'),
        sa.PrimaryKeyConstraint(u'sheet_id', name=op.f('pk_answer_sheets')),
        schema='q',
    )

    # Ordered slots on a sheet, each pointing at a question (answer/marking filled later).
    op.create_table(
        'sheet_entries',
        sa.Column('sheet_entry_id', sa.INTEGER(), nullable=False),
        sa.Column('sheet_id', sa.INTEGER(), nullable=False),
        sa.Column('index', sa.INTEGER(), nullable=False),
        sa.Column('question_id', sa.INTEGER(), nullable=False),
        sa.Column('answer_id', sa.INTEGER(), nullable=True),
        sa.Column('marking_id', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['question_id'], [u'q.questions.question_id'], name=op.f('fk_sheet_entries_question_id_questions')),
        sa.ForeignKeyConstraint(['sheet_id'], [u'q.answer_sheets.sheet_id'], name=op.f('fk_sheet_entries_sheet_id_answer_sheets')),
        sa.PrimaryKeyConstraint(u'sheet_entry_id', name=op.f('pk_sheet_entries')),
        schema='q',
    )

    # The text a respondent submitted for one sheet entry.
    op.create_table(
        'answers',
        sa.Column('answer_id', sa.INTEGER(), nullable=False),
        sa.Column('sheet_entry_id', sa.INTEGER(), nullable=False),
        sa.Column('answer', sa.TEXT(), nullable=False),
        sa.Column('t_created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text(u'now()'), nullable=False),
        sa.ForeignKeyConstraint(['sheet_entry_id'], [u'q.sheet_entries.sheet_entry_id'], name=op.f('fk_answers_sheet_entry_id_sheet_entries')),
        sa.PrimaryKeyConstraint(u'answer_id', name=op.f('pk_answers')),
        schema='q',
    )
    op.create_index('answer_by_sheet_entry_id', 'answers', ['sheet_entry_id'], unique=False, schema='q')

    # A scorer's grade for one sheet entry; composite PK allows one marking per scorer.
    op.create_table(
        'markings',
        sa.Column('marking_id', sa.INTEGER(), nullable=False),
        sa.Column('sheet_entry_id', sa.INTEGER(), nullable=False),
        sa.Column('t_created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text(u'now()'), nullable=False),
        sa.Column('scorer_id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('score', postgresql.DOUBLE_PRECISION(), nullable=False),
        sa.Column('comment', sa.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['scorer_id'], [u'users.userid'], name=u'q_markings_userid_fkey'),
        sa.ForeignKeyConstraint(['sheet_entry_id'], [u'q.sheet_entries.sheet_entry_id'], name=op.f('fk_markings_sheet_entry_id_sheet_entries')),
        sa.PrimaryKeyConstraint(u'marking_id', u'scorer_id', name=op.f('pk_markings')),
        schema='q',
    )
    op.create_index('marking_by_sheet_entry_id', 'markings', ['sheet_entry_id'], unique=False, schema='q')