def test_inspector_constructor_engine(self):
    """Direct Inspector(engine) construction warns and binds the engine."""
    expected_warning = (
        r"The __init__\(\) method on Inspector is deprecated and will "
        r"be removed in a future release.")
    with testing.expect_deprecated(expected_warning):
        inspector = reflection.Inspector(testing.db)
    is_(inspector.bind, testing.db)
def upgrade():
    """Add test_id/run_id indexes, deduplicating sqlite's auto-created ones.

    Index creation is skipped when an index with the same name, or an
    existing single-column index covering the same column, already exists.
    """
    migration_context = context.get_context()
    insp = reflection.Inspector(migration_context.bind)
    test_indx = insp.get_indexes('tests')
    test_indx_names = [x['name'] for x in test_indx]
    # BUG FIX: the filter used to be ``len(x) == 1``, which counts the keys
    # of the reflected index dict (always > 1), so *_columns was always
    # empty and the column-based duplicate check never triggered.  The
    # intent is to collect columns covered by single-column indexes.
    test_indx_columns = [x['column_names'][0] for x in test_indx
                         if len(x['column_names']) == 1]
    test_run_indx = insp.get_indexes('test_runs')
    test_run_indx_names = [x['name'] for x in test_run_indx]
    test_run_indx_columns = [x['column_names'][0] for x in test_run_indx
                             if len(x['column_names']) == 1]

    if ('ix_test_id' not in test_indx_names
            and 'test_id' not in test_indx_columns):
        # mysql needs a prefix length to index a text column
        op.create_index('ix_test_id', 'tests', ['test_id'], mysql_length=30)

    # remove auto created indexes (sqlite only)
    # note the name is with test_runs not test_run
    if migration_context.dialect.name == 'sqlite':
        if 'ix_test_runs_test_id' in test_run_indx_names:
            op.drop_index('ix_test_runs_test_id', 'test_runs')
        if 'ix_test_runs_run_id' in test_run_indx_names:
            op.drop_index('ix_test_runs_run_id', 'test_runs')
        with op.batch_alter_table('test_runs') as batch_op:
            batch_op.create_unique_constraint('uq_test_runs',
                                              ['test_id', 'run_id'])
    if ('ix_test_run_test_id' not in test_run_indx_names
            and 'test_id' not in test_run_indx_columns):
        op.create_index('ix_test_run_test_id', 'test_runs', ['test_id'])
    if ('ix_test_run_run_id' not in test_run_indx_names
            and 'run_id' not in test_run_indx_columns):
        op.create_index('ix_test_run_run_id', 'test_runs', ['run_id'])
def upgrade():
    """Add single-column lookup indexes where no equivalent index exists."""

    def _single_column_names(indexes):
        # Columns already covered by a single-column index.
        # BUG FIX: previously this filtered on ``len(x) == 1`` — the number
        # of keys in the reflected index dict, which is never 1 — so every
        # index was created unconditionally even when an equivalent
        # single-column index already existed.
        return [x['column_names'][0] for x in indexes
                if len(x['column_names']) == 1]

    migration_context = context.get_context()
    insp = reflection.Inspector(migration_context.bind)
    test_run_meta_indx = insp.get_indexes('test_run_metadata')
    run_meta_indx = insp.get_indexes('run_metadata')
    test_meta_indx = insp.get_indexes('test_metadata')
    runs_indx = insp.get_indexes('runs')
    attach_indx = insp.get_indexes('attachments')

    if 'run_id' not in _single_column_names(run_meta_indx):
        op.create_index('ix_run_id', 'run_metadata', ['run_id'])
    if 'test_id' not in _single_column_names(test_meta_indx):
        op.create_index('ix_test_id', 'test_metadata', ['test_id'])
    if 'test_run_id' not in _single_column_names(test_run_meta_indx):
        op.create_index('ix_test_run_id', 'test_run_metadata',
                        ['test_run_id'])
    if 'run_at' not in _single_column_names(runs_indx):
        op.create_index('ix_run_at', 'runs', ['run_at'])
    if 'test_run_id' not in _single_column_names(attach_indx):
        op.create_index('ix_attach_test_run_id', 'attachments',
                        ['test_run_id'])
def _check_b96122f780(self, engine, data):
    """Verify test_runs has exactly one index each on test_id and run_id."""
    inspector = reflection.Inspector(engine)
    # Count indexes by the tuple of columns they cover; the migration is
    # supposed to have removed any duplicates.
    counts = {}
    for index in inspector.get_indexes('test_runs'):
        key = tuple(index['column_names'])
        counts[key] = counts.get(key, 0) + 1
    self.assertEqual(1, counts.get(('test_id',), 0))
    self.assertEqual(1, counts.get(('run_id',), 0))
def test_inspector_constructor_connection(self):
    """Inspector(connection) warns and exposes both the connection and engine."""
    with testing.db.connect() as conn:
        expected_warning = (
            r"The __init__\(\) method on Inspector is deprecated and "
            r"will be removed in a future release.")
        with testing.expect_deprecated(expected_warning):
            inspector = reflection.Inspector(conn)
        is_(inspector.bind, conn)
        is_(inspector.engine, testing.db)
def upgrade():
    """Drop redundant test_runs indexes and add a compound tests index."""
    migration_context = context.get_context()
    inspector = reflection.Inspector(migration_context.bind)
    index_names = {x['name'] for x in inspector.get_indexes('test_runs')}
    # When both the auto-created (plural "test_runs") and the hand-made
    # (singular "test_run") index exist for a column, drop the hand-made
    # duplicate and keep the auto-created one.
    duplicate_pairs = (
        ('ix_test_runs_test_id', 'ix_test_run_test_id'),
        ('ix_test_runs_run_id', 'ix_test_run_run_id'),
    )
    for kept, duplicate in duplicate_pairs:
        if kept in index_names and duplicate in index_names:
            op.drop_index(duplicate, 'test_runs')
    # Compound index on (id, test_id); mysql needs a text prefix length
    op.create_index('ix_test_ids', 'tests', ['id', 'test_id'], mysql_length=30)
def upgrade():
    """Rename test_metadata.test_run_id to test_id, handling dialect quirks."""
    migration_context = context.get_context()
    dialect = migration_context.dialect.name
    inspector = reflection.Inspector(migration_context.bind)
    existing_indexes = {
        x['name'] for x in inspector.get_indexes('test_metadata')}
    # Prempt duplicate index creation on sqlite
    if dialect == 'sqlite' and 'ix_test_key_value' in existing_indexes:
        op.drop_index('ix_test_key_value', 'test_metadata')
    # NOTE(mtreinish) on some mysql versions renaming the column with a fk
    # constraint errors out so, delete it before the rename and add it back
    # after
    if dialect == 'mysql':
        op.drop_constraint('test_metadata_ibfk_1', 'test_metadata',
                           'foreignkey')
    with op.batch_alter_table('test_metadata') as batch_op:
        batch_op.alter_column('test_run_id',
                              existing_type=sa.String(36),
                              existing_nullable=False,
                              new_column_name='test_id')
    if dialect == 'mysql':
        op.create_foreign_key('test_metadata_ibfk_1', 'test_metadata',
                              'tests', ["test_id"], ['id'])
def upgrade():
    """Add time, metadata key/value, and compound test_id indexes if absent."""
    migration_context = context.get_context()
    inspector = reflection.Inspector(migration_context.bind)

    def _names(table):
        # Names of the indexes already present on the table
        return {x['name'] for x in inspector.get_indexes(table)}

    run_names = _names('runs')
    test_run_names = _names('test_runs')
    test_run_meta_names = _names('test_run_metadata')
    run_meta_names = _names('run_metadata')
    test_meta_names = _names('test_metadata')

    # (existing index names, index name, table, columns, extra kwargs)
    wanted = [
        # Time columns are often used for searches and filters
        (test_run_names, 'ix_test_start_time', 'test_runs',
         ['start_time'], {}),
        (test_run_names, 'ix_test_stop_time', 'test_runs',
         ['stop_time'], {}),
        (run_names, 'ix_run_at', 'runs', ['run_at'], {}),
        # Compound index on metadata tables key, value columns
        (run_meta_names, 'ix_run_key_value', 'run_metadata',
         ['key', 'value'], {}),
        (test_run_meta_names, 'ix_test_run_key_value', 'test_run_metadata',
         ['key', 'value'], {}),
        (test_meta_names, 'ix_test_key_value', 'test_metadata',
         ['key', 'value'], {}),
        # Compound indexes on test_id + status/start_time: common graph
        # query patterns
        (test_run_names, 'ix_test_id_status', 'test_runs',
         ['test_id', 'status'], {'mysql_length': {'status': 30}}),
        (test_run_names, 'ix_test_id_start_time', 'test_runs',
         ['test_id', 'start_time'], {}),
    ]
    for existing, index_name, table, columns, kwargs in wanted:
        if index_name not in existing:
            op.create_index(index_name, table, columns, **kwargs)
def _check_2822a408bdd0(self, engine, data):
    """Verify the 2822a408bdd0 migration: PKs, id column types, and that
    the pre-migration sample rows were re-linked to the new integer ids.
    """
    # Check Primary Keys
    insp = reflection.Inspector(engine)
    runs_pk = insp.get_pk_constraint('runs')
    self.assertEqual(['id'], runs_pk['constrained_columns'])
    run_meta_pk = insp.get_pk_constraint('run_metadata')
    self.assertEqual(['id'], run_meta_pk['constrained_columns'])
    tests_pk = insp.get_pk_constraint('tests')
    self.assertEqual(['id'], tests_pk['constrained_columns'])
    test_meta_pk = insp.get_pk_constraint('test_metadata')
    self.assertEqual(['id'], test_meta_pk['constrained_columns'])
    # BUG FIX: these two previously reflected 'runs' and 'tests' again
    # (copy-paste), so the test_runs and test_run_metadata primary keys
    # were never actually checked.
    test_runs_pk = insp.get_pk_constraint('test_runs')
    self.assertEqual(['id'], test_runs_pk['constrained_columns'])
    test_run_meta_pk = insp.get_pk_constraint('test_run_metadata')
    self.assertEqual(['id'], test_run_meta_pk['constrained_columns'])
    attach_pk = insp.get_pk_constraint('attachments')
    self.assertEqual(['id'], attach_pk['constrained_columns'])

    # sqlite only autoincrements plain INTEGER primary keys
    if engine.dialect.name == 'sqlite':
        new_id_type = sqlalchemy.Integer
    else:
        new_id_type = sqlalchemy.BigInteger
    # Check id column type
    runs_col = [x for x in insp.get_columns('runs')
                if x['name'] == 'id'][0]
    self.assertIsInstance(runs_col['type'], new_id_type)
    run_meta_col = [
        x for x in insp.get_columns('run_metadata') if x['name'] == 'id'
    ][0]
    self.assertIsInstance(run_meta_col['type'], new_id_type)
    tests_col = [
        x for x in insp.get_columns('tests') if x['name'] == 'id'
    ][0]
    self.assertIsInstance(tests_col['type'], new_id_type)
    test_meta_col = [
        x for x in insp.get_columns('test_metadata') if x['name'] == 'id'
    ][0]
    self.assertIsInstance(test_meta_col['type'], new_id_type)
    test_runs_col = [
        x for x in insp.get_columns('test_runs') if x['name'] == 'id'
    ][0]
    self.assertIsInstance(test_runs_col['type'], new_id_type)
    test_run_meta_col = [
        x for x in insp.get_columns('test_run_metadata')
        if x['name'] == 'id'
    ][0]
    self.assertIsInstance(test_run_meta_col['type'], new_id_type)
    attach_col = [
        x for x in insp.get_columns('attachments') if x['name'] == 'id'
    ][0]
    self.assertIsInstance(attach_col['type'], new_id_type)

    # Check all the new ids match
    runs_t = get_table(engine, 'runs')
    run_ids = [x[1] for x in runs_t.select().execute()]
    run_metadatas_t = get_table(engine, 'run_metadata')
    tests_t = get_table(engine, 'tests')
    test_metadatas_t = get_table(engine, 'test_metadata')
    test_runs_t = get_table(engine, 'test_runs')
    test_runs_raw = list(test_runs_t.select().execute())
    test_run_test_ids = [x[1] for x in test_runs_raw]
    test_run_metadatas_t = get_table(engine, 'test_run_metadata')
    attachments_t = get_table(engine, 'attachments')
    # Get test we inserted before migration
    test_row = list(tests_t.select().where(
        tests_t.c.test_id == data['test']['test_id']).execute())[0]
    self.assertIn(test_row[0], test_run_test_ids)
    # Check test run; timestamps are compared with microseconds zeroed
    # because some backends truncate sub-second precision.
    test_run_row = list(test_runs_t.select().where(
        test_runs_t.c.test_id == test_row[0]).execute())[0]
    self.assertEqual(test_run_row[3], data['test_run']['status'])
    self.assertEqual(test_run_row[4].replace(microsecond=0),
                     data['test_run']['start_time'].replace(microsecond=0))
    self.assertEqual(test_run_row[5].replace(microsecond=0),
                     data['test_run']['stop_time'].replace(microsecond=0))
    self.assertIn(test_run_row[2], run_ids)
    # Check run
    run_row = list(
        runs_t.select().where(runs_t.c.id == test_run_row[2]).execute())[0]
    self.assertEqual(data['run']['artifacts'], run_row[6])
    self.assertEqual(data['run']['id'], run_row[0])
    # Check run metadata
    run_metadata_row = list(run_metadatas_t.select().where(
        run_metadatas_t.c.run_id == run_row[1]).execute())[0]
    self.assertEqual(data['run_metadata']['key'], run_metadata_row[1])
    self.assertEqual(data['run_metadata']['value'], run_metadata_row[2])
    # Check test metadata
    test_metadata_row = list(test_metadatas_t.select().where(
        test_metadatas_t.c.test_id == test_row[0]).execute())[0]
    self.assertEqual(data['test_metadata']['key'], test_metadata_row[1])
    self.assertEqual(data['test_metadata']['value'], test_metadata_row[2])
    # Check test run metadata
    test_run_metadata_row = list(test_run_metadatas_t.select().where(
        test_run_metadatas_t.c.test_run_id == test_run_row[0]).execute())
    test_run_metadata_row = test_run_metadata_row[0]
    self.assertEqual(data['test_run_metadata']['key'],
                     test_run_metadata_row[1])
    self.assertEqual(data['test_run_metadata']['value'],
                     test_run_metadata_row[2])
    # Check attachment
    attachment_row = list(attachments_t.select().where(
        attachments_t.c.test_run_id == test_run_row[0]).execute())[0]
    self.assertEqual(data['attachment']['label'], attachment_row[2])
    self.assertEqual(data['attachment']['attachment'], attachment_row[3])
def upgrade():
    """Rename approved_email to automated_email and wire emails to it.

    Steps, in order: dedupe approved_email rows by ident, rename the
    table, add the new automated_email columns and keys (sqlite via
    batch_alter_table, other dialects via plain ops), extend the email
    table with a FK to automated_email, then backfill that FK by ident.
    """
    from sqlalchemy.engine import reflection
    inspector = reflection.Inspector(op.get_bind())
    # The pre-rename PK name differs by dialect/naming convention, so
    # collect whatever name is actually present.
    existing_primarykeys = set(
        [inspector.get_pk_constraint('approved_email')['name']])

    # Delete rows in the approved_email table that have duplicate idents
    connection = op.get_bind()
    # sqlite and postgres spell the string-aggregation function differently
    concat = func.group_concat if is_sqlite else func.string_agg
    duplicates = connection.execute(
        concat(cast(approved_email_table.c.id, String), ',').select().group_by(
            approved_email_table.c.ident).having(func.count() > 1))
    for duplicate_ids in duplicates:
        # Keep the lexicographically-first id, delete the rest
        duplicate_ids = sorted(duplicate_ids[0].split(','))[1:]
        if duplicate_ids:
            connection.execute(approved_email_table.delete().where(
                approved_email_table.c.id.in_(duplicate_ids)))

    op.rename_table('approved_email', 'automated_email')

    if is_sqlite:
        # sqlite cannot ALTER in place; batch_alter_table rebuilds the table
        with op.batch_alter_table(
                'automated_email',
                reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
            batch_op.add_column(
                sa.Column('model', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('subject', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('body', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('sender', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('format', sa.Unicode(), server_default='text',
                          nullable=False))
            batch_op.add_column(
                sa.Column('cc', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('bcc', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('approved', sa.Boolean(), server_default='False',
                          nullable=False))
            batch_op.add_column(
                sa.Column('needs_approval', sa.Boolean(),
                          server_default='True', nullable=False))
            batch_op.add_column(
                sa.Column('unapproved_count', sa.Integer(),
                          server_default='0', nullable=False))
            batch_op.add_column(
                sa.Column('allow_post_con', sa.Boolean(),
                          server_default='False', nullable=False))
            batch_op.add_column(
                sa.Column('allow_at_the_con', sa.Boolean(),
                          server_default='False', nullable=False))
            batch_op.add_column(
                sa.Column('active_after', residue.UTCDateTime(),
                          nullable=True))
            batch_op.add_column(
                sa.Column('active_before', residue.UTCDateTime(),
                          nullable=True))
            # Replace whichever legacy PK name exists with the new one
            if 'pk_approved_email' in existing_primarykeys:
                batch_op.drop_constraint('pk_approved_email', type_='primary')
            if 'approved_email_pkey' in existing_primarykeys:
                batch_op.drop_constraint('approved_email_pkey',
                                         type_='primary')
            if 'pk_automated_email' not in existing_primarykeys:
                batch_op.create_primary_key(op.f('pk_automated_email'), ['id'])
            batch_op.create_unique_constraint(op.f('uq_automated_email_ident'),
                                              ['ident'])
            batch_op.create_index(
                op.f('ix_automated_email_active_after_active_before'),
                ['active_after', 'active_before'], unique=False)
    else:
        op.add_column(
            'automated_email',
            sa.Column('model', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('subject', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('body', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('sender', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('format', sa.Unicode(), server_default='text',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('cc', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('bcc', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('approved', sa.Boolean(), server_default='False',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('needs_approval', sa.Boolean(), server_default='True',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('unapproved_count', sa.Integer(), server_default='0',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('allow_post_con', sa.Boolean(), server_default='False',
                      nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('allow_at_the_con', sa.Boolean(),
                      server_default='False', nullable=False))
        op.add_column(
            'automated_email',
            sa.Column('active_after', residue.UTCDateTime(), nullable=True))
        op.add_column(
            'automated_email',
            sa.Column('active_before', residue.UTCDateTime(), nullable=True))
        if 'pk_approved_email' in existing_primarykeys:
            op.drop_constraint('pk_approved_email', 'automated_email',
                               type_='primary')
        if 'approved_email_pkey' in existing_primarykeys:
            op.drop_constraint('approved_email_pkey', 'automated_email',
                               type_='primary')
        if 'pk_automated_email' not in existing_primarykeys:
            op.create_primary_key(op.f('pk_automated_email'),
                                  'automated_email', ['id'])
        op.create_unique_constraint(op.f('uq_automated_email_ident'),
                                    'automated_email', ['ident'])
        # NOTE(review): unlike the sqlite branch, this branch never creates
        # ix_automated_email_active_after_active_before — confirm whether
        # that is intentional.

    if is_sqlite:
        with op.batch_alter_table(
                'email', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
            batch_op.alter_column('dest', new_column_name='to')
            batch_op.add_column(
                sa.Column('bcc', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('cc', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('automated_email_id', residue.UUID(),
                          nullable=True))
            batch_op.add_column(
                sa.Column('sender', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.create_foreign_key(
                op.f('fk_email_automated_email_id_automated_email'),
                'automated_email', ['automated_email_id'], ['id'],
                ondelete='set null')
    else:
        op.alter_column('email', 'dest', new_column_name='to')
        op.add_column(
            'email',
            sa.Column('bcc', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'email',
            sa.Column('cc', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'email',
            sa.Column('automated_email_id', residue.UUID(), nullable=True))
        op.add_column(
            'email',
            sa.Column('sender', sa.Unicode(), server_default='',
                      nullable=False))
        op.create_foreign_key(
            op.f('fk_email_automated_email_id_automated_email'), 'email',
            'automated_email', ['automated_email_id'], ['id'],
            ondelete='set null')

    # All pre-existing rows came from the approved_email table, so mark
    # them approved.
    connection.execute(automated_email_table.update().values(
        {'approved': True}))

    # Use the ident to try to establish a foreign key relationship
    # between the email and automated_email tables
    automated_emails = connection.execute(automated_email_table.select())
    for automated_email in automated_emails:
        connection.execute(email_table.update().where(
            email_table.c.ident == automated_email.ident).values(
                {'automated_email_id': automated_email.id}))

    if is_sqlite:
        with op.batch_alter_table(
                'attendee', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
            batch_op.create_index(op.f('ix_attendee_placeholder'),
                                  ['placeholder'], unique=False)
    else:
        op.create_index(op.f('ix_attendee_placeholder'), 'attendee',
                        ['placeholder'], unique=False)
def upgrade():
    """Finish the bands -> guests refactor at the schema level.

    Drops all leftover band_* foreign keys, primary keys, and unique
    constraints (only those that actually exist, per reflection), then
    recreates them under guest_* names, and finally adds the new
    guest_merch columns.  sqlite goes through batch_alter_table; other
    dialects use plain ops.
    """
    # Leftover renaming from the bands -> guests refactor
    from sqlalchemy.engine import reflection
    inspector = reflection.Inspector(op.get_bind())

    # [table, [constraint names to drop if present]]
    foreignkeys_to_drop = [
        ['guest_group', ['fk_band_event_id_event', 'fk_band_group_id_group']],
        ['guest_bio', ['band_bio_band_id_key', 'fk_band_bio_band_id_band']],
        [
            'guest_charity',
            ['band_charity_band_id_key', 'fk_band_charity_band_id_band']
        ],
        ['guest_info', ['band_info_band_id_key', 'fk_band_info_band_id_band']],
        [
            'guest_merch',
            ['band_merch_band_id_key', 'fk_band_merch_band_id_band']
        ],
        [
            'guest_panel',
            ['band_panel_band_id_key', 'fk_band_panel_band_id_band']
        ],
        [
            'guest_stage_plot',
            ['band_stage_plot_band_id_key', 'fk_band_stage_plot_band_id_band']
        ],
        [
            'guest_taxes',
            ['band_taxes_band_id_key', 'fk_band_taxes_band_id_band']
        ],
        ['guest_autograph', ['fk_guest_autograph_guest_id_guest_group']],
        ['guest_interview', ['fk_guest_interview_guest_id_guest_group']],
        ['guest_travel_plans', ['fk_guest_travel_plans_guest_id_guest_group']],
    ]
    primarykeys_to_drop = [
        ['guest_group', ['pk_band']],
        ['guest_bio', ['pk_band_bio']],
        ['guest_charity', ['pk_band_charity']],
        ['guest_info', ['pk_band_info']],
        ['guest_merch', ['pk_band_merch']],
        ['guest_panel', ['pk_band_panel']],
        ['guest_stage_plot', ['pk_band_stage_plot']],
        ['guest_taxes', ['pk_band_taxes']],
    ]
    uniqueconstraints_to_drop = [
        ['guest_bio', ['uq_band_bio_band_id']],
        ['guest_charity', ['uq_band_charity_band_id']],
        ['guest_info', ['uq_band_info_band_id']],
        ['guest_merch', ['uq_band_merch_band_id']],
        ['guest_panel', ['uq_band_panel_band_id']],
        ['guest_stage_plot', ['uq_band_stage_plot_band_id']],
        ['guest_taxes', ['uq_band_taxes_band_id']],
    ]

    # Drop old band_* foreign keys that still exist
    for table, foreignkeys in foreignkeys_to_drop:
        existing_foreignkeys = set(
            map(lambda x: x['name'], inspector.get_foreign_keys(table)))
        if is_sqlite:
            with op.batch_alter_table(
                    table, reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
                for foreignkey in foreignkeys:
                    if foreignkey in existing_foreignkeys:
                        batch_op.drop_constraint(foreignkey,
                                                 type_='foreignkey')
        else:
            for foreignkey in foreignkeys:
                if foreignkey in existing_foreignkeys:
                    op.drop_constraint(foreignkey, table, type_='foreignkey')

    # Drop old band_* primary keys that still exist
    for table, primarykeys in primarykeys_to_drop:
        existing_primarykeys = set(
            [inspector.get_pk_constraint(table)['name']])
        if is_sqlite:
            with op.batch_alter_table(
                    table, reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
                for primarykey in primarykeys:
                    if primarykey in existing_primarykeys:
                        batch_op.drop_constraint(primarykey, type_='primary')
        else:
            for primarykey in primarykeys:
                if primarykey in existing_primarykeys:
                    op.drop_constraint(primarykey, table, type_='primary')

    # Drop old band_* unique constraints that still exist
    for table, uniqueconstraints in uniqueconstraints_to_drop:
        existing_uniqueconstraints = set(
            map(lambda x: x['name'], inspector.get_unique_constraints(table)))
        if is_sqlite:
            with op.batch_alter_table(
                    table, reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
                for uniqueconstraint in uniqueconstraints:
                    if uniqueconstraint in existing_uniqueconstraints:
                        batch_op.drop_constraint(uniqueconstraint,
                                                 type_='unique')
        else:
            for uniqueconstraint in uniqueconstraints:
                if uniqueconstraint in existing_uniqueconstraints:
                    op.drop_constraint(uniqueconstraint, table,
                                       type_='unique')

    # [table, [(constraint name, column)]]
    uniqueconstraints_to_add = [
        ['guest_bio', [('uq_guest_bio_guest_id', 'guest_id')]],
        ['guest_charity', [('uq_guest_charity_guest_id', 'guest_id')]],
        ['guest_info', [('uq_guest_info_guest_id', 'guest_id')]],
        ['guest_merch', [('uq_guest_merch_guest_id', 'guest_id')]],
        ['guest_panel', [('uq_guest_panel_guest_id', 'guest_id')]],
        ['guest_stage_plot', [('uq_guest_stage_plot_guest_id', 'guest_id')]],
        ['guest_taxes', [('uq_guest_taxes_guest_id', 'guest_id')]],
    ]
    primarykeys_to_add = [
        ['guest_group', [('pk_guest_group', 'id')]],
        ['guest_bio', [('pk_guest_bio', 'id')]],
        ['guest_charity', [('pk_guest_charity', 'id')]],
        ['guest_info', [('pk_guest_info', 'id')]],
        ['guest_merch', [('pk_guest_merch', 'id')]],
        ['guest_panel', [('pk_guest_panel', 'id')]],
        ['guest_stage_plot', [('pk_guest_stage_plot', 'id')]],
        ['guest_taxes', [('pk_guest_taxes', 'id')]],
    ]
    # [table, [(fk name, remote table, local column, remote column)]]
    foreignkeys_to_add = [
        [
            'guest_group',
            [('fk_guest_group_event_id_event', 'event', 'event_id', 'id'),
             ('fk_guest_group_group_id_group', 'group', 'group_id', 'id')]
        ],
        [
            'guest_bio',
            [('fk_guest_bio_guest_id_guest_group', 'guest_group', 'guest_id',
              'id')]
        ],
        [
            'guest_charity',
            [('fk_guest_charity_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_info',
            [('fk_guest_info_guest_id_guest_group', 'guest_group', 'guest_id',
              'id')]
        ],
        [
            'guest_merch',
            [('fk_guest_merch_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_panel',
            [('fk_guest_panel_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_stage_plot',
            [('fk_guest_stage_plot_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_taxes',
            [('fk_guest_taxes_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_autograph',
            [('fk_guest_autograph_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_interview',
            [('fk_guest_interview_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
        [
            'guest_travel_plans',
            [('fk_guest_travel_plans_guest_id_guest_group', 'guest_group',
              'guest_id', 'id')]
        ],
    ]

    # Recreate primary keys under guest_* names
    for table, primarykeys in primarykeys_to_add:
        if is_sqlite:
            with op.batch_alter_table(
                    table, reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
                for primarykey, column in primarykeys:
                    batch_op.create_primary_key(op.f(primarykey), [column])
        else:
            for primarykey, column in primarykeys:
                op.create_primary_key(op.f(primarykey), table, [column])

    # Recreate unique constraints under guest_* names
    for table, uniqueconstraints in uniqueconstraints_to_add:
        if is_sqlite:
            with op.batch_alter_table(
                    table, reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
                for uniqueconstraint, column in uniqueconstraints:
                    batch_op.create_unique_constraint(op.f(uniqueconstraint),
                                                      [column])
        else:
            for uniqueconstraint, column in uniqueconstraints:
                op.create_unique_constraint(op.f(uniqueconstraint), table,
                                            [column])

    # Recreate foreign keys under guest_* names
    for table, foreignkeys in foreignkeys_to_add:
        if is_sqlite:
            with op.batch_alter_table(
                    table, reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
                for foreignkey, remote_table, column, remote_column in foreignkeys:
                    batch_op.create_foreign_key(op.f(foreignkey),
                                                remote_table, [column],
                                                [remote_column])
        else:
            for foreignkey, remote_table, column, remote_column in foreignkeys:
                op.create_foreign_key(op.f(foreignkey), table, remote_table,
                                      [column], [remote_column])

    # Add the new guest_merch point-of-contact / inventory columns
    if is_sqlite:
        with op.batch_alter_table(
                'guest_merch', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
            batch_op.add_column(
                sa.Column('bringing_boxes', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('extra_info', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('inventory', sideboard.lib.sa.JSON(),
                          server_default='{}', nullable=False))
            batch_op.add_column(
                sa.Column('handlers', sideboard.lib.sa.JSON(),
                          server_default='[]', nullable=False))
            batch_op.add_column(
                sa.Column('poc_email', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_first_name', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_last_name', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_phone', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_zip_code', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_address1', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_address2', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_city', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_region', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_country', sa.Unicode(), server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('poc_is_group_leader', sa.Boolean(),
                          server_default='False', nullable=False))
    else:
        op.add_column(
            'guest_merch',
            sa.Column('bringing_boxes', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('extra_info', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('inventory', sideboard.lib.sa.JSON(),
                      server_default='{}', nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('handlers', sideboard.lib.sa.JSON(),
                      server_default='[]', nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_email', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_first_name', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_last_name', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_phone', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_zip_code', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_address1', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_address2', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_city', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_region', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_country', sa.Unicode(), server_default='',
                      nullable=False))
        op.add_column(
            'guest_merch',
            sa.Column('poc_is_group_leader', sa.Boolean(),
                      server_default='False', nullable=False))