def downgrade():
    """Remove the bounce-tracking columns from the member table."""
    # SQLite cannot drop columns, so the downgrade is a no-op there.
    if not is_sqlite(op.get_bind()):
        for column_name in ('total_warnings_sent',
                            'last_warning_sent',
                            'last_bounce_received',
                            'bounce_score'):
            op.drop_column('member', column_name)   # pragma: nocover
def upgrade():
    """Move header matches out of the pickled ``mailinglist`` column and
    into their own ``headermatch`` table.
    """
    # Build the destination table first.
    header_match_table = op.create_table(
        'headermatch',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('mailing_list_id', sa.Integer(), nullable=True),
        sa.Column('header', sa.Unicode(), nullable=False),
        sa.Column('pattern', sa.Unicode(), nullable=False),
        sa.Column('chain', sa.Unicode(), nullable=True),
        sa.ForeignKeyConstraint(['mailing_list_id'], ['mailinglist.id']),
        sa.PrimaryKeyConstraint('id'))
    # The copy must run online; the legacy column stores pickles which
    # can only be decoded in Python.
    connection = op.get_bind()
    # Deliberately avoid the ORM model here -- a lightweight table
    # construct won't break when the model changes later (see the
    # Alembic documentation).
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('header_matches', sa.PickleType))
    for mlist_id, old_matches in connection.execute(mlist_table.select()):
        for old_match in old_matches:
            header, pattern = old_match[0], old_match[1]
            connection.execute(header_match_table.insert().values(
                mailing_list_id=mlist_id,
                header=header,
                pattern=pattern,
                chain=None))
    # Data is migrated; retire the pickled column.  SQLite cannot drop
    # columns, so skip this step there.
    if not is_sqlite(connection):
        op.drop_column('mailinglist', 'header_matches')
def upgrade():
    """Migrate pickled header matches to a dedicated ``headermatch``
    table, then drop the old column where the backend allows it.
    """
    # New table to hold one row per header match.
    header_match_table = op.create_table(
        'headermatch',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('mailing_list_id', sa.Integer(), nullable=True),
        sa.Column('header', sa.Unicode(), nullable=False),
        sa.Column('pattern', sa.Unicode(), nullable=False),
        sa.Column('chain', sa.Unicode(), nullable=True),
        sa.ForeignKeyConstraint(['mailing_list_id'], ['mailinglist.id']),
        sa.PrimaryKeyConstraint('id'))
    # This step can't be offline: the pickles in the old column have to
    # be loaded in-process.
    connection = op.get_bind()
    # Use an ad-hoc table construct rather than importing the model, so
    # future model changes can't break this migration (per Alembic docs).
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('header_matches', sa.PickleType))
    for mlist_id, old_matches in connection.execute(mlist_table.select()):
        for old_match in old_matches:
            connection.execute(header_match_table.insert().values(
                mailing_list_id=mlist_id,
                header=old_match[0],
                pattern=old_match[1],
                chain=None))
    # Finally drop the superseded column -- except on SQLite, which has
    # no support for dropping columns.
    if not is_sqlite(connection):
        op.drop_column('mailinglist', 'header_matches')
def upgrade():
    """Drop the legacy ``version`` table, retire the
    ``acceptable_aliases_id`` column, and rename the user-id index.
    """
    op.drop_table('version')
    bind = op.get_bind()
    if not is_sqlite(bind):
        # SQLite has no support for dropping columns.
        op.drop_column('mailinglist', 'acceptable_aliases_id')
    # Recreate the index under its new conventional name, then remove
    # the old one.
    op.create_index(
        op.f('ix_user__user_id'), 'user', ['_user_id'], unique=False)
    op.drop_index('ix_user_user_id', table_name='user')
def upgrade():
    """Remove the ``version`` table and obsolete alias column; swap the
    user-id index for its renamed replacement.
    """
    op.drop_table('version')
    if not is_sqlite(op.get_bind()):
        # Dropping columns is unsupported on SQLite.
        op.drop_column('mailinglist', 'acceptable_aliases_id')
    op.create_index(op.f('ix_user__user_id'), 'user', ['_user_id'],
                    unique=False)
    op.drop_index('ix_user_user_id', table_name='user')
def downgrade():
    """Undo the server-owner migration: drop the flag, restore the
    contact address column, and remove the association table.
    """
    bind = op.get_bind()
    if not is_sqlite(bind):
        op.drop_column('user', 'is_server_owner')
    if not exists_in_db(bind, 'domain', 'contact_address'):
        # On SQLite the column may still be present.
        op.add_column(
            'domain',
            sa.Column('contact_address', sa.VARCHAR(), nullable=True))
    op.drop_table('domain_owner')
def downgrade():
    """Reverse the domain-owner upgrade."""
    connection = op.get_bind()
    if not is_sqlite(connection):
        # SQLite cannot drop columns.
        op.drop_column('user', 'is_server_owner')
    if not exists_in_db(connection, 'domain', 'contact_address'):
        # Re-add only when missing; SQLite may never have removed it.
        op.add_column(
            'domain',
            sa.Column('contact_address', sa.VARCHAR(), nullable=True))
    op.drop_table('domain_owner')
def downgrade():
    """Convert the affected text columns back to binary storage."""
    bind = op.get_bind()
    if is_sqlite(bind):
        # SQLite cannot alter column types; nothing to do.
        return
    is_postgres = bind.dialect.name == 'postgresql'
    for table, column in COLUMNS_TO_CHANGE:
        if is_postgres:
            # Alembic can't emit the USING clause that PostgreSQL
            # requires for this conversion yet, so issue raw SQL.
            op.execute(
                ('ALTER TABLE "{table}" ALTER COLUMN "{column}" '
                 'TYPE BYTEA USING decode("{column}", \'UTF8\')').format(
                    table=table, column=column))
        else:
            op.alter_column(table, column, type_=sa.LargeBinary)
def downgrade():
    """Restore the listed columns to their binary (BYTEA/LargeBinary)
    representation.
    """
    connection = op.get_bind()
    if is_sqlite(connection):
        # Altering column types is not supported on SQLite.
        return
    for table, column in COLUMNS_TO_CHANGE:
        if connection.dialect.name != 'postgresql':
            op.alter_column(table, column, type_=sa.LargeBinary)
        else:
            # PostgreSQL insists on a USING clause, which Alembic does
            # not support yet -- fall back to a raw statement.
            op.execute(
                ('ALTER TABLE "{table}" ALTER COLUMN "{column}" '
                 'TYPE BYTEA USING decode("{column}", \'UTF8\')').format(
                    table=table, column=column))
def upgrade():
    """Introduce the ``domain_owner`` association table, ensure the
    ``is_server_owner`` flag exists, and retire ``contact_address``.
    """
    op.create_table(
        'domain_owner',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('domain_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['domain_id'], ['domain.id']),
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint('user_id', 'domain_id'))
    bind = op.get_bind()
    if not exists_in_db(bind, 'user', 'is_server_owner'):
        # A previous downgrade on SQLite may have left the column alone.
        op.add_column(
            'user',
            sa.Column('is_server_owner', sa.Boolean(), nullable=True))
    if not is_sqlite(bind):
        op.drop_column('domain', 'contact_address')
def upgrade():
    """Create the domain/user ownership link table and adjust the user
    and domain schemas accordingly.
    """
    op.create_table(
        'domain_owner',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('domain_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['domain_id'], ['domain.id']),
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint('user_id', 'domain_id'))
    connection = op.get_bind()
    if not exists_in_db(connection, 'user', 'is_server_owner'):
        # Only add the flag when it's absent -- a SQLite downgrade may
        # not have removed it.
        op.add_column(
            'user',
            sa.Column('is_server_owner', sa.Boolean(), nullable=True))
    if not is_sqlite(connection):
        # SQLite can't drop columns.
        op.drop_column('domain', 'contact_address')
def upgrade():
    """Alter the listed columns to the ``SAUnicode`` type."""
    if is_sqlite(op.get_bind()):
        # SQLite has no support for altering column types.
        return
    for table_name, column_name in COLUMNS_TO_CHANGE:
        op.alter_column(table_name, column_name, type_=SAUnicode)
def downgrade():
    """Drop ``mailinglist.subscription_policy`` (no-op on SQLite)."""
    if is_sqlite(op.get_bind()):
        # SQLite does not support dropping columns.
        return
    op.drop_column('mailinglist', 'subscription_policy')
def downgrade():
    """Drop ``headermatch.tag`` (no-op on SQLite)."""
    if is_sqlite(op.get_bind()):
        return
    # diffcov runs with SQLite so this isn't covered.
    op.drop_column('headermatch', 'tag')        # pragma: nocover
def downgrade():
    """Drop ``mailinglist.member_roster_visibility`` (no-op on
    SQLite, which cannot drop columns)."""
    if is_sqlite(op.get_bind()):
        return
    op.drop_column(
        'mailinglist', 'member_roster_visibility')  # pragma: nocover
def upgrade():
    """Alter the listed columns to ``sa.Unicode``."""
    if is_sqlite(op.get_bind()):
        # Column types can't be altered on SQLite.
        return
    for table_name, column_name in COLUMNS_TO_CHANGE:
        op.alter_column(table_name, column_name, type_=sa.Unicode)
def upgrade():
    """Drop ``mailinglist.mime_is_default_digest`` (no-op on SQLite)."""
    if is_sqlite(op.get_bind()):
        # SQLite does not support dropping columns.
        return
    op.drop_column(                             # pragma: nocover
        'mailinglist', 'mime_is_default_digest')  # pragma: nocover