def downgrade():
    """Move header matches out of the ``headermatch`` table and back into
    the pickled ``mailinglist.header_matches`` column, then drop the table.
    """
    if not exists_in_db(op.get_bind(), 'mailinglist', 'header_matches'):
        # SQLite will not have deleted the former column, since it does not
        # support column deletion.
        op.add_column(
            'mailinglist',
            sa.Column('header_matches', sa.PickleType, nullable=True))
    # Now migrate the data. It can't be offline because we need to read the
    # pickles.
    connection = op.get_bind()
    # Don't import the table definition from the models, it may break this
    # migration when the model is updated in the future (see the Alembic doc).
    mlist_table = sa.sql.table('mailinglist',
                               sa.sql.column('id', sa.Integer),
                               sa.sql.column('header_matches', sa.PickleType))
    header_match_table = sa.sql.table(
        'headermatch',
        sa.sql.column('mailing_list_id', sa.Integer),
        sa.sql.column('header', sa.Unicode),
        sa.sql.column('pattern', sa.Unicode),
        )
    # For each headermatch row, append its (header, pattern) pair to the
    # owning list's pickled column.  Note this re-reads and re-writes the
    # same pickle once per row; correctness relies on the UPDATE being
    # visible to the following SELECT within the same connection.
    for mlist_id, header, pattern in connection.execute(
            header_match_table.select()).fetchall():
        mlist = connection.execute(mlist_table.select().where(
            mlist_table.c.id == mlist_id)).fetchone()
        # NOTE(review): assumes every headermatch row references an existing
        # mailinglist (mlist would be None otherwise) — presumably enforced
        # by a foreign key; verify against the schema.
        header_matches = mlist['header_matches']
        if not header_matches:
            # The column may be NULL for lists with no prior matches.
            header_matches = []
        header_matches.append((header, pattern))
        connection.execute(
            mlist_table.update().where(mlist_table.c.id == mlist_id).values(
                header_matches=header_matches))
    op.drop_table('headermatch')
def upgrade():
    """Add the ``member_roster_visibility`` column to ``mailinglist``."""
    # ### commands auto generated by Alembic - please adjust! ###
    bind = op.get_bind()
    # The column may already be present (e.g. SQLite cannot drop columns
    # on downgrade), in which case there is nothing to do.
    if exists_in_db(bind, 'mailinglist', 'member_roster_visibility'):
        return
    op.add_column(                                      # pragma: nocover
        'mailinglist',
        sa.Column('member_roster_visibility', sa.Integer(), nullable=True))
def downgrade():
    """Move header matches out of the ``headermatch`` table and back into
    the pickled ``mailinglist.header_matches`` column, then drop the table.
    """
    if not exists_in_db(op.get_bind(), 'mailinglist', 'header_matches'):
        # SQLite will not have deleted the former column, since it does not
        # support column deletion.
        op.add_column(
            'mailinglist',
            sa.Column('header_matches', sa.PickleType, nullable=True))
    # Now migrate the data.  It can't be offline because we need to read the
    # pickles.
    connection = op.get_bind()
    # Don't import the table definition from the models, it may break this
    # migration when the model is updated in the future (see the Alembic doc).
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('header_matches', sa.PickleType)
        )
    header_match_table = sa.sql.table(
        'headermatch',
        sa.sql.column('mailing_list_id', sa.Integer),
        sa.sql.column('header', sa.Unicode),
        sa.sql.column('pattern', sa.Unicode),
        )
    # Group the rows by mailing list first so that each mailing list's
    # pickle is read and written exactly once, instead of issuing one
    # SELECT and one UPDATE per headermatch row (an N+1 pattern that also
    # re-pickled the growing list on every iteration).  Row order, and
    # therefore the order of the (header, pattern) pairs, is preserved.
    matches_by_list = {}
    for mlist_id, header, pattern in connection.execute(
            header_match_table.select()).fetchall():
        matches_by_list.setdefault(mlist_id, []).append((header, pattern))
    for mlist_id, matches in matches_by_list.items():
        mlist = connection.execute(mlist_table.select().where(
            mlist_table.c.id == mlist_id)).fetchone()
        # The column may be NULL; treat that the same as an empty list.
        header_matches = mlist['header_matches'] or []
        header_matches.extend(matches)
        connection.execute(mlist_table.update().where(
            mlist_table.c.id == mlist_id).values(
                header_matches=header_matches))
    op.drop_table('headermatch')
def downgrade():
    """Restore the pre-migration schema: recreate the ``version`` table,
    revert the user-id index rename, and re-add ``acceptable_aliases_id``.
    """
    op.create_table('version')
    # Swap the user-id index back to its former name.
    op.create_index('ix_user_user_id', 'user', ['_user_id'], unique=False)
    op.drop_index(op.f('ix_user__user_id'), table_name='user')
    # SQLite cannot drop columns, so the column may still be present from
    # the upgrade; only add it back when it is genuinely missing.
    bind = op.get_bind()
    if exists_in_db(bind, 'mailinglist', 'acceptable_aliases_id'):
        return
    op.add_column(
        'mailinglist',
        sa.Column('acceptable_aliases_id', sa.INTEGER(), nullable=True))
def downgrade():
    """Undo the server-owner migration: drop ``user.is_server_owner``,
    restore ``domain.contact_address``, and drop the ``domain_owner`` table.
    """
    bind = op.get_bind()
    # SQLite does not support dropping columns, so skip the DROP there.
    if not is_sqlite(bind):
        op.drop_column('user', 'is_server_owner')
    # SQLite may not have removed contact_address on upgrade; restore it
    # only when it is actually missing.
    if not exists_in_db(bind, 'domain', 'contact_address'):
        op.add_column(
            'domain',
            sa.Column('contact_address', sa.VARCHAR(), nullable=True))
    op.drop_table('domain_owner')
def upgrade():
    """Add the bounce-tracking columns to the ``member`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    new_columns = (
        sa.Column('bounce_score', sa.Integer(), nullable=True),
        sa.Column('last_bounce_received', sa.DateTime(), nullable=True),
        sa.Column('last_warning_sent', sa.DateTime(), nullable=True),
        sa.Column('total_warnings_sent', sa.Integer(), nullable=True),
        )
    bind = op.get_bind()
    for column in new_columns:
        # Skip any column that is already present (e.g. SQLite could not
        # drop it on a previous downgrade).
        if not exists_in_db(bind, 'member', column.name):
            op.add_column('member', column)             # pragma: nocover
def upgrade():
    """Re-add the ``usenet_watermark`` column to ``mailinglist``."""
    # SQLite may not have removed the column when downgrading, so only
    # add it when it is genuinely absent.
    if exists_in_db(op.get_bind(), 'mailinglist', 'usenet_watermark'):
        return
    op.add_column(
        'mailinglist',
        sa.Column('usenet_watermark', sa.Integer, nullable=True))
def downgrade():
    """Move list-context templates from the ``template`` table back into
    per-list URI columns on ``mailinglist``, drop the template/file-cache
    tables, and remove the on-disk cache directories.
    """
    # Add back the original columns to the mailinglist table.
    for column in CONVERSION_MAPPING:
        if not exists_in_db(op.get_bind(), 'mailinglist', column):
            op.add_column('mailinglist',
                          sa.Column(column, SAUnicode, nullable=True))
    op.add_column('domain', sa.Column('base_url', SAUnicode))
    # Put all the templates with a context mapping the list-id back into the
    # mailinglist table.  No other contexts are supported, so just throw those
    # away.
    # Don't import the table definitions from the models; describe them
    # locally so future model changes cannot break this migration.
    template_table = sa.sql.table(
        'template',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('name', SAUnicode),
        sa.sql.column('context', SAUnicode),
        sa.sql.column('uri', SAUnicode),
        sa.sql.column('username', SAUnicode),
        sa.sql.column('password', SAUnicode),
        )
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('list_id', SAUnicode),
        sa.sql.column('digest_footer_uri', SAUnicode),
        sa.sql.column('digest_header_uri', SAUnicode),
        sa.sql.column('footer_uri', SAUnicode),
        sa.sql.column('header_uri', SAUnicode),
        sa.sql.column('goodbye_message_uri', SAUnicode),
        sa.sql.column('welcome_message_uri', SAUnicode),
        )
    connection = op.get_bind()
    for (table_id, name, context, uri,
         username, password) in connection.execute(
            template_table.select()).fetchall():
        # A template whose context does not name an existing list (i.e. a
        # non-list context) is simply discarded.
        mlist = connection.execute(mlist_table.select().where(
            mlist_table.c.list_id == context)).fetchone()
        if mlist is None:
            continue
        # Map the template name back to the mailinglist attribute it came
        # from; unknown names are dropped.
        attribute = REVERSE_MAPPING.get(name)
        if attribute is not None:
            connection.execute(mlist_table.update().where(
                mlist_table.c.list_id == context).values(**{attribute: uri}))
    op.drop_table('file_cache')
    op.drop_table('template')
    # Also delete the file cache directories.  Don't delete the cache
    # directory itself though.
    for path in os.listdir(config.CACHE_DIR):
        full_path = os.path.join(config.CACHE_DIR, path)
        if os.path.isdir(full_path):
            shutil.rmtree(full_path)
def upgrade():
    """Create the ``domain_owner`` link table and move server ownership
    onto the ``user`` table.
    """
    bind = op.get_bind()
    op.create_table(
        'domain_owner',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('domain_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['domain_id'], ['domain.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'domain_id'),
        )
    # SQLite may not have removed is_server_owner when downgrading, so
    # only add it when missing.
    if not exists_in_db(bind, 'user', 'is_server_owner'):
        op.add_column(
            'user',
            sa.Column('is_server_owner', sa.Boolean(), nullable=True))
    # SQLite does not support dropping columns.
    if not is_sqlite(bind):
        op.drop_column('domain', 'contact_address')
def upgrade():
    """Give every mailing list an explicit subscription policy."""
    # Schema change.  SQLite may not have removed the column when
    # downgrading, so only add it when it is missing.
    if not exists_in_db(op.get_bind(), 'mailinglist', 'subscription_policy'):
        op.add_column(
            'mailinglist',
            sa.Column('subscription_policy', Enum(SubscriptionPolicy),
                      nullable=True))
    # Data migration.  Describe the table locally instead of importing the
    # model, which may drift in the future (see the Alembic doc).
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('subscription_policy', Enum(SubscriptionPolicy)))
    # No subscription policy was enforced before, so every existing list
    # is considered open.
    op.execute(mlist_table.update().values(
        subscription_policy=op.inline_literal(SubscriptionPolicy.open)))
def upgrade():
    """Add the DMARC mitigation settings to the ``mailinglist`` table and
    initialize them to their defaults.
    """
    # SQLite may not have removed the columns when downgrading.  The four
    # columns are added and removed together, so testing one is enough.
    if not exists_in_db(op.get_bind(), 'mailinglist', 'dmarc_mitigate_action'):
        for name, column_type in (
                ('dmarc_mitigate_action', Enum(DMARCMitigateAction)),
                ('dmarc_mitigate_unconditionally', sa.Boolean),
                ('dmarc_moderation_notice', SAUnicodeLarge()),
                ('dmarc_wrapped_message_text', SAUnicodeLarge()),
                ):
            op.add_column(
                'mailinglist', sa.Column(name, column_type, nullable=True))
    # Data migration.  Describe the table locally instead of importing the
    # model, which may drift in the future (see the Alembic doc).
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('dmarc_mitigate_action', Enum(DMARCMitigateAction)),
        sa.sql.column('dmarc_mitigate_unconditionally', sa.Boolean),
        sa.sql.column('dmarc_moderation_notice', SAUnicodeLarge()),
        sa.sql.column('dmarc_wrapped_message_text', SAUnicodeLarge()),
        )
    # These are all brand-new attributes, so simply set their defaults.
    op.execute(mlist_table.update().values(
        dmarc_mitigate_action=op.inline_literal(
            DMARCMitigateAction.no_mitigation),
        dmarc_mitigate_unconditionally=op.inline_literal(False),
        dmarc_moderation_notice=op.inline_literal(''),
        dmarc_wrapped_message_text=op.inline_literal(''),
        ))
def upgrade():
    """Add an explicit unsubscription policy to every mailing list and
    drop the obsolete ``workflowstate.name`` column.
    """
    # SQLite may not have removed the column when downgrading, so only
    # add it when it is missing.
    if not exists_in_db(op.get_bind(), 'mailinglist', 'unsubscription_policy'):
        op.add_column(
            'mailinglist',
            sa.Column('unsubscription_policy', Enum(SubscriptionPolicy),
                      nullable=True))
    # Data migration.  Describe the table locally instead of importing the
    # model, which may drift in the future (see the Alembic doc).
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('unsubscription_policy', Enum(SubscriptionPolicy)))
    # There was no unsubscription policy before; default to confirm.
    op.execute(mlist_table.update().values(
        unsubscription_policy=op.inline_literal(SubscriptionPolicy.confirm)))
    # batch_alter_table lets SQLite emulate the column drop.
    with op.batch_alter_table('workflowstate') as batch_op:
        batch_op.drop_column('name')
def test_42756496720_header_matches(self):
    """Round-trip header matches through the 42756496720 migration."""
    expected = [
        ('test-header-1', 'test-pattern-1'),
        ('test-header-2', 'test-pattern-2'),
        ('test-header-3', 'test-pattern-3'),
        ]
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('header_matches', sa.PickleType))
    header_match_table = sa.sql.table(
        'headermatch',
        sa.sql.column('mailing_list_id', sa.Integer),
        sa.sql.column('header', SAUnicode),
        sa.sql.column('pattern', SAUnicode),
        )
    # Bring the DB to the revision that is being tested.
    alembic.command.downgrade(alembic_cfg, '42756496720')
    # Populate one mailing list and its header matches, then downgrade
    # and check the rows were folded into the pickled column.
    store = config.db.store
    store.execute(mlist_table.insert().values(id=1))
    rows = [{'mailing_list_id': 1, 'header': header, 'pattern': pattern}
            for header, pattern in expected]
    store.execute(header_match_table.insert().values(rows))
    store.commit()
    alembic.command.downgrade(alembic_cfg, '2bb9b382198')
    downgraded = store.execute(mlist_table.select()).fetchall()
    self.assertEqual(downgraded[0].header_matches, expected)
    self.assertFalse(exists_in_db(config.db.engine, 'headermatch'))
    store.commit()
    # Upgrading again must re-create the headermatch rows.
    alembic.command.upgrade(alembic_cfg, '42756496720')
    upgraded = store.execute(header_match_table.select()).fetchall()
    self.assertEqual(
        upgraded, [(1, header, pattern) for header, pattern in expected])
def test_42756496720_header_matches(self):
    """Round-trip header matches through the 42756496720 migration.

    Downgrading must fold the ``headermatch`` rows back into the pickled
    ``mailinglist.header_matches`` column and drop the table; upgrading
    must re-create the rows from the pickle.
    """
    test_header_matches = [
        ('test-header-1', 'test-pattern-1'),
        ('test-header-2', 'test-pattern-2'),
        ('test-header-3', 'test-pattern-3'),
        ]
    # Lightweight table descriptions; don't import the models.
    mlist_table = sa.sql.table(
        'mailinglist',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('header_matches', sa.PickleType)
        )
    header_match_table = sa.sql.table(
        'headermatch',
        sa.sql.column('mailing_list_id', sa.Integer),
        sa.sql.column('header', sa.Unicode),
        sa.sql.column('pattern', sa.Unicode),
        )
    # Bring the DB to the revision that is being tested.
    alembic.command.downgrade(alembic_cfg, '42756496720')
    # Test downgrading.
    config.db.store.execute(mlist_table.insert().values(id=1))
    config.db.store.execute(header_match_table.insert().values(
        [{'mailing_list_id': 1, 'header': hm[0], 'pattern': hm[1]}
         for hm in test_header_matches]))
    config.db.store.commit()
    alembic.command.downgrade(alembic_cfg, '2bb9b382198')
    # The rows must now live in the pickled column, and the table is gone.
    results = config.db.store.execute(
        mlist_table.select()).fetchall()
    self.assertEqual(results[0].header_matches, test_header_matches)
    self.assertFalse(exists_in_db(config.db.engine, 'headermatch'))
    config.db.store.commit()
    # Test upgrading.
    alembic.command.upgrade(alembic_cfg, '42756496720')
    results = config.db.store.execute(
        header_match_table.select()).fetchall()
    self.assertEqual(
        results, [(1, hm[0], hm[1]) for hm in test_header_matches])
def upgrade():
    """Add the ``alias_domain`` column to the ``domain`` table."""
    # The column may already exist — presumably left behind by a SQLite
    # downgrade that could not drop it; only add it when missing.
    if exists_in_db(op.get_bind(), 'domain', 'alias_domain'):
        return
    op.add_column(
        'domain', sa.Column('alias_domain', SAUnicode, nullable=True))
def upgrade():
    """Add the ``tag`` column to the ``headermatch`` table."""
    # SQLite may not have removed the column when downgrading, so only
    # add it when it is genuinely absent.
    if exists_in_db(op.get_bind(), 'headermatch', 'tag'):
        return
    op.add_column(
        'headermatch', sa.Column('tag', type_=SAUnicode, nullable=True))
def downgrade():
    """Restore the ``mime_is_default_digest`` column on ``mailinglist``."""
    # Skip the work when the column survived the upgrade (SQLite cannot
    # drop columns).
    if exists_in_db(op.get_bind(), 'mailinglist', 'mime_is_default_digest'):
        return
    op.add_column(
        'mailinglist',
        sa.Column('mime_is_default_digest', sa.BOOLEAN(), nullable=True))