def downgrade():
    tmp_item = op.create_table('tmp_item',
    sa.Column('item_id', sa.Integer(), nullable=False),
    sa.Column('role_id', sa.Integer(), nullable=False),
    sa.Column('item_type', sa.String(), nullable=True),
    sa.Column('content', sa.Text(), nullable=True),
    sa.Column('is_active', sa.Boolean(), server_default='1', nullable=False),
    sa.Column('value_type', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['role_id'], ['role.role_id'], name='fk_item_role' ),
    sa.PrimaryKeyConstraint('item_id')
    )

    for item in connection.execute(item_helper2.select()):
        op.bulk_insert(
            tmp_item,
            [
                {
                    'item_id': item.item_id,
                    'role_id': item.role_focus_id,
                    'item_type': item.item_type,
                    'content': item.content,
                },
            ]
        )

    op.drop_table('item')
    op.rename_table('tmp_item', 'item')
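This snippet assumes connection and item_helper2 are defined earlier in the migration module. A minimal sketch of what those assumed definitions could look like, using Alembic's lightweight table construct (the exact definitions in the source project may differ):

import sqlalchemy as sa
from alembic import op

# Minimal description of the old "item" table; only the columns the
# copy loop reads need to be declared.
item_helper2 = sa.sql.table(
    'item',
    sa.sql.column('item_id', sa.Integer),
    sa.sql.column('role_focus_id', sa.Integer),
    sa.sql.column('item_type', sa.String),
    sa.sql.column('content', sa.Text),
)

# Inside downgrade(), before the copy loop:
connection = op.get_bind()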
def downgrade():
    op.drop_column('user', 'password', schema='bookbrainz')

    op.add_column('rel_type', sa.Column('reverse_template', sa.TEXT(),
                  autoincrement=False, nullable=False), schema='bookbrainz')

    op.alter_column(
        'rel_text', 'relationship_data_id',
        new_column_name='relationship_tree_id', schema='bookbrainz'
    )
    op.alter_column(
        'rel_revision', 'relationship_data_id',
        new_column_name='relationship_tree_id', schema='bookbrainz'
    )

    op.alter_column(
        'rel_entity', 'relationship_data_id',
        new_column_name='relationship_tree_id', schema='bookbrainz'
    )

    op.alter_column(
        'rel_data', 'relationship_data_id',
        new_column_name='relationship_tree_id', schema='bookbrainz'
    )

    op.rename_table('rel_data', 'rel_tree', schema='bookbrainz')
def upgrade():
    entity_types.create(op.get_bind())
    op.rename_table('work_data_language', 'work_data__language', schema='bookbrainz')
    op.add_column('entity', sa.Column('_type', entity_types, nullable=False), schema='bookbrainz')

    # EntityTree -> Entity
    op.create_table('entity_data__alias',
    sa.Column('entity_data_id', sa.Integer(), nullable=False),
    sa.Column('alias_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['alias_id'], ['bookbrainz.alias.alias_id'], ),
    sa.ForeignKeyConstraint(['entity_data_id'], ['bookbrainz.entity_data.entity_data_id'], ),
    sa.PrimaryKeyConstraint('entity_data_id', 'alias_id'),
    schema='bookbrainz'
    )
    op.drop_table('entity_tree_alias', schema='bookbrainz')

    op.drop_constraint(u'entity_revision_entity_tree_id_fkey', 'entity_revision', schema='bookbrainz', type_='foreignkey')
    op.drop_table('entity_tree', schema='bookbrainz')

    op.add_column('entity_data', sa.Column('annotation_id', sa.Integer(), nullable=True), schema='bookbrainz')
    op.add_column('entity_data', sa.Column('default_alias_id', sa.Integer(), nullable=True), schema='bookbrainz')
    op.add_column('entity_data', sa.Column('disambiguation_id', sa.Integer(), nullable=True), schema='bookbrainz')
    op.create_foreign_key(u'entity_data_annotation_id_fkey', 'entity_data', 'annotation', ['annotation_id'], ['annotation_id'], source_schema='bookbrainz', referent_schema='bookbrainz')
    op.create_foreign_key(u'entity_data_disambiguation_id_fkey', 'entity_data', 'disambiguation', ['disambiguation_id'], ['disambiguation_id'], source_schema='bookbrainz', referent_schema='bookbrainz')
    op.create_foreign_key(u'entity_data_default_alias_id_fkey', 'entity_data', 'alias', ['default_alias_id'], ['alias_id'], source_schema='bookbrainz', referent_schema='bookbrainz')

    op.add_column('entity_revision', sa.Column('entity_data_id', sa.Integer(), nullable=False), schema='bookbrainz')
    op.create_foreign_key(u'entity_revision_entity_data_id_fkey', 'entity_revision', 'entity_data', ['entity_data_id'], ['entity_data_id'], source_schema='bookbrainz', referent_schema='bookbrainz')
    op.drop_column('entity_revision', 'entity_tree_id', schema='bookbrainz')
    op.alter_column('rel_type', 'forward_template', new_column_name='template', schema='bookbrainz')
Example #4
    def test_rename_table_schema_casesens(self):
        context = op_fixture("mssql")
        op.rename_table("TeeOne", "TeeTwo", schema="FooBar")
        # yup, ran this in SQL Server 2014; the two levels of quoting
        # seem to be understood. Can't do the two levels on the
        # target name though!
        context.assert_contains("EXEC sp_rename '[FooBar].[TeeOne]', [TeeTwo]")
def upgrade():
    # Rename relationship tree to relationship data
    op.rename_table('rel_tree', 'rel_data', schema='bookbrainz')
    op.alter_column(
        'rel_data', 'relationship_tree_id',
        new_column_name='relationship_data_id', schema='bookbrainz'
    )

    op.alter_column(
        'rel_entity', 'relationship_tree_id',
        new_column_name='relationship_data_id', schema='bookbrainz'
    )

    op.alter_column(
        'rel_revision', 'relationship_tree_id',
        new_column_name='relationship_data_id', schema='bookbrainz'
    )

    op.alter_column(
        'rel_text', 'relationship_tree_id',
        new_column_name='relationship_data_id', schema='bookbrainz'
    )

    # Drop reverse templates
    op.drop_column('rel_type', 'reverse_template', schema='bookbrainz')

    # Add user password field (stores 'bytes' hashes)
    op.add_column('user', sa.Column('password', sa.Text(), nullable=False),
                  schema='bookbrainz')
def downgrade():
    op.rename_table('legacy_records_mirror', 'inspire_prod_records')
    op.execute('ALTER SEQUENCE legacy_records_mirror_recid_seq RENAME TO inspire_prod_records_recid_seq')
    op.execute('ALTER INDEX pk_legacy_records_mirror RENAME TO pk_inspire_prod_records')
    op.execute('ALTER INDEX ix_legacy_records_mirror_last_updated RENAME TO ix_inspire_prod_records_last_updated')
    op.execute('ALTER INDEX ix_legacy_records_mirror_recid RENAME TO ix_inspire_prod_records_recid')
    op.execute('ALTER INDEX ix_legacy_records_mirror_valid RENAME TO ix_inspire_prod_records_valid')
def upgrade():
    # n.b. all this does is move the current methods from the species to the
    # default form; I'm just gonna add ones for alternate forms manually and
    # let the CSV reload take care of that

    op.rename_table('pokemon_species_evolution', 'evolution_methods')

    op.add_column(
        'evolution_methods',
        sa.Column('evolved_form_id', sa.Integer,
                  sa.ForeignKey('pokemon_forms.id'))
    )

    subquery = sa.select(
        [pokemon_forms.c.id],
        sa.and_(
            pokemon_forms.c.species_id ==
                evolution_methods.c.evolved_species_id,
            pokemon_forms.c.is_default
        )
    )

    op.execute(evolution_methods.update()
               .values({'evolved_form_id': subquery}))

    op.drop_column('evolution_methods', 'evolved_species_id')
    op.alter_column('evolution_methods', 'evolved_form_id', nullable=False)
    op.create_primary_key(None, 'evolution_methods', ['evolved_form_id'])
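Note that sa.select([...], whereclause) above is the legacy SQLAlchemy 1.x signature. Under SQLAlchemy 1.4+, the same correlated-subquery UPDATE would read roughly as follows (a sketch reusing the snippet's pokemon_forms and evolution_methods table objects):

# SQLAlchemy 1.4+ form of the correlated-subquery update above.
subquery = (
    sa.select(pokemon_forms.c.id)
    .where(
        pokemon_forms.c.species_id == evolution_methods.c.evolved_species_id,
        pokemon_forms.c.is_default,
    )
    .scalar_subquery()
)

op.execute(evolution_methods.update().values(evolved_form_id=subquery))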
Example #8
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    img = op.create_table('images_new',
                          sa.Column('id', sa.Integer, nullable=False),
                          sa.Column('filename', sa.String(120), nullable=False),
                          sa.Column('location', sa.String(120), nullable=False),
                          sa.Column('url', sa.String(120)),
                          sa.Column('type', sa.Integer, nullable=False),
                          sa.Column('active',
                                    sa.Boolean,
                                    nullable=False,
                                    default=False),
                          sa.Column('timestamp', sa.DateTime, nullable=False),

                          sa.PrimaryKeyConstraint('id'))

    bind = op.get_bind()

    results = bind.execute('select * from images').fetchall()
    images = [{'id': r[0],
               'filename': r[1],
               'location': r[3],
               'url': r[4],  # Change to 7 afterwards?
               'type': r[5],
               'active': r[6],
               'timestamp': datetime.datetime.strptime(
                   r[7], '%Y-%m-%d %H:%M:%S.%f')}
              for r in results if r[1]]
    op.bulk_insert(img, images)

    op.drop_table('images')
    op.rename_table('images_new', 'images')
Example #9
    def test_rename_table_schema_hard_quoting(self):
        from sqlalchemy.sql.schema import quoted_name

        context = op_fixture("postgresql")
        op.rename_table("t1", "t2", schema=quoted_name("some.schema", quote=True))

        context.assert_('ALTER TABLE "some.schema".t1 RENAME TO t2')
Example #10
def downgrade_imapaccount():
    class ImapAccount_(Base):
        __table__ = Base.metadata.tables["imapaccount"]

    # Get data from table-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapAccount_.id, ImapAccount_.imap_host).all()
    to_insert = [dict(id=r[0], imap_host=r[1]) for r in results]

    # Drop columns, add new columns + insert data
    op.drop_column("account", "type")
    op.add_column("account", sa.Column("imap_host", sa.String(512)))

    table_ = table("account", column("imap_host", sa.String(512)), column("id", sa.Integer))

    for r in to_insert:
        op.execute(table_.update().where(table_.c.id == r["id"]).values({"imap_host": r["imap_host"]}))

    # Table switch-over
    op.drop_constraint("imapuid_ibfk_1", "imapuid", type_="foreignkey")
    op.drop_constraint("uidvalidity_ibfk_1", "uidvalidity", type_="foreignkey")
    op.drop_constraint("foldersync_ibfk_1", "foldersync", type_="foreignkey")
    op.drop_table("imapaccount")

    op.rename_table("account", "imapaccount")

    op.create_foreign_key("imapuid_ibfk_1", "imapuid", "imapaccount", ["imapaccount_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key(
        "uidvalidity_ibfk_1", "uidvalidity", "imapaccount", ["imapaccount_id"], ["id"], ondelete="CASCADE"
    )
    op.create_foreign_key("foldersync_ibfk_1", "foldersync", "imapaccount", ["account_id"], ["id"], ondelete="CASCADE")
def downgrade():
    LOG.info("Creating volume_type_extra_specs table")
    vt_es = op.create_table(
        'volume_type_extra_specs',
        sa.Column('created_at', sa.DateTime),
        sa.Column('updated_at', sa.DateTime),
        sa.Column('deleted_at', sa.DateTime),
        sa.Column('deleted', sa.Boolean),
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('volume_type_id', sa.String(length=36),
                  sa.ForeignKey('share_types.id'), nullable=False),
        sa.Column('key', sa.String(length=255)),
        sa.Column('value', sa.String(length=255)),
        mysql_engine='InnoDB')

    LOG.info("Migrating share_type_extra_specs to "
             "volume_type_extra_specs")
    _copy_records(destination_table=vt_es, up_migration=False)

    LOG.info("Dropping share_type_extra_specs table")
    op.drop_table("share_type_extra_specs")

    LOG.info("Renaming share_types table to volume_types")
    op.drop_constraint('st_name_uc', 'share_types', type_='unique')
    op.create_unique_constraint('vt_name_uc', 'share_types',
                                ['name', 'deleted'])
    op.rename_table("share_types", "volume_types")

    LOG.info("Renaming column name shares.share_type_id to "
             "shares.volume_type.id")
    op.alter_column("shares", "share_type_id",
                    new_column_name="volume_type_id",
                    type_=sa.String(length=36))
Example #12
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.rename_table("transaction", "old_transaction")
    op.rename_table("card_transaction", "old_card_transaction")

    op.create_table(
        "transaction",
        sa.Column("object_id", sa.Integer(), nullable=False),
        sa.Column("payment_method", sa.Enum(u"Battels", u"Card", u"Free"), nullable=False),
        sa.Column("paid", sa.Boolean(), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["user_id"], [u"user.object_id"]),
        sa.PrimaryKeyConstraint("object_id"),
    )
    op.create_table(
        "battels_transaction",
        sa.Column("object_id", sa.Integer(), nullable=False),
        sa.Column("battels_term", sa.Unicode(length=4), nullable=True),
        sa.ForeignKeyConstraint(["object_id"], [u"transaction.object_id"]),
        sa.PrimaryKeyConstraint("object_id"),
    )
    op.create_table(
        "card_transaction",
        sa.Column("object_id", sa.Integer(), nullable=False),
        sa.Column("completed", sa.DateTime(), nullable=True),
        sa.Column("access_code", sa.Unicode(length=200), nullable=True),
        sa.Column("result_code", sa.Unicode(length=2), nullable=True),
        sa.Column("eway_id", sa.Integer(), nullable=True),
        sa.Column("refunded", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["object_id"], [u"transaction.object_id"]),
        sa.PrimaryKeyConstraint("object_id"),
    )
def upgrade():
    table_prefix = context.config.get_main_option('table_prefix')
    op.drop_table(table_prefix + 'template')
    table_name = table_prefix + 'environment_schema_values'
    with op.batch_alter_table(table_name) as batch:
        batch.drop_constraint(table_name + '_schema_id_fkey', 'foreignkey')
        batch.alter_column(
            'schema_id',
            new_column_name='resource_definition_id',
            existing_type=sa.Integer(),
        )
    op.rename_table(table_name, table_prefix + 'resource_values')
    op.rename_table(table_prefix + 'schema',
                    table_prefix + 'resource_definition')
    with op.batch_alter_table(table_prefix + 'resource_definition') as batch:
        batch.drop_column('namespace_id')
    op.drop_table(table_prefix + 'namespace')
    table_name = table_prefix + 'resource_values'
    with op.batch_alter_table(table_name) as batch:
        batch.create_foreign_key(
            table_name + '_resource_definition_id_fkey',
            table_prefix + 'resource_definition',
            ['resource_definition_id'],
            ['id'],
        )
Example #14
def drop_column_sqlite(tablename, columns):
    """ column dropping functionality for SQLite """

    # we need copy to make a deep copy of the column attributes
    from copy import copy

    # get the db engine and reflect database tables
    engine = op.get_bind()
    meta = sa.MetaData(bind=engine)
    meta.reflect()

    # create a select statement from the old table
    old_table = meta.tables[tablename]
    select = sa.sql.select([c for c in old_table.c if c.name not in columns])

    # get remaining columns without table attribute attached
    remaining_columns = [copy(c) for c in old_table.columns if c.name not in columns]
    for column in remaining_columns:
        column.table = None

    # create a temporary new table
    new_tablename = "{0}_new".format(tablename)
    op.create_table(new_tablename, *remaining_columns)
    meta.reflect()
    new_table = meta.tables[new_tablename]

    # copy data from old table
    insert = sa.sql.insert(new_table).from_select([c.name for c in remaining_columns], select)
    engine.execute(insert)

    # drop the old table and rename the new table to take the old table's
    # position
    op.drop_table(tablename)
    op.rename_table(new_tablename, tablename)
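A usage sketch: called from a migration's downgrade(), the helper rebuilds the table without the unwanted columns, since SQLite (before 3.35) has no native DROP COLUMN support. The table and column names here are purely illustrative:

def downgrade():
    # Rebuild "user" without the two columns added by upgrade().
    drop_column_sqlite('user', ['password', 'last_login'])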
def downgrade():

    op.drop_constraint('roles_casework_users_casework_users_id_fkey', 'roles_users', type_='foreignkey')

    op.rename_table('casework_users', 'users')

    op.create_foreign_key('roles_users_users_id_fkey', 'roles_users', 'users', ['users_id'], ['id'])
def upgrade():

    if migration.schema_has_table('nsxv3_lbaas_l7rules'):
        op.drop_constraint('fk_nsxv3_lbaas_l7rules_id', 'nsxv3_lbaas_l7rules',
                           'foreignkey')
        op.drop_constraint('l7rule_id', 'nsxv3_lbaas_l7rules', 'primary')
        op.drop_column('nsxv3_lbaas_l7rules', 'loadbalancer_id')
        op.drop_column('nsxv3_lbaas_l7rules', 'l7rule_id')
        op.rename_table('nsxv3_lbaas_l7rules', 'nsxv3_lbaas_l7policies')

        if migration.schema_has_table('lbaas_l7policies'):
            op.create_foreign_key(
                'fk_nsxv3_lbaas_l7policies_id', 'nsxv3_lbaas_l7policies',
                'lbaas_l7policies', ['l7policy_id'], ['id'],
                ondelete='CASCADE')
    else:
        op.create_table(
            'nsxv3_lbaas_l7policies',
            sa.Column('l7policy_id', sa.String(36), nullable=False),
            sa.Column('lb_rule_id', sa.String(36), nullable=False),
            sa.Column('lb_vs_id', sa.String(36), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint('l7policy_id'))

        if migration.schema_has_table('lbaas_l7policies'):
            op.create_foreign_key(
                'fk_nsxv3_lbaas_l7policies_id', 'nsxv3_lbaas_l7policies',
                'lbaas_l7policies', ['l7policy_id'], ['id'],
                ondelete='CASCADE')
def upgrade(pyramid_env):
    with context.begin_transaction():
        op.create_table(
            'abstract_agent_account',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('type', sa.String(length=60), nullable=True),
            sa.PrimaryKeyConstraint('id')
        )
        op.rename_table(u'email_account', 'agent_email_account')
        op.rename_table(u'idprovider_account', 'idprovider_agent_account')
        op.execute("INSERT INTO abstract_agent_account (SELECT id, 'agent_email_account' FROM agent_email_account)")
        op.execute("UPDATE idprovider_agent_account SET id = id + (SELECT max(id) FROM agent_email_account)")
        op.execute("INSERT INTO abstract_agent_account (SELECT id, 'idprovider_agent_account' FROM idprovider_agent_account)")
        op.execute("select setval('abstract_agent_account_id_seq', (SELECT max(id)+1 FROM abstract_agent_account), false)")
        op.execute("alter table agent_email_account alter column id drop default")
        op.execute("alter table idprovider_agent_account alter column id drop default")
        op.create_foreign_key('fk_id', 'agent_email_account', 'abstract_agent_account', ['id'], ['id'], ondelete='CASCADE')
        op.create_foreign_key('fk_id', 'idprovider_agent_account', 'abstract_agent_account', ['id'], ['id'], ondelete='CASCADE')
        op.execute('drop sequence email_account_id_seq')
        op.execute('drop sequence idprovider_account_id_seq')

        ### end Alembic commands ###

    # Do stuff with the app's models here.
    with transaction.manager:
        pass
Пример #18
0
def force_rename_table(old, new):
    from autonomie.models import DBSESSION
    conn = DBSESSION.connection()
    if table_exists(old):
        if table_exists(new):
            op.drop_table(new)
        op.rename_table(old, new)
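table_exists is a project helper rather than an Alembic primitive; a plausible implementation using SQLAlchemy's inspector (a sketch, not necessarily the project's actual code):

import sqlalchemy as sa
from alembic import op

def table_exists(name):
    """Return True if a table called `name` exists in the current database."""
    inspector = sa.inspect(op.get_bind())
    return name in inspector.get_table_names()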
Example #19
def downgrade():
    op.drop_column('task', 'action')
    op.rename_table('task', 'deployment')

    op.create_table(
        'task',
        sa.Column('id', sa.String(length=36), nullable=False))

    helpers.transform_table(
        'status', {'task_id': 'deployment_id'}, {},
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('entity_id', sa.String(length=255), nullable=True),
        sa.Column('entity', sa.String(length=10), nullable=True),
        sa.Column('deployment_id', sa.String(length=36), nullable=True),
        sa.Column('text', sa.Text(), nullable=False),
        sa.Column('level', sa.String(length=32), nullable=False),
        sa.Column('details', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['deployment_id'], ['deployment.id'], ),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine=MYSQL_ENGINE,
        mysql_charset=MYSQL_CHARSET
    )

    op.drop_table('task')
Example #20
def genericize_imapaccount():
    class ImapAccount_(Base):
        __table__ = Base.metadata.tables['imapaccount']

    # Get data from columns-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapAccount_.id,
                                   ImapAccount_.imap_host).all()

    to_insert = [dict(id=r[0], imap_host=r[1]) for r in results]

    # Rename table, add new columns.
    op.rename_table('imapaccount', 'account')
    op.add_column('account', sa.Column('type', sa.String(16)))

    # Create new table, insert data
    # The table
    op.create_table('imapaccount',
                    sa.Column('imap_host', sa.String(512)),
                    sa.Column('id', sa.Integer()),
                    sa.ForeignKeyConstraint(['id'], ['account.id'],
                                            ondelete='CASCADE'),
                    sa.PrimaryKeyConstraint('id'))

    # The ad-hoc table for insert
    table_ = table('imapaccount',
                   column('imap_host', sa.String()),
                   column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(table_, to_insert)

    # Drop columns now
    op.drop_column('account', 'imap_host')
def upgrade(active_plugins=None, options=None):
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    op.create_table(
        'nvp_multi_provider_networks',
        sa.Column('network_id', sa.String(length=36), nullable=False),
        sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('network_id'),
        mysql_engine='InnoDB'
    )
    op.create_table('rename_nvp_network_bindings',
                    sa.Column('network_id', sa.String(length=36),
                              primary_key=True),
                    sa.Column('binding_type',
                              sa.Enum(
                                  'flat', 'vlan', 'stt', 'gre', 'l3_ext',
                                  name=(
                                      'nvp_network_bindings_binding_type')),
                              nullable=False, primary_key=True),
                    sa.Column('phy_uuid', sa.String(36), primary_key=True,
                              nullable=True),
                    sa.Column('vlan_id', sa.Integer, primary_key=True,
                              nullable=True, autoincrement=False))
    # copy data from nvp_network_bindings into rename_nvp_network_bindings
    op.execute("INSERT INTO rename_nvp_network_bindings SELECT network_id, "
               "binding_type, phy_uuid, vlan_id from nvp_network_bindings")

    op.drop_table('nvp_network_bindings')
    op.rename_table('rename_nvp_network_bindings', 'nvp_network_bindings')
def downgrade(active_plugins=None, options=None):
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    # Delete the multi_provider_network entries from nvp_network_bindings
    op.execute("DELETE from nvp_network_bindings WHERE network_id IN "
               "(SELECT network_id from nvp_multi_provider_networks)")

    # create table with previous contents
    op.create_table(
        'rename_nvp_network_bindings',
        sa.Column('network_id', sa.String(length=36), primary_key=True),
        sa.Column('binding_type',
                  sa.Enum('flat', 'vlan', 'stt', 'gre', 'l3_ext',
                          name=('nvp_network_bindings_binding_type')),
                  nullable=False),
        sa.Column('phy_uuid', sa.String(36), nullable=True),
        sa.Column('vlan_id', sa.Integer, nullable=True, autoincrement=False))

    # copy data from nvp_network_bindings into rename_nvp_network_bindings
    op.execute("INSERT INTO rename_nvp_network_bindings SELECT network_id, "
               "binding_type, phy_uuid, vlan_id from nvp_network_bindings")

    op.drop_table('nvp_network_bindings')
    op.rename_table('rename_nvp_network_bindings', 'nvp_network_bindings')
    op.drop_table('nvp_multi_provider_networks')
def upgrade():
    conn = op.get_bind()

    if conn.engine.driver == "psycopg2":
        conn.execute(
            "ALTER TABLE errors ALTER COLUMN message TYPE text "
            "USING message::text"
        )
    elif conn.engine.driver == "mysqldb":
        conn.execute(
            "ALTER TABLE errors MODIFY message text"
        )
    elif conn.engine.driver == "pysqlite":
        old_errors = conn.execute(
            "SELECT id, message, task_id FROM errors"
        ).fetchall()

        errors = []
        for error in old_errors:
            errors.append(dict(zip(("id", "message", "task_id"), error)))

        op.rename_table("errors", "old_errors")
        op.drop_table("old_errors")
        op.create_table(
            "errors",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("message", sa.Text(), nullable=False),
            sa.Column("task_id", sa.Integer(), sa.ForeignKey("tasks.id"), nullable=False),
        )
        op.bulk_insert(Error.__table__, errors)
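Error here is presumably the project's declarative model for the rebuilt errors table; a minimal sketch consistent with the columns created above:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Error(Base):
    __tablename__ = "errors"
    id = sa.Column(sa.Integer, primary_key=True)
    message = sa.Column(sa.Text, nullable=False)
    task_id = sa.Column(sa.Integer, sa.ForeignKey("tasks.id"), nullable=False)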
def downgrade():
    """ Set all the table names back to be camel case. """
    # Invert the dict
    old_tables = {TABLES[table]: table for table in TABLES}

    for table in TABLES:
        op.rename_table(table, TABLES[table])
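Both this downgrade and the matching upgrade further below assume a module-level TABLES dict mapping the old camel-case table names to their lower-case replacements; a sketch of its shape (the names are hypothetical):

# Hypothetical mapping: camel-case name -> lower-case name.
TABLES = {
    'UserAccount': 'user_account',
    'OrderItem': 'order_item',
}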
def upgrade():
    op.drop_table('role')
    op.rename_table('tmp_role', 'role')
    op.rename_table('role_focus', 'tmp_role_focus')

    role_focus = op.create_table('role_focus',
        sa.Column('role_focus_id', sa.Integer(), nullable=False),
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('focus_name', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['role.role_id'], name='fk_focus_role'),
        sa.PrimaryKeyConstraint('role_focus_id')
    )

    for focus in connection.execute(focus_helper.select()):
        op.bulk_insert(
            role_focus,
            [
                {
                    'role_focus_id': focus.role_id,
                    'role_id': focus.role_id,
                    'user_id': focus.user_id,
                    'focus_name': focus.focus_name,
                },
            ]
        )

    op.drop_table('tmp_role_focus')
def downgrade():
    # rename instead of add/drop, because of existing FK constraints
    op.rename_table('radio_scheduledprogram', 'radio_scheduledepisode')
    op.rename_table('onair_program', 'onair_episode')

    op.alter_column(u'telephony_call', 'onairprogram_id', new_column_name='onairepisode_id')
    op.alter_column(u'telephony_message', 'onairprogram_id', new_column_name='onairepisode_id')
def downgrade():
    op.rename_table('map_areas', 'aspects', schema='roombooking')
    op.execute('''
        ALTER INDEX roombooking.ix_uq_map_areas_is_default RENAME TO ix_uq_aspects_is_default;
        ALTER SEQUENCE roombooking.map_areas_id_seq RENAME TO aspects_id_seq;
        ALTER TABLE roombooking.aspects RENAME CONSTRAINT pk_map_areas TO pk_aspects;
    ''')
def downgrade():
    op.rename_table('all_business_licenses', 'business_licenses')
    # The table was renamed above, so the remaining operations target
    # 'business_licenses'.
    op.alter_column('business_licenses',
                    'name',
                    type_=sa.types.VARCHAR(length=200))

    op.alter_column('business_licenses',
                    'business_class',
                    type_=sa.types.VARCHAR(length=40))
    op.alter_column('business_licenses',
                    'business_service_description',
                    type_=sa.types.VARCHAR(length=100))
    op.alter_column('business_licenses',
                    'business_product',
                    type_=sa.types.VARCHAR(length=40))
    op.alter_column('business_licenses',
                    'business_address',
                    type_=sa.types.VARCHAR(length=200))
    op.alter_column('business_licenses',
                    'business_zip',
                    type_=sa.types.VARCHAR(length=20))

    op.drop_column('business_licenses', 'business_street_number')
    op.drop_column('business_licenses', 'business_street_prefix')
    op.drop_column('business_licenses', 'business_street_name')
    op.drop_column('business_licenses', 'business_street_type')
    op.drop_column('business_licenses', 'business_street_suffix')
    op.drop_column('business_licenses', 'business_zip')
Example #29
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column(u'postage', 'cancelled')

    op.drop_table('new_card_transaction')
    op.drop_table('eway_transaction')

    conn = op.get_bind()
    conn.execute(sql.text("""
        UPDATE `transaction`
        SET `payment_method`=:new_payment_method
        WHERE `payment_method`=:old_payment_method
    """), old_payment_method='OldCard', new_payment_method='Card')

    op.alter_column('transaction', 'payment_method',
                    existing_type=sa.Enum(
                        'Battels',
                        'Card',
                        'OldCard',
                        'Free',
                        'Dummy'
                    ),
                    type_=sa.Enum(
                        'Battels',
                        'Card',
                        'Free',
                        'Dummy'
                    ),
                    nullable=False)

    op.rename_table('old_card_transaction', 'card_transaction')
def downgrade_export_locations_table(connection):
    op.rename_table('share_instance_export_locations',
                    'share_export_locations')
    op.add_column(
        'share_export_locations',
        Column('share_id', String(36),
               ForeignKey('shares.id', name="sel_id_fk"))
    )

    # Convert share_instance_id to share_id
    share_el_table = utils.load_table('share_export_locations', connection)
    share_instances_table = utils.load_table('share_instances', connection)
    for export in connection.execute(share_el_table.select()):
        share_instance = connection.execute(
            share_instances_table.select().where(
                share_instances_table.c.id == export.share_instance_id)
        ).first()

        op.execute(
            share_el_table.update().where(
                share_el_table.c.id == export.id
            ).values({'share_id': six.text_type(share_instance.share_id)})
        )

    with op.batch_alter_table("share_export_locations") as batch_op:
        batch_op.drop_constraint('sel_instance_id_fk', type_='foreignkey')
        batch_op.drop_column('share_instance_id')
def upgrade():
    op.rename_table('indie_game_screenshot', 'indie_game_image')
    if is_sqlite:
        with op.batch_alter_table(
                'indie_game_image',
                reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
            batch_op.add_column(
                sa.Column('is_screenshot',
                          sa.Boolean(),
                          server_default='True',
                          nullable=False))
            batch_op.add_column(
                sa.Column('use_in_promo',
                          sa.Boolean(),
                          server_default='False',
                          nullable=False))

            batch_op.drop_constraint(
                'fk_indie_game_screenshot_game_id_indie_game',
                type_='foreignkey')
            batch_op.create_foreign_key(
                op.f('fk_indie_game_image_game_id_indie_game'), 'indie_game',
                ['game_id'], ['id'])

            batch_op.drop_constraint('pk_indie_game_screenshot',
                                     type_='primary')
            batch_op.create_primary_key(op.f('pk_indie_game_image'), ['id'])

        with op.batch_alter_table(
                'indie_game',
                reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
            batch_op.add_column(
                sa.Column('facebook',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('has_multiplayer',
                          sa.Boolean(),
                          server_default='False',
                          nullable=False))
            batch_op.add_column(
                sa.Column('leaderboard_challenge',
                          sa.Boolean(),
                          server_default='False',
                          nullable=False))
            batch_op.add_column(
                sa.Column('link_to_promo_video',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('link_to_webpage',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('multiplayer_game_length',
                          sa.Integer(),
                          nullable=True))
            batch_op.add_column(
                sa.Column('other_social_media',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('player_count',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('tournament_at_event',
                          sa.Boolean(),
                          server_default='False',
                          nullable=False))
            batch_op.add_column(
                sa.Column('tournament_prizes',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
            batch_op.add_column(
                sa.Column('twitter',
                          sa.Unicode(),
                          server_default='',
                          nullable=False))
    else:
        op.add_column(
            'indie_game_image',
            sa.Column('is_screenshot',
                      sa.Boolean(),
                      server_default='True',
                      nullable=False))
        op.add_column(
            'indie_game_image',
            sa.Column('use_in_promo',
                      sa.Boolean(),
                      server_default='False',
                      nullable=False))

        op.drop_constraint('fk_indie_game_screenshot_game_id_indie_game',
                           'indie_game_image',
                           type_='foreignkey')
        op.create_foreign_key(op.f('fk_indie_game_image_game_id_indie_game'),
                              'indie_game_image', 'indie_game', ['game_id'],
                              ['id'])

        op.drop_constraint('pk_indie_game_screenshot',
                           'indie_game_image',
                           type_='primary')
        op.create_primary_key(op.f('pk_indie_game_image'), 'indie_game_image',
                              ['id'])

        op.add_column(
            'indie_game',
            sa.Column('facebook',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('has_multiplayer',
                      sa.Boolean(),
                      server_default='False',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('leaderboard_challenge',
                      sa.Boolean(),
                      server_default='False',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('link_to_promo_video',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('link_to_webpage',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('multiplayer_game_length', sa.Integer(), nullable=True))
        op.add_column(
            'indie_game',
            sa.Column('other_social_media',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('player_count',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('tournament_at_event',
                      sa.Boolean(),
                      server_default='False',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('tournament_prizes',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
        op.add_column(
            'indie_game',
            sa.Column('twitter',
                      sa.Unicode(),
                      server_default='',
                      nullable=False))
Example #32
def downgrade():
    op.alter_column('DSK8', 'complaint_number', type_=sa.BigInteger)
    op.rename_table('DSK8', 'DKS8')
Example #33
def upgrade():
    op.rename_table('DKS8', 'DSK8')
    op.alter_column('DSK8', 'complaint_number', type_=sa.String)
Example #34
    def test_rename_table_schema(self):
        context = op_fixture("mssql")
        op.rename_table("t1", "t2", schema="foobar")
        context.assert_contains("EXEC sp_rename 'foobar.t1', t2")
Example #35
    def test_rename_table_postgresql(self):
        context = op_fixture("postgresql")
        op.rename_table('t1', 't2')
        context.assert_("ALTER TABLE t1 RENAME TO t2")
Example #36
def rename_table_if_exists(old_table_name, new_table_name):
    if schema_has_table(old_table_name):
        op.rename_table(old_table_name, new_table_name)
Example #37
def upgrade():
    db.session.close()
    table_names = db.inspect(db.engine).get_table_names()
    if 'powerconnect55xx' in table_names:
        op.rename_table('powerconnect55xx', 'power_connect55xx')
def downgrade():
    op.drop_constraint('proc_figurelabtable_ibfk_1', 'proc_FigureLabTable',
                       'foreignkey')
    op.drop_column('proc_FigureLabTable', 'lab_id')
    op.rename_table('proc_FigureLabTable', 'proc_FigureSamplesTable')
def upgrade():
    # BEWARE: be prepared for some really spaghetti code. To deal with SQLite limitations in Alembic we coded some workarounds.

    # Migrations are supported starting from Cuckoo 0.6 and Cuckoo 1.0; I need a way to figure out
    # from which release it starts, because both schemas are missing alembic release versioning.
    # I check for the tags table to distinguish between Cuckoo 0.6 and 1.0.
    conn = op.get_bind()

    if conn.engine.dialect.has_table(conn.engine.connect(), "machines_tags"):
        # If this table exist we are on Cuckoo 1.0 or above.
        # So skip SQL migration.
        pass
    else:
        # We are on Cuckoo < 1.0, hopefully 0.6.
        # So run SQL migration.

        # Create table used by Tag.
        op.create_table(
            "tags",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("name",
                      sa.String(length=255),
                      nullable=False,
                      unique=True),
        )

        # Create secondary table used in association Machine - Tag.
        op.create_table(
            "machines_tags",
            sa.Column("machine_id", sa.Integer, sa.ForeignKey("machines.id")),
            sa.Column("tag_id", sa.Integer, sa.ForeignKey("tags.id")),
        )

        # Add columns to Machine.
        op.add_column(
            "machines",
            sa.Column("interface", sa.String(length=255), nullable=True))
        op.add_column(
            "machines",
            sa.Column("snapshot", sa.String(length=255), nullable=True))
        # TODO: change default value, be aware sqlite doesn't support that kind of ALTER statement.
        op.add_column(
            "machines",
            sa.Column("resultserver_ip",
                      sa.String(length=255),
                      server_default="192.168.56.1",
                      nullable=False))
        # TODO: change default value, be aware sqlite doesn't support that kind of ALTER statement.
        op.add_column(
            "machines",
            sa.Column("resultserver_port",
                      sa.String(length=255),
                      server_default="2042",
                      nullable=False))

        # Deal with Alembic shit.
        # Alembic is so ORMish that it was impossible to write code which works on different DBMS.
        if conn.engine.driver == "psycopg2":
            # We don"t provide a default value and leave the column as nullable because o further data migration.
            op.add_column(
                "tasks",
                sa.Column("clock", sa.DateTime(timezone=False), nullable=True))
            # NOTE: We added this new column so we force clock time to the added_on for old analyses.
            conn.execute("update tasks set clock=added_on")
            # Add the not null constraint.
            op.alter_column("tasks",
                            "clock",
                            nullable=False,
                            existing_nullable=True)
            # Altering status ENUM.
            # This shit of raw SQL is here because alembic doesn't deal well with alter_column of ENUM type.
            op.execute(
                'COMMIT'
            )  # Commit because SQLAlchemy doesn't support ALTER TYPE in a transaction.
            conn.execute("ALTER TYPE status_type ADD VALUE 'completed'")
            conn.execute("ALTER TYPE status_type ADD VALUE 'reported'")
            conn.execute("ALTER TYPE status_type ADD VALUE 'recovered'")
            conn.execute("ALTER TYPE status_type ADD VALUE 'running'")
            conn.execute(
                "ALTER TYPE status_type RENAME ATTRIBUTE success TO completed")
            conn.execute(
                "ALTER TYPE status_type DROP ATTRIBUTE IF EXISTS failure")
        elif conn.engine.driver == "mysqldb":
            # We don"t provide a default value and leave the column as nullable because o further data migration.
            op.add_column(
                "tasks",
                sa.Column("clock", sa.DateTime(timezone=False), nullable=True))
            # NOTE: We added this new column so we force clock time to the added_on for old analyses.
            conn.execute("update tasks set clock=added_on")
            # Add the not null constraint.
            op.alter_column("tasks",
                            "clock",
                            nullable=False,
                            existing_nullable=True,
                            existing_type=sa.DateTime(timezone=False))
            # NOTE: To work around limitations in Alembic and the MySQL ALTER statement (cannot remove an item from an ENUM).
            # Read data.
            tasks_data = []
            old_tasks = conn.execute(
                "select id, target, category, timeout, priority, custom, machine, package, options, platform, memory, enforce_timeout, added_on, started_on, completed_on, status, sample_id from tasks"
            ).fetchall()
            for item in old_tasks:
                d = {}
                d["id"] = item[0]
                d["target"] = item[1]
                d["category"] = item[2]
                d["timeout"] = item[3]
                d["priority"] = item[4]
                d["custom"] = item[5]
                d["machine"] = item[6]
                d["package"] = item[7]
                d["options"] = item[8]
                d["platform"] = item[9]
                d["memory"] = item[10]
                d["enforce_timeout"] = item[11]
                if isinstance(item[12], datetime):
                    d["added_on"] = item[12]
                else:
                    d["added_on"] = parse(item[12])
                if isinstance(item[13], datetime):
                    d["started_on"] = item[13]
                else:
                    d["started_on"] = parse(item[13])
                if isinstance(item[14], datetime):
                    d["completed_on"] = item[14]
                else:
                    d["completed_on"] = parse(item[14])
                d["status"] = item[15]
                d["sample_id"] = item[16]

                # Force clock.
                # NOTE: We added this new column so we force clock time to the added_on for old analyses.
                d["clock"] = d["added_on"]
                # Enum migration, "success" isn"t a valid state now.
                if d["status"] == "success":
                    d["status"] = "completed"
                tasks_data.append(d)

            # Rename original table.
            op.rename_table("tasks", "old_tasks")
            # Drop old table.
            op.drop_table("old_tasks")
            # Drop old Enum.
            sa.Enum(name="status_type").drop(op.get_bind(), checkfirst=False)
            # Create new table with 1.0 schema.
            op.create_table(
                "tasks", sa.Column("id", sa.Integer(), nullable=False),
                sa.Column("target", sa.String(length=255), nullable=False),
                sa.Column("category", sa.String(length=255), nullable=False),
                sa.Column("timeout",
                          sa.Integer(),
                          server_default="0",
                          nullable=False),
                sa.Column("priority",
                          sa.Integer(),
                          server_default="1",
                          nullable=False),
                sa.Column("custom", sa.String(length=255), nullable=True),
                sa.Column("machine", sa.String(length=255), nullable=True),
                sa.Column("package", sa.String(length=255), nullable=True),
                sa.Column("options", sa.String(length=255), nullable=True),
                sa.Column("platform", sa.String(length=255), nullable=True),
                sa.Column("memory",
                          sa.Boolean(),
                          nullable=False,
                          default=False),
                sa.Column("enforce_timeout",
                          sa.Boolean(),
                          nullable=False,
                          default=False),
                sa.Column("clock",
                          sa.DateTime(timezone=False),
                          server_default=sa.func.now(),
                          nullable=False),
                sa.Column("added_on",
                          sa.DateTime(timezone=False),
                          nullable=False),
                sa.Column("started_on",
                          sa.DateTime(timezone=False),
                          nullable=True),
                sa.Column("completed_on",
                          sa.DateTime(timezone=False),
                          nullable=True),
                sa.Column("status",
                          sa.Enum("pending",
                                  "running",
                                  "completed",
                                  "reported",
                                  "recovered",
                                  name="status_type"),
                          server_default="pending",
                          nullable=False),
                sa.Column("sample_id",
                          sa.Integer,
                          sa.ForeignKey("samples.id"),
                          nullable=True), sa.PrimaryKeyConstraint("id"))

            # Insert data.
            op.bulk_insert(db.Task.__table__, tasks_data)
        elif conn.engine.driver == "pysqlite":
            # Edit task status enumeration in Task.
            # NOTE: To work around limitations in SQLite we have to create a temporary table, create the new schema, and copy data.
            # Read data.
            tasks_data = []
            old_tasks = conn.execute(
                "select id, target, category, timeout, priority, custom, machine, package, options, platform, memory, enforce_timeout, added_on, started_on, completed_on, status, sample_id from tasks"
            ).fetchall()
            for item in old_tasks:
                d = {}
                d["id"] = item[0]
                d["target"] = item[1]
                d["category"] = item[2]
                d["timeout"] = item[3]
                d["priority"] = item[4]
                d["custom"] = item[5]
                d["machine"] = item[6]
                d["package"] = item[7]
                d["options"] = item[8]
                d["platform"] = item[9]
                d["memory"] = item[10]
                d["enforce_timeout"] = item[11]
                if isinstance(item[12], datetime):
                    d["added_on"] = item[12]
                else:
                    d["added_on"] = parse(item[12])
                if isinstance(item[13], datetime):
                    d["started_on"] = item[13]
                else:
                    d["started_on"] = parse(item[13])
                if isinstance(item[14], datetime):
                    d["completed_on"] = item[14]
                else:
                    d["completed_on"] = parse(item[14])
                d["status"] = item[15]
                d["sample_id"] = item[16]

                # Force clock.
                # NOTE: We added this new column so we force clock time to the added_on for old analyses.
                d["clock"] = d["added_on"]
                # Enum migration, "success" isn"t a valid state now.
                if d["status"] == "success":
                    d["status"] = "completed"
                tasks_data.append(d)

            # Rename original table.
            op.rename_table("tasks", "old_tasks")
            # Drop old table.
            op.drop_table("old_tasks")
            # Drop old Enum.
            sa.Enum(name="status_type").drop(op.get_bind(), checkfirst=False)
            # Create new table with 1.0 schema.
            op.create_table(
                "tasks", sa.Column("id", sa.Integer(), nullable=False),
                sa.Column("target", sa.Text(), nullable=False),
                sa.Column("category", sa.String(length=255), nullable=False),
                sa.Column("timeout",
                          sa.Integer(),
                          server_default="0",
                          nullable=False),
                sa.Column("priority",
                          sa.Integer(),
                          server_default="1",
                          nullable=False),
                sa.Column("custom", sa.String(length=255), nullable=True),
                sa.Column("machine", sa.String(length=255), nullable=True),
                sa.Column("package", sa.String(length=255), nullable=True),
                sa.Column("options", sa.String(length=255), nullable=True),
                sa.Column("platform", sa.String(length=255), nullable=True),
                sa.Column("memory",
                          sa.Boolean(),
                          nullable=False,
                          default=False),
                sa.Column("enforce_timeout",
                          sa.Boolean(),
                          nullable=False,
                          default=False),
                sa.Column("clock",
                          sa.DateTime(timezone=False),
                          server_default=sa.func.now(),
                          nullable=False),
                sa.Column("added_on",
                          sa.DateTime(timezone=False),
                          nullable=False),
                sa.Column("started_on",
                          sa.DateTime(timezone=False),
                          nullable=True),
                sa.Column("completed_on",
                          sa.DateTime(timezone=False),
                          nullable=True),
                sa.Column("status",
                          sa.Enum("pending",
                                  "running",
                                  "completed",
                                  "reported",
                                  "recovered",
                                  name="status_type"),
                          server_default="pending",
                          nullable=False),
                sa.Column("sample_id",
                          sa.Integer,
                          sa.ForeignKey("samples.id"),
                          nullable=True), sa.PrimaryKeyConstraint("id"))

            # Insert data.
            op.bulk_insert(db.Task.__table__, tasks_data)

    # Migrate mongo.
    mongo_upgrade()
Example #40
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "downloaderrors",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(length=36), nullable=False),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_downloaderrors")),
        sa.UniqueConstraint("name", name=op.f("uq_downloaderrors_name")),
    )

    conn = op.get_bind()
    for name, member in db.DownloadErrorCodes.__members__.items():
        conn.execute("""INSERT INTO downloaderrors (name) VALUES (:name);""",
                     name)

    op.rename_table("files", "files_tmp")
    op.rename_table("messages", "messages_tmp")
    op.rename_table("replies", "replies_tmp")

    conn.execute(CREATE_TABLE_FILES_NEW)
    conn.execute(CREATE_TABLE_MESSAGES_NEW)
    conn.execute(CREATE_TABLE_REPLIES_NEW)

    conn.execute("""
        INSERT INTO files
        (
            id, uuid, filename, file_counter, size, download_url,
            is_downloaded, is_read, is_decrypted, download_error_id, source_id,
            last_updated
        )
        SELECT id, uuid, filename, file_counter, size, download_url,
               is_downloaded, is_read, is_decrypted, NULL, source_id, CURRENT_TIMESTAMP
        FROM files_tmp
    """)

    conn.execute("""
        INSERT INTO messages
        (
            id, uuid, source_id, filename, file_counter, size, content, is_decrypted,
            is_downloaded, is_read, download_error_id, download_url, last_updated
        )
        SELECT id, uuid, source_id, filename, file_counter, size, content, is_decrypted,
               is_downloaded, is_read, NULL, download_url, CURRENT_TIMESTAMP
        FROM messages_tmp
    """)

    conn.execute("""
        INSERT INTO replies
        (
            id, uuid, source_id, filename, file_counter, size, content, is_decrypted,
            is_downloaded, download_error_id, journalist_id, last_updated
        )
        SELECT id, uuid, source_id, filename, file_counter, size, content, is_decrypted,
              is_downloaded, NULL, journalist_id, CURRENT_TIMESTAMP
        FROM replies_tmp
    """)

    # Delete the old tables.
    op.drop_table("files_tmp")
    op.drop_table("messages_tmp")
    op.drop_table("replies_tmp")
Example #41
    def test_rename_table(self):
        context = op_fixture()
        op.rename_table("t1", "t2")
        context.assert_("ALTER TABLE t1 RENAME TO t2")
def upgrade():
    op.rename_table('eve_user', 'user')
def downgrade():
    op.rename_table('user', 'eve_user')
def _perform(upgrade):
    conn = op.get_bind()

    sample_list = conn.execute("SELECT id, file_size, file_type, md5, crc32, "
                               "sha1, sha256, sha512, ssdeep FROM samples")

    samples = []
    for sample in sample_list:
        samples.append({
            "id": sample[0],
            "file_size": sample[1],
            "file_type": sample[2],
            "md5": sample[3],
            "crc32": sample[4],
            "sha1": sample[5],
            "sha256": sample[6],
            "sha512": sample[7],
            "ssdeep": sample[8],
        })

    # PostgreSQL and MySQL have different names for the foreign key of
    # Task.sample_id -> Sample.id; for SQLite we don't drop/recreate the
    # foreign key.
    fkey_name = {
        "mysql": "tasks_ibfk_1",
        "postgresql": "tasks_sample_id_fkey",
    }

    fkey = fkey_name.get(db.Database(schema_check=False).engine.name)

    # First drop the foreign key.
    if fkey:
        op.drop_constraint(fkey, "tasks", type_="foreignkey")

    # Rename original table.
    op.rename_table("samples", "old_samples")

    # Drop old table.
    op.drop_table("old_samples")

    if upgrade:
        file_type = sa.Text()
    else:
        file_type = sa.String(255)

        # Downgrading implies trimming file_type values to 255 bytes, so
        # truncate every existing record accordingly.
        for sample in samples:
            sample["file_type"] = sample["file_type"][:255]

    # Create the new table with the 1.2 schema.
    # Changelog:
    # * file_type changed its type from String(255) to Text().
    op.create_table("samples", sa.Column("id", sa.Integer(), nullable=False),
                    sa.Column("file_size", sa.Integer(), nullable=False),
                    sa.Column("file_type", file_type, nullable=False),
                    sa.Column("md5", sa.String(32), nullable=False),
                    sa.Column("crc32", sa.String(8), nullable=False),
                    sa.Column("sha1", sa.String(40), nullable=False),
                    sa.Column("sha256", sa.String(64), nullable=False),
                    sa.Column("sha512", sa.String(128), nullable=False),
                    sa.Column("ssdeep", sa.Text(), nullable=True),
                    sa.PrimaryKeyConstraint("id"))

    # Insert data.
    op.bulk_insert(db.Sample.__table__, samples)

    # Restore the indices.
    op.create_index("hash_index",
                    "samples", ["md5", "crc32", "sha1", "sha256", "sha512"],
                    unique=True)

    # Create the foreign key.
    if fkey:
        op.create_foreign_key(fkey, "tasks", "samples", ["sample_id"], ["id"])
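
Note that _perform looks up the dialect name through the application's own db.Database wrapper; inside a migration the same information is available directly from Alembic's bind, which avoids constructing a second engine. A hedged sketch of that variant, reusing the constraint names from the dict above (the helper name itself is illustrative):

from alembic import op


def drop_tasks_sample_fkey():
    # The dialect name is available straight from the migration connection.
    dialect = op.get_bind().engine.name
    fkey = {
        "mysql": "tasks_ibfk_1",
        "postgresql": "tasks_sample_id_fkey",
    }.get(dialect)
    # SQLite has no named foreign key to drop, so fkey stays None there.
    if fkey:
        op.drop_constraint(fkey, "tasks", type_="foreignkey")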
Example #45
def downgrade():
    op.rename_table('power_connect55xx', 'powerconnect55xx')
Example #46
def rename_table(old_table_name: Text, new_table_name: Text) -> None:
    if migration_utils.table_exists(old_table_name):
        op.rename_table(old_table_name, new_table_name)
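
The migration_utils.table_exists helper is project-specific and not shown here; a common way to implement such a check (a sketch assuming SQLAlchemy 1.4+, where sa.inspect accepts a connection) is to ask the inspector for the current table list:

import sqlalchemy as sa
from alembic import op


def table_exists(table_name):
    # Reflect the live database and look for the table by name.
    inspector = sa.inspect(op.get_bind())
    return table_name in inspector.get_table_names()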
Example #47
def downgrade():
    op.rename_table("linotp_audit", "audit")
Example #48
def test_rename_table_schema(self):
    context = op_fixture('mssql')
    op.rename_table('t1', 't2', schema="foobar")
    context.assert_contains("EXEC sp_rename 'foobar.t1', t2")
Example #49
def upgrade():
    op.rename_table("audit", "linotp_audit")
Example #50
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.rename_table('stream_submission', 'stream_sub')
def downgrade():
    op.rename_table('new_cookies', 'cookies', schema='practice')
def upgrade():
    """ Set all the table names to lower case to make it easier to write
    sql query manually on postgresql. """
    for table in TABLES:
        op.rename_table(table, TABLES[table])
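
TABLES is defined elsewhere in that migration and is not shown here; presumably it maps each existing name to its lower-case form, along the lines of this hypothetical example:

# Hypothetical mapping; the real migration defines its own table list.
TABLES = {
    "userAccount": "useraccount",
    "orderItem": "orderitem",
}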
Example #53
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.rename_table('collection_like', 'user_like_collection')
def upgrade():
    from inbox.models.session import session_scope
    from inbox.models.folder import Folder
    from inbox.sqlalchemy_ext.util import JSON
    from inbox.ignition import main_engine
    from sqlalchemy.ext.declarative import declarative_base
    engine = main_engine(pool_size=1, max_overflow=0)

    ### foldersync => imapfoldersyncstatus
    # note that renaming a table does in fact migrate constraints + indexes too
    op.rename_table('foldersync', 'imapfoldersyncstatus')

    op.alter_column('imapfoldersyncstatus',
                    '_sync_status',
                    existing_type=JSON(),
                    nullable=True,
                    new_column_name='_metrics')

    op.add_column('imapfoldersyncstatus',
                  sa.Column('folder_id', sa.Integer(), nullable=False))

    ### uidvalidity => imapfolderinfo
    op.rename_table('uidvalidity', 'imapfolderinfo')
    op.alter_column('imapfolderinfo',
                    'uid_validity',
                    existing_type=sa.Integer(),
                    nullable=False,
                    new_column_name='uidvalidity')
    op.alter_column('imapfolderinfo',
                    'highestmodseq',
                    existing_type=sa.Integer(),
                    nullable=True)

    op.drop_constraint('imapfolderinfo_ibfk_1',
                       'imapfolderinfo',
                       type_='foreignkey')
    op.alter_column('imapfolderinfo',
                    'imapaccount_id',
                    existing_type=sa.Integer(),
                    nullable=False,
                    new_column_name='account_id')
    op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                          'imapaccount', ['account_id'], ['id'])

    op.add_column('imapfolderinfo',
                  sa.Column('folder_id', sa.Integer(), nullable=False))

    ### imapuid
    op.drop_constraint('imapuid_ibfk_1', 'imapuid', type_='foreignkey')
    op.alter_column('imapuid',
                    'imapaccount_id',
                    existing_type=sa.Integer(),
                    nullable=False,
                    new_column_name='account_id')
    op.create_foreign_key('imapuid_ibfk_1', 'imapuid', 'imapaccount',
                          ['account_id'], ['id'])

    ### migrate data and add new constraints
    Base = declarative_base()
    Base.metadata.reflect(engine)

    if 'easfoldersync' in Base.metadata.tables:
        op.rename_table('easfoldersync', 'easfoldersyncstatus')
        op.add_column('easfoldersyncstatus',
                      sa.Column('folder_id', sa.Integer(), nullable=False))
        op.alter_column('easfoldersyncstatus',
                        '_sync_status',
                        existing_type=JSON(),
                        nullable=True,
                        new_column_name='_metrics')
        Base.metadata.reflect(engine)

        class EASFolderSyncStatus(Base):
            __table__ = Base.metadata.tables['easfoldersyncstatus']

    class ImapFolderSyncStatus(Base):
        __table__ = Base.metadata.tables['imapfoldersyncstatus']

    class ImapFolderInfo(Base):
        __table__ = Base.metadata.tables['imapfolderinfo']

    with session_scope(versioned=False) as db_session:
        folder_id_for = {(account_id, name.lower()): id_
                         for id_, account_id, name in db_session.query(
                             Folder.id, Folder.account_id, Folder.name)}
        for status in db_session.query(ImapFolderSyncStatus):
            print "migrating", status.folder_name
            status.folder_id = folder_id_for[(status.account_id,
                                              status.folder_name.lower())]
        db_session.commit()
        if 'easfoldersyncstatus' in Base.metadata.tables:
            for status in db_session.query(EASFolderSyncStatus):
                print "migrating", status.folder_name
                folder_id = folder_id_for.get(
                    (status.account_id, status.folder_name.lower()))
                if folder_id is not None:
                    status.folder_id = folder_id
                else:
                    # EAS folder rows may not exist if the folder has no messages
                    folder = Folder(account_id=status.account_id,
                                    name=status.folder_name)
                    db_session.add(folder)
                    db_session.commit()
                    status.folder_id = folder.id
            db_session.commit()
            # some weird alembic bug? need to drop and recreate this FK
            op.drop_constraint('easfoldersyncstatus_ibfk_1',
                               'easfoldersyncstatus',
                               type_='foreignkey')
            op.drop_column('easfoldersyncstatus', 'folder_name')
            op.create_foreign_key('easfoldersyncstatus_ibfk_1',
                                  'easfoldersyncstatus', 'easaccount',
                                  ['account_id'], ['id'])
            op.create_foreign_key('easfoldersyncstatus_ibfk_2',
                                  'easfoldersyncstatus', 'folder',
                                  ['folder_id'], ['id'])
            op.create_unique_constraint('account_id', 'easfoldersyncstatus',
                                        ['account_id', 'folder_id'])

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint('imapfoldersyncstatus_ibfk_1',
                       'imapfoldersyncstatus',
                       type_='foreignkey')
    op.drop_constraint('account_id', 'imapfoldersyncstatus', type_='unique')
    op.drop_column('imapfoldersyncstatus', 'folder_name')
    op.create_foreign_key('imapfoldersyncstatus_ibfk_1',
                          'imapfoldersyncstatus', 'imapaccount',
                          ['account_id'], ['id'])
    op.create_foreign_key('imapfoldersyncstatus_ibfk_2',
                          'imapfoldersyncstatus', 'folder', ['folder_id'],
                          ['id'])
    op.create_unique_constraint('account_id', 'imapfoldersyncstatus',
                                ['account_id', 'folder_id'])

    with session_scope(versioned=False) as db_session:
        for info in db_session.query(ImapFolderInfo):
            print "migrating", info.folder_name
            info.folder_id = folder_id_for[(info.account_id,
                                            info.folder_name.lower())]
        db_session.commit()

    # some weird alembic bug? need to drop and recreate this FK
    op.drop_constraint('imapfolderinfo_ibfk_1',
                       'imapfolderinfo',
                       type_='foreignkey')
    op.drop_constraint('imapaccount_id', 'imapfolderinfo', type_='unique')
    op.drop_column('imapfolderinfo', 'folder_name')
    op.create_foreign_key('imapfolderinfo_ibfk_1', 'imapfolderinfo',
                          'imapaccount', ['account_id'], ['id'])
    op.create_foreign_key('imapfolderinfo_ibfk_2', 'imapfolderinfo', 'folder',
                          ['folder_id'], ['id'])
    op.create_unique_constraint('imapaccount_id', 'imapfolderinfo',
                                ['account_id', 'folder_id'])
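
The reflection trick used above (declaring throwaway mapped classes over Base.metadata.reflect so that data can be migrated with ORM queries instead of raw SQL) reduces to a small standalone pattern. A sketch with an illustrative helper name; note that the reflected table must have a primary key for the mapping to work:

from sqlalchemy.ext.declarative import declarative_base


def reflect_class(engine, table_name):
    # Reflect the live schema and bind a mapped class to one table.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Reflected(Base):
        __table__ = Base.metadata.tables[table_name]

    return Reflected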
Example #55
def test_rename_table_schema_postgresql(self):
    context = op_fixture("postgresql")
    op.rename_table("t1", "t2", schema="foo")
    context.assert_("ALTER TABLE foo.t1 RENAME TO t2")
def downgrade() -> None:
    schema = config["schema"]

    op.rename_table("metadata", "ui_metadata", schema=schema)
Example #57
def upgrade():
    op.rename_table(OLD_REFERRED_TABLE_NAME, NEW_REFERRED_TABLE_NAME)
Example #58
def test_rename_table(self):
    context = op_fixture("mssql")
    op.rename_table("t1", "t2")
    context.assert_contains("EXEC sp_rename 't1', t2")
Example #59
def upgrade():
    conn = op.get_bind()

    if conn.engine.driver == "psycopg2":
        conn.execute("ALTER TABLE tasks ALTER COLUMN options TYPE text "
                     "USING options::text")
        conn.execute("ALTER TABLE tasks ALTER COLUMN custom TYPE text "
                     "USING custom::text")
    elif conn.engine.driver == "mysqldb":
        conn.execute("ALTER TABLE tasks MODIFY options text")
        conn.execute("ALTER TABLE tasks MODIFY custom text")
    elif conn.engine.driver == "pysqlite":
        old_tasks = conn.execute("SELECT %s FROM tasks" %
                                 ", ".join(columns)).fetchall()

        tasks = []
        for task in old_tasks:
            tasks.append(dict(zip(columns, task)))
            parse_dates(tasks[-1], "clock", "added_on", "started_on",
                        "completed_on")

        op.rename_table("tasks", "old_tasks")
        op.drop_table("old_tasks")
        op.create_table(
            "tasks", sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("target", sa.Text(), nullable=False),
            sa.Column("category", sa.String(255), nullable=False),
            sa.Column("timeout",
                      sa.Integer(),
                      server_default="0",
                      nullable=False),
            sa.Column("priority",
                      sa.Integer(),
                      server_default="1",
                      nullable=False),
            sa.Column("custom", sa.Text(), nullable=True),
            sa.Column("owner", sa.String(64), nullable=True),
            sa.Column("machine", sa.String(255), nullable=True),
            sa.Column("package", sa.String(255), nullable=True),
            sa.Column("options", sa.Text(), nullable=True),
            sa.Column("platform", sa.String(255), nullable=True),
            sa.Column("memory", sa.Boolean, nullable=False, default=False),
            sa.Column("enforce_timeout",
                      sa.Boolean,
                      nullable=False,
                      default=False),
            sa.Column("clock",
                      sa.DateTime(timezone=False),
                      default=datetime.datetime.now,
                      nullable=False),
            sa.Column("added_on",
                      sa.DateTime(timezone=False),
                      default=datetime.datetime.now,
                      nullable=False),
            sa.Column("started_on", sa.DateTime(timezone=False),
                      nullable=True),
            sa.Column("completed_on",
                      sa.DateTime(timezone=False),
                      nullable=True),
            sa.Column("status",
                      status_type,
                      server_default=TASK_PENDING,
                      nullable=False),
            sa.Column("sample_id",
                      sa.Integer,
                      sa.ForeignKey("samples.id"),
                      nullable=True),
            sa.Column("processing", sa.String(16), nullable=True),
            sa.Column("route", sa.String(16), nullable=True))

        op.bulk_insert(Task.__table__, tasks)
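
The driver-specific branches above sidestep Alembic's portable type-change support; on PostgreSQL and MySQL the same widening can usually be written with op.alter_column, which emits the appropriate dialect DDL (SQLite still needs the copy-table recipe shown in the pysqlite branch). A sketch, assuming the columns were previously String(255):

import sqlalchemy as sa
from alembic import op


def widen_task_columns():
    # Portable equivalent of the raw ALTER statements above
    # (not suitable for SQLite, which cannot alter column types in place).
    op.alter_column("tasks", "options",
                    existing_type=sa.String(255), type_=sa.Text())
    op.alter_column("tasks", "custom",
                    existing_type=sa.String(255), type_=sa.Text())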
Example #60
def test_rename_table(self):
    context = op_fixture('mssql')
    op.rename_table('t1', 't2')
    context.assert_contains("EXEC sp_rename 't1', t2")