def upgrade():
    # Add the rses.staging_area boolean flag (with its named CHECK
    # constraint).  SQLite is skipped because it cannot add a constrained
    # column via ALTER TABLE.
    if context.get_context().dialect.name != "sqlite":
        op.add_column("rses", sa.Column("staging_area", sa.Boolean(name="RSE_STAGING_AREA_CHK"), default=False))

    # Widen the set of allowed request types by replacing the named CHECK
    # constraint.  MySQL is skipped too (no usable named CHECK support here).
    if context.get_context().dialect.name not in ("sqlite", "mysql"):
        op.drop_constraint("REQUESTS_TYPE_CHK", "requests", type_="check")
        op.create_check_constraint(
            # NOTE(review): the last value is the digit zero ('0'); if the
            # intent was a stage-out request type this may need to be the
            # letter 'O' -- confirm against the request-type constants.
            name="REQUESTS_TYPE_CHK", source="requests", condition="request_type in ('U', 'D', 'T', 'I', '0')"
        )
def downgrade():
    """Drop the ``configs`` and ``configs_history`` tables.

    On PostgreSQL the named constraints must be dropped explicitly before
    the tables can be removed.
    """
    # BUG FIX: the original compared with ``is 'postgresql'`` -- identity
    # comparison against a string literal is implementation-dependent and
    # effectively always False here.  Use value equality.
    if context.get_context().dialect.name == 'postgresql':
        op.drop_constraint('configs_pk', 'configs', type_='primary')
        op.drop_constraint('configs_created_nn', 'configs', type_='check')
        op.drop_constraint('configs_updated_nn', 'configs', type_='check')
    op.drop_table('configs')
    if context.get_context().dialect.name == 'postgresql':
        # NOTE(review): 'configs_history_pk' looks like a primary key but is
        # dropped with type_='check' -- confirm whether this should be
        # type_='primary'.
        op.drop_constraint('configs_history_pk', 'configs_history', type_='check')
    op.drop_table('configs_history')
def upgrade():
    """Move the ``user`` table into the ``<schema>_static`` schema.

    Creates the static table with role *names* denormalised as strings,
    copies all users over (resolving role ids to names via JOINs), then
    drops the original ``<schema>.user`` table.
    """
    schema = context.get_context().config.get_main_option('schema')
    staticschema = schema + '_static'
    parentschema = context.get_context().config.get_main_option('parentschema')

    engine = op.get_bind().engine
    # Idempotence guard: nothing to do if the target table already exists.
    if op.get_context().dialect.has_table(
        engine, 'user', schema=staticschema
    ):  # pragma: nocover
        return

    op.create_table(
        'user',
        Column('type', String(10), nullable=False),
        Column('id', Integer, primary_key=True),
        Column('username', Unicode, unique=True, nullable=False),
        Column('password', Unicode, nullable=False),
        Column('email', Unicode, nullable=False),
        Column('is_password_changed', Boolean, default=False),
        Column('role_name', String),
        schema=staticschema,
    )
    parent_column = ''
    parent_select = ''
    parent_join = ''
    # BUG FIX: the original tested ``parentschema is not ''`` -- identity
    # comparison with a string literal, which is implementation-dependent.
    # Use value comparison instead.
    if parentschema is not None and parentschema != '':  # pragma: nocover
        op.add_column(
            'user',
            Column('parent_role_name', String),
            schema=staticschema
        )
        parent_column = ', parent_role_name'
        parent_select = ', pr.name'
        parent_join = (
            'LEFT OUTER JOIN %(parentschema)s.role AS pr ON (pr.id = u.parent_role_id)' % {
                'parentschema': parentschema,
            }
        )

    # Schema names come from the alembic config (trusted input), so plain
    # string interpolation into the SQL is acceptable here.
    op.execute(
        'INSERT INTO %(staticschema)s.user '
        '(type, username, password, email, is_password_changed, role_name%(parent_column)s) ('
        'SELECT u.type, u.username, u.password, u.email, '
        'u.is_password_changed, r.name%(parent_select)s '
        'FROM %(schema)s.user AS u '
        'LEFT OUTER JOIN %(schema)s.role AS r ON (r.id = u.role_id) %(parent_join)s'
        ')' % {
            'staticschema': staticschema,
            'schema': schema,
            'parent_select': parent_select,
            'parent_column': parent_column,
            'parent_join': parent_join,
        }
    )

    op.drop_table('user', schema=schema)
def upgrade():
    """Move the ``user`` table into the ``<schema>_static`` schema.

    Creates the static table with role *names* denormalised as strings,
    copies all users over (resolving role ids to names via JOINs), then
    drops the original ``<schema>.user`` table.
    """
    schema = context.get_context().config.get_main_option('schema')
    staticschema = schema + '_static'
    parentschema = context.get_context().config.get_main_option('parentschema')

    engine = op.get_bind().engine
    # Idempotence guard: nothing to do if the target table already exists.
    if op.get_context().dialect.has_table(
        engine, 'user', schema=staticschema
    ):  # pragma: nocover
        return

    op.create_table(
        'user',
        Column('type', String(10), nullable=False),
        Column('id', Integer, primary_key=True),
        Column('username', Unicode, unique=True, nullable=False),
        Column('password', Unicode, nullable=False),
        Column('email', Unicode, nullable=False),
        Column('is_password_changed', Boolean, default=False),
        Column('role_name', String),
        schema=staticschema,
    )
    parent_column = ""
    parent_select = ""
    parent_join = ""
    # BUG FIX: the original tested ``parentschema is not ""`` -- identity
    # comparison with a string literal, which is implementation-dependent.
    # Use value comparison instead.
    if parentschema is not None and parentschema != "":  # pragma: nocover
        op.add_column(
            'user',
            Column('parent_role_name', String),
            schema=staticschema
        )
        parent_column = ", parent_role_name"
        parent_select = ", pr.name"
        parent_join = (
            "LEFT OUTER JOIN %(parentschema)s.role AS pr ON (pr.id = u.parent_role_id)" % {
                "parentschema": parentschema,
            }
        )

    # Schema names come from the alembic config (trusted input), so plain
    # string interpolation into the SQL is acceptable here.
    op.execute(
        "INSERT INTO %(staticschema)s.user "
        "(type, username, password, email, is_password_changed, role_name%(parent_column)s) ("
        "SELECT u.type, u.username, u.password, u.email, "
        "u.is_password_changed, r.name%(parent_select)s "
        "FROM %(schema)s.user AS u "
        "LEFT OUTER JOIN %(schema)s.role AS r ON (r.id = u.role_id) %(parent_join)s"
        ")" % {
            "staticschema": staticschema,
            "schema": schema,
            "parent_select": parent_select,
            "parent_column": parent_column,
            "parent_join": parent_join,
        }
    )

    op.drop_table('user', schema=schema)
def downgrade():
    """Move the ``user`` table back into the main schema.

    Recreates ``<schema>.user`` with role *ids* (foreign keys), copies the
    rows back from the static table (resolving role names to ids), then
    drops the static table.
    """
    schema = context.get_context().config.get_main_option("schema")
    staticschema = schema + "_static"
    parentschema = context.get_context().config.get_main_option("parentschema")

    op.create_table(
        "user",
        Column("type", String(10), nullable=False),
        Column("id", Integer, primary_key=True),
        Column("username", Unicode, unique=True, nullable=False),
        Column("password", Unicode, nullable=False),
        Column("email", Unicode, nullable=False),
        Column("is_password_changed", Boolean, default=False),
        Column("role_id", Integer, ForeignKey(schema + ".role.id"), nullable=False),
        schema=schema,
    )
    parent_column = ""
    parent_select = ""
    parent_join = ""
    # BUG FIX: the original tested ``parentschema is not ""`` -- identity
    # comparison with a string literal, which is implementation-dependent.
    # Use value comparison instead.
    if parentschema is not None and parentschema != "":  # pragma: nocover
        op.add_column(
            "user",
            Column("parent_role_id", Integer, ForeignKey(parentschema + ".role.id")),
            schema=schema
        )
        parent_column = ", parent_role_id"
        parent_select = ", pr.id"
        parent_join = (
            "LEFT OUTER JOIN %(parentschema)s.role AS pr ON (pr.name = u.parent_role_name)" % {
                "parentschema": parentschema,
            }
        )

    # Schema names come from the alembic config (trusted input), so plain
    # string interpolation into the SQL is acceptable here.
    op.execute(
        "INSERT INTO %(schema)s.user "
        "(type, username, password, email, is_password_changed, role_id%(parent_column)s) ("
        "SELECT u.type, u.username, u.password, u.email, "
        "u.is_password_changed, r.id%(parent_select)s "
        "FROM %(staticschema)s.user AS u "
        "LEFT OUTER JOIN %(schema)s.role AS r ON (r.name = u.role_name) %(parent_join)s"
        ")" % {
            "staticschema": staticschema,
            "schema": schema,
            "parent_select": parent_select,
            "parent_column": parent_column,
            "parent_join": parent_join,
        }
    )

    op.drop_table("user", schema=staticschema)
def downgrade():
    """Move the ``user`` table back into the main schema.

    Same as the sibling downgrade, but the schema names come from the
    ``main_schema`` / ``static_schema`` config options.
    """
    schema = context.get_context().config.get_main_option('main_schema')
    staticschema = context.get_context().config.get_main_option('static_schema')
    parentschema = context.get_context().config.get_main_option('parentschema')

    op.create_table(
        'user',
        Column('type', String(10), nullable=False),
        Column('id', Integer, primary_key=True),
        Column('username', Unicode, unique=True, nullable=False),
        Column('password', Unicode, nullable=False),
        Column('email', Unicode, nullable=False),
        Column('is_password_changed', Boolean, default=False),
        Column('role_id', Integer, ForeignKey(schema + '.role.id'), nullable=False),
        schema=schema,
    )
    parent_column = ''
    parent_select = ''
    parent_join = ''
    # BUG FIX: the original tested ``parentschema is not ''`` -- identity
    # comparison with a string literal, which is implementation-dependent.
    # Use value comparison instead.
    if parentschema is not None and parentschema != '':  # pragma: no cover
        op.add_column(
            'user',
            Column('parent_role_id', Integer, ForeignKey(parentschema + '.role.id')),
            schema=schema
        )
        parent_column = ', parent_role_id'
        parent_select = ', pr.id'
        parent_join = (
            'LEFT OUTER JOIN {parentschema}.role AS pr ON (pr.name = u.parent_role_name)'.format(
                parentschema=parentschema,
            )
        )

    # Schema names come from the alembic config (trusted input), so plain
    # string interpolation into the SQL is acceptable here.
    op.execute(
        'INSERT INTO %(schema)s.user '
        '(type, username, password, email, is_password_changed, role_id%(parent_column)s) ('
        'SELECT u.type, u.username, u.password, u.email, '
        'u.is_password_changed, r.id%(parent_select)s '
        'FROM %(staticschema)s.user AS u '
        'LEFT OUTER JOIN %(schema)s.role AS r ON (r.name = u.role_name) %(parent_join)s'
        ')' % {
            'staticschema': staticschema,
            'schema': schema,
            'parent_select': parent_select,
            'parent_column': parent_column,
            'parent_join': parent_join,
        }
    )

    op.drop_table('user', schema=staticschema)
def upgrade():
    # MySQL executes a hand-written SQL script statement by statement;
    # every other backend uses generic alembic operations plus a backfill.
    script_name = ('1679b5bc102c_add_subsecond_columns_to_test_runs_table.'
                   'mysql_upgrade.sql')
    here = os.path.dirname(os.path.realpath(__file__))
    script_path = os.path.join(here, script_name)
    ctx = context.get_context()
    if ctx.dialect.name == 'mysql':
        with open(script_path, 'r') as script:
            for statement in script.read().splitlines():
                # don't execute empty or commented lines
                if not statement or statement.startswith('--'):
                    continue
                op.execute(statement)
        return

    for col_name in ('start_time_microsecond', 'stop_time_microsecond'):
        op.add_column('test_runs', sa.Column(col_name, sa.Integer(),
                                             default=0))
    if CONF.disable_microsecond_data_migration:
        return

    # Backfill the new columns from the existing datetime values.
    bind = op.get_bind()
    metadata = sa.schema.MetaData()
    metadata.bind = bind
    test_runs = sa.Table('test_runs', metadata, autoload=True)
    rows = test_runs.select().execute()
    for row in rows:
        # row[0] = id, row[4] = start_time, row[5] = stop_time
        op.execute(test_runs.update().where(
            test_runs.c.id == row[0]).values(
                {'start_time_microsecond': row[4].microsecond,
                 'stop_time_microsecond': row[5].microsecond}))
    rows.close()
def upgrade():
    """Create the ``profile`` table and, once the migration completes,
    backfill one profile row per user that lacks one."""
    op.create_table('profile',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('image_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['image_id'], ['image.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    opts = context.get_context().opts
    signals = opts.get('signals')
    if signals:
        def apply_data(sender):
            # BUG FIX: Python-2-only ``print`` statement replaced with the
            # function form, valid on both Python 2 and 3.
            print("Apply data")
            # Imports are deferred: the app models are only importable once
            # the migration has run.
            from rdr.application.database import db
            from rdr.modules.users.models import User, Profile
            users = User.query.all()
            for user in users:
                if not user.profile:
                    prof = Profile(user_id=user.id)
                    db.session.add(prof)
            db.session.commit()

        signals('on_complete').connect(apply_data, weak=False)
def upgrade():
    # Create the two lux extension tables that mirror (and reference) the
    # c2cgeoportal internal/external WMS layer tables.
    schema = context.get_context().config.get_main_option('schema')

    internal_target = schema + '.layer_internal_wms.id'
    op.create_table(
        'lux_layer_internal_wms',
        sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('url', sa.VARCHAR(length=255), autoincrement=False,
                  nullable=True),
        sa.Column('layers', sa.VARCHAR(length=1000), autoincrement=False,
                  nullable=True),
        sa.Column('is_poi', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('collection_id', sa.INTEGER(), autoincrement=False,
                  nullable=True),
        sa.ForeignKeyConstraint(['id'], [internal_target],
                                name=u'lux_layer_internal_wms_fk1',
                                onupdate=u'CASCADE',
                                ondelete=u'CASCADE'),
        sa.PrimaryKeyConstraint('id', name=u'lux_layer_internal_wms_pkey'),
        schema=schema,
    )

    external_target = schema + '.layer_external_wms.id'
    op.create_table(
        'lux_layer_external_wms',
        sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('category_id', sa.INTEGER(), autoincrement=False,
                  nullable=True),
        sa.ForeignKeyConstraint(['id'], [external_target],
                                name=u'lux_layer_external_wms_fk1',
                                onupdate=u'CASCADE',
                                ondelete=u'CASCADE'),
        sa.PrimaryKeyConstraint('id', name=u'lux_layer_external_wms_pkey'),
        schema=schema,
    )
def upgrade():
    """Shrink the resource id columns from BigInteger to Integer.

    MySQL refuses to alter a column that is referenced by foreign keys, so
    the relevant FKs are dropped up-front and recreated afterwards.
    """
    c = get_context()
    # drop foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        insp = Inspector.from_engine(c.connection.engine)
        for t in [
            "groups_resources_permissions",
            "users_resources_permissions",
            "resources",
        ]:
            for constraint in insp.get_foreign_keys(t):
                if constraint["referred_columns"] == ["resource_id"]:
                    # BUG FIX: alembic's keyword is ``type_`` -- ``type`` was
                    # only a deprecated alias and is rejected by current
                    # releases.
                    op.drop_constraint(constraint["name"], t, type_="foreignkey")

    op.alter_column(
        "resources",
        "resource_id",
        type_=sa.Integer(),
        existing_type=sa.BigInteger(),
        autoincrement=True,
        nullable=False,
    )
    op.alter_column(
        "resources", "parent_id", type_=sa.Integer(), existing_type=sa.BigInteger()
    )
    op.alter_column(
        "users_resources_permissions",
        "resource_id",
        type_=sa.Integer(),
        existing_type=sa.BigInteger(),
        nullable=False,
    )
    op.alter_column(
        "groups_resources_permissions",
        "resource_id",
        type_=sa.Integer(),
        existing_type=sa.BigInteger(),
        nullable=False,
    )

    # recreate foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(
            "groups_resources_permissions_resource_fk",
            "groups_resources_permissions",
            "resources",
            ["resource_id"],
            ["resource_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
        op.create_foreign_key(
            "users_resources_permissions_fk",
            "users_resources_permissions",
            "resources",
            ["resource_id"],
            ["resource_id"],
            onupdate="CASCADE",
            ondelete="CASCADE",
        )
def downgrade():
    # Undo the matching upgrade: drop the added columns, then the added
    # tables (stats tables live in the dedicated "<schema>_stats" schema).
    schema = context.get_context().config.get_main_option('schema')
    schema_stats = schema + '_stats'

    dropped_columns = (
        ('lux_print_job', 'is_error'),
        ('lux_print_job', 'print_url'),
        ('lux_getfeature_definition', 'has_profile'),
        ('lux_getfeature_definition', 'columns_order'),
        ('lux_getfeature_definition', 'id_column'),
    )
    for table, column in dropped_columns:
        op.drop_column(table, column, schema=schema)

    op.drop_table('lux_predefined_wms', schema=schema)
    for stats_table in ('pag_download', 'sketch_download',
                        'measurement_download', 'connections'):
        op.drop_table(stats_table, schema=schema_stats)
# 示例#12 (example #12, score 0) — separator artifact from the snippet
# source this file was pasted from; kept as a comment so the file parses.
def run_migration(engine):
    """Run migration within the current EnvironmentContext."""
    # BUG FIX: ``connection`` was assigned inside the ``try`` block, so a
    # failure in ``engine.connect()`` raised NameError from the ``finally``
    # clause.  Acquire the connection before entering ``try``.
    connection = engine.connect()
    try:
        # Configure EnvironmentContext with database connection
        context.configure(connection=connection)

        # Compare current and head revisions
        log.debug("Checking if SQLite database migration is needed.")
        migration_context = context.get_context()

        head_rev = context.get_head_revision()
        log.debug("Head Alembic revision: %s", head_rev)
        alembic_initialized = _has_table(migration_context, 'alembic_version')
        if not alembic_initialized:
            # First run: stamp the database at head without migrating.
            log.debug("Alembic is not initialized, setting current revision"
                      " to head revision: %s", head_rev)
            _create_version_table(migration_context)
            migration_context._update_current_rev(None, head_rev)
        current_rev = migration_context.get_current_revision()
        log.debug("Current Alembic revision: %s", current_rev)

        # Only process migration if current revision is different from
        # head revision
        if current_rev == head_rev:
            log.debug("No migration to process as current Alembic revision in"
                      " SQLite database is already the head revision.")
        else:
            with context.begin_transaction():
                context.run_migrations()
            log.debug('Ended SQLite database migration')
    finally:
        connection.close()
def upgrade():
    """Make ``group_name`` the primary key of ``groups``.

    MySQL requires the referencing foreign keys to be dropped before the
    referenced column can be altered, and recreated afterwards.
    """
    c = get_context()
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        insp = Inspector.from_engine(c.connection.engine)
        for t in ['groups_permissions', 'groups_resources_permissions',
                  'users_groups', 'resources']:
            for constraint in insp.get_foreign_keys(t):
                if constraint['referred_columns'] == ['group_name']:
                    # BUG FIX: alembic's keyword is ``type_`` -- ``type`` was
                    # only a deprecated alias, rejected by current releases.
                    op.drop_constraint(constraint['name'], t,
                                       type_='foreignkey')

    op.drop_column('groups', 'id')
    op.alter_column('groups', 'group_name',
                    type_=sa.String(128),
                    existing_type=sa.String(50),
                    )
    op.create_primary_key('groups_pkey', 'groups', cols=['group_name'])

    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key(None, 'groups_permissions', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['group_name'], onupdate='CASCADE',
                              ondelete='CASCADE')
        op.create_foreign_key(None, 'groups_resources_permissions', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['group_name'], onupdate='CASCADE',
                              ondelete='CASCADE')
        op.create_foreign_key(None, 'users_groups', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['group_name'], onupdate='CASCADE',
                              ondelete='CASCADE')
        op.create_foreign_key(None, 'resources', 'groups',
                              remote_cols=['group_name'],
                              local_cols=['owner_group_name'], onupdate='CASCADE',
                              ondelete='SET NULL')
def upgrade():
    """Create the tests/test_runs indexes and the ``uq_test_runs`` unique
    constraint, skipping anything that already exists."""
    migration_context = context.get_context()
    insp = reflection.Inspector(migration_context.bind)
    test_indx = insp.get_indexes('tests')
    test_indx_names = [x['name'] for x in test_indx]
    # BUG FIX: the original filtered with ``len(x) == 1`` where ``x`` is the
    # whole reflected-index dict (which always has several keys), so
    # single-column indexes were never collected and the existence checks
    # below were ineffective.  Filter on the column list length instead.
    test_indx_columns = [x['column_names'][0] for x in test_indx
                         if len(x['column_names']) == 1]
    test_run_indx = insp.get_indexes('test_runs')
    test_run_indx_names = [x['name'] for x in test_run_indx]
    test_run_indx_columns = [x['column_names'][0] for x in test_run_indx
                             if len(x['column_names']) == 1]
    if ('ix_test_id' not in test_indx_names and
        'test_id' not in test_indx_columns):
        op.create_index('ix_test_id', 'tests', ['test_id'], mysql_length=30)

    # remove auto created indexes (sqlite only)
    # note the name is with test_runs not test_run
    if migration_context.dialect.name == 'sqlite':
        if 'ix_test_runs_test_id' in test_run_indx_names:
            op.drop_index('ix_test_runs_test_id', 'test_runs')
        if 'ix_test_runs_run_id' in test_run_indx_names:
            op.drop_index('ix_test_runs_run_id', 'test_runs')

    with op.batch_alter_table('test_runs') as batch_op:
        batch_op.create_unique_constraint('uq_test_runs',
                                          ['test_id', 'run_id'])

    if ('ix_test_run_test_id' not in test_run_indx_names and
        'test_id' not in test_run_indx_columns):
        op.create_index('ix_test_run_test_id', 'test_runs', ['test_id'])
    if ('ix_test_run_run_id' not in test_run_indx_names and
        'run_id' not in test_run_indx_columns):
        op.create_index('ix_test_run_run_id', 'test_runs', ['run_id'])
def upgrade():
    """Shrink the ``resource_id`` columns from BigInteger to Integer.

    MySQL cannot alter a column referenced by foreign keys, so the FKs are
    dropped up-front and recreated at the end.
    """
    c = get_context()
    # drop foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        insp = Inspector.from_engine(c.connection.engine)
        for t in ['groups_resources_permissions', 'users_resources_permissions', 'resources']:
            for constraint in insp.get_foreign_keys(t):
                if constraint['referred_columns'] == ['resource_id']:
                    # BUG FIX: alembic's keyword is ``type_`` -- ``type`` was
                    # only a deprecated alias, rejected by current releases.
                    op.drop_constraint(constraint['name'], t, type_='foreignkey')

    with op.batch_alter_table('resources', schema=None) as batch_op:
        batch_op.alter_column('resource_id',
                    type_=sa.Integer(), existing_type=sa.BigInteger(),
                    autoincrement=True)
    with op.batch_alter_table('resources', schema=None) as batch_op:
        batch_op.alter_column('parent_id',
                    type_=sa.Integer(), existing_type=sa.BigInteger())
    with op.batch_alter_table('users_resources_permissions', schema=None) as batch_op:
        batch_op.alter_column('resource_id',
                        type_=sa.Integer(), existing_type=sa.BigInteger())
    # BUG FIX: the original altered users_resources_permissions.resource_id
    # twice and never touched groups_resources_permissions, even though its
    # FK is dropped above and recreated below (and the sibling migration in
    # this file alters both tables).
    with op.batch_alter_table('groups_resources_permissions', schema=None) as batch_op:
        batch_op.alter_column('resource_id',
                        type_=sa.Integer(), existing_type=sa.BigInteger())

    # recreate foreign keys for mysql
    if isinstance(c.connection.engine.dialect, MySQLDialect):
        op.create_foreign_key("groups_resources_permissions_resource_fk",
                              'groups_resources_permissions',
                              "resources", ["resource_id"], ["resource_id"],
                              onupdate='CASCADE', ondelete='CASCADE')
        op.create_foreign_key("users_resources_permissions_fk",
                              'users_resources_permissions',
                              "resources", ["resource_id"], ["resource_id"],
                              onupdate='CASCADE', ondelete='CASCADE')
def upgrade():
    # Add the missing primary key on layergroup_treeitem.id.
    target_schema = context.get_context().config.get_main_option('schema')
    op.create_primary_key(
        'layergroup_treeitem_pkey',
        'layergroup_treeitem',
        ['id'],
        schema=target_schema,
    )
# 示例#17 (example #17, score 0) — separator artifact from the snippet
# source this file was pasted from; kept as a comment so the file parses.
def upgrade():
    ctx = context.get_context()
    Session = sessionmaker(bind=ctx.bind)

    # Association table: many teachers per event.
    op.create_table(u'event_teachers',
        sa.Column(u'user_id', sa.Integer(), nullable=False),
        sa.Column(u'event_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['event_id'], [u'events.id'], ),
        sa.ForeignKeyConstraint(['user_id'], [u'users.id'], ),
        sa.PrimaryKeyConstraint(u'user_id', u'event_id')
    )
    # Seed the new relation from the legacy single-teacher attribute.
    session = Session()
    for event in session.query(Event).all():
        event.teachers = [event._teacher]
    session.commit()

    # Association table: many tutors per lesson.
    op.create_table(u'lesson_tutors',
        sa.Column(u'user_id', sa.Integer(), nullable=False),
        sa.Column(u'lesson_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['lesson_id'], [u'lessons.id'], ),
        sa.ForeignKeyConstraint(['user_id'], [u'users.id'], ),
        sa.PrimaryKeyConstraint(u'user_id', u'lesson_id')
    )
    # Seed the new relation from the legacy single-tutor attribute.
    session = Session()
    for lesson in session.query(Lesson).all():
        lesson.tutors = [lesson._tutor]
    session.commit()

    # The legacy single-tutor column becomes optional.
    op.alter_column('lessons', u'tutor_id',
                    existing_type=sa.Integer(),
                    nullable=True)
def upgrade():
    # MySQL runs a hand-written SQL script in one shot; other backends use
    # alembic operations plus an ORM-level data backfill.
    script_name = ('1679b5bc102c_add_subsecond_columns_to_test_runs_table.'
                   'mysql_upgrade.sql')
    here = os.path.dirname(os.path.realpath(__file__))
    script_path = os.path.join(here, script_name)
    ctx = context.get_context()
    if ctx.dialect.name == 'mysql':
        with open(script_path, 'r') as script:
            op.execute(script.read())
        return

    for col_name in ('start_time_microsecond', 'stop_time_microsecond'):
        op.add_column('test_runs', sa.Column(col_name, sa.Integer(),
                                             default=0))
    if CONF.disable_microsecond_data_migration:
        return

    # Backfill the new columns from the existing datetime values.
    session = db_api.get_session()
    query = db_utils.model_query(models.TestRun, session).values(
        models.TestRun.id, models.TestRun.start_time,
        models.TestRun.stop_time)
    for run_id, start_time, stop_time in query:
        db_api.update_test_run(
            {'start_time_microsecond': start_time.microsecond,
             'stop_time_microsecond': stop_time.microsecond},
            run_id, session)
    session.close()
# 示例#19 (example #19, score 0) — separator artifact from the snippet
# source this file was pasted from; kept as a comment so the file parses.
def upgrade():
    # Only run this for somehow supported data types at the date we started naming constraints
    # Among others, these will probably fail on MySQL
    if context.get_bind().engine.name not in ('sqlite', 'postgresql'):
        return

    metadata = context.get_context().opts['target_metadata']
    convention = metadata.naming_convention

    # (table, [(constraint name, constraint type), ...]).  Drop order
    # matters: referencing tables first, referenced ones ('user', 'domain')
    # last.
    drops = [
        ('alias', [('alias_pkey', 'primary'),
                   ('alias_domain_name_fkey', 'foreignkey')]),
        ('alternative', [('alternative_pkey', 'primary'),
                         ('alternative_domain_name_fkey', 'foreignkey')]),
        ('manager', [('manager_domain_name_fkey', 'foreignkey'),
                     ('manager_user_email_fkey', 'foreignkey')]),
        ('token', [('token_pkey', 'primary'),
                   ('token_user_email_fkey', 'foreignkey')]),
        ('fetch', [('fetch_pkey', 'primary'),
                   ('fetch_user_email_fkey', 'foreignkey')]),
        ('relay', [('relay_pkey', 'primary')]),
        ('config', [('config_pkey', 'primary')]),
        ('user', [('user_pkey', 'primary'),
                  ('user_domain_name_fkey', 'foreignkey')]),
        ('domain', [('domain_pkey', 'primary')]),
    ]
    # Drop every constraint on every table
    for table, constraints in drops:
        with op.batch_alter_table(table, naming_convention=convention) as batch_op:
            for name, kind in constraints:
                batch_op.drop_constraint(name, type_=kind)

    # (table, pk columns or None, [(fk name, referent, local, remote), ...]).
    # Recreate order: referenced tables first.
    creates = [
        ('domain', ['name'], []),
        ('alias', ['email'],
         [('alias_domain_name_fkey', 'domain', ['domain_name'], ['name'])]),
        ('user', ['email'],
         [('user_domain_name_fkey', 'domain', ['domain_name'], ['name'])]),
        ('alternative', ['name'],
         [('alternative_domain_name_fkey', 'domain', ['domain_name'], ['name'])]),
        ('manager', None,
         [('manager_domain_name_fkey', 'domain', ['domain_name'], ['name']),
          ('manager_user_email_fkey', 'user', ['user_email'], ['email'])]),
        ('token', ['id'],
         [('token_user_email_fkey', 'user', ['user_email'], ['email'])]),
        ('fetch', ['id'],
         [('fetch_user_email_fkey', 'user', ['user_email'], ['email'])]),
        ('relay', ['name'], []),
        ('config', ['name'], []),
    ]
    # Recreate constraints with proper names
    for table, pk_cols, fks in creates:
        with op.batch_alter_table(table, naming_convention=convention) as batch_op:
            if pk_cols is not None:
                batch_op.create_primary_key(table + '_pkey', pk_cols)
            for name, referent, local_cols, remote_cols in fks:
                batch_op.create_foreign_key(name, referent, local_cols, remote_cols)
# 示例#20 (example #20, score 0) — separator artifact from the snippet
# source this file was pasted from; kept as a comment so the file parses.
def downgrade():
    # Revert the OGC server URL to its pre-upgrade value.
    schema = context.get_context().config.get_main_option('schema')

    statement = """
        UPDATE "{schema}".ogc_server
        SET url = 'config://local/mapserv'
        WHERE url = 'config://internal/mapserv'
    """.format(schema=schema)
    op.execute(statement)
def downgrade():
    """Revert the staging_area changes made by the matching upgrade."""
    if context.get_context().dialect.name not in ("sqlite", "mysql"):
        op.drop_constraint("RSE_STAGING_AREA_CHK", "rses", type_="check")
        op.drop_constraint("REQUESTS_TYPE_CHK", "requests", type_="check")
        op.create_check_constraint(
            name="REQUESTS_TYPE_CHK", source="requests", condition="request_type in ('U', 'D', 'T')"
        )
    # BUG FIX: the matching upgrade only adds rses.staging_area on
    # non-SQLite dialects, so dropping it unconditionally fails on SQLite
    # (where the column never existed and DROP COLUMN is unsupported).
    # Mirror the upgrade's guard.
    if context.get_context().dialect.name != "sqlite":
        op.drop_column("rses", "staging_area")
def upgrade():
    # Job table for the lux print proxy: one row per spooled print job.
    target_schema = context.get_context().config.get_main_option("schema")
    op.create_table(
        "lux_print_job",
        Column("id", String(100), primary_key=True),
        Column("spec", Unicode),
        Column("creation", DateTime),
        schema=target_schema,
    )
def upgrade():
    # Registry of available print servers for the lux print proxy.
    target_schema = context.get_context().config.get_main_option("schema")
    op.create_table(
        "lux_print_servers",
        Column("id", Integer, primary_key=True, autoincrement=True, nullable=False),
        Column("url", Unicode, nullable=False),
        Column("creation", DateTime),
        schema=target_schema,
    )
def downgrade():
    # Undo the tsearch extensions: drop the search index, then the columns
    # added by the matching upgrade.
    schema = context.get_context().config.get_main_option('schema')

    op.drop_index('tsearch_search_index', schema=schema)

    for column in ('interface_id', 'lang', 'actions', 'from_theme'):
        op.drop_column('tsearch', column, schema=schema)
def upgrade():
    # Add a time_widget column to every layer table, defaulting existing
    # rows to the 'slider' widget.
    schema = context.get_context().config.get_main_option('schema')

    # Instructions
    layer_tables = ('layerv1', 'layer_internal_wms', 'layer_external_wms')
    for table in layer_tables:
        op.add_column(table, Column('time_widget', Unicode(10), default=u'slider'), schema=schema)
        statement = "UPDATE %(schema)s.%(table)s SET time_widget = 'slider'" % {
            'schema': schema, 'table': table
        }
        op.execute(statement)
def downgrade():
    """Drop the account_attr_map table (constraints first on PostgreSQL)."""
    if context.get_context().dialect.name == 'postgresql':
        # PostgreSQL needs the named constraints/index dropped explicitly
        op.drop_constraint('ACCOUNT_ATTR_MAP_PK', 'account_attr_map', type_='primary')
        for constraint_name in ('ACCOUNT_ATTR_MAP_CREATED_NN',
                                'ACCOUNT_ATTR_MAP_UPDATED_NN',
                                'ACCOUNT_ATTR_MAP_ACCOUNT_FK',
                                'ACCOUNT_ATTR_MAP_RSE_ID_FK'):
            op.drop_constraint(constraint_name, 'account_attr_map')
        op.drop_index('ACCOUNT_ATTR_MAP_KEY_VALUE_IDX', 'account_attr_map')
    op.drop_table('account_attr_map')
def upgrade():
    """Add a time_widget column (defaulting to 'slider') to every layer table."""
    schema = context.get_context().config.get_main_option("schema")

    for layer_table in ("layerv1", "layer_internal_wms", "layer_external_wms"):
        op.add_column(layer_table, Column("time_widget", Unicode(10), default=u"slider"), schema=schema)
        # Existing rows do not receive the column default, so set them here
        op.execute("UPDATE %(schema)s.%(table)s SET time_widget = 'slider'" % {
            "schema": schema, "table": layer_table
        })
def downgrade():
    """Rename the 'desktop' interface back to 'main'."""
    schema = context.get_context().config.get_main_option('schema')

    statement = (
        "UPDATE ONLY {schema}.interface AS i "
        "SET name = 'main' where name = 'desktop'".format(schema=schema)
    )
    op.execute(statement)
def upgrade():
    """Make groups.group_name the primary key, widened to 128 characters.

    On MySQL the foreign keys referencing group_name must be dropped before
    the column is altered and re-created afterwards (with cascade rules).
    """
    c = get_context()
    on_mysql = isinstance(c.connection.engine.dialect, MySQLDialect)

    if on_mysql:
        insp = Inspector.from_engine(c.connection.engine)
        for t in [
            "groups_permissions",
            "groups_resources_permissions",
            "users_groups",
            "resources",
        ]:
            for constraint in insp.get_foreign_keys(t):
                if constraint["referred_columns"] == ["group_name"]:
                    # BUG FIX: Alembic's drop_constraint keyword is ``type_``
                    # (as used elsewhere in this file); ``type`` raises
                    # TypeError.
                    op.drop_constraint(constraint["name"], t, type_="foreignkey")

    op.drop_column("groups", "id")
    op.alter_column(
        "groups", "group_name", type_=sa.Unicode(128), existing_type=sa.Unicode(50)
    )
    op.create_primary_key("groups_pkey", "groups", cols=["group_name"])

    if on_mysql:
        # Re-create the FKs dropped above. All cascade on update; only
        # resources.owner_group_name nulls out on delete.
        fk_specs = [
            ("groups_permissions", "group_name", "CASCADE"),
            ("groups_resources_permissions", "group_name", "CASCADE"),
            ("users_groups", "group_name", "CASCADE"),
            ("resources", "owner_group_name", "SET NULL"),
        ]
        for table, local_col, ondelete in fk_specs:
            op.create_foreign_key(
                None,
                table,
                "groups",
                remote_cols=["group_name"],
                local_cols=[local_col],
                onupdate="CASCADE",
                ondelete=ondelete,
            )
def upgrade():
    """Add a time_widget column (defaulting to 'slider') to each layer table."""
    schema = context.get_context().config.get_main_option('schema')

    backfill = "UPDATE {schema!s}.{table!s} SET time_widget = 'slider'"
    for table in ('layerv1', 'layer_internal_wms', 'layer_external_wms'):
        op.add_column(table, Column('time_widget', Unicode(10), default='slider'), schema=schema)
        # Rows created before the column existed need the value set explicitly
        op.execute(backfill.format(schema=schema, table=table))
# Example #31
# 0
def downgrade_actions(all_commands):
    """Run the downgrade half of *all_commands* in reverse order.

    Each entry's second element may be a raw SQL string, a list of SQL
    strings, or a callable. Foreign-key checks are disabled for the
    duration and re-enabled afterwards.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()
    connection.execute(get_enable_disable_fk_command(False))

    try:
        for cmd in reversed(all_commands):
            action = cmd[1]
            if isinstance(action, str):
                connection.execute(action)
            elif isinstance(action, list):
                for row in action:
                    connection.execute(row)
            else:
                action()
    except Exception:
        session.rollback()
        raise
    finally:
        # BUG FIX: previously this ran only on success, leaving foreign-key
        # checks disabled after a failed downgrade.
        connection.execute(get_enable_disable_fk_command(True))
    session.commit()
def upgrade():
    """Create the coverages table and index it by project name."""
    migration_context = context.get_context()
    # sqlite gets a plain Integer id; other dialects use BigInteger
    # (presumably because sqlite autoincrement needs INTEGER — NOTE(review))
    id_type = sa.Integer if migration_context.dialect.name == 'sqlite' else sa.BigInteger

    op.create_table(
        'coverages',
        sa.Column('id', id_type, autoincrement=True, primary_key=True),
        sa.Column('project_name', sa.String(256), nullable=False),
        sa.Column('coverage_rate', sa.Float()),
        sa.Column('report_time', sa.DateTime()),
        sa.Column('report_time_microsecond', sa.Integer(), default=0),
        mysql_engine='InnoDB',
    )
    op.create_index('ix_project_name', 'coverages', ['project_name'])
def downgrade():
    """Restore treeitem."order" from the per-relation ordering columns."""
    schema = context.get_context().config.get_main_option('schema')
    op.add_column('treeitem', Column('order', Integer), schema=schema)

    # Copy the ordering values back onto treeitem, link table first
    op.execute(
        'UPDATE ONLY %(schema)s.treeitem AS ti SET "order" = lt.ordering '
        'FROM %(schema)s.layergroup_treeitem AS lt WHERE ti.id = lt.treeitem_id ' % {
            'schema': schema
        }
    )
    op.execute(
        'UPDATE ONLY %(schema)s.treeitem AS ti SET "order" = t.ordering '
        'FROM %(schema)s.theme AS t WHERE ti.id = t.id ' % {'schema': schema}
    )

    for table, column in (('theme', 'ordering'),
                          ('layergroup_treeitem', 'ordering'),
                          ('layergroup_treeitem', 'id')):
        op.drop_column(table, column, schema=schema)

    # Restore the original composite primary key
    op.create_primary_key(
        'layergroup_treeitem_pkey', 'layergroup_treeitem',
        ['treegroup_id', 'treeitem_id'], schema=schema
    )
# Example #34
# 0
def downgrade():
    """Drop every application table; dependents before their referents."""
    schema = context.get_context().config.get_main_option("schema")

    for table in ("theme_functionality", "theme", "layergroup_treeitem",
                  "layer_restrictionarea", "layergroup", "user_functionality",
                  "role_functionality", "user", "treegroup", "tsearch",
                  "role_restrictionarea", "layer", "role"):
        op.drop_table(table, schema=schema)
    # shorturl lives in the companion static schema
    op.drop_table("shorturl", schema=schema + "_static")
    for table in ("restrictionarea", "treeitem", "functionality"):
        op.drop_table(table, schema=schema)
# Example #35
# 0
def upgrade():
    '''
    upgrade method

    Creates the account_attr_map table; constraint/index DDL is skipped on
    sqlite, which only gets the bare table.
    '''
    create_table('account_attr_map', sa.Column('account', sa.String(25)),
                 sa.Column('key', sa.String(255)),
                 sa.Column('value', sa.String(255)),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name == 'sqlite':
        return

    create_primary_key('ACCOUNT_ATTR_MAP_PK', 'account_attr_map',
                       ['account', 'key'])
    for name, condition in (('ACCOUNT_ATTR_MAP_CREATED_NN', 'created_at is not null'),
                            ('ACCOUNT_ATTR_MAP_UPDATED_NN', 'updated_at is not null')):
        create_check_constraint(name, 'account_attr_map', condition)
    create_foreign_key('ACCOUNT_ATTR_MAP_ACCOUNT_FK', 'account_attr_map',
                       'accounts', ['account'], ['account'])
    create_index('ACCOUNT_ATTR_MAP_KEY_VALUE_IDX', 'account_attr_map',
                 ['key', 'value'])
# Example #36
# 0
def upgrade():
    '''
    Upgrade the database to this revision

    Creates the transfer_hops table on oracle/mysql/postgresql only.
    '''
    if context.get_context().dialect.name not in ['oracle', 'mysql', 'postgresql']:
        return

    create_table(
        'transfer_hops',
        sa.Column('request_id', GUID()),
        sa.Column('next_hop_request_id', GUID()),
        sa.Column('initial_request_id', GUID()),
        sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
        sa.Column('updated_at', sa.DateTime,
                  default=datetime.datetime.utcnow,
                  onupdate=datetime.datetime.utcnow),
    )

    create_primary_key('TRANSFER_HOPS_PK', 'transfer_hops',
                       ['request_id', 'next_hop_request_id', 'initial_request_id'])
    # Every hop column references requests.id
    for fk_name, column in (('TRANSFER_HOPS_INIT_REQ_ID_FK', 'initial_request_id'),
                            ('TRANSFER_HOPS_REQ_ID_FK', 'request_id'),
                            ('TRANSFER_HOPS_NH_REQ_ID_FK', 'next_hop_request_id')):
        create_foreign_key(fk_name, 'transfer_hops', 'requests', [column], ['id'])
    create_check_constraint('TRANSFER_HOPS_CREATED_NN', 'transfer_hops', 'created_at is not null')
    create_check_constraint('TRANSFER_HOPS_UPDATED_NN', 'transfer_hops', 'updated_at is not null')
    create_index('TRANSFER_HOPS_INITIAL_REQ', 'transfer_hops', ['initial_request_id'])
# Example #37
# 0
def downgrade():
    """Drop every application table; dependents before their referents."""
    schema = context.get_context().config.get_main_option('schema')

    for table in ('theme_functionality', 'theme', 'layergroup_treeitem',
                  'layer_restrictionarea', 'layergroup', 'user_functionality',
                  'role_functionality', 'user', 'treegroup', 'tsearch',
                  'role_restrictionarea', 'layer', 'role'):
        op.drop_table(table, schema=schema)
    # shorturl lives in the companion static schema
    op.drop_table('shorturl', schema=schema + "_static")
    for table in ('restrictionarea', 'treeitem', 'functionality'):
        op.drop_table(table, schema=schema)
def upgrade():
    '''
    upgrade method

    Adds lifecycle timestamp columns and the naming_conventions table;
    skipped entirely on sqlite.
    '''
    if context.get_context().dialect.name == 'sqlite':
        return

    add_column('dids', sa.Column('closed_at', sa.DateTime))
    add_column('contents_history', sa.Column('deleted_at', sa.DateTime))

    create_table('naming_conventions',
                 sa.Column('scope', sa.String(25)),
                 sa.Column('regexp', sa.String(255)),
                 sa.Column('convention_type', sa.String(1)),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))
    create_primary_key('NAMING_CONVENTIONS_PK', 'naming_conventions', ['scope'])
    create_foreign_key('NAMING_CONVENTIONS_SCOPE_FK', 'naming_conventions',
                       'scopes', ['scope'], ['scope'])
    for name, condition in (('NAMING_CONVENTIONS_CREATED_NN', 'created_at is not null'),
                            ('NAMING_CONVENTIONS_UPDATED_NN', 'updated_at is not null')):
        create_check_constraint(name, 'naming_conventions', condition)
def upgrade():
    '''
    Upgrade the database to this revision

    Creates the rse_transfer_limits table on oracle/mysql/postgresql only.
    '''
    if context.get_context().dialect.name not in ['oracle', 'mysql', 'postgresql']:
        return

    create_table(
        'rse_transfer_limits',
        sa.Column('rse_id', GUID()),
        sa.Column('activity', sa.String(50)),
        sa.Column('rse_expression', sa.String(3000)),
        sa.Column('max_transfers', sa.BigInteger),
        sa.Column('transfers', sa.BigInteger),
        sa.Column('waitings', sa.BigInteger),
        sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
        sa.Column('updated_at', sa.DateTime,
                  default=datetime.datetime.utcnow,
                  onupdate=datetime.datetime.utcnow),
    )

    create_primary_key('RSE_TRANSFER_LIMITS_PK', 'rse_transfer_limits', ['rse_id', 'activity'])
    for name, condition in (('RSE_TRANSFER_LIMITS_CREATED_NN', 'created_at is not null'),
                            ('RSE_TRANSFER_LIMITS_UPDATED_NN', 'updated_at is not null')):
        create_check_constraint(name, 'rse_transfer_limits', condition)
    create_foreign_key('RSE_TRANSFER_LIMITS_RSE_ID_FK', 'rse_transfer_limits', 'rses', ['rse_id'], ['id'])
# Example #40
# 0
def upgrade():
    """Replay ``all_commands`` with MySQL foreign-key checks disabled.

    Each entry's first element may be a raw SQL string, a list of SQL
    strings, or a callable.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()

    connection.execute('SET FOREIGN_KEY_CHECKS=0;')
    try:
        for cmd in all_commands:
            action = cmd[0]
            if isinstance(action, str):
                connection.execute(action)
            elif isinstance(action, list):
                for row in action:
                    connection.execute(row)
            else:
                action()
    except Exception:
        session.rollback()
        raise
    finally:
        # BUG FIX: re-enable FK checks even when a command fails; previously
        # an error left the session with FOREIGN_KEY_CHECKS=0.
        connection.execute('SET FOREIGN_KEY_CHECKS=1;')
    session.commit()
def downgrade():
    """Replay the downgrade half of ``all_commands`` in reverse order.

    Each entry's second element may be a raw SQL string, a list of SQL
    strings, or a callable. MySQL foreign-key checks are disabled for the
    duration and re-enabled afterwards.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()
    connection.execute('SET foreign_key_checks = 0;')

    try:
        for cmd in reversed(all_commands):
            action = cmd[1]
            if isinstance(action, str):
                connection.execute(action)
            elif isinstance(action, list):
                for row in action:
                    connection.execute(row)
            else:
                action()
    except Exception:
        session.rollback()
        raise
    finally:
        # BUG FIX: previously this ran only on success, leaving foreign-key
        # checks disabled after a failed downgrade.
        connection.execute('SET foreign_key_checks = 1;')
    session.commit()
def upgrade():
    '''
    upgrade method

    Creates the distances table; constraint/index DDL is skipped on sqlite.
    '''
    create_table('distances',
                 sa.Column('src_rse_id', GUID()),
                 sa.Column('dest_rse_id', GUID()),
                 sa.Column('ranking', sa.Integer),
                 sa.Column('agis_distance', sa.Integer),
                 sa.Column('geoip_distance', sa.Integer),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name == 'sqlite':
        return

    create_primary_key('DISTANCES_PK', 'distances', ['src_rse_id', 'dest_rse_id'])
    # Both endpoints reference rses.id
    for fk_name, column in (('DISTANCES_SRC_RSES_FK', 'src_rse_id'),
                            ('DISTANCES_DEST_RSES_FK', 'dest_rse_id')):
        create_foreign_key(fk_name, 'distances', 'rses', [column], ['id'])
    create_check_constraint('DISTANCES_CREATED_NN', 'distances', 'created_at is not null')
    create_check_constraint('DISTANCES_UPDATED_NN', 'distances', 'updated_at is not null')
    create_index('DISTANCES_DEST_RSEID_IDX', 'distances', ['dest_rse_id'])
def upgrade():
    """Give layergroup_treeitem a surrogate id and move treeitem."order"
    into per-relation ordering columns."""
    schema = context.get_context().config.get_main_option("schema")

    # Replace the composite PK with an autoincrementing id
    op.drop_constraint("layergroup_treeitem_pkey", "layergroup_treeitem",
                       schema=schema)
    op.add_column("layergroup_treeitem",
                  Column("id", Integer, primary_key=True), schema=schema)
    op.add_column("layergroup_treeitem",
                  Column("ordering", Integer), schema=schema)

    copy_link_order = (
        'UPDATE ONLY %(schema)s.layergroup_treeitem AS lt SET ordering = ti."order" '
        'FROM %(schema)s.treeitem AS ti WHERE ti.id = lt.treeitem_id '
    )
    op.execute(copy_link_order % {"schema": schema})

    op.add_column("theme", Column("ordering", Integer), schema=schema)
    copy_theme_order = (
        'UPDATE ONLY %(schema)s.theme AS t SET ordering = ti."order" '
        'FROM %(schema)s.treeitem AS ti WHERE ti.id = t.id '
    )
    op.execute(copy_theme_order % {"schema": schema})

    # The shared treeitem column is no longer needed
    op.drop_column("treeitem", "order", schema=schema)
# Example #44
# 0
def downgrade():
    """Re-create the functionality-related foreign keys without extras."""
    schema = context.get_context().config.get_main_option('schema')

    pairs = (
        ('role_functionality', 'role'),
        ('role_functionality', 'functionality'),
        ('theme_functionality', 'theme'),
        ('theme_functionality', 'functionality'),
    )
    for source, dest in pairs:
        fkey_name = '{}_{}_id_fkey'.format(source, dest)
        op.drop_constraint(fkey_name, source, schema=schema)
        op.create_foreign_key(
            fkey_name,
            source,
            source_schema=schema,
            local_cols=['{}_id'.format(dest)],
            referent_table=dest,
            referent_schema=schema,
            remote_cols=['id'],
        )
# Example #45
# 0
def upgrade():
    """Give layergroup_treeitem a surrogate id and move treeitem."order"
    into per-relation ordering columns."""
    schema = context.get_context().config.get_main_option('schema')

    # Replace the composite PK with an autoincrementing id
    op.drop_constraint('layergroup_treeitem_pkey', 'layergroup_treeitem',
                       schema=schema)
    op.add_column('layergroup_treeitem',
                  Column('id', Integer, primary_key=True), schema=schema)
    op.add_column('layergroup_treeitem',
                  Column('ordering', Integer), schema=schema)

    copy_link_order = (
        'UPDATE ONLY %(schema)s.layergroup_treeitem AS lt SET ordering = ti."order" '
        'FROM %(schema)s.treeitem AS ti WHERE ti.id = lt.treeitem_id '
    )
    op.execute(copy_link_order % {'schema': schema})

    op.add_column('theme', Column('ordering', Integer), schema=schema)
    copy_theme_order = (
        'UPDATE ONLY %(schema)s.theme AS t SET ordering = ti."order" '
        'FROM %(schema)s.treeitem AS ti WHERE ti.id = t.id '
    )
    op.execute(copy_theme_order % {'schema': schema})

    # The shared treeitem column is no longer needed
    op.drop_column('treeitem', 'order', schema=schema)
# Example #46
# 0
# File: command.py  Project: sungitly/isr
def _run_with_alembic_context(func, *args, **kwargs):
    """Invoke *func* inside a fully configured Alembic migration context.

    Builds an EnvironmentContext from the project config, connects an engine
    (NullPool, so no connection is retained afterwards), configures the
    alembic ``context`` on that connection, and calls ``func(*args,
    **kwargs)`` inside a transaction with ``alembic.op`` bound via
    ``Operations.context``. The nesting order of the context managers is
    required by alembic: environment -> connection -> transaction -> ops.
    """
    # Imports are deferred to call time so importing this module does not
    # require alembic/sqlalchemy to be importable.
    from alembic.runtime.environment import EnvironmentContext
    from alembic import context
    from alembic.script import ScriptDirectory
    from sqlalchemy import engine_from_config
    from alembic.operations import Operations
    from sqlalchemy import pool

    config = _get_config()
    script_directory = ScriptDirectory.from_config(config)
    with EnvironmentContext(config, script_directory):
        connectable = engine_from_config(config.get_section(
            config.config_ini_section),
                                         prefix='sqlalchemy.',
                                         poolclass=pool.NullPool)

        with connectable.connect() as connection:
            context.configure(connection=connection)
            with context.begin_transaction():
                with Operations.context(context.get_context()):
                    func(*args, **kwargs)
def downgrade():
    """Replay the downgrade half of ``all_commands`` in reverse order.

    Each entry's second element may be a SQL string (split on ';' and run
    statement by statement), a list of statements, or a callable. Any
    failure rolls back the session and re-raises; otherwise the session is
    committed.
    """
    ctx = context.get_context()
    session = sessionmaker(bind=ctx.bind)()
    connection = session.connection()

    try:
        for cmd in reversed(all_commands):
            if isinstance(cmd[1], str):
                # One string may carry several ';'-separated statements
                cmds = cmd[1].split(';')
                for new_cmd in cmds:
                    if new_cmd.strip():  # skip empty fragments from a trailing ';'
                        connection.execute(new_cmd)
            elif isinstance(cmd[1], list):
                for row in cmd[1]:
                    connection.execute(row)
            else:
                cmd[1]()
    except:
        # NOTE(review): bare except is broad, but it re-raises after the
        # rollback, so nothing is swallowed.
        session.rollback()
        raise
    session.commit()
# Example #48
# 0
def upgrade():
    '''
    Upgrade the database to this revision

    Creates the lifetime_except table on oracle/mysql/postgresql only.
    did_type and state are stored via value-backed enums so the database
    keeps the short enum values rather than the member names.
    '''

    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table(
            'lifetime_except', sa.Column('id', GUID()),
            sa.Column('scope', sa.String(25)),
            sa.Column('name', sa.String(255)),
            sa.Column(
                'did_type',
                sa.Enum(DIDType,
                        name='LIFETIME_EXCEPT_TYPE_CHK',
                        values_callable=lambda obj: [e.value for e in obj])),
            sa.Column('account', sa.String(25)),
            sa.Column('comments', sa.String(4000)),
            sa.Column('pattern', sa.String(255)),
            sa.Column(
                'state',
                sa.Enum(LifetimeExceptionsState,
                        name='LIFETIME_EXCEPT_STATE_CHK',
                        values_callable=lambda obj: [e.value for e in obj])),
            sa.Column('created_at',
                      sa.DateTime,
                      default=datetime.datetime.utcnow),
            sa.Column('updated_at',
                      sa.DateTime,
                      default=datetime.datetime.utcnow,
                      onupdate=datetime.datetime.utcnow),
            sa.Column('expires_at', sa.DateTime))

        # Composite primary key plus NOT NULL guards expressed as named
        # check constraints (portable across the three dialects above).
        create_primary_key('LIFETIME_EXCEPT_PK', 'lifetime_except',
                           ['id', 'scope', 'name', 'did_type', 'account'])
        create_check_constraint('LIFETIME_EXCEPT_SCOPE_NN', 'lifetime_except',
                                'scope is not null')
        create_check_constraint('LIFETIME_EXCEPT_NAME_NN', 'lifetime_except',
                                'name is not null')
        create_check_constraint('LIFETIME_EXCEPT_DID_TYPE_NN',
                                'lifetime_except', 'did_type is not null')
def upgrade():
    """Add taggable.resource_unid plus a uniqueness index, then create one
    '(root)' store per user, owned by a (possibly new) bootstrap
    'initialization' module execution.
    """
    op.add_column('taggable', Column('resource_unid', UnicodeText))
    # Unique per owner + parent; mysql_length caps the indexed prefix so the
    # unbounded text column fits MySQL index-size limits.
    op.create_index('idx_taggable_unid',
                    'taggable',
                    ['owner_id', 'resource_parent_id', 'resource_unid'],
                    unique=True,
                    mysql_length={'resource_unid': 255})

    from bq.data_service.model.tag_model import Taggable, ModuleExecution

    cntxt = context.get_context()
    SessionMaker = sessionmaker(bind=cntxt.bind)
    DBSession = SessionMaker()

    session, request = create_fake_env()

    # Find-or-create the hidden bootstrap mex that owns the new stores.
    initial_mex = DBSession.query(ModuleExecution).filter_by(
        resource_name='initialization').first()
    if initial_mex is None:
        # NOTE(review): owner_id=False / mex_id=False look like sentinel
        # values ("no owner yet") — confirm against the model's semantics.
        initial_mex = ModuleExecution(owner_id=False, mex_id=False)
        initial_mex.mex = initial_mex
        initial_mex.name = "initialization"
        initial_mex.type = "initialization"
        initial_mex.hidden = True
        DBSession.add(initial_mex)
        DBSession.flush()
    request.identity['bisque.mex_id'] = initial_mex.id

    # Top-level user resources have no parent
    users = DBSession.query(Taggable).filter_by(resource_parent_id=None,
                                                resource_type='user')
    for user in users:
        #set_current_user (user.resource_name)
        root_store = Taggable(resource_type='store',
                              owner_id=user.id,
                              mex_id=initial_mex.id)
        root_store.resource_name = '(root)'
        root_store.resource_unid = '(root)'

        DBSession.add(root_store)
    DBSession.flush()
def upgrade():
    """Move events/lessons from a single teacher/tutor column to
    many-to-many association tables, copying the existing assignments.
    """
    cntxt = context.get_context()
    Session = sessionmaker(bind=cntxt.bind)

    op.create_table(u'event_teachers',
                    sa.Column(u'user_id', sa.Integer(), nullable=False),
                    sa.Column(u'event_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['event_id'],
                        [u'events.id'],
                    ), sa.ForeignKeyConstraint(
                        ['user_id'],
                        [u'users.id'],
                    ), sa.PrimaryKeyConstraint(u'user_id', u'event_id'))
    # Seed the new table: each event's single legacy teacher becomes the
    # sole entry in its teachers collection.
    session = Session()
    event_teachers = session.query(Event).all()
    for e in event_teachers:
        e.teachers = [e._teacher]
    session.commit()

    op.create_table(u'lesson_tutors',
                    sa.Column(u'user_id', sa.Integer(), nullable=False),
                    sa.Column(u'lesson_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['lesson_id'],
                        [u'lessons.id'],
                    ), sa.ForeignKeyConstraint(
                        ['user_id'],
                        [u'users.id'],
                    ), sa.PrimaryKeyConstraint(u'user_id', u'lesson_id'))
    # Same migration for lessons: legacy single tutor -> tutors collection.
    session = Session()
    lesson_tutors = session.query(Lesson).all()
    for l in lesson_tutors:
        l.tutors = [l._tutor]
    session.commit()

    # The legacy column is kept but made optional now that tutors live in
    # the association table.
    op.alter_column('lessons',
                    u'tutor_id',
                    existing_type=sa.Integer(),
                    nullable=True)
def downgrade():
    '''
    Downgrade the database to the previous revision

    Drops rules.ignore_account_limit and restores the four-state
    RULES_STATE_CHK constraint. Each dialect (and MySQL major version)
    needs a different constraint-replacement recipe.
    '''

    # schema is either "" or "name." so it can be prefixed directly onto
    # table names in raw SQL below.
    schema = context.get_context(
    ).version_table_schema + '.' if context.get_context(
    ).version_table_schema else ''

    if context.get_context().dialect.name == 'oracle':
        drop_column('rules', 'ignore_account_limit')
        drop_constraint('RULES_STATE_CHK', 'rules')
        create_check_constraint('RULES_STATE_CHK', 'rules',
                                "state IN ('S', 'R', 'U', 'O')")

    elif context.get_context().dialect.name == 'postgresql':
        # schema[:-1] strips the trailing '.' added above
        drop_column('rules', 'ignore_account_limit', schema=schema[:-1])
        op.execute(
            'ALTER TABLE ' + schema +
            'rules DROP CONSTRAINT IF EXISTS "RULES_STATE_CHK", ALTER COLUMN state TYPE CHAR'
        )  # pylint: disable=no-member
        create_check_constraint('RULES_STATE_CHK', 'rules',
                                "state IN ('S', 'R', 'U', 'O')")

    elif context.get_context().dialect.name == 'mysql' and context.get_context(
    ).dialect.server_version_info[0] == 5:
        # MySQL 5 ignores CHECK constraints, so there is nothing to drop
        drop_column('rules', 'ignore_account_limit', schema=schema[:-1])
        create_check_constraint('RULES_STATE_CHK', 'rules',
                                "state IN ('S', 'R', 'U', 'O')")

    elif context.get_context().dialect.name == 'mysql' and context.get_context(
    ).dialect.server_version_info[0] == 8:
        # MySQL 8 enforces CHECK constraints and needs DROP CHECK syntax
        drop_column('rules', 'ignore_account_limit', schema=schema[:-1])
        op.execute('ALTER TABLE ' + schema +
                   'rules DROP CHECK RULES_STATE_CHK')  # pylint: disable=no-member
        create_check_constraint('RULES_STATE_CHK', 'rules',
                                "state IN ('S', 'R', 'U', 'O')")
# Example #52
# 0
def upgrade():
    """Rename test_metadata.test_run_id to test_id."""
    migration_context = context.get_context()
    insp = reflection.Inspector(migration_context.bind)
    indx_names = [x['name'] for x in insp.get_indexes('test_metadata')]
    dialect_name = migration_context.dialect.name

    # Prempt duplicate index creation on sqlite
    if dialect_name == 'sqlite' and 'ix_test_key_value' in indx_names:
        op.drop_index('ix_test_key_value', 'test_metadata')

    # NOTE(mtreinish) on some mysql versions renaming the column with a fk
    # constraint errors out so, delete it before the rename and add it back
    # after
    on_mysql = dialect_name == 'mysql'
    if on_mysql:
        op.drop_constraint('test_metadata_ibfk_1', 'test_metadata',
                           'foreignkey')

    with op.batch_alter_table('test_metadata') as batch_op:
        batch_op.alter_column('test_run_id',
                              existing_type=sa.String(36),
                              existing_nullable=False,
                              new_column_name='test_id')

    if on_mysql:
        op.create_foreign_key('test_metadata_ibfk_1', 'test_metadata', 'tests',
                              ["test_id"], ['id'])
# Example #53
# 0
def upgrade():
    '''
    upgrade method

    Creates the collection_replicas table; constraint/index DDL is skipped
    on sqlite.
    '''
    create_table('collection_replicas',
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('did_type', DIDType.db_type(name='COLLECTION_REPLICAS_TYPE_CHK')),
                 sa.Column('rse_id', GUID()),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('length', sa.BigInteger),
                 sa.Column('state',
                           ReplicaState.db_type(name='COLLECTION_REPLICAS_STATE_CHK'),
                           default=ReplicaState.UNAVAILABLE),
                 sa.Column('accessed_at', sa.DateTime),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name == 'sqlite':
        return

    create_primary_key('COLLECTION_REPLICAS_PK', 'collection_replicas', ['scope', 'name', 'rse_id'])
    create_foreign_key('COLLECTION_REPLICAS_LFN_FK', 'collection_replicas', 'dids',
                       ['scope', 'name'], ['scope', 'name'])
    create_foreign_key('COLLECTION_REPLICAS_RSE_ID_FK', 'collection_replicas', 'rses',
                       ['rse_id'], ['id'])
    create_check_constraint('COLLECTION_REPLICAS_SIZE_NN', 'collection_replicas', 'bytes IS NOT NULL')
    create_check_constraint('COLLECTION_REPLICAS_STATE_NN', 'collection_replicas', 'state IS NOT NULL')
    create_index('COLLECTION_REPLICAS_RSE_ID_IDX', 'collection_replicas', ['rse_id'])
# Example #54
# 0
def upgrade():
    """Add theme.public and the restricted_role_theme association table."""
    schema = context.get_context().config.get_main_option('schema')

    engine = op.get_bind().engine
    # Idempotence: skip the whole migration when the table already exists
    if op.get_context().dialect.has_table(
        engine, 'restricted_role_theme', schema=schema
    ):  # pragma: nocover
        return

    op.add_column(
        'theme',
        Column('public', Boolean, server_default='t', nullable=False),
        schema=schema,
    )
    op.create_table(
        'restricted_role_theme',
        Column('role_id', Integer, ForeignKey(schema + '.role.id'), primary_key=True),
        Column('theme_id', Integer, ForeignKey(schema + '.theme.id'), primary_key=True),
        schema=schema,
    )
def upgrade():
    '''
    Upgrade the database to this revision

    Creates the sources table on oracle/mysql/postgresql only.
    '''
    if context.get_context().dialect.name not in ['oracle', 'mysql', 'postgresql']:
        return

    create_table(
        'sources',
        sa.Column('request_id', GUID()),
        sa.Column('scope', sa.String(25)),
        sa.Column('name', sa.String(255)),
        sa.Column('rse_id', GUID()),
        sa.Column('dest_rse_id', GUID()),
        sa.Column('url', sa.String(2048)),
        sa.Column('ranking', sa.Integer),
        sa.Column('bytes', sa.BigInteger),
        sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
        sa.Column('updated_at', sa.DateTime,
                  default=datetime.datetime.utcnow,
                  onupdate=datetime.datetime.utcnow),
    )

    create_primary_key('SOURCES_PK', 'sources',
                       ['request_id', 'rse_id', 'scope', 'name'])
    create_foreign_key('SOURCES_REQ_ID_FK', 'sources', 'requests',
                       ['request_id'], ['id'])
    create_foreign_key('SOURCES_REPLICA_FK', 'sources', 'replicas',
                       ['scope', 'name', 'rse_id'],
                       ['scope', 'name', 'rse_id'])
    create_foreign_key('SOURCES_RSES_FK', 'sources', 'rses',
                       ['rse_id'], ['id'])
    create_foreign_key('SOURCES_DST_RSES_FK', 'sources', 'rses',
                       ['dest_rse_id'], ['id'])
    create_check_constraint('SOURCES_CREATED_NN', 'sources',
                            'created_at is not null')
    create_check_constraint('SOURCES_UPDATED_NN', 'sources',
                            'updated_at is not null')
    create_index('SOURCES_SRC_DST_IDX', 'sources',
                 ['rse_id', 'dest_rse_id'])
def downgrade():
    """Re-create the inheritance foreign keys for layer/theme tables."""
    schema = context.get_context().config.get_main_option('schema')

    for source, dest in (
        ('layer_wmts', 'layer'),
        ('layerv1', 'layer'),
        ('theme', 'treegroup'),
    ):
        fkey_name = '{}_id_fkey'.format(source)
        op.drop_constraint(fkey_name, source, schema=schema)
        op.create_foreign_key(
            fkey_name,
            source,
            source_schema=schema,
            local_cols=['id'],
            referent_table=dest,
            referent_schema=schema,
            remote_cols=['id'],
        )
# Example #57
# 0
def upgrade():
    """Enforce lower-case naming on PostgreSQL only.

    Adds a unique functional index on lower(group_name), CHECK constraints
    requiring permission names to already be lower case, and functional
    indexes on lower(email) / lower(user_name).  Non-PostgreSQL dialects
    are left untouched.
    """
    ctx = get_context()
    if not isinstance(ctx.connection.engine.dialect, PGDialect):
        return

    # Raw SQL: functional indexes and expression CHECK constraints are
    # PostgreSQL-specific, so they are issued directly.
    statements = (
        """
        CREATE UNIQUE INDEX groups_unique_group_name_key
          ON groups
          USING btree
          (lower(group_name::text));
          """,
        """
        ALTER TABLE groups_permissions
            ADD CONSTRAINT groups_permissions_perm_name_check CHECK (perm_name::text = lower(perm_name::text));
        """,
        """
        ALTER TABLE groups_resources_permissions
              ADD CONSTRAINT groups_resources_permissions_perm_name_check CHECK (
                    perm_name::text = lower(perm_name::text)
              );
        """,
        """
        ALTER TABLE users_permissions
          ADD CONSTRAINT user_permissions_perm_name_check CHECK (perm_name::text = lower(perm_name::text));
        """,
        """
        ALTER TABLE users_resources_permissions
          ADD CONSTRAINT users_resources_permissions_perm_name_check CHECK (perm_name::text = lower(perm_name::text));
        """,
        """
        CREATE INDEX users_email_key2 ON users (lower(email::text));
        """,
        """
        CREATE INDEX users_username_uq2 ON users (lower(user_name::text));
        """,
    )
    for sql in statements:
        op.execute(sql)
# 示例#58 (Example #58) — scraped snippet separator; score: 0
def upgrade():
    """Upgrade the database to this revision.

    Creates the ``bad_replicas`` table with its composite primary key,
    NOT NULL check constraints, account foreign key and state index.
    Only runs on oracle/mysql/postgresql; other dialects are skipped.
    """
    if context.get_context().dialect.name not in ('oracle', 'mysql', 'postgresql'):
        return

    create_table(
        'bad_replicas',
        sa.Column('scope', sa.String(25)),
        sa.Column('name', sa.String(255)),
        sa.Column('rse_id', GUID()),
        sa.Column('reason', sa.String(255)),
        sa.Column('state', sa.String(1)),
        sa.Column('account', sa.String(25)),
        sa.Column('created_at', sa.DateTime, default=datetime.datetime.utcnow),
        sa.Column('updated_at', sa.DateTime,
                  default=datetime.datetime.utcnow,
                  onupdate=datetime.datetime.utcnow),
    )

    # created_at participates in the primary key alongside the replica id.
    create_primary_key('BAD_REPLICAS_STATE_PK', 'bad_replicas',
                       ['scope', 'name', 'rse_id', 'created_at'])

    for constraint_name, condition in (
        ('BAD_REPLICAS_SCOPE_NN', 'scope is not null'),
        ('BAD_REPLICAS_NAME_NN', 'name is not null'),
        ('BAD_REPLICAS_RSE_ID_NN', 'rse_id is not null'),
    ):
        create_check_constraint(constraint_name, 'bad_replicas', condition)

    create_foreign_key('BAD_REPLICAS_ACCOUNT_FK', 'bad_replicas', 'accounts',
                       ['account'], ['account'])
    create_index('BAD_REPLICAS_STATE_IDX', 'bad_replicas', ['rse_id', 'state'])
def upgrade():
    """Create the ``lux_layer_internal_wms`` and ``lux_layer_external_wms``
    tables.  Each one extends its base layer table via a cascading foreign
    key on ``id``, which is also the primary key."""
    schema = context.get_context().config.get_main_option('schema')

    op.create_table(
        'lux_layer_internal_wms',
        sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('url', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('layers', sa.VARCHAR(length=1000),
                  autoincrement=False, nullable=True),
        sa.Column('is_poi', sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.Column('collection_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['id'], [schema + '.layer_internal_wms.id'],
            name='lux_layer_internal_wms_fk1',
            onupdate='CASCADE',
            ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name='lux_layer_internal_wms_pkey'),
        schema=schema)

    op.create_table(
        'lux_layer_external_wms',
        sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column('category_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['id'], [schema + '.layer_external_wms.id'],
            name='lux_layer_external_wms_fk1',
            onupdate='CASCADE',
            ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name='lux_layer_external_wms_pkey'),
        schema=schema)
# 示例#60 (Example #60) — scraped snippet separator; score: 0
def upgrade():
    '''
    Upgrade the database to this revision

    Creates the collection_replicas table together with its composite
    primary key, foreign keys to dids/rses, NOT NULL and state CHECK
    constraints, and an index on rse_id.  Only runs on the listed
    dialects; anything else (e.g. sqlite) is a no-op.
    '''

    if context.get_context().dialect.name in ['oracle', 'mysql', 'postgresql']:
        create_table(
            'collection_replicas', sa.Column('scope', sa.String(25)),
            sa.Column('name', sa.String(255)),
            sa.Column('did_type', DIDType.db_type()),
            sa.Column('rse_id', GUID()), sa.Column('bytes', sa.BigInteger),
            sa.Column('length', sa.BigInteger),
            sa.Column('state',
                      ReplicaState.db_type(),
                      default=ReplicaState.UNAVAILABLE),
            sa.Column('accessed_at', sa.DateTime),
            sa.Column('created_at',
                      sa.DateTime,
                      default=datetime.datetime.utcnow),
            sa.Column('updated_at',
                      sa.DateTime,
                      default=datetime.datetime.utcnow,
                      onupdate=datetime.datetime.utcnow))

        # One row per (scope, name, rse_id) — the composite primary key.
        create_primary_key('COLLECTION_REPLICAS_PK', 'collection_replicas',
                           ['scope', 'name', 'rse_id'])
        # (scope, name) must exist in dids; rse_id must exist in rses.
        create_foreign_key('COLLECTION_REPLICAS_LFN_FK', 'collection_replicas',
                           'dids', ['scope', 'name'], ['scope', 'name'])
        create_foreign_key('COLLECTION_REPLICAS_RSE_ID_FK',
                           'collection_replicas', 'rses', ['rse_id'], ['id'])
        create_check_constraint('COLLECTION_REPLICAS_SIZE_NN',
                                'collection_replicas', 'bytes IS NOT NULL')
        create_check_constraint('COLLECTION_REPLICAS_STATE_NN',
                                'collection_replicas', 'state IS NOT NULL')
        # state restricted to single-letter codes; presumably the
        # ReplicaState enum values — confirm against the model definition.
        create_check_constraint('COLLECTION_REPLICAS_STATE_CHK',
                                'collection_replicas',
                                "state in ('A', 'U', 'C', 'B', 'D', 'S')")
        create_index('COLLECTION_REPLICAS_RSE_ID_IDX', 'collection_replicas',
                     ['rse_id'])