def upgrade():
    # Move the "default aspect" marker from locations.default_aspect_id onto a
    # boolean aspects.is_default column, and convert the aspect coordinate
    # columns from text to double precision. Requires a live DB connection.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    conn = op.get_bind()
    # Remember which aspect was the default before the column is dropped below.
    default_aspect_id = conn.execute('SELECT default_aspect_id FROM roombooking.locations WHERE is_default').scalar()
    # Add is_default with a temporary server default so existing rows satisfy
    # NOT NULL, then drop the default so future inserts must set it explicitly.
    op.add_column('aspects', sa.Column('is_default', sa.Boolean, nullable=False, server_default='false'),
                  schema='roombooking')
    op.alter_column('aspects', 'is_default', server_default=None, schema='roombooking')
    # Coordinates were stored as strings; cast them in place via USING clauses.
    op.alter_column('aspects', 'top_left_latitude', type_=sa.Float, schema='roombooking',
                    postgresql_using='top_left_latitude::double precision')
    op.alter_column('aspects', 'top_left_longitude', type_=sa.Float, schema='roombooking',
                    postgresql_using='top_left_longitude::double precision')
    op.alter_column('aspects', 'bottom_right_latitude', type_=sa.Float, schema='roombooking',
                    postgresql_using='bottom_right_latitude::double precision')
    op.alter_column('aspects', 'bottom_right_longitude', type_=sa.Float, schema='roombooking',
                    postgresql_using='bottom_right_longitude::double precision')
    # Partial unique index: at most one row may have is_default = true.
    op.create_index(None, 'aspects', ['is_default'], unique=True, schema='roombooking',
                    postgresql_where=sa.text('is_default'))
    # Columns superseded by the new model.
    op.drop_column('aspects', 'center_latitude', schema='roombooking')
    op.drop_column('aspects', 'location_id', schema='roombooking')
    op.drop_column('aspects', 'zoom_level', schema='roombooking')
    op.drop_column('aspects', 'center_longitude', schema='roombooking')
    op.drop_column('locations', 'default_aspect_id', schema='roombooking')
    # Re-apply the previously recorded default aspect using the new flag.
    if default_aspect_id is not None:
        conn.execute('UPDATE roombooking.aspects SET is_default = true WHERE id = %s', (default_aspect_id,))
def upgrade():
    # Replace the serialized 'principal' column on blocking_principals with
    # explicit type/user/group columns; requires row rewrites, so online only.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    conn = op.get_bind()
    # New columns start out nullable; they are backfilled from the old data below.
    op.add_column('blocking_principals', sa.Column('local_group_id', sa.Integer(), nullable=True),
                  schema='roombooking')
    op.add_column('blocking_principals', sa.Column('mp_group_name', sa.String(), nullable=True),
                  schema='roombooking')
    op.add_column('blocking_principals', sa.Column('mp_group_provider', sa.String(), nullable=True),
                  schema='roombooking')
    op.add_column('blocking_principals', sa.Column('type', PyIntEnum(PrincipalType), nullable=True),
                  schema='roombooking')
    op.add_column('blocking_principals', sa.Column('user_id', sa.Integer(), nullable=True), schema='roombooking')
    # Translate each stored principal into the new column values.
    res = conn.execute("SELECT id, principal FROM roombooking.blocking_principals")
    for id_, principal in res:
        args = _principal_to_args(principal)
        conn.execute("UPDATE roombooking.blocking_principals SET type = %s, user_id = %s, local_group_id = %s, "
                     "mp_group_provider = %s, mp_group_name = %s WHERE id = %s", args + (id_,))
    # The old serialized representation is no longer needed.
    op.drop_column('blocking_principals', 'principal', schema='roombooking')
    op.create_index(None, 'blocking_principals', ['local_group_id'], schema='roombooking')
    op.create_index(None, 'blocking_principals', ['user_id'], schema='roombooking')
    op.create_foreign_key(None,
                          'blocking_principals', 'users',
                          ['user_id'], ['id'],
                          source_schema='roombooking', referent_schema='users')
    op.create_foreign_key(None,
                          'blocking_principals', 'groups',
                          ['local_group_id'], ['id'],
                          source_schema='roombooking', referent_schema='users')
def upgrade():
    # Convert the legacy string id columns of the category tables to integers.
    # Data must already be numeric; the importer check below enforces that.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    if _has_legacy_ids('categories.category_index', 'id') or _has_legacy_ids('users.favorite_categories', 'target_id'):
        raise Exception('Please run the legacy_categories zodb importer first.')
    for table, column in (('categories.category_index', 'id'),
                          ('users.favorite_categories', 'target_id')):
        op.execute('ALTER TABLE {0} ALTER COLUMN {1} TYPE int USING {1}::int'.format(table, column))
Code example #4
0
File: __init__.py  Project: VeenaSL/sriov
def create_table_if_not_exist(table_name, *args, **kwargs):
    # Create *table_name* unless it is already present. Only online mode can
    # reflect the database; in offline mode the CREATE is emitted unconditionally.
    if not context.is_offline_mode():
        inspector = sa.engine.reflection.Inspector.from_engine(op.get_bind())
        existing_tables = inspector.get_table_names()
        if table_name in existing_tables:
            return

    op.create_table(table_name, *args, **kwargs)
def upgrade():
    # Ensure the indico_unaccent() function lives in the 'indico' schema,
    # moving it there from wherever it currently resides.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    bind = op.get_bind()
    result = bind.execute("SELECT routine_schema FROM information_schema.routines WHERE routine_name = 'indico_unaccent'")
    current_schema = result.fetchone()[0]
    if current_schema != 'indico':
        op.execute("ALTER FUNCTION {}.indico_unaccent(TEXT) SET SCHEMA indico".format(current_schema))
def upgrade():
    # Convert the legacy string event id columns to integers; all values must
    # already be numeric, which the importer check below guarantees.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    if (_has_legacy_ids('events.settings', 'event_id') or _has_legacy_ids('events.settings_principals', 'event_id') or
            _has_legacy_ids('events.event_index', 'id')):
        raise Exception('Please run the legacy_events zodb importer first.')
    for table, column in (('events.event_index', 'id'),
                          ('events.settings', 'event_id'),
                          ('events.settings_principals', 'event_id')):
        op.execute('ALTER TABLE {0} ALTER COLUMN {1} TYPE int USING {1}::int'.format(table, column))
Code example #7
0
File: migrations.py  Project: Gr1N/wuffi
def run():
    """Run migrations, dispatching to offline or online mode as appropriate."""
    logging.config.dictConfig(MIGRATIONS_LOGGING)

    runner = run_offline if context.is_offline_mode() else run_online
    runner(context)
def upgrade():
    # Insert each SEO setting that is not already present in the settings table.
    if context.is_offline_mode():
        raise AssertionError('This migration can not be run in offline mode.')
    connection = context.get_context().connection

    for key, value in SEO_SETTINGS:
        existing = connection.execute(settings.select(settings.c.key == key)).first()
        if existing is None:
            insert_setting(key, value)
def upgrade():
    # Create the search indexes on the user-related text columns.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')

    for table, column in (('users', 'first_name'),
                          ('users', 'last_name'),
                          ('users', 'phone'),
                          ('users', 'address'),
                          ('affiliations', 'name'),
                          ('emails', 'email')):
        _create_index(table, column)
def upgrade():
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')

    if has_extension(op.get_bind(), 'unaccent'):
        print 'Unaccent extension is available - indico_unaccent will use it'
        op.execute(SQL_FUNCTION_UNACCENT)
    else:
        print 'Unaccent extension is NOT available - indico_unaccent will not touch its argument'
        op.execute(SQL_FUNCTION_NOOP)
def upgrade():
    # Sanity check (online only): the categories table must be populated by the
    # zodb import before the foreign key can be added.
    if not context.is_offline_mode():
        conn = op.get_bind()
        row = conn.execute("SELECT EXISTS (SELECT 1 FROM categories.categories)").fetchone()
        if not row[0]:
            raise Exception('Upgrade to {} and run the category zodb import first!'.format(down_revision))
    op.create_foreign_key(None,
                          'favorite_categories', 'categories',
                          ['target_id'], ['id'],
                          source_schema='users', referent_schema='categories')
Code example #12
0
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###

    # We need to create the enum first otherwise SQLAlchemy complains about it
    # not existing and the migration fails
    icon_types = sa.Enum('finance', 'human-resources', 'logistics', 'call-centre', 'web-store', 'operations', 'it', name='icons')
    # Probe for an existing type only when online; offline mode cannot inspect
    # the database, so there the CREATE TYPE is emitted unconditionally.
    # (Idiom fix: was `False if context.is_offline_mode() else True`.)
    check = not context.is_offline_mode()
    icon_types.create(op.get_bind(), checkfirst=check)

    op.add_column('applications', sa.Column('icon', icon_types))
    op.add_column('departments', sa.Column('icon', icon_types))
Code example #13
0
def downgrade():
    if context.is_offline_mode():
        raise RuntimeError("This migration script cannot be run in offline mode.")

    # drop the report table on all the existing bauble schemas
    stmt = "select schema_name from information_schema.schemata where schema_name like 'bbl_%%';"
    for result in db.engine.execute(stmt):
        schema = result[0]
        # BUG FIX: the condition was inverted - it skipped schemas where the
        # table exists and tried to drop it where it did not (cf. the matching
        # upgrade, which skips creation when the table already exists).
        # Drop only when the table is actually present.
        if not table_exists('report', schema):
            continue
        print("dropping table: {}.report".format(schema))
        op.drop_table("report", schema=schema)
Code example #14
0
def downgrade():
    # Revert the user id columns from native UUIDs back to Unicode(255)
    # strings, and rewrite the stored values back to plain integers.
    # (Comment fixed: the previous one described the upgrade direction.)
    if op.get_context().dialect.name == 'sqlite':
        pass # No difference between varchar and char in SQLite
    elif op.get_context().dialect.name == 'postgresql':
        # FKs referencing user.id must be dropped before the type change.
        drop_user_id_fkeys()
        for table, col in ( ("user", "id"),
                            ("sender", "user_id"),
                            ("vote", "user_id") ):
            op.alter_column(table, col, type_=sa.Unicode(255),
                            existing_type=types.UUID,
                            existing_nullable=False)
        # Need cascade for data conversion below, it will be removed by the
        # last operation (or the loop on FKEYS_CASCADE if offline).
        create_user_id_fkeys("CASCADE")
    else:
        # Untested on other engines
        for table, col in ( ("user", "id"),
                            ("sender", "user_id"),
                            ("vote", "user_id") ):
            op.alter_column(table, col, type_=sa.Unicode(255),
                            existing_type=types.UUID,
                            existing_nullable=False)
    if not context.is_offline_mode():
        connection = op.get_bind()
        # Create a new MetaData instance here because the data is UUIDs and we
        # want to convert to simple strings
        metadata = sa.MetaData()
        metadata.bind = connection
        User = Base.metadata.tables["user"].tometadata(metadata)
        # Override the id column as plain Unicode so both old and new values
        # can be read/written without UUID validation.
        User = sa.Table("user", metadata,
            sa.Column("id", sa.Unicode(255), primary_key=True),
            extend_existing=True)
        transaction = connection.begin()
        for user in User.select().execute():
            try:
                # UUID(...).int is the 128-bit integer form of the UUID string.
                new_user_id = UUID(user.id).int
            except ValueError:
                continue # Already converted
            # The temporary ON UPDATE CASCADE (created above) propagates the
            # rewritten id to the referencing sender/vote rows.
            User.update().where(
                User.c.id == user.id
            ).values(id=new_user_id).execute()
        transaction.commit()
        if connection.dialect.name != "sqlite":
            drop_user_id_fkeys()
            create_user_id_fkeys(None)
    # Now remove onupdate=CASCADE from some foreign keys
    rebuild_fkeys(None)
Code example #15
0
def upgrade():
    # Rewrite integer user ids as canonical UUID strings, then switch the
    # columns to a native UUID type on PostgreSQL (or keep strings elsewhere).
    # Convert existing data into UUID strings
    if not context.is_offline_mode():
        connection = op.get_bind()
        # Create a new MetaData instance here because the data is not proper
        # UUIDs yet so it'll error out.
        metadata = sa.MetaData()
        metadata.bind = connection
        User = Base.metadata.tables["user"].tometadata(metadata)
        # Override the id column as plain Unicode so pre-conversion values can
        # be read and written without UUID validation.
        User = sa.Table("user", metadata,
            sa.Column("id", sa.Unicode(255), primary_key=True),
            extend_existing=True)
        if connection.dialect.name != "sqlite":
            # Temporary ON UPDATE CASCADE so the id rewrite below propagates
            # to the referencing tables.
            drop_user_id_fkeys()
            create_user_id_fkeys("CASCADE")
        transaction = connection.begin()
        for user in User.select().execute():
            try:
                # Build the canonical UUID string from the stored integer id.
                new_user_id = unicode(UUID(int=int(user.id)))
            except ValueError:
                continue # Already converted
            User.update().where(
                User.c.id == user.id
            ).values(id=new_user_id).execute()
        transaction.commit()
    # Convert to UUID for PostreSQL or to CHAR(32) for others
    if op.get_context().dialect.name == 'sqlite':
        pass # No difference between varchar and char in SQLite
    elif op.get_context().dialect.name == 'postgresql':
        # FKs referencing user.id must be dropped before the type change.
        drop_user_id_fkeys()
        for table, col in ( ("user", "id"),
                            ("sender", "user_id"),
                            ("vote", "user_id") ):
            # Raw SQL: the USING clause is needed to cast text to uuid.
            op.execute('''
                ALTER TABLE "{table}"
                    ALTER COLUMN {col} TYPE UUID USING {col}::uuid
                '''.format(table=table, col=col))
        create_user_id_fkeys("CASCADE")
    else:
        # Untested on other engines
        for table, col in ( ("user", "id"),
                            ("sender", "user_id"),
                            ("vote", "user_id") ):
            op.alter_column(table, col, type_=types.UUID,
                            existing_type=sa.Unicode(255),
                            existing_nullable=False)
    # Now add onupdate=CASCADE to some foreign keys.
    rebuild_fkeys("CASCADE")
Code example #16
0
def upgrade():
    # Move BLOB contents out of the database into files under the configured
    # blob directory, leaving only the relative file path in the table.

    if context.is_offline_mode():
        raise Exception('Cannot migrate blob files in offline mode')

    base_dir = os.path.normpath(context.config.get_section_option('app:main', 'studies.blob.dir'))
    conn = op.get_bind()

    # Start from an empty directory; any previous (partial) contents are discarded.
    if os.path.isdir(base_dir):
        shutil.rmtree(base_dir)
    os.mkdir(base_dir)

    for table_name in ('value_blob', 'value_blob_audit'):
        op.add_column(table_name, sa.Column('file_name', sa.Unicode))
        op.add_column(table_name, sa.Column('mime_type', sa.String))

    # Temporary column holding the new path until the old 'value' is replaced.
    op.add_column('value_blob', sa.Column('placeholder_path', sa.String))

    for row in conn.execute('SELECT * FROM value_blob WHERE value IS NOT NULL').fetchall():
        # Nest each file under directories derived from a random UUID's
        # dash-separated segments, avoiding one huge flat directory.
        relative_path = os.path.join(*str(uuid.uuid4()).split('-'))
        absolute_path = os.path.join(base_dir, relative_path)
        os.makedirs(os.path.dirname(absolute_path))
        with open(absolute_path, 'w+b') as fp:
            shutil.copyfileobj(six.BytesIO(row.value), fp)
        conn.execute(sa.text(
            """
            UPDATE value_blob
            SET placeholder_path = :path,
                mime_type = :mime_type,
                file_name = :file_name
            WHERE id = :id
            """),
            id=row.id,
            file_name=os.path.basename(absolute_path),
            # `file -b -i` sniffs the MIME type from the file contents.
            mime_type=check_output(['file', '-b', '-i', absolute_path]).strip(),
            path=relative_path)

    # Re-create 'value' as a string column in both tables; the audit table's
    # old blob values are dropped without replacement.
    for table_name in ('value_blob', 'value_blob_audit'):
        op.drop_column(table_name, 'value')
        op.add_column(table_name, sa.Column('value', sa.String))

    conn.execute('UPDATE value_blob SET value = placeholder_path')
    op.drop_column('value_blob', 'placeholder_path')

    op.create_check_constraint(
        'ck_name_has_value',
        'value_blob',
        'CASE WHEN value IS NOT NULL THEN file_name IS NOT NULL END')
def upgrade():
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')

    has_trgm = has_extension(op.get_bind(), 'pg_trgm')
    if has_trgm:
        print 'pg_trgm extension is available - creating trigram indexes'
    else:
        print 'pg_trgm extension is not available - creating normal indexes'

    _create_index(has_trgm, 'users', 'first_name')
    _create_index(has_trgm, 'users', 'last_name')
    _create_index(has_trgm, 'users', 'phone')
    _create_index(has_trgm, 'users', 'address')
    _create_index(has_trgm, 'affiliations', 'name')
    _create_index(has_trgm, 'emails', 'email')
Code example #18
0
def upgrade():
    # This migration has to run on a live database and can't be used to
    # generate sql statements.
    if context.is_offline_mode():
        raise RuntimeError("This migration script cannot be run in offline mode.")

    # create the report table on all the existing bauble schemas
    stmt = "select schema_name from information_schema.schemata where schema_name like 'bbl_%%';"
    for row in db.engine.execute(stmt):
        schema = row[0]
        # Skip schemas that already have the table.
        if table_exists('report', schema):
            continue

        column_copies = [column.copy() for column in sa.inspect(Report).columns]
        print("creating table: {}.report".format(schema))
        op.create_table("report", *column_copies, schema=schema)
Code example #19
0
def upgrade():
    if not context.is_offline_mode():
        # sanity check to avoid running w/o abstracts migrated
        conn = op.get_bind()
        has_new_abstracts = conn.execute("SELECT EXISTS (SELECT 1 FROM event_abstracts.abstracts)").fetchone()[0]
        has_old_abstracts = (conn.execute("SELECT EXISTS (SELECT 1 FROM event_abstracts.legacy_abstracts)")
                             .fetchone())[0]
        if has_new_abstracts != has_old_abstracts:
            raise Exception('Upgrade to {} and run the event_abstracts zodb import first!'.format(down_revision))
    # Both FKs point at event_abstracts.abstracts(id); only the source differs.
    for source_table, source_schema in (('abstract_field_values', 'event_abstracts'),
                                        ('contributions', 'events')):
        op.create_foreign_key(None,
                              source_table, 'abstracts',
                              ['abstract_id'], ['id'],
                              source_schema=source_schema, referent_schema='event_abstracts')
Code example #20
0
File: heal_script.py  Project: AsherBond/quantum
def heal():
    # Reconcile the live database schema with the frozen model metadata by
    # computing a diff and applying an alembic command for each difference.
    # This is needed else the heal script will start spewing
    # a lot of pointless warning messages from alembic.
    LOG.setLevel(logging.INFO)
    # Nothing to compare against in offline mode - reflection is impossible.
    if context.is_offline_mode():
        return
    models_metadata = frozen_models.get_metadata()
    # Compare metadata from models and metadata from migrations
    # Diff example:
    # [ ( 'add_table',
    #      Table('bat', MetaData(bind=None),
    #            Column('info', String(), table=<bat>), schema=None)),
    # ( 'remove_table',
    #   Table(u'bar', MetaData(bind=None),
    #         Column(u'data', VARCHAR(), table=<bar>), schema=None)),
    # ( 'add_column',
    #    None,
    #   'foo',
    #   Column('data', Integer(), table=<foo>)),
    # ( 'remove_column',
    #   None,
    #  'foo',
    #  Column(u'old_data', VARCHAR(), table=None)),
    # [ ( 'modify_nullable',
    #     None,
    #     'foo',
    #     u'x',
    #     { 'existing_server_default': None,
    #     'existing_type': INTEGER()},
    #     True,
    #     False)]]
    # Custom comparators make type/server-default comparison less noisy.
    opts = {
        'compare_type': _compare_type,
        'compare_server_default': _compare_server_default,
    }
    mc = alembic.migration.MigrationContext.configure(op.get_bind(), opts=opts)

    diff1 = autogen.compare_metadata(mc, models_metadata)
    # Alembic does not contain checks for foreign keys. Because of that it
    # checks separately.
    diff2 = check_foreign_keys(models_metadata)
    diff = diff1 + diff2
    # For each difference run command
    for el in diff:
        execute_alembic_command(el)
def upgrade():
    # Replace the (entity_type, entity_id) pair on blocking_principals with a
    # surrogate integer PK and a JSON-encoded principal reference.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    conn = op.get_bind()
    # Raw SQL because a serial (auto-increment) column is added here.
    op.execute("ALTER TABLE roombooking.blocking_principals ADD COLUMN id serial NOT NULL")
    op.add_column('blocking_principals', sa.Column('principal', postgresql.JSON(), nullable=True),
                  schema='roombooking')
    res = conn.execute("SELECT id, entity_type, entity_id FROM roombooking.blocking_principals")
    for id_, entity_type, entity_id in res:
        # Legacy 'Avatar' entries map to users; anything else becomes a group.
        type_ = 'User' if entity_type in {'Avatar', 'User'} else 'Group'
        principal = (type_, entity_id)
        conn.execute("UPDATE roombooking.blocking_principals SET principal=%s WHERE id=%s",
                     (json.dumps(principal), id_))
    # Backfill done: enforce NOT NULL and switch the PK to the new id column.
    op.alter_column('blocking_principals', 'principal', nullable=False, schema='roombooking')
    op.drop_constraint('pk_blocking_principals', 'blocking_principals', schema='roombooking')
    op.create_primary_key(None, 'blocking_principals', ['id'], schema='roombooking')
    op.drop_column('blocking_principals', 'entity_id', schema='roombooking')
    op.drop_column('blocking_principals', 'entity_type', schema='roombooking')
def upgrade():
    # Introduce per-room ACLs: a room_principals table plus a protection_mode
    # column on rooms, then migrate the existing permissions.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    op.create_table(
        'room_principals',
        sa.Column('read_access', sa.Boolean(), nullable=False),
        sa.Column('full_access', sa.Boolean(), nullable=False),
        sa.Column('permissions', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('room_id', sa.Integer(), nullable=False, index=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True, index=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True, index=True),
        sa.Column('type', PyIntEnum(PrincipalType, exclude_values={PrincipalType.email, PrincipalType.network,
                                                                   PrincipalType.event_role}), nullable=False),
        # Room entries never grant read access on their own.
        sa.CheckConstraint('NOT read_access', name='no_read_access'),
        # Every entry must actually grant something.
        sa.CheckConstraint('read_access OR full_access OR array_length(permissions, 1) IS NOT NULL', name='has_privs'),
        # Per principal-type checks: exactly the matching identifier columns
        # must be set (type 1 -> user_id, 2 -> local_group_id, 3 -> mp_group_*).
        sa.CheckConstraint('type != 1 OR (local_group_id IS NULL AND mp_group_name IS NULL AND '
                           'mp_group_provider IS NULL AND user_id IS NOT NULL)', name='valid_user'),
        sa.CheckConstraint('type != 2 OR (mp_group_name IS NULL AND mp_group_provider IS NULL AND user_id IS NULL AND '
                           'local_group_id IS NOT NULL)', name='valid_local_group'),
        sa.CheckConstraint('type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND mp_group_name IS NOT NULL AND '
                           'mp_group_provider IS NOT NULL)', name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['room_id'], ['roombooking.rooms.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='roombooking'
    )
    op.create_index(None, 'room_principals', ['mp_group_provider', 'mp_group_name'], schema='roombooking')
    # Partial unique indexes: one ACL entry per principal and room.
    op.create_index('ix_uq_room_principals_user', 'room_principals', ['user_id', 'room_id'], unique=True,
                    schema='roombooking', postgresql_where=sa.text('type = 1'))
    op.create_index('ix_uq_room_principals_local_group', 'room_principals', ['local_group_id', 'room_id'], unique=True,
                    schema='roombooking', postgresql_where=sa.text('type = 2'))
    op.create_index('ix_uq_room_principals_mp_group', 'room_principals',
                    ['mp_group_provider', 'mp_group_name', 'room_id'], unique=True, schema='roombooking',
                    postgresql_where=sa.text('type = 3'))
    # Temporary server default so existing rows become 'protected'; removed
    # again after the permission migration below.
    op.add_column('rooms', sa.Column('protection_mode',
                                     PyIntEnum(ProtectionMode, exclude_values={ProtectionMode.inheriting}),
                                     nullable=False, server_default=unicode(ProtectionMode.protected.value)),
                  schema='roombooking')
    _upgrade_permissions()
    op.alter_column('rooms', 'protection_mode', server_default=None, schema='roombooking')
def upgrade():
    # Rewrite the stored setting value: a legacy lowercase u'true' becomes
    # u'True', anything else becomes the empty string.
    if context.is_offline_mode():
        raise AssertionError('This migration can not be run in offline mode.')
    connection = context.get_context().connection
    row = connection.execute(settings.select(settings.c.key == SETTING_KEY)).fetchone()

    new_value = u'True' if row.value == u'true' else u''
    execute(
        settings.update()
        .where(settings.c.key == inline_literal(SETTING_KEY))
        .values({'value': inline_literal(new_value)})
    )
def downgrade():
    # Re-create the JSON 'principal' column from the explicit type/user/group
    # columns added by the corresponding upgrade, then drop those columns.
    if context.is_offline_mode():
        raise Exception('This downgrade is only possible in online mode')
    conn = op.get_bind()
    op.add_column('blocking_principals', sa.Column('principal', postgresql.JSON(), autoincrement=False, nullable=True),
                  schema='roombooking')
    res = conn.execute("SELECT id, type, user_id, local_group_id, mp_group_provider, mp_group_name FROM "
                       "roombooking.blocking_principals")
    for row in res:
        id_ = row[0]
        # _args_to_principal rebuilds the legacy structure from the columns.
        principal = _args_to_principal(*row[1:])
        conn.execute("UPDATE roombooking.blocking_principals SET principal = %s WHERE id = %s",
                     (json.dumps(principal), id_))
    # All rows backfilled: enforce NOT NULL, then drop the new-style columns.
    op.alter_column('blocking_principals', 'principal', nullable=False, schema='roombooking')
    op.drop_column('blocking_principals', 'user_id', schema='roombooking')
    op.drop_column('blocking_principals', 'type', schema='roombooking')
    op.drop_column('blocking_principals', 'mp_group_provider', schema='roombooking')
    op.drop_column('blocking_principals', 'mp_group_name', schema='roombooking')
    op.drop_column('blocking_principals', 'local_group_id', schema='roombooking')
Code example #25
0
def rebuild_fkeys(cascade):
    # Add or remove onupdate=CASCADE on some foreign keys.
    # We need to be online or we can't reflect the constraint names.
    # `cascade` is either "CASCADE" or None (None removes the behavior).
    if (context.is_offline_mode()
        or op.get_context().dialect.name != 'postgresql'):
        return
    connection = op.get_bind()
    # Reflect the live schema to discover the actual FK constraint names.
    md = sa.MetaData()
    md.reflect(bind=connection)
    for fkey in FKEYS_CASCADE:
        keyname = None
        for existing_fk in md.tables[fkey["from_t"]].foreign_keys:
            if existing_fk.constraint.columns == fkey["from_c"]:
                keyname = existing_fk.name
        assert keyname is not None
        # Recreate the constraint under the same name with the requested
        # ON UPDATE / ON DELETE behavior.
        op.drop_constraint(keyname, fkey["from_t"])
        op.create_foreign_key(keyname,
            fkey["from_t"], fkey["to_t"], fkey["from_c"], fkey["to_c"],
            onupdate=cascade, ondelete=cascade)
def downgrade():
    # Restore the legacy (entity_type, entity_id) pair from the JSON
    # 'principal' column and switch the PK back to the composite key.
    if context.is_offline_mode():
        raise Exception('This downgrade is only possible in online mode')
    conn = op.get_bind()
    op.add_column('blocking_principals', sa.Column('entity_type', sa.VARCHAR(), autoincrement=False, nullable=True),
                  schema='roombooking')
    op.add_column('blocking_principals', sa.Column('entity_id', sa.VARCHAR(), autoincrement=False, nullable=True),
                  schema='roombooking')
    for id_, principal in conn.execute('SELECT id, principal FROM roombooking.blocking_principals'):
        entity_type = principal[0]
        # principal[1] is either a plain id or a nested list whose second
        # element is used as the id.
        # NOTE(review): nested-list handling inferred from the isinstance
        # check - confirm against the matching upgrade's JSON format.
        entity_id = principal[1] if not isinstance(principal[1], list) else principal[1][1]
        conn.execute("UPDATE roombooking.blocking_principals SET entity_type=%s, entity_id=%s WHERE id=%s",
                     (entity_type, entity_id, id_))
    # Backfill done: enforce NOT NULL and restore the composite primary key.
    op.alter_column('blocking_principals', 'entity_id', nullable=False, schema='roombooking')
    op.alter_column('blocking_principals', 'entity_type', nullable=False, schema='roombooking')
    op.drop_constraint('pk_blocking_principals', 'blocking_principals', schema='roombooking')
    op.create_primary_key(None, 'blocking_principals', ['blocking_id', 'entity_type', 'entity_id'],
                          schema='roombooking')
    op.drop_column('blocking_principals', 'principal', schema='roombooking')
    op.drop_column('blocking_principals', 'id', schema='roombooking')
def downgrade():
    # Reverse of the aspects upgrade: restore locations.default_aspect_id,
    # string coordinate columns, and the per-location aspect columns.
    if context.is_offline_mode():
        raise Exception('This downgrade is only possible in online mode')
    conn = op.get_bind()
    default_location_id = conn.execute('SELECT id FROM roombooking.locations WHERE is_default').scalar()
    if default_location_id is None:
        # We have some aspects that cannot be associated with a location since there is none
        conn.execute('DELETE FROM roombooking.aspects')
    # String form used as the server default for the restored location_id column.
    default_location = unicode(default_location_id) if default_location_id is not None else None
    # Remember the current default aspect before its flag column is dropped.
    default_aspect_id = conn.execute('SELECT id FROM roombooking.aspects WHERE is_default').scalar()
    op.add_column('locations', sa.Column('default_aspect_id', sa.Integer, nullable=True), schema='roombooking')
    op.create_foreign_key(op.f('fk_locations_default_aspect_id'), 'locations', 'aspects',
                          ['default_aspect_id'], ['id'],
                          source_schema='roombooking', referent_schema='roombooking',
                          onupdate='CASCADE', ondelete='SET NULL')
    # Coordinates go back to their legacy string representation.
    op.alter_column('aspects', 'top_left_latitude', type_=sa.String, schema='roombooking')
    op.alter_column('aspects', 'top_left_longitude', type_=sa.String, schema='roombooking')
    op.alter_column('aspects', 'bottom_right_latitude', type_=sa.String, schema='roombooking')
    op.alter_column('aspects', 'bottom_right_longitude', type_=sa.String, schema='roombooking')
    # Each restored column gets a temporary server default so NOT NULL holds
    # for existing rows; the default is removed immediately afterwards.
    op.add_column('aspects', sa.Column('center_longitude', sa.String, nullable=False, server_default='0'),
                  schema='roombooking')
    op.alter_column('aspects', 'center_longitude', server_default=None, schema='roombooking')
    op.add_column('aspects', sa.Column('center_latitude', sa.String, nullable=False, server_default='0'),
                  schema='roombooking')
    op.alter_column('aspects', 'center_latitude', server_default=None, schema='roombooking')
    op.add_column('aspects', sa.Column('zoom_level', sa.SmallInteger, nullable=False, server_default='0'),
                  schema='roombooking')
    op.alter_column('aspects', 'zoom_level', server_default=None, schema='roombooking')
    op.add_column('aspects', sa.Column('location_id', sa.Integer, nullable=False, server_default=default_location),
                  schema='roombooking')
    op.alter_column('aspects', 'location_id', server_default=None, schema='roombooking')
    op.create_foreign_key(None, 'aspects', 'locations', ['location_id'], ['id'],
                          source_schema='roombooking', referent_schema='roombooking')
    op.drop_index('ix_uq_aspects_is_default', table_name='aspects', schema='roombooking')
    op.drop_column('aspects', 'is_default', schema='roombooking')
    # Re-point the restored locations column at the previously recorded default.
    if default_aspect_id is not None:
        conn.execute('UPDATE roombooking.locations SET default_aspect_id = %s WHERE id = %s',
                     (default_aspect_id, default_location_id))
def upgrade():
    """Create the ``settings_principals`` ACL tables and migrate JSON ACLs.

    Two structurally similar tables are created: a per-event one in the
    ``events`` schema (it additionally has an ``event_id`` column) and a
    global one in the ``indico`` schema.  Existing JSON-stored ACL
    settings are then converted into rows via ``json_to_acl``.
    """
    # The data conversion at the end needs a live DB connection.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    # Per-event ACL principals (events schema).
    op.create_table(
        'settings_principals',
        sa.Column('type', PyIntEnum(PrincipalType), nullable=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('module', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('event_id', sa.String(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True),
        sa.CheckConstraint('module = lower(module)', name='lowercase_module'),
        sa.CheckConstraint('name = lower(name)', name='lowercase_name'),
        # The three constraints below ensure that for each principal `type`
        # exactly the matching reference column(s) are set:
        # type 1 -> user_id, type 2 -> local_group_id, type 3 -> mp_group_*.
        sa.CheckConstraint(
            'type != 1 OR (mp_group_provider IS NULL AND local_group_id IS NULL '
            'AND mp_group_name IS NULL AND user_id IS NOT NULL)',
            name='valid_user'),
        sa.CheckConstraint(
            'type != 2 OR (mp_group_provider IS NULL AND mp_group_name IS NULL '
            'AND user_id IS NULL AND local_group_id IS NOT NULL)',
            name='valid_local_group'),
        sa.CheckConstraint(
            'type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND mp_group_provider IS NOT NULL AND '
            'mp_group_name IS NOT NULL)',
            name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='events')
    op.create_index(None, 'settings_principals', ['event_id'], schema='events')
    op.create_index(None,
                    'settings_principals', ['event_id', 'module'],
                    schema='events')
    op.create_index(None,
                    'settings_principals', ['event_id', 'module', 'name'],
                    schema='events')
    op.create_index(None,
                    'settings_principals', ['local_group_id'],
                    schema='events')
    op.create_index(None, 'settings_principals', ['module'], schema='events')
    op.create_index(None,
                    'settings_principals',
                    ['mp_group_provider', 'mp_group_name'],
                    schema='events')
    op.create_index(None, 'settings_principals', ['name'], schema='events')
    op.create_index(None, 'settings_principals', ['user_id'], schema='events')

    # Global ACL principals (indico schema) — same shape, but without
    # the event_id column and its indexes.
    op.create_table(
        'settings_principals',
        sa.Column('type', PyIntEnum(PrincipalType), nullable=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('module', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True),
        sa.CheckConstraint('module = lower(module)', name='lowercase_module'),
        sa.CheckConstraint('name = lower(name)', name='lowercase_name'),
        sa.CheckConstraint(
            'type != 1 OR (mp_group_provider IS NULL AND local_group_id IS NULL AND '
            'mp_group_name IS NULL AND user_id IS NOT NULL)',
            name='valid_user'),
        sa.CheckConstraint(
            'type != 2 OR (mp_group_provider IS NULL AND mp_group_name IS NULL AND '
            'user_id IS NULL AND local_group_id IS NOT NULL)',
            name='valid_local_group'),
        sa.CheckConstraint(
            'type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND mp_group_provider IS NOT NULL AND '
            'mp_group_name IS NOT NULL)',
            name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='indico')
    op.create_index(None, 'settings_principals', ['module'], schema='indico')
    op.create_index(None, 'settings_principals', ['name'], schema='indico')
    op.create_index(None,
                    'settings_principals', ['module', 'name'],
                    schema='indico')
    op.create_index(None, 'settings_principals', ['user_id'], schema='indico')
    op.create_index(None,
                    'settings_principals', ['local_group_id'],
                    schema='indico')
    op.create_index(None,
                    'settings_principals',
                    ['mp_group_provider', 'mp_group_name'],
                    schema='indico')
    # NOTE(review): json_to_acl presumably converts the JSON-stored ACL
    # settings into rows of the new tables — it is defined elsewhere in
    # this migration module; confirm before relying on this description.
    json_to_acl(acl_settings)
def downgrade():
    """Convert ACL rows back into JSON-based settings (online mode only)."""
    offline = context.is_offline_mode()
    if offline:
        raise Exception('This downgrade is only possible in online mode')
    acl_to_json(acl_settings)
# --- Code example #30 ---
def run_migrations() -> None:
    """Dispatch to the offline or online migration runner."""
    runner = __run_migrations_offline if context.is_offline_mode() else __run_migrations_online
    runner()
# --- Code example #31 ---
def run():
    """Execute migrations in whichever mode Alembic was invoked."""
    if not context.is_offline_mode():
        run_migrations_online()
    else:
        run_migrations_offline()
# --- Code example #32 ---
def migrate_watch_assoc():
    """Merge duplicate watch targets and create user/watch association rows.

    Targets are grouped by a normalized (scheme, host, port) key; the first
    target seen for each key becomes the canonical record, later ones are
    deleted.  Every user-owned target yields one ``DbWatchAssoc`` row that
    points at the canonical target.
    """
    # This is a data migration, so it can only run against a live database.
    if context.is_offline_mode():
        logger.warning('Data migration skipped in the offline mode')
        return

    def _clean(value):
        # None-safe whitespace strip.
        return None if value is None else value.strip()

    def _normalized_key(target):
        # Canonical (scheme, host, port) triple used for duplicate detection.
        scheme = target.scan_scheme
        host = target.scan_host
        port = target.scan_port
        # Default the scheme and repair known bad values ('htttp' typo,
        # plain http) to https.
        if scheme is None or scheme in ('http', 'htttp'):
            scheme = 'https'
        # Coerce the port and fall back to 443 for missing or
        # implausible values.
        port = 443 if port is None else int(port)
        if port == 80 or port <= 10 or port >= 65535:
            port = 443
        host = _clean(host)
        if host is not None:
            # Drop wildcard prefixes (either form may be present).
            if host.startswith('*.'):
                host = host[2:]
            if host.startswith('%.'):
                host = host[2:]
        return scheme, host, port

    canonical_by_key = {}
    seen_pairs = set()
    duplicate_ids = []

    session = BaseSession(bind=op.get_bind())
    for target in session.query(DbWatchTarget).yield_per(1000):
        key = _normalized_key(target)
        canonical = canonical_by_key.get(key)
        if canonical is not None:
            # Same normalized key already seen: this row is redundant and
            # will be deleted at the end.
            duplicate_ids.append(target.id)
        else:
            # First occurrence becomes canonical; write the normalized
            # values back onto it.
            canonical_by_key[key] = target
            target.scan_scheme, target.scan_host, target.scan_port = key
            canonical = target

        if target.user_id is None:
            continue

        pair = canonical.id, target.user_id
        if pair in seen_pairs:
            print('already assoc: %s' % (pair, ))
            continue
        seen_pairs.add(pair)

        assoc = DbWatchAssoc()
        assoc.scan_type = 1
        assoc.created_at = canonical.created_at
        assoc.updated_at = canonical.updated_at
        assoc.scan_periodicity = canonical.scan_periodicity
        assoc.user_id = target.user_id  # actual record!
        assoc.watch_id = canonical.id
        session.add(assoc)
    session.commit()

    # Physically remove the redundant targets.
    if duplicate_ids:
        session.query(DbWatchTarget).filter(DbWatchTarget.id.in_(list(duplicate_ids))) \
            .delete(synchronize_session='fetch')
        session.commit()
        print('Removed %s duplicates %s' % (len(duplicate_ids), duplicate_ids))
# --- Code example #33 ---
def upgrade():
    """Create the ``room_principals`` ACL table and protect rooms.

    Adds a per-room principal table (users, local groups and multipass
    groups), partial unique indexes so a principal can be attached to a
    room only once, and a ``protection_mode`` column on rooms; legacy
    permission data is then converted via ``_upgrade_permissions``.
    """
    # _upgrade_permissions() below is a data migration and needs a live
    # connection.
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    op.create_table(
        'room_principals',
        sa.Column('read_access', sa.Boolean(), nullable=False),
        sa.Column('full_access', sa.Boolean(), nullable=False),
        sa.Column('permissions', postgresql.ARRAY(sa.String()),
                  nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('room_id', sa.Integer(), nullable=False, index=True),
        sa.Column('local_group_id', sa.Integer(), nullable=True, index=True),
        sa.Column('mp_group_provider', sa.String(), nullable=True),
        sa.Column('mp_group_name', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True, index=True),
        sa.Column('type',
                  PyIntEnum(PrincipalType,
                            exclude_values={
                                PrincipalType.email, PrincipalType.network,
                                PrincipalType.event_role
                            }),
                  nullable=False),
        # Rooms never grant plain read access through ACL entries.
        sa.CheckConstraint('NOT read_access', name='no_read_access'),
        # Every entry must actually grant something.
        sa.CheckConstraint(
            'read_access OR full_access OR array_length(permissions, 1) IS NOT NULL',
            name='has_privs'),
        # For each principal `type` exactly the matching reference
        # column(s) must be set: 1 -> user_id, 2 -> local_group_id,
        # 3 -> mp_group_provider/mp_group_name.
        sa.CheckConstraint(
            'type != 1 OR (local_group_id IS NULL AND mp_group_name IS NULL AND '
            'mp_group_provider IS NULL AND user_id IS NOT NULL)',
            name='valid_user'),
        sa.CheckConstraint(
            'type != 2 OR (mp_group_name IS NULL AND mp_group_provider IS NULL AND user_id IS NULL AND '
            'local_group_id IS NOT NULL)',
            name='valid_local_group'),
        sa.CheckConstraint(
            'type != 3 OR (local_group_id IS NULL AND user_id IS NULL AND mp_group_name IS NOT NULL AND '
            'mp_group_provider IS NOT NULL)',
            name='valid_multipass_group'),
        sa.ForeignKeyConstraint(['local_group_id'], ['users.groups.id']),
        sa.ForeignKeyConstraint(['room_id'], ['roombooking.rooms.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='roombooking')
    op.create_index(None,
                    'room_principals', ['mp_group_provider', 'mp_group_name'],
                    schema='roombooking')
    # Partial unique indexes: one per principal type, so the same
    # user/group cannot appear on the same room twice.
    op.create_index('ix_uq_room_principals_user',
                    'room_principals', ['user_id', 'room_id'],
                    unique=True,
                    schema='roombooking',
                    postgresql_where=sa.text('type = 1'))
    op.create_index('ix_uq_room_principals_local_group',
                    'room_principals', ['local_group_id', 'room_id'],
                    unique=True,
                    schema='roombooking',
                    postgresql_where=sa.text('type = 2'))
    op.create_index('ix_uq_room_principals_mp_group',
                    'room_principals',
                    ['mp_group_provider', 'mp_group_name', 'room_id'],
                    unique=True,
                    schema='roombooking',
                    postgresql_where=sa.text('type = 3'))
    # Backfill existing rooms as "protected" via a temporary server
    # default; the default is removed again after the data migration.
    # (unicode() -> this is a Python 2 codebase; server_default must be
    # a string.)
    op.add_column('rooms',
                  sa.Column(
                      'protection_mode',
                      PyIntEnum(ProtectionMode,
                                exclude_values={ProtectionMode.inheriting}),
                      nullable=False,
                      server_default=unicode(ProtectionMode.protected.value)),
                  schema='roombooking')
    _upgrade_permissions()
    op.alter_column('rooms',
                    'protection_mode',
                    server_default=None,
                    schema='roombooking')
# --- Code example #34 ---
def downgrade():
    """Drop room ACLs and revert to the legacy permission storage."""
    # _downgrade_permissions() must read/convert the ACL rows before the
    # table is dropped, which requires a live connection.
    if context.is_offline_mode():
        raise Exception('This downgrade is only possible in online mode')
    _downgrade_permissions()
    op.drop_column('rooms', 'protection_mode', schema='roombooking')
    op.drop_table('room_principals', schema='roombooking')
def downgrade():
    """Drop both ``settings_principals`` tables after converting their
    rows back into JSON-stored ACL settings."""
    # acl_to_json must read the rows before the tables disappear, which
    # requires a live connection.
    if context.is_offline_mode():
        raise Exception('This downgrade is only possible in online mode')
    acl_to_json(acl_settings)
    op.drop_table('settings_principals', schema='indico')
    op.drop_table('settings_principals', schema='events')
# --- Code example #36 ---
 def decorator(*args, **kwargs):
     """Refuse to invoke the wrapped *func* while Alembic is in offline mode."""
     if context.is_offline_mode():
         message = _("%s cannot be called while in offline mode") % func.__name__
         raise RuntimeError(message)
     return func(*args, **kwargs)
# --- Code example #37 ---
 def decorator(*args, **kwargs):
     """Silently skip the wrapped *func* (returning None) in offline mode."""
     if not context.is_offline_mode():
         return func(*args, **kwargs)
# --- Code example #38 ---
    # Bind Alembic to the externally provided connection and metadata
    # (both names come from the enclosing env.py scope).
    context.configure(
        connection=connection,
        target_metadata=metadata,
        )

    try:
        context.run_migrations()
    except:
        # NOTE(review): bare except also catches KeyboardInterrupt /
        # SystemExit, and after the abort the error is swallowed instead
        # of re-raised — consider `except Exception` plus a re-raise.
        traceback.print_exc()
        transaction.abort()
    else:
        transaction.commit()
    finally:
        # NOTE(review): the connection is deliberately left open here —
        # confirm the caller is responsible for closing it.
        #connection.close()
        pass


try:  # Alembic's "if __name__ == '__main__'"
    offline_mode = context.is_offline_mode()
except (AttributeError, NameError):
    # Not running under Alembic (e.g. imported directly) — do nothing.
    pass
else:
    if offline_mode:  # pragma: no cover
        raise ValueError(
            "\nNo support for Alembic's offline mode at this point."
            "\nYou may want to write your own env.py script to use "
            "\n'offline mode'."
            )
    run_migrations_online()
# --- Code example #39 ---
# Minimal env.py: configure Alembic against the Flask-SQLAlchemy
# session's connection and run the migrations inline.
from alembic import context
from hambar.models import db

context.configure(connection=db.session.connection(),
                  target_metadata=db.metadata)

context.run_migrations()

# In online mode the migrations ran on the session's own connection, so
# the session must commit for the changes to persist.
if not context.is_offline_mode():
    db.session.commit()
# --- Code example #40 ---
# File: env.py  Project: kailIII/c2cgeoportal
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the Alembic ini-file configuration, hands a
    dedicated connection to the Alembic context (honouring the configured
    version table schema) and always closes the connection afterwards.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    conn = engine.connect()
    context.configure(
        connection=conn,
        target_metadata=target_metadata,
        version_table_schema=config.get_main_option('version_table_schema'),
    )
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        conn.close()

# Module entry point: select and invoke the runner for the current mode.
_runner = run_migrations_offline if context.is_offline_mode() else run_migrations_online  # pragma: nocover
_runner()
# --- Code example #41 ---
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    An Engine is built from the ini-file configuration and a fresh
    connection is associated with the Alembic context for the duration
    of the migration run; the `with` block closes it automatically.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as conn:
        context.configure(connection=conn,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()


# Module entry point: dispatch on Alembic's invocation mode.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
# --- Code example #42 ---
# File: env.py  Project: chuckbutler/review-queue
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the ini-file settings, hands a connection to
    the Alembic context and guarantees the connection is closed once the
    migration run finishes.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    conn = engine.connect()
    context.configure(
        connection=conn,
        target_metadata=target_metadata)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        conn.close()

# Module entry point: run in whichever mode Alembic was started.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()

# --- Code example #43 ---
def run(target_metadata):
    """Run migrations against *target_metadata* in the current Alembic mode."""
    if not context.is_offline_mode():
        run_migrations_online(target_metadata)
    else:
        run_migrations_offline(target_metadata)