Example #1
 def add_constraint(self, constraint):
     if self._dbengine.dialect.name != 'sqlite':
         # SQLite does not support adding constraints to a
         # pre-existing table
         # source: http://www.sqlite.org/omitted.html
         create_constraint = AddConstraint(constraint, bind=self._dbengine)
         create_constraint.execute()
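For context, AddConstraint is a SQLAlchemy DDL element, so it can be rendered to a string without a live connection. A minimal sketch, with an illustrative table and constraint name:

from sqlalchemy import CheckConstraint, Column, Integer, MetaData, Table
from sqlalchemy.schema import AddConstraint

metadata = MetaData()
t = Table('t', metadata, Column('a', Integer), Column('b', Integer))
ck = CheckConstraint('a < b', name='ck_a_lt_b', table=t)

# Printing the construct compiles it with a generic dialect, roughly:
# ALTER TABLE t ADD CONSTRAINT ck_a_lt_b CHECK (a < b)
print(AddConstraint(ck))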
Example #2
    def test_conditional_constraint_deprecated(self):
        metadata, users, engine = self.metadata, self.users, self.engine
        nonpg_mock = engines.mock_engine(dialect_name='sqlite')
        pg_mock = engines.mock_engine(dialect_name='postgresql')
        constraint = CheckConstraint('a < b', name='my_test_constraint',
                                     table=users)

        # by placing the constraint in an Add/Drop construct, the
        # 'inline_ddl' flag is set to False

        AddConstraint(constraint, on='postgresql'
                      ).execute_at('after-create', users)
        DropConstraint(constraint, on='postgresql'
                       ).execute_at('before-drop', users)
        metadata.create_all(bind=nonpg_mock)
        strings = ' '.join(str(x) for x in nonpg_mock.mock)
        assert 'my_test_constraint' not in strings
        metadata.drop_all(bind=nonpg_mock)
        strings = ' '.join(str(x) for x in nonpg_mock.mock)
        assert 'my_test_constraint' not in strings
        metadata.create_all(bind=pg_mock)
        strings = ' '.join(str(x) for x in pg_mock.mock)
        assert 'my_test_constraint' in strings
        metadata.drop_all(bind=pg_mock)
        strings = ' '.join(str(x) for x in pg_mock.mock)
        assert 'my_test_constraint' in strings
Example #3
def forwards(apps, schema_editor):
    """
    Ensure all data tables have the new geo_version column, with a default of ''
    """
    session = get_session()
    inspector = inspect(session.bind)

    try:
        for data_table in DATA_TABLES.itervalues():
            db_model = data_table.model
            table = db_model.__table__

            cols = [c['name'] for c in inspector.get_columns(table.name)]
            if 'geo_version' in cols:
                continue

            # remove the old primary key constraint, if any
            pk = inspector.get_pk_constraint(table.name)['name']
            if pk:
                session.execute("ALTER TABLE %s DROP CONSTRAINT %s" %
                                (table.name, pk))

            # add the new column
            session.execute(
                "ALTER TABLE %s ADD COLUMN geo_version VARCHAR(100) DEFAULT ''"
                % table.name)

            # add the correct new constraint
            session.execute(AddConstraint(table.primary_key))

        session.commit()
    finally:
        session.close()
Example #4
 def _add_constraint_in_db(
     self,
     constraint: ConstraintOrIndex,
     errors: str = 'raise',
 ) -> None:
     assert errors in _VALID_ERRORS_OPTIONS
     if self._constraint_already_active(constraint):
         return
     if constraint.table.name not in self._reflected_table_lookup:
         logger.warning(f'Cannot add {constraint.name}, '
                        f'table {constraint.table.name} does not exist')
         return
     with self._db.engine.connect() as conn:
         logger.info(f'Adding {constraint.name}')
         try:
             if isinstance(constraint, Index):
                 conn.execute(CreateIndex(constraint))
             else:
                 # We add a copy instead of the original constraint.
                 # Otherwise, when you later call metadata.create_all
                 # to create tables, SQLAlchemy thinks the
                 # constraints have already been created and skips
                 # them.
                 c = copy(constraint)
                 conn.execute(AddConstraint(c))
         except SQLAlchemyError:
             if errors == 'raise':
                 raise
             elif errors == 'ignore':
                 logger.info(f'Unable to add {constraint.name}')
Example #5
    def test_conditional_constraint(self):
        metadata, users, engine = self.metadata, self.users, self.engine
        nonpg_mock = engines.mock_engine(dialect_name="sqlite")
        pg_mock = engines.mock_engine(dialect_name="postgresql")
        constraint = CheckConstraint("a < b",
                                     name="my_test_constraint",
                                     table=users)

        # by placing the constraint in an Add/Drop construct, the
        # 'inline_ddl' flag is set to False

        event.listen(
            users,
            "after_create",
            AddConstraint(constraint).execute_if(dialect="postgresql"),
        )

        event.listen(
            users,
            "before_drop",
            DropConstraint(constraint).execute_if(dialect="postgresql"),
        )

        metadata.create_all(bind=nonpg_mock)
        strings = " ".join(str(x) for x in nonpg_mock.mock)
        assert "my_test_constraint" not in strings
        metadata.drop_all(bind=nonpg_mock)
        strings = " ".join(str(x) for x in nonpg_mock.mock)
        assert "my_test_constraint" not in strings
        metadata.create_all(bind=pg_mock)
        strings = " ".join(str(x) for x in pg_mock.mock)
        assert "my_test_constraint" in strings
        metadata.drop_all(bind=pg_mock)
        strings = " ".join(str(x) for x in pg_mock.mock)
        assert "my_test_constraint" in strings
Example #6
def reverse(apps, schema_editor):
    """
    Drop the new geo_version column from all data tables
    """
    session = get_session()
    inspector = inspect(session.bind)

    try:
        for data_table in DATA_TABLES.itervalues():
            db_model = data_table.model
            table = db_model.__table__

            # remove the primary key constraint, if any
            pk = inspector.get_pk_constraint(table.name)['name']
            if pk:
                session.execute("ALTER TABLE %s DROP CONSTRAINT %s" %
                                (table.name, pk))

            # drop the new column
            session.execute("ALTER TABLE %s DROP COLUMN geo_version" %
                            table.name)

            # add the old pk constraint
            pk = table.primary_key
            pk.columns.remove(table.c.geo_version)
            session.execute(AddConstraint(pk))

        session.commit()
    finally:
        session.close()
Example #7
def _update_states_table_with_foreign_key_options(engine):
    """Add the options to foreign key constraints."""
    inspector = reflection.Inspector.from_engine(engine)
    alters = []
    for foreign_key in inspector.get_foreign_keys(TABLE_STATES):
        if foreign_key["name"] and not foreign_key["options"]:
            alters.append({
                "old_fk":
                ForeignKeyConstraint((), (), name=foreign_key["name"]),
                "columns":
                foreign_key["constrained_columns"],
            })

    if not alters:
        return

    states_key_constraints = Base.metadata.tables[
        TABLE_STATES].foreign_key_constraints
    old_states_table = Table(  # noqa: F841 pylint: disable=unused-variable
        TABLE_STATES, MetaData(), *[alter["old_fk"] for alter in alters])

    for alter in alters:
        try:
            engine.execute(DropConstraint(alter["old_fk"]))
            for fkc in states_key_constraints:
                if fkc.column_keys == alter["columns"]:
                    engine.execute(AddConstraint(fkc))
        except (InternalError, OperationalError):
            _LOGGER.exception("Could not update foreign options in %s table",
                              TABLE_STATES)
Example #8
def _update_states_table_with_foreign_key_options(connection, engine):
    """Add the options to foreign key constraints."""
    inspector = sqlalchemy.inspect(engine)
    alters = []
    for foreign_key in inspector.get_foreign_keys(TABLE_STATES):
        if foreign_key["name"] and (
                # MySQL/MariaDB will have empty options
                not foreign_key.get("options") or
                # Postgres will have ondelete set to None
                foreign_key.get("options", {}).get("ondelete") is None):
            alters.append({
                "old_fk":
                ForeignKeyConstraint((), (), name=foreign_key["name"]),
                "columns":
                foreign_key["constrained_columns"],
            })

    if not alters:
        return

    states_key_constraints = Base.metadata.tables[
        TABLE_STATES].foreign_key_constraints
    old_states_table = Table(  # noqa: F841 pylint: disable=unused-variable
        TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters))

    for alter in alters:
        try:
            connection.execute(DropConstraint(alter["old_fk"]))
            for fkc in states_key_constraints:
                if fkc.column_keys == alter["columns"]:
                    connection.execute(AddConstraint(fkc))
        except (InternalError, OperationalError):
            _LOGGER.exception("Could not update foreign options in %s table",
                              TABLE_STATES)
Example #9
    def test_conditional_constraint_deprecated(self):
        metadata, users = self.metadata, self.users
        nonpg_mock = engines.mock_engine(dialect_name="sqlite")
        pg_mock = engines.mock_engine(dialect_name="postgresql")
        constraint = CheckConstraint("a < b",
                                     name="my_test_constraint",
                                     table=users)

        # by placing the constraint in an Add/Drop construct, the
        # 'inline_ddl' flag is set to False

        AddConstraint(constraint,
                      on="postgresql").execute_at("after-create", users)
        DropConstraint(constraint,
                       on="postgresql").execute_at("before-drop", users)
        metadata.create_all(bind=nonpg_mock)
        strings = " ".join(str(x) for x in nonpg_mock.mock)
        assert "my_test_constraint" not in strings
        metadata.drop_all(bind=nonpg_mock)
        strings = " ".join(str(x) for x in nonpg_mock.mock)
        assert "my_test_constraint" not in strings
        metadata.create_all(bind=pg_mock)
        strings = " ".join(str(x) for x in pg_mock.mock)
        assert "my_test_constraint" in strings
        metadata.drop_all(bind=pg_mock)
        strings = " ".join(str(x) for x in pg_mock.mock)
        assert "my_test_constraint" in strings
Example #10
 def create_fks(self):
     """Create foreign key constraints on PostgreSQL table"""
     for fk in self.foreign_keys:
         try:
             self.logger.info("Creating foreign key {}".format(fk.name))
             self.conn.execute(AddConstraint(fk))
         except SQLAlchemyError:
             self.logger.warning("Error creating foreign key {}".format(
                 fk.name))
Example #11
def create_fk(db):
    from sqlalchemy.schema import AddConstraint
    from sqlalchemy import text
    connection = db.get_new_connection('create_fk')
    connection.execute(text("START TRANSACTION"))
    for table in build_metadata().sorted_tables:
        for fk in table.foreign_keys:
            connection.execute(AddConstraint(fk.constraint))
    connection.execute(text("COMMIT"))
Example #12
def constraint_ddl(tables, engine, drop=False):

    output = []

    for table in tables:
        for constraint in table.constraints:

            # Avoid duplicating primary key constraint definitions (they are
            # included in CREATE TABLE statements).
            if not isinstance(constraint, PrimaryKeyConstraint):

                if not drop:
                    ddl = AddConstraint(constraint)
                else:
                    ddl = DropConstraint(constraint)

                output.append(str(ddl.compile(dialect=engine.dialect)).strip())
                output.append(';\n\n')

    return output
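A hypothetical driver for constraint_ddl, assuming the tables come from a reflected MetaData (the engine URL and output file name are illustrative):

from sqlalchemy import MetaData, create_engine

engine = create_engine('postgresql://localhost/mydb')
metadata = MetaData()
metadata.reflect(bind=engine)

# Write one ALTER TABLE ... ADD CONSTRAINT statement per non-PK constraint
with open('add_constraints.sql', 'w') as fh:
    fh.writelines(constraint_ddl(metadata.tables.values(), engine))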
Example #13
def create_table_constraints_ddl(tableObj, engine, debug_level=-1):
    ddl_text = ''
    constraints = tableObj.constraints
    for constraint in constraints:
        ddl_obj = AddConstraint(constraint)
        try:
            ddl_string = str(ddl_obj.compile(dialect=engine.dialect)).strip()
        except Exception:
            # Unnamed constraints cannot be compiled into ALTER TABLE
            # statements; skip them.
            ddl_string = ''

        if ddl_string:
            if not ddl_text:
                ddl_text = ddl_string
            else:
                ddl_text = ddl_text + '\n' + ddl_string
    ddl_string = ddl_text
    if int(debug_level) > 0:
        msg = f"table [[{tableObj.name}]] create_table_constraints_DDL: [{ddl_string}]"
        log_message(msg)
    return ddl_string
Example #14
def constraint_ddl(tables, engine, drop=False):

    output = []

    for table in tables:
        constraints = sorted(list(table.constraints), key=lambda k: k.name,
                             reverse=drop)
        for constraint in constraints:

            # Avoid duplicating primary key constraint definitions (they are
            # included in CREATE TABLE statements).
            if not isinstance(constraint, PrimaryKeyConstraint):

                if not drop:
                    ddl = AddConstraint(constraint)
                else:
                    ddl = DropConstraint(constraint)

                output.append(str(ddl.compile(dialect=engine.dialect)).strip())
                output.append(';\n\n')

    return output
Example #15
def restore_foreign_keys(table_name, engine):
    import models
    orig_meta = models.Base.metadata.tables[table_name]
    constraints = list(orig_meta.constraints)
    fks = [
        item for item in constraints if isinstance(item, ForeignKeyConstraint)
    ]

    connection = engine.connect()
    transaction = connection.begin()
    for fkc in fks:
        connection.execute(AddConstraint(fkc))
    transaction.commit()
    connection.close()
Example #16
def downgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name not in ['mysql', 'postgresql']:
        return

    image_properties = Table('image_properties', meta, autoload=True)
    image_members = Table('image_members', meta, autoload=True)
    images = Table('images', meta, autoload=True)

    if migrate_engine.name == 'postgresql':
        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='ix_image_properties_image_id_name')
        migrate_engine.execute(DropConstraint(constraint))

        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        index = Index('ix_image_properties_image_id_name',
                      image_properties.c.image_id, image_properties.c.name)
        migrate_engine.execute(CreateIndex(index))

        images.c.id.alter(
            server_default=Sequence('images_id_seq').next_value())

    if migrate_engine.name == 'mysql':
        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='image_id')
        migrate_engine.execute(AddConstraint(constraint))

    image_members.c.status.alter(nullable=True, server_default=None)
    images.c.protected.alter(nullable=True, server_default=None)
Example #17
 def create_table_constraints(self,
                              table,
                              _include_foreign_key_constraints=None):
     # EXASOL does not support FK constraints that reference
     # the table being created. Thus, these need to be created
     # via ALTER TABLE after table creation
     # TODO: FKs that reference other tables could be inlined
     # the create rule could be more specific but for now, ALTER
     # TABLE for all FKs work.
     for c in [
             c for c in table._sorted_constraints
             if isinstance(c, ForeignKeyConstraint)
     ]:
         c._create_rule = lambda: False
         event.listen(table, "after_create", AddConstraint(c))
     return super(EXADDLCompiler, self).create_table_constraints(table)
Example #18
    def __copy_constraints(self):
        """
        Migrates constraints, UKs, CCs and FKs.
        """
        o_engine = create_engine(self.o_engine_conn)
        d_engine = create_engine(self.d_engine_conn)
        metadata = MetaData()
        metadata.reflect(o_engine)

        insp = inspect(o_engine)

        tables = filter(
            lambda x: x[0] not in self.exclude, metadata.tables.items())
        for table_name, table in tables:
            constraints_to_keep = []
            # keep unique constraints
            uks = insp.get_unique_constraints(table_name)
            for uk in uks:
                uk_cols = filter(
                    lambda c: c.name in uk["column_names"], table._columns)
                uuk = UniqueConstraint(*uk_cols, name=uk["name"])
                uuk._set_parent(table)
                constraints_to_keep.append(uuk)

            # keep check constraints
            ccs = filter(
                lambda cons: isinstance(
                    cons, CheckConstraint), table.constraints
            )
            for cc in ccs:
                cc.sqltext = TextClause(str(cc.sqltext).replace('"', ""))
                constraints_to_keep.append(cc)

            # keep fks
            for fk in filter(
                lambda cons: isinstance(
                    cons, ForeignKeyConstraint), table.constraints
            ):
                constraints_to_keep.append(fk)

            # create all constraints
            for cons in constraints_to_keep:
                try:
                    d_engine.execute(AddConstraint(cons))
                except Exception as e:
                    logger.warning(e)
Example #19
    def test_alter_table_distribute_by(self):
        dbc = DistributeByConstraint('a', 'b')
        self.tables.t.append_constraint(dbc)

        config.db.execute(DropConstraint(dbc))

        insp = inspect(testing.db)
        for c in insp.get_columns('t'):
            assert c['is_distribution_key'] == False

        config.db.execute(AddConstraint(dbc))

        insp = inspect(testing.db)
        for c in insp.get_columns('t'):
            if not (c['name'] == 'c'):
                assert c['is_distribution_key'] == True
            else:
                assert c['is_distribution_key'] == False
Example #20
 def _visit_constraint(self, constraint):
     constraint.name = self.get_constraint_name(constraint)
     if (isinstance(constraint, UniqueConstraint) and
             is_unique_constraint_with_null_columns_supported(
                 self.dialect)):
         for column in constraint:
             if column.nullable:
                 constraint.exclude_nulls = True
                 break
     if getattr(constraint, 'exclude_nulls', None):
         index = Index(constraint.name,
                       *(column for column in constraint),
                       unique=True)
         sql = self.process(CreateIndex(index))
         sql += ' EXCLUDE NULL KEYS'
     else:
         sql = self.process(AddConstraint(constraint))
     self.append(sql)
     self.execute()
Example #21
def rename_vo(old_vo, new_vo, insert_new_vo=False, description=None, email=None, commit_changes=False, skip_history=False):
    """
    Updates rows so that entries associated with `old_vo` are now associated with `new_vo` as part of multi-VO migration.

    :param old_vo:         The 3 character string for the current VO (for a single-VO instance this will be 'def').
    :param new_vo:         The 3 character string for the new VO.
    :param insert_new_vo:  If True then an entry for `new_vo` is created in the database.
    :param description:    Full description of the new VO, unused if `insert_new_vo` is False.
    :param email:          Admin email for the new VO, unused if `insert_new_vo` is False.
    :param commit_changes: If True then changes are made against the database directly.
                           If False, then nothing is committed and the commands needed are dumped to be run later.
    :param skip_history:   If True then tables without FKCs that contain historical data will not be converted, to save time.
    """
    success = True
    engine = session.get_engine()
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData(bind=conn, reflect=True)
    dialect = engine.dialect.name

    # Gather all the columns that need updating and all relevant foreign key constraints
    all_fks = []
    tables_and_columns = []
    for table_name in inspector.get_table_names():
        if skip_history and ('_history' in table_name or '_hist_recent' in table_name):
            continue
        fks = []
        table = Table(table_name, metadata)
        for column in table.c:
            if 'scope' in column.name or column.name == 'account':
                tables_and_columns.append((table, column))
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            if 'scope' in fk['referred_columns'] or 'account' in fk['referred_columns']:
                fks.append(ForeignKeyConstraint(fk['constrained_columns'], [fk['referred_table'] + '.' + r for r in fk['referred_columns']],
                                                name=fk['name'], table=table, **fk['options']))
        all_fks.extend(fks)

    try:
        bound_params = {'old_vo': old_vo,
                        'new_vo': new_vo,
                        'old_vo_suffix': '' if old_vo == 'def' else old_vo,
                        'new_vo_suffix': '' if new_vo == 'def' else '@%s' % new_vo,
                        'split_character': '@',
                        'int_1': 1,
                        'int_2': 2,
                        'new_description': description,
                        'new_email': email,
                        'datetime': datetime.utcnow()}

        bound_params_text = {}
        for key in bound_params:
            if isinstance(bound_params[key], int):
                bound_params_text[key] = bound_params[key]
            else:
                bound_params_text[key] = "'%s'" % bound_params[key]

        if insert_new_vo:
            table = Table('vos', metadata)
            insert_command = table.insert().values(vo=bindparam('new_vo'),
                                                   description=bindparam('new_description'),
                                                   email=bindparam('new_email'),
                                                   updated_at=bindparam('datetime'),
                                                   created_at=bindparam('datetime'))
            print(str(insert_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(insert_command, bound_params)

        # Drop all FKCs affecting InternalAccounts/Scopes
        for fk in all_fks:
            print(str(DropConstraint(fk)) + ';')
            if commit_changes:
                conn.execute(DropConstraint(fk))

        # Update columns
        for table, column in tables_and_columns:
            update_command = table.update().where(split_vo(dialect, column, return_vo=True) == bindparam('old_vo_suffix'))

            if new_vo == 'def':
                update_command = update_command.values({column.name: split_vo(dialect, column)})
            else:
                update_command = update_command.values({column.name: split_vo(dialect, column) + cast(bindparam('new_vo_suffix'), CHAR(4))})

            print(str(update_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(update_command, bound_params)

        table = Table('rses', metadata)
        update_command = table.update().where(table.c.vo == bindparam('old_vo')).values(vo=bindparam('new_vo'))
        print(str(update_command) % bound_params_text + ';')
        if commit_changes:
            conn.execute(update_command, bound_params)

        # Re-add the FKCs we dropped
        for fkc in all_fks:
            print(str(AddConstraint(fkc)) + ';')
            if commit_changes:
                conn.execute(AddConstraint(fkc))
    except:
        success = False
        print(format_exc())
        print('Exception occurred, changes not committed to DB.')

    if commit_changes and success:
        trans.commit()
    trans.close()
    return success
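A hedged usage sketch based on the docstring above; the VO names, description and email are illustrative:

# Dry run: print the commands needed without committing anything
rename_vo('def', 'abc', insert_new_vo=True, description='Example VO',
          email='admin@example.com', commit_changes=False)

# Apply the same migration directly against the database
rename_vo('def', 'abc', insert_new_vo=True, description='Example VO',
          email='admin@example.com', commit_changes=True)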
Example #22
def remove_vo(vo, commit_changes=False, skip_history=False):
    """
    Deletes rows associated with `vo` as part of multi-VO migration.

    :param vo:             The 3 character string for the VO being removed from the DB.
    :param commit_changes: If True then changes are made against the database directly.
                           If False, then nothing is committed and the commands needed are dumped to be run later.
    :param skip_history:   If True then tables without FKCs that contain historical data will not be converted, to save time.
    """
    success = True
    engine = session.get_engine()
    conn = engine.connect()
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData(bind=conn, reflect=True)
    dialect = engine.dialect.name

    # Gather all the columns that need deleting and all relevant foreign key constraints
    all_fks = []
    tables_and_columns = []
    tables_and_columns_rse = []
    for table_name in inspector.get_table_names():
        if skip_history and ('_history' in table_name or '_hist_recent' in table_name):
            continue
        fks = []
        table = Table(table_name, metadata)
        for column in table.c:
            if 'scope' in column.name or column.name == 'account':
                tables_and_columns.append((table, column))
            if 'rse_id' in column.name:
                tables_and_columns_rse.append((table, column))
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            if 'scope' in fk['referred_columns'] or 'account' in fk['referred_columns'] or ('rse' in fk['referred_table'] and 'id' in fk['referred_columns']):
                fks.append(ForeignKeyConstraint(fk['constrained_columns'], [fk['referred_table'] + '.' + r for r in fk['referred_columns']],
                                                name=fk['name'], table=table, **fk['options']))
        all_fks.extend(fks)

    try:
        bound_params = {'vo': vo,
                        'vo_suffix': '' if vo == 'def' else vo,
                        'split_character': '@',
                        'int_1': 1,
                        'int_2': 2}

        bound_params_text = {}
        for key in bound_params:
            if isinstance(bound_params[key], int):
                bound_params_text[key] = bound_params[key]
            else:
                bound_params_text[key] = "'%s'" % bound_params[key]

        # Drop all FKCs affecting InternalAccounts/Scopes or RSE IDs
        for fk in all_fks:
            print(str(DropConstraint(fk)) + ';')
            if commit_changes:
                conn.execute(DropConstraint(fk))

        # Delete rows
        for table, column in tables_and_columns:
            delete_command = table.delete().where(split_vo(dialect, column, return_vo=True) == bindparam('vo_suffix'))
            print(str(delete_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(delete_command, bound_params)

        rse_table = Table('rses', metadata)
        for table, column in tables_and_columns_rse:
            delete_command = table.delete().where(column == rse_table.c.id).where(rse_table.c.vo == bindparam('vo'))
            print(str(delete_command) % bound_params_text + ';')
            if commit_changes:
                conn.execute(delete_command, bound_params)

        delete_command = rse_table.delete().where(rse_table.c.vo == bindparam('vo'))
        print(str(delete_command) % bound_params_text + ';')
        if commit_changes:
            conn.execute(delete_command, bound_params)

        table = Table('vos', metadata)
        delete_command = table.delete().where(table.c.vo == bindparam('vo'))
        print(str(delete_command) % bound_params_text + ';')
        if commit_changes:
            conn.execute(delete_command, bound_params)

        # Re-add the FKCs we dropped
        for fkc in all_fks:
            print(str(AddConstraint(fkc)) + ';')
            if commit_changes:
                conn.execute(AddConstraint(fkc))
    except:
        success = False
        print(format_exc())
        print('Exception occurred, changes not committed to DB.')

    if commit_changes and success:
        trans.commit()
    trans.close()
    return success
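A matching usage sketch for remove_vo (the VO name is illustrative):

# Dry run first, then delete the VO's rows for real
remove_vo('abc', commit_changes=False)
remove_vo('abc', commit_changes=True, skip_history=True)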
Example #23
def upgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name not in ['mysql', 'postgresql']:
        return

    image_properties = Table('image_properties', meta, autoload=True)
    image_members = Table('image_members', meta, autoload=True)
    images = Table('images', meta, autoload=True)

    # We have to ensure that there are no null values since we are going
    # to set nullable=False
    migrate_engine.execute(
        update(image_members).where(
            image_members.c.status == sql.expression.null()).values(
                status='pending'))

    migrate_engine.execute(
        update(images).where(
            images.c.protected == sql.expression.null()).values(
                protected=sql.expression.false()))

    image_members.c.status.alter(nullable=False, server_default='pending')
    images.c.protected.alter(nullable=False,
                             server_default=sql.expression.false())

    if migrate_engine.name == 'postgresql':
        Index('ix_image_properties_image_id_name', image_properties.c.image_id,
              image_properties.c.name).drop()

        # We have different names of this constraint in different versions of
        # postgresql. Since we have only one constraint on this table, we can
        # get it in the following way.
        name = migrate_engine.execute("""SELECT conname
               FROM pg_constraint
               WHERE conrelid =
                   (SELECT oid
                    FROM pg_class
                    WHERE relname LIKE 'image_properties')
                  AND contype = 'u';""").scalar()

        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='%s' % name)
        migrate_engine.execute(DropConstraint(constraint))

        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='ix_image_properties_image_id_name')
        migrate_engine.execute(AddConstraint(constraint))

        images.c.id.alter(server_default=None)
    if migrate_engine.name == 'mysql':
        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='image_id')
        migrate_engine.execute(DropConstraint(constraint))
        image_locations = Table('image_locations', meta, autoload=True)
        if len(image_locations.foreign_keys) == 0:
            migrate_engine.execute(
                AddConstraint(
                    ForeignKeyConstraint([image_locations.c.image_id],
                                         [images.c.id])))
Example #24
 def create_pk(self):
     """Create primary key constraints on PostgreSQL table"""
     self.logger.info("Creating {} primary key".format(self.sql_table))
     self.conn.execute(AddConstraint(self.primary_key))
Example #25
 def _visit_constraint(self, constraint):
     constraint.name = self.get_constraint_name(constraint)
     self.append(self.process(AddConstraint(constraint)))
     self.execute()
Example #26
 def add_foreignkey(self, fk):
     self.connection.execute(AddConstraint(fk))
Example #27
def downgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    metadef_namespaces = Table('metadef_namespaces', meta, autoload=True)
    metadef_properties = Table('metadef_properties', meta, autoload=True)
    metadef_objects = Table('metadef_objects', meta, autoload=True)
    metadef_ns_res_types = Table('metadef_namespace_resource_types',
                                 meta,
                                 autoload=True)
    metadef_resource_types = Table('metadef_resource_types',
                                   meta,
                                   autoload=True)
    metadef_tags = Table('metadef_tags', meta, autoload=True)

    Index('ix_namespaces_namespace', metadef_namespaces.c.namespace).create()

    Index('ix_objects_namespace_id_name', metadef_objects.c.namespace_id,
          metadef_objects.c.name).create()

    Index('ix_metadef_properties_namespace_id_name',
          metadef_properties.c.namespace_id,
          metadef_properties.c.name).create()

    Index('ix_metadef_tags_name', metadef_tags.c.name).drop()

    Index('ix_metadef_tags_namespace_id', metadef_tags.c.namespace_id,
          metadef_tags.c.name).drop()

    if migrate_engine.name != 'sqlite':
        fkc = migrate.ForeignKeyConstraint([metadef_tags.c.namespace_id],
                                           [metadef_namespaces.c.id])
        fkc.drop()

        Index('ix_tags_namespace_id_name', metadef_tags.c.namespace_id,
              metadef_tags.c.name).create()
    else:
        # NOTE(ochuprykov): fkc can't be dropped via `migrate` in sqlite, so it
        # is necessary to recreate table manually and populate it with data
        temp = Table('temp_', meta,
                     *([c.copy() for c in metadef_tags.columns]))
        temp.create()
        migrate_engine.execute('insert into temp_ select * from metadef_tags')
        metadef_tags.drop()
        migrate_engine.execute('alter table temp_ rename to metadef_tags')

        # Refresh old metadata for this table
        meta = sqlalchemy.MetaData()
        meta.bind = migrate_engine
        metadef_tags = Table('metadef_tags', meta, autoload=True)

        Index('ix_tags_namespace_id_name', metadef_tags.c.namespace_id,
              metadef_tags.c.name).create()

        uc = migrate.UniqueConstraint(metadef_tags.c.namespace_id,
                                      metadef_tags.c.name)
        uc.create()

    if migrate_engine.name == 'mysql':
        constraint = UniqueConstraint(metadef_properties.c.namespace_id,
                                      metadef_properties.c.name,
                                      name='namespace_id')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_objects.c.namespace_id,
                                      metadef_objects.c.name,
                                      name='namespace_id')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_ns_res_types.c.resource_type_id,
                                      metadef_ns_res_types.c.namespace_id,
                                      name='resource_type_id')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_namespaces.c.namespace,
                                      name='namespace')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_resource_types.c.name,
                                      name='name')
        migrate_engine.execute(AddConstraint(constraint))

    if migrate_engine.name == 'postgresql':
        constraint = UniqueConstraint(metadef_objects.c.namespace_id,
                                      metadef_objects.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_properties.c.namespace_id,
                                      metadef_properties.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_namespaces.c.namespace)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_resource_types.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(
            metadef_tags.c.namespace_id,
            metadef_tags.c.name,
            name='metadef_tags_namespace_id_name_key')
        migrate_engine.execute(AddConstraint(constraint))

    if migrate_engine.name == 'mysql':
        fkc = migrate.ForeignKeyConstraint(
            [metadef_ns_res_types.c.resource_type_id],
            [metadef_namespaces.c.id],
            name='metadef_namespace_resource_types_ibfk_2')
        fkc.drop()

        Index('ix_metadef_ns_res_types_namespace_id',
              metadef_ns_res_types.c.namespace_id).drop()

        fkc.create()
    else:
        Index('ix_metadef_ns_res_types_namespace_id',
              metadef_ns_res_types.c.namespace_id).drop()

    Index('ix_metadef_namespaces_namespace',
          metadef_namespaces.c.namespace).drop()

    Index('ix_metadef_namespaces_owner', metadef_namespaces.c.owner).drop()

    Index('ix_metadef_objects_name', metadef_objects.c.name).drop()

    Index('ix_metadef_objects_namespace_id',
          metadef_objects.c.namespace_id).drop()

    Index('ix_metadef_properties_name', metadef_properties.c.name).drop()

    Index('ix_metadef_properties_namespace_id',
          metadef_properties.c.namespace_id).drop()
Example #28
def setup(context, drop):
    """ Initialize a database for openFRED data.

    Connect to the database specified in the `[openFRED]` section of oemof's
    configuration file and set the database up to hold openFRED data.
    This means that the configured schema is created if it doesn't already
    exist. The same holds for the tables necessary to store openFRED data
    inside the schema.
    """
    section = context.obj["db"]["section"]
    schema = oemof.db.config.get(section, "schema")
    engine = oemof.db.engine(section)
    inspector = inspect(engine)
    metadata = MetaData(schema=schema, bind=engine, reflect=(not drop))
    classes = mapped_classes(metadata)

    if drop == "schema":
        with engine.connect() as connection:
            connection.execute(
                "DROP SCHEMA IF EXISTS {} CASCADE".format(schema))
    elif drop == "tables":
        classes["__Base__"].metadata.drop_all(engine)
    if schema not in inspector.get_schema_names():
        engine.execute(CreateSchema(schema))

    with engine.connect() as connection:
        connection.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
        connection.execute("CREATE EXTENSION IF NOT EXISTS postgis_topology;")
    classes["__Base__"].metadata.create_all(engine)

    with db_session(engine) as session:
        timespan = classes["Timespan"]
        try:
            ts = (session.query(timespan).filter_by(start=None,
                                                    stop=None).one_or_none())
        except MRF:
            click.echo("Multiple timespans found which have no `start` "
                       "and/or `stop` values.\nAborting.")
            raise click.Abort()
        ts = ts or classes["Timespan"]()
        session.add(ts)
        session.flush()

        context = MigrationContext.configure(session.connection())
        ops = Operations(context)
        ops.alter_column(
            table_name=str(classes["Series"].__table__.name),
            column_name="timespan_id",
            server_default=str(ts.id),
            schema=schema,
        )

        constraint_name = "singular_null_timestamp_constraint"
        if not [
                c for c in timespan.__table__.constraints
                if c.name == constraint_name
        ]:
            constraint = CheckConstraint(
                "(id = {}) OR ".format(ts.id) +
                "(start IS NOT NULL AND stop IS NOT NULL)",
                name=constraint_name,
            )
            timespan.__table__.append_constraint(constraint)
            session.execute(AddConstraint(constraint))

    return classes
Example #29
from sqlalchemy.schema import AddConstraint
from pycroft.model.base import IntegerIdModel
from pycroft.model.types import IPAddress, IPNetwork


class VLAN(IntegerIdModel):
    name = Column(String(127), nullable=False)
    vid = Column(Integer, nullable=False)

    __table_args__ = (CheckConstraint(between(vid, 1, 4094)),)
    switch_ports = relationship('SwitchPort',
                                secondary='switch_port_default_vlans',
                                back_populates='default_vlans')
    subnets = relationship('Subnet', back_populates='vlan')


class Subnet(IntegerIdModel):
    address = Column(IPNetwork, nullable=False)
    gateway = Column(IPAddress)
    reserved_addresses = Column(Integer, default=0, nullable=True)
    description = Column(String(50))

    vlan_id = Column(Integer, ForeignKey(VLAN.id), nullable=False, index=True)
    vlan = relationship(VLAN, back_populates="subnets")


# Ensure that the gateway is contained in the subnet
constraint = CheckConstraint(Subnet.gateway.op('<<')(Subnet.address))
event.listen(Subnet.__table__, "after_create",
             AddConstraint(constraint).execute_if(dialect='postgresql'))
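On PostgreSQL, << is the inet "is contained within" operator, which is why the constraint is installed only under that dialect. Since a CheckConstraint built from Column expressions attaches itself to the columns' table (here Subnet.__table__), the construct can also be rendered for inspection; a minimal sketch (exact quoting and table name depend on the dialect and model base):

# Roughly: ALTER TABLE subnet ADD CHECK (gateway << address)
print(AddConstraint(constraint))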
Example #30
def make_dataset_table(table_name: str, create: bool = False) -> Table:
    """Create customized dataset table using a table name.
    TODO: Create an example
    Args:
        table_name - Table name
        create - Flag to create the table if it does not exist
    Returns:
        dataset_table definition
    """
    if create:
        if not db.engine.dialect.has_table(connection=db.engine,
                                           table_name=table_name,
                                           schema=Config.SAMPLEDB_SCHEMA):
            with db.session.begin_nested():
                db.engine.execute(
                    f"CREATE TABLE {Config.SAMPLEDB_SCHEMA}.dataset_{table_name} OF dataset_type"
                )

                s_name = f"{Config.SAMPLEDB_SCHEMA}.dataset_{table_name}_id_seq"
                db.engine.execute(f"CREATE SEQUENCE {s_name}")

                klass = Table(f'dataset_{table_name}',
                              metadata,
                              autoload=True,
                              autoload_with=db.engine,
                              extend_existing=True)

                # Add index, primary key and foreign key
                db.engine.execute(
                    f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.class_id.name} SET NOT NULL"
                )
                db.engine.execute(
                    f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.start_date.name} SET NOT NULL"
                )
                db.engine.execute(
                    f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER COLUMN {klass.c.end_date.name} SET NOT NULL"
                )

                db.engine.execute(
                    f"ALTER TABLE {Config.SAMPLEDB_SCHEMA}.{klass.name} ALTER {klass.c.id.name} SET DEFAULT NEXTVAL('{s_name}');"
                )

                db.engine.execute(
                    AddConstraint(PrimaryKeyConstraint(klass.c.id)))
                db.engine.execute(CreateIndex(Index(None, klass.c.user_id)))
                db.engine.execute(CreateIndex(Index(None, klass.c.class_id)))
                db.engine.execute(
                    CreateIndex(
                        Index(None, klass.c.location,
                              postgresql_using='gist')))
                db.engine.execute(CreateIndex(Index(None, klass.c.start_date)))
                db.engine.execute(CreateIndex(Index(None, klass.c.end_date)))
                Index(f'idx_{klass.name}_start_end_date', klass.c.start_date,
                      klass.c.end_date)
                db.engine.execute(
                    AddConstraint(
                        ForeignKeyConstraint(
                            name=
                            f"dataset_{table_name}_{klass.c.user_id.name}_fkey",
                            columns=[klass.c.user_id],
                            refcolumns=[Users.id],
                            onupdate="CASCADE",
                            ondelete="CASCADE")))
                db.engine.execute(
                    AddConstraint(
                        ForeignKeyConstraint(
                            name=
                            f"dataset_{table_name}_{klass.c.class_id.name}_fkey",
                            columns=[klass.c.class_id],
                            refcolumns=[LucClass.id],
                            onupdate="CASCADE",
                            ondelete="CASCADE")))

            db.session.commit()
    else:
        klass = Table(f'dataset_{table_name}',
                      metadata,
                      autoload=True,
                      autoload_with=db.engine)

    return klass
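A hedged usage sketch for make_dataset_table (the table name is illustrative):

# Create the physical table on first use, then reuse the definition later
samples = make_dataset_table('amazonia_2020', create=True)
samples_again = make_dataset_table('amazonia_2020')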
Example #31
def downgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine
    inspector = inspect(migrate_engine)

    metadef_namespaces = Table('metadef_namespaces', meta, autoload=True)
    metadef_properties = Table('metadef_properties', meta, autoload=True)
    metadef_objects = Table('metadef_objects', meta, autoload=True)
    metadef_ns_res_types = Table('metadef_namespace_resource_types',
                                 meta, autoload=True)
    metadef_resource_types = Table('metadef_resource_types', meta,
                                   autoload=True)
    metadef_tags = Table('metadef_tags', meta, autoload=True)

    constraints = [('ix_namespaces_namespace',
                    [metadef_namespaces.c.namespace]),
                   ('ix_objects_namespace_id_name',
                    [metadef_objects.c.namespace_id,
                     metadef_objects.c.name]),
                   ('ix_metadef_properties_namespace_id_name',
                    [metadef_properties.c.namespace_id,
                     metadef_properties.c.name])]
    metadef_tags_constraints = inspector.get_unique_constraints('metadef_tags')
    for constraint in metadef_tags_constraints:
        if set(constraint['column_names']) == set(['namespace_id', 'name']):
            constraints.append((constraint['name'],
                                [metadef_tags.c.namespace_id,
                                 metadef_tags.c.name]))

    if meta.bind.name == "ibm_db_sa":
        # For db2, the following constraints need to be dropped first,
        # otherwise the index like ix_metadef_ns_res_types_namespace_id
        # will fail to drop. These constraints will be added back at
        # the end. It should not affect the original logic for other
        # database backends.
        for (constraint_name, cols) in constraints:
            _change_db2_unique_constraint('drop', constraint_name, *cols)
    else:
        Index('ix_namespaces_namespace',
              metadef_namespaces.c.namespace).create()

        Index('ix_objects_namespace_id_name', metadef_objects.c.namespace_id,
              metadef_objects.c.name).create()

        Index('ix_metadef_properties_namespace_id_name',
              metadef_properties.c.namespace_id,
              metadef_properties.c.name).create()

    Index('ix_metadef_tags_name', metadef_tags.c.name).drop()

    Index('ix_metadef_tags_namespace_id', metadef_tags.c.namespace_id,
          metadef_tags.c.name).drop()

    if migrate_engine.name != 'sqlite':
        fkc = migrate.ForeignKeyConstraint([metadef_tags.c.namespace_id],
                                           [metadef_namespaces.c.id])
        fkc.drop()

        if meta.bind.name != "ibm_db_sa":
            # This index would not be created when it is db2 backend.
            Index('ix_tags_namespace_id_name', metadef_tags.c.namespace_id,
                  metadef_tags.c.name).create()
    else:
        # NOTE(ochuprykov): fkc can't be dropped via `migrate` in sqlite, so it
        # is necessary to recreate table manually and populate it with data
        temp = Table('temp_', meta, *(
            [c.copy() for c in metadef_tags.columns]))
        temp.create()
        migrate_engine.execute('insert into temp_ select * from metadef_tags')
        metadef_tags.drop()
        migrate_engine.execute('alter table temp_ rename to metadef_tags')

        # Refresh old metadata for this table
        meta = sqlalchemy.MetaData()
        meta.bind = migrate_engine
        metadef_tags = Table('metadef_tags', meta, autoload=True)

        Index('ix_tags_namespace_id_name', metadef_tags.c.namespace_id,
              metadef_tags.c.name).create()

        uc = migrate.UniqueConstraint(metadef_tags.c.namespace_id,
                                      metadef_tags.c.name)
        uc.create()

    if migrate_engine.name == 'mysql':
        constraint = UniqueConstraint(metadef_properties.c.namespace_id,
                                      metadef_properties.c.name,
                                      name='namespace_id')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_objects.c.namespace_id,
                                      metadef_objects.c.name,
                                      name='namespace_id')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_ns_res_types.c.resource_type_id,
                                      metadef_ns_res_types.c.namespace_id,
                                      name='resource_type_id')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_namespaces.c.namespace,
                                      name='namespace')
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(metadef_resource_types.c.name,
                                      name='name')
        migrate_engine.execute(AddConstraint(constraint))

    if migrate_engine.name == 'postgresql':
        constraint = UniqueConstraint(
            metadef_objects.c.namespace_id,
            metadef_objects.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(
            metadef_properties.c.namespace_id,
            metadef_properties.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(
            metadef_namespaces.c.namespace)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(
            metadef_resource_types.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        constraint = UniqueConstraint(
            metadef_tags.c.namespace_id,
            metadef_tags.c.name,
            name='metadef_tags_namespace_id_name_key')
        migrate_engine.execute(AddConstraint(constraint))

    if migrate_engine.name == 'mysql':
        fkc = migrate.ForeignKeyConstraint(
            [metadef_ns_res_types.c.resource_type_id],
            [metadef_namespaces.c.id],
            name='metadef_namespace_resource_types_ibfk_2')
        fkc.drop()

        Index('ix_metadef_ns_res_types_namespace_id',
              metadef_ns_res_types.c.namespace_id).drop()

        fkc.create()
    else:
        Index('ix_metadef_ns_res_types_namespace_id',
              metadef_ns_res_types.c.namespace_id).drop()

    Index('ix_metadef_namespaces_namespace',
          metadef_namespaces.c.namespace).drop()

    Index('ix_metadef_namespaces_owner', metadef_namespaces.c.owner).drop()

    Index('ix_metadef_objects_name', metadef_objects.c.name).drop()

    Index('ix_metadef_objects_namespace_id',
          metadef_objects.c.namespace_id).drop()

    Index('ix_metadef_properties_name', metadef_properties.c.name).drop()

    Index('ix_metadef_properties_namespace_id',
          metadef_properties.c.namespace_id).drop()

    if meta.bind.name == "ibm_db_sa":
        # For db2, add these constraints back. It should not affect the
        # original logic for other database backends.
        for (constraint_name, cols) in constraints:
            _change_db2_unique_constraint('create', constraint_name, *cols)