Example #1
 def upgrade(engine):
     schema = ControlledSchema(engine, self.repo_path)
     changeset = schema.changeset(None)
     with sautils.withoutSqliteForeignKeys(engine):
         for version, change in changeset:
             log.msg('migrating schema version %s -> %d' %
                     (version, version + 1))
             schema.runchange(version, change, 1)
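All of these snippets rely on sautils.withoutSqliteForeignKeys to switch off SQLite foreign key enforcement while schema changes run. As a rough illustration only (not Buildbot's actual implementation, which accepts an engine and, in one example below, an explicit connection), a context manager with that effect could look like this:

    import sqlalchemy as sa
    from contextlib import contextmanager

    @contextmanager
    def without_sqlite_foreign_keys(conn):
        # The PRAGMA only matters for SQLite; other dialects are unaffected.
        if conn.engine.dialect.name != 'sqlite':
            yield
            return
        # PRAGMA foreign_keys is per-connection, so issue it on the same
        # connection that will execute the schema changes.
        conn.execute(sa.text('PRAGMA foreign_keys=OFF'))
        try:
            yield
        finally:
            conn.execute(sa.text('PRAGMA foreign_keys=ON'))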
Example #2
 def upgrade_thd(engine):
     with querylog.log_queries():
         schema = migrate.versioning.schema.ControlledSchema(
             engine, self.db.model.repo_path)
         changeset = schema.changeset(target_version)
         with sautils.withoutSqliteForeignKeys(engine):
             for version, change in changeset:
                 log.msg('upgrading to schema version %d' % (version + 1))
                 schema.runchange(version, change, 1)
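The querylog.log_queries() context manager used here comes from Buildbot's test utilities and records the SQL statements issued during the upgrade. A hedged sketch of a similar helper, assuming it is given the engine explicitly (the real helper's signature may differ):

    import contextlib
    import logging
    import sqlalchemy as sa

    logger = logging.getLogger(__name__)

    @contextlib.contextmanager
    def log_queries(engine):
        # Log every statement the engine sends to the database while active.
        def before_cursor_execute(conn, cursor, statement, parameters,
                                  context, executemany):
            logger.debug("SQL: %s %r", statement, parameters)

        sa.event.listen(engine, 'before_cursor_execute', before_cursor_execute)
        try:
            yield
        finally:
            sa.event.remove(engine, 'before_cursor_execute',
                            before_cursor_execute)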
Example #3
        def thd(conn):
            alembic_scripts = self.alembic_get_scripts()
            current_script_rev_head = alembic_scripts.get_current_head()

            if self.table_exists(conn, 'version'):
                raise UpgradeFromBefore0p9Error()

            if self.table_exists(conn, 'migrate_version'):
                version = self.migrate_get_version(conn)

                if version < 40:
                    raise UpgradeFromBefore0p9Error()

                last_sqlalchemy_migrate_version = 58
                if version != last_sqlalchemy_migrate_version:
                    raise UpgradeFromBefore3p0Error()

                self.alembic_stamp(conn, alembic_scripts,
                                   alembic_scripts.get_base())
                conn.execute('drop table migrate_version')

            if not self.table_exists(conn, 'alembic_version'):
                log.msg("Initializing empty database")

                # Do some tests first
                test_unicode(conn)

                Model.metadata.create_all(conn)
                self.alembic_stamp(conn, alembic_scripts,
                                   current_script_rev_head)
                return

            context = alembic.runtime.migration.MigrationContext.configure(
                conn)
            current_rev = context.get_current_revision()

            if current_rev == current_script_rev_head:
                log.msg(
                    'Upgrading database: the current database schema is already the newest'
                )
                return

            log.msg('Upgrading database')
            with sautils.withoutSqliteForeignKeys(conn):
                with context.begin_transaction():
                    context.run_migrations()

            log.msg('Upgrading database: done')
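This example leans on several helpers from the surrounding class (self.table_exists, self.alembic_get_scripts, self.alembic_stamp, self.migrate_get_version). A minimal sketch of what the first three could look like, assuming SQLAlchemy >= 1.4 and standard Alembic APIs; the alembic_dir argument is a hypothetical stand-in for wherever the migration scripts live:

    import sqlalchemy as sa
    from alembic.config import Config
    from alembic.runtime.migration import MigrationContext
    from alembic.script import ScriptDirectory

    def table_exists(conn, table_name):
        # SQLAlchemy >= 1.4: an Inspector can be created from a Connection.
        return sa.inspect(conn).has_table(table_name)

    def alembic_get_scripts(alembic_dir):
        # Point Alembic at the directory containing env.py and versions/.
        config = Config()
        config.set_main_option('script_location', alembic_dir)
        return ScriptDirectory.from_config(config)

    def alembic_stamp(conn, alembic_scripts, revision):
        # Record `revision` in the alembic_version table without running
        # any migration scripts.
        context = MigrationContext.configure(conn)
        context.stamp(alembic_scripts, revision)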
Example #4
        def upgrade_thd(engine):
            with querylog.log_queries():
                with sautils.withoutSqliteForeignKeys(engine):
                    with engine.connect() as conn:

                        def upgrade(rev, context):
                            log.msg(
                                f'Upgrading from {rev} to {target_revision}')
                            return alembic_scripts._upgrade_revs(
                                target_revision, rev)

                        context = MigrationContext.configure(
                            conn, opts={'fn': upgrade})

                        with context.begin_transaction():
                            context.run_migrations()
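For comparison, outside of Buildbot's thread-pool setup the same upgrade can usually be expressed with Alembic's high-level command API. This sketch assumes the project's env.py reads sqlalchemy.url from the config; the function and argument names are illustrative:

    from alembic import command
    from alembic.config import Config

    def upgrade_with_command_api(db_url, alembic_dir, target_revision='head'):
        # Let Alembic build its own engine from the URL and apply every
        # migration up to target_revision.
        config = Config()
        config.set_main_option('script_location', alembic_dir)
        config.set_main_option('sqlalchemy.url', db_url)
        command.upgrade(config, target_revision)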
Example #5
    def __thd_clean_database(self, conn):
        # In general it's nearly impossible to do "bullet proof" database
        # cleanup with SQLAlchemy that will work on a range of databases
        # and their configurations.
        #
        # The following approaches were considered.
        #
        # 1. Drop the Buildbot Model schema:
        #
        #     model.Model.metadata.drop_all(bind=conn, checkfirst=True)
        #
        # Dropping the schema from the model is a correct and working
        # operation only if the database schema exactly corresponds to the
        # model schema.
        #
        # If it does not (e.g. a migration script failed, or the migration
        # resulted in an old version of the model), then some tables outside
        # the model schema may be present, and they may reference tables in
        # the model schema. In this case either dropping the model schema
        # will fail (if the database enforces referential integrity,
        # e.g. PostgreSQL), or dropping the remaining tables in the code
        # below will fail (if the database allows removing tables that other
        # tables still reference, e.g. SQLite).
        #
        # 2. Introspect the database contents and drop the tables found:
        #
        #     meta = MetaData(bind=conn)
        #     meta.reflect()
        #     meta.drop_all()
        #
        # This may fail if the schema contains reference cycles (and the
        # Buildbot schema has them). Reflection loses the metadata that
        # describes how reference cycles can be broken (e.g. use_alter=True).
        # Introspection may also fail if the schema has invalid references
        # (possible in SQLite, for example).
        #
        # 3. What is actually needed here is accurate code for each engine
        # and each engine configuration that drops all tables, indexes,
        # constraints, etc. in the proper order or in a proper way
        # (using table alteration, DROP TABLE ... CASCADE, etc.).
        #
        # Conclusion: use approach 2, breaking the known reference cycles
        # manually.

        # pylint: disable=too-many-nested-blocks

        try:
            meta = MetaData(bind=conn)

            # Reflect the database contents. This may fail, e.g. if a table
            # references a non-existent table in SQLite.
            meta.reflect()

            # Table.foreign_key_constraints was introduced in SQLAlchemy 1.0.
            if sa_version()[:2] >= (1, 0):
                # Restore `use_alter` settings to break known reference
                # cycles. The main goal of this part is to silence the
                # SQLAlchemy warning about reference cycles.
                # It appears safe to do this only with SQLAlchemy >= 1.0.0,
                # since the warning is not issued in SQLAlchemy == 0.8.0.

                # List of reference links (table_name, ref_table_name) that
                # should be broken by adding use_alter=True.
                table_referenced_table_links = [('buildsets', 'builds'),
                                                ('builds', 'buildrequests')]
                for table_name, ref_table_name in table_referenced_table_links:
                    if table_name in meta.tables:
                        table = meta.tables[table_name]
                        for fkc in table.foreign_key_constraints:
                            if fkc.referred_table.name == ref_table_name:
                                fkc.use_alter = True

            # Drop all reflected tables and indices. This may fail, e.g. if
            # SQLAlchemy is unable to break circular references.
            # SQLAlchemy's foreign key support with SQLite is not yet
            # perfect, so foreign keys must be deactivated during this
            # operation, even though use_alter was applied above where
            # possible.
            with withoutSqliteForeignKeys(conn.engine, conn):
                meta.drop_all()

        except Exception:
            # sometimes this goes badly wrong; being able to see the schema
            # can be a big help
            if conn.engine.dialect.name == 'sqlite':
                r = conn.execute("select sql from sqlite_master "
                                 "where type='table'")
                log.msg("Current schema:")
                for row in r.fetchall():
                    log.msg(row.sql)
            raise
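The comment block above mentions approach 3: engine-specific cleanup such as DROP TABLE ... CASCADE. As a rough sketch of what that could look like for an engine that supports CASCADE (e.g. PostgreSQL), with no claim that Buildbot does it this way:

    import sqlalchemy as sa

    def drop_all_tables_cascade(conn):
        # Reflect whatever tables exist and let the database resolve the
        # foreign key ordering via CASCADE (PostgreSQL syntax).
        meta = sa.MetaData()
        meta.reflect(bind=conn)
        for table in meta.tables.values():
            conn.execute(sa.text(f'DROP TABLE IF EXISTS "{table.name}" CASCADE'))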
Example #6
 def verify_thd(engine):
     with sautils.withoutSqliteForeignKeys(engine):
         verify_thd_cb(engine)