def setUp(self):
    """Run before each test method to initialize test environment."""
    super(TestCase, self).setUp()
    self.mock_helm_refresh = self.helm_refresh_patcher.start()
    self.dbapi = dbapi.get_instance()

    # Honor OS_TEST_TIMEOUT when it parses as a positive integer;
    # any invalid value means "no timeout".
    raw_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
    try:
        timeout_secs = int(raw_timeout)
    except ValueError:
        timeout_secs = 0
    if timeout_secs > 0:
        self.useFixture(fixtures.Timeout(timeout_secs, gentle=True))

    self.useFixture(fixtures.NestedTempfile())
    self.useFixture(fixtures.TempHomeDir())

    # Optionally capture stdout/stderr into fixture-managed streams.
    if os.environ.get('OS_STDOUT_CAPTURE') in ('True', '1'):
        out_stream = self.useFixture(fixtures.StringStream('stdout')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', out_stream))
    if os.environ.get('OS_STDERR_CAPTURE') in ('True', '1'):
        err_stream = self.useFixture(fixtures.StringStream('stderr')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stderr', err_stream))

    self.log_fixture = self.useFixture(fixtures.FakeLogger())

    # Neutralize oslo_log setup so tests do not reconfigure logging.
    def fake_logging_setup(*args):
        pass

    self.useFixture(
        fixtures.MonkeyPatch('oslo_log.log.setup', fake_logging_setup))
    logging.register_options(CONF)

    self.useFixture(conf_fixture.ConfFixture(CONF))

    # Build the shared database fixture once per process and reuse it.
    global _DB_CACHE
    if not _DB_CACHE:
        engine = enginefacade.get_legacy_facade().get_engine()
        _DB_CACHE = Database(engine, migration,
                             sql_connection=CONF.database.connection,
                             sqlite_db='sysinv.sqlite',
                             sqlite_clean_db='clean.sqlite')
    self.useFixture(_DB_CACHE)

    # NOTE(danms): Make sure to reset us back to non-remote objects
    # for each test to avoid interactions. Also, backup the object
    # registry so each test starts from a clean copy.
    objects_base.SysinvObject.indirection_api = None
    self._base_test_obj_backup = copy.copy(
        objects_base.SysinvObject._obj_classes)
    self.addCleanup(self._restore_obj_registry)

    self.addCleanup(self._clear_attrs)
    self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
    self.policy = self.useFixture(policy_fixture.PolicyFixture())
    CONF.set_override('fatal_exception_format_errors', True)
def version(config=None, engine=None):
    """Return the current alembic revision of the database, if any."""
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        migration_ctx = alembic_migration.MigrationContext.configure(conn)
        return migration_ctx.get_current_revision()
def setUp(self):
    """Prepare the DB API and the process-wide database fixture."""
    super(DbTestCase, self).setUp()
    self.dbapi = dbapi.get_instance()

    global _DB_CACHE
    if not _DB_CACHE:
        # Lazily create the shared Database fixture on first use.
        legacy_engine = enginefacade.get_legacy_facade().get_engine()
        _DB_CACHE = Database(
            legacy_engine, migration,
            sql_connection=CONF.database.connection)
    self.useFixture(_DB_CACHE)
def version(config=None, engine=None):
    """Current database version.

    :returns: Database version
    :rtype: string
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        ctx = alembic_migration.MigrationContext.configure(conn)
        current = ctx.get_current_revision()
    return current
def setUp(self):
    """Point the DB layer at an in-memory sqlite and install the fixture."""
    super(DBTestCase, self).setUp()
    CONF.set_override('connection', 'sqlite://', group='database')
    self.db_api = db_api.get_instance()

    global _DB_CACHE
    if not _DB_CACHE:
        # First test to run builds the shared Database fixture.
        legacy_engine = enginefacade.get_legacy_facade().get_engine()
        _DB_CACHE = Database(
            legacy_engine,
            sql_connection=CONF.database.connection,
            sqlite_clean_db='clean.sqlite')
    self.useFixture(_DB_CACHE)
def initialize_sql_session(connection_str=unit.IN_MEM_DB_CONN_STRING,
                           enforce_sqlite_fks=True):
    """Configure the default DB connection and sqlite FK enforcement.

    :param connection_str: SQLAlchemy connection string to set as the
        default; individual test cases may still override it.
    :param enforce_sqlite_fks: when True (default), turn sqlite foreign-key
        enforcement ON for the global engine.
    """
    # Make sure the DB is located in the correct location, in this case set
    # the default value, as this should be able to be overridden in some
    # test cases.
    db_options.set_defaults(CONF, connection=connection_str)

    # Enable the Sqlite FKs for global engine by default.
    facade = enginefacade.get_legacy_facade()
    engine = facade.get_engine()
    f_key = 'ON' if enforce_sqlite_fks else 'OFF'
    if engine.name == 'sqlite':
        # Use a context manager so the connection is returned to the pool;
        # the previous code leaked the connection opened by connect().
        with engine.connect() as conn:
            conn.execute('PRAGMA foreign_keys = ' + f_key)
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        context.configure(
            connection=conn,
            target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
def create_schema(config=None, engine=None):
    """Create database schema from models description.

    Can be used for initial installation instead of upgrade('head').
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()

    # Refuse to run against a database that is already version-controlled:
    # create_all() would silently skip existing tables.
    if version(engine=engine) is not None:
        raise db_exc.DBMigrationError("DB schema is already under version"
                                      " control. Use upgrade() instead")

    models.Base.metadata.create_all(engine)
    stamp('head', config=config)
def initialize_sql_session(connection_str=unit.IN_MEM_DB_CONN_STRING,
                           enforce_sqlite_fks=True):
    """Configure the default DB connection and sqlite FK enforcement.

    :param connection_str: SQLAlchemy connection string to set as the
        default; individual test cases may still override it.
    :param enforce_sqlite_fks: when True (default), turn sqlite foreign-key
        enforcement ON for the global engine.
    """
    # Make sure the DB is located in the correct location, in this case set
    # the default value, as this should be able to be overridden in some
    # test cases.
    db_options.set_defaults(
        CONF,
        connection=connection_str)

    # Enable the Sqlite FKs for global engine by default.
    facade = enginefacade.get_legacy_facade()
    engine = facade.get_engine()
    f_key = 'ON' if enforce_sqlite_fks else 'OFF'
    if engine.name == 'sqlite':
        # Use a context manager so the connection is returned to the pool;
        # the previous code leaked the connection opened by connect().
        with engine.connect() as conn:
            conn.execute('PRAGMA foreign_keys = ' + f_key)
def create_schema(config=None, engine=None):
    """Create database schema from models description.

    Can be used for initial installation instead of upgrade('head').
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()

    # NOTE(viktors): If we will use metadata.create_all() for non empty db
    #                schema, it will only add the new tables, but leave
    #                existing as is. So we should avoid of this situation.
    if version(engine=engine) is not None:
        raise db_exc.DBMigrationError("DB schema is already under version"
                                      " control. Use upgrade() instead")

    models.Base.metadata.create_all(engine)
    stamp('head', config=config)
def create_schema(config=None, engine=None):
    """Create database schema from models description.

    Can be used for initial installation instead of upgrade('head').
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()

    # NOTE(viktors): If we will use metadata.create_all() for non empty db
    #                schema, it will only add the new tables, but leave
    #                existing as is. So we should avoid of this situation.
    if version(engine=engine) is not None:
        # DBMigrationError is the current oslo.db exception name; the old
        # DbMigrationError alias is deprecated and removed in newer
        # oslo.db releases (and the sibling create_schema variants in this
        # tree already use DBMigrationError).
        raise db_exc.DBMigrationError("DB schema is already under version"
                                      " control. Use upgrade() instead")

    models.Base.metadata.create_all(engine)
    stamp('head', config=config)
def setUp(self):
    """Run before each test method to initialize test environment."""
    super(TestCase, self).setUp()

    # Apply OS_TEST_TIMEOUT only when it is a positive integer.
    raw_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
    try:
        timeout_secs = int(raw_timeout)
    except ValueError:
        timeout_secs = 0
    if timeout_secs > 0:
        self.useFixture(fixtures.Timeout(timeout_secs, gentle=True))

    self.useFixture(fixtures.NestedTempfile())
    self.useFixture(fixtures.TempHomeDir())

    # Optionally capture stdout/stderr into fixture-managed streams.
    if os.environ.get('OS_STDOUT_CAPTURE') in ('True', '1'):
        out_stream = self.useFixture(fixtures.StringStream('stdout')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', out_stream))
    if os.environ.get('OS_STDERR_CAPTURE') in ('True', '1'):
        err_stream = self.useFixture(fixtures.StringStream('stderr')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stderr', err_stream))

    self.log_fixture = self.useFixture(fixtures.FakeLogger())

    # Neutralize oslo_log setup so tests do not reconfigure logging.
    def fake_logging_setup(*args):
        pass

    self.useFixture(
        fixtures.MonkeyPatch('oslo_log.log.setup', fake_logging_setup))
    logging.register_options(CONF)

    self.useFixture(conf_fixture.ConfFixture(CONF))

    # On first run, recycle the engine's pooled connections and sync the
    # schema to the latest migration.
    global _DB_CACHE
    if not _DB_CACHE:
        engine = enginefacade.get_legacy_facade().get_engine()
        engine.dispose()
        engine.connect()
        migration.db_sync(engine=engine)
def get_engine(connection):
    """Return the global engine from the legacy enginefacade.

    NOTE(review): 'connection' is accepted for interface compatibility but
    is not used; the engine always comes from the global facade.
    """
    return enginefacade.get_legacy_facade().get_engine()
def patch_with_engine(engine):
    """Temporarily make the legacy facade hand out the given engine."""
    facade = enginefacade.get_legacy_facade()
    with mock.patch.object(facade, 'get_engine') as fake_get_engine:
        fake_get_engine.return_value = engine
        yield
def setup_sqlite(self, sql_connection, db_migrate):
    """Record the connection string and (re)connect the global engine."""
    # 'db_migrate' is accepted by the interface but not used here.
    self.sql_connection = sql_connection
    engine = enginefacade.get_legacy_facade().get_engine()
    self.engine = engine
    engine.dispose()
    engine.connect()
def get_engine():
    """Return the global engine from the legacy enginefacade."""
    facade = enginefacade.get_legacy_facade()
    return facade.get_engine()
import os

import sqlalchemy
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
from oslo_db.sqlalchemy import enginefacade

from inventory.common import exception
from inventory.common.i18n import _
from inventory.db import migration

_REPOSITORY = None


def get_engine():
    """Return the global engine from the legacy enginefacade.

    Defined as a function (rather than binding the bound method at import
    time) so the facade/engine is created lazily on first use instead of
    as an import side effect.
    """
    return enginefacade.get_legacy_facade().get_engine()


def db_sync(version=None):
    """Upgrade (or downgrade) the database to the requested version.

    :param version: target migration version as an int (or int-like
        string); None means upgrade to the latest version.
    :raises: exception.ApiError if version is not an integer.
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.ApiError(_("version should be an integer"))

    current_version = db_version()
    repository = _find_migrate_repo()
    if version is None or version > current_version:
        return versioning_api.upgrade(get_engine(), repository, version)
    else:
        return versioning_api.downgrade(get_engine(), repository, version)
def create_tables():
    """Create every table declared on the model base metadata."""
    engine = enginefacade.get_legacy_facade().get_engine()
    BASE.metadata.create_all(engine)