def test_db_init(clirunner, initialised_postgres_db):
    """Verify that ``datacube system init`` creates the schema on an empty database."""
    # Start from a database with no schema at all.
    with initialised_postgres_db.connect() as connection:
        drop_db(connection._connection)
        assert not has_schema(initialised_postgres_db._engine, connection._connection)

    # Run on an empty database.
    result = clirunner(['system', 'init'])
    assert 'Created.' in result.output

    # The init command should have (re)created the schema.
    with initialised_postgres_db.connect() as connection:
        assert has_schema(initialised_postgres_db._engine, connection._connection)
def db_fixture_instance(request):
    """Yield a freshly initialised :class:`PostgresDb`, closing it afterwards.

    The target config is resolved dynamically via ``config_fixture_name`` so the
    same fixture body can serve multiple configurations.
    """
    local_config: LocalConfig = request.getfixturevalue(config_fixture_name)
    db = PostgresDb.from_config(
        local_config,
        application_name='dea-test-run',
        validate_connection=False,
    )

    # Drop and recreate tables so our tests have a clean db.
    with db.connect() as connection:
        _core.drop_db(connection._connection)
    remove_dynamic_indexes()

    # Disable informational messages since we're doing this on every test run.
    with _increase_logging(_core._LOG) as _:
        _core.ensure_db(db._engine)

    # We don't need informational create/drop messages for every config change.
    _dynamic._LOG.setLevel(logging.WARN)

    yield db

    db.close()
def uninitialised_postgres_db(local_config, request):
    """
    Return a connection to an empty PostgreSQL database
    """
    timezone = request.param
    db = PostgresDb.from_config(
        local_config,
        application_name='test-run',
        validate_connection=False,
    )

    # Drop tables so our tests have a clean db.
    _core.drop_db(db._engine)
    db._engine.execute('alter database %s set timezone = %r' % (local_config['db_database'], timezone))

    # We need to run this as well, I think because SQLAlchemy grabs them into it's MetaData,
    # and attempts to recreate them. WTF TODO FIX
    remove_dynamic_indexes()

    yield db

    # Tear down: drop the schema again and release the connection pool.
    _core.drop_db(db._engine)
    db.close()