def test_fake_logs_with_log_capture(self, env_get_mock):
    """Log capture is enabled when OS_LOG_CAPTURE is truthy and debug off.

    The fixture should consult both environment variables and expose a
    logger when capture is requested.
    """
    # Consistency/robustness fix: the sibling tests' side_effect accepts
    # the optional ``default`` argument (os.environ.get style) and uses
    # dict.get, whereas this one was a strict one-argument lambda that
    # would raise TypeError/KeyError on any lookup beyond the two keys
    # below.  Align the mock shape with the siblings.
    env_get_mock.side_effect = lambda value, default=None: {
        'OS_DEBUG': 0,
        'OS_LOG_CAPTURE': 'True'}.get(value, default)
    f = log.ConfigureLogging()
    f.setUp()
    env_get_mock.assert_any_call('OS_LOG_CAPTURE')
    env_get_mock.assert_any_call('OS_DEBUG')
    # Capture mode materializes a logger on the fixture.
    self.assertIsNotNone(f.logger)
def test_fake_logs_with_debug_int(self, basic_logger_mock, env_get_mock):
    """A numeric-string OS_DEBUG ('10') turns on DEBUG-level logging."""
    fake_env = {'OS_DEBUG': '10', 'OS_LOG_CAPTURE': 0}
    env_get_mock.side_effect = (
        lambda value, default=None: fake_env.get(value, default))

    fixture = log.ConfigureLogging()
    fixture.setUp()

    # Both environment variables must have been consulted.
    for key in ('OS_LOG_CAPTURE', 'OS_DEBUG'):
        env_get_mock.assert_any_call(key)

    # basicConfig must have been invoked exactly once at DEBUG level
    # with the fixture's default format.
    basic_logger_mock.assert_called_once_with(
        format=log.ConfigureLogging.DEFAULT_FORMAT,
        level=logging.DEBUG)
def test_fake_logs_default(self, env_get_mock):
    """With neither debug nor log capture set, the fixture stays inert."""
    fake_env = {'OS_DEBUG': 0, 'OS_LOG_CAPTURE': 0}
    env_get_mock.side_effect = (
        lambda value, default=None: fake_env.get(value, default))

    fixture = log.ConfigureLogging()
    fixture.setUp()

    # Both environment variables must have been consulted.
    for key in ('OS_LOG_CAPTURE', 'OS_DEBUG'):
        env_get_mock.assert_any_call(key)

    # No capture requested and no logger created.
    self.assertFalse(fixture.capture_logs)
    self.assertIsNone(fixture.logger)
def _fake_logs(self):
    """Install the logging fixture so test log output is configured."""
    fixture = log.ConfigureLogging()
    self.log_fixture = self.useFixture(fixture)
def setUp(self):
    """Run base-class setup, then configure logging and keep its logger."""
    super(BaseTestCase, self).setUp()
    log_fixture = self.useFixture(log.ConfigureLogging())
    self.logger = log_fixture.logger
def start_fixture(self):
    """Create necessary temp files and do the config dance."""
    # Manually set up the output-capture and logging fixtures
    # (this fixture's lifecycle doesn't go through oslotest's setUp).
    self.output = output.CaptureOutput()
    self.output.setUp()
    self.log = log.ConfigureLogging()
    self.log.setUp()

    # Rebound at the end of this method so the WSGI app loader can pick
    # up the storage/indexer/conf built here.
    global LOAD_APP_KWARGS

    data_tmp_dir = tempfile.mkdtemp(prefix='gnocchi')

    # GABBI_LIVE: use the deployment's default config files (None);
    # otherwise start from an empty config-file list.
    if os.getenv("GABBI_LIVE"):
        dcf = None
    else:
        dcf = []
    conf = service.prepare_service([], default_config_files=dcf)
    py_root = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                           '..', '..',))
    conf.set_override('paste_config',
                      os.path.join(py_root, 'rest', 'api-paste.ini'),
                      group="api")
    conf.set_override('policy_file',
                      os.path.join(py_root, 'rest', 'policy.json'),
                      group="oslo_policy")

    # NOTE(sileht): This is not concurrency safe, but only this tests file
    # deal with cors, so we are fine. set_override don't work because cors
    # group doesn't yet exists, and we the CORS middleware is created it
    # register the option and directly copy value of all configurations
    # options making impossible to override them properly...
    cfg.set_defaults(cors.CORS_OPTS, allowed_origin="http://foobar.com")

    self.conf = conf
    self.tmp_dir = data_tmp_dir

    if conf.indexer.url is None:
        raise case.SkipTest("No indexer configured")

    # Use the presence of DEVSTACK_GATE_TEMPEST as a semaphore
    # to signal we are not in a gate driven functional test
    # and thus should override conf settings.
    if 'DEVSTACK_GATE_TEMPEST' not in os.environ:
        conf.set_override('driver', 'file', 'storage')
        conf.set_override('file_basepath', data_tmp_dir, 'storage')

    # NOTE(jd) All of that is still very SQL centric but we only support
    # SQL for now so let's say it's good enough.
    # NOTE(review): creates a fresh database per fixture run so parallel
    # test processes don't share an indexer schema — confirm against
    # _create_new_database's semantics.
    conf.set_override(
        'url',
        sqlalchemy.SQLAlchemyIndexer._create_new_database(
            conf.indexer.url),
        'indexer')
    index = indexer.get_driver(conf)
    index.connect()
    index.upgrade()

    # Set pagination to a testable value
    conf.set_override('max_limit', 7, 'api')
    # Those tests uses noauth mode
    # TODO(jd) Rewrite them for basic
    conf.set_override("auth_mode", "noauth", 'api')

    self.index = index

    s = storage.get_driver(conf)
    s.upgrade(index)

    LOAD_APP_KWARGS = {
        'storage': s,
        'indexer': index,
        'conf': conf,
    }

    # start up a thread to async process measures
    self.metricd_thread = MetricdThread(index, s)
    self.metricd_thread.start()
def setUpClass(self): super(TestCase, self).setUpClass() # NOTE(sileht): oslotest does this in setUp() but we # need it here self.output = output.CaptureOutput() self.output.setUp() self.log = log.ConfigureLogging() self.log.setUp() self.conf = service.prepare_service([], default_config_files=[]) py_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',)) self.conf.set_override('paste_config', os.path.join(py_root, 'rest', 'api-paste.ini'), group="api") self.conf.set_override('policy_file', os.path.join(py_root, 'rest', 'policy.json'), group="oslo_policy") # NOTE(jd) This allows to test S3 on AWS if not os.getenv("AWS_ACCESS_KEY_ID"): self.conf.set_override('s3_endpoint_url', os.getenv("GNOCCHI_STORAGE_HTTP_URL"), group="storage") self.conf.set_override('s3_access_key_id', "gnocchi", group="storage") self.conf.set_override('s3_secret_access_key', "anythingworks", group="storage") self.index = indexer.get_driver(self.conf) self.index.connect() # NOTE(jd) So, some driver, at least SQLAlchemy, can't create all # their tables in a single transaction even with the # checkfirst=True, so what we do here is we force the upgrade code # path to be sequential to avoid race conditions as the tests run # in parallel. self.coord = coordination.get_coordinator( self.conf.storage.coordination_url, str(uuid.uuid4()).encode('ascii')) self.coord.start(start_heart=True) with self.coord.get_lock(b"gnocchi-tests-db-lock"): self.index.upgrade() self.coord.stop() self.archive_policies = self.ARCHIVE_POLICIES.copy() for name, ap in six.iteritems(self.archive_policies): # Create basic archive policies try: self.index.create_archive_policy(ap) except indexer.ArchivePolicyAlreadyExists: pass storage_driver = os.getenv("GNOCCHI_TEST_STORAGE_DRIVER", "file") self.conf.set_override('driver', storage_driver, 'storage') if storage_driver == 'ceph': self.conf.set_override('ceph_conffile', os.getenv("CEPH_CONF"), 'storage')