def test_error_resilience(self, mocked_logger):
    """A backend pointed at an unreachable host must log failures, never raise."""
    failing_backend = ElasticsearchBackend(hosts=['non-existant-domain'])
    # Index creation and mapping setup both fail against the dead host,
    # so exactly two error records must have been logged.
    self.assertEqual(2, len(mocked_logger.error.call_args_list))
    # A write attempt is likewise swallowed and reported as a warning.
    failing_backend.write(name='test_error_resilience')
    mocked_logger.warning.assert_called_once()
def setUp(self):
    """Point time_execution at a scratch 'unittest' index and start clean."""
    super(TestTimeExecution, self).setUp()
    self.backend = ElasticsearchBackend('elasticsearch', index='unittest')
    settings.configure(backends=[self.backend])
    self._clear()
def configure_metrics():
    """Register an asynchronous Elasticsearch metrics backend with time_execution.

    Metrics are shipped through ThreadedBackend so that the caller never
    blocks on Elasticsearch I/O.
    """
    # TODO(review): reinstate a feature-flag guard (e.g. settings.METRICS_ENABLED)
    # before this runs unconditionally in production.
    async_es_metrics = ThreadedBackend(
        ElasticsearchBackend,
        backend_kwargs={
            'host': 'localhost',
            'port': '9200',
            # TODO(review): source url_prefix / use_ssl / verify_certs / index /
            # http_auth from settings.ELASTICSEARCH_* once those are available.
        },
    )
    settings.configure(
        backends=[async_es_metrics],
        hooks=[
            status_code_hook,
        ],
        origin='inspire_next',
    )
def configure_appmetrics(self, app):
    """Wire time_execution metrics for *app*, honouring its feature flags."""
    if not app.config.get('FEATURE_FLAG_ENABLE_APPMETRICS'):
        return
    es_kwargs = dict(
        hosts=app.config['APPMETRICS_ELASTICSEARCH_HOSTS'],
        index=app.config['APPMETRICS_ELASTICSEARCH_INDEX'],
    )
    if app.config['APPMETRICS_THREADED_BACKEND']:
        # Threaded variant ships metrics off the request path; lazy_init
        # defers the Elasticsearch connection until first use.
        backend = ThreadedBackend(
            ElasticsearchBackend,
            backend_kwargs=es_kwargs,
            lazy_init=True,
        )
    else:
        backend = ElasticsearchBackend(**es_kwargs)
    hooks = [
        inspire_service_orcid_hooks.status_code_hook,
        inspire_service_orcid_hooks.orcid_error_code_hook,
        inspire_service_orcid_hooks.orcid_service_exception_hook,
        # Add other hooks here: exception_hook,
    ]
    time_execution.settings.configure(
        backends=[backend],
        hooks=hooks,
        origin='inspire_next',
    )
def configure_appmetrics(self, app):
    """Configure time_execution metrics for *app* when the feature flag is on."""
    if not app.config.get("FEATURE_FLAG_ENABLE_APPMETRICS"):
        return
    backend_kwargs = dict(
        hosts=app.config["APPMETRICS_ELASTICSEARCH_HOSTS"],
        index=app.config["APPMETRICS_ELASTICSEARCH_INDEX"],
    )
    if app.config["APPMETRICS_THREADED_BACKEND"]:
        # Threaded backend keeps Elasticsearch writes off the request path.
        backend = ThreadedBackend(ElasticsearchBackend, backend_kwargs=backend_kwargs)
    else:
        backend = ElasticsearchBackend(**backend_kwargs)
    hooks = [
        inspire_service_orcid_hooks.status_code_hook,
        inspire_service_orcid_hooks.orcid_error_code_hook,
        inspire_service_orcid_hooks.orcid_service_exception_hook,
    ]
    time_execution.settings.configure(
        backends=[backend],
        hooks=hooks,
        origin="inspirehep",
    )
def setUp(self):
    """Configure time_execution against a disposable 'unittest' index."""
    super(BaseTestTimeExecutionElasticSearch, self).setUp()
    self.backend = ElasticsearchBackend(
        ELASTICSEARCH_HOST,
        index="unittest",
    )
    settings.configure(backends=[self.backend])
    self._clear()
def test_pipeline_present(self, mocked_index):
    """A backend built with a pipeline must forward it to Elasticsearch.index."""
    pipeline_backend = ElasticsearchBackend(
        ELASTICSEARCH_HOST,
        index="pipelinetest",
        pipeline="custom-pipeline",
    )
    with settings(backends=[pipeline_backend]):
        go()
    # The pipeline name must appear verbatim in the index() call kwargs.
    index_kwargs = mocked_index.call_args.kwargs
    assert "pipeline" in index_kwargs
    assert index_kwargs["pipeline"] == "custom-pipeline"
    ElasticTestMixin._clear(pipeline_backend)
def configure(env):
    """Point time_execution metrics at the per-environment CERN ES cluster."""
    shared_kwargs = dict(
        port=443,
        http_auth=(
            os.environ['APPMETRICS_ELASTICSEARCH_USERNAME'],
            os.environ['APPMETRICS_ELASTICSEARCH_PASSWORD'],
        ),
        use_ssl=True,
        # NOTE(review): verify_certs=False disables TLS certificate checks —
        # confirm this is intentional for the internal cluster.
        verify_certs=False,
    )
    hosts = [
        dict(host='inspire-{}-logs-client1.cern.ch'.format(env), **shared_kwargs),
        dict(host='inspire-{}-logs-client2.cern.ch'.format(env), **shared_kwargs),
    ]
    backend = ElasticsearchBackend(
        hosts=hosts,
        index='inspiremonitoring-{}'.format(env),
    )
    time_execution.settings.configure(
        backends=[backend],
        # hooks=(status_code_hook,),
        origin='inspire_next')
class TestTimeExecution(TestBaseBackend):
    """Integration tests for the Elasticsearch time_execution backend."""

    def setUp(self):
        # Fresh backend against the 'elasticsearch' host and a throwaway index.
        super(TestTimeExecution, self).setUp()
        self.backend = ElasticsearchBackend(
            'elasticsearch',
            index='unittest',
        )
        self._clear()
        configure(backends=[self.backend])

    def tearDown(self):
        self._clear()

    def _clear(self):
        # Drop the base index plus any prefixed indices derived from it;
        # ignore=404 makes the delete a no-op when nothing exists yet.
        self.backend.client.indices.delete(self.backend.index, ignore=404)
        self.backend.client.indices.delete("{}*".format(self.backend.index), ignore=404)

    def _query_backend(self, name):
        """Return the raw ES search result for metrics stored under *name*."""
        # Refresh first so documents written in this test become searchable.
        self.backend.client.indices.refresh(self.backend.get_index())
        metrics = self.backend.client.search(index=self.backend.get_index(), body={
            "query": {
                "term": {
                    "name": name
                }
            },
        })
        return metrics

    def test_time_execution(self):
        # Each call to go() must produce exactly one metric document.
        count = 4
        for i in range(count):
            go()
        metrics = self._query_backend(go.fqn)
        self.assertEqual(metrics['hits']['total'], count)
        for metric in metrics['hits']['hits']:
            self.assertTrue('value' in metric['_source'])

    def test_duration_field(self):
        # A custom duration_field name must appear in the stored document.
        configure(backends=[self.backend], duration_field='my_duration')
        go()
        for metric in self._query_backend(go.fqn)['hits']['hits']:
            self.assertTrue('my_duration' in metric['_source'])

    def test_with_arguments(self):
        # Call arguments must not break recording; function and method
        # metrics are stored under distinct fully-qualified names.
        go('hello', world='world')
        Dummy().go('hello', world='world')
        metrics = self._query_backend(go.fqn)
        self.assertEqual(metrics['hits']['total'], 1)
        metrics = self._query_backend(Dummy().go.fqn)
        self.assertEqual(metrics['hits']['total'], 1)

    def test_hook(self):
        def test_args(**kwargs):
            # Every hook receives response, exception and metric keyword args.
            self.assertIn('response', kwargs)
            self.assertIn('exception', kwargs)
            self.assertIn('metric', kwargs)
            return dict()

        def test_metadata(*args, **kwargs):
            # Metadata returned by a hook is merged into the stored metric.
            return dict(test_key='test value')

        configure(backends=[self.backend], hooks=[test_args, test_metadata])
        go()
        for metric in self._query_backend(go.fqn)['hits']['hits']:
            self.assertEqual(metric['_source']['test_key'], 'test value')
class TestTimeExecution(TestBaseBackend):
    """Integration tests for the Elasticsearch backend, including error paths."""

    def setUp(self):
        # Fresh backend against the 'elasticsearch' host and a throwaway index.
        super(TestTimeExecution, self).setUp()
        self.backend = ElasticsearchBackend(
            'elasticsearch',
            index='unittest',
        )
        settings.configure(backends=[self.backend])
        self._clear()

    def tearDown(self):
        self._clear()

    def _clear(self):
        # Index cleanup is delegated to the shared test mixin.
        ElasticTestMixin._clear(self.backend)

    def _query_backend(self, name):
        """Return the raw ES search result for metrics stored under *name*."""
        return ElasticTestMixin._query_backend(self.backend, name)

    def test_time_execution(self):
        # Each call to go() must produce exactly one metric document, and no
        # 'origin' field is written when none is configured.
        count = 4
        for i in range(count):
            go()
        metrics = self._query_backend(go.fqn)
        self.assertEqual(metrics['hits']['total'], count)
        for metric in metrics['hits']['hits']:
            self.assertTrue('value' in metric['_source'])
            self.assertFalse('origin' in metric['_source'])

    def test_duration_field(self):
        # A custom duration_field name must appear in the stored document.
        with settings(duration_field='my_duration'):
            go()
            for metric in self._query_backend(go.fqn)['hits']['hits']:
                self.assertTrue('my_duration' in metric['_source'])

    def test_with_arguments(self):
        # Call arguments must not break recording; function and method
        # metrics are stored under distinct fully-qualified names.
        go('hello', world='world')
        Dummy().go('hello', world='world')
        metrics = self._query_backend(get_fqn(go))
        self.assertEqual(metrics['hits']['total'], 1)
        metrics = self._query_backend(get_fqn(Dummy().go))
        self.assertEqual(metrics['hits']['total'], 1)

    def test_hook(self):
        def test_args(**kwargs):
            # Every hook receives response, exception and metric keyword args.
            self.assertIn('response', kwargs)
            self.assertIn('exception', kwargs)
            self.assertIn('metric', kwargs)
            return dict()

        def test_metadata(*args, **kwargs):
            # Metadata returned by a hook is merged into the stored metric.
            return dict(test_key='test value')

        with settings(hooks=[test_args, test_metadata]):
            go()
            for metric in self._query_backend(go.fqn)['hits']['hits']:
                self.assertEqual(metric['_source']['test_key'], 'test value')

    @mock.patch('time_execution.backends.elasticsearch.logger')
    def test_error_warning(self, mocked_logger):
        # A TransportError raised by Elasticsearch.index must be caught by
        # write() and logged as a warning with the failed metric payload.
        transport_error = TransportError('mocked error')
        es_index_error_ctx = mock.patch(
            'time_execution.backends.elasticsearch.Elasticsearch.index',
            side_effect=transport_error)
        # Freeze time so the expected 'timestamp' value is deterministic.
        frozen_time_ctx = freeze_time('2016-07-13')
        with es_index_error_ctx, frozen_time_ctx:
            self.backend.write(name='test:metric', value=None)
            mocked_logger.warning.assert_called_once_with(
                'writing metric %r failure %r',
                {
                    'timestamp': datetime(2016, 7, 13),
                    'value': None,
                    'name': 'test:metric'
                },
                transport_error)

    def test_with_origin(self):
        # A configured origin must be written into every metric document.
        with settings(origin='unit_test'):
            go()
            for metric in self._query_backend(go.fqn)['hits']['hits']:
                self.assertEqual(metric['_source']['origin'], 'unit_test')

    def test_bulk_write(self):
        # bulk_write must store one document per metric dict.
        metrics = [{
            'name': 'metric.name',
            'value': 1,
            'timestamp': 1,
        }, {
            'name': 'metric.name',
            'value': 2,
            'timestamp': 2,
        }, {
            'name': 'metric.name',
            'value': 3,
            'timestamp': 3,
        }]
        self.backend.bulk_write(metrics)
        query_result = self._query_backend('metric.name')
        self.assertEqual(len(metrics), query_result['hits']['total'])

    @mock.patch('time_execution.backends.elasticsearch.logger')
    def test_bulk_write_error(self, mocked_logger):
        # A TransportError from Elasticsearch.bulk must be caught by
        # bulk_write() and logged as a warning with the failed batch.
        transport_error = TransportError('mocked error')
        es_index_error_ctx = mock.patch(
            'time_execution.backends.elasticsearch.Elasticsearch.bulk',
            side_effect=transport_error)
        metrics = [1, 2, 3]
        with es_index_error_ctx:
            self.backend.bulk_write(metrics)
            mocked_logger.warning.assert_called_once_with(
                'bulk_write metrics %r failure %r', metrics, transport_error)
class TestTimeExecution(TestBaseBackend):
    """Integration tests for the Elasticsearch time_execution backend."""

    def setUp(self):
        # Fresh backend against the 'elasticsearch' host and a throwaway index.
        super(TestTimeExecution, self).setUp()
        self.backend = ElasticsearchBackend(
            'elasticsearch',
            index='unittest',
        )
        self._clear()
        configure(backends=[self.backend])

    def tearDown(self):
        self._clear()

    def _clear(self):
        # Drop the base index plus any prefixed indices derived from it;
        # ignore=404 makes the delete a no-op when nothing exists yet.
        self.backend.client.indices.delete(self.backend.index, ignore=404)
        self.backend.client.indices.delete("{}*".format(self.backend.index), ignore=404)

    def _query_backend(self, name):
        """Return the raw ES search result for metrics stored under *name*."""
        # Refresh first so documents written in this test become searchable.
        self.backend.client.indices.refresh(self.backend.get_index())
        metrics = self.backend.client.search(
            index=self.backend.get_index(),
            body={
                "query": {
                    "term": {"name": name}
                },
            }
        )
        return metrics

    def test_time_execution(self):
        # Each call to go() must produce exactly one metric document.
        count = 4
        for i in range(count):
            go()
        metrics = self._query_backend(go.fqn)
        self.assertEqual(metrics['hits']['total'], count)
        for metric in metrics['hits']['hits']:
            self.assertTrue('value' in metric['_source'])

    def test_duration_field(self):
        # A custom duration_field name must appear in the stored document.
        configure(backends=[self.backend], duration_field='my_duration')
        go()
        for metric in self._query_backend(go.fqn)['hits']['hits']:
            self.assertTrue('my_duration' in metric['_source'])

    def test_with_arguments(self):
        # Call arguments must not break recording; function and method
        # metrics are stored under distinct fully-qualified names.
        go('hello', world='world')
        Dummy().go('hello', world='world')
        metrics = self._query_backend(go.fqn)
        self.assertEqual(metrics['hits']['total'], 1)
        metrics = self._query_backend(Dummy().go.fqn)
        self.assertEqual(metrics['hits']['total'], 1)

    def test_hook(self):
        def test_args(**kwargs):
            # Every hook receives response, exception and metric keyword args.
            self.assertIn('response', kwargs)
            self.assertIn('exception', kwargs)
            self.assertIn('metric', kwargs)
            return dict()

        def test_metadata(*args, **kwargs):
            # Metadata returned by a hook is merged into the stored metric.
            return dict(test_key='test value')

        configure(backends=[self.backend], hooks=[test_args, test_metadata])
        go()
        for metric in self._query_backend(go.fqn)['hits']['hits']:
            self.assertEqual(metric['_source']['test_key'], 'test value')
class TestTimeExecution(TestBaseBackend):
    """Integration tests for the Elasticsearch backend, including error paths."""

    def setUp(self):
        # Fresh backend against the 'elasticsearch' host and a throwaway index.
        super(TestTimeExecution, self).setUp()
        self.backend = ElasticsearchBackend(
            'elasticsearch',
            index='unittest',
        )
        settings.configure(backends=[self.backend])
        self._clear()

    def tearDown(self):
        self._clear()

    def _clear(self):
        # Index cleanup is delegated to the shared test mixin.
        ElasticTestMixin._clear(self.backend)

    def _query_backend(self, name):
        """Return the raw ES search result for metrics stored under *name*."""
        return ElasticTestMixin._query_backend(self.backend, name)

    def test_time_execution(self):
        # Each call to go() must produce exactly one metric document, and no
        # 'origin' field is written when none is configured.
        count = 4
        for i in range(count):
            go()
        metrics = self._query_backend(go.fqn)
        self.assertEqual(metrics['hits']['total'], count)
        for metric in metrics['hits']['hits']:
            self.assertTrue('value' in metric['_source'])
            self.assertFalse('origin' in metric['_source'])

    def test_duration_field(self):
        # A custom duration_field name must appear in the stored document.
        with settings(duration_field='my_duration'):
            go()
            for metric in self._query_backend(go.fqn)['hits']['hits']:
                self.assertTrue('my_duration' in metric['_source'])

    def test_with_arguments(self):
        # Call arguments must not break recording; function and method
        # metrics are stored under distinct fully-qualified names.
        go('hello', world='world')
        Dummy().go('hello', world='world')
        metrics = self._query_backend(get_fqn(go))
        self.assertEqual(metrics['hits']['total'], 1)
        metrics = self._query_backend(get_fqn(Dummy().go))
        self.assertEqual(metrics['hits']['total'], 1)

    def test_hook(self):
        def test_args(**kwargs):
            # Every hook receives response, exception and metric keyword args.
            self.assertIn('response', kwargs)
            self.assertIn('exception', kwargs)
            self.assertIn('metric', kwargs)
            return dict()

        def test_metadata(*args, **kwargs):
            # Metadata returned by a hook is merged into the stored metric.
            return dict(test_key='test value')

        with settings(hooks=[test_args, test_metadata]):
            go()
            for metric in self._query_backend(go.fqn)['hits']['hits']:
                self.assertEqual(metric['_source']['test_key'], 'test value')

    @mock.patch('time_execution.backends.elasticsearch.logger')
    def test_error_warning(self, mocked_logger):
        # A TransportError raised by Elasticsearch.index must be caught by
        # write() and logged as a warning with the failed metric payload.
        transport_error = TransportError('mocked error')
        es_index_error_ctx = mock.patch(
            'time_execution.backends.elasticsearch.Elasticsearch.index',
            side_effect=transport_error
        )
        # Freeze time so the expected 'timestamp' value is deterministic.
        frozen_time_ctx = freeze_time('2016-07-13')
        with es_index_error_ctx, frozen_time_ctx:
            self.backend.write(name='test:metric', value=None)
            mocked_logger.warning.assert_called_once_with(
                'writing metric %r failure %r',
                {
                    'timestamp': datetime(2016, 7, 13),
                    'value': None,
                    'name': 'test:metric'
                },
                transport_error
            )

    def test_with_origin(self):
        # A configured origin must be written into every metric document.
        with settings(origin='unit_test'):
            go()
            for metric in self._query_backend(go.fqn)['hits']['hits']:
                self.assertEqual(metric['_source']['origin'], 'unit_test')

    def test_bulk_write(self):
        # bulk_write must store one document per metric dict.
        metrics = [
            {
                'name': 'metric.name',
                'value': 1,
                'timestamp': 1,
            },
            {
                'name': 'metric.name',
                'value': 2,
                'timestamp': 2,
            },
            {
                'name': 'metric.name',
                'value': 3,
                'timestamp': 3,
            }
        ]
        self.backend.bulk_write(metrics)
        query_result = self._query_backend('metric.name')
        self.assertEqual(
            len(metrics),
            query_result['hits']['total']
        )

    @mock.patch('time_execution.backends.elasticsearch.logger')
    def test_bulk_write_error(self, mocked_logger):
        # A TransportError from Elasticsearch.bulk must be caught by
        # bulk_write() and logged as a warning with the failed batch.
        transport_error = TransportError('mocked error')
        es_index_error_ctx = mock.patch(
            'time_execution.backends.elasticsearch.Elasticsearch.bulk',
            side_effect=transport_error
        )
        metrics = [1, 2, 3]
        with es_index_error_ctx:
            self.backend.bulk_write(metrics)
            mocked_logger.warning.assert_called_once_with(
                'bulk_write metrics %r failure %r', metrics, transport_error)
def test_do_not_create_index(self, setup_index, setup_mapping):
    """create_index=False must skip both index creation and mapping setup."""
    ElasticsearchBackend(
        ELASTICSEARCH_HOST,
        index="unittest",
        create_index=False,
    )
    setup_index.assert_not_called()
    setup_mapping.assert_not_called()