def setUp(self):
    # NOTE(review): stray module-level duplicate of ElasticSearchTest.setUp
    # defined below — as a top-level function it is never called by the test
    # runner. Presumably left over from a merge; confirm it can be removed.
    self.maxDiff = None  # always show full diffs on assertion failures
    self.es = ElasticSearch("http://url.com", "index", "app")
class ElasticSearchTest(TestCase):
    """Tests for the ElasticSearch metrics backend (newer fixture format).

    NOTE(review): a second ``class ElasticSearchTest`` is defined later in
    this module and shadows this one, so none of these tests are collected
    by the test runner. The two definitions look like two versions of the
    same file concatenated by a bad merge — confirm which one matches the
    current implementation and delete the other.
    """

    def setUp(self):
        self.maxDiff = None  # show full diffs for the large dict comparisons below
        self.es = ElasticSearch("http://url.com", "index", "app")

    @mock.patch("requests.post")
    def test_cpu_max(self, post_mock):
        # Stub out result processing; only the outgoing request is asserted.
        self.es.process = mock.Mock()
        self.es.cpu_max()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "cpu_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @mock.patch("requests.post")
    def test_mem_max(self, post_mock):
        # Stub out result processing; only the outgoing request is asserted.
        self.es.process = mock.Mock()
        self.es.mem_max()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "mem_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @mock.patch("requests.post")
    def test_units(self, post_mock):
        # units() queries the cpu_max index and counts distinct hosts.
        self.es.units()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "cpu_max")
        aggregation = {"units": {"cardinality": {"field": "host"}}}
        post_mock.assert_called_with(url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @mock.patch("requests.post")
    def test_requests_min(self, post_mock):
        # requests_min() sums request counts from the response_time index.
        self.es.requests_min()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "response_time")
        aggregation = {"sum": {"sum": {"field": "count"}}}
        post_mock.assert_called_with(url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @mock.patch("requests.post")
    def test_response_time(self, post_mock):
        self.es.response_time()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "response_time")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @mock.patch("requests.post")
    def test_connections(self, post_mock):
        # Canned ES aggregation response: one date bucket holding per-host
        # connection counts under the "connection" terms aggregation.
        data = {
            "took": 132,
            "timed_out": False,
            "_shards": {"total": 380, "successful": 380, "failed": 0},
            "hits": {"total": 478998, "max_score": 0, "hits": []},
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-09-16T21:42:00.000Z-2015-09-16T21:47:05.700Z",
                            "from": 1442439720000,
                            "from_as_string": "2015-09-16T21:42:00.000Z",
                            "to": 1442440025700,
                            "to_as_string": "2015-09-16T21:47:05.700Z",
                            "doc_count": 1,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-09-16T21:40:00.000Z",
                                        "key": 1442439600000,
                                        "doc_count": 1,
                                        "connection": {
                                            "doc_count_error_upper_bound": 0,
                                            "sum_other_doc_count": 0,
                                            "buckets": [
                                                {"key": "tsuru.company.com:80", "doc_count": 50},
                                                {"key": "remote.something.com:8080", "doc_count": 13},
                                            ],
                                        },
                                    }
                                ]
                            },
                        }
                    ]
                }
            },
        }
        response = mock.Mock()
        response.json.return_value = data
        post_mock.return_value = response
        result = self.es.connections()
        # connections() is expected to merge the legacy and current queries,
        # hence the same bucket appearing twice in "data" (once per call).
        expected = {
            "data": [
                {"x": 1442439600000, "tsuru.company.com:80": 50, "remote.something.com:8080": 13},
                {"x": 1442439600000, "tsuru.company.com:80": 50, "remote.something.com:8080": 13},
            ],
            "min": 13,
            "max": 50,
        }
        self.assertEqual(expected, result)
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "connection")
        # Two POSTs: one against the legacy field name, one against the
        # current one.
        legacy_aggregation = {"connection": {"terms": {"field": "connection.raw"}}}
        aggregation = {"connection": {"terms": {"field": "value.raw"}}}
        expected_calls = [
            mock.call(url, data=json.dumps(self.es.query(aggregation=legacy_aggregation))),
            mock.call(url, data=json.dumps(self.es.query(aggregation=aggregation))),
        ]
        self.assertEqual(expected_calls, post_mock.call_args_list)

    def test_process(self):
        # Canned ES min/max/avg date-histogram response: two one-minute
        # buckets with identical values.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-07-21T19:35:00.000Z-2015-07-21T19:37:05.388Z",
                            "from": 1437507300000,
                            "from_as_string": "2015-07-21T19:35:00.000Z",
                            "to": 1437507425388,
                            "to_as_string": "2015-07-21T19:37:05.388Z",
                            "doc_count": 18,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-07-21T19:35:00.000Z",
                                        "key": 1437507300000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                    {
                                        "key_as_string": "2015-07-21T19:36:00.000Z",
                                        "key": 1437507360000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                ]
                            },
                        }
                    ]
                }
            },
        }
        # process() renders values as two-decimal strings, one point per
        # bucket, plus global min/max across all points.
        expected = {
            "data": [
                {"x": 1437507300000, "max": "97517568.00", "min": "97517568.00", "avg": "97517568.00"},
                {"x": 1437507360000, "max": "97517568.00", "min": "97517568.00", "avg": "97517568.00"},
            ],
            "min": "97517568.00",
            "max": "97517568.00",
        }
        d = self.es.process(data)
        self.assertDictEqual(d, expected)

    def test_process_custom_formatter(self):
        # Same fixture as test_process; values are run through a custom
        # formatter (bytes -> MiB) before formatting.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-07-21T19:35:00.000Z-2015-07-21T19:37:05.388Z",
                            "from": 1437507300000,
                            "from_as_string": "2015-07-21T19:35:00.000Z",
                            "to": 1437507425388,
                            "to_as_string": "2015-07-21T19:37:05.388Z",
                            "doc_count": 18,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-07-21T19:35:00.000Z",
                                        "key": 1437507300000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                    {
                                        "key_as_string": "2015-07-21T19:36:00.000Z",
                                        "key": 1437507360000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                ]
                            },
                        }
                    ]
                }
            },
        }
        # 97517568 / (1024 * 1024) == 93.0 -> "93.00"
        expected = {
            "data": [
                {"x": 1437507300000, "max": "93.00", "min": "93.00", "avg": "93.00"},
                {"x": 1437507360000, "max": "93.00", "min": "93.00", "avg": "93.00"},
            ],
            "min": "93.00",
            "max": "93.00",
        }
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)
class ElasticSearchTest(TestCase):
    """Tests for the ElasticSearch metrics backend (older fixture format).

    NOTE(review): this is the SECOND definition of ``ElasticSearchTest`` in
    this module; it shadows the earlier one, so only these tests run. The
    two definitions look like two versions of the same file concatenated by
    a bad merge — confirm which one matches the current implementation and
    delete the other. This version expects ``process()`` to return series
    keyed by metric name (lists of [timestamp, value] pairs), whereas the
    earlier version expects a flat list of per-bucket dicts.
    """

    def setUp(self):
        self.maxDiff = None  # show full diffs for the large dict comparisons below
        self.es = ElasticSearch("http://url.com", "index", "app")

    @patch("requests.post")
    def test_cpu_max(self, post_mock):
        # Stub out result processing; only the outgoing request is asserted.
        self.es.process = Mock()
        self.es.cpu_max()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "cpu_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_mem_max(self, post_mock):
        # Stub out result processing; only the outgoing request is asserted.
        self.es.process = Mock()
        self.es.mem_max()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "mem_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_units(self, post_mock):
        # units() queries the cpu_max index and counts distinct hosts.
        self.es.units()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "cpu_max")
        aggregation = {"units": {"cardinality": {"field": "host"}}}
        post_mock.assert_called_with(url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_requests_min(self, post_mock):
        # requests_min() sums request counts from the response_time index.
        self.es.requests_min()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "response_time")
        aggregation = {"sum": {"sum": {"field": "count"}}}
        post_mock.assert_called_with(url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_response_time(self, post_mock):
        self.es.response_time()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "response_time")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_connections(self, post_mock):
        # Only the single (legacy-field) request is asserted here; the newer
        # test version above also checks a second call on "value.raw".
        self.es.connections()
        url = "{}/.measure-tsuru-*/{}/_search".format(self.es.url, "connection")
        aggregation = {"connection": {"terms": {"field": "connection.raw"}}}
        post_mock.assert_called_with(url, data=json.dumps(self.es.query(aggregation=aggregation)))

    def test_process(self):
        # Canned ES min/max/avg date-histogram response: two one-minute
        # buckets with identical values.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {
                "total": 266,
                "successful": 266,
                "failed": 0
            },
            "hits": {
                "total": 644073,
                "max_score": 0,
                "hits": []
            },
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-07-21T19:35:00.000Z-2015-07-21T19:37:05.388Z",
                            "from": 1437507300000,
                            "from_as_string": "2015-07-21T19:35:00.000Z",
                            "to": 1437507425388,
                            "to_as_string": "2015-07-21T19:37:05.388Z",
                            "doc_count": 18,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-07-21T19:35:00.000Z",
                                        "key": 1437507300000,
                                        "doc_count": 9,
                                        "min": {
                                            "value": 97517568
                                        },
                                        "max": {
                                            "value": 97517568
                                        },
                                        "avg": {
                                            "value": 97517568
                                        }
                                    },
                                    {
                                        "key_as_string": "2015-07-21T19:36:00.000Z",
                                        "key": 1437507360000,
                                        "doc_count": 9,
                                        "min": {
                                            "value": 97517568
                                        },
                                        "max": {
                                            "value": 97517568
                                        },
                                        "avg": {
                                            "value": 97517568
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        expected = {
            "data": {
                "max": [[1437507300000, '97517568.00'], [1437507360000, '97517568.00']],
                "min": [[1437507300000, '97517568.00'], [1437507360000, '97517568.00']],
                "avg": [[1437507300000, '97517568.00'], [1437507360000, '97517568.00']],
            },
            "min": '97517568.00',
            # NOTE(review): top-level "max" is one unit ABOVE every value in
            # the fixture (97517569 vs 97517568) — presumably process() pads
            # the chart's upper bound by 1; confirm against the
            # implementation, otherwise this looks like a typo.
            "max": '97517569.00'
        }
        d = self.es.process(data)
        self.assertDictEqual(d, expected)

    def test_process_custom_formatter(self):
        # Same fixture as test_process; values are run through a custom
        # formatter (bytes -> MiB) before formatting.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {
                "total": 266,
                "successful": 266,
                "failed": 0
            },
            "hits": {
                "total": 644073,
                "max_score": 0,
                "hits": []
            },
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-07-21T19:35:00.000Z-2015-07-21T19:37:05.388Z",
                            "from": 1437507300000,
                            "from_as_string": "2015-07-21T19:35:00.000Z",
                            "to": 1437507425388,
                            "to_as_string": "2015-07-21T19:37:05.388Z",
                            "doc_count": 18,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-07-21T19:35:00.000Z",
                                        "key": 1437507300000,
                                        "doc_count": 9,
                                        "min": {
                                            "value": 97517568
                                        },
                                        "max": {
                                            "value": 97517568
                                        },
                                        "avg": {
                                            "value": 97517568
                                        }
                                    },
                                    {
                                        "key_as_string": "2015-07-21T19:36:00.000Z",
                                        "key": 1437507360000,
                                        "doc_count": 9,
                                        "min": {
                                            "value": 97517568
                                        },
                                        "max": {
                                            "value": 97517568
                                        },
                                        "avg": {
                                            "value": 97517568
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        # 97517568 / (1024 * 1024) == 93.0 -> "93.00"
        expected = {
            "data": {
                "max": [[1437507300000, '93.00'], [1437507360000, '93.00']],
                "min": [[1437507300000, '93.00'], [1437507360000, '93.00']],
                "avg": [[1437507300000, '93.00'], [1437507360000, '93.00']],
            },
            "min": '93.00',
            # NOTE(review): one unit above every formatted value ("93.00") —
            # same suspected upper-bound padding as in test_process; confirm.
            "max": '94.00'
        }
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)