class ElasticSearchTest(TestCase):
    """Verify the ElasticSearch backend builds the expected search requests
    and post-processes aggregation responses correctly."""

    def setUp(self):
        self.maxDiff = None
        self.es = ElasticSearch(
            "http://url.com", AppFilter(app="app_name").query())
        self.index = ".measure-tsuru-{}*".format(
            datetime.datetime.utcnow().strftime("%Y.%m.%d"))

    @patch("requests.post")
    def test_cpu_max(self, post_mock):
        self.es.process = Mock()
        self.es.cpu_max()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "cpu_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_mem_max(self, post_mock):
        self.es.process = Mock()
        self.es.mem_max()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "mem_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_swap(self, post_mock):
        self.es.process = Mock()
        self.es.swap()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "swap")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_units(self, post_mock):
        # Unit count is derived from distinct hosts in the cpu_max measures.
        self.es.units()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "cpu_max")
        aggregation = {"units": {"cardinality": {"field": "host.raw"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_requests_min(self, post_mock):
        self.es.requests_min()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"sum": {"sum": {"field": "count"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_response_time(self, post_mock):
        self.es.response_time()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {
            "stats": {"stats": {"field": "value"}},
            "percentiles": {"percentiles": {"field": "value"}},
        }
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_http_methods(self, post_mock):
        self.es.http_methods()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"method": {"terms": {"field": "method"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_status_code(self, post_mock):
        self.es.status_code()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"status_code": {"terms": {"field": "status_code"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_top_slow(self, post_mock):
        self.es.top_slow()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {
            "top": {
                "terms": {
                    "script": "doc['method'].value +'U'+doc['path.raw'].value +'U'+doc['status_code'].value"
                },
                "aggs": {"max": {"max": {"field": "value"}}},
            }
        }
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_connections(self, post_mock):
        self.es.connections()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "connection")
        aggregation = {"connection": {"terms": {"field": "connection.raw"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_netrx(self, post_mock):
        self.es.netrx()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "netrx")
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=NET_AGGREGATION)))

    @patch("requests.post")
    def test_nettx(self, post_mock):
        self.es.nettx()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "nettx")
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=NET_AGGREGATION)))

    def test_process(self):
        # Two date-histogram buckets with identical stats; process() should
        # produce one series per stat plus global min/max bounds.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "date": {
                    "buckets": [
                        {
                            "key_as_string": "2015-07-21T19:35:00.000Z",
                            "key": 1437507300000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                        {
                            "key_as_string": "2015-07-21T19:36:00.000Z",
                            "key": 1437507360000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                    ]
                }
            },
        }
        expected = {
            "data": {
                "max": [[1437507300000, 97517568], [1437507360000, 97517568]],
                "min": [[1437507300000, 97517568], [1437507360000, 97517568]],
                "avg": [[1437507300000, 97517568], [1437507360000, 97517568]],
            },
            "min": 97517568,
            # NOTE(review): upper bound comes back as data max + 1 — this
            # mirrors the implementation's padding of the y-axis ceiling.
            "max": 97517569,
        }
        d = self.es.process(data)
        self.assertDictEqual(d, expected)

    def test_process_custom_formatter(self):
        # Same payload as test_process, but values pass through a bytes->MiB
        # formatter before being emitted.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "date": {
                    "buckets": [
                        {
                            "key_as_string": "2015-07-21T19:35:00.000Z",
                            "key": 1437507300000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                        {
                            "key_as_string": "2015-07-21T19:36:00.000Z",
                            "key": 1437507360000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                    ]
                }
            },
        }
        expected = {
            "data": {
                "max": [[1437507300000, 93], [1437507360000, 93]],
                "min": [[1437507300000, 93], [1437507360000, 93]],
                "avg": [[1437507300000, 93], [1437507360000, 93]],
            },
            "min": 93,
            "max": 94,
        }
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_process_no_aggregation(self):
        # A response with no "aggregations" key yields an empty data set.
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_unit_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.base_process(data, self.es.units_process)
        self.assertDictEqual(d, expected)

    def test_request_min_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.base_process(data, self.es.requests_min_process)
        self.assertDictEqual(d, expected)

    def test_connections_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.base_process(data, self.es.connections_process)
        self.assertDictEqual(d, expected)
def setUp(self):
    """Build an ElasticSearch client filtered by app, plus today's index pattern."""
    self.maxDiff = None
    self.es = ElasticSearch(
        "http://url.com", AppFilter(app="app_name").query())
    # Wildcard pattern matching all of today's tsuru measure indices.
    self.index = ".measure-tsuru-{}*".format(
        datetime.datetime.utcnow().strftime("%Y.%m.%d"))
def setUp(self):
    """Build an ElasticSearch client with explicit index/app arguments."""
    self.maxDiff = None
    self.es = ElasticSearch("http://url.com", "index", "app")
    # Exact (non-wildcard) index name for today's tsuru measures.
    self.index = ".measure-tsuru-{}".format(
        datetime.datetime.utcnow().strftime("%Y.%m.%d"))
class ElasticSearchTest(TestCase):
    """Exercise request construction and result processing of the
    ElasticSearch backend (date-range bucket response format)."""

    def setUp(self):
        self.maxDiff = None
        self.es = ElasticSearch("http://url.com", "index", "app")
        self.index = ".measure-tsuru-{}".format(
            datetime.datetime.utcnow().strftime("%Y.%m.%d"))

    @patch("requests.post")
    def test_cpu_max(self, post_mock):
        self.es.process = Mock()
        self.es.cpu_max()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "cpu_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_mem_max(self, post_mock):
        self.es.process = Mock()
        self.es.mem_max()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "mem_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_swap(self, post_mock):
        self.es.process = Mock()
        self.es.swap()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "swap")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_units(self, post_mock):
        # Unit count is the cardinality of hosts seen in cpu_max measures.
        self.es.units()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "cpu_max")
        aggregation = {"units": {"cardinality": {"field": "host"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_requests_min(self, post_mock):
        self.es.requests_min()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"sum": {"sum": {"field": "count"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_response_time(self, post_mock):
        # response_time posts the plain query — no extra aggregation here.
        self.es.response_time()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_connections(self, post_mock):
        self.es.connections()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "connection")
        aggregation = {"connection": {"terms": {"field": "connection.raw"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    def test_process(self):
        # Nested range -> date histogram buckets; values arrive wrapped in
        # {"value": ...} and are rendered as two-decimal strings.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-07-21T19:35:00.000Z-2015-07-21T19:37:05.388Z",
                            "from": 1437507300000,
                            "from_as_string": "2015-07-21T19:35:00.000Z",
                            "to": 1437507425388,
                            "to_as_string": "2015-07-21T19:37:05.388Z",
                            "doc_count": 18,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-07-21T19:35:00.000Z",
                                        "key": 1437507300000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                    {
                                        "key_as_string": "2015-07-21T19:36:00.000Z",
                                        "key": 1437507360000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                ]
                            },
                        }
                    ]
                }
            },
        }
        expected = {
            "data": {
                "max": [[1437507300000, "97517568.00"],
                        [1437507360000, "97517568.00"]],
                "min": [[1437507300000, "97517568.00"],
                        [1437507360000, "97517568.00"]],
                "avg": [[1437507300000, "97517568.00"],
                        [1437507360000, "97517568.00"]],
            },
            "min": "97517568.00",
            # NOTE(review): ceiling is data max + 1, then string-formatted.
            "max": "97517569.00",
        }
        d = self.es.process(data)
        self.assertDictEqual(d, expected)

    def test_process_custom_formatter(self):
        # Same payload, values converted bytes -> MiB by the formatter.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "range": {
                    "buckets": [
                        {
                            "key": "2015-07-21T19:35:00.000Z-2015-07-21T19:37:05.388Z",
                            "from": 1437507300000,
                            "from_as_string": "2015-07-21T19:35:00.000Z",
                            "to": 1437507425388,
                            "to_as_string": "2015-07-21T19:37:05.388Z",
                            "doc_count": 18,
                            "date": {
                                "buckets": [
                                    {
                                        "key_as_string": "2015-07-21T19:35:00.000Z",
                                        "key": 1437507300000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                    {
                                        "key_as_string": "2015-07-21T19:36:00.000Z",
                                        "key": 1437507360000,
                                        "doc_count": 9,
                                        "min": {"value": 97517568},
                                        "max": {"value": 97517568},
                                        "avg": {"value": 97517568},
                                    },
                                ]
                            },
                        }
                    ]
                }
            },
        }
        expected = {
            "data": {
                "max": [[1437507300000, "93.00"], [1437507360000, "93.00"]],
                "min": [[1437507300000, "93.00"], [1437507360000, "93.00"]],
                "avg": [[1437507300000, "93.00"], [1437507360000, "93.00"]],
            },
            "min": "93.00",
            "max": "94.00",
        }
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {"data": {}, "max": "1.00", "min": "0.00"}
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_unit_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {"data": {}, "max": "0.00", "min": "0.00"}
        d = self.es.units_process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_request_min_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        # requests_min_process leaves min undefined when nothing matched.
        expected = {"data": {}, "max": 0, "min": None}
        d = self.es.requests_min_process(
            data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_connections_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {"data": {}, "max": 0, "min": 0}
        d = self.es.connections_process(
            data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)
def setUp(self):
    """Build an ElasticSearch client filtered by app, plus today's index pattern."""
    self.maxDiff = None
    self.es = ElasticSearch(
        "http://url.com", AppFilter(app="app_name").query())
    # Wildcard pattern matching all of today's tsuru measure indices.
    self.index = ".measure-tsuru-{}*".format(
        datetime.datetime.utcnow().strftime("%Y.%m.%d"))
class ElasticSearchTest(TestCase):
    """Check the ElasticSearch backend: per-metric request construction
    (including the hand-built top_slow query) and response processing."""

    def setUp(self):
        self.maxDiff = None
        self.es = ElasticSearch(
            "http://url.com", AppFilter(app="app_name").query())
        self.index = ".measure-tsuru-{}*".format(
            datetime.datetime.utcnow().strftime("%Y.%m.%d"))

    @patch("requests.post")
    def test_cpu_max(self, post_mock):
        self.es.process = Mock()
        self.es.cpu_max()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "cpu_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_mem_max(self, post_mock):
        self.es.process = Mock()
        self.es.mem_max()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "mem_max")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_swap(self, post_mock):
        self.es.process = Mock()
        self.es.swap()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "swap")
        post_mock.assert_called_with(url, data=json.dumps(self.es.query()))

    @patch("requests.post")
    def test_units(self, post_mock):
        # Unit count is the cardinality of hosts seen in cpu_max measures.
        self.es.units()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "cpu_max")
        aggregation = {"units": {"cardinality": {"field": "host.raw"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_requests_min(self, post_mock):
        self.es.requests_min()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"sum": {"sum": {"field": "count"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_response_time(self, post_mock):
        self.es.response_time()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {
            "stats": {"stats": {"field": "value"}},
            "percentiles": {"percentiles": {"field": "value"}},
        }
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_http_methods(self, post_mock):
        self.es.http_methods()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"method": {"terms": {"field": "method"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_status_code(self, post_mock):
        self.es.status_code()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        aggregation = {"status_code": {"terms": {"field": "status_code"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_top_slow(self, post_mock):
        # top_slow builds its request body by hand rather than via query():
        # buckets keyed on method|path|status, ordered by max response time,
        # with the single slowest hit attached per bucket.
        self.es.top_slow()
        url = "{}/{}/{}/_search".format(
            self.es.url, self.index, "response_time")
        query = {
            "query": self.es.filtered_query,
            "size": 0,
            "aggs": {
                "top": {
                    "terms": {
                        "script": "doc['method'].value +'|-o-|'+doc['path.raw'].value +'|-o-|'+doc['status_code'].value",
                        "order": {"stats.max": "desc"},
                    },
                    "aggs": {
                        "stats": {"stats": {"field": "value"}},
                        "percentiles": {"percentiles": {"field": "value"}},
                        "max": {
                            "top_hits": {
                                "sort": [{"value": {"order": "desc"}}],
                                "size": 1,
                            }
                        },
                    },
                }
            },
        }
        post_mock.assert_called_with(url, data=json.dumps(query))

    @patch("requests.post")
    def test_connections(self, post_mock):
        self.es.connections()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "connection")
        aggregation = {"connection": {"terms": {"field": "connection.raw"}}}
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=aggregation)))

    @patch("requests.post")
    def test_netrx(self, post_mock):
        self.es.netrx()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "netrx")
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=NET_AGGREGATION)))

    @patch("requests.post")
    def test_nettx(self, post_mock):
        self.es.nettx()
        url = "{}/{}/{}/_search".format(self.es.url, self.index, "nettx")
        post_mock.assert_called_with(
            url, data=json.dumps(self.es.query(aggregation=NET_AGGREGATION)))

    def test_process(self):
        # Two date-histogram buckets with identical stats; process() should
        # produce one series per stat plus global min/max bounds.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "date": {
                    "buckets": [
                        {
                            "key_as_string": "2015-07-21T19:35:00.000Z",
                            "key": 1437507300000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                        {
                            "key_as_string": "2015-07-21T19:36:00.000Z",
                            "key": 1437507360000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                    ]
                }
            },
        }
        expected = {
            "data": {
                "max": [[1437507300000, 97517568], [1437507360000, 97517568]],
                "min": [[1437507300000, 97517568], [1437507360000, 97517568]],
                "avg": [[1437507300000, 97517568], [1437507360000, 97517568]],
            },
            "min": 97517568,
            # NOTE(review): upper bound comes back as data max + 1.
            "max": 97517569,
        }
        d = self.es.process(data)
        self.assertDictEqual(d, expected)

    def test_process_custom_formatter(self):
        # Same payload, values converted bytes -> MiB by the formatter.
        data = {
            "took": 86,
            "timed_out": False,
            "_shards": {"total": 266, "successful": 266, "failed": 0},
            "hits": {"total": 644073, "max_score": 0, "hits": []},
            "aggregations": {
                "date": {
                    "buckets": [
                        {
                            "key_as_string": "2015-07-21T19:35:00.000Z",
                            "key": 1437507300000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                        {
                            "key_as_string": "2015-07-21T19:36:00.000Z",
                            "key": 1437507360000,
                            "doc_count": 9,
                            "stats": {
                                "min": 97517568,
                                "max": 97517568,
                                "avg": 97517568,
                            },
                        },
                    ]
                }
            },
        }
        expected = {
            "data": {
                "max": [[1437507300000, 93], [1437507360000, 93]],
                "min": [[1437507300000, 93], [1437507360000, 93]],
                "avg": [[1437507300000, 93], [1437507360000, 93]],
            },
            "min": 93,
            "max": 94,
        }
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.process(data, formatter=lambda x: x / (1024 * 1024))
        self.assertDictEqual(d, expected)

    def test_unit_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.base_process(data, self.es.units_process)
        self.assertDictEqual(d, expected)

    def test_request_min_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.base_process(data, self.es.requests_min_process)
        self.assertDictEqual(d, expected)

    def test_connections_process_no_aggregation(self):
        data = {
            "took": 1,
            "timed_out": False,
            "_shards": {"total": 0, "successful": 0, "failed": 0},
            "hits": {"total": 0, "max_score": 0, "hits": []},
        }
        expected = {'data': {}, 'max': 1, 'min': 0}
        d = self.es.base_process(data, self.es.connections_process)
        self.assertDictEqual(d, expected)
def setUp(self):
    """Build an ElasticSearch client with explicit index/app arguments."""
    self.maxDiff = None
    self.es = ElasticSearch("http://url.com", "index", "app")