def build_alarm(self, name):
    """Build the alarm JSON payload for *name*.

    Assembles the alarm definition, its related metrics, the current
    state (with a human-readable reason), one sub-alarm entry per
    sub-expression, and the ISO-8601 timestamps taken from the
    expression data queue, then serializes everything to a JSON string.

    :param name: key into ``self.expr_data_queue`` / ``self.related_metrics``
    :returns: JSON-encoded alarm document (str)
    """
    # Snapshot the queue entry once instead of re-indexing it repeatedly.
    data = self.expr_data_queue[name]
    sub_results = data["data"]
    alarm = {
        # NOTE: the original bound this to a local named ``id``,
        # shadowing the builtin; renamed via direct dict insertion.
        "id": str(uuid.uuid4()),
        "alarm_definition": self.alarm_definition,
        "metrics": self.related_metrics[name],
        "state": data["state"],
        # ``reasons`` is a module-level mapping from state to message.
        "reason": reasons[data["state"]],
        "reason_data": {},
        "sub_alarms": [
            {
                "sub_alarm_expression":
                    self.sub_alarm_expr[expr.fmtd_sub_expr_str],
                "sub_alarm_state":
                    sub_results[expr.fmtd_sub_expr_str]["state"],
                "current_values":
                    sub_results[expr.fmtd_sub_expr_str]["values"],
            }
            for expr in self.sub_expr_list
        ],
        "state_updated_timestamp":
            tu.iso8601_from_timestamp(data["state_update_timestamp"]),
        "updated_timestamp":
            tu.iso8601_from_timestamp(data["update_timestamp"]),
        "created_timestamp":
            tu.iso8601_from_timestamp(data["create_timestamp"]),
    }
    return json.dumps(alarm)
def test_get_meter_byname(self):
    """get_meter_byname should answer HTTP 200 with the expected sample."""
    res = mock.Mock()
    req = mock.Mock()

    # Map request parameters; dict.get yields None for anything else,
    # matching the original side-effect function's fall-through.
    params = {
        'name': 'tongli',
        'dimensions': 'key1:100, key2:200',
    }
    req.get_param.side_effect = params.get

    req_result = mock.Mock()
    req_result.json.return_value = json.loads(self.response_str)
    req_result.status_code = 200

    with mock.patch.object(requests, 'post', return_value=req_result):
        self.dispatcher.get_meter_byname(req, res, "BABMGD")

    # test that the response code is 200
    self.assertEqual(res.status, getattr(falcon, 'HTTP_200'))

    obj = json.loads(res.body)
    first = obj[0]
    expected = {
        'counter_name': 'BABMGD',
        'counter_type': 'metrics',
        'user_id': 'efd87807-12d2-4b38-9c70-5f5c2ac427ff',
        'project_id': '35b17138-b364-4e6a-a131-8f3099c5be68',
        'counter_volume': 4,
        'timestamp': tu.iso8601_from_timestamp(1461337094000),
    }
    for key, value in expected.items():
        self.assertEqual(first[key], value)
    self.assertEqual(len(obj), 1)
def test_do_get_statistics(self):
    """get_meter_statistics should answer HTTP 200 with two stat rows."""
    res = mock.Mock()
    req = mock.Mock()

    # Map request parameters; dict.get yields None for anything else,
    # matching the original side-effect function's fall-through.
    params = {
        'name': 'tongli',
        'dimensions': 'key1:100, key2:200',
        'start_time': '2014-01-01',
        'end_time': None,
        'period': None,
        'statistics': 'avg, sum, max',
    }
    req.get_param.side_effect = params.get

    req_result = mock.Mock()
    response_str = """
    {"took":2006,"timed_out":false,"_shards":{"total":5,"successful":5,
    "failed":0},"hits":{"total":600,"max_score":0.0,"hits":[]},
    "aggregations":{"by_name":{"doc_count_error_upper_bound":0,
    "sum_other_doc_count":0,"buckets":[{"key":"BABMGD","doc_count":300,
    "by_dim":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,
    "buckets":[{"key":"64e6ce08b3b8547b7c32e5cfa5b7d81f","doc_count":300,
    "periods":{"buckets":[{"key":1421700000,"doc_count":130,
    "statistics":{"count":130,"min":0.0,"max":595.0274095324651,
    "avg":91.83085293930924,"sum":11938.0108821102}},
    {"key":1422000000,"doc_count":170,"statistics":{"count":170,
    "min":0.0,"max":1623.511307756313,"avg":324.69434786459897,
    "sum":55198.039136981824}}]},"dimension":{"hits":{"total":300,
    "max_score":1.4142135,"hits":[{"_index":"data_20150121",
    "_type":"metrics","_id":"AUsSNF5mTZaMxA7_wmFx","_score":1.4142135,
    "_source":{"name":"BABMGD","dimensions":{"key2":"NVITDU",
    "key1":"FUFMPY","key_43":"ROQBZM"}}}]}}}]}}]}}}
    """
    req_result.json.return_value = json.loads(response_str)
    req_result.status_code = 200

    with mock.patch.object(requests, 'post', return_value=req_result):
        self.dispatcher.get_meter_statistics(req, res, 'BABMGD')

    # test that the response code is 200
    self.assertEqual(res.status, getattr(falcon, 'HTTP_200'))
    print(res.body)
    obj = json.loads(res.body)

    # there should be total of 2 objects
    self.assertEqual(len(obj), 2)
    self.assertEqual(obj[0]['avg'], 91.8308529393)
    self.assertEqual(obj[1]['max'], 1623.51130776)
    self.assertEqual(obj[1]['period'], 300)
    self.assertEqual(obj[0]['duration_start'],
                     tu.iso8601_from_timestamp(1421700000))
def _render_metric(dim):
    """Stream the JSON text for a single metric dimension bucket."""
    src = dim['dimension']['hits']['hits'][0]['_source']
    yield '{"name":"' + src['name'] + '","dimensions":'
    yield json.dumps(src['dimensions'])
    yield ',"columns":["id","timestamp","value"],"measurements":['
    # Every row after the first is preceded by a comma separator.
    for idx, hit in enumerate(dim['measures']['hits']['hits']):
        body = hit['_source']
        entry = ('["' + hit['_id'] + '","' +
                 tu.iso8601_from_timestamp(body['timestamp']) + '",' +
                 str(body['value']) + ']')
        yield entry if idx == 0 else ',' + entry
    yield ']}'
def _render_stats(dim):
    """Stream the JSON text for one statistics dimension bucket."""
    src = dim['dimension']['hits']['hits'][0]['_source']
    yield '{"name":"' + src['name'] + '","dimensions":'
    yield json.dumps(src['dimensions'])
    # col_json and stats come from the enclosing scope.
    yield ',"columns":' + col_json + ',"statistics":['
    # Every row after the first is preceded by a comma separator.
    for idx, bucket in enumerate(dim['periods']['buckets']):
        parts = ['["' + tu.iso8601_from_timestamp(bucket['key']) + '"']
        parts.extend(',' + str(bucket['statistics'][s]) for s in stats)
        row = ''.join(parts) + ']'
        yield row if idx == 0 else ',' + row
    yield ']}'
def _render_hits(item):
    """Render one aggregation bucket as a ceilometer-style sample string.

    Uses the enclosing scope's ``flag['is_first']`` to decide whether a
    leading comma separator is required.
    """
    hit = item['meters']['hits']['hits'][0]
    src = hit['_source']
    fields = [
        '"counter_name":' + json.dumps(src['name']),
        '"counter_type":' + json.dumps(hit['_type']),
        '"counter_unit":null',
        '"counter_volume":' + json.dumps(src['value']),
        '"message_id":null',
        '"project_id":' + json.dumps(src['project_id']),
        '"recorded_at":null',
        '"resource_id":' + json.dumps(src['tenant_id']),
        '"resource_metadata":null',
        '"source":' + json.dumps(src['user_agent']),
        '"timestamp":"' + tu.iso8601_from_timestamp(src['timestamp']) + '"',
        '"user_id":' + json.dumps(src['user_id']),
    ]
    rslt = '{' + ','.join(fields) + '}'
    if flag['is_first']:
        flag['is_first'] = False
        return rslt
    return ',' + rslt
def _render_stats(dim):
    """Stream ceilometer-style statistics rows for one dimension bucket.

    Yields one JSON object per period bucket; the first row carries
    'null' period/duration since there is no earlier bucket to diff
    against, later rows report second-resolution differences.
    """
    first = True
    oldest_time = []
    previous_time = []
    for bucket in dim['periods']['buckets']:
        current_time = bucket['key']
        # calculte period and duration difference
        if first:
            period_diff = 'null'
            duration_diff = 'null'
            oldest_time = current_time
            previous_time = current_time
        else:
            # bucket keys are in milliseconds; report seconds.
            period_diff = (current_time - previous_time) / 1000
            duration_diff = (current_time - oldest_time) / 1000

        # parses the statistics data; default every value to 'null'
        vals = dict.fromkeys(('max', 'min', 'sum', 'avg', 'count'), 'null')
        if 'statistics' in bucket:
            for key in vals:
                vals[key] = str(bucket['statistics'][key])
        else:
            # Individual single-value aggregations instead of a
            # combined stats block; each is optional.
            for agg, key in (('average', 'avg'), ('maximum', 'max'),
                             ('minimum', 'min'), ('count', 'count'),
                             ('sum', 'sum')):
                if agg in bucket:
                    vals[key] = str(bucket[agg]['value'])

        # previous_time still holds the prior bucket here, so the
        # period_start timestamp must be computed before it is updated.
        curr_ts = tu.iso8601_from_timestamp(current_time)
        prev_ts = tu.iso8601_from_timestamp(previous_time)
        old_ts = tu.iso8601_from_timestamp(oldest_time)
        row = ('{"avg":' + vals['avg'] + ','
               '"count":' + vals['count'] + ','
               '"duration":' + str(duration_diff) + ','
               '"duration_end":' + '"%s"' % str(curr_ts) + ','
               '"duration_start":' + '"%s"' % str(old_ts) + ','
               '"max":' + vals['max'] + ','
               '"min":' + vals['min'] + ','
               '"period":' + str(period_diff) + ','
               '"period_end":' + '"%s"' % str(curr_ts) + ','
               '"period_start":' + '"%s"' % str(prev_ts) + ','
               '"sum":' + vals['sum'] + ','
               '"unit":null}')
        previous_time = current_time
        if first:
            first = False
            yield row
        else:
            yield ',' + row