class MeasuredClientMixIn(object):
    """Mixin for a Tornado-style HTTP client that reports request metrics.

    Each completed request is recorded as an ``http_response`` count whose
    dimensions include the URL, host, status code and elapsed time, plus any
    extra dimensions supplied at initialization.
    """

    def initialize(self, client, address, dimensions, *args, **kwargs):
        super(MeasuredClientMixIn, self).initialize(*args, **kwargs)
        self.measure = Measure(client, address)
        self.dimensions = dimensions

    def fetch_impl(self, request, callback):
        started_at = time.time()

        def timed_callback(response):
            # Measure wall-clock time from fetch to response delivery.
            elapsed = time.time() - started_at
            self.write_metric(request, response, elapsed)
            return callback(response)

        return super(MeasuredClientMixIn, self).fetch_impl(request, timed_callback)

    def write_metric(self, request, response, response_time):
        """Emit a single ``http_response`` count for a finished request."""
        metric_dimensions = {
            'url': request.url,
            'host': urlparse(request.url).netloc,
            'status_code': response.code,
            'response_time': response_time,
        }
        # Instance-level dimensions (if any) override the per-request ones.
        if self.dimensions:
            metric_dimensions.update(self.dimensions)
        self.measure.count('http_response', dimensions=metric_dimensions)
class LogstashBackend(object):
    """Metric backend that ships app/host measurements to Logstash over UDP.

    Connection details come from the environment (``LOGSTASH_HOST``,
    ``LOGSTASH_PORT``, ``LOGSTASH_CLIENT``), defaulting to localhost:1984.
    """

    def __init__(self):
        env = os.environ
        self.app_name = env.get("TSURU_APPNAME")
        self.host_name = socket.gethostname()
        # Fall back to a local Logstash instance when unconfigured.
        logstash_host = env.get("LOGSTASH_HOST") or 'localhost'
        logstash_port = env.get("LOGSTASH_PORT") or '1984'
        from measures import Measure
        self.client = Measure(env.get("LOGSTASH_CLIENT", "tsuru"),
                              (logstash_host, int(logstash_port)))

    def stop(self):
        # Nothing to tear down: sends are fire-and-forget UDP.
        pass

    def gauge(self, key, value):
        """Record *value* (coerced to float) under *key*, tagged app/host."""
        self.client.count(key, dimensions={
            "app": self.app_name,
            "host": self.host_name,
            "value": float(value),
        })

    def disk_usage(self, value):
        self.gauge("disk_usage", value)

    def net_sent(self, value):
        self.gauge("net_sent", value)

    def net_recv(self, value):
        self.gauge("net_recv", value)

    def net_connections(self, value):
        self.gauge("net_connections", value)

    def connections(self, connection_list):
        """Record one ``connection`` count per entry in *connection_list*."""
        for conn in connection_list:
            self.client.count("connection", dimensions={
                "app": self.app_name,
                "host": self.host_name,
                "connection": conn,
            })

    def cpu_max(self, name, value):
        self.gauge("cpu_max", value)

    def cpu_sum(self, name, value):
        self.gauge("cpu_sum", value)

    def mem_max(self, name, value):
        self.gauge("mem_max", value)

    def mem_sum(self, name, value):
        self.gauge("mem_sum", value)

    def mem_pct_max(self, name, value):
        self.gauge("mem_pct_max", value)

    def mem_pct_sum(self, name, value):
        self.gauge("mem_pct_sum", value)
class ELKClient:
    """Usage-record store: writes counts to Logstash, queries Elasticsearch."""

    def __init__(self):
        cfg = app.config
        self.measure = Measure(cfg['ELASTICSEARCH_CLIENT'],
                               (cfg['LOGSTASH_HOST'], int(cfg['LOGSTASH_PORT'])))
        self.es = connections.create_connection(
            hosts=[cfg['ELASTICSEARCH_URL'] + ':' + cfg['ELASTICSEARCH_PORT']])

    def create_usage_record(self, data):
        """Ship one usage record to Logstash with *data* as its dimensions."""
        self.measure.count(app.config['ELASTICSEARCH_TYPE'], dimensions=data)

    def find_usage_records(self, region, account, start, end):
        """Return raw-usage sums aggregated by project, type and offering.

        *start*/*end* are date strings; *account* is optional.
        """
        search = Search(using=self.es,
                        index=app.config['ELASTICSEARCH_INDEX'],
                        doc_type=app.config['ELASTICSEARCH_TYPE'])
        search = search.filter('term', region=region)
        if account is not None:
            search = search.filter('term', account=account)
        date_window = {
            'gte': parse(start).date().isoformat(),
            'lte': parse(end).date().isoformat(),
        }
        # [0:0] suppresses the hit list; only aggregations are wanted.
        search = search.filter('range', date=date_window)[0:0]
        # size=0 asks the terms aggregation for all buckets (legacy ES).
        nested = search.aggs.bucket('by_project', 'terms',
                                    field='projectid.raw', size=0)
        nested = nested.bucket('by_type', 'terms', field='usagetype.raw')
        nested = nested.bucket('by_offering', 'terms', field='offering_struct.raw')
        nested.metric('rawusage_sum', 'sum', field='rawusage')
        return search.execute().aggregations.to_dict()

    def delete_usage_records(self, region, date):
        """Delete every usage record for *region* on *date*."""
        index = app.config['ELASTICSEARCH_INDEX']
        doc_type = app.config['ELASTICSEARCH_TYPE']
        selection = Search(using=self.es, index=index, doc_type=doc_type)
        selection = selection.filter('term', region=region)
        selection = selection.filter('term', date=date)
        self.es.delete_by_query(index=index, doc_type=doc_type,
                                body=selection.to_dict())

    def health(self):
        """Return the Elasticsearch cluster health document."""
        return self.es.cluster.health()
class MeasureClient:
    """Writes usage counts to Logstash and queries them back from ES."""

    def __init__(self):
        conf = app.config
        self.measure = Measure(conf['ELASTICSEARCH_CLIENT'],
                               (conf['LOGSTASH_HOST'], int(conf['LOGSTASH_PORT'])))
        self.es = connections.create_connection(
            hosts=[conf['ELASTICSEARCH_URL'] + ':' + conf['ELASTICSEARCH_PORT']])

    def create(self, data):
        """Emit *data* as the dimensions of a single usage count."""
        self.measure.count(app.config['ELASTICSEARCH_TYPE'], dimensions=data)

    def find(self, region, account, start, end):
        """Return raw-usage sums grouped by project, usage type and offering."""
        query = Search(using=self.es,
                       index=app.config['ELASTICSEARCH_INDEX'],
                       doc_type=app.config['ELASTICSEARCH_TYPE'])
        query = query.filter('term', region=region)
        if account is not None:
            query = query.filter('term', account=account)
        window = {'gte': parse(start).date().isoformat(),
                  'lte': parse(end).date().isoformat()}
        # Slice to [0:0]: hits are discarded, only aggregations matter.
        query = query.filter('range', date=window)[0:0]
        agg = query.aggs.bucket('by_project', 'terms',
                                field='projectid.raw', size=0)
        agg = agg.bucket('by_type', 'terms', field='usagetype.raw')
        agg = agg.bucket('by_offering', 'terms', field='offeringid.raw')
        agg.metric('rawusage_sum', 'sum', field='rawusage')
        return query.execute().aggregations.to_dict()

    def delete(self, region, date):
        """Delete every record matching *region* and *date*."""
        index = app.config['ELASTICSEARCH_INDEX']
        doc_type = app.config['ELASTICSEARCH_TYPE']
        selection = (Search(using=self.es, index=index, doc_type=doc_type)
                     .filter('term', region=region)
                     .filter('term', date=date))
        self.es.delete_by_query(index=index, doc_type=doc_type,
                                body=selection.to_dict())

    def health(self):
        """Return the Elasticsearch cluster health document."""
        return self.es.cluster.health()
class MeasureTestCase(UDPServerTestCase):
    """End-to-end tests exercising Measure against a local UDP test server."""

    def setUp(self):
        super(MeasureTestCase, self).setUp()
        self.measure = Measure('myclient', ('127.0.0.1', 1984))

    def _assert_finishes_quickly(self, target):
        # Run *target* in a subprocess and require it to complete within
        # 0.3s, proving the UDP send never blocks the caller.
        proc = Process(target=target)
        proc.start()
        proc.join(0.3)
        self.assertFalse(proc.is_alive())
        proc.terminate()

    def test_recieve_count_message_correctly_over_network(self):
        self.measure.count('mymetric')
        self.wait_for(lambda: len(self.messages))
        self.assertEqual(len(self.messages), 1)
        payload = json.loads(self.messages[0].decode('utf-8'))
        self.assertDictEqual(payload, {
            'client': 'myclient',
            'metric': 'mymetric',
            'count': 1,
        })

    def test_recieve_time_message_correctly_over_network(self):
        with self.measure.time('mymetric'):
            pass
        # Drain packets until the timing message shows up.
        while True:
            self.wait_for(lambda: len(self.messages))
            message = json.loads(self.messages.pop().decode('utf-8'))
            if 'time' in message:
                break
        self.assertIsInstance(message['time'], float)
        self.assertIn('client', message)
        self.assertEqual(message['client'], 'myclient')
        self.assertIn('metric', message)
        self.assertEqual(message['metric'], 'mymetric')
        self.assertGreater(message['time'], 0)
        self.assertIn('error_type', message)
        self.assertEqual(message['error_type'], '')
        self.assertIn('error_value', message)
        self.assertEqual(message['error_value'], '')

    def test_count_must_not_hang_if_server_is_up(self):
        self._assert_finishes_quickly(lambda: self.measure.count('mymetric'))

    def test_time_must_not_hang_if_server_is_up(self):
        self._assert_finishes_quickly(lambda: self.measure.time('mymetric'))

    def test_count_must_not_hang_if_server_is_down(self):
        self.__class__.stop_server()
        time.sleep(0.5)
        self._assert_finishes_quickly(lambda: self.measure.count('mymetric'))

    def test_time_must_not_hang_if_server_is_down(self):
        self.__class__.stop_server()
        time.sleep(0.5)
        self._assert_finishes_quickly(lambda: self.measure.time('mymetric'))
class LogstashBackend(object):
    """Backend that reports machine and app metrics to a Logstash endpoint.

    Reads its target from ``LOGSTASH_HOST``/``LOGSTASH_PORT`` (defaulting to
    localhost:1984) and tags every metric with the app and host names.
    """

    def __init__(self):
        environ = os.environ
        self.app_name = environ.get("TSURU_APPNAME")
        self.host_name = socket.gethostname()
        # Default to a local Logstash when no endpoint is configured.
        host = environ.get("LOGSTASH_HOST") or 'localhost'
        port = environ.get("LOGSTASH_PORT") or '1984'
        from measures import Measure
        self.client = Measure(environ.get("LOGSTASH_CLIENT", "tsuru"),
                              (host, int(port)))

    def _tags(self):
        # Base dimensions attached to every metric this backend emits.
        return {"app": self.app_name, "host": self.host_name}

    def stop(self):
        # No persistent resources to release.
        pass

    def gauge(self, key, value):
        """Record *value* (coerced to float) for *key* with app/host tags."""
        dimensions = self._tags()
        dimensions["value"] = float(value)
        self.client.count(key, dimensions=dimensions)

    def disk_usage(self, value):
        self.gauge("disk_usage", value)

    def net_sent(self, value):
        self.gauge("net_sent", value)

    def net_recv(self, value):
        self.gauge("net_recv", value)

    def net_connections(self, value):
        self.gauge("net_connections", value)

    def connections(self, connection_list):
        """Emit one ``connection`` count per entry of *connection_list*."""
        for conn in connection_list:
            dimensions = self._tags()
            dimensions["connection"] = conn
            self.client.count("connection", dimensions=dimensions)

    def cpu_max(self, name, value):
        self.gauge("cpu_max", value)

    def cpu_sum(self, name, value):
        self.gauge("cpu_sum", value)

    def mem_max(self, name, value):
        self.gauge("mem_max", value)

    def mem_sum(self, name, value):
        self.gauge("mem_sum", value)

    def mem_pct_max(self, name, value):
        self.gauge("mem_pct_max", value)

    def mem_pct_sum(self, name, value):
        self.gauge("mem_pct_sum", value)