def test_udp_and_tcp_collectors_combine_and_flush_to_stdout(self):
    udp_port = randint(8125, 8999)
    tcp_port = randint(8125, 9999)
    flush_interval = 2
    self.app_process = self.create_server_process(
        udp_port=udp_port, tcp_port=tcp_port, flush_interval=flush_interval)
    client = Client("localhost", udp_port)
    tcp_client = TCPClient("localhost", tcp_port)
    for _ in range(0, 2):
        client.increment("event")
        tcp_client.increment("event")
    client.timing("process", 8.5)
    client.timing("process", 9.8)
    tcp_client.timing("process", 8.7)
    tcp_client.timing("query", 2)
    # wait for at least 1 flush
    sleep(flush_interval)
    self.app_process.terminate()
    wait_until_server_shuts_down(self.app_process)
    flushed_metrics = self.app_process.communicate()[0].splitlines()
    self.assertGreater(len(flushed_metrics), 9,
                       'flushed 1 counter and at least 2 x 4 timers')
    self.maxDiff = None
    self.assertDictEqual(
        {
            "event": 4,
            "process.count": 3,
            "process.max": 9.8,
            "process.min": 8.5,
            "process.mean": 9.0,
            "process.median": 8.7,
            "query.count": 1,
            "query.max": 2.0,
            "query.min": 2.0,
            "query.mean": 2.0,
            "query.median": 2.0
        },
        metrics_to_dict(flushed_metrics)
    )
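# The helpers used by these integration tests (create_server_process,
# wait_until_server_shuts_down, metrics_to_dict, ...) are defined elsewhere in
# the suite. As a rough, hypothetical sketch of what metrics_to_dict is assumed
# to do here: parse each flushed line (assumed to be a whitespace-separated
# "name value [timestamp]" record) into a {name: value} dict so the assertions
# above can compare against expected numbers. This is an illustration inferred
# from usage, not the suite's actual helper.
def _metrics_to_dict_sketch(flushed_lines):
    metrics = {}
    for line in flushed_lines:
        if isinstance(line, bytes):
            line = line.decode()
        parts = line.split()
        if len(parts) < 2:
            continue  # skip blank or malformed lines
        name, raw_value = parts[0], parts[1]
        try:
            value = int(raw_value)
        except ValueError:
            value = float(raw_value)
        metrics[name] = value
    return metrics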
def test_when_client_is_removed_the_socket_batch_client_socket_is_not_closed(self):
    client = Client("localhost")
    batch_client = client.batch_client()
    sock = batch_client._socket
    del client
    gc.collect()
    self.assertFalse(sock.closed)
def test_udp_collectors_flushing_stdout(self):
    udp_port = randint(8125, 8999)
    flush_interval = 2
    self.app_process = self.create_server_process(
        udp_port=udp_port, flush_interval=flush_interval)
    client = Client("localhost", udp_port)
    for _ in range(0, 3):
        client.increment("event")
    client.timing("process", 101)
    client.timing("process", 102)
    client.timing("process", 103)
    # wait for at least 1 flush
    sleep(flush_interval)
    self.app_process.terminate()
    wait_until_server_shuts_down(self.app_process)
    flushed_metrics = self.app_process.communicate()[0].splitlines()
    self.assertGreater(len(flushed_metrics), 5,
                       'flushed 1 counter and at least 4 timers')
    self.assertDictEqual(
        {
            "event": 3,
            "process.count": 3,
            "process.max": 103,
            "process.min": 101,
            "process.mean": 102,
            "process.median": 102
        },
        metrics_to_dict(flushed_metrics))
def test_context_manager_flushes_metrics(self):
    client = Client("localhost", prefix="_.")
    client._socket = self.mock_socket
    with client.batch_client() as batch_client:
        batch_client.increment("event", rate=0.5)
        batch_client.timing("query", 1200)
        batch_client.decrement("event", rate=0.2)
        self.assertEqual(self.mock_sendto.call_count, 0)
    expected_calls = [
        mock.call(
            bytearray("_.event:1|c|@0.5\n_.query:1200|ms\n".encode()),
            ("127.0.0.2", 8125)
        ),
    ]
    self.assertEqual(self.mock_sendto.mock_calls, expected_calls)
def test_context_manager_flushes_metrics_when_context_ends(self):
    client = Client("localhost", prefix="_.")
    client._socket = self.mock_socket
    with client.batch_client() as batch_client:
        batch_client.increment("event", rate=0.5)
        batch_client.timing("query", 1200)
        batch_client.decrement("event", rate=0.2)
        self.assertEqual(self.mock_sendto.call_count, 0)
    expected_calls = [
        mock.call(
            bytearray("_.event:1|c|@0.5\n_.query:1200|ms\n".encode()),
            ("127.0.0.2", 8125)
        ),
    ]
    self.assertEqual(self.mock_sendto.mock_calls, expected_calls)
def test_context_manager_flushes_metrics_when_context_raises_errors(self):
    client = Client("localhost", prefix="_.")
    client._socket = self.mock_socket
    with self.assertRaises(RuntimeError):
        with client.batch_client() as batch_client:
            batch_client.increment("event", rate=0.5)
            batch_client.timing("query", 300)
            self.assertEqual(self.mock_sendto.call_count, 0)
            raise RuntimeError('mock error')
            batch_client.timing("not.there", 300)
    expected_calls = [
        mock.call(
            bytearray("_.event:1|c|@0.5\n_.query:300|ms\n".encode()),
            ("127.0.0.2", 8125)
        ),
    ]
    self.assertEqual(self.mock_sendto.mock_calls, expected_calls)
def test_get_set_client(self):
    self.assertEqual(self.stopwatch.client, self.client)
    client = Client('127.0.0.2')
    stopwatch = Stopwatch(client, "new_watch")
    self.assertEqual(stopwatch.client, client)
    stopwatch.client = self.client
    self.assertEqual(stopwatch.client, self.client)
def test_get_set_client(self):
    self.assertEqual(self.chronometer.client, self.client)
    client = Client('127.0.0.2')
    chronometer = Chronometer(client)
    self.assertEqual(chronometer.client, client)
    chronometer.client = self.client
    self.assertEqual(chronometer.client, self.client)
def test_timing_since_with_none_float_datetimes_fails(self):
    client = Client("localhost")
    self.assertRaises(ValueError, client.timing_since, "event", "string")
    self.assertRaises(ValueError, client.timing_since, "event", None)
    self.assertRaises(ValueError, client.timing_since, "event", self)
    self.assertRaises(ValueError, client.timing_since, "event", client)
def test_client_creates_stopwatch(self):
    test_start_timestamp = time()
    one_minute_before_test = test_start_timestamp - 60
    client = Client("localhost")
    client._socket = self.mock_socket
    stopwatch = client.stopwatch("event")
    self.assertIsInstance(stopwatch, Stopwatch)
    self.assertEqual(stopwatch.client, client)
    self.assertEqual(stopwatch.rate, 1)
    self.assertGreaterEqual(stopwatch.reference, test_start_timestamp)
    stopwatch_low_rate = client.stopwatch("low_rate", rate=0.001)
    self.assertEqual(stopwatch_low_rate.rate, 0.001)
    self.assertGreaterEqual(stopwatch.reference, test_start_timestamp)
    stopwatch_1min_ref = client.stopwatch(
        "low_rate", reference=one_minute_before_test)
    self.assertGreaterEqual(test_start_timestamp, stopwatch_1min_ref.reference)
    with client.stopwatch("something"):
        sleep(0.01)
    self.assertEqual(self.mock_sendto.call_count, 1)
    request_args = self.mock_sendto.call_args[0]
    self.assertEqual(len(request_args), 2)
    request = request_args[0]
    self.assertRegex(request.decode(), r"something:[1-9]\d{0,3}\|ms")
def test_udp_collectors_flushing_stdout(self):
    udp_port = randint(8125, 8999)
    flush_interval = 2
    self.app_process = self.create_server_process(
        udp_port=udp_port, flush_interval=flush_interval)
    client = Client("localhost", udp_port)
    for _ in range(0, 3):
        client.increment("event")
    client.timing("process", 10.1)
    client.timing("process", 10.2)
    client.timing("process", 10.3)
    # wait for at least 1 flush
    sleep(flush_interval)
    self.app_process.terminate()
    wait_until_server_shuts_down(self.app_process)
    flushed_metrics = self.app_process.communicate()[0].splitlines()
    self.assertGreater(len(flushed_metrics), 5,
                       'flushed 1 counter and at least 4 timers')
    self.assertDictEqual(
        {
            "event": 3,
            "process.count": 3,
            "process.max": 10.3,
            "process.min": 10.1,
            "process.mean": 10.2,
            "process.median": 10.2
        },
        metrics_to_dict(flushed_metrics)
    )
def test_close_on_no_more_client(self):
    sock = AutoClosingSharedSocket(self.mock_socket)
    self.assertFalse(sock.closed)
    client = Client("localhost")
    sock.add_client(client)
    self.assertFalse(sock.closed)
    sock.remove_client(client)
    self.assertTrue(sock.closed)
    self.assertEqual(self.mock_close.call_count, 1)
def test_set(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    client.set("ip address", "10.10.10.1")
    self.mock_sendto.assert_called_with(
        "ip_address:10.10.10.1|s".encode(),
        ("127.0.0.2", 8125)
    )
    client.prefix = "region."
    client.set("~username*", rate=0.9, value='first')
    self.mock_sendto.assert_called_with(
        "region.username:first|s|@0.9".encode(),
        ("127.0.0.2", 8125)
    )
    self.mock_sendto.reset_mock()
    client.set("low.rate", 256, 0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
def test_gauge_delta(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    client.gauge_delta("memory!", 128)
    self.mock_sendto.assert_called_with(
        "memory:+128|g".encode(),
        ("127.0.0.2", 8125)
    )
    client.prefix = "region."
    client.gauge_delta("cpu percentage%", rate=0.9, delta=-12)
    self.mock_sendto.assert_called_with(
        "region.cpu_percentage:-12|g|@0.9".encode(),
        ("127.0.0.2", 8125)
    )
    self.mock_sendto.reset_mock()
    client.gauge_delta("low.rate", 10, 0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
def test_flushing_files(self):
    _, file_name = mkstemp()
    os.remove(file_name)
    udp_port = randint(8125, 8999)
    flush_interval = 2
    csv_file_name = file_name + '.csv'
    self.remove_files.append(file_name)
    self.remove_files.append(csv_file_name)
    self.app_process = self.create_server_process(
        udp_port=udp_port,
        flush_interval=flush_interval,
        flush_file=file_name,
        flush_file_csv=csv_file_name)
    client = Client("localhost", udp_port)
    for _ in range(0, 3):
        client.increment("event")
    client.timing("process", 20)
    client.timing("process", 22)
    client.timing("process", 24)
    # wait for at least 1 flush
    sleep(flush_interval)
    self.app_process.terminate()
    wait_until_server_shuts_down(self.app_process)
    self.assertTrue(os.path.exists(file_name))
    with open(file_name) as file_handle:
        flushed_metrics = [
            line.rstrip() for line in file_handle.readlines()
        ]
    self.assertTrue(os.path.exists(csv_file_name))
    with open(csv_file_name) as file_handle:
        flushed_metrics_csv = [
            line.rstrip() for line in file_handle.readlines()
        ]
    expected_metrics_dict = {
        "event": 3,
        "process.count": 3,
        "process.max": 24,
        "process.min": 20,
        "process.mean": 22,
        "process.median": 22
    }
    self.assertGreater(len(flushed_metrics), 5,
                       'flushed 1 counter and at least 4 timers')
    self.assertDictEqual(expected_metrics_dict,
                         metrics_to_dict(flushed_metrics))
    self.assertGreater(len(flushed_metrics_csv), 5,
                       'flushed 1 counter and at least 4 timers')
def test_context_manager_creates_batch_client(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    with client.batch_client() as batch_client:
        self.assertIsInstance(batch_client, BatchClient)
        self.assertGreater(batch_client.batch_size, 0)
        self.assertEqual(client.host, batch_client.host)
        self.assertEqual(client.port, batch_client.port)
        self.assertEqual(
            client._remote_address, batch_client._remote_address
        )
        self.assertEqual(
            client._socket, batch_client._socket
        )
    with client.batch_client(2048) as batch_client:
        self.assertEqual(batch_client.batch_size, 2048)
def test_gauge(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    client.gauge("memory", 10240)
    self.mock_sendto.assert_called_with(
        "memory:10240|g".encode(),
        ("127.0.0.2", 8125)
    )
    client.prefix = "region."
    client.gauge("cpu percentage%", rate=0.9, value=98.3)
    self.mock_sendto.assert_called_with(
        "region.cpu_percentage:98.3|g|@0.9".encode(),
        ("127.0.0.2", 8125)
    )
    self.mock_sendto.reset_mock()
    client.gauge("low.rate", 128, 0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
    self.assertRaises(AssertionError, client.gauge, "negative", -5)
def test_close_on_no_more_client_multithreaded(self):
    sock = AutoClosingSharedSocket(self.mock_socket)
    self.assertFalse(sock.closed)
    client1 = Client("localhost")
    sock.add_client(client1)
    client2 = Client("localhost")
    sock.add_client(client2)
    client3 = Client("localhost")
    sock.add_client(client3)
    self.assertFalse(sock.closed)

    start_remove = threading.Event()

    def _remove_client(client):
        start_remove.wait()
        self.assertFalse(sock.closed)
        sock.remove_client(client)

    th1 = threading.Thread(target=_remove_client, args=[client1])
    th2 = threading.Thread(target=_remove_client, args=[client2])
    th3 = threading.Thread(target=_remove_client, args=[client3])
    th1.start()
    th2.start()
    th3.start()
    start_remove.set()
    th1.join()
    th2.join()
    th3.join()
    self.assertTrue(sock.closed)
    self.assertEqual(self.mock_close.call_count, 1)
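# AutoClosingSharedSocket is exercised by the two tests above but not defined
# in this section. The class below is a hypothetical stand-in, sketching only
# the behaviour those tests rely on: it wraps a socket, tracks the clients
# sharing it, and closes the underlying socket exactly once when the last
# client is removed, even when removals race across threads. It is not the
# library's real implementation.
class _SharedSocketSketch(object):
    def __init__(self, sock):
        self._socket = sock
        self._closed = False
        self._client_ids = set()
        self._lock = threading.Lock()

    @property
    def closed(self):
        return self._closed

    def add_client(self, client):
        with self._lock:
            self._client_ids.add(id(client))

    def remove_client(self, client):
        with self._lock:
            self._client_ids.discard(id(client))
            if not self._client_ids and not self._closed:
                # last user gone: close the wrapped socket exactly once
                self._socket.close()
                self._closed = True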
def test_timing_since_with_datetime_timestamp(self):
    start_time = datetime.now()
    client = Client("localhost")
    client._socket = self.mock_socket
    sleep(0.01)
    client.timing_since("event", start_time)
    self.assertEqual(self.mock_sendto.call_count, 1)
    socket_sendto_args = self.mock_sendto.call_args
    self.assertEqual(len(socket_sendto_args), 2)
    request, remote_address = socket_sendto_args[0]
    self.assertRegex(request.decode(), r"event:[1-9]\d{0,3}\|ms")
    self.assertEqual(remote_address, ("127.0.0.2", 8125))
    self.mock_sendto.reset_mock()
    client.timing_since("low.rate", start_time, rate=0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
def test_timing_since_with_timestamp_as_number(self):
    start_time = time()
    client = Client("localhost")
    client._socket = self.mock_socket
    self.assertRaises(AssertionError, client.timing_since, "negative", -1)
    sleep(0.01)
    client.timing_since("event", start_time)
    self.assertEqual(self.mock_sendto.call_count, 1)
    socket_sendto_args = self.mock_sendto.call_args
    self.assertEqual(len(socket_sendto_args), 2)
    request, remote_address = socket_sendto_args[0]
    self.assertRegex(request.decode(), r"event:[1-9]\d{0,3}\|ms")
    self.assertEqual(remote_address, ("127.0.0.2", 8125))
    self.mock_sendto.reset_mock()
    client.timing_since("low.rate", start_time, rate=0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
def test_udp_and_tcp_collectors_combine_and_flush_to_stdout(self):
    udp_port = randint(8125, 8999)
    tcp_port = randint(8125, 9999)
    flush_interval = 2
    self.app_process = self.create_server_process(
        udp_port=udp_port, tcp_port=tcp_port, flush_interval=flush_interval)
    client = Client("localhost", udp_port)
    tcp_client = TCPClient("localhost", tcp_port)
    for _ in range(0, 2):
        client.increment("event")
        tcp_client.increment("event")
    client.timing("process", 85)
    client.timing("process", 98)
    tcp_client.timing("process", 87)
    tcp_client.timing("query", 2)
    # wait for at least 1 flush
    sleep(flush_interval)
    self.app_process.terminate()
    wait_until_server_shuts_down(self.app_process)
    flushed_metrics = self.app_process.communicate()[0].splitlines()
    self.assertGreater(len(flushed_metrics), 9,
                       'flushed 1 counter and at least 2 x 4 timers')
    self.maxDiff = None
    self.assertDictEqual(
        {
            "event": 4,
            "process.count": 3,
            "process.max": 98,
            "process.min": 85,
            "process.mean": 90,
            "process.median": 87,
            "query.count": 1,
            "query.max": 2.0,
            "query.min": 2.0,
            "query.mean": 2.0,
            "query.median": 2.0
        },
        metrics_to_dict(flushed_metrics))
def setUp(self):
    self.client = Client('127.0.0.1')
    self.request_mock = mock.MagicMock()
    self.client._request = self.request_mock
    self.chronometer = Chronometer(self.client)
def test_timing(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    client.timing("event", 10)
    self.mock_sendto.assert_called_with(
        "event:10|ms".encode(),
        ("127.0.0.2", 8125)
    )
    client.timing("db.event name", 34.5, 0.5)
    self.mock_sendto.assert_called_with(
        "db.event_name:34|ms|@0.5".encode(),
        ("127.0.0.2", 8125)
    )
    client.prefix = "region.c_"
    client.timing("db/query", rate=0.7, milliseconds=22.22)
    self.mock_sendto.assert_called_with(
        "region.c_db-query:22|ms|@0.7".encode(),
        ("127.0.0.2", 8125)
    )
    self.mock_sendto.reset_mock()
    client.timing("low.rate", 12, rate=0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
    self.assertRaises(AssertionError, client.timing, "negative", -1)
def test_decrement(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    client.decrement("event")
    self.mock_sendto.assert_called_with(
        "event:-1|c".encode(),
        ("127.0.0.2", 8125)
    )
    client.decrement("event2", 5)
    self.mock_sendto.assert_called_with(
        "event2:-5|c".encode(),
        ("127.0.0.2", 8125)
    )
    client.decrement("region.event name", 2, 0.5)
    self.mock_sendto.assert_called_with(
        "region.event_name:-2|c|@0.5".encode(),
        ("127.0.0.2", 8125)
    )
    client.prefix = "region.c_"
    client.decrement("active!users", rate=0.7)
    self.mock_sendto.assert_called_with(
        "region.c_activeusers:-1|c|@0.7".encode(),
        ("127.0.0.2", 8125)
    )
    self.mock_sendto.reset_mock()
    client.decrement("low.rate", rate=0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
def test_increment(self):
    client = Client("localhost")
    client._socket = self.mock_socket
    client.increment("event")
    self.mock_sendto.assert_called_with(
        "event:1|c".encode(),
        ("127.0.0.2", 8125)
    )
    client.increment("event2", 5)
    self.mock_sendto.assert_called_with(
        "event2:5|c".encode(),
        ("127.0.0.2", 8125)
    )
    client.increment("region.event name", 2, 0.5)
    self.mock_sendto.assert_called_with(
        "region.event_name:2|c|@0.5".encode(),
        ("127.0.0.2", 8125)
    )
    client.prefix = "region.c_"
    client.increment("@login#", rate=0.6)
    self.mock_sendto.assert_called_with(
        "region.c_login:1|c|@0.6".encode(),
        ("127.0.0.2", 8125)
    )
    self.mock_sendto.reset_mock()
    client.increment("low.rate", rate=0.1)
    self.assertEqual(self.mock_sendto.call_count, 0)
def test_reload_server_keeps_the_queue(self):
    tcp_port = randint(8125, 9999)
    _, config_filename = mkstemp(text=True)
    self.remove_files.append(config_filename)
    write_to_file(
        config_filename,
        """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-tcp=127.0.0.1:{}
flush-interval=60
""".format(tcp_port))
    self.app_process = self.create_server_process(config=config_filename)
    tcp_client = TCPClient("localhost", tcp_port)
    for _ in range(0, 2):
        tcp_client.increment("event")
    tcp_client.timing("query", 2)
    del tcp_client
    wait_until_server_processed_metric(self.app_process, 'event')
    udp_port = randint(8125, 8999)
    flush_interval = 5
    write_to_file(
        config_filename,
        """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-udp=127.0.0.1:{}
flush-interval={}
""".format(udp_port, flush_interval))
    self.app_process.send_signal(signal.SIGHUP)
    wait_until_server_starts_collecting(self.app_process, 10)
    client = Client("localhost", udp_port)
    for _ in range(0, 2):
        client.increment("event")
    client.timing("query", 4)
    client.increment("finish")
    # TCP collector should be down
    self.assertRaises(Exception, TCPClient, "localhost", tcp_port)
    os.remove(config_filename)
    wait_until_server_processed_metric(self.app_process, 'finish')
    # wait for at least 1 flush
    sleep(flush_interval)
    self.app_process.terminate()
    wait_until_server_shuts_down(self.app_process)
    flushed_metrics = self.app_process.communicate()[0].splitlines()
    self.assertGreaterEqual(len(flushed_metrics), 5)
    self.maxDiff = None
    self.assertDictEqual(
        {
            "event": 4,
            "query.count": 2,
            "query.max": 4.0,
            "query.min": 2.0,
            "query.mean": 3.0,
            "query.median": 3.0,
            "finish": 1
        },
        metrics_to_dict(flushed_metrics))
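# wait_until_server_shuts_down, wait_until_server_processed_metric and
# wait_until_server_starts_collecting are helpers assumed by the tests above
# and defined elsewhere in the suite. A minimal sketch of the shutdown helper,
# assuming app_process is a subprocess.Popen and that polling its return code
# is enough (an illustrative assumption, not the suite's actual helper):
def _wait_until_server_shuts_down_sketch(process, timeout=10):
    deadline = time() + timeout
    while process.poll() is None:  # poll() returns None while the process runs
        if time() > deadline:
            raise AssertionError(
                "server did not shut down within {} seconds".format(timeout))
        sleep(0.1)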
def test_client_creates_chronometer(self):
    client = Client("localhost")
    chronometer = client.chronometer()
    self.assertIsInstance(chronometer, Chronometer)
    self.assertEqual(chronometer.client, client)
def test_sending_metrics(self):
    start = datetime.now()
    start_timestamp = time()
    client = Client("localhost", self.__class__.port)
    client.increment("1.test", 5)
    client.increment("2.login")
    client.timing("3.query", 3600)
    client.gauge("4.memory", 102400)
    client.gauge_delta("5.memory", 256)
    client.gauge_delta("6.memory", -128)
    client.set("7.ip", "127.0.0.1")
    expected = [
        "1.test:5|c",
        "2.login:1|c",
        "3.query:3600|ms",
        "4.memory:102400|g",
        "5.memory:+256|g",
        "6.memory:-128|g",
        "7.ip:127.0.0.1|s",
    ]
    self.assert_server_received_expected_requests(expected)

    self.__class__.server.requests.clear()
    client.timing_since("1.query", start_timestamp)
    client.timing_since("2.other_query", start)
    chronometer = client.chronometer()
    chronometer.time_callable("3.sleepy", sleep, 1, (0.02,))

    @chronometer.wrap("4.wait_a_sec")
    def wait_a_sec():
        sleep(0.01)

    wait_a_sec()
    with client.stopwatch("5.my_with_block"):
        sleep(0.02)
    expected_patterns = [
        r"1.query:[1-9]\d{0,4}\|ms",
        r"2.other_query:[1-9]\d{0,4}\|ms",
        r"3.sleepy:[1-9]\d{0,4}\|ms",
        r"4.wait_a_sec:[1-9]\d{0,4}\|ms",
        r"5.my_with_block:[1-9]\d{0,4}\|ms",
    ]
    self.assert_server_received_expected_request_regex(expected_patterns)
def setUp(self):
    self.client = Client('127.0.0.1')
    self.request_mock = mock.MagicMock()
    self.client._request = self.request_mock
    self.metric_name = "timed_event"
    self.stopwatch = Stopwatch(self.client, self.metric_name)