def test_when_client_is_removed_the_socket_batch_client_socket_is_not_closed(self):
    """Deleting the parent client must not close the batch client's shared socket."""
    tcp_client = TCPClient("localhost")
    batch = tcp_client.batch_client()
    shared_socket = batch._socket
    # drop the parent and force a collection pass; the batch client still
    # holds the socket, so it must remain open
    del tcp_client
    gc.collect()
    self.assertFalse(shared_socket.closed)
# Example #2
# 0
    def test_udp_and_tcp_collectors_combine_and_flush_to_stdout(self):
        """Metrics sent over UDP and TCP should merge into one flushed report."""
        udp_port = randint(8125, 8999)
        tcp_port = randint(8125, 9999)
        flush_interval = 2
        self.app_process = self.create_server_process(
            udp_port=udp_port, tcp_port=tcp_port, flush_interval=flush_interval)

        udp_client = Client("localhost", udp_port)
        tcp_client = TCPClient("localhost", tcp_port)
        for _ in range(2):
            udp_client.increment("event")
            tcp_client.increment("event")
        udp_client.timing("process", 8.5)
        udp_client.timing("process", 9.8)
        tcp_client.timing("process", 8.7)
        tcp_client.timing("query", 2)

        # wait for at least 1 flush
        sleep(flush_interval)
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)
        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreater(len(flushed_metrics), 9,
                           'flushed 1 counter and at least 2 x 4 timers')

        self.maxDiff = None
        expected = {
            "event": 4,
            "process.count": 3,
            "process.max": 9.8,
            "process.min": 8.5,
            "process.mean": 9.0,
            "process.median": 8.7,
            "query.count": 1,
            "query.max": 2.0,
            "query.min": 2.0,
            "query.mean": 2.0,
            "query.median": 2.0,
        }
        self.assertDictEqual(expected, metrics_to_dict(flushed_metrics))
    def test_gauge_delta(self):
        """gauge_delta sends signed gauge changes, honoring prefix and rate."""
        tcp = TCPClient("localhost")
        tcp._socket = self.mock_socket

        tcp.gauge_delta("memory!", 128)
        self.mock_sendall.assert_called_with("memory:+128|g\n".encode())

        tcp.prefix = "region."
        tcp.gauge_delta("cpu percentage%", rate=0.9, delta=-12)
        self.mock_sendall.assert_called_with(
            "region.cpu_percentage:-12|g|@0.9\n".encode())

        # very low sample rate: nothing should hit the socket
        self.mock_sendall.reset_mock()
        tcp.gauge_delta("low.rate", 10, 0.1)
        self.assertEqual(self.mock_sendall.call_count, 0)
    def test_set(self):
        """set sends set metrics, normalizing names and applying prefix/rate."""
        tcp = TCPClient("localhost")
        tcp._socket = self.mock_socket

        tcp.set("ip address", "10.10.10.1")
        self.mock_sendall.assert_called_with("ip_address:10.10.10.1|s\n".encode())

        tcp.prefix = "region."
        tcp.set("~username*", rate=0.9, value='first')
        self.mock_sendall.assert_called_with("region.username:first|s|@0.9\n".encode())

        # very low sample rate: nothing should hit the socket
        self.mock_sendall.reset_mock()
        tcp.set("low.rate", 256, 0.1)
        self.assertEqual(self.mock_sendall.call_count, 0)
 def test_increment(self):
     """increment sends counter metrics, normalizing names and applying rate."""
     tcp = TCPClient("localhost")
     tcp._socket = self.mock_socket
     tcp.increment("event")
     self.mock_sendall.assert_called_with("event:1|c\n".encode())
     # spaces in metric names are normalized to underscores
     tcp.increment("region.event name", 2, 0.5)
     self.mock_sendall.assert_called_with("region.event_name:2|c|@0.5\n".encode())
    def test_client_creates_stopwatch(self):
        """client.stopwatch() returns a Stopwatch bound to the client.

        Verifies default rate/reference, explicit rate and reference
        arguments, and that exiting the context manager sends one timer.
        """
        test_start_timestamp = time()
        one_minute_before_test = test_start_timestamp - 60
        client = TCPClient("localhost")
        client._socket = self.mock_socket
        stopwatch = client.stopwatch("event")
        self.assertIsInstance(stopwatch, Stopwatch)
        self.assertEqual(stopwatch.client, client)
        self.assertEqual(stopwatch.rate, 1)
        self.assertGreaterEqual(stopwatch.reference, test_start_timestamp)

        stopwatch_low_rate = client.stopwatch("low_rate", rate=0.001)
        self.assertEqual(stopwatch_low_rate.rate, 0.001)
        # fix: assert on the newly created low-rate stopwatch (the original
        # re-checked the first stopwatch by copy-paste mistake)
        self.assertGreaterEqual(stopwatch_low_rate.reference, test_start_timestamp)

        stopwatch_1min_ref = client.stopwatch("low_rate", reference=one_minute_before_test)
        self.assertGreaterEqual(test_start_timestamp, stopwatch_1min_ref.reference)

        with client.stopwatch("something"):
            sleep(0.01)

        self.assertEqual(self.mock_sendall.call_count, 1)
        request_args = self.mock_sendall.call_args[0]
        self.assertEqual(len(request_args), 1)
        request = request_args[0]
        # raw string: "\d" in a plain literal is an invalid escape
        # (SyntaxWarning on Python 3.12+)
        self.assertRegex(request.decode(), r"something:[1-9]\d{0,3}\|ms")
    def test_gauge(self):
        """gauge sends absolute values, honors prefix/rate, rejects negatives."""
        tcp = TCPClient("localhost")
        tcp._socket = self.mock_socket

        tcp.gauge("memory", 10240)
        self.mock_sendall.assert_called_with("memory:10240|g\n".encode())

        tcp.prefix = "region."
        tcp.gauge("cpu percentage%", rate=0.9, value=98.3)
        self.mock_sendall.assert_called_with(
            "region.cpu_percentage:98.3|g|@0.9\n".encode())

        # very low sample rate: nothing should hit the socket
        self.mock_sendall.reset_mock()
        tcp.gauge("low.rate", 128, 0.1)
        self.assertEqual(self.mock_sendall.call_count, 0)

        # negative gauge values are rejected
        self.assertRaises(AssertionError, tcp.gauge, "negative", -5)
# Example #8
# 0
    def test_udp_and_tcp_collectors_combine_and_flush_to_stdout(self):
        """Metrics arriving over UDP and TCP are merged into a single flush."""
        udp_port = randint(8125, 8999)
        tcp_port = randint(8125, 9999)
        flush_interval = 2
        self.app_process = self.create_server_process(
            udp_port=udp_port, tcp_port=tcp_port, flush_interval=flush_interval)

        udp_client = Client("localhost", udp_port)
        tcp_client = TCPClient("localhost", tcp_port)
        for _ in range(2):
            udp_client.increment("event")
            tcp_client.increment("event")
        udp_client.timing("process", 85)
        udp_client.timing("process", 98)
        tcp_client.timing("process", 87)
        tcp_client.timing("query", 2)

        # wait for at least 1 flush
        sleep(flush_interval)
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)
        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreater(len(flushed_metrics), 9,
                           'flushed 1 counter and at least 2 x 4 timers')

        self.maxDiff = None
        expected = {
            "event": 4,
            "process.count": 3,
            "process.max": 98,
            "process.min": 85,
            "process.mean": 90,
            "process.median": 87,
            "query.count": 1,
            "query.max": 2.0,
            "query.min": 2.0,
            "query.mean": 2.0,
            "query.median": 2.0,
        }
        self.assertDictEqual(expected, metrics_to_dict(flushed_metrics))
    def test_timing_since_with_datetime_timestamp(self):
        """timing_since accepts a datetime reference and sends elapsed ms."""
        start_time = datetime.now()
        client = TCPClient("localhost")
        client._socket = self.mock_socket

        sleep(0.01)
        client.timing_since("event", start_time)
        self.assertEqual(self.mock_sendall.call_count, 1)
        socket_sendall_args = self.mock_sendall.call_args[0]
        self.assertEqual(len(socket_sendall_args), 1)
        request = socket_sendall_args[0]
        # raw string: "\d" in a plain literal is an invalid escape
        # (SyntaxWarning on Python 3.12+)
        self.assertRegex(request.decode(), r"event:[1-9]\d*\|ms")
        self.mock_sendall.reset_mock()

        # very low sample rate: the metric is dropped, no socket write
        client.timing_since("low.rate", start_time, rate=0.01)
        self.assertEqual(self.mock_sendall.call_count, 0)
    def test_timing_since_with_timestamp_as_number(self):
        """timing_since accepts a float timestamp and sends elapsed ms."""
        start_time = time()
        client = TCPClient("localhost")
        client._socket = self.mock_socket

        # negative timestamps are rejected
        self.assertRaises(AssertionError, client.timing_since, "negative", -1)

        sleep(0.01)
        client.timing_since("event", start_time)
        self.assertEqual(self.mock_sendall.call_count, 1)
        socket_sendall_args = self.mock_sendall.call_args[0]
        self.assertEqual(len(socket_sendall_args), 1)
        request = socket_sendall_args[0]
        # raw string fixes the invalid "\d" escape (SyntaxWarning on 3.12+);
        # "[1-9]+\d*" simplified to the equivalent "[1-9]\d*" to match the
        # sibling datetime-based test
        self.assertRegex(request.decode(), r"event:[1-9]\d*\|ms")
        self.mock_sendall.reset_mock()

        # low sample rate: the metric is dropped, no socket write
        client.timing_since("low.rate", start_time, rate=0.1)
        self.assertEqual(self.mock_sendall.call_count, 0)
    def test_timing(self):
        """timing sends timer metrics; fractional ms appear truncated."""
        tcp = TCPClient("localhost")
        tcp._socket = self.mock_socket

        tcp.timing("event", 10)
        self.mock_sendall.assert_called_with("event:10|ms\n".encode())

        # 34.5 is sent as 34 — fractional milliseconds are dropped
        tcp.timing("db.event name", 34.5, 0.5)
        self.mock_sendall.assert_called_with("db.event_name:34|ms|@0.5\n".encode())

        tcp.prefix = "region.c_"
        tcp.timing("db/query", rate=0.7, milliseconds=22.22)
        self.mock_sendall.assert_called_with("region.c_db-query:22|ms|@0.7\n".encode())

        # very low sample rate: nothing should hit the socket
        self.mock_sendall.reset_mock()
        tcp.timing("low.rate", 12, rate=0.1)
        self.assertEqual(self.mock_sendall.call_count, 0)

        # negative durations are rejected
        self.assertRaises(AssertionError, tcp.timing, "negative", -2)
 def test_client_creates_chronometer(self):
     """chronometer() returns a Chronometer bound to this client."""
     tcp = TCPClient("localhost")
     timer = tcp.chronometer()
     self.assertIsInstance(timer, Chronometer)
     self.assertEqual(timer.client, tcp)
# Example #13
# 0
    def test_reload_server_keeps_the_queue(self):
        """Reloading the server (SIGHUP) must preserve already-queued metrics.

        Phase 1 collects over TCP with a 60s flush interval (so nothing is
        flushed before the reload); the config file is then rewritten to
        collect over UDP and the server is sent SIGHUP. Phase 2 collects over
        UDP. The final flush should combine metrics from both phases.
        """
        tcp_port = randint(8125, 9999)
        _, config_filename = mkstemp(text=True)
        self.remove_files.append(config_filename)
        # Phase 1 config: TCP collector, flush interval long enough that no
        # flush happens before the reload.
        write_to_file(
            config_filename,
            """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-tcp=127.0.0.1:{}
flush-interval=60
""".format(tcp_port)
        )
        self.app_process = self.create_server_process(config=config_filename)
        tcp_client = TCPClient("localhost", tcp_port)
        for _ in range(0, 2):
            tcp_client.increment("event")
        tcp_client.timing("query", 2)
        # presumably dropping the client closes the TCP connection so the
        # collector can finish reading — TODO confirm against TCPClient.__del__
        del tcp_client
        wait_until_server_processed_metric(self.app_process, 'event')

        udp_port = randint(8125, 8999)
        flush_interval = 5
        # Phase 2 config: UDP collector with a short flush interval.
        write_to_file(
            config_filename,
            """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-udp=127.0.0.1:{}
flush-interval={}
""".format(udp_port, flush_interval)
        )

        # SIGHUP triggers the config reload; wait (up to 10s) until the
        # server starts collecting again.
        self.app_process.send_signal(signal.SIGHUP)
        wait_until_server_starts_collecting(self.app_process, 10)

        client = Client("localhost", udp_port)
        for _ in range(0, 2):
            client.increment("event")
        client.timing("query", 4)
        client.increment("finish")

        self.assertRaises(Exception, TCPClient, "localhost", tcp_port)  # TCP collector should be down
        os.remove(config_filename)

        wait_until_server_processed_metric(self.app_process, 'finish')
        # wait for at least 1 flush
        sleep(flush_interval)

        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)

        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreaterEqual(len(flushed_metrics), 5)

        self.maxDiff = None
        # "event" (2 TCP + 2 UDP) and "query" (one per phase) totals combine
        # both phases, proving the queue survived the reload.
        self.assertDictEqual(
            {
                "event": 4, "query.count": 2, "query.max": 4.0,
                "query.min": 2.0, "query.mean": 3.0, "query.median": 3.0,
                "finish": 1
            },
            metrics_to_dict(flushed_metrics)
        )
    def test_sending_metrics(self):
        """End-to-end: every metric type reaches the server over TCP.

        Fixed-value metrics (counter, timer, gauge, gauge delta, set) are
        checked literally; wall-clock-dependent timers (timing_since,
        chronometer, stopwatch) are checked with regex patterns.
        """
        start = datetime.now()
        start_timestamp = time()
        client = TCPClient("localhost", self.__class__.port)
        client.increment("1.test", 5)
        client.increment("2.login")
        client.timing("3.query", 3600)
        client.gauge("4.memory", 102400)
        client.gauge_delta("5.memory", 256)
        client.gauge_delta("6.memory", -128)
        client.set("7.ip", "127.0.0.1")

        expected = [
            "1.test:5|c",
            "2.login:1|c",
            "3.query:3600|ms",
            "4.memory:102400|g",
            "5.memory:+256|g",
            "6.memory:-128|g",
            "7.ip:127.0.0.1|s",
        ]

        self.assert_server_received_expected_requests(expected)

        client.timing_since("1.query", start_timestamp)
        client.timing_since("2.other_query", start)
        chronometer = client.chronometer()
        chronometer.time_callable("3.sleepy", sleep, 1, (0.02, ))

        @chronometer.wrap("4.wait_a_sec")
        def wait_a_sec():
            sleep(0.01)

        wait_a_sec()

        with client.stopwatch("5.my_with_block"):
            sleep(0.02)

        # raw strings: "\d" in plain literals is an invalid escape sequence
        # (SyntaxWarning on Python 3.12+)
        expected_patterns = [
            r"1.query:[1-9]\d{0,4}\|ms", r"2.other_query:[1-9]\d{0,4}\|ms",
            r"3.sleepy:[1-9]\d{0,4}\|ms", r"4.wait_a_sec:[1-9]\d{0,4}\|ms",
            r"5.my_with_block:[1-9]\d{0,4}\|ms"
        ]
        self.assert_server_received_expected_request_regex(expected_patterns)
    def test_sending_metrics(self):
        """End-to-end: every metric type reaches the server over TCP.

        Fixed-value metrics (counter, timer, gauge, gauge delta, set) are
        checked literally; wall-clock-dependent timers (timing_since,
        chronometer, stopwatch) are checked with regex patterns.
        """
        start = datetime.now()
        start_timestamp = time()
        client = TCPClient("localhost", self.__class__.port)
        client.increment("1.test", 5)
        client.increment("2.login")
        client.timing("3.query", 3600)
        client.gauge("4.memory", 102400)
        client.gauge_delta("5.memory", 256)
        client.gauge_delta("6.memory", -128)
        client.set("7.ip", "127.0.0.1")

        expected = [
                "1.test:5|c",
                "2.login:1|c",
                "3.query:3600|ms",
                "4.memory:102400|g",
                "5.memory:+256|g",
                "6.memory:-128|g",
                "7.ip:127.0.0.1|s",
        ]

        self.assert_server_received_expected_requests(expected)

        client.timing_since("1.query", start_timestamp)
        client.timing_since("2.other_query", start)
        chronometer = client.chronometer()
        chronometer.time_callable("3.sleepy", sleep, 1, (0.02,))

        @chronometer.wrap("4.wait_a_sec")
        def wait_a_sec():
            sleep(0.01)

        wait_a_sec()

        with client.stopwatch("5.my_with_block"):
            sleep(0.02)

        # raw strings: "\d" in plain literals is an invalid escape sequence
        # (SyntaxWarning on Python 3.12+)
        expected_patterns = [
            r"1.query:[1-9]\d{0,4}\|ms",
            r"2.other_query:[1-9]\d{0,4}\|ms",
            r"3.sleepy:[1-9]\d{0,4}\|ms",
            r"4.wait_a_sec:[1-9]\d{0,4}\|ms",
            r"5.my_with_block:[1-9]\d{0,4}\|ms"
        ]
        self.assert_server_received_expected_request_regex(expected_patterns)
# Example #16
# 0
    def test_reload_server_keeps_the_queue(self):
        """Reloading the server (SIGHUP) must preserve already-queued metrics.

        Phase 1 collects over TCP with a 60s flush interval (so nothing is
        flushed before the reload); the config file is then rewritten to
        collect over UDP and the server is sent SIGHUP. Phase 2 collects over
        UDP. The final flush should combine metrics from both phases.
        """
        tcp_port = randint(8125, 9999)
        _, config_filename = mkstemp(text=True)
        self.remove_files.append(config_filename)
        # Phase 1 config: TCP collector, flush interval long enough that no
        # flush happens before the reload.
        write_to_file(
            config_filename, """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-tcp=127.0.0.1:{}
flush-interval=60
""".format(tcp_port))
        self.app_process = self.create_server_process(config=config_filename)
        tcp_client = TCPClient("localhost", tcp_port)
        for _ in range(0, 2):
            tcp_client.increment("event")
        tcp_client.timing("query", 2)
        # presumably dropping the client closes the TCP connection so the
        # collector can finish reading — TODO confirm against TCPClient.__del__
        del tcp_client
        wait_until_server_processed_metric(self.app_process, 'event')

        udp_port = randint(8125, 8999)
        flush_interval = 5
        # Phase 2 config: UDP collector with a short flush interval.
        write_to_file(
            config_filename, """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-udp=127.0.0.1:{}
flush-interval={}
""".format(udp_port, flush_interval))

        # SIGHUP triggers the config reload; wait (up to 10s) until the
        # server starts collecting again.
        self.app_process.send_signal(signal.SIGHUP)
        wait_until_server_starts_collecting(self.app_process, 10)

        client = Client("localhost", udp_port)
        for _ in range(0, 2):
            client.increment("event")
        client.timing("query", 4)
        client.increment("finish")

        self.assertRaises(Exception, TCPClient, "localhost",
                          tcp_port)  # TCP collector should be down
        os.remove(config_filename)

        wait_until_server_processed_metric(self.app_process, 'finish')
        # wait for at least 1 flush
        sleep(flush_interval)

        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)

        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreaterEqual(len(flushed_metrics), 5)

        self.maxDiff = None
        # "event" (2 TCP + 2 UDP) and "query" (one per phase) totals combine
        # both phases, proving the queue survived the reload.
        self.assertDictEqual(
            {
                "event": 4,
                "query.count": 2,
                "query.max": 4.0,
                "query.min": 2.0,
                "query.mean": 3.0,
                "query.median": 3.0,
                "finish": 1
            }, metrics_to_dict(flushed_metrics))