示例#1
0
    def test_udp_and_tcp_collectors_combine_and_flush_to_stdout(self):
        """Metrics collected over UDP and TCP should merge into one flush on stdout."""
        udp_port = randint(8125, 8999)
        tcp_port = randint(8125, 9999)
        flush_interval = 2
        self.app_process = self.create_server_process(
            udp_port=udp_port,
            tcp_port=tcp_port,
            flush_interval=flush_interval,
        )
        udp_client = Client("localhost", udp_port)
        tcp_client = TCPClient("localhost", tcp_port)
        # two increments over each transport -> counter "event" should total 4
        for _ in range(2):
            udp_client.increment("event")
            tcp_client.increment("event")

        udp_client.timing("process", 8.5)
        udp_client.timing("process", 9.8)
        tcp_client.timing("process", 8.7)
        tcp_client.timing("query", 2)
        sleep(flush_interval)  # wait for at least 1 flush
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)
        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreater(len(flushed_metrics), 9, 'flushed 1 counter and at least 2 x 4 timers')

        self.maxDiff = None
        expected = {
            "event": 4,
            "process.count": 3,
            "process.max": 9.8,
            "process.min": 8.5,
            "process.mean": 9.0,
            "process.median": 8.7,
            "query.count": 1,
            "query.max": 2.0,
            "query.min": 2.0,
            "query.mean": 2.0,
            "query.median": 2.0,
        }
        self.assertDictEqual(expected, metrics_to_dict(flushed_metrics))
示例#2
0
    def test_udp_collectors_flushing_stdout(self):
        """Counters and timers collected over UDP are aggregated and flushed to stdout."""
        udp_port = randint(8125, 8999)
        flush_interval = 2
        self.app_process = self.create_server_process(
            udp_port=udp_port, flush_interval=flush_interval)

        client = Client("localhost", udp_port)
        # three increments -> counter "event" should total 3
        for _ in range(3):
            client.increment("event")
        for duration in (101, 102, 103):
            client.timing("process", duration)
        sleep(flush_interval)  # wait for at least 1 flush
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)
        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreater(len(flushed_metrics), 5,
                           'flushed 1 counter and at least 4 timers')
        expected = {
            "event": 3,
            "process.count": 3,
            "process.max": 103,
            "process.min": 101,
            "process.mean": 102,
            "process.median": 102,
        }
        self.assertDictEqual(expected, metrics_to_dict(flushed_metrics))
示例#3
0
    def test_udp_collectors_flushing_stdout(self):
        """Fractional timer values collected over UDP are aggregated and flushed to stdout."""
        udp_port = randint(8125, 8999)
        flush_interval = 2
        self.app_process = self.create_server_process(udp_port=udp_port,
                                                      flush_interval=flush_interval)

        statsd_client = Client("localhost", udp_port)
        for _ in range(3):
            statsd_client.increment("event")
        for millis in (10.1, 10.2, 10.3):
            statsd_client.timing("process", millis)
        # wait for at least 1 flush
        sleep(flush_interval)
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)
        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreater(len(flushed_metrics), 5,
                           'flushed 1 counter and at least 4 timers')
        self.assertDictEqual(
            {
                "event": 3,
                "process.count": 3,
                "process.max": 10.3,
                "process.min": 10.1,
                "process.mean": 10.2,
                "process.median": 10.2,
            },
            metrics_to_dict(flushed_metrics),
        )
示例#4
0
    def test_flushing_files(self):
        """Flushed metrics should be written to both a plain-text and a CSV file."""
        # Create a unique temp path, then remove it so the server creates the file itself.
        _, file_name = mkstemp()
        os.remove(file_name)
        udp_port = randint(8125, 8999)
        flush_interval = 2
        csv_file_name = file_name + '.csv'
        # Register both output files for cleanup after the test.
        self.remove_files.append(file_name)
        self.remove_files.append(csv_file_name)
        self.app_process = self.create_server_process(
            udp_port=udp_port,
            flush_interval=flush_interval,
            flush_file=file_name,
            flush_file_csv=csv_file_name)

        client = Client("localhost", udp_port)
        for _ in range(0, 3):
            client.increment("event")
        client.timing("process", 20)
        client.timing("process", 22)
        client.timing("process", 24)
        # wait for at least 1 flush
        sleep(flush_interval)
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)

        self.assertTrue(os.path.exists(file_name))
        with open(file_name) as file_handle:
            flushed_metrics = [
                line.rstrip() for line in file_handle.readlines()
            ]

        self.assertTrue(os.path.exists(csv_file_name))
        with open(csv_file_name) as file_handle:
            flushed_metrics_csv = [
                line.rstrip() for line in file_handle.readlines()
            ]

        # Timer statistics derived from the samples [20, 22, 24].
        expected_metrics_dict = {
            "event": 3,
            "process.count": 3,
            "process.max": 24,
            "process.min": 20,
            "process.mean": 22,
            "process.median": 22
        }
        self.assertGreater(len(flushed_metrics), 5,
                           'flushed 1 counter and at least 4 timers')
        self.assertDictEqual(expected_metrics_dict,
                             metrics_to_dict(flushed_metrics))

        # NOTE(review): only the CSV line count is checked; the CSV content is
        # never compared against expected_metrics_dict — confirm if intentional.
        self.assertGreater(len(flushed_metrics_csv), 5,
                           'flushed 1 counter and at least 4 timers')
    def test_timing(self):
        """Timer packets: formatting, name sanitizing, prefixing, sampling, validation."""
        client = Client("localhost")
        client._socket = self.mock_socket
        addr = ("127.0.0.2", 8125)

        client.timing("event", 10)
        self.mock_sendto.assert_called_with("event:10|ms".encode(), addr)

        # space in the name becomes "_"; the 34.5 float is sent as the integer 34
        client.timing("db.event name", 34.5, 0.5)
        self.mock_sendto.assert_called_with("db.event_name:34|ms|@0.5".encode(), addr)

        # prefix is prepended, and "/" in the name becomes "-"
        client.prefix = "region.c_"
        client.timing("db/query", rate=0.7, milliseconds=22.22)
        self.mock_sendto.assert_called_with("region.c_db-query:22|ms|@0.7".encode(), addr)

        # this sampled call is expected not to send anything
        self.mock_sendto.reset_mock()
        client.timing("low.rate", 12, rate=0.1)
        self.assertEqual(self.mock_sendto.call_count, 0)

        # negative durations are rejected
        self.assertRaises(AssertionError, client.timing, "negative", -1)
    def test_timing(self):
        """Verify the datagrams produced for timer metrics under various options."""
        metrics_client = Client("localhost")
        metrics_client._socket = self.mock_socket

        metrics_client.timing("event", 10)
        self.mock_sendto.assert_called_with(
            "event:10|ms".encode(), ("127.0.0.2", 8125))

        # name with a space is sanitized; 34.5 shows up as integer 34
        metrics_client.timing("db.event name", 34.5, 0.5)
        self.mock_sendto.assert_called_with(
            "db.event_name:34|ms|@0.5".encode(), ("127.0.0.2", 8125))

        # with a prefix set, "/" in the name is replaced by "-"
        metrics_client.prefix = "region.c_"
        metrics_client.timing("db/query", rate=0.7, milliseconds=22.22)
        self.mock_sendto.assert_called_with(
            "region.c_db-query:22|ms|@0.7".encode(), ("127.0.0.2", 8125))

        # expect no packet to be sent for this sampled call
        self.mock_sendto.reset_mock()
        metrics_client.timing("low.rate", 12, rate=0.1)
        self.assertEqual(self.mock_sendto.call_count, 0)

        # negative durations must raise
        self.assertRaises(AssertionError, metrics_client.timing, "negative", -1)
    def test_sending_metrics(self):
        """End-to-end check that each client method emits the correct statsd request.

        First verifies exact datagrams for counters, timers, gauges, gauge
        deltas and sets; then verifies the timing helpers (timing_since,
        chronometer, stopwatch) against regex patterns, since elapsed
        milliseconds vary.
        """
        start = datetime.now()
        start_timestamp = time()
        client = Client("localhost", self.__class__.port)
        client.increment("1.test", 5)
        client.increment("2.login")
        client.timing("3.query", 3600)
        client.gauge("4.memory", 102400)
        client.gauge_delta("5.memory", 256)
        client.gauge_delta("6.memory", -128)
        client.set("7.ip", "127.0.0.1")

        expected = [
            "1.test:5|c",
            "2.login:1|c",
            "3.query:3600|ms",
            "4.memory:102400|g",
            "5.memory:+256|g",
            "6.memory:-128|g",
            "7.ip:127.0.0.1|s",
        ]

        self.assert_server_received_expected_requests(expected)

        self.__class__.server.requests.clear()
        client.timing_since("1.query", start_timestamp)
        client.timing_since("2.other_query", start)
        chronometer = client.chronometer()
        chronometer.time_callable("3.sleepy", sleep, 1, (0.02,))

        @chronometer.wrap("4.wait_a_sec")
        def wait_a_sec():
            sleep(0.01)

        wait_a_sec()

        with client.stopwatch("5.my_with_block"):
            sleep(0.02)

        # Raw strings: "\d" in a plain literal is an invalid escape sequence
        # (SyntaxWarning on Python 3.12+). The pattern text is unchanged.
        expected_patterns = [
            r"1.query:[1-9]\d{0,4}\|ms",
            r"2.other_query:[1-9]\d{0,4}\|ms",
            r"3.sleepy:[1-9]\d{0,4}\|ms",
            r"4.wait_a_sec:[1-9]\d{0,4}\|ms",
            r"5.my_with_block:[1-9]\d{0,4}\|ms",
        ]
        self.assert_server_received_expected_request_regex(expected_patterns)
    def test_sending_metrics(self):
        """End-to-end: each client call should produce the expected statsd request.

        Exact-match assertions cover counters/timers/gauges/sets; the timing
        helpers are matched by regex because the measured durations vary.
        """
        start = datetime.now()
        start_timestamp = time()
        client = Client("localhost", self.__class__.port)
        client.increment("1.test", 5)
        client.increment("2.login")
        client.timing("3.query", 3600)
        client.gauge("4.memory", 102400)
        client.gauge_delta("5.memory", 256)
        client.gauge_delta("6.memory", -128)
        client.set("7.ip", "127.0.0.1")

        expected = [
            "1.test:5|c",
            "2.login:1|c",
            "3.query:3600|ms",
            "4.memory:102400|g",
            "5.memory:+256|g",
            "6.memory:-128|g",
            "7.ip:127.0.0.1|s",
        ]

        self.assert_server_received_expected_requests(expected)

        self.__class__.server.requests.clear()
        client.timing_since("1.query", start_timestamp)
        client.timing_since("2.other_query", start)
        chronometer = client.chronometer()
        chronometer.time_callable("3.sleepy", sleep, 1, (0.02,))

        @chronometer.wrap("4.wait_a_sec")
        def wait_a_sec():
            sleep(0.01)

        wait_a_sec()

        with client.stopwatch("5.my_with_block"):
            sleep(0.02)

        # Raw strings fix the invalid "\d" escape sequences in plain literals
        # (SyntaxWarning on Python 3.12+); the regex text itself is unchanged.
        expected_patterns = [
            r"1.query:[1-9]\d{0,4}\|ms",
            r"2.other_query:[1-9]\d{0,4}\|ms",
            r"3.sleepy:[1-9]\d{0,4}\|ms",
            r"4.wait_a_sec:[1-9]\d{0,4}\|ms",
            r"5.my_with_block:[1-9]\d{0,4}\|ms",
        ]
        self.assert_server_received_expected_request_regex(expected_patterns)
示例#9
0
    def test_udp_and_tcp_collectors_combine_and_flush_to_stdout(self):
        """Counters/timers from the UDP and TCP collectors combine in one stdout flush."""
        udp_port = randint(8125, 8999)
        tcp_port = randint(8125, 9999)
        flush_interval = 2
        self.app_process = self.create_server_process(udp_port=udp_port,
                                                      tcp_port=tcp_port,
                                                      flush_interval=flush_interval)
        udp_client = Client("localhost", udp_port)
        tcp_client = TCPClient("localhost", tcp_port)
        # counter "event" is incremented twice per transport -> total 4
        for _ in range(2):
            udp_client.increment("event")
            tcp_client.increment("event")

        udp_client.timing("process", 85)
        udp_client.timing("process", 98)
        tcp_client.timing("process", 87)
        tcp_client.timing("query", 2)
        sleep(flush_interval)  # wait for at least 1 flush
        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)
        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreater(len(flushed_metrics), 9,
                           'flushed 1 counter and at least 2 x 4 timers')

        self.maxDiff = None
        expected = {
            "event": 4, "process.count": 3, "process.max": 98,
            "process.min": 85, "process.mean": 90, "process.median": 87,
            "query.count": 1, "query.max": 2.0, "query.min": 2.0,
            "query.mean": 2.0, "query.median": 2.0,
        }
        self.assertDictEqual(expected, metrics_to_dict(flushed_metrics))
示例#10
0
    def test_reload_server_keeps_the_queue(self):
        """Reloading the server via SIGHUP with a new config should preserve the
        queued, not-yet-flushed metrics and combine them with metrics collected
        after the reload."""
        tcp_port = randint(8125, 9999)
        _, config_filename = mkstemp(text=True)
        self.remove_files.append(config_filename)
        # Initial config: TCP-only collector with a long (60s) flush interval,
        # so the first batch of metrics stays queued until after the reload.
        write_to_file(
            config_filename,
            """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-tcp=127.0.0.1:{}
flush-interval=60
""".format(tcp_port)
        )
        self.app_process = self.create_server_process(config=config_filename)
        tcp_client = TCPClient("localhost", tcp_port)
        for _ in range(0, 2):
            tcp_client.increment("event")
        tcp_client.timing("query", 2)
        del tcp_client
        wait_until_server_processed_metric(self.app_process, 'event')

        udp_port = randint(8125, 8999)
        flush_interval = 5
        # Rewrite the config in place: switch to a UDP collector with a short
        # flush interval, then signal the server to reload it.
        write_to_file(
            config_filename,
            """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-udp=127.0.0.1:{}
flush-interval={}
""".format(udp_port, flush_interval)
        )

        self.app_process.send_signal(signal.SIGHUP)
        wait_until_server_starts_collecting(self.app_process, 10)

        client = Client("localhost", udp_port)
        for _ in range(0, 2):
            client.increment("event")
        client.timing("query", 4)
        client.increment("finish")

        self.assertRaises(Exception, TCPClient, "localhost", tcp_port)  # TCP collector should be down
        os.remove(config_filename)

        wait_until_server_processed_metric(self.app_process, 'finish')
        # wait for at least 1 flush
        sleep(flush_interval)

        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)

        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreaterEqual(len(flushed_metrics), 5)

        self.maxDiff = None
        # "event" = 2 pre-reload + 2 post-reload; "query" stats combine the
        # pre-reload sample (2) with the post-reload sample (4).
        self.assertDictEqual(
            {
                "event": 4, "query.count": 2, "query.max": 4.0,
                "query.min": 2.0, "query.mean": 3.0, "query.median": 3.0,
                "finish": 1
            },
            metrics_to_dict(flushed_metrics)
        )
示例#11
0
    def test_reload_server_keeps_the_queue(self):
        """A SIGHUP config reload must keep queued metrics: values collected
        before the reload should appear in the flush together with values
        collected after it."""
        tcp_port = randint(8125, 9999)
        _, config_filename = mkstemp(text=True)
        self.remove_files.append(config_filename)
        # Start with a TCP collector and a 60s flush interval so nothing is
        # flushed before the reload happens.
        write_to_file(
            config_filename, """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-tcp=127.0.0.1:{}
flush-interval=60
""".format(tcp_port))
        self.app_process = self.create_server_process(config=config_filename)
        tcp_client = TCPClient("localhost", tcp_port)
        for _ in range(0, 2):
            tcp_client.increment("event")
        tcp_client.timing("query", 2)
        del tcp_client
        wait_until_server_processed_metric(self.app_process, 'event')

        udp_port = randint(8125, 8999)
        flush_interval = 5
        # Replace the config: UDP collector with a short flush interval.
        write_to_file(
            config_filename, """
[navdoon]
log-level=DEBUG
log-stderr=true
flush-stdout=true
collect-udp=127.0.0.1:{}
flush-interval={}
""".format(udp_port, flush_interval))

        # SIGHUP triggers the reload; wait until the new collector is up.
        self.app_process.send_signal(signal.SIGHUP)
        wait_until_server_starts_collecting(self.app_process, 10)

        client = Client("localhost", udp_port)
        for _ in range(0, 2):
            client.increment("event")
        client.timing("query", 4)
        client.increment("finish")

        self.assertRaises(Exception, TCPClient, "localhost",
                          tcp_port)  # TCP collector should be down
        os.remove(config_filename)

        wait_until_server_processed_metric(self.app_process, 'finish')
        # wait for at least 1 flush
        sleep(flush_interval)

        self.app_process.terminate()
        wait_until_server_shuts_down(self.app_process)

        flushed_metrics = self.app_process.communicate()[0].splitlines()
        self.assertGreaterEqual(len(flushed_metrics), 5)

        self.maxDiff = None
        # Expected values combine both phases: "event" 2+2, "query" samples
        # [2, 4] -> mean/median 3.0.
        self.assertDictEqual(
            {
                "event": 4,
                "query.count": 2,
                "query.max": 4.0,
                "query.min": 2.0,
                "query.mean": 3.0,
                "query.median": 3.0,
                "finish": 1
            }, metrics_to_dict(flushed_metrics))