def test_kubelet_check_prometheus(monkeypatch, aggregator):
    """Run the check against a mocked prometheus endpoint and verify that the
    legacy cadvisor path is skipped while all expected metrics are reported."""
    check = KubeletCheck('kubelet', None, {}, [{}])

    # Stub out every network-facing helper on the check instance.
    monkeypatch.setattr(check, 'retrieve_pod_list',
                        mock.Mock(return_value=json.loads(mock_from_file('pods.json'))))
    monkeypatch.setattr(check, '_retrieve_node_spec', mock.Mock(return_value=NODE_SPEC))
    monkeypatch.setattr(check, '_perform_kubelet_check', mock.Mock(return_value=None))
    monkeypatch.setattr(check, 'process_cadvisor', mock.Mock(return_value=None))

    # Fake a prometheus text-format HTTP response for the scraper to consume.
    response_attrs = {
        'close.return_value': True,
        'iter_lines.return_value': mock_from_file('metrics.txt').split('\n'),
    }
    fake_response = mock.Mock(headers={'Content-Type': 'text/plain'}, **response_attrs)
    monkeypatch.setattr(check, 'poll', mock.Mock(return_value=fake_response))

    check.check({})

    # Prometheus mode: no legacy cadvisor URL, and process_cadvisor must stay idle.
    assert check.cadvisor_legacy_url is None
    check.retrieve_pod_list.assert_called_once()
    check._retrieve_node_spec.assert_called_once()
    check._perform_kubelet_check.assert_called_once()
    check.poll.assert_called_once()
    check.process_cadvisor.assert_not_called()

    # Run a second time so rate/pct metrics are guaranteed to be present.
    check.check({})

    for expected in EXPECTED_METRICS_COMMON:
        aggregator.assert_metric(expected)
    for expected in EXPECTED_METRICS_PROMETHEUS:
        aggregator.assert_metric(expected)
    assert aggregator.metrics_asserted_pct == 100.0
def test_kubelet_check_cadvisor(monkeypatch, aggregator):
    """Run the check with a detected legacy cadvisor endpoint and verify the
    cadvisor code path is taken, with instance tags applied to every metric."""
    instance = {"tags": ["instance:tag"], "cadvisor_port": 4194}
    legacy_url = "http://valid:port/url"
    check = KubeletCheck('kubelet', None, {}, [instance])

    # Stub out every network-facing helper on the check instance.
    monkeypatch.setattr(check, 'retrieve_pod_list',
                        mock.Mock(return_value=json.loads(mock_from_file('pods_list_1.2.json'))))
    monkeypatch.setattr(check, '_retrieve_node_spec', mock.Mock(return_value=NODE_SPEC))
    monkeypatch.setattr(check, '_perform_kubelet_check', mock.Mock(return_value=None))
    monkeypatch.setattr(check, '_retrieve_cadvisor_metrics',
                        mock.Mock(return_value=json.loads(mock_from_file('cadvisor_1.2.json'))))
    monkeypatch.setattr(check.cadvisor_scraper, 'process', mock.Mock(return_value=None))
    monkeypatch.setattr(check.kubelet_scraper, 'process', mock.Mock(return_value=None))
    monkeypatch.setattr(check, 'detect_cadvisor', mock.Mock(return_value=legacy_url))

    # We filter out slices unknown by the tagger, mock a non-empty taglist
    monkeypatch.setattr('datadog_checks.kubelet.cadvisor.get_tags',
                        mock.Mock(return_value=["foo:bar"]))
    monkeypatch.setattr('datadog_checks.kubelet.cadvisor.tags_for_pod',
                        mock.Mock(return_value=["foo:bar"]))

    check.check(instance)

    # Cadvisor mode: legacy URL detected, kubelet scraper runs, cadvisor scraper does not.
    assert check.cadvisor_legacy_url == legacy_url
    check.retrieve_pod_list.assert_called_once()
    check._retrieve_node_spec.assert_called_once()
    check._retrieve_cadvisor_metrics.assert_called_once()
    check._perform_kubelet_check.assert_called_once()
    check.cadvisor_scraper.process.assert_not_called()
    check.kubelet_scraper.process.assert_called_once()

    # Run a second time so rate/pct metrics are guaranteed to be present.
    check.check(instance)

    for expected in EXPECTED_METRICS_COMMON:
        aggregator.assert_metric(expected)
        aggregator.assert_metric_has_tag(expected, "instance:tag")
    for expected in EXPECTED_METRICS_CADVISOR:
        aggregator.assert_metric(expected)
        aggregator.assert_metric_has_tag(expected, "instance:tag")
    assert aggregator.metrics_asserted_pct == 100.0
def test_prometheus_cpu_summed(monkeypatch, aggregator):
    """Verify that per-core cpu usage counters from the prometheus payload are
    summed into one kubernetes.cpu.usage.total rate per pod, and that the
    individual per-core values are never submitted on their own."""
    check = KubeletCheck('kubelet', None, {}, [{}])

    # Stub out every network-facing helper and capture rate() submissions.
    monkeypatch.setattr(check, 'retrieve_pod_list',
                        mock.Mock(return_value=json.loads(mock_from_file('pods.json'))))
    monkeypatch.setattr(check, '_retrieve_node_spec', mock.Mock(return_value=NODE_SPEC))
    monkeypatch.setattr(check, '_perform_kubelet_check', mock.Mock(return_value=None))
    monkeypatch.setattr(check, 'rate', mock.Mock())

    # Fake a prometheus text-format HTTP response for the scraper to consume.
    attrs = {
        'close.return_value': True,
        'iter_lines.return_value': mock_from_file('metrics.txt').split('\n'),
    }
    mock_resp = mock.Mock(headers={'Content-Type': 'text/plain'}, **attrs)
    monkeypatch.setattr(check, 'poll', mock.Mock(return_value=mock_resp))

    with mock.patch("datadog_checks.kubelet.kubelet.get_tags", side_effect=mocked_get_tags):
        check.check({"metrics_endpoint": "http://dummy"})

    # Make sure we submit the summed rates correctly:
    # - fluentd-gcp-v2.0.10-9q9t4 uses two cpus, we need to sum (1228.32 + 825.32) * 10**9 = 2053640000000
    # - demo-app-success-c485bc67b-klj45 is mono-threaded, we submit 7.756358313 * 10**9 = 7756358313
    #
    calls = [
        mock.call('kubernetes.cpu.usage.total', 2053640000000.0,
                  ['pod_name:fluentd-gcp-v2.0.10-9q9t4']),
        # BUGFIX: tag separator was '=' instead of ':', so this expected call
        # could never match a real submission (tags are 'key:value').
        mock.call('kubernetes.cpu.usage.total', 7756358313.0,
                  ['pod_name:demo-app-success-c485bc67b-klj45']),
    ]
    check.rate.assert_has_calls(calls, any_order=True)

    # Make sure the per-core metrics are not submitted
    bad_calls = [
        mock.call('kubernetes.cpu.usage.total', 1228320000000.0,
                  ['pod_name:fluentd-gcp-v2.0.10-9q9t4']),
        mock.call('kubernetes.cpu.usage.total', 825320000000.0,
                  ['pod_name:fluentd-gcp-v2.0.10-9q9t4']),
    ]
    for c in bad_calls:
        assert c not in check.rate.mock_calls
# BUGFIX: this function was also named test_kubelet_check_cadvisor, silently
# shadowing the earlier definition so pytest only collected this one. Renamed
# so both variants run.
# NOTE(review): this variant uses a 3-argument KubeletCheck constructor and the
# `tagger` fixture, unlike the other tests in this file — confirm which
# KubeletCheck API version this file targets.
def test_kubelet_check_cadvisor_with_stats(monkeypatch, aggregator, tagger):
    """Run the check with a detected legacy cadvisor endpoint, also mocking the
    stats summary retrieval, and verify expected metrics carry instance tags."""
    instance_with_tag = {"tags": ["instance:tag"], "cadvisor_port": 4194}
    cadvisor_url = "http://valid:port/url"
    check = KubeletCheck('kubelet', {}, [instance_with_tag])

    # Stub out every network-facing helper on the check instance.
    monkeypatch.setattr(check, 'retrieve_pod_list',
                        mock.Mock(return_value=json.loads(mock_from_file('pods_list_1.2.json'))))
    monkeypatch.setattr(check, '_retrieve_node_spec', mock.Mock(return_value=NODE_SPEC))
    monkeypatch.setattr(check, '_retrieve_stats',
                        mock.Mock(return_value=json.loads(mock_from_file('stats_summary.json'))))
    monkeypatch.setattr(check, '_perform_kubelet_check', mock.Mock(return_value=None))
    monkeypatch.setattr(check, '_retrieve_cadvisor_metrics',
                        mock.Mock(return_value=json.loads(mock_from_file('cadvisor_1.2.json'))))
    monkeypatch.setattr(check, 'detect_cadvisor', mock.Mock(return_value=cadvisor_url))
    monkeypatch.setattr(check, 'process', mock.Mock(return_value=None))

    check.check(instance_with_tag)

    assert check.cadvisor_legacy_url == cadvisor_url
    check.retrieve_pod_list.assert_called_once()
    check._retrieve_node_spec.assert_called_once()
    check._retrieve_stats.assert_called_once()
    check._retrieve_cadvisor_metrics.assert_called_once()
    check._perform_kubelet_check.assert_called_once()

    # called twice so pct metrics are guaranteed to be there
    check.check(instance_with_tag)

    for metric in EXPECTED_METRICS_COMMON:
        aggregator.assert_metric(metric)
        aggregator.assert_metric_has_tag(metric, "instance:tag")
    for metric in EXPECTED_METRICS_CADVISOR:
        aggregator.assert_metric(metric)
        aggregator.assert_metric_has_tag(metric, "instance:tag")
    assert aggregator.metrics_asserted_pct == 100.0
def test_kubelet_check_neither(monkeypatch, aggregator):
    """With cadvisor_port=0 and an empty metrics_endpoint, neither the
    prometheus processor nor the legacy cadvisor path should run."""
    check = KubeletCheck('kubelet', None, {}, [{}])

    # Stub out every helper so the check runs without any network access.
    monkeypatch.setattr(check, 'retrieve_pod_list',
                        mock.Mock(return_value=json.loads(mock_from_file('pods.json'))))
    monkeypatch.setattr(check, '_retrieve_node_spec', mock.Mock(return_value=NODE_SPEC))
    monkeypatch.setattr(check, '_perform_kubelet_check', mock.Mock(return_value=None))
    monkeypatch.setattr(check, 'process', mock.Mock(return_value=None))
    monkeypatch.setattr(check, 'process_cadvisor', mock.Mock(return_value=None))

    # Both metric sources are explicitly disabled in the instance config.
    check.check({"cadvisor_port": 0, "metrics_endpoint": ""})

    assert check.cadvisor_legacy_url is None
    check.retrieve_pod_list.assert_called_once()
    check._retrieve_node_spec.assert_called_once()
    check._perform_kubelet_check.assert_called_once()
    check.process_cadvisor.assert_not_called()
    check.process.assert_not_called()