class TestRemoteWriteProvider(unittest.TestCase):
    """Tests for the remote-write provider side of the Prometheus charm."""

    @patch_network_get(private_address="1.1.1.1")
    def setUp(self, *unused):
        self.harness = Harness(PrometheusCharm)
        self.harness.set_model_info("lma", "123456")
        self.addCleanup(self.harness.cleanup)

    @patch.object(KubernetesServicePatch, "_service_object", new=lambda *args: None)
    @patch.object(Prometheus, "reload_configuration", new=lambda _: True)
    @patch_network_get(private_address="1.1.1.1")
    def test_port_is_set(self, *unused):
        """The unit advertises its remote-write endpoint URL over the relation."""
        self.harness.begin_with_initial_hooks()
        relation_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.add_relation_unit(relation_id, "consumer/0")
        unit_data = self.harness.get_relation_data(
            relation_id, self.harness.charm.unit.name
        )
        expected_endpoint = {"url": "http://1.1.1.1:9090/api/v1/write"}
        self.assertEqual(unit_data, {"remote_write": json.dumps(expected_endpoint)})
        self.assertIsInstance(self.harness.charm.unit.status, ActiveStatus)

    @patch.object(KubernetesServicePatch, "_service_object", new=lambda *args: None)
    @patch.object(Prometheus, "reload_configuration", new=lambda _: True)
    @patch_network_get(private_address="1.1.1.1")
    def test_alert_rules(self, *unused):
        """Alert rules published by a consumer are exposed by the provider."""
        self.harness.begin_with_initial_hooks()
        relation_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            relation_id,
            "consumer",
            {"alert_rules": json.dumps(ALERT_RULES)},
        )
        self.harness.add_relation_unit(relation_id, "consumer/0")
        keyed_alerts = self.harness.charm.remote_write_provider.alerts()
        # The provider keys alert groups by a topology identifier; drop it
        # so the payload can be compared against the fixture directly.
        alerts = next(iter(keyed_alerts.values()))
        self.assertEqual(len(alerts), 1)
        self.assertDictEqual(alerts, ALERT_RULES)
class TestEndpointAggregator(unittest.TestCase):
    """Tests for the endpoint-aggregator charm.

    The aggregator collects scrape targets and alert rules from related
    applications and forwards them, labelled with Juju topology, over the
    Prometheus relation.
    """

    def setUp(self):
        self.harness = Harness(EndpointAggregatorCharm, meta=AGGREGATOR_META)
        self.harness.set_model_info(name="testmodel", uuid="1234567890")
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin_with_initial_hooks()

    def _scrape_jobs(self, prometheus_rel_id):
        """Return the scrape jobs currently forwarded on the Prometheus relation.

        Fetches the relation data fresh on every call, so results are valid
        even after relation units have been added or removed.
        """
        data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name
        )
        return json.loads(data.get("scrape_jobs", "[]"))

    def _alert_groups(self, prometheus_rel_id):
        """Return the alert rule groups currently forwarded on the Prometheus relation."""
        data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name
        )
        return json.loads(data.get("alert_rules", "{}")).get("groups", [])

    @staticmethod
    def _expected_scrape_job(app_name, hostname, port):
        """Build the scrape job the aggregator is expected to forward for unit 0 of *app_name*."""
        return {
            "job_name": f"juju_testmodel_1234567_{app_name}_prometheus_scrape",
            "static_configs": [{
                "targets": [f"{hostname}:{port}"],
                "labels": {
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567890",
                    "juju_application": app_name,
                    "juju_unit": f"{app_name}/0",
                    "host": hostname,
                },
            }],
            "relabel_configs": [RELABEL_INSTANCE_CONFIG],
        }

    @staticmethod
    def _expected_cpu_usage_group(app_name):
        """Build the labelled alert group expected for ALERT_RULE_1 from unit 0 of *app_name*."""
        return {
            "name": f"juju_testmodel_1234567_{app_name}_alert_rules",
            "rules": [{
                "alert": "CPU_Usage",
                "expr": 'cpu_usage_idle{is_container!="True", group="promoagents-juju"} < 10',
                "for": "5m",
                "labels": {
                    "override_group_by": "host",
                    "severity": "page",
                    "cloud": "juju",
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567",
                    "juju_application": app_name,
                    "juju_unit": f"{app_name}/0",
                },
                "annotations": {
                    "description": "Host {{ $labels.host }} has had < 10% idle cpu for the last 5m\n",
                    "summary": "Host {{ $labels.host }} CPU free is less than 10%",
                },
            }],
        }

    def test_adding_prometheus_then_target_forwards_a_labeled_scrape_job(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        target_rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION, "target-app")
        self.harness.add_relation_unit(target_rel_id, "target-app/0")
        self.harness.update_relation_data(
            target_rel_id,
            "target-app/0",
            {"hostname": "scrape_target_0", "port": "1234"},
        )

        expected_jobs = [
            self._expected_scrape_job("target-app", "scrape_target_0", "1234")
        ]
        self.assertListEqual(self._scrape_jobs(prometheus_rel_id), expected_jobs)

    def test_adding_prometheus_then_target_forwards_a_labeled_alert_rule(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id = self.harness.add_relation(ALERT_RULES_RELATION, "rules-app")
        self.harness.add_relation_unit(alert_rules_rel_id, "rules-app/0")
        self.harness.update_relation_data(
            alert_rules_rel_id, "rules-app/0", {"groups": ALERT_RULE_1}
        )

        groups = self._alert_groups(prometheus_rel_id)
        self.assertEqual(len(groups), 1)
        self.assertDictEqual(groups[0], self._expected_cpu_usage_group("rules-app"))

    def test_adding_target_then_prometheus_forwards_a_labeled_scrape_job(self):
        target_rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION, "target-app")
        self.harness.add_relation_unit(target_rel_id, "target-app/0")
        self.harness.update_relation_data(
            target_rel_id,
            "target-app/0",
            {"hostname": "scrape_target_0", "port": "1234"},
        )

        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        expected_jobs = [
            self._expected_scrape_job("target-app", "scrape_target_0", "1234")
        ]
        self.assertListEqual(self._scrape_jobs(prometheus_rel_id), expected_jobs)

    def test_adding_target_then_prometheus_forwards_a_labeled_alert_rule(self):
        # NOTE: the original test added the Prometheus relation first, which
        # duplicated the prometheus-then-target test above.  Add the rules
        # relation first so the ordering in the test name is actually covered.
        alert_rules_rel_id = self.harness.add_relation(ALERT_RULES_RELATION, "rules-app")
        self.harness.add_relation_unit(alert_rules_rel_id, "rules-app/0")
        self.harness.update_relation_data(
            alert_rules_rel_id, "rules-app/0", {"groups": ALERT_RULE_1}
        )

        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        groups = self._alert_groups(prometheus_rel_id)
        self.assertEqual(len(groups), 1)
        self.assertDictEqual(groups[0], self._expected_cpu_usage_group("rules-app"))

    def test_scrape_jobs_from_multiple_target_applications_are_forwarded(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        for app_name, hostname, port in (
            ("target-app-1", "scrape_target_0", "1234"),
            ("target-app-2", "scrape_target_1", "5678"),
        ):
            rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION, app_name)
            self.harness.add_relation_unit(rel_id, f"{app_name}/0")
            self.harness.update_relation_data(
                rel_id, f"{app_name}/0", {"hostname": hostname, "port": port}
            )

        scrape_jobs = self._scrape_jobs(prometheus_rel_id)
        self.assertEqual(len(scrape_jobs), 2)
        expected_jobs = [
            self._expected_scrape_job("target-app-1", "scrape_target_0", "1234"),
            self._expected_scrape_job("target-app-2", "scrape_target_1", "5678"),
        ]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_alert_rules_from_multiple_target_applications_are_forwarded(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        for app_name, rule in (
            ("rules-app-1", ALERT_RULE_1),
            ("rules-app-2", ALERT_RULE_2),
        ):
            rel_id = self.harness.add_relation(ALERT_RULES_RELATION, app_name)
            self.harness.add_relation_unit(rel_id, f"{app_name}/0")
            self.harness.update_relation_data(
                rel_id, f"{app_name}/0", {"groups": rule}
            )

        groups = self._alert_groups(prometheus_rel_id)
        self.assertEqual(len(groups), 2)
        expected_groups = [
            self._expected_cpu_usage_group("rules-app-1"),
            {
                "name": "juju_testmodel_1234567_rules-app-2_alert_rules",
                "rules": [{
                    "alert": "DiskFull",
                    "expr": 'disk_free{is_container!="True", fstype!~".*tmpfs|squashfs|overlay"} <1024',
                    "for": "5m",
                    "labels": {
                        "override_group_by": "host",
                        "severity": "page",
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567",
                        "juju_application": "rules-app-2",
                        "juju_unit": "rules-app-2/0",
                    },
                    "annotations": {
                        "description": "Host {{ $labels.host}} {{ $labels.path }} is full\nsummary: Host {{ $labels.host }} {{ $labels.path}} is full\n"
                    },
                }],
            },
        ]
        self.assertListEqual(groups, expected_groups)

    def test_scrape_job_removal_differentiates_between_applications(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        for app_name, hostname, port in (
            ("target-app-1", "scrape_target_0", "1234"),
            ("target-app-2", "scrape_target_1", "5678"),
        ):
            # Use SCRAPE_TARGET_RELATION consistently with the other tests.
            rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION, app_name)
            self.harness.add_relation_unit(rel_id, f"{app_name}/0")
            self.harness.update_relation_data(
                rel_id, f"{app_name}/0", {"hostname": hostname, "port": port}
            )
            if app_name == "target-app-2":
                target_rel_id_2 = rel_id

        self.assertEqual(len(self._scrape_jobs(prometheus_rel_id)), 2)

        self.harness.remove_relation_unit(target_rel_id_2, "target-app-2/0")

        # Re-fetch the relation data rather than re-reading a stale snapshot.
        scrape_jobs = self._scrape_jobs(prometheus_rel_id)
        self.assertEqual(len(scrape_jobs), 1)
        expected_jobs = [
            self._expected_scrape_job("target-app-1", "scrape_target_0", "1234")
        ]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_alert_rules_removal_differentiates_between_applications(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        for app_name, rule in (
            ("rules-app-1", ALERT_RULE_1),
            ("rules-app-2", ALERT_RULE_2),
        ):
            # Use ALERT_RULES_RELATION consistently with the other tests.
            rel_id = self.harness.add_relation(ALERT_RULES_RELATION, app_name)
            self.harness.add_relation_unit(rel_id, f"{app_name}/0")
            self.harness.update_relation_data(
                rel_id, f"{app_name}/0", {"groups": rule}
            )
            if app_name == "rules-app-2":
                alert_rules_rel_id_2 = rel_id

        self.assertEqual(len(self._alert_groups(prometheus_rel_id)), 2)

        self.harness.remove_relation_unit(alert_rules_rel_id_2, "rules-app-2/0")

        # Re-fetch the relation data rather than re-reading a stale snapshot.
        groups = self._alert_groups(prometheus_rel_id)
        self.assertEqual(len(groups), 1)
        expected_groups = [self._expected_cpu_usage_group("rules-app-1")]
        self.assertListEqual(groups, expected_groups)

    def test_removing_scrape_jobs_differentiates_between_units(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        target_rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION, "target-app")
        for unit, hostname, port in (
            ("target-app/0", "scrape_target_0", "1234"),
            ("target-app/1", "scrape_target_1", "5678"),
        ):
            self.harness.add_relation_unit(target_rel_id, unit)
            self.harness.update_relation_data(
                target_rel_id, unit, {"hostname": hostname, "port": port}
            )

        scrape_jobs = self._scrape_jobs(prometheus_rel_id)
        # Both units of one application share a single job with two targets.
        self.assertEqual(len(scrape_jobs), 1)
        self.assertEqual(len(scrape_jobs[0].get("static_configs")), 2)

        self.harness.remove_relation_unit(target_rel_id, "target-app/1")

        scrape_jobs = self._scrape_jobs(prometheus_rel_id)
        self.assertEqual(len(scrape_jobs), 1)
        self.assertEqual(len(scrape_jobs[0].get("static_configs")), 1)
        expected_jobs = [
            self._expected_scrape_job("target-app", "scrape_target_0", "1234")
        ]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_removing_alert_rules_differentiates_between_units(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION, "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id = self.harness.add_relation(ALERT_RULES_RELATION, "rules-app")
        for unit, rule in (
            ("rules-app/0", ALERT_RULE_1),
            ("rules-app/1", ALERT_RULE_2),
        ):
            self.harness.add_relation_unit(alert_rules_rel_id, unit)
            self.harness.update_relation_data(
                alert_rules_rel_id, unit, {"groups": rule}
            )

        # Both units of one application are folded into a single named group.
        self.assertEqual(len(self._alert_groups(prometheus_rel_id)), 1)

        self.harness.remove_relation_unit(alert_rules_rel_id, "rules-app/1")

        groups = self._alert_groups(prometheus_rel_id)
        self.assertEqual(len(groups), 1)
        expected_groups = [self._expected_cpu_usage_group("rules-app")]
        self.assertListEqual(groups, expected_groups)
class TestDashboardConsumer(unittest.TestCase):
    """Tests for the Grafana dashboard consumer library."""

    def setUp(self):
        # Read the metadata content up front so the file handle is closed
        # deterministically, instead of leaking an open file object for the
        # lifetime of the test case.
        with open("metadata.yaml") as meta_file:
            meta = meta_file.read()
        self.harness = Harness(ConsumerCharm, meta=meta)
        self.harness.set_model_info(name=MODEL_INFO["name"], uuid=MODEL_INFO["uuid"])
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def setup_charm_relations(self):
        """Create the grafana-source and grafana-dashboard relations used by tests.

        Returns:
            A list containing the id of each dashboard relation created.
        """
        rel_ids = []
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        source_rel_id = self.harness.add_relation("grafana-source", "source")
        self.harness.add_relation_unit(source_rel_id, "source/0")
        rel_id = self.harness.add_relation("grafana-dashboard", "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        rel_ids.append(rel_id)
        self.harness.update_relation_data(
            rel_id,
            "provider",
            {"dashboards": json.dumps(SOURCE_DATA)},
        )
        return rel_ids

    def test_consumer_notifies_on_new_dashboards(self):
        """A new dashboard relation triggers exactly one dashboard event."""
        self.assertEqual(len(self.harness.charm.grafana_consumer._stored.dashboards), 0)
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        self.setup_charm_relations()
        self.assertEqual(self.harness.charm._stored.dashboard_events, 1)
        self.assertEqual(
            self.harness.charm.grafana_consumer.dashboards,
            [{
                "id": "file:tester",
                "relation_id": 1,
                "charm": "grafana-k8s",
                "content": DASHBOARD_RENDERED,
            }],
        )

    def test_consumer_error_on_bad_template(self):
        """An invalid dashboard template is reported back over the relation."""
        self.assertEqual(len(self.harness.charm.grafana_consumer._stored.dashboards), 0)
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        rels = self.setup_charm_relations()
        self.assertEqual(self.harness.charm._stored.dashboard_events, 1)
        bad_data = {
            "templates": {
                "file:tester": {
                    "charm": "grafana-k8s",
                    # Deliberately malformed Jinja to provoke a render error.
                    "content": "{{ unclosed variable",
                    "juju_topology": {
                        "model": MODEL_INFO["name"],
                        "model_uuid": MODEL_INFO["uuid"],
                        "application": "provider-tester",
                        "unit": "provider-tester/0",
                    },
                }
            },
            "uuid": "12345678",
        }
        self.harness.update_relation_data(
            rels[0],
            "provider",
            {"dashboards": json.dumps(bad_data)},
        )
        data = json.loads(
            self.harness.get_relation_data(rels[0], self.harness.model.app.name)["event"]
        )
        self.assertEqual(
            data["errors"],
            [{
                "dashboard_id": "file:tester",
                "error": "expected token 'end of print statement', got 'variable'",
            }],
        )