def test_add_resource_but_oci(self):
    """Adding raw content for an oci-image resource must raise RuntimeError."""
    meta = '''
        name: test-app
        resources:
          image:
            type: oci-image
            description: "Image to deploy."
        '''
    harness = Harness(CharmBase, meta=meta)
    self.addCleanup(harness.cleanup)
    # Only 'file' type resources accept raw content; oci-image does not.
    with self.assertRaises(RuntimeError):
        harness.add_resource('image', 'content')
def test_add_resource_unknown_filename(self):
    """A file resource without an explicit filename is stored under the resource name."""
    harness = Harness(CharmBase, meta='''
        name: test-app
        resources:
          image:
            type: file
            description: "Image to deploy."
        ''')
    self.addCleanup(harness.cleanup)
    harness.add_resource('image', 'foo contents\n')
    path = harness.model.resources.fetch('image')
    # Bug fix: the msg placeholder was never filled in; format in the path.
    self.assertTrue(str(path).endswith('/image/image'),
                    msg='expected {} to end with /image/image'.format(path))
class TestRemoteWriteConsumer(unittest.TestCase):
    """Tests for the remote-write consumer side of the relation."""

    def setUp(self):
        self.harness = Harness(RemoteWriteConsumerCharm, meta=METADATA)
        # Bug fix: open(...).read() leaked the file handle (ResourceWarning);
        # use a context manager so it is always closed.
        with open("./promql-transform", "rb") as transform:
            self.harness.add_resource("promql-transform-amd64", transform.read())
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin_with_initial_hooks()

    def test_address_is_set(self):
        """A remote_write URL published by the provider appears in endpoints."""
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        self.harness.update_relation_data(
            rel_id,
            "provider/0",
            {"remote_write": json.dumps({"url": "http://1.1.1.1:9090/api/v1/write"})},
        )
        assert list(self.harness.charm.remote_write_consumer.endpoints) == [
            {"url": "http://1.1.1.1:9090/api/v1/write"}
        ]

    @patch.object(RemoteWriteConsumerCharm, "_handle_endpoints_changed")
    def test_config_is_set(self, mock_handle_endpoints_changed):
        """Updating relation data fires the endpoints-changed handler for that relation."""
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        self.harness.update_relation_data(
            rel_id,
            "provider/0",
            {"remote_write": json.dumps({"url": "http://1.1.1.1:9090/api/v1/write"})},
        )
        mock_handle_endpoints_changed.assert_called()
        # The event passed to the handler must reference the relation that changed.
        event = mock_handle_endpoints_changed.call_args.args[0]
        self.assertEqual(rel_id, event.relation_id)
        assert list(self.harness.charm.remote_write_consumer.endpoints) == [
            {"url": "http://1.1.1.1:9090/api/v1/write"}
        ]

    def test_no_remote_write_endpoint_provided(self):
        """Empty relation data yields no remote-write endpoints."""
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        self.harness.update_relation_data(rel_id, "provider/0", {})
        assert list(self.harness.charm.remote_write_consumer.endpoints) == []
def test_add_resource_string(self):
    """String content for a file resource is written under the declared filename."""
    harness = Harness(CharmBase, meta='''
        name: test-app
        resources:
          image:
            type: file
            filename: foo.txt
            description: "Image to deploy."
        ''')
    self.addCleanup(harness.cleanup)
    harness.add_resource('image', 'foo contents\n')
    path = harness.model.resources.fetch('image')
    # Bug fix: the msg named the wrong filename (foo.zip, copied from the
    # bytes test) and never filled its {} placeholder.
    self.assertTrue(str(path).endswith('/image/foo.txt'),
                    msg='expected {} to end with /image/foo.txt'.format(path))
    with path.open('rt') as f:
        self.assertEqual('foo contents\n', f.read())
def test_add_resource_bytes(self):
    """Bytes content for a file resource is stored verbatim under the declared filename."""
    harness = Harness(CharmBase, meta='''
        name: test-app
        resources:
          image:
            type: file
            filename: foo.zip
            description: "Image to deploy."
        ''')
    self.addCleanup(harness.cleanup)
    raw_contents = b'\xff\xff\x00blah\n'
    harness.add_resource('image', raw_contents)
    path = harness.model.resources.fetch('image')
    # Bug fix: the msg placeholder was never filled in; format in the path.
    self.assertTrue(str(path).endswith('/image/foo.zip'),
                    msg='expected {} to end with /image/foo.zip'.format(path))
    with path.open('rb') as f:
        self.assertEqual(raw_contents, f.read())
class TestTransform(unittest.TestCase):
    """Test that the promql-transform implementation works."""

    def setUp(self):
        self.harness = Harness(TransformProviderCharm, meta=META)
        self.harness.set_model_name("transform")
        self.addCleanup(self.harness.cleanup)
        self.harness.add_resource("promql-transform-amd64", "dummy resource")
        self.harness.begin()

    def _attach_real_transform_binary(self):
        """Replace the dummy resource with the real promql-transform binary.

        Bug fix: the previous inline ``open(...).read()`` calls leaked the
        file handle; a context manager guarantees it is closed.
        """
        with open("./promql-transform", "rb") as binary:
            self.harness.add_resource("promql-transform-amd64", binary.read())

    @staticmethod
    def _cpu_overuse_rules():
        """Return the shared CPUOverUse alert-rule fixture used by two tests."""
        return {
            "groups": [{
                "alert": "CPUOverUse",
                "expr": "process_cpu_seconds_total > 0.12",
                "for": "0m",
                "labels": {
                    "severity": "Low",
                    "juju_model": "None",
                    "juju_model_uuid": "f2c1b2a6-e006-11eb-ba80-0242ac130004",
                    "juju_application": "consumer-tester",
                },
                "annotations": {
                    "summary": "Instance {{ $labels.instance }} CPU over use",
                    "description": "{{ $labels.instance }} of job "
                                   "{{ $labels.job }} has used too much CPU.",
                },
            }]
        }

    # pylint: disable=protected-access
    @unittest.mock.patch("platform.processor", lambda: "teakettle")
    def test_disable_on_invalid_arch(self):
        """An unsupported architecture disables the transformer."""
        transform = self.harness.charm.transformer
        self.assertIsNone(transform.path)
        self.assertTrue(transform._disabled)

    # pylint: disable=protected-access
    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_gives_path_on_valid_arch(self):
        """When given a valid arch, it should return the resource path."""
        transformer = self.harness.charm.transformer
        self.assertIsInstance(transformer.path, PosixPath)

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_setup_transformer(self):
        """When setup it should know the path to the binary."""
        transform = self.harness.charm.transformer
        self.assertIsInstance(transform.path, PosixPath)
        p = str(transform.path)
        self.assertTrue(
            p.startswith("/") and p.endswith("promql-transform-amd64"))

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    @unittest.mock.patch("subprocess.run")
    def test_returns_original_expression_when_subprocess_call_errors(
            self, mocked_run):
        """If the binary fails, the original expression is passed through."""
        mocked_run.side_effect = subprocess.CalledProcessError(
            returncode=10, cmd="promql-transform", stderr="")
        transform = self.harness.charm.transformer
        output = transform.apply_label_matchers(self._cpu_overuse_rules())
        self.assertEqual(output["groups"][0]["expr"],
                         "process_cpu_seconds_total > 0.12")

    @unittest.mock.patch("platform.processor", lambda: "invalid")
    def test_uses_original_expression_when_resource_missing(self):
        """If no usable binary exists, the original expression is passed through."""
        transform = self.harness.charm.transformer
        output = transform.apply_label_matchers(self._cpu_overuse_rules())
        self.assertEqual(output["groups"][0]["expr"],
                         "process_cpu_seconds_total > 0.12")

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_fetches_the_correct_expression(self):
        """A single label matcher is injected into a bare metric name."""
        self._attach_real_transform_binary()
        transform = self.harness.charm.transformer
        output = transform._apply_label_matcher(
            "up", {"juju_model": "some_juju_model"})
        assert output == 'up{juju_model="some_juju_model"}'

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_handles_comparisons(self):
        """Label matchers are injected without breaking comparison operators."""
        self._attach_real_transform_binary()
        transform = self.harness.charm.transformer
        output = transform._apply_label_matcher(
            "up > 1", {"juju_model": "some_juju_model"})
        assert output == 'up{juju_model="some_juju_model"} > 1'

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_handles_multiple_labels(self):
        """Multiple label matchers are injected in sorted order."""
        self._attach_real_transform_binary()
        transform = self.harness.charm.transformer
        output = transform._apply_label_matcher(
            "up > 1",
            {
                "juju_model": "some_juju_model",
                "juju_model_uuid": "123ABC",
                "juju_application": "some_application",
                "juju_unit": "some_application/1",
            },
        )
        assert (
            output == 'up{juju_application="some_application",juju_model="some_juju_model"'
            ',juju_model_uuid="123ABC",juju_unit="some_application/1"} > 1')
class TestEndpointConsumer(unittest.TestCase):
    """Tests for the scrape-endpoint consumer side of the relation."""

    def setUp(self):
        # Bug fix: both files were previously opened without ever being
        # closed; context managers guarantee the handles are released
        # (Harness consumes its meta during construction, so closing is safe).
        with open("metadata.yaml") as metadata_file:
            self.harness = Harness(EndpointConsumerCharm, meta=metadata_file)
        with open("./promql-transform", "rb") as transform:
            self.harness.add_resource("promql-transform-amd64", transform.read())
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def setup_charm_relations(self, multi=False):
        """Create relations used by test cases.

        Args:
            multi: a boolean indicating if multiple relations must be created.
        """
        rel_ids = []
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        rel_ids.append(rel_id)
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "scrape_jobs": json.dumps(SCRAPE_JOBS),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id,
            "consumer/0",
            {
                "prometheus_scrape_unit_address": "1.1.1.1",
                "prometheus_scrape_unit_name": "consumer/0",
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 2)

        if multi:
            rel_id = self.harness.add_relation(RELATION_NAME, "other-consumer")
            rel_ids.append(rel_id)
            self.harness.update_relation_data(
                rel_id,
                "other-consumer",
                {
                    "scrape_metadata": json.dumps(OTHER_SCRAPE_METADATA),
                    "scrape_jobs": json.dumps(OTHER_SCRAPE_JOBS),
                },
            )
            self.harness.add_relation_unit(rel_id, "other-consumer/0")
            self.harness.update_relation_data(
                rel_id,
                "other-consumer/0",
                {
                    "prometheus_scrape_unit_address": "2.2.2.2",
                    "prometheus_scrape_unit_name": "other-consumer/0",
                },
            )
            self.assertEqual(self.harness.charm._stored.num_events, 4)

        return rel_ids

    def validate_jobs(self, jobs):
        """Validate that a list of jobs has the expected fields.

        Existence for unit labels is not checked since these do not
        exist for all jobs.

        Args:
            jobs: list of jobs where each job is a dictionary.

        Raises:
            assertion failures if any job is not as expected.
        """
        for job in jobs:
            self.assertIn("job_name", job)
            self.assertIn("static_configs", job)
            static_configs = job["static_configs"]
            for static_config in static_configs:
                self.assertIn("targets", static_config)
                self.assertIn("labels", static_config)
                labels = static_config["labels"]
                self.assertIn("juju_model", labels)
                self.assertIn("juju_model_uuid", labels)
                self.assertIn("juju_application", labels)
                self.assertIn("juju_charm", labels)

            relabel_configs = job["relabel_configs"]
            self.assertEqual(len(relabel_configs), 1)

            relabel_config = relabel_configs[0]
            self.assertEqual(
                relabel_config.get("source_labels"),
                ["juju_model", "juju_model_uuid", "juju_application", "juju_unit"],
            )

    def test_consumer_notifies_on_new_scrape_relation(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id, "consumer",
            {"scrape_metadata": json.dumps(SCRAPE_METADATA)})
        self.assertEqual(self.harness.charm._stored.num_events, 1)

    def test_consumer_notifies_on_new_scrape_target(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id, "consumer/0",
            {"prometheus_scrape_host": "1.1.1.1"})
        self.assertEqual(self.harness.charm._stored.num_events, 1)

    def test_consumer_returns_all_static_scrape_labeled_jobs(self):
        self.setup_charm_relations()
        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        self.validate_jobs(jobs)

    def test_consumer_does_not_unit_label_fully_qualified_targets(self):
        self.setup_charm_relations()
        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        for job in jobs:
            for static_config in job["static_configs"]:
                if FULL_TARGET in static_config.get("targets"):
                    self.assertNotIn("juju_unit", static_config.get("labels"))

    def test_consumer_does_attach_unit_labels_to_wildcard_hosts(self):
        self.setup_charm_relations()
        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        for job in jobs:
            for static_config in job["static_configs"]:
                if FULL_TARGET not in static_config.get("targets"):
                    self.assertIn("juju_unit", static_config.get("labels"))

    def test_consumer_allows_custom_metrics_paths(self):
        rel_ids = self.setup_charm_relations()
        self.assertEqual(len(rel_ids), 1)
        rel_id = rel_ids[0]

        jobs = self.harness.charm.prometheus_consumer.jobs()
        for job in jobs:
            if job.get("metrics_path"):
                name_suffix = job_name_suffix(job["job_name"],
                                              juju_job_labels(job), rel_id)
                path = named_job_attribute(name_suffix, "metrics_path",
                                           "/metrics")
                self.assertEqual(job["metrics_path"], path)

    def test_consumer_sanitizes_jobs(self):
        self.setup_charm_relations()
        jobs = self.harness.charm.prometheus_consumer.jobs()
        for job in jobs:
            job_keys = set(job.keys())
            self.assertTrue(job_keys.issubset(ALLOWED_KEYS))

    def test_consumer_returns_jobs_for_all_relations(self):
        self.setup_charm_relations(multi=True)
        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS) + len(OTHER_SCRAPE_JOBS))

    def test_consumer_scrapes_each_port_for_wildcard_hosts(self):
        rel_ids = self.setup_charm_relations()
        self.assertEqual(len(rel_ids), 1)
        rel_id = rel_ids[0]

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        ports = wildcard_target_ports(SCRAPE_JOBS)
        targets = wildcard_targets(jobs, ports)
        consumers = self.harness.charm.model.get_relation(RELATION_NAME, rel_id)
        self.assertEqual(len(targets), len(ports) * len(consumers.units))

    def test_consumer_handles_default_scrape_job(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "scrape_jobs": json.dumps(DEFAULT_JOBS),
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 1)

        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id,
            "consumer/0",
            {
                "prometheus_scrape_unit_address": "1.1.1.1",
                "prometheus_scrape_unit_name": "provider/0",
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 2)

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.validate_jobs(jobs)

    def test_consumer_overwrites_juju_topology_labels(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "scrape_jobs": json.dumps(BAD_JOBS),
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 1)

        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id,
            "consumer/0",
            {
                "prometheus_scrape_unit_address": "1.1.1.1",
                "prometheus_scrape_unit_name": "provider/0",
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 2)

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), 1)
        self.validate_jobs(jobs)

        # Juju topology labels supplied by the consumer must be replaced.
        bad_labels = juju_job_labels(BAD_JOBS[0])
        labels = juju_job_labels(jobs[0])
        for label_name, label_value in labels.items():
            self.assertNotEqual(label_value, bad_labels[label_name])

    def test_consumer_returns_alerts_rules_file(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "alert_rules": json.dumps(ALERT_RULES),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.assertEqual(self.harness.charm._stored.num_events, 1)

        rules_file = self.harness.charm.prometheus_consumer.alerts()
        alerts = list(rules_file.values())[0]
        alert_names = [x["alert"] for x in alerts["groups"][0]["rules"]]
        self.assertEqual(alert_names, ["CPUOverUse", "PrometheusTargetMissing"])

    def test_consumer_logs_an_error_on_missing_alerting_data(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        bad_metadata = {"bad": "metadata"}
        bad_rules = {"bad": "rule"}

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(bad_metadata),
                "alert_rules": json.dumps(bad_rules),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.assertEqual(self.harness.charm._stored.num_events, 1)
        with self.assertLogs(level="WARNING") as logger:
            _ = self.harness.charm.prometheus_consumer.alerts()
            messages = logger.output
            self.assertEqual(len(messages), 2)
            self.assertIn(
                "Alert rules were found but no usable group or identifier was present",
                messages[1])

    def test_consumer_accepts_rules_with_no_identifier(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "alert_rules": json.dumps(UNLABELED_ALERT_RULES),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.assertEqual(self.harness.charm._stored.num_events, 1)
        with self.assertLogs(level="DEBUG") as logger:
            _ = self.harness.charm.prometheus_consumer.alerts()
            messages = logger.output
            self.assertIn(
                "Alert rules were found but no usable labels were present",
                messages[1])
            self.assertIn(
                "No labeled alert rules were found, and no 'scrape_metadata' "
                "was available. Using the alert group name as filename.",
                messages[2],
            )
        alerts = self.harness.charm.prometheus_consumer.alerts()
        self.assertIn("unlabeled_external_cpu_alerts", alerts.keys())
        self.assertEqual(UNLABELED_ALERT_RULES,
                         alerts["unlabeled_external_cpu_alerts"])