Example #1
class TestCharm(unittest.TestCase):
    """Unit tests for HookTestCharm driven through the ops testing Harness."""

    def setUp(self):
        # One fresh harness per test; register cleanup first so it runs
        # even if begin() raises.
        self.harness = Harness(HookTestCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_config_changed(self):
        """Updating the "thing" option records the value in stored state."""
        stored = self.harness.charm._stored
        self.assertEqual(list(stored.things), [])
        self.harness.update_config({"thing": "foo"})
        self.assertEqual(list(stored.things), ["foo"])

    def test_action(self):
        # the harness doesn't (yet!) help much with actions themselves
        event = Mock(params={"fail": ""})
        self.harness.charm._on_fortune_action(event)

        self.assertTrue(event.set_results.called)

    def test_action_fail(self):
        event = Mock(params={"fail": "fail this"})
        self.harness.charm._on_fortune_action(event)

        self.assertEqual(event.fail.call_args, [("fail this", )])

    def test_httpbin_pebble_ready(self):
        """Pebble-ready installs the expected layer and starts the service."""
        # Before the event fires the plan must be empty.
        self.assertEqual(
            self.harness.get_container_pebble_plan("httpbin").to_yaml(),
            "{}\n",
        )
        # The layer the charm should apply under default configuration.
        want = {
            "services": {
                "httpbin": {
                    "override": "replace",
                    "summary": "httpbin",
                    "command": "gunicorn -b 0.0.0.0:80 httpbin:app -k gevent",
                    "startup": "enabled",
                    "environment": {
                        "thing": "🎁"
                    },
                }
            },
        }
        workload = self.harness.model.unit.get_container("httpbin")
        # Fire the pebble-ready event for the httpbin container.
        self.harness.charm.on.httpbin_pebble_ready.emit(workload)
        got = self.harness.get_container_pebble_plan("httpbin").to_dict()
        self.assertEqual(want, got)
        # The service must now be running ...
        self.assertTrue(workload.get_service("httpbin").is_running())
        # ... and the unit active with no status message.
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())
Example #2
class TestCharm(unittest.TestCase):
    """Unit tests for WireguardSidecarCharm."""

    def setUp(self):
        self.harness = Harness(WireguardSidecarCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_wireguard_pebble_ready(self):
        """Pebble-ready applies the wireguard layer and starts the service."""
        # No plan should exist before the event fires.
        self.assertEqual(
            self.harness.get_container_pebble_plan("wireguard").to_yaml(),
            "{}\n",
        )
        # Layer expected under the default charm configuration.
        want = {
            "summary": "wireguard layer",
            "description": "pebble config layer for wireguard",
            "services": {
                "wireguard": {
                    "override": "replace",
                    "summary": "wireguard",
                    "command": "/scripts/run",
                    "startup": "enabled",
                    "environment": {
                        "server_port":
                        self.harness.charm.model.config["server_port"]
                    },
                }
            },
        }
        workload = self.harness.model.unit.get_container("wireguard")
        # Fire the pebble-ready event for the wireguard container.
        self.harness.charm.on.wireguard_pebble_ready.emit(workload)
        got = self.harness.get_container_pebble_plan("wireguard").to_dict()
        self.assertEqual(want, got)
        # Service must be running and the unit active with no message.
        self.assertTrue(workload.get_service("wireguard").is_running())
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())
Example #3
class TestCharm(unittest.TestCase):
    """Unit tests for HelloKubeconCharm."""

    def setUp(self):
        self.harness = Harness(HelloKubeconCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_gosherve_layer(self):
        """The pebble layer reflects the redirect-map config option."""
        # Test with empty config.
        self.assertEqual(self.harness.charm.config["redirect-map"], "https://jnsgr.uk/demo-routes")
        expected = {
            "summary": "gosherve layer",
            "description": "pebble config layer for gosherve",
            "services": {
                "gosherve": {
                    "override": "replace",
                    "summary": "gosherve",
                    "command": "/gosherve",
                    "startup": "enabled",
                    "environment": {
                        "REDIRECT_MAP_URL": "https://jnsgr.uk/demo-routes",
                        "WEBROOT": "/srv",
                    },
                }
            },
        }
        self.assertEqual(self.harness.charm._gosherve_layer(), expected)
        # And now test with a different value in the redirect-map config option.
        # Disable hook firing first.
        self.harness.disable_hooks()
        self.harness.update_config({"redirect-map": "test value"})
        expected["services"]["gosherve"]["environment"]["REDIRECT_MAP_URL"] = "test value"
        self.assertEqual(self.harness.charm._gosherve_layer(), expected)

    def test_on_config_changed(self):
        """config-changed installs the layer and (re)starts the service."""
        plan = self.harness.get_container_pebble_plan("gosherve")
        self.assertEqual(plan.to_dict(), {})
        # Trigger a config-changed hook. Since there was no plan initially, the
        # "gosherve" service in the container won't be running so we'll be
        # testing the `is_running() == False` codepath.
        self.harness.update_config({"redirect-map": "test value"})
        plan = self.harness.get_container_pebble_plan("gosherve")
        # Get the expected layer from the gosherve_layer method (tested above)
        expected = self.harness.charm._gosherve_layer()
        expected.pop("summary", "")
        expected.pop("description", "")
        # Check the plan is as expected
        self.assertEqual(plan.to_dict(), expected)
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())
        container = self.harness.model.unit.get_container("gosherve")
        self.assertEqual(container.get_service("gosherve").is_running(), True)

        # Now test again with different config, knowing that the "gosherve"
        # service is running (because we've just tested it above), so we'll
        # be testing the `is_running() == True` codepath.
        self.harness.update_config({"redirect-map": "test2 value"})
        plan = self.harness.get_container_pebble_plan("gosherve")
        # Adjust the expected plan
        expected["services"]["gosherve"]["environment"]["REDIRECT_MAP_URL"] = "test2 value"
        self.assertEqual(plan.to_dict(), expected)
        self.assertEqual(container.get_service("gosherve").is_running(), True)
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())

        # And finally test again with the same config to ensure we exercise
        # the case where the plan we've created matches the active one. We're
        # going to mock the container.stop and container.start calls to confirm
        # they were not called.
        with patch('ops.model.Container.start') as _start, patch('ops.model.Container.stop') as _stop:
            self.harness.charm.on.config_changed.emit()
            _start.assert_not_called()
            _stop.assert_not_called()

    @patch("charm.HelloKubeconCharm._fetch_site")
    def test_on_install(self, _fetch_site):
        """Install fetches the site exactly once."""
        self.harness.charm._on_install("mock_event")
        # Fix: the original referenced ``assert_called_once`` without calling
        # it, so the assertion never ran.
        _fetch_site.assert_called_once()

    @patch("charm.HelloKubeconCharm._fetch_site")
    def test_pull_site_action(self, _fetch_site):
        """The pull-site action fetches the site and reports a result."""
        mock_event = Mock()
        self.harness.charm._pull_site_action(mock_event)
        # Fix: missing parentheses — the assertion never executed.
        _fetch_site.assert_called_once()
        # Fix: ``mock_event.called_once_with(...)`` was a silent no-op on a
        # Mock; assert that the action result was actually reported.
        mock_event.set_results.assert_called_once_with({"result": "site pulled"})
class TestCharm(unittest.TestCase):
    """Unit tests for MongoDBCharm."""

    def setUp(self):
        self.harness = Harness(MongoDBCharm)
        self.addCleanup(self.harness.cleanup)
        mongo_resource = {
            "registrypath": "mongodb:4.4.1",
            "username": "******",
            "password": "******"
        }
        self.harness.add_oci_resource("mongodb-image", mongo_resource)
        self.harness.begin()
        # Peer relation used by most tests below.
        self.peer_rel_id = self.harness.add_relation('mongodb', 'mongodb')

    @patch('ops.testing._TestingPebbleClient.pull')
    def test_replica_set_name_can_be_changed(self, _):
        self.harness.set_leader(True)
        self.harness.container_pebble_ready("mongodb")

        # check default replica set name
        plan = self.harness.get_container_pebble_plan("mongodb")
        self.assertEqual(replica_set_name(plan), "rs0")

        # check replica set name can be changed
        self.harness.update_config({"replica_set_name": "new_name"})
        plan = self.harness.get_container_pebble_plan("mongodb")
        self.assertEqual(replica_set_name(plan), "new_name")

    @patch("mongoserver.MongoDB.reconfigure_replica_set")
    def test_replica_set_is_reconfigured_when_peer_joins(self, mock_reconf):
        self.harness.set_leader(True)
        self.harness.add_relation_unit(self.peer_rel_id, 'mongodb/1')
        self.harness.update_relation_data(self.peer_rel_id, 'mongodb/1',
                                          {'private-address': '10.0.0.1'})
        peers = [
            'mongodb-k8s-0.mongodb-k8s-endpoints',
            'mongodb-k8s-1.mongodb-k8s-endpoints'
        ]
        mock_reconf.assert_called_once_with(peers)

    def test_replica_set_uri_data_is_generated_correctly(self):
        self.harness.set_leader(True)
        replica_set_uri = self.harness.charm.mongo.replica_set_uri()
        data = self.harness.get_relation_data(self.peer_rel_id,
                                              self.harness.model.app.name)
        cred = "root:{}".format(data['root_password'])
        self.assertEqual(
            replica_set_uri,
            'mongodb://{}@mongodb-k8s-0.mongodb-k8s-endpoints:27017/admin'.
            format(cred))

    def test_leader_sets_key_and_root_credentials(self):
        # Toggle leadership off then on so the leader code path runs.
        self.harness.set_leader(False)
        self.harness.set_leader(True)
        data = self.harness.get_relation_data(self.peer_rel_id,
                                              self.harness.model.app.name)
        self.assertIsNotNone(data['root_password'])
        self.assertIsNotNone(data['security_key'])

    @patch('mongoserver.MongoDB.version')
    def test_charm_provides_version(self, mock_version):
        self.harness.set_leader(True)
        mock_version.return_value = "4.4.1"
        version = self.harness.charm.mongo.version()
        self.assertEqual(version, "4.4.1")

    @patch('mongoserver.MongoDB.is_ready')
    def test_start_is_deferred_if_mongo_is_not_ready(self, is_ready):
        # Fix: renamed from "..._if_monog_is_not_ready" (typo).
        is_ready.return_value = False
        self.harness.set_leader(True)
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.charm.on.start.emit()
            is_ready.assert_called()
            for message in sorted(logger.output):
                if "DEBUG:ops.framework:Deferring" in message:
                    self.assertIn("StartEvent", message)

    @patch('mongoserver.MongoDB.initialize_replica_set')
    @patch('mongoserver.MongoDB.is_ready')
    def test_start_is_deferred_if_mongo_is_not_initialized(
            self, is_ready, initialize):
        # Fix: renamed from "test_start_is_deffered_if_monog_..." (typos).
        is_ready.return_value = True
        initialize.side_effect = RuntimeError("Not Initialized")
        self.harness.set_leader(True)
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.charm.on.start.emit()
            is_ready.assert_called()
            self.assertIn(
                "INFO:charm:Deferring on_start since : error=Not Initialized",
                sorted(logger.output))
class TestCharm(unittest.TestCase):
    """Unit tests for ZookeeperK8SCharm."""

    def setUp(self):
        self.harness = Harness(ZookeeperK8SCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    @patch('ops.model.Container.push')
    @patch('charm.ZookeeperK8SCharm._get_all_unit_ingress_addresses')
    @patch('charm.ZookeeperK8SCharm._share_address_with_peers')
    @patch('charm.ZookeeperK8SCharm._get_my_ingress_address')
    def test_config_changed(self, mock_my_address, mock_share_address,
                            mock_get_all_addresses, mock_push):
        """A client-port change rewrites zoo.cfg and myid in the container."""
        mock_my_address.return_value = '10.1.0.42'
        mock_get_all_addresses.return_value = [
            '10.1.0.42', '10.1.0.43', '10.1.0.44']

        self.harness.update_config({'client-port': 1234})

        # The unit's own address is shared with its peers ...
        mock_share_address.assert_called_once_with('10.1.0.42', ANY)
        # ... and both configuration files are pushed to the workload.
        mock_push.assert_has_calls([
            call(path='/conf/zoo.cfg', source=SuperstringOf(
                ['clientPort=1234', '10.1.0.42', '10.1.0.43', '10.1.0.44'])),
            call(path='/data/myid', source=SuperstringOf(['1']))
        ], any_order=True)

    @patch('charm.KazooClient')
    def test_dump_data_action(self, mock_zk):
        """dump-data reports the value of every child of the root znode."""
        def fake_children(path):
            # Only the root znode has children in this fake tree.
            return ['first-child', 'second-child'] if path == '/' else []

        mock_zk.return_value.get_children.side_effect = fake_children
        mock_zk.return_value.get.return_value = ('my value', 'some metadata')
        event = Mock()

        self.harness.charm._on_dump_data_action(event)

        mock_zk.assert_called_once_with(hosts='127.0.0.1:2181')
        event.set_results.assert_called_once_with({
            'content': {
                'first-child': 'my value',
                'second-child': 'my value',
            }
        })

    @patch('charm.KazooClient')
    def test_seed_data_action(self, mock_zk):
        """seed-data creates the /test-seed znode and reports a result."""
        event = Mock()

        self.harness.charm._on_seed_data_action(event)

        mock_zk.assert_called_once_with(hosts='127.0.0.1:2181')
        mock_zk.return_value.ensure_path.assert_called_once_with('/test-seed')
        self.assertTrue(event.set_results.called)

    @patch('ops.model.Container.push')
    @patch('charm.ZookeeperK8SCharm._get_all_unit_ingress_addresses')
    @patch('charm.ZookeeperK8SCharm._share_address_with_peers')
    @patch('charm.ZookeeperK8SCharm._get_my_ingress_address')
    def test_zookeeper_pebble_ready(self, mock_my_address, mock_share_address,
                                    mock_get_all_addresses, mock_push):
        """Pebble-ready applies the layer, starts zookeeper and writes config."""
        mock_my_address.return_value = '10.1.0.42'
        mock_get_all_addresses.return_value = [
            '10.1.0.42', '10.1.0.43', '10.1.0.44']

        # No plan should exist before the event fires.
        self.assertEqual(
            self.harness.get_container_pebble_plan("zookeeper").to_yaml(),
            "{}\n",
        )
        # Layer expected under the default configuration.
        want = {
            "services": {
                "zookeeper": {
                    "override": "replace",
                    "summary": "zookeeper",
                    "command": "/docker-entrypoint.sh zkServer.sh start-foreground",
                    "startup": "enabled",
                }
            },
        }
        workload = self.harness.model.unit.get_container("zookeeper")
        # Fire the pebble-ready event for the zookeeper container.
        self.harness.charm.on.zookeeper_pebble_ready.emit(workload)
        got = self.harness.get_container_pebble_plan("zookeeper").to_dict()
        self.assertEqual(want, got)
        # Service is running and the unit is active with no message.
        self.assertTrue(workload.get_service("zookeeper").is_running())
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())
        # The ZooKeeper config was written on disk.
        mock_push.assert_has_calls([
            call(path='/conf/zoo.cfg',
                 source=SuperstringOf(['clientPort=2181'])),
            call(path='/data/myid', source=SuperstringOf(['1']))
        ], any_order=True)
Example #6
class TestCharm(unittest.TestCase):
    """Unit tests for PrometheusCharm using patched network data."""

    @patch("charm.KubernetesServicePatch", lambda x, y: None)
    @patch_network_get(private_address="1.1.1.1")
    def setUp(self, *unused):
        # begin_with_initial_hooks() replays the startup hook sequence so
        # every test starts from a fully initialised charm.
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin_with_initial_hooks()

    @patch_network_get(private_address="1.1.1.1")
    def test_grafana_is_provided_port_and_source(self, *unused):
        # The grafana-source relation data advertises this unit's host:port.
        rel_id = self.harness.add_relation("grafana-source", "grafana")
        self.harness.add_relation_unit(rel_id, "grafana/0")
        grafana_host = self.harness.get_relation_data(
            rel_id, self.harness.model.unit.name)["grafana_source_host"]
        self.assertEqual(grafana_host, "{}:{}".format("1.1.1.1", "9090"))

    def test_default_cli_log_level_is_info(self):
        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--log.level"), "info")

    def test_invalid_log_level_defaults_to_debug(self):
        # An unrecognised level is logged at ERROR and replaced with "debug".
        bad_log_config = {"log_level": "bad-level"}
        with self.assertLogs(level="ERROR") as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--log.level"), "debug")

    def test_valid_log_level_is_accepted(self):
        valid_log_config = {"log_level": "warn"}
        self.harness.update_config(valid_log_config)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--log.level"), "warn")

    @patch_network_get(private_address="1.1.1.1")
    def test_ingress_relation_not_set(self):
        # Without an ingress relation the external URL is the unit address.
        self.harness.set_leader(True)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--web.external-url"),
                         "http://1.1.1.1:9090")

    @patch_network_get(private_address="1.1.1.1")
    def test_ingress_relation_set(self):
        self.harness.set_leader(True)

        rel_id = self.harness.add_relation("ingress", "traefik-ingress")
        self.harness.add_relation_unit(rel_id, "traefik-ingress/0")

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--web.external-url"),
                         "http://1.1.1.1:9090")

    @patch_network_get(private_address="1.1.1.1")
    def test_web_external_url_has_precedence_over_ingress_relation(self):
        self.harness.set_leader(True)

        # NOTE(review): the next line is corrupted — a redacted URL value is
        # fused with the decorator of the following test. Both this test and
        # the next need to be restored from upstream history.
        self.harness.update_config({"web_external_url": "http://*****:*****@patch_network_get(private_address="1.1.1.1")
    def test_web_external_url_set(self):
        self.harness.set_leader(True)

        # NOTE(review): corrupted line (redacted URL fused with the next
        # test's decorator) — restore from upstream history.
        self.harness.update_config({"web_external_url": "http://*****:*****@patch("prometheus_server.Prometheus.reload_configuration")
    def test_configuration_reload(self, trigger_configuration_reload):
        # Both pebble-ready and a config change must trigger a reload.
        self.harness.container_pebble_ready("prometheus")

        trigger_configuration_reload.assert_called()

        self.harness.update_config({"log_level": "INFO"})
        trigger_configuration_reload.assert_called()
class TestCharm(unittest.TestCase):
    """Unit tests for RabbitMQOperatorCharm."""

    @patch("charm.KubernetesServicePatch", lambda x, y: None)
    def setUp(self, *unused):
        self.harness = Harness(charm.RabbitMQOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_config_changed(self):
        """The stored plugin list keeps its defaults across a config change."""
        default_plugins = ['rabbitmq_management', 'rabbitmq_peer_discovery_k8s']
        self.assertEqual(
            list(self.harness.charm._stored.enabled_plugins), default_plugins)
        # Mock the file push
        self.harness.charm._render_and_push_config_files = Mock()
        self.harness.update_config({"enabled_plugins": "rabbitmq_foobar"})
        self.assertEqual(
            list(self.harness.charm._stored.enabled_plugins), default_plugins)

    def test_action(self):
        event = Mock()
        self.harness.charm._on_get_operator_info_action(event)

        self.assertTrue(event.set_results.called)

    def test_rabbitmq_pebble_ready(self):
        """Pebble-ready installs the rabbitmq layer and starts the server."""
        # No plan should exist before the event fires.
        self.assertEqual(
            self.harness.get_container_pebble_plan("rabbitmq").to_yaml(),
            "{}\n",
        )
        # Layer expected under the default configuration.
        want = {
            "services": {
                "rabbitmq-server": {
                    "override": "replace",
                    "summary": "RabbitMQ Server",
                    "command": "rabbitmq-server",
                    "startup": "enabled",
                },
            },
        }
        workload = self.harness.model.unit.get_container("rabbitmq")
        # Fire the pebble-ready event for the rabbitmq container.
        self.harness.charm.on.rabbitmq_pebble_ready.emit(workload)
        got = self.harness.get_container_pebble_plan("rabbitmq").to_dict()
        self.assertEqual(want, got)
        # The service must now be running.
        self.assertTrue(workload.get_service("rabbitmq-server").is_running())

    def test_update_status(self):
        """This test validates the charm, the peers relation and the amqp relation.
        """
        self.harness.set_leader(True)
        self.harness.model.get_binding = Mock()
        self.harness.charm._get_admin_api = Mock()

        # Early not initialized
        self.harness.charm.on.update_status.emit()
        self.assertEqual(
            self.harness.model.unit.status,
            ops.model.WaitingStatus(
                'Waiting for leader to create operator user'))

        # RabbitMQ is up, operator user initialized
        peers_rel = self.harness.add_relation("peers", "rabbitmq-operator")
        self.harness.add_relation_unit(peers_rel, "rabbitmq-operator/0")

        # AMQP relation incomplete
        amqp_rel = self.harness.add_relation("amqp", "amqp-client-app")
        self.harness.add_relation_unit(amqp_rel, "amqp-client-app/0")

        # AMQP relation complete
        self.harness.update_relation_data(
            amqp_rel, "amqp-client-app",
            {"username": "******", "vhost": "client-vhost"})
        self.harness.charm.on.update_status.emit()
        self.assertEqual(
            self.harness.model.unit.status, ops.model.ActiveStatus())
class TestCharm(unittest.TestCase):
    @patch("charm.KubernetesServicePatch", lambda x, y: None)
    def setUp(self):
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    @patch("ops.testing._TestingModelBackend.network_get")
    def test_grafana_is_provided_port_and_source(self, mock_net_get, *unused):
        self.harness.update_config(MINIMAL_CONFIG)
        ip = "1.1.1.1"
        net_info = {"bind-addresses": [{"interface-name": "ens1", "addresses": [{"value": ip}]}]}
        mock_net_get.return_value = net_info

        rel_id = self.harness.add_relation("grafana-source", "grafana")
        self.harness.add_relation_unit(rel_id, "grafana/0")
        grafana_host = self.harness.get_relation_data(rel_id, self.harness.model.unit.name)[
            "grafana_source_host"
        ]
        self.assertEqual(grafana_host, "{}:{}".format(ip, "9090"))

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_default_cli_log_level_is_info(self, *unused):
        self.harness.update_config(MINIMAL_CONFIG)
        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--log.level"), "info")

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_invalid_log_level_defaults_to_debug(self, *unused):
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config["log-level"] = "bad-level"
        with self.assertLogs(level="ERROR") as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--log.level"), "debug")

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_valid_log_level_is_accepted(self, *unused):
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config["log-level"] = "warn"
        self.harness.update_config(valid_log_config)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--log.level"), "warn")

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_ingress_relation_not_set(self, *unused):
        self.harness.set_leader(True)

        valid_log_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(valid_log_config)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertIsNone(cli_arg(plan, "--web.external-url"))

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_ingress_relation_set(self, *unused):
        self.harness.set_leader(True)

        self.harness.update_config(MINIMAL_CONFIG.copy())

        rel_id = self.harness.add_relation("ingress", "ingress")
        self.harness.add_relation_unit(rel_id, "ingress/0")

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(
            cli_arg(plan, "--web.external-url"),
            "http://*****:*****@patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_metrics_wal_compression_is_not_enabled_by_default(self, *unused):
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--storage.tsdb.wal-compression"), None)

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_metrics_wal_compression_can_be_enabled(self, *unused):
        compress_config = MINIMAL_CONFIG.copy()
        compress_config["metrics-wal-compression"] = True
        self.harness.update_config(compress_config)

        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(
            cli_arg(plan, "--storage.tsdb.wal-compression"),
            "--storage.tsdb.wal-compression",
        )

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_valid_metrics_retention_times_can_be_set(self, *unused):
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ["y", "w", "d", "h", "m", "s"]
        for unit in acceptable_units:
            retention_time = "{}{}".format(1, unit)
            retention_time_config["metrics-retention-time"] = retention_time
            self.harness.update_config(retention_time_config)

            plan = self.harness.get_container_pebble_plan("prometheus")
            self.assertEqual(cli_arg(plan, "--storage.tsdb.retention.time"), retention_time)

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_invalid_metrics_retention_times_can_not_be_set(self, *unused):
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = "1x"
        retention_time_config["metrics-retention-time"] = retention_time

        self.harness.update_config(retention_time_config)
        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--storage.tsdb.retention.time"), None)

        # invalid time value
        retention_time = "0d"
        retention_time_config["metrics-retention-time"] = retention_time

        self.harness.update_config(retention_time_config)
        plan = self.harness.get_container_pebble_plan("prometheus")
        self.assertEqual(cli_arg(plan, "--storage.tsdb.retention.time"), None)

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_global_evaluation_interval_can_be_set(self, push, _):
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ["y", "w", "d", "h", "m", "s"]
        for unit in acceptable_units:
            push.reset()
            evalint_config["evaluation-interval"] = "{}{}".format(1, unit)
            self.harness.update_config(evalint_config)
            config = push.call_args[0]
            gconfig = global_config(config)
            self.assertEqual(gconfig["evaluation_interval"], evalint_config["evaluation-interval"])

    @patch("ops.testing._TestingPebbleClient.remove_path")
    @patch("ops.testing._TestingPebbleClient.push")
    def test_default_scrape_config_is_always_set(self, push, _):
        self.harness.update_config(MINIMAL_CONFIG)
        config = push.call_args[0]
        prometheus_scrape_config = scrape_config(config, "prometheus")
        self.assertIsNotNone(prometheus_scrape_config, "No default config found")
class TestCharm(unittest.TestCase):
    """Tests for the Cassandra operator charm, driven through the ops Harness."""

    # NOTE(review): `@patch.object(..., new=...)` decorators are active only
    # while setUp itself executes and are reverted when it returns — so the
    # initial hooks fired by begin_with_initial_hooks() run against the fakes,
    # but code running later in test bodies does not. Confirm the individual
    # tests do not rely on these patches staying in effect.
    @patch.object(CassandraOperatorCharm, "_goal_units", new=lambda x: 1)
    @patch.object(CassandraOperatorCharm, "_bind_address", new=lambda x: "1.1.1.1")
    @patch.object(ops.model.Container, "pull", new=fake_pull)
    @patch.object(ops.model.Container, "push", new=fake_push)
    def setUp(self):
        """Build a harness for the charm and run the initial hook sequence."""
        self.harness = Harness(CassandraOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        # Fires install/config-changed/start etc. under the patches above.
        self.harness.begin_with_initial_hooks()
        self.harness.set_leader(True)

    def tearDown(self):
        """Reset the module-level FILES store between tests."""
        # Rebinds the global rather than mutating it in place — presumably
        # fake_push/fake_pull look FILES up by name at call time, so a fresh
        # dict is equivalent to clearing; verify if anything captured the
        # old dict object.
        global FILES
        FILES = {}

    def test_relation_is_set(self):
        """Joining the database relation publishes our port and address."""
        rel_id = self.harness.add_relation("database", "otherapp")
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, "otherapp/0")
        self.harness.update_relation_data(rel_id, "otherapp", {})
        # Fetch our application's relation data once and check both keys.
        app_data = self.harness.get_relation_data(rel_id, self.harness.model.app.name)
        self.assertEqual(app_data["port"], "9042")
        self.assertEqual(
            app_data["address"],
            "cassandra-k8s-0.cassandra-k8s-endpoints.None.svc.cluster.local",
        )

    def test_root_password_is_set(self):
        """No peer-data password exists until root_password() generates one."""
        peers = self.harness.charm.model.get_relation("cassandra-peers")
        self.assertIsNone(peers.data[self.harness.charm.app].get("root_password", None))
        self.assertTrue(self.harness.charm.cassandra.root_password(None))

    def test_config_file_is_set(self):
        """Pebble-ready renders the expected cassandra.yaml into the container."""
        self.harness.container_pebble_ready("cassandra")
        sample_content = yaml.safe_load(SAMPLE_CONFIG)
        content_str = (
            self.harness.charm.unit.get_container("cassandra")
            .pull("/etc/cassandra/cassandra.yaml")
            .read()
        )
        content = yaml.safe_load(content_str)
        # FIX: use assertEqual instead of a bare `assert` — a bare assert is
        # stripped under `python -O` (silently passing) and gives no diff of
        # the two parsed YAML mappings on failure.
        self.assertEqual(content, sample_content)

    @patch("ops.testing._TestingModelBackend.network_get")
    @patch("ops.testing._TestingPebbleClient.list_files")
    def test_prometheus_data_set(self, mock_net_get, mock_list_files):
        bind_address = "1.1.1.1"
        fake_network = {
            "bind-addresses": [
                {
                    "interface-name": "eth0",
                    "addresses": [{"hostname": "cassandra-tester-0", "value": bind_address}],
                }
            ]
        }
        mock_net_get.return_value = fake_network
        rel_id = self.harness.add_relation("monitoring", "otherapp")
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, "otherapp/0")
        self.harness.update_relation_data(rel_id, "otherapp", {})
        self.assertEqual(
            json.loads(
                self.harness.get_relation_data(rel_id, self.harness.model.app.name)["scrape_jobs"]
            )[0]["static_configs"][0]["targets"],
            ["*:9500"],
        )

    @patch("ops.testing._TestingModelBackend.network_get")
    @patch("ops.testing._TestingPebbleClient.list_files")
    def test_heap_size_default(self, mock_net_get, mock_list_files):
        cassandra_environment = self._start_cassandra_and_get_pebble_service().environment

        self.assertEqual(cassandra_environment["JVM_OPTS"], "-Xms6G -Xmx6G")
        self.assertEqual(self.harness.model.unit.status, ops.model.ActiveStatus())

    @patch("ops.testing._TestingModelBackend.network_get")
    @patch("ops.testing._TestingPebbleClient.list_files")
    def test_heap_size_config_success(self, mock_net_get, mock_list_files):
        self.harness.update_config({"heap_size": "1g"})

        cassandra_environment = self._start_cassandra_and_get_pebble_service().environment

        self.assertEqual(cassandra_environment["JVM_OPTS"], "-Xms1g -Xmx1g")
        self.assertEqual(self.harness.model.unit.status, ops.model.ActiveStatus())

    @patch("ops.testing._TestingModelBackend.network_get")
    @patch("ops.testing._TestingPebbleClient.list_files")
    def test_heap_size_config_invalid(self, mock_net_get, mock_list_files):
        self.harness.update_config({"heap_size": "0.5g"})

        self.assertEqual(
            self.harness.model.unit.status,
            ops.model.BlockedStatus("Invalid Cassandra heap size setting: '0.5g'"),
        )

    def _start_cassandra_and_get_pebble_service(self):
        """Emit pebble-ready for cassandra and return its planned service."""
        container = self.harness.model.unit.get_container("cassandra")
        self.harness.charm.on.cassandra_pebble_ready.emit(container)
        plan = self.harness.get_container_pebble_plan("cassandra")
        return plan.services["cassandra"]