class TestMySQLProvider(unittest.TestCase):
    def setup_harness(self, config: dict, meta: dict) -> None:
        config_yaml = CONFIG_YAML.format(**config)
        meta_yaml = PROVIDER_META.format(**meta)
        self.harness = Harness(MySQLCharm, meta=meta_yaml, config=config_yaml)
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def test_databases_are_created_when_requested(self):
        config = CONFIG.copy()
        meta = METADATA.copy()
        self.setup_harness(config, meta)

        requested_database = ["mysql_database"]
        json_request = json.dumps(requested_database)
        consumer_data = {"databases": json_request}

        rel_id = self.harness.add_relation("database", "consumer")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertDictEqual(data, {})
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(rel_id, "consumer", consumer_data)
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        databases = json.loads(data["databases"])
        self.assertListEqual(databases, requested_database)
Example #2
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(MongoDBCharm)
        self.addCleanup(self.harness.cleanup)
        mongo_resource = {
            "registrypath": "mongodb:4.4.1",
            "username": "******",
            "password": "******"
        }
        self.harness.add_oci_resource("mongodb-image", mongo_resource)
        self.harness.begin()

    def test_replica_set_name_can_be_changed(self):
        self.harness.set_leader(True)

        # check default replica set name
        self.harness.charm.on.config_changed.emit()
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(replica_set_name(pod_spec), "rs0")

        # check replica set name can be changed
        self.harness.update_config({"replica_set_name": "new_name"})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(replica_set_name(pod_spec), "new_name")

    @patch("mongoserver.MongoDB.reconfigure_replica_set")
    def test_replica_set_is_reconfigured_when_peer_joins(self, mock_reconf):
        self.harness.set_leader(True)
        rel_id = self.harness.add_relation('mongodb', 'mongodb')
        self.harness.add_relation_unit(rel_id, 'mongodb/1')
        self.harness.update_relation_data(rel_id,
                                          'mongodb/1',
                                          {'private-address': '10.0.0.1'})
        peers = ['mongodb-0.mongodb-endpoints',
                 'mongodb-1.mongodb-endpoints']
        mock_reconf.assert_called_once_with(peers)

    def test_uri_data_is_generated_correctly(self):
        self.harness.set_leader(True)
        standalone_uri = self.harness.charm.mongo.standalone_uri
        replica_set_uri = self.harness.charm.mongo.replica_set_uri
        self.assertEqual(standalone_uri, 'mongodb://mongodb:27017/')
        self.assertEqual(replica_set_uri, 'mongodb://mongodb-0.mongodb-endpoints:27017/')

    def test_database_relation_data_is_set_correctly(self):
        self.harness.set_leader(True)
        rel_id = self.harness.add_relation('database', 'client')
        self.harness.add_relation_unit(rel_id, 'client/1')
        rel = self.harness.framework.model.get_relation('database', rel_id)
        unit = self.harness.framework.model.get_unit('client/1')
        self.harness.charm.on['database'].relation_changed.emit(rel, unit)
        got = self.harness.get_relation_data(rel_id, self.harness.framework.model.unit.name)
        expected = {
            'replicated': 'False',
            'replica_set_name': 'rs0',
            'standalone_uri': 'mongodb://mongodb:27017/',
            'replica_set_uri': 'mongodb://mongodb-0.mongodb-endpoints:27017/'
        }
        self.assertDictEqual(got, expected)
Example #3
 def test_get_relation_data(self):
     # language=YAML
     harness = Harness(CharmBase, meta='''
         name: test-app
         requires:
             db:
                 interface: pgsql
         ''')
     rel_id = harness.add_relation('db', 'postgresql')
     harness.update_relation_data(rel_id, 'postgresql', {'remote': 'data'})
     self.assertEqual(harness.get_relation_data(rel_id, 'test-app'), {})
     self.assertEqual(harness.get_relation_data(rel_id, 'test-app/0'), {})
     self.assertEqual(harness.get_relation_data(rel_id, 'test-app/1'), None)
     self.assertEqual(harness.get_relation_data(rel_id, 'postgresql'), {'remote': 'data'})
     with self.assertRaises(KeyError):
         # unknown relation id
         harness.get_relation_data(99, 'postgresql')
Example #4
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""

    def setUp(self) -> NoReturn:
        """Test setup."""
        self.harness = Harness(MongodbCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_configure_pod(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "mongodb",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "mongodb",
                            "containerPort": 27017,
                            "protocol": "TCP",
                        }
                    ],
                    "command": [
                        "mongod",
                        "--bind_ip",
                        "mongodb-endpoints",
                        "--port",
                        "27017",
                    ],
                }
            ],
        }

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_publish_mongodb_info(self) -> NoReturn:
        """Test to see if mongodb relation is updated."""
        expected_result = {
            "hostname": "mongodb",
            "mongodb_uri": "mongodb://mongodb:27017",
        }
        relation_id = self.harness.add_relation("mongodb", "nrf")
        self.harness.add_relation_unit(relation_id, "nrf/0")
        relation_data = self.harness.get_relation_data(relation_id, "mongodb")
        print("relation_data", relation_data)
        self.assertDictEqual(expected_result, relation_data)
Example #5
 def test_publish_relation_joined(self, mock_open_call, os_path_isdir,
                                  os_makedirs, os_symlink):
     harness = Harness(SimpleStreamsCharm)
     harness.begin()
     default_config = self.default_config()
     self.assertEqual(harness.charm._stored.config, {})
     harness.update_config(default_config)
     relation_id = harness.add_relation('publish', 'webserver')
     harness.add_relation_unit(relation_id, 'webserver/0')
     assert harness.get_relation_data(relation_id, harness._unit_name)\
            == {'path': '{}/publish'.format(default_config['image-dir'])}
Example #6
 def test_relation_set_app_not_leader(self):
     # language=YAML
     harness = Harness(RecordingCharm, meta='''
         name: test-charm
         requires:
             db:
                 interface: pgsql
         ''')
     harness.set_leader(False)
     rel_id = harness.add_relation('db', 'postgresql')
     harness.add_relation_unit(rel_id, 'postgresql/0')
     harness.begin()
     rel = harness.charm.model.get_relation('db')
     with self.assertRaises(ModelError):
         rel.data[harness.charm.app]['foo'] = 'bar'
     # The data has not actually been changed
     self.assertEqual(harness.get_relation_data(rel_id, 'test-charm'), {})
     harness.set_leader(True)
     rel.data[harness.charm.app]['foo'] = 'bar'
     self.assertEqual(harness.get_relation_data(rel_id, 'test-charm'), {'foo': 'bar'})
Example #7
 def test_relation_set_deletes(self):
     harness = Harness(CharmBase,
                       meta='''
         name: test-charm
         requires:
             db:
                 interface: pgsql
         ''')
     harness.begin()
     harness.set_leader(False)
     rel_id = harness.add_relation('db', 'postgresql')
     harness.update_relation_data(rel_id, 'test-charm/0', {'foo': 'bar'})
     harness.add_relation_unit(rel_id, 'postgresql/0')
     rel = harness.charm.model.get_relation('db', rel_id)
     del rel.data[harness.charm.model.unit]['foo']
     self.assertEqual({}, harness.get_relation_data(rel_id, 'test-charm/0'))
Example #8
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(CassandraOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.harness.set_leader(True)

    def test_relation_is_set(self):
        rel_id = self.harness.add_relation("cql", "otherapp")
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, "otherapp/0")
        self.harness.update_relation_data(rel_id, "otherapp", {})
        self.assertEqual(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["port"],
            "9042",
        )
Example #9
class TestRemoteWriteProvider(unittest.TestCase):
    @patch_network_get(private_address="1.1.1.1")
    def setUp(self, *unused):
        self.harness = Harness(PrometheusCharm)
        self.harness.set_model_info("lma", "123456")
        self.addCleanup(self.harness.cleanup)

    @patch.object(KubernetesServicePatch, "_service_object", new=lambda *args: None)
    @patch.object(Prometheus, "reload_configuration", new=lambda _: True)
    @patch_network_get(private_address="1.1.1.1")
    def test_port_is_set(self, *unused):
        self.harness.begin_with_initial_hooks()

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.assertEqual(
            self.harness.get_relation_data(rel_id, self.harness.charm.unit.name),
            {"remote_write": json.dumps({"url": "http://1.1.1.1:9090/api/v1/write"})},
        )
        self.assertIsInstance(self.harness.charm.unit.status, ActiveStatus)

    @patch.object(KubernetesServicePatch, "_service_object", new=lambda *args: None)
    @patch.object(Prometheus, "reload_configuration", new=lambda _: True)
    @patch_network_get(private_address="1.1.1.1")
    def test_alert_rules(self, *unused):
        self.harness.begin_with_initial_hooks()

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {"alert_rules": json.dumps(ALERT_RULES)},
        )

        self.harness.add_relation_unit(rel_id, "consumer/0")

        alerts = self.harness.charm.remote_write_provider.alerts()
        alerts = list(alerts.values())[0]  # drop the topology identifier
        self.assertEqual(len(alerts), 1)
        self.assertDictEqual(alerts, ALERT_RULES)
Example #10
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(AlertmanagerCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.harness.set_leader(True)
        self.harness.update_config({"pagerduty_key": "123"})

    def test_config_changed(self):
        self.harness.update_config({"pagerduty_key": "abc"})
        config = self.get_config()
        self.assertEqual(
            config["receivers"][0]["pagerduty_configs"][0]["service_key"],
            "abc")

    def test_port_change(self):
        rel_id = self.harness.add_relation("alerting", "prometheus")
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, "prometheus/0")
        self.harness.update_config({"port": "9096"})
        self.assertEqual(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["port"],
            "9096",
        )

    def test_bad_config(self):
        self.harness.update_config({"pagerduty_key": ""})
        self.assertEqual(type(self.harness.model.unit.status),
                         ops.model.BlockedStatus)

    # TODO figure out how to test scaling up the application
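    # A possible sketch for the TODO above (not taken from this charm's code):
    # scaling could be simulated by adding units to the charm's peer relation
    # through the Harness. The peer relation name "replicas" and the unit
    # names below are assumptions for illustration only.
    #
    #     def test_scale_up(self):
    #         peer_rel_id = self.harness.add_relation("replicas", "alertmanager")
    #         self.harness.add_relation_unit(peer_rel_id, "alertmanager/1")
    #         self.harness.add_relation_unit(peer_rel_id, "alertmanager/2")
    #         # after the peers join, the rendered config (see get_config below)
    #         # would be expected to list every peer unit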

    def get_config(self):
        pod_spec = self.harness.get_pod_spec()
        config_yaml = pod_spec[0]["containers"][0]["volumeConfig"][0]["files"][
            0]["content"]
        return yaml.safe_load(config_yaml)
Example #11
class TestDashboardConsumer(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(ConsumerCharm, meta=CONSUMER_META)
        self.harness._backend.model_name = "testing"
        self.harness._backend.model_uuid = "abcdefgh-1234"
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def test_consumer_does_not_set_dashboard_without_monitoring(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)

    def test_consumer_sets_dashboard_data(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)

    def test_consumer_can_remove_dashboard(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)
        self.harness.charm.consumer.remove_dashboard()
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": True,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        # the relation data should now reflect the removed dashboard
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        self.assertEqual(return_data, data)

    def test_consumer_resends_dashboard_after_monitoring_established(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)

        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)

    def test_consumer_invalidates_dashboard_after_monitoring_established_then_broken(
            self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)

        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        self.harness.remove_relation(mon_rel_id)
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": True,
            "invalidated_reason":
            "Waiting for a monitoring relation to send dashboard data",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)
Example #12
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""
    def setUp(self) -> NoReturn:
        """Test setup"""
        self.harness = Harness(Upf1Charm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with any relation."""
        annot = {
            "annotations": {
                "k8s.v1.cni.cncf.io/networks":
                '[\n{\n"name" : "n6-network",'
                '\n"interface": "eth1",\n"ips": []\n}\n]'
            },
            "securityContext": {
                "runAsUser": 0000,
                "runAsGroup": 0000
            },
        }
        service = [{
            "name": "upf-e",
            "labels": {
                "juju-app": "upf1"
            },
            "spec": {
                "selector": {
                    "juju-app": "upf1"
                },
                "ports": [{
                    "protocol": "TCP",
                    "port": 80,
                    "targetPort": 80
                }],
                "type": "ClusterIP",
            },
        }]

        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "upf1",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "upf1",
                    "containerPort": 2152,
                    "protocol": "UDP",
                }],
                "envConfig": {
                    "UE_RANGE": "60.60.0.0/24",
                    "STATIC_IP": "192.168.70.15",
                },
                "command": ["./upf_start.sh", "&"],
                "kubernetes": {
                    "securityContext": {
                        "privileged": True
                    }
                },
            }],
            "kubernetesResources": {
                "services": service,
                "pod": annot,
            },
        }
        # Check that the natapp data is not yet initialized
        self.assertIsNone(self.harness.charm.state.natapp_ip)
        self.assertIsNone(self.harness.charm.state.natapp_host)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "natapp")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        self.harness.update_relation_data(
            natapp_relation_id,
            "natapp",
            {
                "hostname": "natapp",
                "static_ip": "192.168.70.15"
            },
        )
        # Checking if natapp data is stored
        self.assertEqual(self.harness.charm.state.natapp_ip, "192.168.70.15")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_config_change(self) -> NoReturn:
        """Test installation without any relation."""

        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "upf1",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "upf1",
                    "containerPort": 2152,
                    "protocol": "UDP"
                }],
                "envConfig": {
                    "UE_RANGE": "60.60.0.0/24",
                    "STATIC_IP": "192.168.70.15",
                },
                "command": ["./upf_start.sh", "&"],
                "kubernetes": {
                    "securityContext": {
                        "privileged": True
                    }
                },
            }],
            "kubernetesResources": {
                "pod": {
                    "annotations": {
                        "k8s.v1.cni.cncf.io/networks":
                        '[\n{\n"name" : "n6-network",'
                        '\n"interface": "eth1",\n"ips": []\n}\n]'
                    },
                    "securityContext": {
                        "runAsUser": 0,
                        "runAsGroup": 0
                    },
                },
                "services": [{
                    "name": "upf-e",
                    "labels": {
                        "juju-app": "upf1"
                    },
                    "spec": {
                        "selector": {
                            "juju-app": "upf1"
                        },
                        "ports": [{
                            "protocol": "TCP",
                            "port": 80,
                            "targetPort": 80
                        }],
                        "type":
                        "ClusterIP",
                    },
                }],
            },
        }

        # Check that the natapp data is not yet initialized
        self.assertIsNone(self.harness.charm.state.natapp_ip)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "natapp")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        self.harness.update_relation_data(
            natapp_relation_id,
            "natapp",
            {
                "hostname": "natapp",
                "static_ip": "192.168.70.15"
            },
        )

        # Checking if natapp data is stored
        self.assertEqual(self.harness.charm.state.natapp_ip, "192.168.70.15")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_natapp_app_relation_changed(self) -> NoReturn:
        """Test to see if upf app relation is updated."""
        self.harness.charm.on.config_changed.emit()

        self.assertIsNone(self.harness.charm.state.natapp_ip)

        # Initializing the natapp relation with a remote upf app
        natapp_relation_id = self.harness.add_relation("natapp", "upf")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        relation_data = {"static_ip": "192.168.70.15"}
        self.harness.update_relation_data(natapp_relation_id, "natapp/0",
                                          relation_data)

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_upf_info(self) -> NoReturn:
        """Test to see if upf relation is updated."""
        expected_result = {
            "private_address": "127.1.1.1",
        }
        relation_id = self.harness.add_relation("upf", "smf")
        relation_data = {"private_address": "127.1.1.1"}
        self.harness.update_relation_data(relation_id, "upf1", relation_data)
        relation_data = self.harness.get_relation_data(relation_id, "upf1")
        self.assertDictEqual(expected_result, relation_data)
Example #13
class TestDashboardProvider(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(ProviderCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def setup_charm_relations(self, multi=False):
        """Create relations used by test cases.
        Args:
            multi: a boolean indicating if multiple relations must be
            created.
        """
        self.harness.charm.grafana_provider._stored.active_sources = [
            {
                "source-name": "testing_abcdefgh-1234_monitoring"
            },
            {
                "source-name": "testing_abcdefgh-2345_monitoring"
            },
        ]

        rel_ids = []
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        rel_ids.append(rel_id)
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "dashboards": json.dumps(SOURCE_DATA),
            },
        )
        if multi:
            rel_id = self.harness.add_relation("grafana-source",
                                               "other-consumer")
            rel_ids.append(rel_id)
            self.harness.add_relation_unit(rel_id, "other-consumer/0")
            self.harness.update_relation_data(
                rel_id,
                "other-consumer",
                {
                    "dashboards": json.dumps(SOURCE_DATA),
                },
            )

        return rel_ids

    def test_provider_notifies_on_new_dashboards(self):
        self.assertEqual(
            len(self.harness.charm.grafana_provider._stored.dashboards), 0)
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        self.setup_charm_relations()
        self.assertEqual(self.harness.charm._stored.dashboard_events, 1)

        # Terrible type conversions again
        stored = self.harness.charm.grafana_provider.dashboards[0]
        stored = dict(stored)
        stored["data"] = dict(stored["data"])
        self.maxDiff = None
        self.assertEqual(
            stored,
            {
                "target": "testing_abcdefgh-1234_monitoring",
                "data": {
                    "monitoring_identifier":
                    "testing_abcdefgh-1234_monitoring",
                    "monitoring_target":
                    "Consumer-tester [ testing / abcdefgh-1234 ]",
                    "monitoring_query":
                    "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
                    "template": DASHBOARD_TMPL,
                    "removed": False,
                    "invalidated": False,
                    "invalidated_reason": "",
                },
                "dashboard": DASHBOARD_RENDERED.rstrip(),
            },
        )

    def test_provider_error_on_bad_template(self):
        self.assertEqual(
            len(self.harness.charm.grafana_provider._stored.dashboards), 0)
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        rels = self.setup_charm_relations()
        self.assertEqual(self.harness.charm._stored.dashboard_events, 1)

        bad_data = copy.deepcopy(SOURCE_DATA)
        bad_data["template"] = "JUNK! {{{novar}}}"

        self.harness.update_relation_data(
            rels[0],
            "consumer",
            {
                "dashboards": json.dumps(bad_data),
            },
        )

        data = json.loads(
            self.harness.get_relation_data(
                rels[0], self.harness.model.app.name)["event"])
        self.assertEqual(data["valid"], False)
        self.assertIn(
            "Cannot add Grafana dashboard. Template is not valid Jinja",
            data["errors"])

    def test_provider_error_on_invalidation(self):
        self.assertEqual(
            len(self.harness.charm.grafana_provider._stored.dashboards), 0)
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        rels = self.setup_charm_relations()
        self.assertEqual(self.harness.charm._stored.dashboard_events, 1)

        bad_data = copy.deepcopy(SOURCE_DATA)
        bad_data["invalidated"] = True
        bad_data["invalidated_reason"] = "Doesn't matter"

        self.harness.update_relation_data(
            rels[0],
            "consumer",
            {
                "dashboards": json.dumps(bad_data),
            },
        )

        data = json.loads(
            self.harness.get_relation_data(
                rels[0], self.harness.model.app.name)["event"])
        self.assertEqual(data["valid"], False)
        self.assertIn("Doesn't matter", data["errors"])

    def test_provider_error_on_no_sources(self):
        self.assertEqual(
            len(self.harness.charm.grafana_provider._stored.dashboards), 0)
        self.assertEqual(self.harness.charm._stored.dashboard_events, 0)
        rels = self.setup_charm_relations()
        self.assertEqual(self.harness.charm._stored.dashboard_events, 1)
        self.harness.charm.grafana_provider._stored.active_sources = []

        self.harness.update_relation_data(
            rels[0],
            "consumer",
            {
                "dashboards": json.dumps(SOURCE_DATA),
            },
        )

        data = json.loads(
            self.harness.get_relation_data(
                rels[0], self.harness.model.app.name)["event"])
        self.assertEqual(data["valid"], False)
        self.assertIn("No configured datasources", data["errors"])
Example #14
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(MongoDBCharm)
        self.addCleanup(self.harness.cleanup)
        mongo_resource = {
            "registrypath": "mongodb:4.4.1",
            "username": "******",
            "password": "******"
        }
        self.harness.add_oci_resource("mongodb-image", mongo_resource)
        self.harness.begin()
        self.peer_rel_id = self.harness.add_relation('mongodb', 'mongodb')

    @patch('ops.testing._TestingPebbleClient.pull')
    def test_replica_set_name_can_be_changed(self, _):
        self.harness.set_leader(True)
        self.harness.container_pebble_ready("mongodb")

        # check default replica set name
        plan = self.harness.get_container_pebble_plan("mongodb")
        self.assertEqual(replica_set_name(plan), "rs0")

        # check replica set name can be changed
        self.harness.update_config({"replica_set_name": "new_name"})
        plan = self.harness.get_container_pebble_plan("mongodb")
        self.assertEqual(replica_set_name(plan), "new_name")

    @patch("mongoserver.MongoDB.reconfigure_replica_set")
    def test_replica_set_is_reconfigured_when_peer_joins(self, mock_reconf):
        self.harness.set_leader(True)
        self.harness.add_relation_unit(self.peer_rel_id, 'mongodb/1')
        self.harness.update_relation_data(self.peer_rel_id, 'mongodb/1',
                                          {'private-address': '10.0.0.1'})
        peers = [
            'mongodb-k8s-0.mongodb-k8s-endpoints',
            'mongodb-k8s-1.mongodb-k8s-endpoints'
        ]
        mock_reconf.assert_called_once_with(peers)

    def test_replica_set_uri_data_is_generated_correctly(self):
        self.harness.set_leader(True)
        replica_set_uri = self.harness.charm.mongo.replica_set_uri()
        data = self.harness.get_relation_data(self.peer_rel_id,
                                              self.harness.model.app.name)
        cred = "root:{}".format(data['root_password'])
        self.assertEqual(
            replica_set_uri,
            'mongodb://{}@mongodb-k8s-0.mongodb-k8s-endpoints:27017/admin'.
            format(cred))

    def test_leader_sets_key_and_root_credentials(self):
        self.harness.set_leader(False)
        self.harness.set_leader(True)
        data = self.harness.get_relation_data(self.peer_rel_id,
                                              self.harness.model.app.name)
        self.assertIsNotNone(data['root_password'])
        self.assertIsNotNone(data['security_key'])

    @patch('mongoserver.MongoDB.version')
    def test_charm_provides_version(self, mock_version):
        self.harness.set_leader(True)
        mock_version.return_value = "4.4.1"
        version = self.harness.charm.mongo.version()
        self.assertEqual(version, "4.4.1")

    @patch('mongoserver.MongoDB.is_ready')
    def test_start_is_deferred_if_mongo_is_not_ready(self, is_ready):
        is_ready.return_value = False
        self.harness.set_leader(True)
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.charm.on.start.emit()
            is_ready.assert_called()
            for message in sorted(logger.output):
                if "DEBUG:ops.framework:Deferring" in message:
                    self.assertIn("StartEvent", message)

    @patch('mongoserver.MongoDB.initialize_replica_set')
    @patch('mongoserver.MongoDB.is_ready')
    def test_start_is_deferred_if_mongo_is_not_initialized(
            self, is_ready, initialize):
        is_ready.return_value = True
        initialize.side_effect = RuntimeError("Not Initialized")
        self.harness.set_leader(True)
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.charm.on.start.emit()
            is_ready.assert_called()
            self.assertIn(
                "INFO:charm:Deferring on_start since : error=Not Initialized",
                sorted(logger.output))
Example #15
class TestDBRouter(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(MySQLOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.peer_relation_id = self.harness.add_relation(
            "database-peers", "database-peers")
        self.harness.add_relation_unit(self.peer_relation_id, "mysql/1")
        self.db_router_relation_id = self.harness.add_relation(
            "db-router", "app")
        self.harness.add_relation_unit(self.db_router_relation_id, "app/0")
        self.charm = self.harness.charm

    @patch_network_get(private_address="1.1.1.1")
    @patch("relations.db_router.generate_random_password",
           return_value="super_secure_password")
    @patch("mysqlsh_helpers.MySQL.get_cluster_primary_address",
           return_value="2.2.2.2")
    @patch("mysqlsh_helpers.MySQL.does_mysql_user_exist", return_value=False)
    @patch("mysqlsh_helpers.MySQL.configure_mysqlrouter_user")
    @patch("mysqlsh_helpers.MySQL.create_application_database_and_scoped_user")
    def test_db_router_relation_changed(
        self,
        _create_application_database_and_scoped_user,
        _configure_mysqlrouter_user,
        _does_mysql_user_exist,
        _get_cluster_primary_address,
        _generate_random_password,
    ):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        # confirm that the relation databag is empty
        db_router_relation_databag = self.harness.get_relation_data(
            self.db_router_relation_id, self.harness.charm.app)
        db_router_relation = self.charm.model.get_relation("db-router")
        app_unit = list(db_router_relation.units)[0]

        self.assertEqual(db_router_relation_databag, {})
        self.assertEqual(db_router_relation.data.get(app_unit), {})
        self.assertEqual(db_router_relation.data.get(self.charm.unit), {})

        # update the app leader unit data to trigger db_router_relation_changed event
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertEqual(_generate_random_password.call_count, 2)
        self.assertEqual(_does_mysql_user_exist.call_count, 2)
        self.assertEqual(
            sorted(_does_mysql_user_exist.mock_calls),
            sorted([
                call("mysqlrouteruser", "1.1.1.3"),
                call("keystone_user", "1.1.1.2"),
            ]),
        )

        _configure_mysqlrouter_user.assert_called_once_with(
            "mysqlrouteruser", "super_secure_password", "1.1.1.3", "app/0")
        _create_application_database_and_scoped_user.assert_called_once_with(
            "keystone_database", "keystone_user", "super_secure_password",
            "1.1.1.2", "app/0")

        # confirm that credentials in the mysql leader unit databag is set correctly
        self.assertEqual(
            db_router_relation.data.get(app_unit),
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertEqual(
            db_router_relation.data.get(self.charm.unit),
            {
                "db_host": '"2.2.2.2"',
                "mysqlrouter_password": '******',
                "mysqlrouter_allowed_units": '"app/0"',
                "MRUP_password": '******',
                "MRUP_allowed_units": '"app/0"',
            },
        )

    @patch_network_get(private_address="1.1.1.1")
    @patch("relations.db_router.generate_random_password",
           return_value="super_secure_password")
    @patch("mysqlsh_helpers.MySQL.does_mysql_user_exist", return_value=False)
    @patch("mysqlsh_helpers.MySQL.configure_mysqlrouter_user")
    @patch("mysqlsh_helpers.MySQL.create_application_database_and_scoped_user")
    def test_db_router_relation_changed_exceptions(
        self,
        _create_application_database_and_scoped_user,
        _configure_mysqlrouter_user,
        _does_mysql_user_exist,
        _generate_random_password,
    ):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        # confirm that the relation databag is empty
        db_router_relation_databag = self.harness.get_relation_data(
            self.db_router_relation_id, self.harness.charm.app)
        db_router_relation = self.charm.model.get_relation("db-router")
        app_unit = list(db_router_relation.units)[0]

        self.assertEqual(db_router_relation_databag, {})
        self.assertEqual(db_router_relation.data.get(app_unit), {})
        self.assertEqual(db_router_relation.data.get(self.charm.unit), {})

        # test an exception while configuring mysql users
        _does_mysql_user_exist.side_effect = MySQLCheckUserExistenceError
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertTrue(
            isinstance(self.harness.model.unit.status, BlockedStatus))

        _does_mysql_user_exist.reset_mock()

        # test an exception while creating the mysql router user
        _configure_mysqlrouter_user.side_effect = MySQLConfigureRouterUserError
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertTrue(
            isinstance(self.harness.model.unit.status, BlockedStatus))

        _configure_mysqlrouter_user.reset_mock()

        # test an exception while creating the application database and scoped user
        _create_application_database_and_scoped_user.side_effect = (
            MySQLCreateApplicationDatabaseAndScopedUserError)
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertTrue(
            isinstance(self.harness.model.unit.status, BlockedStatus))

        _create_application_database_and_scoped_user.reset_mock()
Example #16
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(RedisCharm)
        self.addCleanup(self.harness.cleanup)
        redis_resource = {
            "registrypath": "ubuntu/redis"
        }
        self.harness.add_oci_resource("redis-image", redis_resource)
        self.harness.begin()

    def test_on_start_when_unit_is_not_leader(self):
        # Given
        self.harness.set_leader(False)
        # When
        self.harness.charm.on.start.emit()
        # Then
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_start_when_redis_is_not_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = False
        # When
        self.harness.charm.on.start.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(
            self.harness.charm.unit.status,
            WaitingStatus("Waiting for Redis ...")
        )

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_start_when_redis_is_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = True
        # When
        self.harness.charm.on.start.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    def test_on_stop(self):
        # When
        self.harness.charm.on.stop.emit()
        # Then
        self.assertEqual(
            self.harness.charm.unit.status,
            MaintenanceStatus('Pod is terminating.')
        )

    def test_on_config_changed_when_unit_is_not_leader(self):
        # Given
        self.harness.set_leader(False)
        # When
        self.harness.charm.on.config_changed.emit()
        # Then
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_config_changed_when_unit_is_leader_and_redis_is_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = True
        # When
        self.harness.charm.on.config_changed.emit()
        # Then
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    @mock.patch.object(OCIImageResource, 'fetch')
    def test_on_config_changed_when_unit_is_leader_but_image_fetch_breaks(self, fetch):
        # Given
        self.harness.set_leader(True)
        fetch.side_effect = OCIImageResourceError("redis-image")
        # When
        self.harness.charm.on.config_changed.emit()
        # Then
        fetch.assert_called_once_with()
        self.assertEqual(
            self.harness.charm.unit.status,
            BlockedStatus("Error fetching image information.")
        )

    def test_on_update_status_when_unit_is_not_leader(self):
        # Given
        self.harness.set_leader(False)
        # When
        self.harness.charm.on.update_status.emit()
        # Then
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_update_status_when_redis_is_not_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = False
        # When
        self.harness.charm.on.update_status.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(
            self.harness.charm.unit.status,
            WaitingStatus("Waiting for Redis ...")
        )

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_update_status_when_redis_is_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = True
        # When
        self.harness.charm.on.update_status.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    @mock.patch.object(RedisProvides, '_bind_address')
    def test_on_relation_changed_status_when_unit_is_leader(self, bind_address):
        # Given
        self.harness.set_leader(True)
        bind_address.return_value = '10.2.1.5'

        rel_id = self.harness.add_relation('redis', 'wordpress')
        self.harness.add_relation_unit(rel_id, 'wordpress/0')
        # When
        self.harness.update_relation_data(rel_id, 'wordpress/0', {})
        rel_data = self.harness.get_relation_data(
            rel_id, self.harness.charm.unit.name
        )
        # Then
        self.assertEqual(rel_data.get('hostname'), '10.2.1.5')
        self.assertEqual(rel_data.get('port'), '6379')
Example #17
class TestDashboardProvider(unittest.TestCase):
    def setUp(self):
        patcher = patch(
            "charms.grafana_k8s.v0.grafana_dashboard._resolve_dir_against_charm_path"
        )
        self.mock_resolve_dir = patcher.start()
        self.addCleanup(patcher.stop)

        self.mock_resolve_dir.return_value = "./tests/unit/dashboard_templates"
        self.harness = Harness(ProviderCharm, meta=CONSUMER_META)
        self.harness._backend.model_name = "testing"
        self.harness._backend.model_uuid = "abcdefgh-1234"
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.harness.set_leader(True)

    def test_provider_sets_dashboard_data(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "other_app")
        self.harness.add_relation_unit(rel_id, "other_app/0")
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])

        self.assertDictEqual(
            {
                "templates": RELATION_TEMPLATES_DATA,
                "uuid": "12345678",
            },
            data,
        )

    def test_provider_can_remove_programmatically_added_dashboards(self):
        self.harness.charm.provider.add_dashboard("third")

        rel_id = self.harness.add_relation("grafana-dashboard", "other_app")
        self.harness.add_relation_unit(rel_id, "other_app/0")
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])

        expected_data_builtin_dashboards = {
            "templates": copy.deepcopy(RELATION_TEMPLATES_DATA),
            "uuid": "12345678",
        }

        expected_data = copy.deepcopy(expected_data_builtin_dashboards)
        expected_templates = expected_data["templates"]
        expected_templates["prog:uC2Arx+2"] = {  # type: ignore
            "charm": "provider-tester",
            "content": "/Td6WFoAAATm1rRGAgAhARYAAAB0L+WjAQAEdGhpcmQAAAAAtr5hbOrisy0AAR0FuC2Arx+2830BAAAAAARZWg==",
            "juju_topology": {
                "model": "testing",
                "model_uuid": "abcdefgh-1234",
                "application": "provider-tester",
                "unit": "provider-tester/0",
            },
        }

        self.assertDictEqual(expected_data, actual_data)
        self.harness.charm.provider.remove_non_builtin_dashboards()
        self.assertEqual(
            expected_data_builtin_dashboards,
            json.loads(
                self.harness.get_relation_data(
                    rel_id, self.harness.model.app.name)["dashboards"]),
        )

    def test_provider_cannot_remove_builtin_dashboards(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "other_app")
        self.harness.add_relation_unit(rel_id, "other_app/0")
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])

        expected_data = {
            "templates": RELATION_TEMPLATES_DATA,
            "uuid": "12345678",
        }

        self.assertDictEqual(expected_data, actual_data)

        self.harness.charm.provider.remove_non_builtin_dashboards()
        self.assertEqual(
            expected_data,
            json.loads(
                self.harness.get_relation_data(
                    rel_id, self.harness.model.app.name)["dashboards"]),
        )

    def test_provider_destroys_old_data_on_rescan(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "other_app")
        self.harness.add_relation_unit(rel_id, "other_app/0")
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        expected_data = {
            "templates": RELATION_TEMPLATES_DATA,
            "uuid": "12345678",
        }
        self.assertDictEqual(expected_data, actual_data)

        self.harness.charm.provider._dashboards_path = "./tests/unit/manual_dashboards"
        self.harness.charm.provider._reinitialize_dashboard_data()
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        expected_data = {
            "templates": MANUAL_TEMPLATE_DATA,
            "uuid": "12345678",
        }
        self.assertDictEqual(expected_data, actual_data)

    def test_provider_empties_data_on_exception(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "other_app")
        self.harness.add_relation_unit(rel_id, "other_app/0")
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        expected_data = {
            "templates": RELATION_TEMPLATES_DATA,
            "uuid": "12345678",
        }
        self.assertDictEqual(expected_data, actual_data)

        self.mock_resolve_dir.side_effect = InvalidDirectoryPathError(
            "foo", "bar")
        self.harness.charm.provider._reinitialize_dashboard_data()
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        empty_data = {
            "templates": {},
            "uuid": "12345678",
        }
        self.assertDictEqual(empty_data, actual_data)

    def test_provider_clears_data_on_empty_dir(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "other_app")
        self.harness.add_relation_unit(rel_id, "other_app/0")
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        expected_data = {
            "templates": RELATION_TEMPLATES_DATA,
            "uuid": "12345678",
        }
        self.assertDictEqual(expected_data, actual_data)

        self.harness.charm.provider._dashboards_path = "./tests/unit/empty_dashboards"
        self.harness.charm.provider._reinitialize_dashboard_data()
        actual_data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        empty_data = {
            "templates": {},
            "uuid": "12345678",
        }
        self.assertDictEqual(empty_data, actual_data)
Example #18
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""
    def setUp(self) -> NoReturn:
        """Test setup."""
        self.harness = Harness(MysqlCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_config_changed(self):
        """Test script for pod spec."""
        self.harness.charm.on.config_changed.emit()
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "mysql",
                "image":
                "mysql:5.7",
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "sql",
                    "containerPort": 3306,
                    "protocol": "TCP"
                }],
                "envConfig": {
                    "MYSQL_ROOT_PASSWORD": "******",
                },
            }],
        }
        pod_spec, _ = self.harness.get_pod_spec()

        self.assertDictEqual(expected_result, pod_spec)
        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_pcscf_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated."""
        expected_result = {
            "hostname": "mysql",
            "mysql_user": "******",
            "mysql_pwd": "root",
        }
        relation_id = self.harness.add_relation("mysql", "pcscf")
        self.harness.add_relation_unit(relation_id, "pcscf/0")
        relation_data = self.harness.get_relation_data(relation_id, "mysql")
        self.assertDictEqual(expected_result, relation_data)

    def test_publish_icscf_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated."""
        expected_result = {
            "hostname": "mysql",
            "mysql_user": "******",
            "mysql_pwd": "root",
        }
        relation_id = self.harness.add_relation("mysql", "icscf")
        self.harness.add_relation_unit(relation_id, "icscf/0")
        relation_data = self.harness.get_relation_data(relation_id, "mysql")
        self.assertDictEqual(expected_result, relation_data)

    def test_publish_scscf_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated."""
        expected_result = {
            "hostname": "mysql",
            "mysql_user": "******",
            "mysql_pwd": "root",
        }
        relation_id = self.harness.add_relation("mysql", "scscf")
        self.harness.add_relation_unit(relation_id, "scscf/0")
        relation_data = self.harness.get_relation_data(relation_id, "mysql")
        self.assertDictEqual(expected_result, relation_data)

    def test_publish_hss_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated."""
        expected_result = {
            "hostname": "mysql",
            "mysql_user": "******",
            "mysql_pwd": "root",
        }
        relation_id = self.harness.add_relation("mysql", "hss")
        self.harness.add_relation_unit(relation_id, "hss/0")
        relation_data = self.harness.get_relation_data(relation_id, "mysql")
        self.assertDictEqual(expected_result, relation_data)
Example #19
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""
    def setUp(self) -> NoReturn:
        """Test setup."""
        self.harness = Harness(HssCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with relation."""
        self.harness.charm.on.start.emit()
        expected_result = {
            "version": 3,
            "containers": [{
                "name": "hss",
                "image": "localhost:32000/ims_hss:1.0",
                "imagePullPolicy": "Always",
                "ports": [
                    {
                        "name": "diahss",
                        "containerPort": 3868,
                        "protocol": "TCP"
                    },
                    {
                        "name": "hss",
                        "containerPort": 8080,
                        "protocol": "TCP"
                    },
                ],
                "envConfig": {
                    "MYSQL_HOST": "mysql-endpoints",
                    "MYSQL_USER": "******",
                    "MYSQL_ROOT_PASSWORD": "******",
                },
                "command": ["./init_hss.sh", "&"],
            }],
        }
        # The mysql relation data should not be stored yet
        self.assertIsNone(self.harness.charm.state.mysql)

        # Initializing mysql relation
        mysql_relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(mysql_relation_id, "mysql/0")
        self.harness.update_relation_data(
            mysql_relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Checking if mysql data is stored
        self.assertEqual(self.harness.charm.state.mysql, "mysql")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_mysql_app_relation_changed(self) -> NoReturn:
        """Test to see if mysql app relation is updated."""
        self.harness.charm.on.start.emit()

        self.assertIsNone(self.harness.charm.state.mysql)

        relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(relation_id, "mysql/0")
        self.harness.update_relation_data(
            relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_hss_info(self) -> NoReturn:
        """Test to see if hss relation is updated."""
        expected_result = {"private-address": "127.1.1.1", "hostname": "hss"}
        self.harness.charm.on.start.emit()
        relation_id = self.harness.add_relation("dns-source", "dns_source")
        relation_data = {"private-address": "127.1.1.1", "hostname": "hss"}
        self.harness.update_relation_data(relation_id, "dns_source",
                                          relation_data)
        relation_data = self.harness.get_relation_data(relation_id,
                                                       "dns_source")
        self.assertDictEqual(expected_result, relation_data)
Example #20
class TestEndpointAggregator(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(EndpointAggregatorCharm, meta=AGGREGATOR_META)
        self.harness.set_model_info(name="testmodel", uuid="1234567890")
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin_with_initial_hooks()

    def test_adding_prometheus_then_target_forwards_a_labeled_scrape_job(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        target_rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION,
                                                  "target-app")
        self.harness.add_relation_unit(target_rel_id, "target-app/0")

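        # Publish a scrape target through the target unit's relation data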
        hostname = "scrape_target_0"
        port = "1234"
        self.harness.update_relation_data(
            target_rel_id,
            "target-app/0",
            {
                "hostname": f"{hostname}",
                "port": f"{port}",
            },
        )

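        # The aggregator should forward a single labeled scrape job to Prometheus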
        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))
        expected_jobs = [{
            "job_name":
            "juju_testmodel_1234567_target-app_prometheus_scrape",
            "static_configs": [{
                "targets": ["scrape_target_0:1234"],
                "labels": {
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567890",
                    "juju_application": "target-app",
                    "juju_unit": "target-app/0",
                    "host": "scrape_target_0",
                },
            }],
            "relabel_configs": [RELABEL_INSTANCE_CONFIG],
        }]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_adding_prometheus_then_target_forwards_a_labeled_alert_rule(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id = self.harness.add_relation(ALERT_RULES_RELATION,
                                                       "rules-app")
        self.harness.add_relation_unit(alert_rules_rel_id, "rules-app/0")
        self.harness.update_relation_data(alert_rules_rel_id, "rules-app/0",
                                          {"groups": ALERT_RULE_1})

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)

        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 1)
        group = groups[0]

        expected_group = {
            "name":
            "juju_testmodel_1234567_rules-app_alert_rules",
            "rules": [{
                "alert": "CPU_Usage",
                "expr":
                'cpu_usage_idle{is_container!="True", group="promoagents-juju"} < 10',
                "for": "5m",
                "labels": {
                    "override_group_by": "host",
                    "severity": "page",
                    "cloud": "juju",
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567",
                    "juju_application": "rules-app",
                    "juju_unit": "rules-app/0",
                },
                "annotations": {
                    "description":
                    "Host {{ $labels.host }} has had <  10% idle cpu for the last 5m\n",
                    "summary":
                    "Host {{ $labels.host }} CPU free is less than 10%",
                },
            }],
        }
        self.assertDictEqual(group, expected_group)

    def test_adding_target_then_prometheus_forwards_a_labeled_scrape_job(self):
        target_rel_id = self.harness.add_relation(SCRAPE_TARGET_RELATION,
                                                  "target-app")
        self.harness.add_relation_unit(target_rel_id, "target-app/0")

        hostname = "scrape_target_0"
        port = "1234"
        self.harness.update_relation_data(
            target_rel_id,
            "target-app/0",
            {
                "hostname": f"{hostname}",
                "port": f"{port}",
            },
        )

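        # Prometheus joins after the target; the stored scrape job should still be forwarded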
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))
        expected_jobs = [{
            "job_name":
            "juju_testmodel_1234567_target-app_prometheus_scrape",
            "static_configs": [{
                "targets": ["scrape_target_0:1234"],
                "labels": {
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567890",
                    "juju_application": "target-app",
                    "juju_unit": "target-app/0",
                    "host": "scrape_target_0",
                },
            }],
            "relabel_configs": [RELABEL_INSTANCE_CONFIG],
        }]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_adding_target_then_prometheus_forwards_a_labeled_alert_rule(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id = self.harness.add_relation(ALERT_RULES_RELATION,
                                                       "rules-app")
        self.harness.add_relation_unit(alert_rules_rel_id, "rules-app/0")
        self.harness.update_relation_data(alert_rules_rel_id, "rules-app/0",
                                          {"groups": ALERT_RULE_1})

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)

        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 1)
        group = groups[0]

        expected_group = {
            "name":
            "juju_testmodel_1234567_rules-app_alert_rules",
            "rules": [{
                "alert": "CPU_Usage",
                "expr":
                'cpu_usage_idle{is_container!="True", group="promoagents-juju"} < 10',
                "for": "5m",
                "labels": {
                    "override_group_by": "host",
                    "severity": "page",
                    "cloud": "juju",
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567",
                    "juju_application": "rules-app",
                    "juju_unit": "rules-app/0",
                },
                "annotations": {
                    "description":
                    "Host {{ $labels.host }} has had <  10% idle cpu for the last 5m\n",
                    "summary":
                    "Host {{ $labels.host }} CPU free is less than 10%",
                },
            }],
        }
        self.assertDictEqual(group, expected_group)

    def test_scrape_jobs_from_multiple_target_applications_are_forwarded(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        target_rel_id_1 = self.harness.add_relation(SCRAPE_TARGET_RELATION,
                                                    "target-app-1")
        self.harness.add_relation_unit(target_rel_id_1, "target-app-1/0")
        self.harness.update_relation_data(
            target_rel_id_1,
            "target-app-1/0",
            {
                "hostname": "scrape_target_0",
                "port": "1234",
            },
        )

        target_rel_id_2 = self.harness.add_relation(SCRAPE_TARGET_RELATION,
                                                    "target-app-2")
        self.harness.add_relation_unit(target_rel_id_2, "target-app-2/0")
        self.harness.update_relation_data(
            target_rel_id_2,
            "target-app-2/0",
            {
                "hostname": "scrape_target_1",
                "port": "5678",
            },
        )

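        # Targets from both applications should be forwarded as separate scrape jobs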
        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))
        self.assertEqual(len(scrape_jobs), 2)

        expected_jobs = [
            {
                "job_name":
                "juju_testmodel_1234567_target-app-1_prometheus_scrape",
                "static_configs": [{
                    "targets": ["scrape_target_0:1234"],
                    "labels": {
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567890",
                        "juju_application": "target-app-1",
                        "juju_unit": "target-app-1/0",
                        "host": "scrape_target_0",
                    },
                }],
                "relabel_configs": [RELABEL_INSTANCE_CONFIG],
            },
            {
                "job_name":
                "juju_testmodel_1234567_target-app-2_prometheus_scrape",
                "static_configs": [{
                    "targets": ["scrape_target_1:5678"],
                    "labels": {
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567890",
                        "juju_application": "target-app-2",
                        "juju_unit": "target-app-2/0",
                        "host": "scrape_target_1",
                    },
                }],
                "relabel_configs": [RELABEL_INSTANCE_CONFIG],
            },
        ]

        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_alert_rules_from_multiple_target_applications_are_forwarded(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id_1 = self.harness.add_relation(
            ALERT_RULES_RELATION, "rules-app-1")
        self.harness.add_relation_unit(alert_rules_rel_id_1, "rules-app-1/0")
        self.harness.update_relation_data(
            alert_rules_rel_id_1,
            "rules-app-1/0",
            {"groups": ALERT_RULE_1},
        )

        alert_rules_rel_id_2 = self.harness.add_relation(
            ALERT_RULES_RELATION, "rules-app-2")
        self.harness.add_relation_unit(alert_rules_rel_id_2, "rules-app-2/0")
        self.harness.update_relation_data(
            alert_rules_rel_id_2,
            "rules-app-2/0",
            {"groups": ALERT_RULE_2},
        )

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)

        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 2)
        expected_groups = [
            {
                "name":
                "juju_testmodel_1234567_rules-app-1_alert_rules",
                "rules": [{
                    "alert": "CPU_Usage",
                    "expr":
                    'cpu_usage_idle{is_container!="True", group="promoagents-juju"} < 10',
                    "for": "5m",
                    "labels": {
                        "override_group_by": "host",
                        "severity": "page",
                        "cloud": "juju",
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567",
                        "juju_application": "rules-app-1",
                        "juju_unit": "rules-app-1/0",
                    },
                    "annotations": {
                        "description":
                        "Host {{ $labels.host }} has had <  10% idle cpu for the last 5m\n",
                        "summary":
                        "Host {{ $labels.host }} CPU free is less than 10%",
                    },
                }],
            },
            {
                "name":
                "juju_testmodel_1234567_rules-app-2_alert_rules",
                "rules": [{
                    "alert": "DiskFull",
                    "expr":
                    'disk_free{is_container!="True", fstype!~".*tmpfs|squashfs|overlay"}  <1024',
                    "for": "5m",
                    "labels": {
                        "override_group_by": "host",
                        "severity": "page",
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567",
                        "juju_application": "rules-app-2",
                        "juju_unit": "rules-app-2/0",
                    },
                    "annotations": {
                        "description":
                        "Host {{ $labels.host}} {{ $labels.path }} is full\nsummary: Host {{ $labels.host }} {{ $labels.path}} is full\n"
                    },
                }],
            },
        ]
        self.assertListEqual(groups, expected_groups)

    def test_scrape_job_removal_differentiates_between_applications(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        target_rel_id_1 = self.harness.add_relation("prometheus-target",
                                                    "target-app-1")
        self.harness.add_relation_unit(target_rel_id_1, "target-app-1/0")
        self.harness.update_relation_data(
            target_rel_id_1,
            "target-app-1/0",
            {
                "hostname": "scrape_target_0",
                "port": "1234",
            },
        )

        target_rel_id_2 = self.harness.add_relation("prometheus-target",
                                                    "target-app-2")
        self.harness.add_relation_unit(target_rel_id_2, "target-app-2/0")
        self.harness.update_relation_data(
            target_rel_id_2,
            "target-app-2/0",
            {
                "hostname": "scrape_target_1",
                "port": "5678",
            },
        )

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))
        self.assertEqual(len(scrape_jobs), 2)

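        # Removing one application's unit should drop only that application's scrape job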
        self.harness.remove_relation_unit(target_rel_id_2, "target-app-2/0")
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))
        self.assertEqual(len(scrape_jobs), 1)

        expected_jobs = [{
            "job_name":
            "juju_testmodel_1234567_target-app-1_prometheus_scrape",
            "static_configs": [{
                "targets": ["scrape_target_0:1234"],
                "labels": {
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567890",
                    "juju_application": "target-app-1",
                    "juju_unit": "target-app-1/0",
                    "host": "scrape_target_0",
                },
            }],
            "relabel_configs": [RELABEL_INSTANCE_CONFIG],
        }]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_alert_rules_removal_differentiates_between_applications(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id_1 = self.harness.add_relation(
            "prometheus-rules", "rules-app-1")
        self.harness.add_relation_unit(alert_rules_rel_id_1, "rules-app-1/0")
        self.harness.update_relation_data(
            alert_rules_rel_id_1,
            "rules-app-1/0",
            {"groups": ALERT_RULE_1},
        )

        alert_rules_rel_id_2 = self.harness.add_relation(
            "prometheus-rules", "rules-app-2")
        self.harness.add_relation_unit(alert_rules_rel_id_2, "rules-app-2/0")
        self.harness.update_relation_data(
            alert_rules_rel_id_2,
            "rules-app-2/0",
            {"groups": ALERT_RULE_2},
        )

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)

        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 2)

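        # Removing one application's unit should leave the other application's alert group intact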
        self.harness.remove_relation_unit(alert_rules_rel_id_2,
                                          "rules-app-2/0")
        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 1)

        expected_groups = [
            {
                "name":
                "juju_testmodel_1234567_rules-app-1_alert_rules",
                "rules": [{
                    "alert": "CPU_Usage",
                    "expr":
                    'cpu_usage_idle{is_container!="True", group="promoagents-juju"} < 10',
                    "for": "5m",
                    "labels": {
                        "override_group_by": "host",
                        "severity": "page",
                        "cloud": "juju",
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567",
                        "juju_application": "rules-app-1",
                        "juju_unit": "rules-app-1/0",
                    },
                    "annotations": {
                        "description":
                        "Host {{ $labels.host }} has had <  10% idle cpu for the last 5m\n",
                        "summary":
                        "Host {{ $labels.host }} CPU free is less than 10%",
                    },
                }],
            },
        ]

        self.assertListEqual(groups, expected_groups)

    def test_removing_scrape_jobs_differentiates_between_units(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        target_rel_id = self.harness.add_relation("prometheus-target",
                                                  "target-app")
        self.harness.add_relation_unit(target_rel_id, "target-app/0")
        self.harness.update_relation_data(
            target_rel_id,
            "target-app/0",
            {
                "hostname": "scrape_target_0",
                "port": "1234",
            },
        )

        self.harness.add_relation_unit(target_rel_id, "target-app/1")
        self.harness.update_relation_data(
            target_rel_id,
            "target-app/1",
            {
                "hostname": "scrape_target_1",
                "port": "5678",
            },
        )

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))

        self.assertEqual(len(scrape_jobs), 1)
        self.assertEqual(len(scrape_jobs[0].get("static_configs")), 2)

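        # Removing one unit should drop only that unit's static config from the shared job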
        self.harness.remove_relation_unit(target_rel_id, "target-app/1")
        scrape_jobs = json.loads(prometheus_rel_data.get("scrape_jobs", "[]"))

        self.assertEqual(len(scrape_jobs), 1)
        self.assertEqual(len(scrape_jobs[0].get("static_configs")), 1)

        expected_jobs = [{
            "job_name":
            "juju_testmodel_1234567_target-app_prometheus_scrape",
            "static_configs": [{
                "targets": ["scrape_target_0:1234"],
                "labels": {
                    "juju_model": "testmodel",
                    "juju_model_uuid": "1234567890",
                    "juju_application": "target-app",
                    "juju_unit": "target-app/0",
                    "host": "scrape_target_0",
                },
            }],
            "relabel_configs": [RELABEL_INSTANCE_CONFIG],
        }]
        self.assertListEqual(scrape_jobs, expected_jobs)

    def test_removing_alert_rules_differentiates_between_units(self):
        prometheus_rel_id = self.harness.add_relation(PROMETHEUS_RELATION,
                                                      "prometheus")
        self.harness.add_relation_unit(prometheus_rel_id, "prometheus/0")

        alert_rules_rel_id = self.harness.add_relation("prometheus-rules",
                                                       "rules-app")
        self.harness.add_relation_unit(alert_rules_rel_id, "rules-app/0")
        self.harness.update_relation_data(
            alert_rules_rel_id,
            "rules-app/0",
            {"groups": ALERT_RULE_1},
        )

        self.harness.add_relation_unit(alert_rules_rel_id, "rules-app/1")
        self.harness.update_relation_data(
            alert_rules_rel_id,
            "rules-app/1",
            {"groups": ALERT_RULE_2},
        )

        prometheus_rel_data = self.harness.get_relation_data(
            prometheus_rel_id, self.harness.model.app.name)

        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 1)

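        # Removing one unit should leave only the remaining unit's rules in the application's group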
        self.harness.remove_relation_unit(alert_rules_rel_id, "rules-app/1")

        alert_rules = json.loads(prometheus_rel_data.get("alert_rules", "{}"))
        groups = alert_rules.get("groups", [])
        self.assertEqual(len(groups), 1)

        expected_groups = [
            {
                "name":
                "juju_testmodel_1234567_rules-app_alert_rules",
                "rules": [{
                    "alert": "CPU_Usage",
                    "expr":
                    'cpu_usage_idle{is_container!="True", group="promoagents-juju"} < 10',
                    "for": "5m",
                    "labels": {
                        "override_group_by": "host",
                        "severity": "page",
                        "cloud": "juju",
                        "juju_model": "testmodel",
                        "juju_model_uuid": "1234567",
                        "juju_application": "rules-app",
                        "juju_unit": "rules-app/0",
                    },
                    "annotations": {
                        "description":
                        "Host {{ $labels.host }} has had <  10% idle cpu for the last 5m\n",
                        "summary":
                        "Host {{ $labels.host }} CPU free is less than 10%",
                    },
                }],
            },
        ]
        self.assertListEqual(groups, expected_groups)
Example #21
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_image_path_is_required(self):
        missing_image_config = {
            'prometheus-image-path': '',
            'prometheus-image-username': '',
            'prometheus-image-password': ''
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_image_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-path']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-path']
        self.assertEqual(missing, expected)

    def test_password_is_required_when_username_is_set(self):
        missing_password_config = {
            'prometheus-image-path': 'prom/prometheus:latest',
            'prometheus-image-username': '******',
            'prometheus-image-password': '',
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_password_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-password']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-password']
        self.assertEqual(missing, expected)

    def test_alerting_config_is_updated_by_alertmanager_relation(self):
        self.harness.set_leader(True)

        # check alerting config is empty without alertmanager relation
        self.harness.update_config(MINIMAL_CONFIG)
        self.assertEqual(self.harness.charm.stored.alertmanagers, {})
        rel_id = self.harness.add_relation('alertmanager', 'smtp')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'smtp/0')
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), str())

        # check alerting config is updated when an alertmanager joins
        self.harness.update_relation_data(
            rel_id, 'smtp/0',
            {'alerting_config': yaml.dump(SMTP_ALERTING_CONFIG)})

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SMTP_ALERTING_CONFIG)

    def test_alerting_config_is_removed_when_alertmanager_departs(self):
        self.harness.set_leader(True)

        # ensure there is a non-empty alerting config
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('alertmanager', 'smtp')
        rel = self.harness.model.get_relation('alertmanager')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'smtp/0')
        self.harness.update_relation_data(
            rel_id, 'smtp/0',
            {'alerting_config': yaml.dump(SMTP_ALERTING_CONFIG)})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SMTP_ALERTING_CONFIG)

        # check alerting config is removed when relation departs
        self.harness.charm.on.alertmanager_relation_departed.emit(rel)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), str())

    def test_grafana_is_provided_port_and_source(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('grafana-source', 'grafana')
        self.harness.add_relation_unit(rel_id, 'grafana/0')
        self.harness.update_relation_data(rel_id, 'grafana/0', {})
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.unit.name)
        self.assertEqual(int(data['port']), MINIMAL_CONFIG['advertised-port'])
        self.assertEqual(data['source-type'], 'prometheus')

    def test_default_cli_log_level_is_info(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'info')

    def test_invalid_log_level_defaults_to_debug(self):
        self.harness.set_leader(True)
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config['log-level'] = 'bad-level'
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'debug')

    def test_valid_log_level_is_accepted(self):
        self.harness.set_leader(True)
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config['log-level'] = 'warn'
        self.harness.update_config(valid_log_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'warn')

    def test_web_admin_api_can_be_enabled(self):
        self.harness.set_leader(True)

        # without web admin enabled
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--web.enable-admin-api'), None)

        # with web admin enabled
        admin_api_config = MINIMAL_CONFIG.copy()
        admin_api_config['web-enable-admin-api'] = True
        self.harness.update_config(admin_api_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--web.enable-admin-api'),
                         '--web.enable-admin-api')

    def test_web_page_title_can_be_set(self):
        self.harness.set_leader(True)
        web_config = MINIMAL_CONFIG.copy()
        web_config['web-page-title'] = 'Prometheus Test Page'
        self.harness.update_config(web_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(
            cli_arg(pod_spec, '--web.page-title')[1:-1],  # stripping quotes
            web_config['web-page-title'])

    def test_tsdb_compression_is_not_enabled_by_default(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         None)

    def test_tsdb_compression_can_be_enabled(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        compress_config['tsdb-wal-compression'] = True
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         '--storage.tsdb.wal-compression')

    def test_valid_tsdb_retention_times_can_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            retention_time = '{}{}'.format(1, unit)
            retention_time_config['tsdb-retention-time'] = retention_time
            self.harness.update_config(retention_time_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(
                cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                retention_time)

    def test_invalid_tsdb_retention_times_can_not_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = '{}{}'.format(1, 'x')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = ["ERROR:charm:Invalid unit x in time spec"]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

        # invalid time value
        retention_time = '{}{}'.format(0, 'd')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = [
                "ERROR:charm:Expected positive time spec but got 0"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

    def test_max_web_connections_can_be_set(self):
        self.harness.set_leader(True)
        maxcon_config = MINIMAL_CONFIG.copy()
        maxcon_config['web-max-connections'] = 512
        self.harness.update_config(maxcon_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(int(cli_arg(pod_spec, '--web.max-connections')),
                         maxcon_config['web-max-connections'])

    def test_alertmanager_queue_capacity_can_be_set(self):
        self.harness.set_leader(True)
        queue_config = MINIMAL_CONFIG.copy()
        queue_config['alertmanager-notification-queue-capacity'] = 512
        self.harness.update_config(queue_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(
            int(cli_arg(pod_spec,
                        '--alertmanager.notification-queue-capacity')),
            queue_config['alertmanager-notification-queue-capacity'])

    def test_alertmanager_timeout_can_be_set(self):
        self.harness.set_leader(True)
        timeout_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            timeout_config['alertmanager-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(timeout_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(cli_arg(pod_spec, '--alertmanager.timeout'),
                             timeout_config['alertmanager-timeout'])

    def test_global_scrape_interval_can_be_set(self):
        self.harness.set_leader(True)
        scrapeint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapeint_config['scrape-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapeint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_interval'],
                             scrapeint_config['scrape-interval'])

    def test_global_scrape_timeout_can_be_set(self):
        self.harness.set_leader(True)
        scrapetime_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapetime_config['scrape-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapetime_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_timeout'],
                             scrapetime_config['scrape-timeout'])

    def test_global_evaluation_interval_can_be_set(self):
        self.harness.set_leader(True)
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            evalint_config['evaluation-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(evalint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['evaluation_interval'],
                             evalint_config['evaluation-interval'])

    def test_valid_external_labels_can_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        labels = {'name1': 'value1', 'name2': 'value2'}
        label_config['external-labels'] = json.dumps(labels)
        self.harness.update_config(label_config)
        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNotNone(gconfig['external_labels'])
        self.assertEqual(labels, gconfig['external_labels'])

    def test_invalid_external_labels_can_not_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        # label value must be string
        labels = {'name': 1}
        label_config['external-labels'] = json.dumps(labels)
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(label_config)
            expected_logs = [
                "ERROR:charm:External label keys/values must be strings"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNone(gconfig.get('external_labels'))

    def test_default_scrape_config_is_always_set(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        prometheus_scrape_config = scrape_config(pod_spec, 'prometheus')
        self.assertIsNotNone(prometheus_scrape_config,
                             'No default config found')

    def test_k8s_scrape_config_can_be_set(self):
        self.harness.set_leader(True)
        k8s_config = MINIMAL_CONFIG.copy()
        k8s_config['monitor-k8s'] = True
        self.harness.update_config(k8s_config)
        pod_spec = self.harness.get_pod_spec()
        k8s_api_scrape_config = scrape_config(pod_spec,
                                              'kubernetes-apiservers')
        self.assertIsNotNone(k8s_api_scrape_config,
                             'No k8s API server scrape config found')
        k8s_node_scrape_config = scrape_config(pod_spec, 'kubernetes-nodes')
        self.assertIsNotNone(k8s_node_scrape_config,
                             'No k8s nodes scrape config found')
        k8s_ca_scrape_config = scrape_config(pod_spec, 'kubernetes-cadvisor')
        self.assertIsNotNone(k8s_ca_scrape_config,
                             'No k8s cAdvisor scrape config found')
        k8s_ep_scrape_config = scrape_config(pod_spec,
                                             'kubernetes-service-endpoints')
        self.assertIsNotNone(k8s_ep_scrape_config,
                             'No k8s service endpoints scrape config found')
        k8s_svc_scrape_config = scrape_config(pod_spec, 'kubernetes-services')
        self.assertIsNotNone(k8s_svc_scrape_config,
                             'No k8s services scrape config found')
        k8s_in_scrape_config = scrape_config(pod_spec, 'kubernetes-ingresses')
        self.assertIsNotNone(k8s_in_scrape_config,
                             'No k8s ingress scrape config found')
        k8s_pod_scrape_config = scrape_config(pod_spec, 'kubernetes-pods')
        self.assertIsNotNone(k8s_pod_scrape_config,
                             'No k8s pods scrape config found')
Example #22
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_image_path_is_required(self):
        missing_image_config = {
            'prometheus-image-path': '',
            'prometheus-image-username': '',
            'prometheus-image-password': ''
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_image_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-path']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-path']
        self.assertEqual(missing, expected)

    def test_password_is_required_when_username_is_set(self):
        missing_password_config = {
            'prometheus-image-path': 'prom/prometheus:latest',
            'prometheus-image-username': '******',
            'prometheus-image-password': '',
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_password_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-password']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-password']
        self.assertEqual(missing, expected)

    def test_alerting_config_is_updated_by_alertmanager_relation(self):
        self.harness.set_leader(True)

        # check alerting config is empty without alertmanager relation
        self.harness.update_config(MINIMAL_CONFIG)

        self.assertEqual(self.harness.charm._stored.alertmanagers, [])
        rel_id = self.harness.add_relation('alertmanager', 'alertmanager')

        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'alertmanager/0')
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), None)

        # check alerting config is updated when an alertmanager joins
        self.harness.update_relation_data(rel_id, 'alertmanager',
                                          {'port': '9093'})
        self.harness.update_relation_data(rel_id, 'alertmanager/0',
                                          {'ingress-address': '192.169.0.1'})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SAMPLE_ALERTING_CONFIG)

    def test_alerting_config_is_removed_when_alertmanager_departs(self):
        self.harness.set_leader(True)

        # ensure there is a non-empty alerting config
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('alertmanager', 'alertmanager')
        rel = self.harness.model.get_relation('alertmanager')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'alertmanager/0')
        self.harness.update_relation_data(rel_id, 'alertmanager',
                                          {'port': '9093'})
        self.harness.update_relation_data(rel_id, 'alertmanager/0',
                                          {'ingress-address': '192.169.0.1'})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SAMPLE_ALERTING_CONFIG)

        # check alerting config is removed when relation departs
        self.harness.charm.on.alerting_relation_departed.emit(rel)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), None)

    def test_grafana_is_provided_port_and_source(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('grafana-source', 'grafana')
        self.harness.add_relation_unit(rel_id, 'grafana/0')
        self.harness.update_relation_data(rel_id, 'grafana/0', {})
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.unit.name)

        self.assertEqual(int(data['port']), MINIMAL_CONFIG['port'])
        self.assertEqual(data['source-type'], 'prometheus')

    def test_default_cli_log_level_is_info(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'info')

    def test_invalid_log_level_defaults_to_debug(self):
        self.harness.set_leader(True)
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config['log-level'] = 'bad-level'
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'debug')

    def test_valid_log_level_is_accepted(self):
        self.harness.set_leader(True)
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config['log-level'] = 'warn'
        self.harness.update_config(valid_log_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'warn')

    def test_tsdb_compression_is_not_enabled_by_default(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         None)

    def test_tsdb_compression_can_be_enabled(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        compress_config['tsdb-wal-compression'] = True
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         '--storage.tsdb.wal-compression')

    def test_valid_tsdb_retention_times_can_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            retention_time = '{}{}'.format(1, unit)
            retention_time_config['tsdb-retention-time'] = retention_time
            self.harness.update_config(retention_time_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(
                cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                retention_time)

    def test_invalid_tsdb_retention_times_can_not_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = '{}{}'.format(1, 'x')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = ["ERROR:charm:Invalid unit x in time spec"]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

        # invalid time value
        retention_time = '{}{}'.format(0, 'd')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = [
                "ERROR:charm:Expected positive time spec but got 0"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

    def test_global_scrape_interval_can_be_set(self):
        self.harness.set_leader(True)
        scrapeint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapeint_config['scrape-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapeint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_interval'],
                             scrapeint_config['scrape-interval'])

    def test_global_scrape_timeout_can_be_set(self):
        self.harness.set_leader(True)
        scrapetime_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapetime_config['scrape-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapetime_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_timeout'],
                             scrapetime_config['scrape-timeout'])

    def test_global_evaluation_interval_can_be_set(self):
        self.harness.set_leader(True)
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            evalint_config['evaluation-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(evalint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['evaluation_interval'],
                             evalint_config['evaluation-interval'])

    def test_valid_external_labels_can_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        labels = {'name1': 'value1', 'name2': 'value2'}
        label_config['external-labels'] = json.dumps(labels)
        self.harness.update_config(label_config)
        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNotNone(gconfig['external_labels'])
        self.assertEqual(labels, gconfig['external_labels'])

    def test_invalid_external_labels_can_not_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        # label value must be string
        labels = {'name': 1}
        label_config['external-labels'] = json.dumps(labels)
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(label_config)
            expected_logs = [
                "ERROR:charm:External label keys/values must be strings"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNone(gconfig.get('external_labels'))

    def test_default_scrape_config_is_always_set(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        prometheus_scrape_config = scrape_config(pod_spec, 'prometheus')
        self.assertIsNotNone(prometheus_scrape_config,
                             'No default config found')
Example #23
class TestInterfaceMssqlDBProvider(unittest.TestCase):

    TEST_VIP_ADDRESS = '10.0.0.100'

    def setUp(self):
        self.harness = Harness(CharmBase,
                               meta='''
            name: mssql
            provides:
              db:
                interface: mssql
            peers:
              cluster:
                interface: mssql-cluster
            requires:
              ha:
                interface: hacluster
                scope: container
        ''')
        self.harness.update_config({'vip': self.TEST_VIP_ADDRESS})
        self.addCleanup(self.harness.cleanup)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'set_unit_rel_nonce')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch('charmhelpers.core.host.pwgen')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_primary_replica',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_hacluster.HaCluster,
                       'is_ha_cluster_ready',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_on_changed(self, _is_ag_ready, _is_ha_cluster_ready,
                        _is_primary_replica, _pwgen, _mssql_db_client,
                        _set_unit_rel_nonce):
        _is_ag_ready.return_value = True
        _is_ha_cluster_ready.return_value = True
        _is_primary_replica.return_value = True
        _pwgen.return_value = 'test-password'
        self.harness.set_leader()
        self.harness.begin()
        self.harness.charm.cluster = interface_mssql_cluster.MssqlCluster(
            self.harness.charm, 'cluster')
        self.harness.charm.ha = interface_hacluster.HaCluster(
            self.harness.charm, 'ha')
        self.harness.charm.db_provider = \
            interface_mssql_provider.MssqlDBProvider(self.harness.charm, 'db')
        rel_id = self.harness.add_relation('db', 'mssqlconsumer')
        self.harness.add_relation_unit(rel_id, 'mssqlconsumer/0')
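        # the consumer's database request below should trigger creation of the
        # database and login, and publish access details back on the relation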
        self.harness.update_relation_data(rel_id, 'mssqlconsumer/0', {
            'database': 'testdb',
            'username': 'testuser'
        })

        _pwgen.assert_called_once_with(32)
        _mssql_db_client.assert_called_once_with()
        db_client_mock = _mssql_db_client.return_value
        db_client_mock.create_database.assert_called_once_with(
            db_name='testdb', ag_name=self.harness.charm.cluster.AG_NAME)
        db_client_mock.create_login.assert_called_once_with(
            name='testuser', password='test-password')
        db_client_mock.grant_access.assert_called_once_with(
            db_name='testdb', db_user_name='testuser')
        _set_unit_rel_nonce.assert_called_once_with()

        rel_unit_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(rel_unit_data.get('db_host'),
                         self.harness.charm.ha.bind_address)
        self.assertEqual(rel_unit_data.get('password'), 'test-password')
        rel_app_data = self.harness.get_relation_data(rel_id, 'mssql')
        self.assertEqual(rel_app_data.get('db_host'),
                         self.harness.charm.ha.bind_address)
        self.assertEqual(rel_app_data.get('password'), 'test-password')
Example #24
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""
    def setUp(self) -> NoReturn:
        """Test setup"""
        self.harness = Harness(AmfCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with any relation."""
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "amf",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "amf",
                    "containerPort": 29518,
                    "protocol": "TCP",
                }],
                "envConfig": {
                    "ALLOW_ANONYMOUS_LOGIN": "******",
                    "GIN_MODE": "release",
                    "NRF_HOST": "nrf",
                },
                "command": ["./amf_start.sh", "&"],
            }],
            "kubernetesResources": {
                "services": [{
                    "name": "amf-lb",
                    "labels": {
                        "juju-app": "amf"
                    },
                    "spec": {
                        "selector": {
                            "juju-app": "amf"
                        },
                        "ports": [{
                            "protocol": "SCTP",
                            "port": 38412,
                            "targetPort": 38412,
                        }],
                        "type":
                        "LoadBalancer",
                    },
                }],
            },
        }
        # Check that nrf is not yet initialized
        self.assertIsNone(self.harness.charm.state.nrf_host)

        # Initializing the nrf relation
        nrf_relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(nrf_relation_id, "nrf/0")
        self.harness.update_relation_data(nrf_relation_id, "nrf",
                                          {"hostname": "nrf"})

        # Checking if nrf data is stored
        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_nrf_app_relation_changed(self) -> NoReturn:
        """Test to see if nrf relation is updated."""

        self.assertIsNone(self.harness.charm.state.nrf_host)

        relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(relation_id, "nrf/0")
        self.harness.update_relation_data(relation_id, "nrf",
                                          {"hostname": "nrf"})

        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_amf_info(self) -> NoReturn:
        """Test to see if amf relation is updated."""
        expected_result = {
            "hostname": "amf",
        }

        relation_id = self.harness.add_relation("amf", "pcf")
        self.harness.add_relation_unit(relation_id, "pcf/0")
        self.harness.charm.publish_amf_info()
        relation_data = self.harness.get_relation_data(relation_id, "amf")
        self.assertDictEqual(expected_result, relation_data)
Example #25
class TestCharm(unittest.TestCase):
    """Test script for checking relations."""
    def setUp(self) -> NoReturn:
        """Test setup."""
        self.harness = Harness(PcscfCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with any relation."""
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "pcscf",
                "image":
                "localhost:32000/ims_pcscf:1.0",
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "pcscf",
                    "containerPort": 4070,
                    "protocol": "TCP"
                }],
                "envConfig": {
                    "MODEL": None,
                    "MYSQL_HOST": "mysql-endpoints",
                    "MYSQL_USER": "******",
                    "MYSQL_ROOT_PASSWORD": "******",
                },
                "command": ["./init_pcscf.sh", "&"],
                "kubernetes": {
                    "startupProbe": {
                        "tcpSocket": {
                            "port": 4070
                        }
                    }
                },
            }],
            "serviceAccount": {
                "automountServiceAccountToken":
                True,
                "roles": [{
                    "rules": [{
                        "apiGroups": [""],
                        "resources": ["services"],
                        "verbs": ["get", "watch", "list"],
                    }]
                }],
            },
        }
        # Check that mysql is not yet initialized
        self.assertIsNone(self.harness.charm.state.mysql)

        # Initializing mysql relation
        mysql_relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(mysql_relation_id, "mysql/0")
        self.harness.update_relation_data(
            mysql_relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Checking if mysql data is stored
        self.assertEqual(self.harness.charm.state.mysql, "mysql")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_mysql_app_relation_changed(self) -> NoReturn:
        """Test to see if mysql app relation is updated."""

        self.assertIsNone(self.harness.charm.state.mysql)

        relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(relation_id, "mysql/0")
        self.harness.update_relation_data(
            relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_pcscf_info(self) -> NoReturn:
        """Test to see if pcscf relation is updated."""
        expected_result = {"private-address": "127.1.1.1", "hostname": "pcscf"}
        relation_id = self.harness.add_relation("dns-source", "dns_source")
        relation_data = {"private-address": "127.1.1.1", "hostname": "pcscf"}
        self.harness.update_relation_data(relation_id, "dns_source",
                                          relation_data)
        relation_data = self.harness.get_relation_data(relation_id,
                                                       "dns_source")
        self.assertDictEqual(expected_result, relation_data)
Example #26
class TestMongoProvider(unittest.TestCase):
    def setup_harness(self, config, meta):
        config_yaml = CONFIG_YAML.format(**config)
        meta_yaml = PROVIDER_META.format(**meta)
        self.harness = Harness(MongoDBCharm,
                               meta=meta_yaml,
                               config=config_yaml)
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
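        # the peer relation is added before begin() so it already exists when
        # the charm initializes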
        self.peer_rel_id = self.harness.add_relation('mongodb', 'mongodb')
        self.harness.begin()

    def test_databases_are_created_when_requested(self):
        config = CONFIG.copy()
        meta = METADATA.copy()
        self.setup_harness(config, meta)

        requested_database = ['mydb']
        json_request = json.dumps(requested_database)
        consumer_data = {'databases': json_request}

        rel_id = self.harness.add_relation('database', 'consumer')
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertDictEqual(data, {})
        self.harness.add_relation_unit(rel_id, 'consumer/0')
        self.harness.update_relation_data(rel_id, 'consumer', consumer_data)
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        databases = json.loads(data['databases'])
        self.assertListEqual(databases, requested_database)

    def test_databases_are_not_created_without_a_new_request(self):
        config = CONFIG.copy()
        config['available_dbs'] = json.dumps(['mydb1'])
        meta = METADATA.copy()
        self.setup_harness(config, meta)

        requested_database = ['mydb1']
        json_request = json.dumps(requested_database)
        consumer_data = {'databases': json_request}

        rel_id = self.harness.add_relation('database', 'consumer')
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertDictEqual(data, {})
        self.harness.add_relation_unit(rel_id, 'consumer/0')
        self.harness.update_relation_data(rel_id, 'consumer', consumer_data)
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertDictEqual(data.get('databases', {}), {})

    def test_databases_are_created_only_by_leader_unit(self):
        config = CONFIG.copy()
        meta = METADATA.copy()
        self.setup_harness(config, meta)
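        # drop leadership: a non-leader unit must not create or publish
        # databases on the relation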
        self.harness.set_leader(False)

        requested_database = ['mydb']
        json_request = json.dumps(requested_database)
        consumer_data = {'databases': json_request}

        rel_id = self.harness.add_relation(meta['relation_name'], 'consumer')
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertDictEqual(data, {})
        self.harness.add_relation_unit(rel_id, 'consumer/0')
        self.harness.update_relation_data(rel_id, 'consumer', consumer_data)
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        dbs = data.get('databases', '[]')
        databases = json.loads(dbs)
        self.assertListEqual(databases, [])
Example #27
class TestInterfaceMssqlCluster(unittest.TestCase):

    TEST_BIND_ADDRESS = '10.0.0.10'
    TEST_NODE_NAME = 'test-mssql-node'
    TEST_MASTER_CERT = {
        'master_key_password': 'test_key_password',
        'master_cert': b64encode('test_master_cert'.encode()).decode(),
        'master_cert_key': b64encode('test_master_cert_key'.encode()).decode(),
        'master_cert_key_password': 'test_cert_key_password',
    }
    TEST_PRIMARY_REPLICA_NAME = 'test-primary-name'
    TEST_PRIMARY_LOGINS = {
        'test-login-1': {
            'sid': 'sid1',
            'password_hash': 'test-password-hash1',
            'roles': ['test-role1']
        },
        'test-login-2': {
            'sid': 'sid2',
            'password_hash': 'test-password-hash2',
            'roles': ['test-role2']
        },
        'test-login-3': {
            'sid': 'sid3',
            'password_hash': 'test-password-hash3',
            'roles': ['test-role3']
        },
        'test-login-4': {
            'sid': 'sid4',
            'password_hash': 'test-password-hash4',
            'roles': ['test-role4']
        }
    }
    TEST_SECONDARY_LOGINS = {
        'test-login-1': {
            'sid': 'sid1',
            'password_hash': 'test-password-hash1',
            'roles': ['test-role1']
        },
        'test-login-2': {
            'sid': 'sid2',
            'password_hash': 'test-password-hash2',
            'roles': ['test-role2']
        }
    }

    def setUp(self):
        self.harness = Harness(CharmBase,
                               meta='''
            name: mssql
            peers:
              cluster:
                interface: mssql-cluster
        ''')
        self.addCleanup(self.harness.cleanup)

        mocked_node_name = mock.patch.object(
            interface_mssql_cluster.MssqlCluster,
            'node_name',
            new_callable=mock.PropertyMock).start()
        mocked_node_name.return_value = self.TEST_NODE_NAME
        mocked_bind_address = mock.patch.object(
            interface_mssql_cluster.MssqlCluster,
            'bind_address',
            new_callable=mock.PropertyMock).start()
        mocked_bind_address.return_value = self.TEST_BIND_ADDRESS
        self.addCleanup(mock.patch.stopall)

    def test_on_joined(self):
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.initialized_nodes = {
            self.TEST_NODE_NAME: {
                'address': self.TEST_BIND_ADDRESS,
                'ready_to_cluster': 'true',
            }
        }
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
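        # joining a peer should cause this unit to publish its own node details
        # on its unit relation data (checked below)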

        rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(rel_data.get('node_name'), self.TEST_NODE_NAME)
        self.assertEqual(rel_data.get('node_address'), self.TEST_BIND_ADDRESS)
        self.assertEqual(rel_data.get('ready_to_cluster'), 'true')
        self.assertIsNone(rel_data.get('clustered'))

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_cluster_node')
    @mock.patch.object(interface_mssql_cluster, 'append_hosts_entry')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'set_sa_password')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'master_cert',
                       new_callable=mock.PropertyMock)
    def test_on_changed(self, _master_cert, _set_sa_password,
                        _append_hosts_entry, _configure_cluster_node):
        _master_cert.return_value = self.TEST_MASTER_CERT
        self.harness.set_leader()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        self.harness.update_relation_data(
            rel_id, 'mssql/1', {
                'node_name': self.TEST_NODE_NAME,
                'node_address': self.TEST_BIND_ADDRESS,
                'ready_to_cluster': 'true',
            })

        node_state = cluster.state.initialized_nodes.get(self.TEST_NODE_NAME)
        self.assertIsNotNone(node_state)
        self.assertEqual(node_state.get('address'), self.TEST_BIND_ADDRESS)
        self.assertTrue(node_state.get('ready_to_cluster'))
        self.assertIsNone(node_state.get('clustered'))
        _set_sa_password.assert_called_once_with()
        _append_hosts_entry.assert_called_once_with(self.TEST_BIND_ADDRESS,
                                                    [self.TEST_NODE_NAME])
        _configure_cluster_node.assert_called_once_with()

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_cluster_node')
    @mock.patch.object(interface_mssql_cluster, 'append_hosts_entry')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'set_master_cert')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'master_cert',
                       new_callable=mock.PropertyMock)
    def test_on_initialized_unit(self, _master_cert, _set_master_cert,
                                 _append_hosts_entry, _configure_cluster_node):
        _master_cert.return_value = None
        self.harness.set_leader()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.on.initialized_unit.emit()

        node_state = cluster.state.initialized_nodes.get(self.TEST_NODE_NAME)
        self.assertIsNotNone(node_state)
        self.assertEqual(node_state.get('address'), self.TEST_BIND_ADDRESS)
        self.assertIsNone(node_state.get('ready_to_cluster'))
        self.assertIsNone(node_state.get('clustered'))

        rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(rel_data.get('node_name'), self.TEST_NODE_NAME)
        self.assertEqual(rel_data.get('node_address'), self.TEST_BIND_ADDRESS)
        self.assertIsNone(rel_data.get('ready_to_cluster'))
        self.assertIsNone(rel_data.get('clustered'))

        _append_hosts_entry.assert_called_once_with(self.TEST_BIND_ADDRESS,
                                                    [self.TEST_NODE_NAME])
        _set_master_cert.assert_called_once_with()
        _configure_cluster_node.assert_not_called()

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'master_cert',
                       new_callable=mock.PropertyMock)
    def test_configure_master_cert(self, _master_cert, _mssql_db_client):
        _master_cert.return_value = self.TEST_MASTER_CERT
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.initialized_nodes = {
            self.TEST_NODE_NAME: {
                'address': self.TEST_BIND_ADDRESS,
                'ready_to_cluster': 'true',
            }
        }
        cluster.configure_master_cert()

        self.assertTrue(cluster.state.master_cert_configured)
        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.create_master_encryption_key.assert_called_once_with(
            'test_key_password')
        mock_ret_value.setup_master_cert.assert_called_once_with(
            'test_master_cert'.encode(), 'test_master_cert_key'.encode(),
            'test_cert_key_password')

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_secondary_replica')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_primary_replica')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_primary_replica',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_master_cert')
    def test_configure_cluster_node_primary_replica(
            self, _configure_master_cert, _mssql_db_client,
            _is_primary_replica, _configure_primary_replica,
            _configure_secondary_replica):

        _is_primary_replica.return_value = True
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.initialized_nodes[self.TEST_NODE_NAME] = {
            'address': self.TEST_BIND_ADDRESS
        }
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.configure_cluster_node()

        _configure_master_cert.assert_called_once_with()
        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.setup_db_mirroring_endpoint.assert_called_once_with()
        self.assertTrue(cluster.state.initialized_nodes[self.TEST_NODE_NAME]
                        ['ready_to_cluster'])
        rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(rel_data.get('ready_to_cluster'), 'true')
        _configure_primary_replica.assert_called_once_with()
        _configure_secondary_replica.assert_not_called()

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_secondary_replica')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_primary_replica')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_primary_replica',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'configure_master_cert')
    def test_configure_cluster_node_secondary_replica(
            self, _configure_master_cert, _mssql_db_client,
            _is_primary_replica, _configure_primary_replica,
            _configure_secondary_replica):

        _is_primary_replica.return_value = False
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.initialized_nodes[self.TEST_NODE_NAME] = {
            'address': self.TEST_BIND_ADDRESS
        }
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.configure_cluster_node()

        _configure_master_cert.assert_called_once_with()
        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.setup_db_mirroring_endpoint.assert_called_once_with()
        self.assertTrue(cluster.state.initialized_nodes[self.TEST_NODE_NAME]
                        ['ready_to_cluster'])
        rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(rel_data.get('ready_to_cluster'), 'true')
        _configure_primary_replica.assert_not_called()
        _configure_secondary_replica.assert_called_once_with()

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'create_ag')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_configure_primary_replica_ag_not_ready(self, _is_ag_ready,
                                                    _create_ag):
        _is_ag_ready.return_value = False
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.configure_primary_replica()
        _create_ag.assert_called_once_with()

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'create_ag')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ag_replicas',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ready_nodes',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_configure_primary_replica_ag_ready(self, _is_ag_ready,
                                                _ready_nodes, _ag_replicas,
                                                _create_ag, _mssql_db_client):

        _is_ag_ready.return_value = True
        _ready_nodes.return_value = {
            'test-node-1': {
                'address': '10.0.0.11'
            },
            'test-node-2': {
                'address': '10.0.0.12'
            },
            'test-node-3': {
                'address': '10.0.0.13'
            }
        }
        _ag_replicas.return_value = ['test-node-1', 'test-node-2']
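        # test-node-3 is ready but not yet an AG replica, so it is the only
        # node expected to be added to the availability group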
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.configure_primary_replica()

        _create_ag.assert_not_called()
        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.add_replicas.assert_called_once_with(
            cluster.AG_NAME, {'test-node-3': {
                'address': '10.0.0.13'
            }})
        rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertIsNotNone(rel_data.get('nonce'))

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'sync_logins_from_primary_replica')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'join_existing_ag')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ag_primary_replica',
                       new_callable=mock.PropertyMock)
    def test_configure_secondary_replica(self, _ag_primary_replica,
                                         _is_ag_ready, _mssql_db_client,
                                         _join_existing_ag,
                                         _sync_logins_from_primary_replica):

        _is_ag_ready.return_value = True
        _ag_primary_replica.return_value = self.TEST_PRIMARY_REPLICA_NAME
        _mssql_db_client.return_value.get_ag_replicas.return_value = \
            [self.TEST_PRIMARY_REPLICA_NAME, self.TEST_NODE_NAME]
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.configure_secondary_replica()

        _mssql_db_client.assert_called_once_with(
            self.TEST_PRIMARY_REPLICA_NAME)
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.get_ag_replicas.assert_called_once_with(cluster.AG_NAME)
        _join_existing_ag.assert_called_once_with()
        _sync_logins_from_primary_replica.assert_called_once_with()

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ag_primary_replica',
                       new_callable=mock.PropertyMock)
    def test_sync_logins_from_primary_replica(self, _ag_primary_replica,
                                              _mssql_db_client):
        _ag_primary_replica.return_value = self.TEST_PRIMARY_REPLICA_NAME
        mocked_primary_db_client = mock.MagicMock()
        mocked_primary_db_client.get_sql_logins.return_value = \
            self.TEST_PRIMARY_LOGINS
        mocked_this_db_client = mock.MagicMock()
        mocked_this_db_client.get_sql_logins.return_value = \
            self.TEST_SECONDARY_LOGINS
        _mssql_db_client.side_effect = [
            mocked_primary_db_client, mocked_this_db_client
        ]
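        # the first mssql_db_client() call is expected to target the primary
        # replica, the second the local node (see the assert_has_calls below)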
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.sync_logins_from_primary_replica()

        _mssql_db_client.assert_has_calls(
            [mock.call(self.TEST_PRIMARY_REPLICA_NAME),
             mock.call()])
        mocked_this_db_client.assert_has_calls([mock.call.get_sql_logins()])
        mocked_this_db_client.assert_has_calls([
            mock.call.get_sql_logins(),
            mock.call.create_login(name='test-login-3',
                                   sid='sid3',
                                   password='test-password-hash3',
                                   is_hashed_password=True,
                                   server_roles=['test-role3']),
            mock.call.create_login(name='test-login-4',
                                   sid='sid4',
                                   password='test-password-hash4',
                                   is_hashed_password=True,
                                   server_roles=['test-role4'])
        ])

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ready_nodes',
                       new_callable=mock.PropertyMock)
    def test_create_ag(self, _ready_nodes, _mssql_db_client):
        _ready_nodes.return_value = ['node1', 'node2', 'node3']
        self.harness.set_leader()
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.create_ag()
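        # creating the AG should mark this unit as clustered and advertise
        # ag_ready on the application relation data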

        self.assertTrue(cluster.state.ag_configured)
        self.assertEqual(self.harness.charm.unit.status,
                         cluster.UNIT_ACTIVE_STATUS)
        _mssql_db_client.assert_called_once_with()
        _mssql_db_client.return_value.create_ag.assert_called_once_with(
            cluster.AG_NAME, ['node1', 'node2', 'node3'])
        unit_rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(unit_rel_data.get('clustered'), 'true')
        self.assertIsNotNone(unit_rel_data.get('nonce'))
        app_rel_data = self.harness.get_relation_data(rel_id, 'mssql')
        self.assertEqual(app_rel_data.get('ag_ready'), 'true')

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    def test_join_existing_ag(self, _mssql_db_client):
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.join_existing_ag()

        self.assertTrue(cluster.state.ag_configured)
        self.assertEqual(self.harness.charm.unit.status,
                         cluster.UNIT_ACTIVE_STATUS)
        _mssql_db_client.assert_called_once_with()
        _mssql_db_client.return_value.join_ag.assert_called_once_with(
            cluster.AG_NAME)
        unit_rel_data = self.harness.get_relation_data(rel_id, 'mssql/0')
        self.assertEqual(unit_rel_data.get('clustered'), 'true')

    @mock.patch.object(interface_mssql_cluster, 'append_hosts_entry')
    def test_add_to_initialized_nodes(self, _append_hosts_entry):
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.add_to_initialized_nodes(self.TEST_NODE_NAME,
                                         self.TEST_BIND_ADDRESS,
                                         ready_to_cluster=True,
                                         clustered=True)

        node_state = cluster.state.initialized_nodes.get(self.TEST_NODE_NAME)
        self.assertIsNotNone(node_state)
        self.assertEqual(node_state.get('address'), self.TEST_BIND_ADDRESS)
        self.assertTrue(node_state.get('ready_to_cluster'))
        self.assertTrue(node_state.get('clustered'))

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch('charmhelpers.core.host.pwgen')
    def test_set_master_cert(self, _pwgen, _mssql_db_client):
        _pwgen.side_effect = [
            'test-master-key-password', 'test-master-cert-key-password'
        ]
        _mssql_db_client.return_value.create_master_cert.return_value = \
            ('test-cert'.encode(), 'test-cert-key'.encode())
        self.harness.set_leader()
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.set_master_cert()

        self.assertTrue(cluster.state.master_cert_configured)
        _pwgen.assert_has_calls([mock.call(32), mock.call(32)])
        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.create_master_encryption_key.assert_called_once_with(
            'test-master-key-password')
        mock_ret_value.create_master_cert.assert_called_once_with(
            'test-master-cert-key-password')
        app_rel_data = self.harness.get_relation_data(rel_id, 'mssql')
        self.assertEqual(app_rel_data.get('master_key_password'),
                         'test-master-key-password')
        self.assertEqual(app_rel_data.get('master_cert'),
                         b64encode('test-cert'.encode()).decode())
        self.assertEqual(app_rel_data.get('master_cert_key'),
                         b64encode('test-cert-key'.encode()).decode())
        self.assertEqual(app_rel_data.get('master_cert_key_password'),
                         'test-master-cert-key-password')

    @mock.patch.object(interface_mssql_cluster.secrets, 'choice')
    def test_set_sa_password(self, _choice):
        _choice.return_value = 'p'
        self.harness.set_leader()
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        cluster.set_sa_password()
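        # the test expects 8 characters drawn from each character class
        # (32 in total); with secrets.choice patched, the result is 'p' * 32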

        _choice_calls = []
        _choice_calls += [mock.call(string.ascii_lowercase)] * 8
        _choice_calls += [mock.call(string.ascii_uppercase)] * 8
        _choice_calls += [mock.call(string.digits)] * 8
        _choice_calls += [mock.call(string.punctuation)] * 8
        _choice.assert_has_calls(_choice_calls)
        app_rel_data = self.harness.get_relation_data(rel_id, 'mssql')
        self.assertEqual(app_rel_data.get('sa_password'), 'p' * 32)

    def test_clustered_nodes(self):
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.initialized_nodes['node-1'] = {
            'address': '10.0.0.11',
            'clustered': True
        }
        cluster.state.initialized_nodes['node-2'] = {
            'address': '10.0.0.12',
            'clustered': True
        }
        cluster.state.initialized_nodes['node-3'] = {'address': '10.0.0.13'}
        clustered_nodes = cluster.clustered_nodes

        self.assertEqual(
            clustered_nodes, {
                'node-1': {
                    'address': '10.0.0.11',
                    'clustered': True
                },
                'node-2': {
                    'address': '10.0.0.12',
                    'clustered': True
                }
            })

    def test_ready_nodes(self):
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.initialized_nodes['node-1'] = {
            'address': '10.0.0.11',
            'ready_to_cluster': True
        }
        cluster.state.initialized_nodes['node-2'] = {
            'address': '10.0.0.12',
            'ready_to_cluster': True,
            'clustered': True
        }
        cluster.state.initialized_nodes['node-3'] = {'address': '10.0.0.13'}
        ready_nodes = cluster.ready_nodes

        self.assertEqual(
            ready_nodes, {
                'node-1': {
                    'address': '10.0.0.11',
                    'ready_to_cluster': True
                },
                'node-2': {
                    'address': '10.0.0.12',
                    'ready_to_cluster': True,
                    'clustered': True
                }
            })

    def test_master_cert(self):
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        rel_id = self.harness.add_relation('cluster', 'mssql')
        self.harness.add_relation_unit(rel_id, 'mssql/1')
        self.harness.update_relation_data(
            rel_id, 'mssql', {
                'master_key_password': '******',
                'master_cert': 'test-cert',
                'master_cert_key': 'test-cert-key',
                'master_cert_key_password': '******',
            })
        master_cert = cluster.master_cert

        self.assertEqual(
            master_cert, {
                'master_key_password': '******',
                'master_cert': 'test-cert',
                'master_cert_key': 'test-cert-key',
                'master_cert_key_password': '******',
            })

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_ag_primary_replica_ag_configured(self, _is_ag_ready,
                                              _mssql_db_client):

        _is_ag_ready.return_value = True
        _mssql_db_client.return_value.get_ag_primary_replica.return_value = \
            self.TEST_PRIMARY_REPLICA_NAME
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.ag_configured = True
        primary_replica = cluster.ag_primary_replica

        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.get_ag_primary_replica.assert_called_once_with(
            cluster.AG_NAME)
        self.assertEqual(primary_replica, self.TEST_PRIMARY_REPLICA_NAME)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_ag_primary_replica_no_clustered_nodes(self, _is_ag_ready,
                                                   _mssql_db_client):
        _is_ag_ready.return_value = True
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.ag_configured = False
        primary_replica = cluster.ag_primary_replica

        _mssql_db_client.assert_not_called()
        self.assertIsNone(primary_replica)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'clustered_nodes',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_ag_primary_replica_other_clustered_node(self, _is_ag_ready,
                                                     _clustered_nodes,
                                                     _mssql_db_client):

        _is_ag_ready.return_value = True
        _mssql_db_client.return_value.get_ag_primary_replica.return_value = \
            self.TEST_PRIMARY_REPLICA_NAME
        _clustered_nodes.return_value = {
            'node-1': {
                'address': '10.0.0.11',
                'clustered': True
            },
            'node-2': {
                'address': '10.0.0.12',
                'clustered': True
            }
        }
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.ag_configured = False
        primary_replica = cluster.ag_primary_replica

        _mssql_db_client.assert_called_once_with('node-2')
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.get_ag_primary_replica.assert_called_once_with(
            cluster.AG_NAME)
        self.assertEqual(primary_replica, self.TEST_PRIMARY_REPLICA_NAME)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ag_primary_replica',
                       new_callable=mock.PropertyMock)
    def test_is_primary_replica_current_node(self, _ag_primary_replica):
        _ag_primary_replica.return_value = self.TEST_NODE_NAME
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')

        self.assertTrue(cluster.is_primary_replica)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ag_primary_replica',
                       new_callable=mock.PropertyMock)
    def test_is_primary_replica_other_node(self, _ag_primary_replica):
        _ag_primary_replica.return_value = 'mssql-primary-replica'
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')

        self.assertFalse(cluster.is_primary_replica)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'ag_primary_replica',
                       new_callable=mock.PropertyMock)
    def test_is_primary_replica_leader_node(self, _ag_primary_replica):
        _ag_primary_replica.return_value = None
        self.harness.set_leader()
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')

        self.assertTrue(cluster.is_primary_replica)

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_ag_replicas_ag_configured(self, _is_ag_ready, _mssql_db_client):
        _is_ag_ready.return_value = True
        _mssql_db_client.return_value.get_ag_replicas.return_value = \
            ['node-1', 'node-2', 'node-3']
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.ag_configured = True
        ag_replicas = cluster.ag_replicas

        _mssql_db_client.assert_called_once_with()
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.get_ag_replicas.assert_called_once_with(cluster.AG_NAME)
        self.assertListEqual(ag_replicas, ['node-1', 'node-2', 'node-3'])

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_ag_replicas_no_clustered_nodes(self, _is_ag_ready,
                                            _mssql_db_client):
        _is_ag_ready.return_value = True
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        ag_replicas = cluster.ag_replicas

        _mssql_db_client.assert_not_called()
        self.assertListEqual(ag_replicas, [])

    @mock.patch.object(interface_mssql_cluster.MssqlCluster, 'mssql_db_client')
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'clustered_nodes',
                       new_callable=mock.PropertyMock)
    @mock.patch.object(interface_mssql_cluster.MssqlCluster,
                       'is_ag_ready',
                       new_callable=mock.PropertyMock)
    def test_ag_replicas_other_clustered_node(self, _is_ag_ready,
                                              _clustered_nodes,
                                              _mssql_db_client):

        _is_ag_ready.return_value = True
        _mssql_db_client.return_value.get_ag_replicas.return_value = \
            ['node-1', 'node-2']
        _clustered_nodes.return_value = {
            'node-1': {
                'address': '10.0.0.11',
                'clustered': True
            },
            'node-2': {
                'address': '10.0.0.12',
                'clustered': True
            }
        }
        self.harness.disable_hooks()
        self.harness.begin()
        cluster = interface_mssql_cluster.MssqlCluster(self.harness.charm,
                                                       'cluster')
        cluster.state.ag_configured = False
        ag_replicas = cluster.ag_replicas

        _mssql_db_client.assert_called_once_with('node-2')
        mock_ret_value = _mssql_db_client.return_value
        mock_ret_value.get_ag_replicas.assert_called_once_with(cluster.AG_NAME)
        self.assertListEqual(ag_replicas, ['node-1', 'node-2'])
Example #28
class TestEndpointProvider(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(EndpointProviderCharm, meta=PROVIDER_META)
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def test_provider_default_scrape_relations_not_in_meta(self):
        """Tests that the Provider raises exception when no promethes_scrape in meta."""
        harness = Harness(
            EndpointProviderCharm,
            # `prometheus_scrape` is only provided under a non-standard relation name
            meta="""
                name: provider-tester
                containers:
                    prometheus:
                        resource: prometheus-image
                prometheus-tester: {}
                provides:
                    non-standard-name:
                        interface: prometheus_scrape
                """,
        )
        self.assertRaises(RelationNotFoundError, harness.begin)

    def test_provider_default_scrape_relation_wrong_interface(self):
        """Tests that Provider raises exception if the default relation has the wrong interface."""
        harness = Harness(
            EndpointProviderCharm,
            # The default relation is declared with the wrong interface
            meta="""
                name: provider-tester
                containers:
                    prometheus:
                        resource: prometheus-image
                prometheus-tester: {}
                provides:
                    metrics-endpoint:
                        interface: not_prometheus_scrape
                """,
        )
        self.assertRaises(RelationInterfaceMismatchError, harness.begin)

    def test_provider_default_scrape_relation_wrong_role(self):
        """Tests that Provider raises exception if the default relation has the wrong role."""
        harness = Harness(
            EndpointProviderCharm,
            # The default relation is declared with the wrong role (requires)
            meta="""
                name: provider-tester
                containers:
                    prometheus:
                        resource: prometheus-image
                prometheus-tester: {}
                requires:
                    metrics-endpoint:
                        interface: prometheus_scrape
                """,
        )
        self.assertRaises(RelationRoleMismatchError, harness.begin)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_sets_scrape_metadata(self, _):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("scrape_metadata", data)
        scrape_metadata = data["scrape_metadata"]
        self.assertIn("model", scrape_metadata)
        self.assertIn("model_uuid", scrape_metadata)
        self.assertIn("application", scrape_metadata)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_unit_sets_bind_address_on_pebble_ready(
            self, mock_net_get):
        bind_address = "192.0.8.2"
        fake_network = {
            "bind-addresses": [{
                "interface-name":
                "eth0",
                "addresses": [{
                    "hostname": "prometheus-tester-0",
                    "value": bind_address
                }],
            }]
        }
        mock_net_get.return_value = fake_network
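        # the patched network_get makes the harness report this bind address
        # for the charm unit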
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.container_pebble_ready("prometheus-tester")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.charm.unit.name)
        self.assertIn("prometheus_scrape_host", data)
        self.assertEqual(data["prometheus_scrape_host"], bind_address)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_unit_sets_bind_address_on_relation_joined(
            self, mock_net_get):
        bind_address = "192.0.8.2"
        fake_network = {
            "bind-addresses": [{
                "interface-name":
                "eth0",
                "addresses": [{
                    "hostname": "prometheus-tester-0",
                    "value": bind_address
                }],
            }]
        }
        mock_net_get.return_value = fake_network
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.charm.unit.name)
        self.assertIn("prometheus_scrape_host", data)
        self.assertEqual(data["prometheus_scrape_host"], bind_address)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_supports_multiple_jobs(self, _):
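        """Test that all configured scrape jobs are forwarded over the relation."""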
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("scrape_jobs", data)
        jobs = json.loads(data["scrape_jobs"])
        self.assertEqual(len(jobs), len(JOBS))
        names = [job["job_name"] for job in jobs]
        job_names = [job["job_name"] for job in JOBS]
        self.assertListEqual(names, job_names)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_sanitizes_jobs(self, _):
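        """Test that published scrape jobs contain only allowed keys."""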
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("scrape_jobs", data)
        jobs = json.loads(data["scrape_jobs"])
        for job in jobs:
            keys = set(job.keys())
            self.assertTrue(keys.issubset(ALLOWED_KEYS))

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_each_alert_rule_is_topology_labeled(self, _):
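        """Test that every published alert rule carries Juju topology labels."""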
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("alert_rules", data)
        alerts = json.loads(data["alert_rules"])
        self.assertIn("groups", alerts)
        self.assertEqual(len(alerts["groups"]), 1)
        group = alerts["groups"][0]
        for rule in group["rules"]:
            self.assertIn("labels", rule)
            labels = rule["labels"]
            self.assertIn("juju_model", labels)
            self.assertIn("juju_application", labels)
            self.assertIn("juju_model_uuid", labels)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_each_alert_expression_is_topology_labeled(self, _):
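        """Test that every alert rule expression is filtered by Juju topology labels."""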
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("alert_rules", data)
        alerts = json.loads(data["alert_rules"])
        self.assertIn("groups", alerts)
        self.assertEqual(len(alerts["groups"]), 1)
        group = alerts["groups"][0]
        for rule in group["rules"]:
            self.assertIn("expr", rule)
            for labels in expression_labels(rule["expr"]):
                self.assertIn("juju_model", labels)
                self.assertIn("juju_model_uuid", labels)
                self.assertIn("juju_application", labels)
Example #29
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""

    def setUp(self) -> None:
        """Test setup."""
        self.harness = Harness(NatappCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_configure_change(self) -> None:
        """Test that a config change produces the expected pod spec and status."""
        self.harness.charm.on.config_changed.emit()
        config_data = "192.168.1.216"
        second_interface = [
            {"name": "n6-network", "interface": "eth1", "ips": [config_data]}
        ]

        annot = {
            "annotations": {"k8s.v1.cni.cncf.io/networks": json.dumps(second_interface)}
        }
        custom_resource_def = [
            {
                "name": "network-attachment-definitions.k8s.cni.cncf.io",
                "spec": {
                    "group": "k8s.cni.cncf.io",
                    "scope": "Namespaced",
                    "names": {
                        "kind": "NetworkAttachmentDefinition",
                        "singular": "network-attachment-definition",
                        "plural": "network-attachment-definitions",
                    },
                    "versions": [{"name": "v1", "served": True, "storage": True}],
                },
            }
        ]
        pdn_subnet = "192.168.0.0/16"
        pdn_ip_range_start = "192.168.1.100"
        pdn_ip_range_end = "192.168.1.250"
        pdn_gateway_ip = "192.168.1.1"
        ipam_body = {
            "type": "host-local",
            "subnet": pdn_subnet,
            "rangeStart": pdn_ip_range_start,
            "rangeEnd": pdn_ip_range_end,
            "gateway": pdn_gateway_ip,
        }
        config_body = {
            "cniVersion": "0.3.1",
            "name": "n6-network",
            "type": "macvlan",
            "master": "ens3",
            "mode": "bridge",
            "ipam": ipam_body,
        }

        custom_resource = {
            "network-attachment-definitions.k8s.cni.cncf.io": [
                {
                    "apiVersion": "k8s.cni.cncf.io/v1",
                    "kind": "NetworkAttachmentDefinition",
                    "metadata": {"name": "n6-network"},
                    "spec": {"config": json.dumps(config_body)},
                }
            ]
        }

        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "natapp",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "natapp",
                            "containerPort": 2601,
                            "protocol": "UDP",
                        }
                    ],
                    "command": ["./start.sh", "&"],
                    "kubernetes": {"securityContext": {"privileged": True}},
                }
            ],
            "kubernetesResources": {
                "customResourceDefinitions": custom_resource_def,
                "customResources": custom_resource,
                "pod": annot,
            },
        }

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_publish_natapp_info(self) -> None:
        """Test that hostname and static IP are published on the natapp relation."""
        expected_result = {
            "hostname": "natapp",
            "static_ip": "192.168.70.15",
        }
        relation_id = self.harness.add_relation("natapp", "upf1")
        self.harness.add_relation_unit(relation_id, "upf1/0")
        relation_data = {"hostname": "natapp", "static_ip": "192.168.70.15"}
        self.harness.update_relation_data(relation_id, "natapp", relation_data)
        relation_data = self.harness.get_relation_data(relation_id, "natapp")
        self.assertDictEqual(expected_result, relation_data)
Example #30
class TestDatabase(unittest.TestCase):
    def setUp(self):
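        """Set up a harness with peer and database relations already established."""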
        self.harness = Harness(MySQLOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.peer_relation_id = self.harness.add_relation(
            "database-peers", "database-peers")
        self.harness.add_relation_unit(self.peer_relation_id, "mysql/1")
        self.database_relation_id = self.harness.add_relation(
            DB_RELATION_NAME, "app")
        self.harness.add_relation_unit(self.database_relation_id, "app/0")
        self.charm = self.harness.charm

    @patch_network_get(private_address="1.1.1.1")
    @patch("mysqlsh_helpers.MySQL.get_mysql_version",
           return_value="8.0.29-0ubuntu0.20.04.3")
    @patch(
        "mysqlsh_helpers.MySQL.get_cluster_members_addresses",
        return_value={"2.2.2.1:3306", "2.2.2.3:3306", "2.2.2.2:3306"},
    )
    @patch("mysqlsh_helpers.MySQL.get_cluster_primary_address",
           return_value="2.2.2.2:3306")
    @patch("mysqlsh_helpers.MySQL.create_application_database_and_scoped_user")
    @patch("relations.database.generate_random_password",
           return_value="super_secure_password")
    def test_database_requested(
        self,
        _generate_random_password,
        _create_application_database_and_scoped_user,
        _get_cluster_primary_address,
        _get_cluster_members_addresses,
        _get_mysql_version,
    ):
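        """Test that credentials and endpoints are shared when a database is requested."""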
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        # confirm that the relation databag is empty
        database_relation_databag = self.harness.get_relation_data(
            self.database_relation_id, self.harness.charm.app)
        database_relation = self.charm.model.get_relation(DB_RELATION_NAME)
        app_unit = list(database_relation.units)[0]

        # simulate cluster initialized by editing the flag
        self.harness.update_relation_data(self.peer_relation_id,
                                          self.charm.app.name,
                                          {"units-added-to-cluster": "1"})

        self.assertEqual(database_relation_databag, {})
        self.assertEqual(database_relation.data.get(app_unit), {})
        self.assertEqual(database_relation.data.get(self.charm.unit), {})

        # update the app leader unit data to trigger database_requested event
        self.harness.update_relation_data(self.database_relation_id, "app",
                                          {"database": "test_db"})

        self.assertEqual(
            database_relation_databag,
            {
                "data": '{"database": "test_db"}',
                "password": "******",
                "username": f"relation-{self.database_relation_id}",
                "endpoints": "2.2.2.2:3306",
                "version": "8.0.29-0ubuntu0.20.04.3",
                "read-only-endpoints": "2.2.2.1:3306,2.2.2.3:3306",
            },
        )

        _generate_random_password.assert_called_once()
        _create_application_database_and_scoped_user.assert_called_once()
        _get_cluster_primary_address.assert_called_once()
        _get_cluster_members_addresses.assert_called_once()
        _get_mysql_version.assert_called_once()

    @patch_network_get(private_address="1.1.1.1")
    @patch("mysqlsh_helpers.MySQL.delete_user_for_relation")
    def test_database_broken(self, _delete_user_for_relation):
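        """Test that the relation user is deleted when the database relation is removed."""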
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        self.harness.remove_relation(self.database_relation_id)

        _delete_user_for_relation.assert_called_once_with(
            self.database_relation_id)

    @patch_network_get(private_address="1.1.1.1")
    @patch("mysqlsh_helpers.MySQL.delete_user_for_relation")
    def test_database_broken_failure(self, _delete_user_for_relation):
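        """Test the database-broken handler when deleting the relation user raises an error."""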
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        _delete_user_for_relation.side_effect = MySQLDeleteUserForRelationError()

        self.harness.remove_relation(self.database_relation_id)

        _delete_user_for_relation.assert_called_once()