# Exemplo n.º 1
# 0
class TestCharm(unittest.TestCase):
    """Harness-based unit tests for MongoDBCharm."""

    def setUp(self):
        """Stand up a harness with the mongodb-image OCI resource attached."""
        self.harness = Harness(MongoDBCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.add_oci_resource(
            "mongodb-image",
            {
                "registrypath": "mongodb:4.4.1",
                "username": "******",
                "password": "******",
            },
        )
        self.harness.begin()

    def test_replica_set_name_can_be_changed(self):
        """Replica set name defaults to rs0 and tracks config updates."""
        self.harness.set_leader(True)

        # Default name appears after a config-changed emission.
        self.harness.charm.on.config_changed.emit()
        self.assertEqual(replica_set_name(self.harness.get_pod_spec()), "rs0")

        # The config option overrides the default in the regenerated pod spec.
        self.harness.update_config({"replica_set_name": "new_name"})
        self.assertEqual(
            replica_set_name(self.harness.get_pod_spec()), "new_name")

    @patch("mongo.Mongo.reconfigure_replica_set")
    def test_replica_set_is_reconfigured_when_peer_joins(self, mock_reconf):
        """A joining peer bumps the peer count and triggers one reconfigure."""
        self.harness.set_leader(True)
        relation_id = self.harness.add_relation('mongodb', 'mongodb')
        self.harness.add_relation_unit(relation_id, 'mongodb/1')
        self.harness.update_relation_data(
            relation_id, 'mongodb/1', {'private-address': '10.0.0.1'})
        self.assertEqual(self.harness.charm.num_peers, 2)
        expected_peers = [
            'mongodb-0.mongodb-endpoints',
            'mongodb-1.mongodb-endpoints',
        ]
        mock_reconf.assert_called_once_with(expected_peers)
# Exemplo n.º 2
# 0
class TestCharm(unittest.TestCase):
    """Harness-based unit tests for MongoDBCharm relations and URIs."""

    def setUp(self):
        """Create a harness with the mongodb-image OCI resource and start it."""
        self.harness = Harness(MongoDBCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.add_oci_resource(
            "mongodb-image",
            {
                "registrypath": "mongodb:4.4.1",
                "username": "******",
                "password": "******",
            },
        )
        self.harness.begin()

    def test_replica_set_name_can_be_changed(self):
        """Replica set name defaults to rs0 and follows config changes."""
        self.harness.set_leader(True)

        # Default name comes from emitting config-changed with no overrides.
        self.harness.charm.on.config_changed.emit()
        self.assertEqual(replica_set_name(self.harness.get_pod_spec()), "rs0")

        # Updating the config option is reflected in the new pod spec.
        self.harness.update_config({"replica_set_name": "new_name"})
        self.assertEqual(
            replica_set_name(self.harness.get_pod_spec()), "new_name")

    @patch("mongoserver.MongoDB.reconfigure_replica_set")
    def test_replica_set_is_reconfigured_when_peer_joins(self, mock_reconf):
        """A joining peer causes one reconfigure call with both endpoints."""
        self.harness.set_leader(True)
        relation_id = self.harness.add_relation('mongodb', 'mongodb')
        self.harness.add_relation_unit(relation_id, 'mongodb/1')
        self.harness.update_relation_data(
            relation_id, 'mongodb/1', {'private-address': '10.0.0.1'})
        expected_peers = [
            'mongodb-0.mongodb-endpoints',
            'mongodb-1.mongodb-endpoints',
        ]
        mock_reconf.assert_called_once_with(expected_peers)

    def test_uri_data_is_generated_correctly(self):
        """Standalone and replica-set URIs use the expected host names."""
        self.harness.set_leader(True)
        mongo = self.harness.charm.mongo
        self.assertEqual(mongo.standalone_uri, 'mongodb://mongodb:27017/')
        self.assertEqual(
            mongo.replica_set_uri,
            'mongodb://mongodb-0.mongodb-endpoints:27017/')

    def test_database_relation_data_is_set_correctly(self):
        """The database relation publishes URIs and replica-set details."""
        self.harness.set_leader(True)
        relation_id = self.harness.add_relation('database', 'client')
        self.harness.add_relation_unit(relation_id, 'client/1')
        model = self.harness.framework.model
        relation = model.get_relation('database', relation_id)
        remote_unit = model.get_unit('client/1')
        self.harness.charm.on['database'].relation_changed.emit(
            relation, remote_unit)
        got = self.harness.get_relation_data(relation_id, model.unit.name)
        self.assertDictEqual(got, {
            'replicated': 'False',
            'replica_set_name': 'rs0',
            'standalone_uri': 'mongodb://mongodb:27017/',
            'replica_set_uri': 'mongodb://mongodb-0.mongodb-endpoints:27017/',
        })
# Exemplo n.º 3
# 0
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""

    def setUp(self) -> NoReturn:
        """Create a leader harness for MongodbCharm and start it."""
        self.harness = Harness(MongodbCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_configure_pod(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()
        # Expected pod spec rendered by the charm's config-changed handler.
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "mongodb",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "mongodb",
                            "containerPort": 27017,
                            "protocol": "TCP",
                        }
                    ],
                    "command": [
                        "mongod",
                        "--bind_ip",
                        "mongodb-endpoints",
                        "--port",
                        "27017",
                    ],
                }
            ],
        }

        # Verifying status: the unit must not be blocked.
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message is non-empty.
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_publish_mongodb_info(self) -> NoReturn:
        """Test to see if mongodb relation is updated."""
        expected_result = {
            "hostname": "mongodb",
            "mongodb_uri": "mongodb://mongodb:27017",
        }
        relation_id = self.harness.add_relation("mongodb", "nrf")
        self.harness.add_relation_unit(relation_id, "nrf/0")
        relation_data = self.harness.get_relation_data(relation_id, "mongodb")
        # Fix: removed a leftover debug print of relation_data; the assertion
        # below fully verifies the published data.
        self.assertDictEqual(expected_result, relation_data)
# Exemplo n.º 4
# 0
class TestCharm(unittest.TestCase):
    """Harness tests for the TrainingCharm pod spec."""

    def setUp(self) -> None:
        """Create and start a TrainingCharm test harness."""
        self.harness = Harness(TrainingCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test__grafana_port_config_changed(self):
        """Changing grafana_port updates both the container port and probe."""
        self.harness.set_leader(True)
        self.harness.update_config({"grafana_port": 4000})
        spec = self.harness.get_pod_spec()[0]
        container = spec["containers"][0]
        self.assertEqual(container["ports"][0]["containerPort"], 4000)
        self.assertEqual(container["readinessProbe"]["httpGet"]["port"], 4000)
# Exemplo n.º 5
# 0
class TestCharm(unittest.TestCase):
    """Harness tests for AlertmanagerCharm configuration and relations."""

    def setUp(self):
        """Start a leader harness with an initial pagerduty_key configured."""
        self.harness = Harness(AlertmanagerCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.harness.set_leader(True)
        self.harness.update_config({"pagerduty_key": "123"})

    def test_config_changed(self):
        """A new pagerduty key shows up in the rendered receiver config."""
        self.harness.update_config({"pagerduty_key": "abc"})
        rendered = self.get_config()
        pagerduty = rendered["receivers"][0]["pagerduty_configs"][0]
        self.assertEqual(pagerduty["service_key"], "abc")

    def test_port_change(self):
        """A port change is published on the alerting relation app data."""
        relation_id = self.harness.add_relation("alerting", "prometheus")
        self.assertIsInstance(relation_id, int)
        self.harness.add_relation_unit(relation_id, "prometheus/0")
        self.harness.update_config({"port": "9096"})
        app_name = self.harness.model.app.name
        relation_data = self.harness.get_relation_data(relation_id, app_name)
        self.assertEqual(relation_data["port"], "9096")

    def test_bad_config(self):
        """An empty pagerduty key puts the unit into BlockedStatus."""
        self.harness.update_config({"pagerduty_key": ""})
        self.assertEqual(
            type(self.harness.model.unit.status), ops.model.BlockedStatus)

    # TODO figure out how to test scaling up the application

    def get_config(self):
        """Return the alertmanager YAML config parsed out of the pod spec."""
        pod_spec = self.harness.get_pod_spec()
        files = pod_spec[0]["containers"][0]["volumeConfig"][0]["files"]
        return yaml.safe_load(files[0]["content"])
class TestCharm(unittest.TestCase):
    """Harness tests for the GrafanaK8s charm pod spec."""

    def setUp(self):
        """Create and start a GrafanaK8s test harness."""
        self.harness = Harness(GrafanaK8s)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test__image_details_in_pod_spec(self):
        """Test whether image details come from config properly."""
        # basic leader + config setup
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # render the pod spec via a config-changed event
        self.harness.charm.on.config_changed.emit()
        pod_spec, _ = self.harness.get_pod_spec()

        # image details must come straight from the configured image path
        self.assertEqual(
            {'imagePath': 'grafana/grafana:latest'},
            pod_spec['containers'][0]['imageDetails'],
        )
# Exemplo n.º 7
# 0
class TestCharm(unittest.TestCase):
    """Harness-based unit tests for the PrometheusCharm pod spec and config.

    Helper functions used throughout (defined elsewhere in the test module):
    ``cli_arg`` extracts a command-line argument from a pod spec,
    ``global_config`` / ``scrape_config`` / ``alerting_config`` pull sections
    out of the generated Prometheus configuration.
    """

    def setUp(self):
        """Create and start a PrometheusCharm test harness."""
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_image_path_is_required(self):
        """An empty image path is logged as an error and reported missing."""
        missing_image_config = {
            'prometheus-image-path': '',
            'prometheus-image-username': '',
            'prometheus-image-password': ''
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_image_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-path']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-path']
        self.assertEqual(missing, expected)

    def test_password_is_required_when_username_is_set(self):
        """A username with no password is logged and reported missing."""
        missing_password_config = {
            'prometheus-image-path': 'prom/prometheus:latest',
            'prometheus-image-username': '******',
            'prometheus-image-password': '',
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_password_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-password']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-password']
        self.assertEqual(missing, expected)

    def test_alerting_config_is_updated_by_alertmanager_relation(self):
        """Alerting config is empty until an alertmanager publishes one."""
        self.harness.set_leader(True)

        # check alerting config is empty without alertmanager relation
        self.harness.update_config(MINIMAL_CONFIG)
        self.assertEqual(self.harness.charm.stored.alertmanagers, {})
        rel_id = self.harness.add_relation('alertmanager', 'smtp')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'smtp/0')
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), str())

        # check alerting config is updated when a alertmanager joins
        self.harness.update_relation_data(
            rel_id, 'smtp/0',
            {'alerting_config': yaml.dump(SMTP_ALERTING_CONFIG)})

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SMTP_ALERTING_CONFIG)

    def test_alerting_config_is_removed_when_alertmanager_departs(self):
        """Alerting config is cleared when the alertmanager relation departs."""
        self.harness.set_leader(True)

        # ensure there is a non-empty alerting config
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('alertmanager', 'smtp')
        rel = self.harness.model.get_relation('alertmanager')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'smtp/0')
        self.harness.update_relation_data(
            rel_id, 'smtp/0',
            {'alerting_config': yaml.dump(SMTP_ALERTING_CONFIG)})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SMTP_ALERTING_CONFIG)

        # check alerting config is removed when relation departs
        self.harness.charm.on.alertmanager_relation_departed.emit(rel)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), str())

    def test_grafana_is_provided_port_and_source(self):
        """The grafana-source relation receives port and source-type data."""
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('grafana-source', 'grafana')
        self.harness.add_relation_unit(rel_id, 'grafana/0')
        self.harness.update_relation_data(rel_id, 'grafana/0', {})
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.unit.name)
        self.assertEqual(int(data['port']), MINIMAL_CONFIG['advertised-port'])
        self.assertEqual(data['source-type'], 'prometheus')

    def test_default_cli_log_level_is_info(self):
        """With no log-level configured, --log.level defaults to info."""
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'info')

    def test_invalid_log_level_defaults_to_debug(self):
        """An invalid log-level is logged and falls back to debug."""
        self.harness.set_leader(True)
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config['log-level'] = 'bad-level'
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'debug')

    def test_valid_log_level_is_accepted(self):
        """A valid log-level value is passed through to --log.level."""
        self.harness.set_leader(True)
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config['log-level'] = 'warn'
        self.harness.update_config(valid_log_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'warn')

    def test_web_admin_api_can_be_enabled(self):
        """--web.enable-admin-api is absent by default and added on request."""
        self.harness.set_leader(True)

        # without web admin enabled
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--web.enable-admin-api'), None)

        # with web admin enabled
        admin_api_config = MINIMAL_CONFIG.copy()
        admin_api_config['web-enable-admin-api'] = True
        self.harness.update_config(admin_api_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--web.enable-admin-api'),
                         '--web.enable-admin-api')

    def test_web_page_title_can_be_set(self):
        """web-page-title config is passed (quoted) via --web.page-title."""
        self.harness.set_leader(True)
        web_config = MINIMAL_CONFIG.copy()
        web_config['web-page-title'] = 'Prometheus Test Page'
        self.harness.update_config(web_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(
            cli_arg(pod_spec, '--web.page-title')[1:-1],  # stripping quotes
            web_config['web-page-title'])

    def test_tsdb_compression_is_not_enabled_by_default(self):
        """--storage.tsdb.wal-compression is absent by default."""
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         None)

    def test_tsdb_compression_can_be_enabled(self):
        """tsdb-wal-compression=True adds the WAL compression flag."""
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        compress_config['tsdb-wal-compression'] = True
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         '--storage.tsdb.wal-compression')

    def test_valid_tsdb_retention_times_can_be_set(self):
        """Every accepted time unit is passed to --storage.tsdb.retention.time."""
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            retention_time = '{}{}'.format(1, unit)
            retention_time_config['tsdb-retention-time'] = retention_time
            self.harness.update_config(retention_time_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(
                cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                retention_time)

    def test_invalid_tsdb_retention_times_can_not_be_set(self):
        """Bad units and non-positive values are rejected with an error log."""
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = '{}{}'.format(1, 'x')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = ["ERROR:charm:Invalid unit x in time spec"]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

        # invalid time value
        retention_time = '{}{}'.format(0, 'd')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = [
                "ERROR:charm:Expected positive time spec but got 0"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

    def test_max_web_connections_can_be_set(self):
        """web-max-connections config is passed via --web.max-connections."""
        self.harness.set_leader(True)
        maxcon_config = MINIMAL_CONFIG.copy()
        maxcon_config['web-max-connections'] = 512
        self.harness.update_config(maxcon_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(int(cli_arg(pod_spec, '--web.max-connections')),
                         maxcon_config['web-max-connections'])

    def test_alertmanager_queue_capacity_can_be_set(self):
        """Queue capacity config reaches the notification-queue CLI flag."""
        self.harness.set_leader(True)
        queue_config = MINIMAL_CONFIG.copy()
        queue_config['alertmanager-notification-queue-capacity'] = 512
        self.harness.update_config(queue_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(
            int(cli_arg(pod_spec,
                        '--alertmanager.notification-queue-capacity')),
            queue_config['alertmanager-notification-queue-capacity'])

    def test_alertmanager_timeout_can_be_set(self):
        """Every accepted time unit is passed to --alertmanager.timeout."""
        self.harness.set_leader(True)
        timeout_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            timeout_config['alertmanager-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(timeout_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(cli_arg(pod_spec, '--alertmanager.timeout'),
                             timeout_config['alertmanager-timeout'])

    def test_global_scrape_interval_can_be_set(self):
        """scrape-interval config lands in the global Prometheus config."""
        self.harness.set_leader(True)
        scrapeint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapeint_config['scrape-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapeint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_interval'],
                             scrapeint_config['scrape-interval'])

    def test_global_scrape_timeout_can_be_set(self):
        """scrape-timeout config lands in the global Prometheus config."""
        self.harness.set_leader(True)
        scrapetime_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapetime_config['scrape-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapetime_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_timeout'],
                             scrapetime_config['scrape-timeout'])

    def test_global_evaluation_interval_can_be_set(self):
        """evaluation-interval config lands in the global Prometheus config."""
        self.harness.set_leader(True)
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            evalint_config['evaluation-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(evalint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['evaluation_interval'],
                             evalint_config['evaluation-interval'])

    def test_valid_external_labels_can_be_set(self):
        """String-valued external labels are accepted into the global config."""
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        labels = {'name1': 'value1', 'name2': 'value2'}
        label_config['external-labels'] = json.dumps(labels)
        self.harness.update_config(label_config)
        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNotNone(gconfig['external_labels'])
        self.assertEqual(labels, gconfig['external_labels'])

    def test_invalid_external_labels_can_not_be_set(self):
        """Non-string external label values are rejected with an error log."""
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        # label value must be string
        labels = {'name': 1}
        label_config['external-labels'] = json.dumps(labels)
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(label_config)
            expected_logs = [
                "ERROR:charm:External label keys/values must be strings"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNone(gconfig.get('external_labels'))

    def test_default_scrape_config_is_always_set(self):
        """A self-scrape config for 'prometheus' is always present."""
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        prometheus_scrape_config = scrape_config(pod_spec, 'prometheus')
        self.assertIsNotNone(prometheus_scrape_config,
                             'No default config found')

    def test_k8s_scrape_config_can_be_set(self):
        """monitor-k8s=True adds scrape configs for every k8s target type."""
        self.harness.set_leader(True)
        k8s_config = MINIMAL_CONFIG.copy()
        k8s_config['monitor-k8s'] = True
        self.harness.update_config(k8s_config)
        pod_spec = self.harness.get_pod_spec()
        k8s_api_scrape_config = scrape_config(pod_spec,
                                              'kubernetes-apiservers')
        self.assertIsNotNone(k8s_api_scrape_config,
                             'No k8s API server scrape config found')
        k8s_node_scrape_config = scrape_config(pod_spec, 'kubernetes-nodes')
        self.assertIsNotNone(k8s_node_scrape_config,
                             'No k8s nodes scrape config found')
        k8s_ca_scrape_config = scrape_config(pod_spec, 'kubernetes-cadvisor')
        self.assertIsNotNone(k8s_ca_scrape_config,
                             'No k8s cAdvisor scrape config found')
        k8s_ep_scrape_config = scrape_config(pod_spec,
                                             'kubernetes-service-endpoints')
        self.assertIsNotNone(k8s_ep_scrape_config,
                             'No k8s service endpoints scrape config found')
        k8s_svc_scrape_config = scrape_config(pod_spec, 'kubernetes-services')
        self.assertIsNotNone(k8s_svc_scrape_config,
                             'No k8s services scrape config found')
        k8s_in_scrape_config = scrape_config(pod_spec, 'kubernetes-ingresses')
        self.assertIsNotNone(k8s_in_scrape_config,
                             'No k8s ingress scrape config found')
        k8s_pod_scrape_config = scrape_config(pod_spec, 'kubernetes-pods')
        self.assertIsNotNone(k8s_pod_scrape_config,
                             'No k8s pods scrape config found')
# Exemplo n.º 8
# 0
class TestCharm(unittest.TestCase):
    """Harness tests for UeCharm status and pod spec generation."""

    def setUp(self) -> NoReturn:
        """Create a leader harness for UeCharm and start it."""
        self.harness = Harness(UeCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status: with no relations the unit must be blocked
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message explains what the charm is waiting for
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for")
        )

    def test_on_start_with_relations(self) -> NoReturn:
        """Test pod spec generation once the ran relation is established."""
        self.harness.charm.on.start.emit()
        # Expected pod spec once the ran relation provides a hostname.
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "ue",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "ueport",
                            "containerPort": 22,
                            "protocol": "TCP",
                        },
                    ],
                    "envConfig": {
                        "ALLOW_ANONYMOUS_LOGIN": "******",
                        "RELATION": "ran",
                    },
                    "command": [
                        "/bin/bash",
                        "-ec",
                        "while :; do service ssh restart; sleep 5 ; done",
                    ],
                    "kubernetes": {
                        "securityContext": {"capabilities": {"add": ["NET_ADMIN"]}}
                    },
                }
            ],
            "kubernetesResources": {
                "pod": {
                    "annotations": {
                        "k8s.v1.cni.cncf.io/networks": '[\n{\n"name" : "internet-network",'
                        '\n"interface": "eth1",\n"ips": []\n}\n]'
                    }
                },
            },
        }

        self.assertIsNone(self.harness.charm.state.ran_host)
        ran_relation_id = self.harness.add_relation("ran", "ran")
        self.harness.add_relation_unit(ran_relation_id, "ran/0")
        self.harness.update_relation_data(ran_relation_id, "ran", {"hostname": "ran"})

        # Checking if ran relation data is stored
        self.assertEqual(self.harness.charm.state.ran_host, "ran")

        # Verifying status: the unit must no longer be blocked
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)
# Exemplo n.º 9
# 0
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""

    def setUp(self) -> NoReturn:
        """Create a leader harness for UdmCharm and start it."""
        self.harness = Harness(UdmCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status: with no relations the unit must be blocked
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message explains what the charm is waiting for
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test pod spec generation once the nrf relation is established."""
        # Expected pod spec once the nrf relation provides a hostname.
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "udm",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "udm",
                    "containerPort": 29503,
                    "protocol": "TCP",
                }],
                "envConfig": {
                    "ALLOW_ANONYMOUS_LOGIN": "******",
                    "GIN_MODE": "release",
                    "NRF_HOST": "nrf",
                },
                "command": ["./udm_start.sh", "&"],
            }],
        }

        self.harness.charm.on.start.emit()
        # Check if nrf,db is initialized
        self.assertIsNone(self.harness.charm.state.nrf_host)

        # Initializing the nrf relation
        nrf_relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(nrf_relation_id, "nrf/0")
        self.harness.update_relation_data(nrf_relation_id, "nrf",
                                          {"hostname": "nrf"})

        # Checking if nrf,db data is stored
        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")

        # Verifying status: the unit must no longer be blocked
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_nrf_app_relation_changed(self) -> NoReturn:
        """Test to see if the nrf relation data is stored on change."""

        self.assertIsNone(self.harness.charm.state.nrf_host)

        relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(relation_id, "nrf/0")
        self.harness.update_relation_data(relation_id, "nrf",
                                          {"hostname": "nrf"})

        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")

        # Verifying status: the unit must not be blocked
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message no longer says it is waiting
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))
# Exemplo n.º 10
# 0
class TestCharm(unittest.TestCase):
    """Harness tests for the AMF charm: status, pod spec, relations."""
    def setUp(self) -> NoReturn:
        """Create and start a test harness around AmfCharm as leader."""
        self.harness = Harness(AmfCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status: with no nrf relation the charm must block
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with the nrf relation in place."""
        # Full pod spec the charm is expected to emit, including the
        # SCTP LoadBalancer service exposed for the AMF.
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "amf",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "amf",
                    "containerPort": 29518,
                    "protocol": "TCP",
                }],
                "envConfig": {
                    "ALLOW_ANONYMOUS_LOGIN": "******",
                    "GIN_MODE": "release",
                    "NRF_HOST": "nrf",
                },
                "command": ["./amf_start.sh", "&"],
            }],
            "kubernetesResources": {
                "services": [{
                    "name": "amf-lb",
                    "labels": {
                        "juju-app": "amf"
                    },
                    "spec": {
                        "selector": {
                            "juju-app": "amf"
                        },
                        "ports": [{
                            "protocol": "SCTP",
                            "port": 38412,
                            "targetPort": 38412,
                        }],
                        "type":
                        "LoadBalancer",
                    },
                }],
            },
        }
        # Check if nrf is initialized
        self.assertIsNone(self.harness.charm.state.nrf_host)

        # Initializing the nrf relation
        nrf_relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(nrf_relation_id, "nrf/0")
        self.harness.update_relation_data(nrf_relation_id, "nrf",
                                          {"hostname": "nrf"})

        # Checking if nrf data is stored
        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_nrf_app_relation_changed(self) -> NoReturn:
        """Test to see if nrf relation is updated."""
        # No nrf host is known before the relation exists.
        self.assertIsNone(self.harness.charm.state.nrf_host)

        relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(relation_id, "nrf/0")
        self.harness.update_relation_data(relation_id, "nrf",
                                          {"hostname": "nrf"})

        # The relation-changed hook should have copied the hostname.
        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_amf_info(self) -> NoReturn:
        """Test to see if amf relation is updated."""
        expected_result = {
            "hostname": "amf",
        }

        # Relate a consumer (pcf) and have the charm publish its info.
        relation_id = self.harness.add_relation("amf", "pcf")
        self.harness.add_relation_unit(relation_id, "pcf/0")
        self.harness.charm.publish_amf_info()
        relation_data = self.harness.get_relation_data(relation_id, "amf")
        self.assertDictEqual(expected_result, relation_data)
Exemplo n.º 11
0
class TestCharm(unittest.TestCase):
    """Harness tests for the MySQL charm: pod spec and relation data.

    The four ``test_publish_*_mysql_info`` tests previously duplicated
    the same body verbatim; they now share ``_assert_mysql_info``.
    """

    def setUp(self) -> NoReturn:
        """Create and start a test harness around MysqlCharm as leader."""
        self.harness = Harness(MysqlCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def _assert_mysql_info(self, remote_app: str) -> None:
        """Relate ``remote_app`` to mysql and verify the published databag.

        Every consumer charm (pcscf, icscf, scscf, hss) should see the
        same mysql connection details in the "mysql" application data.
        """
        expected_result = {
            "hostname": "mysql",
            "mysql_user": "******",
            "mysql_pwd": "root",
        }
        relation_id = self.harness.add_relation("mysql", remote_app)
        self.harness.add_relation_unit(relation_id, remote_app + "/0")
        relation_data = self.harness.get_relation_data(relation_id, "mysql")
        self.assertDictEqual(expected_result, relation_data)

    def test_config_changed(self):
        """Pod spec emitted on config-changed must match expectations."""
        self.harness.charm.on.config_changed.emit()
        expected_result = {
            "version": 3,
            "containers": [{
                "name": "mysql",
                "image": "mysql:5.7",
                "imagePullPolicy": "Always",
                "ports": [{
                    "name": "sql",
                    "containerPort": 3306,
                    "protocol": "TCP"
                }],
                "envConfig": {
                    "MYSQL_ROOT_PASSWORD": "******",
                },
            }],
        }
        pod_spec, _ = self.harness.get_pod_spec()

        self.assertDictEqual(expected_result, pod_spec)
        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_pcscf_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated for pcscf."""
        self._assert_mysql_info("pcscf")

    def test_publish_icscf_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated for icscf."""
        self._assert_mysql_info("icscf")

    def test_publish_scscf_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated for scscf."""
        self._assert_mysql_info("scscf")

    def test_publish_hss_mysql_info(self) -> NoReturn:
        """Test to see if mysql relation is updated for hss."""
        self._assert_mysql_info("hss")
Exemplo n.º 12
0
class TestCharm(unittest.TestCase):
    """Harness tests for the NAT app charm: pod spec and relations."""

    def setUp(self) -> NoReturn:
        """Create and start a test harness around NatappCharm as leader."""
        self.harness = Harness(NatappCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_configure_change(self) -> NoReturn:
        """Test installation with any relation."""
        self.harness.charm.on.config_changed.emit()
        # Static IP attached to the secondary (n6) network interface.
        config_data = "192.168.1.216"
        second_interface = [
            {"name": "n6-network", "interface": "eth1", "ips": [config_data]}
        ]

        # Pod annotation requesting the Multus n6-network attachment.
        annot = {
            "annotations": {"k8s.v1.cni.cncf.io/networks": json.dumps(second_interface)}
        }
        # CRD describing Multus NetworkAttachmentDefinition objects.
        custom_resource_def = [
            {
                "name": "network-attachment-definitions.k8s.cni.cncf.io",
                "spec": {
                    "group": "k8s.cni.cncf.io",
                    "scope": "Namespaced",
                    "names": {
                        "kind": "NetworkAttachmentDefinition",
                        "singular": "network-attachment-definition",
                        "plural": "network-attachment-definitions",
                    },
                    "versions": [{"name": "v1", "served": True, "storage": True}],
                },
            }
        ]
        # host-local IPAM range for the PDN subnet.
        pdn_subnet = "192.168.0.0/16"
        pdn_ip_range_start = "192.168.1.100"
        pdn_ip_range_end = "192.168.1.250"
        pdn_gateway_ip = "192.168.1.1"
        ipam_body = {
            "type": "host-local",
            "subnet": pdn_subnet,
            "rangeStart": pdn_ip_range_start,
            "rangeEnd": pdn_ip_range_end,
            "gateway": pdn_gateway_ip,
        }
        # macvlan CNI config embedded in the NetworkAttachmentDefinition.
        config_body = {
            "cniVersion": "0.3.1",
            "name": "n6-network",
            "type": "macvlan",
            "master": "ens3",
            "mode": "bridge",
            "ipam": ipam_body,
        }

        custom_resource = {
            "network-attachment-definitions.k8s.cni.cncf.io": [
                {
                    "apiVersion": "k8s.cni.cncf.io/v1",
                    "kind": "NetworkAttachmentDefinition",
                    "metadata": {"name": "n6-network"},
                    "spec": {"config": json.dumps(config_body)},
                }
            ]
        }

        # Full pod spec the charm is expected to emit (privileged
        # container plus the Multus custom resources above).
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "natapp",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "natapp",
                            "containerPort": 2601,
                            "protocol": "UDP",
                        }
                    ],
                    "command": ["./start.sh", "&"],
                    "kubernetes": {"securityContext": {"privileged": True}},
                }
            ],
            "kubernetesResources": {
                "customResourceDefinitions": custom_resource_def,
                "customResources": custom_resource,
                "pod": annot,
            },
        }

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_publish_natapp_info(self) -> NoReturn:
        """Test to see if upf relation is updated."""
        expected_result = {
            "hostname": "natapp",
            "static_ip": "192.168.70.15",
        }
        relation_id = self.harness.add_relation("natapp", "upf1")
        self.harness.add_relation_unit(relation_id, "upf1/0")
        # Publish the natapp app databag, then read it back.
        relation_data = {"hostname": "natapp", "static_ip": "192.168.70.15"}
        self.harness.update_relation_data(relation_id, "natapp", relation_data)
        relation_data = self.harness.get_relation_data(relation_id, "natapp")
        self.assertDictEqual(expected_result, relation_data)
Exemplo n.º 13
0
class GrafanaCharmTest(unittest.TestCase):
    """Harness tests for the GrafanaK8s charm.

    Covers grafana-source/database relation handling, HA status checks,
    generated datasource/ini config text, and the pod spec.
    """
    def setUp(self) -> None:
        """Create, register cleanup for, and start a GrafanaK8s harness."""
        self.harness = Harness(GrafanaK8s)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test__grafana_source_data(self):
        """Source unit data should enter and leave the charm datastore."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id, 'prometheus/0')
        self.assertIsInstance(rel_id, int)

        # test that the unit data propagates the correct way
        # which is through the triggering of on_relation_changed
        self.harness.update_relation_data(
            rel_id, 'prometheus/0', {
                'private-address': '192.0.2.1',
                'port': 1234,
                'source-type': 'prometheus',
                'source-name': 'prometheus-app',
            })

        # first source becomes the default and records its unit name
        expected_first_source_data = {
            'private-address': '192.0.2.1',
            'port': 1234,
            'source-name': 'prometheus-app',
            'source-type': 'prometheus',
            'isDefault': 'true',
            'unit_name': 'prometheus/0'
        }
        self.assertEqual(expected_first_source_data,
                         dict(self.harness.charm.datastore.sources[rel_id]))

        # test that clearing the relation data leads to
        # the datastore for this data source being cleared
        self.harness.update_relation_data(rel_id, 'prometheus/0', {
            'private-address': None,
            'port': None,
        })
        self.assertEqual(None,
                         self.harness.charm.datastore.sources.get(rel_id))

    def test__ha_database_and_status_check(self):
        """If there is a peer connection and no database (needed for HA),
        the charm should put the application in a blocked state."""

        # start charm with one peer and no database relation
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.unit.status,
                         APPLICATION_ACTIVE_STATUS)

        # ensure _check_high_availability() ends up with the correct status
        status = self.harness.charm._check_high_availability()
        self.assertEqual(status, SINGLE_NODE_STATUS)

        # make sure that triggering 'update-status' hook does not
        # overwrite the current active status
        self.harness.charm.on.update_status.emit()
        self.assertEqual(self.harness.charm.unit.status,
                         APPLICATION_ACTIVE_STATUS)

        peer_rel_id = self.harness.add_relation('grafana', 'grafana')

        # add main unit and its data
        # self.harness.add_relation_unit(peer_rel_id, 'grafana/0')
        # will trigger the grafana-changed hook
        self.harness.update_relation_data(peer_rel_id, 'grafana/0',
                                          {'private-address': '10.1.2.3'})

        # add peer unit and its data
        self.harness.add_relation_unit(peer_rel_id, 'grafana/1')
        self.harness.update_relation_data(peer_rel_id, 'grafana/1',
                                          {'private-address': '10.0.0.1'})

        # peer present but no DB -> HA requirements not met
        self.assertTrue(self.harness.charm.has_peer)
        self.assertFalse(self.harness.charm.has_db)
        self.assertEqual(self.harness.charm.unit.status, HA_NOT_READY_STATUS)

        # ensure update-status hook doesn't overwrite this
        self.harness.charm.on.update_status.emit()
        self.assertEqual(self.harness.charm.unit.status, HA_NOT_READY_STATUS)

        # now add the database connection and the model should
        # not have a blocked status
        db_rel_id = self.harness.add_relation('database', 'mysql')
        self.harness.add_relation_unit(db_rel_id, 'mysql/0')
        self.harness.update_relation_data(
            db_rel_id, 'mysql/0', {
                'type': 'mysql',
                'host': '10.10.10.10:3306',
                'name': 'test_mysql_db',
                'user': '******',
                'password': '******',
            })
        self.assertTrue(self.harness.charm.has_db)
        self.assertEqual(self.harness.charm.unit.status,
                         APPLICATION_ACTIVE_STATUS)

        # ensure _check_high_availability() ends up with the correct status
        status = self.harness.charm._check_high_availability()
        self.assertEqual(status, HA_READY_STATUS)

    def test__database_relation_data(self):
        """Database relation data should fill and empty the datastore."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.database, {})

        # add relation and update relation data
        rel_id = self.harness.add_relation('database', 'mysql')
        rel = self.harness.model.get_relation('database')
        self.harness.add_relation_unit(rel_id, 'mysql/0')
        test_relation_data = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'my-test-db',
            'user': '******',
            'password': '******',
        }
        self.harness.update_relation_data(rel_id, 'mysql/0',
                                          test_relation_data)
        # check that charm datastore was properly set
        self.assertEqual(dict(self.harness.charm.datastore.database),
                         test_relation_data)

        # now depart this relation and ensure the datastore is emptied
        self.harness.charm.on.database_relation_departed.emit(rel)
        self.assertEqual({}, dict(self.harness.charm.datastore.database))

    def test__multiple_database_relation_handling(self):
        """A second database relation must raise TooManyRelatedAppsError."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.database, {})

        # add first database relation
        self.harness.add_relation('database', 'mysql')

        # add second database relation -- should fail here
        with self.assertRaises(TooManyRelatedAppsError):
            self.harness.add_relation('database', 'mysql')
            self.harness.charm.model.get_relation('database')

    def test__multiple_source_relations(self):
        """This will test data-source config text with multiple sources.

        Specifically, it will test multiple grafana-source relations."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        # add first relation
        rel_id0 = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id0, 'prometheus/0')

        # add test data to grafana-source relation
        # and test that _make_data_source_config_text() works as expected
        prom_source_data = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-type': 'prometheus'
        }
        self.harness.update_relation_data(rel_id0, 'prometheus/0',
                                          prom_source_data)
        header_text = textwrap.dedent("""
                apiVersion: 1

                datasources:""")
        # NOTE(review): basic_auth_password is formatted into {0}
        # (basicAuthUser) and basic_auth_username into {1}
        # (basicAuthPassword) here and in every template below — looks
        # swapped; confirm against the charm's template before changing.
        correct_config_text0 = header_text + textwrap.dedent("""
            - name: prometheus/0
              type: prometheus
              access: proxy
              url: http://192.0.2.1:4321
              isDefault: true
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}""").format(
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['basic_auth_username'])

        generated_text = self.harness.charm._make_data_source_config_text()
        self.assertEqual(correct_config_text0 + '\n', generated_text)

        # add another source relation and check the resulting config text
        jaeger_source_data = {
            'private-address': '255.255.255.0',
            'port': 7890,
            'source-type': 'jaeger',
            'source-name': 'jaeger-application'
        }
        rel_id1 = self.harness.add_relation('grafana-source', 'jaeger')
        self.harness.add_relation_unit(rel_id1, 'jaeger/0')
        self.harness.update_relation_data(rel_id1, 'jaeger/0',
                                          jaeger_source_data)

        correct_config_text1 = correct_config_text0 + textwrap.dedent("""
            - name: jaeger-application
              type: jaeger
              access: proxy
              url: http://255.255.255.0:7890
              isDefault: false
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}""").format(
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['basic_auth_username'])

        generated_text = self.harness.charm._make_data_source_config_text()
        self.assertEqual(correct_config_text1 + '\n', generated_text)

        # test removal of second source results in config_text
        # that is the same as the original
        # self.harness.charm.on.grafana_source_relation_departed.emit(rel)
        self.harness.update_relation_data(rel_id1, 'jaeger/0', {
            'private-address': None,
            'port': None,
        })
        generated_text = self.harness.charm._make_data_source_config_text()
        # removal first shows up as a deleteDatasources stanza...
        correct_text_after_removal = textwrap.dedent("""
            apiVersion: 1

            deleteDatasources:
            - name: jaeger-application
              orgId: 1

            datasources:
            - name: prometheus/0
              type: prometheus
              access: proxy
              url: http://192.0.2.1:4321
              isDefault: true
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}""").format(
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['basic_auth_username'])

        self.assertEqual(correct_text_after_removal + '\n', generated_text)

        # now test that the 'deleteDatasources' is gone
        generated_text = self.harness.charm._make_data_source_config_text()
        self.assertEqual(correct_config_text0 + '\n', generated_text)

    def test__check_config_missing_image_path(self):
        # NOTE(review): this test uses MISSING_IMAGE_PASSWORD_CONFIG and
        # expects 'grafana_image_password' — its name appears swapped
        # with the test below; confirm intent before renaming.
        self.harness.update_config(MISSING_IMAGE_PASSWORD_CONFIG)

        # test the return value of _check_config
        missing = self.harness.charm._check_config()
        expected = ['grafana_image_password']
        self.assertEqual(missing, expected)

    def test__check_config_missing_password(self):
        # NOTE(review): uses MISSING_IMAGE_CONFIG and expects
        # 'grafana_image_path' — see the note on the test above.
        self.harness.update_config(MISSING_IMAGE_CONFIG)

        # test the return value of _check_config
        missing = self.harness.charm._check_config()
        expected = ['grafana_image_path']
        self.assertEqual(missing, expected)

    def test__pod_spec_container_datasources(self):
        """Generated container file mounts should match the config text."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        # add first relation
        rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id, 'prometheus/0')

        # add test data to grafana-source relation
        # and test that _make_data_source_config_text() works as expected
        prom_source_data = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-type': 'prometheus'
        }
        self.harness.update_relation_data(rel_id, 'prometheus/0',
                                          prom_source_data)

        data_source_file_text = textwrap.dedent("""
            apiVersion: 1

            datasources:
            - name: prometheus/0
              type: prometheus
              access: proxy
              url: http://192.0.2.1:4321
              isDefault: true
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}
              """).format(self.harness.model.config['basic_auth_password'],
                          self.harness.model.config['basic_auth_username'])

        config_ini_file_text = textwrap.dedent("""
        [paths]
        provisioning = {0}

        [security]
        admin_user = {1}
        admin_password = {2}

        [log]
        mode = {3}
        level = {4}
        """).format(
            self.harness.model.config['provisioning_path'],
            self.harness.model.config['basic_auth_username'],
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['grafana_log_mode'],
            self.harness.model.config['grafana_log_level'],
        )

        # both generated files must be mounted into the container
        expected_container_files_spec = [{
            'name':
            'grafana-datasources',
            'mountPath':
            self.harness.model.config['datasource_mount_path'],
            'files': {
                'datasources.yaml': data_source_file_text,
            },
        }, {
            'name':
            'grafana-config-ini',
            'mountPath':
            self.harness.model.config['config_ini_mount_path'],
            'files': {
                'grafana.ini': config_ini_file_text
            }
        }]
        pod_spec = self.harness.get_pod_spec()[0]
        container = get_container(pod_spec, 'grafana')
        actual_container_files_spec = container['files']
        self.assertEqual(expected_container_files_spec,
                         actual_container_files_spec)

    def test__access_sqlite_storage_location(self):
        """Charm metadata should place sqlitedb storage at /var/lib/grafana."""
        expected_path = '/var/lib/grafana'
        actual_path = self.harness.charm.meta.storages['sqlitedb'].location
        self.assertEqual(expected_path, actual_path)

    def test__config_ini_without_database(self):
        """Without a database relation the ini has no [database] section."""
        self.harness.update_config(BASE_CONFIG)
        expected_config_text = textwrap.dedent("""
        [paths]
        provisioning = {0}

        [security]
        admin_user = {1}
        admin_password = {2}

        [log]
        mode = {3}
        level = {4}
        """).format(
            self.harness.model.config['provisioning_path'],
            self.harness.model.config['basic_auth_username'],
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['grafana_log_mode'],
            self.harness.model.config['grafana_log_level'],
        )

        actual_config_text = self.harness.charm._make_config_ini_text()
        self.assertEqual(expected_config_text, actual_config_text)

    def test__config_ini_with_database(self):
        """With a database relation the ini gains a [database] section."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # add database relation and update relation data
        rel_id = self.harness.add_relation('database', 'mysql')
        # rel = self.harness.charm.model.get_relation('database')
        self.harness.add_relation_unit(rel_id, 'mysql/0')
        test_relation_data = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'my-test-db',
            'user': '******',
            'password': '******',
        }
        self.harness.update_relation_data(rel_id, 'mysql/0',
                                          test_relation_data)

        # test the results of _make_config_ini_text()
        expected_config_text = textwrap.dedent(
            """
        [paths]
        provisioning = {0}

        [security]
        admin_user = {1}
        admin_password = {2}

        [log]
        mode = {3}
        level = {4}

        [database]
        type = mysql
        host = 0.1.2.3:3306
        name = my-test-db
        user = test-user
        password = super!secret!password
        url = mysql://test-user:[email protected]:3306/my-test-db"""
        ).format(
            self.harness.model.config['provisioning_path'],
            self.harness.model.config['basic_auth_username'],
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['grafana_log_mode'],
            self.harness.model.config['grafana_log_level'],
        )

        actual_config_text = self.harness.charm._make_config_ini_text()
        self.assertEqual(expected_config_text, actual_config_text)

    def test__duplicate_source_names(self):
        """A second source reusing a source-name must be rejected."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        # add first relation
        p_rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        p_rel = self.harness.model.get_relation('grafana-source', p_rel_id)
        self.harness.add_relation_unit(p_rel_id, 'prometheus/0')

        # add test data to grafana-source relation
        prom_source_data0 = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-type': 'prometheus',
            'source-name': 'duplicate-source-name'
        }
        self.harness.update_relation_data(p_rel_id, 'prometheus/0',
                                          prom_source_data0)
        expected_source_data = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-name': 'duplicate-source-name',
            'source-type': 'prometheus',
            'isDefault': 'true',
            'unit_name': 'prometheus/0'
        }
        self.assertEqual(dict(self.harness.charm.datastore.sources[p_rel_id]),
                         expected_source_data)

        # add second source with the same name as the first source
        g_rel_id = self.harness.add_relation('grafana-source', 'graphite')
        self.harness.add_relation_unit(g_rel_id, 'graphite/0')

        graphite_source_data0 = {
            'private-address': '192.12.23.34',
            'port': 4321,
            'source-type': 'graphite',
            'source-name': 'duplicate-source-name'
        }
        self.harness.update_relation_data(g_rel_id, 'graphite/0',
                                          graphite_source_data0)
        # duplicate name -> second source is not stored
        self.assertEqual(None,
                         self.harness.charm.datastore.sources.get(g_rel_id))
        self.assertEqual(1, len(self.harness.charm.datastore.sources))

        # now remove the relation and ensure datastore source-name is removed
        self.harness.charm.on.grafana_source_relation_departed.emit(p_rel)
        self.assertEqual(None,
                         self.harness.charm.datastore.sources.get(p_rel_id))
        self.assertEqual(0, len(self.harness.charm.datastore.sources))

    def test__idempotent_datasource_file_hash(self):
        """The datasources hash in the pod spec must be stable."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id, 'prometheus/0')
        self.assertIsInstance(rel_id, int)

        # test that the unit data propagates the correct way
        # which is through the triggering of on_relation_changed
        self.harness.update_relation_data(
            rel_id, 'prometheus/0', {
                'private-address': '192.0.2.1',
                'port': 1234,
                'source-type': 'prometheus',
                'source-name': 'prometheus-app',
            })

        # get a hash of the created file and check that it matches the pod spec
        container = get_container(self.harness.get_pod_spec()[0], 'grafana')
        hash_text = hashlib.md5(container['files'][0]['files']
                                ['datasources.yaml'].encode()).hexdigest()
        self.assertEqual(container['config']['DATASOURCES_YAML'], hash_text)

        # test the idempotence of the call by re-configuring the pod spec
        self.harness.charm.configure_pod()
        self.assertEqual(container['config']['DATASOURCES_YAML'], hash_text)
Exemplo n.º 14
0
class TestCharm(unittest.TestCase):
    """Unit tests for the Nextcloud operator charm.

    NOTE(review): several credential values below read "******" — they appear
    to have been redacted by the example scraper, not written that way.
    """

    def setUp(self) -> None:
        """Create and start a harness around the charm under test."""
        self.harness = Harness(NextcloudOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test__pod_spec(self):
        """The pod spec exposes the configured port and image path."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        pod_spec, _ = self.harness.get_pod_spec()

        # Container port must come straight from config.
        self.assertEqual(
            BASE_CONFIG['port'],
            pod_spec.get('containers')[0].get('ports')[0].get('containerPort'))

        # Image path must come straight from config.
        self.assertEqual(
            BASE_CONFIG['image'],
            pod_spec.get('containers')[0].get('imageDetails').get('imagePath'))

    def test__database_relation_data(self):
        """Relation data lands in the charm's datastore and is cleared on break."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.state.database, {})

        # add relation and update relation data
        rel_id = self.harness.add_relation('database', 'mysql')
        rel = self.harness.model.get_relation('database')
        self.harness.add_relation_unit(rel_id, 'mysql/0')
        test_relation_data = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'my-test-db',
            'user': '******',
            'password': '******',
        }
        self.harness.update_relation_data(rel_id, 'mysql/0',
                                          test_relation_data)

        # check that charm datastore was properly set
        self.assertEqual(dict(self.harness.charm.state.database),
                         test_relation_data)

        # now depart this relation and ensure the datastore is emptied
        self.harness.charm.on.database_relation_broken.emit(rel)
        self.assertEqual({}, dict(self.harness.charm.state.database))

    def test__update_pod_env_config(self):
        """Env config is derived from the database type (mysql vs postgres)."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # test mysql
        self.harness.charm.state.database = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'mysql-test-db',
            'user': '******',
            'password': '******'
        }

        expected_config = {
            'MYSQL_DATABASE': 'mysql-test-db',
            'MYSQL_USER': '******',
            'MYSQL_PASSWORD': '******',
            'MYSQL_HOST': '0.1.2.3:3306'
        }
        pod_spec, _ = self.harness.get_pod_spec()
        self.harness.charm._update_pod_env_config(pod_spec)
        self.assertEqual(pod_spec['containers'][0]['envConfig'],
                         expected_config)

        # test postgresql
        self.harness.charm.state.database = {
            'type': 'postgres',
            'host': '0.1.2.3:5432',
            'name': 'pg-test-db',
            'user': '******',
            'password': '******'
        }

        expected_config = {
            'POSTGRES_DB': 'pg-test-db',
            'POSTGRES_USER': '******',
            'POSTGRES_PASSWORD': '******',
            'POSTGRES_HOST': '0.1.2.3:5432'
        }

        # Fetch a fresh pod spec so the mysql values above don't linger.
        pod_spec, _ = self.harness.get_pod_spec()
        self.harness.charm._update_pod_env_config(pod_spec)
        self.assertEqual(pod_spec['containers'][0]['envConfig'],
                         expected_config)
# --- Example 15 ---
class TestCharm(unittest.TestCase):
    """Unit tests for the RAN charm's pod spec generation."""

    def setUp(self) -> NoReturn:
        """Test setup: start a harness around the RAN charm as leader."""
        self.harness = Harness(RanCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_config_changed(self) -> NoReturn:
        """A config-changed event yields exactly the expected pod spec."""
        self.harness.charm.on.config_changed.emit()
        # CNI IP address management block embedded in the macvlan config.
        ipam_body = {
            "type": "host-local",
            "subnet": "60.60.0.0/16",
            "rangeStart": "60.60.0.50",
            "rangeEnd": "60.60.0.250",
            "gateway": "60.60.0.100",
        }
        config_body = {
            "cniVersion": "0.3.1",
            "name": "internet-network",
            "type": "macvlan",
            "master": "ens3",
            "mode": "bridge",
            "ipam": ipam_body,
        }
        # NOTE(review): value "******" for ALLOW_ANONYMOUS_LOGIN looks
        # scraper-redacted; confirm against the charm source.
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "ran",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "ranport",
                            "containerPort": 9487,
                            "protocol": "TCP",
                        },
                        {
                            "name": "ranport2",
                            "containerPort": 8081,
                            "protocol": "TCP",
                        },
                    ],
                    "envConfig": {"ALLOW_ANONYMOUS_LOGIN": "******", "MODEL": None},
                    "kubernetes": {"securityContext": {"privileged": True}},
                }
            ],
            # NOTE(review): key is "serviceaccount" (all lowercase) while a
            # sibling test elsewhere in this file uses "serviceAccount" —
            # presumably this matches what RanCharm emits; confirm.
            "serviceaccount": {
                "automountServiceAccountToken": True,
                "roles": [
                    {
                        "rules": [
                            {
                                "apiGroups": [""],
                                "resources": ["services"],
                                "verbs": ["get", "watch", "list"],
                            }
                        ]
                    }
                ],
            },
            "kubernetesResources": {
                "customResourceDefinitions": [
                    {
                        "name": "network-attachment-definitions.k8s.cni.cncf.io",
                        "spec": {
                            "group": "k8s.cni.cncf.io",
                            "scope": "Namespaced",
                            "names": {
                                "kind": "NetworkAttachmentDefinition",
                                "singular": "network-attachment-definition",
                                "plural": "network-attachment-definitions",
                            },
                            "versions": [
                                {"name": "v1", "served": True, "storage": True}
                            ],
                        },
                    }
                ],
                "customResources": {
                    "network-attachment-definitions.k8s.cni.cncf.io": [
                        {
                            "apiVersion": "k8s.cni.cncf.io/v1",
                            "kind": "NetworkAttachmentDefinition",
                            "metadata": {"name": "internet-network"},
                            # The CNI config is serialized to JSON inside the CR.
                            "spec": {"config": json.dumps(config_body)},
                        },
                    ]
                },
                "pod": {
                    "annotations": {
                        "k8s.v1.cni.cncf.io/networks": '[\n{\n"name" : "internet-network",'
                        '\n"interface": "eth1",\n"ips": []\n}\n]'
                    }
                },
                "services": [
                    {
                        "name": "udpnew-lb",
                        "labels": {"juju-app": "ran"},
                        "spec": {
                            "selector": {"juju-app": "ran"},
                            "ports": [
                                {
                                    "protocol": "UDP",
                                    "port": 2152,
                                    "targetPort": 2152,
                                }
                            ],
                            "type": "LoadBalancer",
                        },
                    }
                ],
            },
        }

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(self.harness.charm.unit.status.message.endswith(" relations"))
# --- Example 16 ---
class TestCharm(unittest.TestCase):
    """Test script for checking the P-CSCF charm's relations."""

    def setUp(self) -> NoReturn:
        """Test setup: start a harness around the P-CSCF charm as leader."""
        self.harness = Harness(PcscfCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Without its mysql relation the charm blocks with a waiting message."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """With the mysql relation in place the full pod spec is generated."""
        # NOTE(review): "******" values below look scraper-redacted; confirm
        # the real literals against the charm source.
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "pcscf",
                "image":
                "localhost:32000/ims_pcscf:1.0",
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "pcscf",
                    "containerPort": 4070,
                    "protocol": "TCP"
                }],
                "envConfig": {
                    "MODEL": None,
                    "MYSQL_HOST": "mysql-endpoints",
                    "MYSQL_USER": "******",
                    "MYSQL_ROOT_PASSWORD": "******",
                },
                "command": ["./init_pcscf.sh", "&"],
                "kubernetes": {
                    "startupProbe": {
                        "tcpSocket": {
                            "port": 4070
                        }
                    }
                },
            }],
            "serviceAccount": {
                "automountServiceAccountToken":
                True,
                "roles": [{
                    "rules": [{
                        "apiGroups": [""],
                        "resources": ["services"],
                        "verbs": ["get", "watch", "list"],
                    }]
                }],
            },
        }
        # Check if mysql is initialized
        self.assertIsNone(self.harness.charm.state.mysql)

        # Initializing mysql relation
        mysql_relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(mysql_relation_id, "mysql/0")
        self.harness.update_relation_data(
            mysql_relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Checking if mysql data is stored
        self.assertEqual(self.harness.charm.state.mysql, "mysql")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_mysql_app_relation_changed(self) -> NoReturn:
        """Mysql app relation data unblocks the charm."""

        self.assertIsNone(self.harness.charm.state.mysql)

        relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(relation_id, "mysql/0")
        self.harness.update_relation_data(
            relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_pcscf_info(self) -> NoReturn:
        """Data written to the dns-source relation round-trips unchanged."""
        expected_result = {"private-address": "127.1.1.1", "hostname": "pcscf"}
        relation_id = self.harness.add_relation("dns-source", "dns_source")
        relation_data = {"private-address": "127.1.1.1", "hostname": "pcscf"}
        self.harness.update_relation_data(relation_id, "dns_source",
                                          relation_data)
        relation_data = self.harness.get_relation_data(relation_id,
                                                       "dns_source")
        self.assertDictEqual(expected_result, relation_data)
# --- Example 17 ---
class TestCharm(unittest.TestCase):
    """Unit tests for the Prometheus charm's configuration handling."""

    def setUp(self):
        """Create and start a harness around the Prometheus charm."""
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_image_path_is_required(self):
        """An empty image path is logged as an error and reported missing."""
        missing_image_config = {
            'prometheus-image-path': '',
            'prometheus-image-username': '',
            'prometheus-image-password': ''
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_image_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-path']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-path']
        self.assertEqual(missing, expected)

    def test_password_is_required_when_username_is_set(self):
        """A username without a password is logged and reported missing."""
        missing_password_config = {
            'prometheus-image-path': 'prom/prometheus:latest',
            'prometheus-image-username': '******',
            'prometheus-image-password': '',
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_password_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-password']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-password']
        self.assertEqual(missing, expected)

    def test_alerting_config_is_updated_by_alertmanager_relation(self):
        """Joining an alertmanager populates the alerting config."""
        self.harness.set_leader(True)

        # check alerting config is empty without alertmanager relation
        self.harness.update_config(MINIMAL_CONFIG)

        self.assertEqual(self.harness.charm._stored.alertmanagers, [])
        rel_id = self.harness.add_relation('alertmanager', 'alertmanager')

        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'alertmanager/0')
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), None)

        # check alerting config is updated when a alertmanager joins
        self.harness.update_relation_data(rel_id, 'alertmanager',
                                          {'port': '9093'})
        self.harness.update_relation_data(rel_id, 'alertmanager/0',
                                          {'ingress-address': '192.169.0.1'})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SAMPLE_ALERTING_CONFIG)

    def test_alerting_config_is_removed_when_alertmanager_departs(self):
        """Departing the alertmanager relation clears the alerting config."""
        self.harness.set_leader(True)

        # ensure there is a non-empty alerting config
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('alertmanager', 'alertmanager')
        rel = self.harness.model.get_relation('alertmanager')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'alertmanager/0')
        self.harness.update_relation_data(rel_id, 'alertmanager',
                                          {'port': '9093'})
        self.harness.update_relation_data(rel_id, 'alertmanager/0',
                                          {'ingress-address': '192.169.0.1'})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SAMPLE_ALERTING_CONFIG)

        # check alerting config is removed when relation departs
        self.harness.charm.on.alerting_relation_departed.emit(rel)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), None)

    def test_grafana_is_provided_port_and_source(self):
        """The grafana-source relation carries our port and source type."""
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('grafana-source', 'grafana')
        self.harness.add_relation_unit(rel_id, 'grafana/0')
        self.harness.update_relation_data(rel_id, 'grafana/0', {})
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.unit.name)

        self.assertEqual(int(data['port']), MINIMAL_CONFIG['port'])
        self.assertEqual(data['source-type'], 'prometheus')

    def test_default_cli_log_level_is_info(self):
        """With no log-level configured, --log.level defaults to info."""
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'info')

    def test_invalid_log_level_defaults_to_debug(self):
        """An unknown log level is logged and falls back to debug."""
        self.harness.set_leader(True)
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config['log-level'] = 'bad-level'
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'debug')

    def test_valid_log_level_is_accepted(self):
        """A recognized log level is passed straight through to the CLI."""
        self.harness.set_leader(True)
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config['log-level'] = 'warn'
        self.harness.update_config(valid_log_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'warn')

    def test_tsdb_compression_is_not_enabled_by_default(self):
        """WAL compression is absent from the CLI unless enabled."""
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         None)

    def test_tsdb_compression_can_be_enabled(self):
        """Enabling tsdb-wal-compression adds the matching CLI flag."""
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        compress_config['tsdb-wal-compression'] = True
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         '--storage.tsdb.wal-compression')

    def test_valid_tsdb_retention_times_can_be_set(self):
        """Every supported time unit is accepted for retention time."""
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            retention_time = '{}{}'.format(1, unit)
            retention_time_config['tsdb-retention-time'] = retention_time
            self.harness.update_config(retention_time_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(
                cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                retention_time)

    def test_invalid_tsdb_retention_times_can_not_be_set(self):
        """Bad units and non-positive values are rejected and logged."""
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = '{}{}'.format(1, 'x')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = ["ERROR:charm:Invalid unit x in time spec"]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

        # invalid time value
        retention_time = '{}{}'.format(0, 'd')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = [
                "ERROR:charm:Expected positive time spec but got 0"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

    def test_global_scrape_interval_can_be_set(self):
        """Every supported time unit is accepted for scrape-interval."""
        self.harness.set_leader(True)
        scrapeint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapeint_config['scrape-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapeint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_interval'],
                             scrapeint_config['scrape-interval'])

    def test_global_scrape_timeout_can_be_set(self):
        """Every supported time unit is accepted for scrape-timeout."""
        self.harness.set_leader(True)
        scrapetime_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapetime_config['scrape-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapetime_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_timeout'],
                             scrapetime_config['scrape-timeout'])

    def test_global_evaluation_interval_can_be_set(self):
        """Every supported time unit is accepted for evaluation-interval."""
        self.harness.set_leader(True)
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            evalint_config['evaluation-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(evalint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['evaluation_interval'],
                             evalint_config['evaluation-interval'])

    def test_valid_external_labels_can_be_set(self):
        """String-valued external labels land in the global config."""
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        labels = {'name1': 'value1', 'name2': 'value2'}
        label_config['external-labels'] = json.dumps(labels)
        self.harness.update_config(label_config)
        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNotNone(gconfig['external_labels'])
        self.assertEqual(labels, gconfig['external_labels'])

    def test_invalid_external_labels_can_not_be_set(self):
        """Non-string label values are logged and dropped."""
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        # label value must be string
        labels = {'name': 1}
        label_config['external-labels'] = json.dumps(labels)
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(label_config)
            expected_logs = [
                "ERROR:charm:External label keys/values must be strings"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNone(gconfig.get('external_labels'))

    def test_default_scrape_config_is_always_set(self):
        """Prometheus always self-scrapes via a default scrape config."""
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        prometheus_scrape_config = scrape_config(pod_spec, 'prometheus')
        self.assertIsNotNone(prometheus_scrape_config,
                             'No default config found')
# --- Example 18 ---
class TestCharm(unittest.TestCase):
    """Test script for checking the UPF charm's relations."""

    def setUp(self) -> NoReturn:
        """Test setup: start a harness around the UPF charm as leader."""
        self.harness = Harness(Upf1Charm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Without the natapp relation the charm blocks with a waiting message."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """With the natapp relation in place the full pod spec is generated."""
        annot = {
            "annotations": {
                "k8s.v1.cni.cncf.io/networks":
                '[\n{\n"name" : "n6-network",'
                '\n"interface": "eth1",\n"ips": []\n}\n]'
            },
            "securityContext": {
                # NOTE: 0000 is simply the int 0 (leading zeros are only
                # legal on a zero literal in Python 3).
                "runAsUser": 0000,
                "runAsGroup": 0000
            },
        }
        service = [{
            "name": "upf-e",
            "labels": {
                "juju-app": "upf1"
            },
            "spec": {
                "selector": {
                    "juju-app": "upf1"
                },
                "ports": [{
                    "protocol": "TCP",
                    "port": 80,
                    "targetPort": 80
                }],
                "type": "ClusterIP",
            },
        }]

        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "upf1",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "upf1",
                    "containerPort": 2152,
                    "protocol": "UDP",
                }],
                "envConfig": {
                    "UE_RANGE": "60.60.0.0/24",
                    "STATIC_IP": "192.168.70.15",
                },
                "command": ["./upf_start.sh", "&"],
                "kubernetes": {
                    "securityContext": {
                        "privileged": True
                    }
                },
            }],
            "kubernetesResources": {
                "services": service,
                "pod": annot,
            },
        }
        # Check if natapp is initialized
        self.assertIsNone(self.harness.charm.state.natapp_ip)
        self.assertIsNone(self.harness.charm.state.natapp_host)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "natapp")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        self.harness.update_relation_data(
            natapp_relation_id,
            "natapp",
            {
                "hostname": "natapp",
                "static_ip": "192.168.70.15"
            },
        )
        # Checking if natapp data is stored
        self.assertEqual(self.harness.charm.state.natapp_ip, "192.168.70.15")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_config_change(self) -> NoReturn:
        """A config change with the natapp relation yields the full pod spec."""

        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "upf1",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "upf1",
                    "containerPort": 2152,
                    "protocol": "UDP"
                }],
                "envConfig": {
                    "UE_RANGE": "60.60.0.0/24",
                    "STATIC_IP": "192.168.70.15",
                },
                "command": ["./upf_start.sh", "&"],
                "kubernetes": {
                    "securityContext": {
                        "privileged": True
                    }
                },
            }],
            "kubernetesResources": {
                "pod": {
                    "annotations": {
                        "k8s.v1.cni.cncf.io/networks":
                        '[\n{\n"name" : "n6-network",'
                        '\n"interface": "eth1",\n"ips": []\n}\n]'
                    },
                    "securityContext": {
                        "runAsUser": 0,
                        "runAsGroup": 0
                    },
                },
                "services": [{
                    "name": "upf-e",
                    "labels": {
                        "juju-app": "upf1"
                    },
                    "spec": {
                        "selector": {
                            "juju-app": "upf1"
                        },
                        "ports": [{
                            "protocol": "TCP",
                            "port": 80,
                            "targetPort": 80
                        }],
                        "type":
                        "ClusterIP",
                    },
                }],
            },
        }

        # Check that natapp state is not yet initialized
        self.assertIsNone(self.harness.charm.state.natapp_ip)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "natapp")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        self.harness.update_relation_data(
            natapp_relation_id,
            "natapp",
            {
                "hostname": "natapp",
                "static_ip": "192.168.70.15"
            },
        )

        # Checking if natapp data is stored
        self.assertEqual(self.harness.charm.state.natapp_ip, "192.168.70.15")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_natapp_app_relation_changed(self) -> NoReturn:
        """Unit-only relation data (no app data) leaves the charm blocked."""
        self.harness.charm.on.config_changed.emit()

        self.assertIsNone(self.harness.charm.state.natapp_ip)

        # Initializing the natapp relation; data is set on the unit bag only,
        # so the charm keeps waiting.
        natapp_relation_id = self.harness.add_relation("natapp", "upf")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        relation_data = {"static_ip": "192.168.70.15"}
        self.harness.update_relation_data(natapp_relation_id, "natapp/0",
                                          relation_data)

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_upf_info(self) -> NoReturn:
        """Data written to the upf relation round-trips unchanged."""
        # NOTE(review): "upf1" here is presumably the local application name
        # on the "upf" relation — confirm against the charm metadata.
        expected_result = {
            "private_address": "127.1.1.1",
        }
        relation_id = self.harness.add_relation("upf", "smf")
        relation_data = {"private_address": "127.1.1.1"}
        self.harness.update_relation_data(relation_id, "upf1", relation_data)
        relation_data = self.harness.get_relation_data(relation_id, "upf1")
        self.assertDictEqual(expected_result, relation_data)
# --- Example 19 ---
class TestCharm(unittest.TestCase):
    """Harness tests for the Graylog charm.

    ``bind_address`` and ``external_uri`` are patched out as property
    mocks so the tests run without any real networking information.
    """

    def setUp(self) -> None:
        # Charm setup: build the harness and register the OCI image resource.
        # NOTE(review): add_oci_resource is called *after* begin(); sibling
        # suites register resources before begin() — confirm the order is
        # intentional here.
        self.harness = Harness(GraylogCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.harness.add_oci_resource('graylog-image')

        # Patch the network-derived properties so no model data is needed.
        self.mock_bind_address = \
            mock.patch('charm.GraylogCharm.bind_address', new_callable=mock.PropertyMock)
        self.mock_external_uri = \
            mock.patch('charm.GraylogCharm.external_uri', new_callable=mock.PropertyMock)

        self.mock_bind_address.start()
        self.mock_external_uri.start()

        # Stop both patches when each test finishes.
        self.addCleanup(self.mock_bind_address.stop)
        self.addCleanup(self.mock_external_uri.stop)

    def test_pod_spec_port(self):
        """The rendered pod spec must expose container port 9000."""
        self.harness.set_leader(True)
        # pretend to have mongo and elasticsearch by pre-seeding _stored
        self.harness.charm._stored.mongodb_uri = 'mongo://test_uri/'
        self.harness.charm._stored.elasticsearch_uri = 'http://test_es_uri'

        self.harness.update_config(BASE_CONFIG)
        self.harness.charm.on.config_changed.emit()

        spec, _ = self.harness.get_pod_spec()
        expected_port = 9000
        actual_port = spec['containers'][0]['ports'][0]['containerPort']
        self.assertEqual(expected_port, actual_port)

    def test_elasticsearch_and_mongodb_conn_strings(self):
        """Relation data fills the stored URIs; broken events clear them."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # add the elasticsearch and mongodb relations with one unit each
        es_rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        mongo_rel_id = self.harness.add_relation('mongodb', 'mongodb')
        self.harness.add_relation_unit(es_rel_id, 'elasticsearch/0')
        self.harness.add_relation_unit(mongo_rel_id, 'mongodb/0')

        # add elasticsearch relation data
        # NOTE(review): 'port' is an int while harness relation data is
        # conventionally all-strings — confirm the ops version in use
        # accepts non-string values here.
        es_rel_data = {
            'ingress-address': '10.183.1.2',
            'port': 9200,
        }
        self.harness.update_relation_data(es_rel_id, 'elasticsearch/0',
                                          es_rel_data)
        self.assertTrue(self.harness.charm.has_elasticsearch)

        # add mongodb relation data
        mongo_rel_data = {
            'replica_set_uri': 'mongo://10.0.0.2:14001,10.0.0.3:14002',
            'replicated': 'True',
            'replica_set_name': 'rs0',
        }
        self.harness.update_relation_data(mongo_rel_id, 'mongodb/0',
                                          mongo_rel_data)
        self.assertTrue(self.harness.charm.has_mongodb)

        # test that elasticsearch-uri properly made it to the _stored variable
        expected_uri = 'http://10.183.1.2:9200'
        self.assertEqual(expected_uri,
                         self.harness.charm._stored.elasticsearch_uri)

        # now emit the relation broken events and make sure the _stored variables are cleared
        es_rel = self.harness.model.get_relation('elasticsearch')
        mongo_rel = self.harness.model.get_relation('mongodb')
        self.harness.charm.on.elasticsearch_relation_broken.emit(es_rel)
        self.harness.charm.on.mongodb_relation_broken.emit(mongo_rel)
        self.assertEqual(str(), self.harness.charm._stored.elasticsearch_uri)
        self.assertEqual(str(), self.harness.charm._stored.mongodb_uri)

    def test_blocking_without_mongodb_and_elasticsearch(self):
        """Missing both required relations logs a warning (and blocks)."""
        self.harness.set_leader(True)
        with self.assertLogs(level='WARNING') as logger:
            self.harness.update_config(BASE_CONFIG)
            msg = 'WARNING:charm:Need both mongodb and Elasticsearch ' \
                  'relation for Graylog to function properly. Blocking.'
            self.assertEqual(sorted(logger.output), [msg])

    def test_check_config_with_missing_option(self):
        """An empty admin-password logs an error and blocks the unit."""
        self.harness.set_leader(True)
        missing_password_config = {'port': 9000, 'admin-password': ''}
        with self.assertLogs(level='WARNING') as logger:
            self.harness.update_config(missing_password_config)
            msg = 'ERROR:charm:Need admin-password config option before setting pod spec.'
            self.assertEqual(sorted(logger.output), [msg])
            self.assertEqual(
                self.harness.model.unit.status,
                BlockedStatus("Need 'admin-password' config option."))
Exemplo n.º 20
0
class TestCharm(unittest.TestCase):
    """Relation tests for the DNS charm."""

    def setUp(self) -> NoReturn:
        """Build the harness, mark the unit as leader and start the charm."""
        self.harness = Harness(DnsCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """With no relations the charm blocks with a "Waiting for" message."""
        self.harness.charm.on.config_changed.emit()

        status = self.harness.charm.unit.status
        self.assertIsInstance(status, BlockedStatus)
        self.assertGreater(len(status.message), 0)
        self.assertTrue(status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """All four dns-source relations unblock the charm and set the spec."""
        self.harness.charm.on.config_changed.emit()
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "dns",
                    "image": "localhost:32000/ims_dns:1.0",
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "dnstcp",
                            "containerPort": 53,
                            "protocol": "TCP",
                        },
                        {
                            "name": "dnsudp",
                            "containerPort": 53,
                            "protocol": "UDP",
                        },
                    ],
                    "envConfig": {
                        "PCSCF": "10.45.30.27",
                        "ICSCF": "10.45.30.28",
                        "SCSCF": "10.45.30.29",
                        "HSS": "10.45.30.30",
                    },
                    "command": ["./init_dns.sh", "&"],
                }
            ],
        }

        # Nothing may be stored before any relation exists.
        for attr in ("pcscf", "icscf", "scscf", "hss"):
            self.assertIsNone(getattr(self.harness.charm.state, attr))

        # One dns-source relation per IMS component, each publishing its
        # hostname and private address as application data.
        components = (
            ("pcscf", "10.45.30.27"),
            ("icscf", "10.45.30.28"),
            ("scscf", "10.45.30.29"),
            ("hss", "10.45.30.30"),
        )
        for hostname, address in components:
            rel_id = self.harness.add_relation("dns-source", "dns_source")
            self.harness.add_relation_unit(rel_id, "dns_source/0")
            self.harness.update_relation_data(
                rel_id,
                "dns_source",
                {"private-address": address, "hostname": hostname},
            )

        # Every published address must now be held on the charm state.
        for attr, address in components:
            self.assertEqual(getattr(self.harness.charm.state, attr), address)

        # The unit is no longer blocked.
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # And the rendered pod spec matches the expectation exactly.
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

        status = self.harness.charm.unit.status
        self.assertGreater(len(status.message), 0)
        self.assertFalse(status.message.startswith("Waiting for "))
Exemplo n.º 21
0
class TestCharm(unittest.TestCase):
    """Harness tests for the Elasticsearch operator charm.

    The ES client helper, the ``elasticsearch`` library entry point and the
    ``current_minimum_master_nodes`` property are patched out so no real
    cluster is ever contacted.
    """

    def setUp(self):
        self.harness = Harness(ElasticsearchOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

        # patch definitions
        self.mock_es_client = mock.patch('charm.ElasticsearchOperatorCharm._get_es_client')
        self.mock_es = mock.patch('elasticsearch.Elasticsearch')
        self.mock_current_mmn = \
            mock.patch('charm.ElasticsearchOperatorCharm.current_minimum_master_nodes',
                       new_callable=mock.PropertyMock)

        # start patches
        self.mock_es_client.start()
        self.mock_es.start()
        self.mock_current_mmn.start()

        # cleanup patches
        self.addCleanup(self.mock_es_client.stop)
        self.addCleanup(self.mock_es.stop)
        self.addCleanup(self.mock_current_mmn.stop)

    def test_cluster_name_can_be_changed(self):
        """The cluster-name config option must flow into the pod config."""
        self.harness.set_leader(True)
        name_config = MINIMAL_CONFIG.copy()
        name_config['cluster-name'] = 'new name'
        self.harness.update_config(name_config)
        pod_spec, _ = self.harness.get_pod_spec()
        config = elastic_config(pod_spec)
        self.assertEqual(config['cluster']['name'],
                         name_config['cluster-name'])

    def test_seed_nodes_are_added_when_fewer_than_minimum(self):
        """unicast_hosts.txt must track the module-level SEED_SIZE."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        # create a peer relation and add a peer unit
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        self.assertIsInstance(rel_id, int)
        # NOTE(review): dash-form unit name ('elasticsearch-operator-0');
        # every other test in this class uses the 'app/N' slash form —
        # confirm this is intentional and not a typo.
        self.harness.add_relation_unit(rel_id, 'elasticsearch-operator-0')

        # check number of seed hosts is the default value
        pod_spec, _ = self.harness.get_pod_spec()
        seed_hosts_file = config_file(pod_spec, 'unicast_hosts.txt')
        self.assertEqual(charm.SEED_SIZE, len(seed_hosts_file['content'].split("\n")))

        # increase number of seed hosts and add a unit to trigger the change
        # NOTE(review): this mutates the module-global charm.SEED_SIZE and
        # never restores it, so the change can leak into later tests.
        charm.SEED_SIZE = 4
        self.harness.add_relation_unit(rel_id, 'elasticsearch-operator-1')
        self.harness.update_config(seed_config)

        # check the number of seed hosts has now increased
        pod_spec, _ = self.harness.get_pod_spec()
        seed_hosts_file = config_file(pod_spec, 'unicast_hosts.txt')
        self.assertEqual(charm.SEED_SIZE, 4)
        self.assertEqual(charm.SEED_SIZE, len(seed_hosts_file['content'].split("\n")))

    def test_num_hosts_is_equal_to_num_units(self):
        """num_hosts must equal the unit count (including the local unit)."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        # add a random number of peer units
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        self.assertIsInstance(rel_id, int)
        num_units = random.randint(2, 10)

        # elasticsearch-operator/0 already exists as the starting unit
        for i in range(1, num_units):
            self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))
        self.assertEqual(self.harness.charm.num_hosts, num_units)

    def test_minimum_master_nodes_matches_formula(self):
        # Test whether _minimum_master_nodes function
        # matches formula N / 2 + 1 for N > 2, 1 otherwise
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')

        # create inputs of three main case categories with two examples each
        # case categories: num_nodes <= 2, num_nodes is even, num_nodes is odd
        # NOTE(review): add_relation_unit is called with repeating unit names
        # across the subTest cases (the relation is never reset) — confirm
        # the harness tolerates re-adding the same unit.
        total_mmn_cases = [(1, 1), (2, 1), (4, 3), (5, 3), (6, 4), (7, 4)]
        for (num_nodes, expected_mmn) in total_mmn_cases:
            with self.subTest():
                for i in range(1, num_nodes):
                    self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))
                actual_mmn = self.harness.charm.ideal_minimum_master_nodes
                self.assertEqual(expected_mmn, actual_mmn)

    def test_dynamic_settings_payload_has_correct_minimum_master_nodes(self):
        """The dynamic-settings payload carries min master nodes = N/2 + 1."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        # create the peer relation
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')

        # when number of nodes is 6, min master nodes should be 6 / 2 + 1 = 4
        num_nodes = 6
        expected_mmn = 4
        for i in range(1, num_nodes):
            self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))
        payload = self.harness.charm._build_dynamic_settings_payload()
        actual_mmn = payload['persistent']['discovery.zen.minimum_master_nodes']
        self.assertEqual(expected_mmn, actual_mmn)

    def test_peer_changed_handler_with_single_node_via_update_status_event(self):
        """A single-node deployment must be Active after update-status."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        # check that the number of nodes and the status is correct
        # after emitting the update_status event
        self.assertEqual(self.harness.charm.num_hosts, 1)
        self.harness.charm.on.update_status.emit()
        self.assertEqual(
            self.harness.charm.unit.status,
            ActiveStatus()
        )

    @mock.patch('charm.ElasticsearchOperatorCharm.num_es_nodes', new_callable=mock.PropertyMock)
    def test_relation_changed_with_node_and_unit_mismatch(self, mock_es_nodes):
        """Fewer ES nodes than units => MaintenanceStatus on relation-changed."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        expected_num_es_nodes = 2
        mock_es_nodes.return_value = expected_num_es_nodes
        expected_num_units = 3

        # add a different number of units than number of es_nodes
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        rel = self.harness.model.get_relation('elasticsearch')
        for i in range(1, expected_num_units):
            self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))

        # check that there is a mismatch
        self.assertEqual(expected_num_es_nodes, self.harness.charm.num_es_nodes)
        self.assertEqual(expected_num_units, self.harness.charm.num_hosts)

        # check that the proper status has been set in _elasticsearch_relation_changed
        self.harness.charm.on.elasticsearch_relation_changed.emit(rel)
        self.assertEqual(
            self.harness.charm.unit.status,
            MaintenanceStatus('Waiting for nodes to join ES cluster')
        )

    @mock.patch('charm.ElasticsearchOperatorCharm.num_es_nodes', new_callable=mock.PropertyMock)
    def test_relation_changed_with_node_and_unit_mismatch_via_update_status(self, mock_es_nodes):
        """Same mismatch must also be detected via the update-status hook."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        expected_num_es_nodes = 2
        mock_es_nodes.return_value = expected_num_es_nodes
        expected_num_units = 3

        # add a different number of units than number of es_nodes
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        for i in range(1, expected_num_units):
            self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))

        # check that there is a mismatch
        self.assertEqual(expected_num_es_nodes, self.harness.charm.num_es_nodes)
        self.assertEqual(expected_num_units, self.harness.charm.num_hosts)

        # check that the proper status has been set in _elasticsearch_relation_changed
        self.harness.charm.on.update_status.emit()
        self.assertEqual(
            self.harness.charm.unit.status,
            MaintenanceStatus('Waiting for nodes to join ES cluster')
        )

    @mock.patch('charm.ElasticsearchOperatorCharm.num_es_nodes', new_callable=mock.PropertyMock)
    def test_relation_changed_with_node_and_unit_match(self, mock_es_nodes):
        """Matching counts configure dynamic settings and go Active."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        expected_num_es_nodes = 3
        mock_es_nodes.return_value = expected_num_es_nodes
        expected_num_units = 3

        # add same number of units as number of es_nodes
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        rel = self.harness.model.get_relation('elasticsearch')
        for i in range(1, expected_num_units):
            self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))

        # check that there is a match
        self.assertEqual(expected_num_es_nodes, self.harness.charm.num_es_nodes)
        self.assertEqual(expected_num_units, self.harness.charm.num_hosts)

        # check that the proper status has been set and that the logs are correct
        with self.assertLogs(level='INFO') as logger:
            self.harness.charm.on.elasticsearch_relation_changed.emit(rel)
            # check the logs
            expected_logs = ['INFO:charm:Attempting to configure dynamic settings.']
            self.assertEqual(sorted(logger.output), expected_logs)
            # check the status
            self.assertEqual(
                self.harness.charm.unit.status,
                ActiveStatus()
            )

    @mock.patch('charm.ElasticsearchOperatorCharm.num_es_nodes', new_callable=mock.PropertyMock)
    def test_relation_changed_with_node_and_unit_match_via_update_status(self, mock_es_nodes):
        """Matching counts via update-status also configure and go Active."""
        self.harness.set_leader(True)
        seed_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(seed_config)

        expected_num_es_nodes = 3
        mock_es_nodes.return_value = expected_num_es_nodes
        expected_num_units = 3

        # add same number of units as number of es_nodes
        rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        for i in range(1, expected_num_units):
            self.harness.add_relation_unit(rel_id, 'elasticsearch-operator/{}'.format(i))

        # check that there is a match
        self.assertEqual(expected_num_es_nodes, self.harness.charm.num_es_nodes)
        self.assertEqual(expected_num_units, self.harness.charm.num_hosts)

        # check that the proper status has been set and that the logs are correct
        with self.assertLogs(level='INFO') as logger:
            self.harness.charm.on.update_status.emit()
            # check the logs (there will be two calls to _configure_dynamic_settings
            expected_logs = ['INFO:charm:Attempting to configure dynamic settings.']
            self.assertEqual(sorted(logger.output), expected_logs)
            # check the status
            self.assertEqual(
                self.harness.charm.unit.status,
                ActiveStatus()
            )
Exemplo n.º 22
0
class TestCharm(unittest.TestCase):
    """Harness tests for the HSS charm's mysql and dns-source relations."""
    def setUp(self) -> NoReturn:
        """Create the harness, mark the unit as leader and start the charm."""
        self.harness = Harness(HssCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Without the mysql relation the charm blocks, asking for it."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """With mysql related, the pod spec is rendered as expected."""
        self.harness.charm.on.start.emit()
        # NOTE(review): several values below look scrubbed ("******") —
        # restore real fixture credentials if this suite is revived.
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "hss",
                "image":
                "localhost:32000/ims_hss:1.0",
                "imagePullPolicy":
                "Always",
                "ports": [
                    {
                        "name": "diahss",
                        "containerPort": 3868,
                        "protocol": "TCP"
                    },
                    {
                        "name": "hss",
                        "containerPort": 8080,
                        "protocol": "TCP"
                    },
                ],
                "envConfig": {
                    "MYSQL_HOST": "mysql-endpoints",
                    "MYSQL_USER": "******",
                    "MYSQL_ROOT_PASSWORD": "******",
                },
                "command": ["./init_hss.sh", "&"],
            }],
        }
        # Check if mysql is initialized
        self.assertIsNone(self.harness.charm.state.mysql)

        # Initializing mysql relation with its application data
        mysql_relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(mysql_relation_id, "mysql/0")
        self.harness.update_relation_data(
            mysql_relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Checking that the mysql hostname was stored on the charm state
        self.assertEqual(self.harness.charm.state.mysql, "mysql")

        # Verifying status: no longer blocked
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_mysql_app_relation_changed(self) -> NoReturn:
        """Mysql application data arriving must unblock the unit."""
        self.harness.charm.on.start.emit()

        self.assertIsNone(self.harness.charm.state.mysql)

        relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(relation_id, "mysql/0")
        self.harness.update_relation_data(
            relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Verifying status: not blocked once mysql data is present
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_hss_info(self) -> NoReturn:
        """Application data written on dns-source must round-trip intact."""
        expected_result = {"private-address": "127.1.1.1", "hostname": "hss"}
        self.harness.charm.on.start.emit()
        relation_id = self.harness.add_relation("dns-source", "dns_source")
        relation_data = {"private-address": "127.1.1.1", "hostname": "hss"}
        self.harness.update_relation_data(relation_id, "dns_source",
                                          relation_data)
        relation_data = self.harness.get_relation_data(relation_id,
                                                       "dns_source")
        self.assertDictEqual(expected_result, relation_data)