import unittest

from ops.charm import CharmBase
from ops.testing import Harness

import ops_ha_interface  # charm-local library that provides HAServiceRequires


class HAServiceRequires(unittest.TestCase):

    class MyCharm(CharmBase):

        def __init__(self, *args):
            super().__init__(*args)
            self.seen_events = []
            self.ha = ops_ha_interface.HAServiceRequires(self, 'ha')

            self.framework.observe(
                self.ha.on.ha_ready,
                self._log_event)

        def _log_event(self, event):
            self.seen_events.append(type(event).__name__)

    def setUp(self):
        super().setUp()
        self.harness = Harness(
            self.MyCharm,
            meta='''
name: my-charm
requires:
  ha:
    interface: hacluster
    scope: container
'''
        )

    def test_local_vars(self):
        self.harness.begin()
        self.harness.charm.ha.set_local('a', 'b')
        self.assertEqual(
            self.harness.charm.ha.get_local('a'),
            'b')
        self.harness.charm.ha.set_local(**{'c': 'd', 'e': 'f'})
        self.assertEqual(
            self.harness.charm.ha.get_local('c'),
            'd')
        self.assertEqual(
            self.harness.charm.ha.get_local('e'),
            'f')
        self.harness.charm.ha.set_local(data={'g': 'h', 'i': 'j'})
        self.assertEqual(
            self.harness.charm.ha.get_local('g'),
            'h')
        self.assertEqual(
            self.harness.charm.ha.get_local('i'),
            'j')
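
        # set_local accepts a positional key/value pair, keyword arguments, or
        # a dict passed via the data= keyword, as exercised above.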

    def test_remote_vars(self):
        self.harness.begin()
        rel_id = self.harness.add_relation(
            'ha',
            'hacluster')
        self.harness.add_relation_unit(
            rel_id,
            'hacluster/0')
        self.harness.charm.ha.set_remote('a', 'b')
        rel_data = self.harness.get_relation_data(
            rel_id,
            'my-charm/0')
        self.assertEqual(rel_data, {'a': 'b'})

    def test_get_remote_all(self):
        self.harness.begin()
        rel_id1 = self.harness.add_relation(
            'ha',
            'hacluster-a')
        self.harness.add_relation_unit(
            rel_id1,
            'hacluster-a/0')
        self.harness.update_relation_data(
            rel_id1,
            'hacluster-a/0',
            {'fruit': 'banana'})
        self.harness.add_relation_unit(
            rel_id1,
            'hacluster-a/1')
        self.harness.update_relation_data(
            rel_id1,
            'hacluster-a/1',
            {'fruit': 'orange'})
        rel_id2 = self.harness.add_relation(
            'ha',
            'hacluster-b')
        self.harness.add_relation_unit(
            rel_id2,
            'hacluster-b/0')
        self.harness.update_relation_data(
            rel_id2,
            'hacluster-b/0',
            {'fruit': 'grape'})
        self.harness.add_relation_unit(
            rel_id2,
            'hacluster-b/1')
        self.harness.update_relation_data(
            rel_id2,
            'hacluster-b/1',
            {'veg': 'carrot'})
        self.assertEqual(
            self.harness.charm.ha.get_remote_all('fruit'),
            ['orange', 'grape', 'banana'])
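
        # get_remote_all gathers the value for the given key from every remote
        # unit across all 'ha' relations.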

    def test_ha_ready(self):
        self.harness.begin()
        self.assertEqual(
            self.harness.charm.seen_events,
            [])
        rel_id = self.harness.add_relation(
            'ha',
            'hacluster')
        self.harness.add_relation_unit(
            rel_id,
            'hacluster/0')
        self.harness.update_relation_data(
            rel_id,
            'hacluster/0',
            {'clustered': 'yes'})
        self.assertEqual(
            self.harness.charm.seen_events,
            ['HAServiceReadyEvent'])

    def test_data_changed(self):
        self.harness.begin()
        self.assertTrue(
            self.harness.charm.ha.data_changed(
                'relation-data', {'a': 'b'}))
        self.assertFalse(
            self.harness.charm.ha.data_changed(
                'relation-data', {'a': 'b'}))
        self.assertTrue(
            self.harness.charm.ha.data_changed(
                'relation-data', {'a': 'c'}))
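
    # The tests above assume an interface class along these lines (a hedged
    # sketch, not the actual ops_ha_interface source): on each relation change
    # it emits `ha_ready` once a remote unit reports 'clustered: yes', and
    # `data_changed(label, data)` returns True only when the given data
    # differs from what was last recorded under that label.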
Example No. 2
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(HelloKubeconCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_gosherve_layer(self):
        # Test with empty config.
        self.assertEqual(self.harness.charm.config["redirect-map"], "https://jnsgr.uk/demo-routes")
        expected = {
            "summary": "gosherve layer",
            "description": "pebble config layer for gosherve",
            "services": {
                "gosherve": {
                    "override": "replace",
                    "summary": "gosherve",
                    "command": "/gosherve",
                    "startup": "enabled",
                    "environment": {
                        "REDIRECT_MAP_URL": "https://jnsgr.uk/demo-routes",
                        "WEBROOT": "/srv",
                    },
                }
            },
        }
        self.assertEqual(self.harness.charm._gosherve_layer(), expected)
        # And now test with a different value in the redirect-map config option.
        # Disable hook firing first.
        self.harness.disable_hooks()
        self.harness.update_config({"redirect-map": "test value"})
        expected["services"]["gosherve"]["environment"]["REDIRECT_MAP_URL"] = "test value"
        self.assertEqual(self.harness.charm._gosherve_layer(), expected)

    def test_on_config_changed(self):
        plan = self.harness.get_container_pebble_plan("gosherve")
        self.assertEqual(plan.to_dict(), {})
        # Trigger a config-changed hook. Since there was no plan initially, the
        # "gosherve" service in the container won't be running so we'll be
        # testing the `is_running() == False` codepath.
        self.harness.update_config({"redirect-map": "test value"})
        plan = self.harness.get_container_pebble_plan("gosherve")
        # Get the expected layer from the gosherve_layer method (tested above)
        expected = self.harness.charm._gosherve_layer()
        expected.pop("summary", "")
        expected.pop("description", "")
        # Check the plan is as expected
        self.assertEqual(plan.to_dict(), expected)
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())
        container = self.harness.model.unit.get_container("gosherve")
        self.assertEqual(container.get_service("gosherve").is_running(), True)

        # Now test again with different config, knowing that the "gosherve"
        # service is running (because we've just tested it above), so we'll
        # be testing the `is_running() == True` codepath.
        self.harness.update_config({"redirect-map": "test2 value"})
        plan = self.harness.get_container_pebble_plan("gosherve")
        # Adjust the expected plan
        expected["services"]["gosherve"]["environment"]["REDIRECT_MAP_URL"] = "test2 value"
        self.assertEqual(plan.to_dict(), expected)
        self.assertEqual(container.get_service("gosherve").is_running(), True)
        self.assertEqual(self.harness.model.unit.status, ActiveStatus())

        # And finally test again with the same config to ensure we exercise
        # the case where the plan we've created matches the active one. We're
        # going to mock the container.stop and container.start calls to confirm
        # they were not called.
        with patch('ops.model.Container.start') as _start, patch('ops.model.Container.stop') as _stop:
            self.harness.charm.on.config_changed.emit()
            _start.assert_not_called()
            _stop.assert_not_called()

    @patch("charm.HelloKubeconCharm._fetch_site")
    def test_on_install(self, _fetch_site):
        self.harness.charm._on_install("mock_event")
        _fetch_site.assert_called_once()

    @patch("charm.HelloKubeconCharm._fetch_site")
    def test_pull_site_action(self, _fetch_site):
        mock_event = Mock()
        self.harness.charm._pull_site_action(mock_event)
        _fetch_site.assert_called_once()
        mock_event.set_results.assert_called_once_with({"result": "site pulled"})
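        # The assertion above assumes an action handler roughly like this
        # illustrative sketch (not code from this file):
        #
        #     def _pull_site_action(self, event):
        #         self._fetch_site()
        #         event.set_results({"result": "site pulled"})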
Example No. 3
    def test__init__works_without_a_hitch(self):
        # Setup
        harness = Harness(charm.Charm)

        # Exercise
        harness.begin()
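
        # Note: begin() only instantiates the charm without emitting events;
        # Harness.begin_with_initial_hooks() could be used instead to also
        # fire the usual install/config-changed startup sequence.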
Example No. 4
class GrafanaCharmTest(unittest.TestCase):
    def setUp(self) -> None:
        self.harness = Harness(GrafanaK8s)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test__grafana_source_data(self):

        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id, 'prometheus/0')
        self.assertIsInstance(rel_id, int)

        # test that the unit data propagates correctly, i.e. that
        # update_relation_data triggers the relation-changed hook
        self.harness.update_relation_data(
            rel_id, 'prometheus/0', {
                'private-address': '192.0.2.1',
                'port': 1234,
                'source-type': 'prometheus',
                'source-name': 'prometheus-app',
            })

        expected_first_source_data = {
            'private-address': '192.0.2.1',
            'port': 1234,
            'source-name': 'prometheus-app',
            'source-type': 'prometheus',
            'isDefault': 'true',
            'unit_name': 'prometheus/0'
        }
        self.assertEqual(expected_first_source_data,
                         dict(self.harness.charm.datastore.sources[rel_id]))

        # test that clearing the relation data leads to
        # the datastore for this data source being cleared
        self.harness.update_relation_data(rel_id, 'prometheus/0', {
            'private-address': None,
            'port': None,
        })
        self.assertEqual(None,
                         self.harness.charm.datastore.sources.get(rel_id))

    def test__ha_database_and_status_check(self):
        """If there is a peer connection and no database (needed for HA),
        the charm should put the application in a blocked state."""

        # start charm with one peer and no database relation
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.unit.status,
                         APPLICATION_ACTIVE_STATUS)

        # ensure _check_high_availability() ends up with the correct status
        status = self.harness.charm._check_high_availability()
        self.assertEqual(status, SINGLE_NODE_STATUS)

        # make sure that triggering 'update-status' hook does not
        # overwrite the current active status
        self.harness.charm.on.update_status.emit()
        self.assertEqual(self.harness.charm.unit.status,
                         APPLICATION_ACTIVE_STATUS)

        peer_rel_id = self.harness.add_relation('grafana', 'grafana')

        # add the main unit's data; updating the relation data for 'grafana/0'
        # (the charm's own unit) triggers the grafana-relation-changed hook,
        # so add_relation_unit is not needed here
        self.harness.update_relation_data(peer_rel_id, 'grafana/0',
                                          {'private-address': '10.1.2.3'})

        # add peer unit and its data
        self.harness.add_relation_unit(peer_rel_id, 'grafana/1')
        self.harness.update_relation_data(peer_rel_id, 'grafana/1',
                                          {'private-address': '10.0.0.1'})

        self.assertTrue(self.harness.charm.has_peer)
        self.assertFalse(self.harness.charm.has_db)
        self.assertEqual(self.harness.charm.unit.status, HA_NOT_READY_STATUS)

        # ensure update-status hook doesn't overwrite this
        self.harness.charm.on.update_status.emit()
        self.assertEqual(self.harness.charm.unit.status, HA_NOT_READY_STATUS)

        # now add the database connection and the model should
        # not have a blocked status
        db_rel_id = self.harness.add_relation('database', 'mysql')
        self.harness.add_relation_unit(db_rel_id, 'mysql/0')
        self.harness.update_relation_data(
            db_rel_id, 'mysql/0', {
                'type': 'mysql',
                'host': '10.10.10.10:3306',
                'name': 'test_mysql_db',
                'user': '******',
                'password': '******',
            })
        self.assertTrue(self.harness.charm.has_db)
        self.assertEqual(self.harness.charm.unit.status,
                         APPLICATION_ACTIVE_STATUS)

        # ensure _check_high_availability() ends up with the correct status
        status = self.harness.charm._check_high_availability()
        self.assertEqual(status, HA_READY_STATUS)

    def test__database_relation_data(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.database, {})

        # add relation and update relation data
        rel_id = self.harness.add_relation('database', 'mysql')
        rel = self.harness.model.get_relation('database')
        self.harness.add_relation_unit(rel_id, 'mysql/0')
        test_relation_data = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'my-test-db',
            'user': '******',
            'password': '******',
        }
        self.harness.update_relation_data(rel_id, 'mysql/0',
                                          test_relation_data)
        # check that charm datastore was properly set
        self.assertEqual(dict(self.harness.charm.datastore.database),
                         test_relation_data)

        # now depart this relation and ensure the datastore is emptied
        self.harness.charm.on.database_relation_departed.emit(rel)
        self.assertEqual({}, dict(self.harness.charm.datastore.database))

    def test__multiple_database_relation_handling(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.database, {})

        # add first database relation
        self.harness.add_relation('database', 'mysql')

        # add a second database relation; looking up the relation afterwards
        # should raise, since get_relation cannot disambiguate between the two
        # related applications
        with self.assertRaises(TooManyRelatedAppsError):
            self.harness.add_relation('database', 'mysql')
            self.harness.charm.model.get_relation('database')

    def test__multiple_source_relations(self):
        """This will test data-source config text with multiple sources.

        Specifically, it will test multiple grafana-source relations."""
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        # add first relation
        rel_id0 = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id0, 'prometheus/0')

        # add test data to grafana-source relation
        # and test that _make_data_source_config_text() works as expected
        prom_source_data = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-type': 'prometheus'
        }
        self.harness.update_relation_data(rel_id0, 'prometheus/0',
                                          prom_source_data)
        header_text = textwrap.dedent("""
                apiVersion: 1

                datasources:""")
        correct_config_text0 = header_text + textwrap.dedent("""
            - name: prometheus/0
              type: prometheus
              access: proxy
              url: http://192.0.2.1:4321
              isDefault: true
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}""").format(
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['basic_auth_username'])

        generated_text = self.harness.charm._make_data_source_config_text()
        self.assertEqual(correct_config_text0 + '\n', generated_text)

        # add another source relation and check the resulting config text
        jaeger_source_data = {
            'private-address': '255.255.255.0',
            'port': 7890,
            'source-type': 'jaeger',
            'source-name': 'jaeger-application'
        }
        rel_id1 = self.harness.add_relation('grafana-source', 'jaeger')
        self.harness.add_relation_unit(rel_id1, 'jaeger/0')
        self.harness.update_relation_data(rel_id1, 'jaeger/0',
                                          jaeger_source_data)

        correct_config_text1 = correct_config_text0 + textwrap.dedent("""
            - name: jaeger-application
              type: jaeger
              access: proxy
              url: http://255.255.255.0:7890
              isDefault: false
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}""").format(
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['basic_auth_username'])

        generated_text = self.harness.charm._make_data_source_config_text()
        self.assertEqual(correct_config_text1 + '\n', generated_text)

        # clearing the second source's data should first produce config text
        # containing a deleteDatasources entry for it, and on the next call
        # return text identical to the original single-source config
        self.harness.update_relation_data(rel_id1, 'jaeger/0', {
            'private-address': None,
            'port': None,
        })
        generated_text = self.harness.charm._make_data_source_config_text()
        correct_text_after_removal = textwrap.dedent("""
            apiVersion: 1

            deleteDatasources:
            - name: jaeger-application
              orgId: 1

            datasources:
            - name: prometheus/0
              type: prometheus
              access: proxy
              url: http://192.0.2.1:4321
              isDefault: true
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}""").format(
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['basic_auth_username'])

        self.assertEqual(correct_text_after_removal + '\n', generated_text)

        # now test that the 'deleteDatasources' is gone
        generated_text = self.harness.charm._make_data_source_config_text()
        self.assertEqual(correct_config_text0 + '\n', generated_text)

    def test__check_config_missing_image_password(self):
        self.harness.update_config(MISSING_IMAGE_PASSWORD_CONFIG)

        # test the return value of _check_config
        missing = self.harness.charm._check_config()
        expected = ['grafana_image_password']
        self.assertEqual(missing, expected)

    def test__check_config_missing_image_path(self):
        self.harness.update_config(MISSING_IMAGE_CONFIG)

        # test the return value of _check_config
        missing = self.harness.charm._check_config()
        expected = ['grafana_image_path']
        self.assertEqual(missing, expected)

    def test__pod_spec_container_datasources(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        # add first relation
        rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id, 'prometheus/0')

        # add test data to grafana-source relation
        # and test that _make_data_source_config_text() works as expected
        prom_source_data = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-type': 'prometheus'
        }
        self.harness.update_relation_data(rel_id, 'prometheus/0',
                                          prom_source_data)

        data_source_file_text = textwrap.dedent("""
            apiVersion: 1

            datasources:
            - name: prometheus/0
              type: prometheus
              access: proxy
              url: http://192.0.2.1:4321
              isDefault: true
              editable: true
              orgId: 1
              basicAuthUser: {0}
              secureJsonData:
                basicAuthPassword: {1}
              """).format(self.harness.model.config['basic_auth_password'],
                          self.harness.model.config['basic_auth_username'])

        config_ini_file_text = textwrap.dedent("""
        [paths]
        provisioning = {0}

        [security]
        admin_user = {1}
        admin_password = {2}

        [log]
        mode = {3}
        level = {4}
        """).format(
            self.harness.model.config['provisioning_path'],
            self.harness.model.config['basic_auth_username'],
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['grafana_log_mode'],
            self.harness.model.config['grafana_log_level'],
        )

        expected_container_files_spec = [{
            'name':
            'grafana-datasources',
            'mountPath':
            self.harness.model.config['datasource_mount_path'],
            'files': {
                'datasources.yaml': data_source_file_text,
            },
        }, {
            'name':
            'grafana-config-ini',
            'mountPath':
            self.harness.model.config['config_ini_mount_path'],
            'files': {
                'grafana.ini': config_ini_file_text
            }
        }]
        pod_spec = self.harness.get_pod_spec()[0]
        container = get_container(pod_spec, 'grafana')
        actual_container_files_spec = container['files']
        self.assertEqual(expected_container_files_spec,
                         actual_container_files_spec)

    def test__access_sqlite_storage_location(self):
        expected_path = '/var/lib/grafana'
        actual_path = self.harness.charm.meta.storages['sqlitedb'].location
        self.assertEqual(expected_path, actual_path)

    def test__config_ini_without_database(self):
        self.harness.update_config(BASE_CONFIG)
        expected_config_text = textwrap.dedent("""
        [paths]
        provisioning = {0}

        [security]
        admin_user = {1}
        admin_password = {2}

        [log]
        mode = {3}
        level = {4}
        """).format(
            self.harness.model.config['provisioning_path'],
            self.harness.model.config['basic_auth_username'],
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['grafana_log_mode'],
            self.harness.model.config['grafana_log_level'],
        )

        actual_config_text = self.harness.charm._make_config_ini_text()
        self.assertEqual(expected_config_text, actual_config_text)

    def test__config_ini_with_database(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # add database relation and update relation data
        rel_id = self.harness.add_relation('database', 'mysql')
        # rel = self.harness.charm.model.get_relation('database')
        self.harness.add_relation_unit(rel_id, 'mysql/0')
        test_relation_data = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'my-test-db',
            'user': 'test-user',
            'password': 'super!secret!password',
        }
        self.harness.update_relation_data(rel_id, 'mysql/0',
                                          test_relation_data)

        # test the results of _make_config_ini_text()
        expected_config_text = textwrap.dedent(
            """
        [paths]
        provisioning = {0}

        [security]
        admin_user = {1}
        admin_password = {2}

        [log]
        mode = {3}
        level = {4}

        [database]
        type = mysql
        host = 0.1.2.3:3306
        name = my-test-db
        user = test-user
        password = super!secret!password
        url = mysql://test-user:super!secret!password@0.1.2.3:3306/my-test-db"""
        ).format(
            self.harness.model.config['provisioning_path'],
            self.harness.model.config['basic_auth_username'],
            self.harness.model.config['basic_auth_password'],
            self.harness.model.config['grafana_log_mode'],
            self.harness.model.config['grafana_log_level'],
        )

        actual_config_text = self.harness.charm._make_config_ini_text()
        self.assertEqual(expected_config_text, actual_config_text)

    def test__duplicate_source_names(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.datastore.sources, {})

        # add first relation
        p_rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        p_rel = self.harness.model.get_relation('grafana-source', p_rel_id)
        self.harness.add_relation_unit(p_rel_id, 'prometheus/0')

        # add test data to grafana-source relation
        prom_source_data0 = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-type': 'prometheus',
            'source-name': 'duplicate-source-name'
        }
        self.harness.update_relation_data(p_rel_id, 'prometheus/0',
                                          prom_source_data0)
        expected_source_data = {
            'private-address': '192.0.2.1',
            'port': 4321,
            'source-name': 'duplicate-source-name',
            'source-type': 'prometheus',
            'isDefault': 'true',
            'unit_name': 'prometheus/0'
        }
        self.assertEqual(dict(self.harness.charm.datastore.sources[p_rel_id]),
                         expected_source_data)

        # add second source with the same name as the first source
        g_rel_id = self.harness.add_relation('grafana-source', 'graphite')
        self.harness.add_relation_unit(g_rel_id, 'graphite/0')

        graphite_source_data0 = {
            'private-address': '192.12.23.34',
            'port': 4321,
            'source-type': 'graphite',
            'source-name': 'duplicate-source-name'
        }
        self.harness.update_relation_data(g_rel_id, 'graphite/0',
                                          graphite_source_data0)
        self.assertEqual(None,
                         self.harness.charm.datastore.sources.get(g_rel_id))
        self.assertEqual(1, len(self.harness.charm.datastore.sources))

        # now remove the relation and ensure datastore source-name is removed
        self.harness.charm.on.grafana_source_relation_departed.emit(p_rel)
        self.assertEqual(None,
                         self.harness.charm.datastore.sources.get(p_rel_id))
        self.assertEqual(0, len(self.harness.charm.datastore.sources))

    def test__idempotent_datasource_file_hash(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        rel_id = self.harness.add_relation('grafana-source', 'prometheus')
        self.harness.add_relation_unit(rel_id, 'prometheus/0')
        self.assertIsInstance(rel_id, int)

        # test that the unit data propagates correctly, i.e. that
        # update_relation_data triggers the relation-changed hook
        self.harness.update_relation_data(
            rel_id, 'prometheus/0', {
                'private-address': '192.0.2.1',
                'port': 1234,
                'source-type': 'prometheus',
                'source-name': 'prometheus-app',
            })

        # get a hash of the created file and check that it matches the pod spec
        container = get_container(self.harness.get_pod_spec()[0], 'grafana')
        hash_text = hashlib.md5(container['files'][0]['files']
                                ['datasources.yaml'].encode()).hexdigest()
        self.assertEqual(container['config']['DATASOURCES_YAML'], hash_text)

        # test the idempotence of the call by re-configuring the pod spec
        self.harness.charm.configure_pod()
        self.assertEqual(container['config']['DATASOURCES_YAML'], hash_text)
Example No. 5
class TestTransform(unittest.TestCase):
    """Test that the promql-transform implementation works."""
    def setUp(self):
        self.harness = Harness(TransformProviderCharm, meta=META)
        self.harness.set_model_name("transform")
        self.addCleanup(self.harness.cleanup)
        self.harness.add_resource("promql-transform-amd64", "dummy resource")
        self.harness.begin()

    # pylint: disable=protected-access
    @unittest.mock.patch("platform.processor", lambda: "teakettle")
    def test_disable_on_invalid_arch(self):
        transform = self.harness.charm.transformer
        self.assertIsNone(transform.path)
        self.assertTrue(transform._disabled)

    # pylint: disable=protected-access
    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_gives_path_on_valid_arch(self):
        """When given a valid arch, it should return the resource path."""
        transformer = self.harness.charm.transformer
        self.assertIsInstance(transformer.path, PosixPath)

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_setup_transformer(self):
        """When setup it should know the path to the binary."""
        transform = self.harness.charm.transformer

        self.assertIsInstance(transform.path, PosixPath)

        p = str(transform.path)
        self.assertTrue(
            p.startswith("/") and p.endswith("promql-transform-amd64"))

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    @unittest.mock.patch("subprocess.run")
    def test_returns_original_expression_when_subprocess_call_errors(
            self, mocked_run):
        mocked_run.side_effect = subprocess.CalledProcessError(
            returncode=10, cmd="promql-transform", stderr="")

        transform = self.harness.charm.transformer
        output = transform.apply_label_matchers({
            "groups": [{
                "alert": "CPUOverUse",
                "expr": "process_cpu_seconds_total > 0.12",
                "for": "0m",
                "labels": {
                    "severity": "Low",
                    "juju_model": "None",
                    "juju_model_uuid": "f2c1b2a6-e006-11eb-ba80-0242ac130004",
                    "juju_application": "consumer-tester",
                },
                "annotations": {
                    "summary":
                    "Instance {{ $labels.instance }} CPU over use",
                    "description":
                    "{{ $labels.instance }} of job "
                    "{{ $labels.job }} has used too much CPU.",
                },
            }]
        })
        self.assertEqual(output["groups"][0]["expr"],
                         "process_cpu_seconds_total > 0.12")

    @unittest.mock.patch("platform.processor", lambda: "invalid")
    def test_uses_original_expression_when_resource_missing(self):
        transform = self.harness.charm.transformer
        output = transform.apply_label_matchers({
            "groups": [{
                "alert": "CPUOverUse",
                "expr": "process_cpu_seconds_total > 0.12",
                "for": "0m",
                "labels": {
                    "severity": "Low",
                    "juju_model": "None",
                    "juju_model_uuid": "f2c1b2a6-e006-11eb-ba80-0242ac130004",
                    "juju_application": "consumer-tester",
                },
                "annotations": {
                    "summary":
                    "Instance {{ $labels.instance }} CPU over use",
                    "description":
                    "{{ $labels.instance }} of job "
                    "{{ $labels.job }} has used too much CPU.",
                },
            }]
        })
        self.assertEqual(output["groups"][0]["expr"],
                         "process_cpu_seconds_total > 0.12")

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_fetches_the_correct_expression(self):
        self.harness.add_resource(
            "promql-transform-amd64",
            open("./promql-transform", "rb").read(),
        )
        transform = self.harness.charm.transformer

        output = transform._apply_label_matcher(
            "up", {"juju_model": "some_juju_model"})
        assert output == 'up{juju_model="some_juju_model"}'

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_handles_comparisons(self):
        self.harness.add_resource(
            "promql-transform-amd64",
            open("./promql-transform", "rb").read(),
        )
        transform = self.harness.charm.transformer
        output = transform._apply_label_matcher(
            "up > 1", {"juju_model": "some_juju_model"})
        assert output == 'up{juju_model="some_juju_model"} > 1'

    @unittest.mock.patch("platform.processor", lambda: "x86_64")
    def test_handles_multiple_labels(self):
        self.harness.add_resource(
            "promql-transform-amd64",
            open("./promql-transform", "rb").read(),
        )
        transform = self.harness.charm.transformer
        output = transform._apply_label_matcher(
            "up > 1",
            {
                "juju_model": "some_juju_model",
                "juju_model_uuid": "123ABC",
                "juju_application": "some_application",
                "juju_unit": "some_application/1",
            },
        )
        assert (
            output ==
            'up{juju_application="some_application",juju_model="some_juju_model"'
            ',juju_model_uuid="123ABC",juju_unit="some_application/1"} > 1')
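
    # Taken together, these tests exercise a wrapper around the
    # promql-transform binary that injects Juju topology label matchers
    # (juju_model, juju_model_uuid, juju_application, juju_unit) into PromQL
    # expressions, and that falls back to the original expression when the
    # binary is unavailable or exits with an error.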
Example No. 6
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_image_path_is_required(self):
        missing_image_config = {
            'prometheus-image-path': '',
            'prometheus-image-username': '',
            'prometheus-image-password': ''
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_image_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-path']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-path']
        self.assertEqual(missing, expected)

    def test_password_is_required_when_username_is_set(self):
        missing_password_config = {
            'prometheus-image-path': 'prom/prometheus:latest',
            'prometheus-image-username': '******',
            'prometheus-image-password': '',
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_password_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-password']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-password']
        self.assertEqual(missing, expected)

    def test_alerting_config_is_updated_by_alertmanager_relation(self):
        self.harness.set_leader(True)

        # check alerting config is empty without alertmanager relation
        self.harness.update_config(MINIMAL_CONFIG)
        self.assertEqual(self.harness.charm.stored.alertmanagers, {})
        rel_id = self.harness.add_relation('alertmanager', 'smtp')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'smtp/0')
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), str())

        # check alerting config is updated when a alertmanager joins
        self.harness.update_relation_data(
            rel_id, 'smtp/0',
            {'alerting_config': yaml.dump(SMTP_ALERTING_CONFIG)})

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SMTP_ALERTING_CONFIG)

    def test_alerting_config_is_removed_when_alertmanager_departs(self):
        self.harness.set_leader(True)

        # ensure there is a non-empty alerting config
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('alertmanager', 'smtp')
        rel = self.harness.model.get_relation('alertmanager')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'smtp/0')
        self.harness.update_relation_data(
            rel_id, 'smtp/0',
            {'alerting_config': yaml.dump(SMTP_ALERTING_CONFIG)})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SMTP_ALERTING_CONFIG)

        # check alerting config is removed when relation departs
        self.harness.charm.on.alertmanager_relation_departed.emit(rel)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), str())

    def test_grafana_is_provided_port_and_source(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('grafana-source', 'grafana')
        self.harness.add_relation_unit(rel_id, 'grafana/0')
        self.harness.update_relation_data(rel_id, 'grafana/0', {})
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.unit.name)
        self.assertEqual(int(data['port']), MINIMAL_CONFIG['advertised-port'])
        self.assertEqual(data['source-type'], 'prometheus')

    def test_default_cli_log_level_is_info(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'info')

    def test_invalid_log_level_defaults_to_debug(self):
        self.harness.set_leader(True)
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config['log-level'] = 'bad-level'
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'debug')

    def test_valid_log_level_is_accepted(self):
        self.harness.set_leader(True)
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config['log-level'] = 'warn'
        self.harness.update_config(valid_log_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'warn')

    def test_web_admin_api_can_be_enabled(self):
        self.harness.set_leader(True)

        # without web admin enabled
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--web.enable-admin-api'), None)

        # with web admin enabled
        admin_api_config = MINIMAL_CONFIG.copy()
        admin_api_config['web-enable-admin-api'] = True
        self.harness.update_config(admin_api_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--web.enable-admin-api'),
                         '--web.enable-admin-api')

    def test_web_page_title_can_be_set(self):
        self.harness.set_leader(True)
        web_config = MINIMAL_CONFIG.copy()
        web_config['web-page-title'] = 'Prometheus Test Page'
        self.harness.update_config(web_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(
            cli_arg(pod_spec, '--web.page-title')[1:-1],  # stripping quotes
            web_config['web-page-title'])

    def test_tsdb_compression_is_not_enabled_by_default(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         None)

    def test_tsdb_compression_can_be_enabled(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        compress_config['tsdb-wal-compression'] = True
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         '--storage.tsdb.wal-compression')

    def test_valid_tsdb_retention_times_can_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            retention_time = '{}{}'.format(1, unit)
            retention_time_config['tsdb-retention-time'] = retention_time
            self.harness.update_config(retention_time_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(
                cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                retention_time)

    def test_invalid_tsdb_retention_times_can_not_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = '{}{}'.format(1, 'x')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = ["ERROR:charm:Invalid unit x in time spec"]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

        # invalid time value
        retention_time = '{}{}'.format(0, 'd')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = [
                "ERROR:charm:Expected positive time spec but got 0"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

    def test_max_web_connections_can_be_set(self):
        self.harness.set_leader(True)
        maxcon_config = MINIMAL_CONFIG.copy()
        maxcon_config['web-max-connections'] = 512
        self.harness.update_config(maxcon_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(int(cli_arg(pod_spec, '--web.max-connections')),
                         maxcon_config['web-max-connections'])

    def test_alertmanager_queue_capacity_can_be_set(self):
        self.harness.set_leader(True)
        queue_config = MINIMAL_CONFIG.copy()
        queue_config['alertmanager-notification-queue-capacity'] = 512
        self.harness.update_config(queue_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(
            int(cli_arg(pod_spec,
                        '--alertmanager.notification-queue-capacity')),
            queue_config['alertmanager-notification-queue-capacity'])

    def test_alertmanager_timeout_can_be_set(self):
        self.harness.set_leader(True)
        timeout_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            timeout_config['alertmanager-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(timeout_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(cli_arg(pod_spec, '--alertmanager.timeout'),
                             timeout_config['alertmanager-timeout'])

    def test_global_scrape_interval_can_be_set(self):
        self.harness.set_leader(True)
        scrapeint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapeint_config['scrape-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapeint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_interval'],
                             scrapeint_config['scrape-interval'])

    def test_global_scrape_timeout_can_be_set(self):
        self.harness.set_leader(True)
        scrapetime_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapetime_config['scrape-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapetime_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_timeout'],
                             scrapetime_config['scrape-timeout'])

    def test_global_evaluation_interval_can_be_set(self):
        self.harness.set_leader(True)
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            evalint_config['evaluation-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(evalint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['evaluation_interval'],
                             evalint_config['evaluation-interval'])

    def test_valid_external_labels_can_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        labels = {'name1': 'value1', 'name2': 'value2'}
        label_config['external-labels'] = json.dumps(labels)
        self.harness.update_config(label_config)
        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNotNone(gconfig['external_labels'])
        self.assertEqual(labels, gconfig['external_labels'])

    def test_invalid_external_labels_can_not_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        # label values must be strings
        labels = {'name': 1}
        label_config['external-labels'] = json.dumps(labels)
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(label_config)
            expected_logs = [
                "ERROR:charm:External label keys/values must be strings"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNone(gconfig.get('external_labels'))

    def test_default_scrape_config_is_always_set(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        prometheus_scrape_config = scrape_config(pod_spec, 'prometheus')
        self.assertIsNotNone(prometheus_scrape_config,
                             'No default config found')

    def test_k8s_scrape_config_can_be_set(self):
        self.harness.set_leader(True)
        k8s_config = MINIMAL_CONFIG.copy()
        k8s_config['monitor-k8s'] = True
        self.harness.update_config(k8s_config)
        pod_spec = self.harness.get_pod_spec()
        k8s_api_scrape_config = scrape_config(pod_spec,
                                              'kubernetes-apiservers')
        self.assertIsNotNone(k8s_api_scrape_config,
                             'No k8s API server scrape config found')
        k8s_node_scrape_config = scrape_config(pod_spec, 'kubernetes-nodes')
        self.assertIsNotNone(k8s_node_scrape_config,
                             'No k8s nodes scrape config found')
        k8s_ca_scrape_config = scrape_config(pod_spec, 'kubernetes-cadvisor')
        self.assertIsNotNone(k8s_ca_scrape_config,
                             'No k8s cAdvisor scrape config found')
        k8s_ep_scrape_config = scrape_config(pod_spec,
                                             'kubernetes-service-endpoints')
        self.assertIsNotNone(k8s_ep_scrape_config,
                             'No k8s service endpoints scrape config found')
        k8s_svc_scrape_config = scrape_config(pod_spec, 'kubernetes-services')
        self.assertIsNotNone(k8s_svc_scrape_config,
                             'No k8s services scrape config found')
        k8s_in_scrape_config = scrape_config(pod_spec, 'kubernetes-ingresses')
        self.assertIsNotNone(k8s_in_scrape_config,
                             'No k8s ingress scrape config found')
        k8s_pod_scrape_config = scrape_config(pod_spec, 'kubernetes-pods')
        self.assertIsNotNone(k8s_pod_scrape_config,
                             'No k8s pods scrape config found')
Example No. 7
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""
    def setUp(self) -> None:
        """Test setup."""
        self.harness = Harness(HssCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> None:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> None:
        """Test installation with relation."""
        self.harness.charm.on.start.emit()
        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "hss",
                "image":
                "localhost:32000/ims_hss:1.0",
                "imagePullPolicy":
                "Always",
                "ports": [
                    {
                        "name": "diahss",
                        "containerPort": 3868,
                        "protocol": "TCP"
                    },
                    {
                        "name": "hss",
                        "containerPort": 8080,
                        "protocol": "TCP"
                    },
                ],
                "envConfig": {
                    "MYSQL_HOST": "mysql-endpoints",
                    "MYSQL_USER": "******",
                    "MYSQL_ROOT_PASSWORD": "******",
                },
                "command": ["./init_hss.sh", "&"],
            }],
        }
        # Check if mysql is initialized
        self.assertIsNone(self.harness.charm.state.mysql)

        # Initializing mysql relation
        mysql_relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(mysql_relation_id, "mysql/0")
        self.harness.update_relation_data(
            mysql_relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Checking that the mysql data is stored
        self.assertEqual(self.harness.charm.state.mysql, "mysql")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_mysql_app_relation_changed(self) -> None:
        """Test to see if mysql app relation is updated."""
        self.harness.charm.on.start.emit()

        self.assertIsNone(self.harness.charm.state.mysql)

        relation_id = self.harness.add_relation("mysql", "mysql")
        self.harness.add_relation_unit(relation_id, "mysql/0")
        self.harness.update_relation_data(
            relation_id,
            "mysql",
            {
                "hostname": "mysql",
                "mysql_user": "******",
                "mysql_pwd": "root"
            },
        )

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_hss_info(self) -> None:
        """Test to see if hss relation is updated."""
        expected_result = {"private-address": "127.1.1.1", "hostname": "hss"}
        self.harness.charm.on.start.emit()
        relation_id = self.harness.add_relation("dns-source", "dns_source")
        relation_data = {"private-address": "127.1.1.1", "hostname": "hss"}
        self.harness.update_relation_data(relation_id, "dns_source",
                                          relation_data)
        relation_data = self.harness.get_relation_data(relation_id,
                                                       "dns_source")
        self.assertDictEqual(expected_result, relation_data)
Example No. 8
class TestEndpointProvider(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(EndpointProviderCharm, meta=PROVIDER_META)
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def test_provider_default_scrape_relations_not_in_meta(self):
        """Tests that the Provider raises exception when no promethes_scrape in meta."""
        harness = Harness(
            EndpointProviderCharm,
            # The default metrics-endpoint relation name is missing from meta
            meta="""
                name: provider-tester
                containers:
                    prometheus:
                        resource: prometheus-image
                prometheus-tester: {}
                provides:
                    non-standard-name:
                        interface: prometheus_scrape
                """,
        )
        self.assertRaises(RelationNotFoundError, harness.begin)

    def test_provider_default_scrape_relation_wrong_interface(self):
        """Tests that Provider raises exception if the default relation has the wrong interface."""
        harness = Harness(
            EndpointProviderCharm,
            # The default metrics-endpoint relation uses the wrong interface
            meta="""
                name: provider-tester
                containers:
                    prometheus:
                        resource: prometheus-image
                prometheus-tester: {}
                provides:
                    metrics-endpoint:
                        interface: not_prometheus_scrape
                """,
        )
        self.assertRaises(RelationInterfaceMismatchError, harness.begin)

    def test_provider_default_scrape_relation_wrong_role(self):
        """Tests that Provider raises exception if the default relation has the wrong role."""
        harness = Harness(
            EndpointProviderCharm,
            # The default metrics-endpoint relation is declared with the wrong role (requires)
            meta="""
                name: provider-tester
                containers:
                    prometheus:
                        resource: prometheus-image
                prometheus-tester: {}
                requires:
                    metrics-endpoint:
                        interface: prometheus_scrape
                """,
        )
        self.assertRaises(RelationRoleMismatchError, harness.begin)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_sets_scrape_metadata(self, _):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("scrape_metadata", data)
        scrape_metadata = data["scrape_metadata"]
        self.assertIn("model", scrape_metadata)
        self.assertIn("model_uuid", scrape_metadata)
        self.assertIn("application", scrape_metadata)

    @patch_network_get(private_address="192.0.8.2")
    def test_provider_unit_sets_bind_address_on_pebble_ready(self, *unused):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.container_pebble_ready("prometheus-tester")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.charm.unit.name)
        self.assertIn("prometheus_scrape_unit_address", data)
        self.assertEqual(data["prometheus_scrape_unit_address"], "192.0.8.2")

    @patch_network_get(private_address="192.0.8.2")
    def test_provider_unit_sets_bind_address_on_relation_joined(self, *unused):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.charm.unit.name)
        self.assertIn("prometheus_scrape_unit_address", data)
        self.assertEqual(data["prometheus_scrape_unit_address"], "192.0.8.2")
        self.assertIn("prometheus_scrape_unit_name", data)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_supports_multiple_jobs(self, _):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("scrape_jobs", data)
        jobs = json.loads(data["scrape_jobs"])
        self.assertEqual(len(jobs), len(JOBS))
        names = [job["job_name"] for job in jobs]
        job_names = [job["job_name"] for job in JOBS]
        self.assertListEqual(names, job_names)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_provider_sanitizes_jobs(self, _):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("scrape_jobs", data)
        jobs = json.loads(data["scrape_jobs"])
        for job in jobs:
            keys = set(job.keys())
            self.assertTrue(keys.issubset(ALLOWED_KEYS))

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_each_alert_rule_is_topology_labeled(self, _):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("alert_rules", data)
        alerts = json.loads(data["alert_rules"])
        self.assertIn("groups", alerts)
        self.assertEqual(len(alerts["groups"]), 5)
        group = alerts["groups"][0]
        for rule in group["rules"]:
            self.assertIn("labels", rule)
            labels = rule["labels"]
            self.assertIn("juju_model", labels)
            self.assertIn("juju_application", labels)
            self.assertIn("juju_model_uuid", labels)
            self.assertIn("juju_charm", labels)

    @patch("ops.testing._TestingModelBackend.network_get")
    def test_each_alert_expression_is_topology_labeled(self, _):
        rel_id = self.harness.add_relation(RELATION_NAME, "provider")
        self.harness.add_relation_unit(rel_id, "provider/0")
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.app.name)
        self.assertIn("alert_rules", data)
        alerts = json.loads(data["alert_rules"])
        self.assertIn("groups", alerts)
        self.assertEqual(len(alerts["groups"]), 5)
        group = alerts["groups"][0]
        for rule in group["rules"]:
            self.assertIn("expr", rule)
            for labels in expression_labels(rule["expr"]):
                self.assertIn("juju_model", labels)
                self.assertIn("juju_model_uuid", labels)
                self.assertIn("juju_application", labels)
                self.assertIn("juju_charm", labels)
Example #9
class TestManilaNetappCharm(TestCase):

    REQUIRED_CHARM_CONFIG_BY_DEFAULT = {
        'management-address': '10.0.0.1',
        'admin-password': '******',
        'vserver-name': 'svm0',
    }

    def setUp(self):
        self.harness = Harness(charm.ManilaNetappCharm)
        self.addCleanup(self.harness.cleanup)

    def test_custom_status_check_default_config(self):
        self.harness.disable_hooks()
        self.harness.begin()

        self.assertFalse(self.harness.charm.custom_status_check())
        expected_status = BlockedStatus('Missing configs: {}'.format(
            list(self.REQUIRED_CHARM_CONFIG_BY_DEFAULT.keys())))
        self.assertEqual(self.harness.charm.unit.status, expected_status)

    def test_custom_status_check_valid_config(self):
        self.harness.update_config(self.REQUIRED_CHARM_CONFIG_BY_DEFAULT)
        self.harness.disable_hooks()
        self.harness.begin()

        self.assertTrue(self.harness.charm.custom_status_check())

    @mock.patch.object(charm.ops_openstack.core.OSBaseCharm, 'install_pkgs')
    @mock.patch.object(charm.interface_manila_plugin.ManilaPluginProvides,
                       'send_backend_config')
    @mock.patch('charmhelpers.contrib.openstack.templating.get_loader')
    @mock.patch('charmhelpers.core.templating.render')
    def test_send_config_dhss_disabled(self, _render, _get_loader,
                                       _send_backend_config, _install_pkgs):
        _render.return_value = 'test-rendered-manila-backend-config'
        _get_loader.return_value = 'test-loader'
        self.harness.update_config(self.REQUIRED_CHARM_CONFIG_BY_DEFAULT)
        rel_id = self.harness.add_relation('manila-plugin', 'manila')
        self.harness.add_relation_unit(rel_id, 'manila/0')
        self.harness.begin_with_initial_hooks()

        self.assertTrue(self.harness.charm.state.is_started)
        _render.assert_called_once_with(source='manila.conf',
                                        template_loader='test-loader',
                                        target=None,
                                        context=self.harness.charm.adapters)
        _get_loader.assert_called_once_with('templates/', 'default')
        _send_backend_config.assert_called_once_with(
            'netapp-ontap', 'test-rendered-manila-backend-config')
        _install_pkgs.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status,
                         ActiveStatus('Unit is ready'))

    @mock.patch.object(charm.ops_openstack.core.OSBaseCharm, 'install_pkgs')
    @mock.patch.object(charm.interface_manila_plugin.ManilaPluginProvides,
                       'send_backend_config')
    @mock.patch('charmhelpers.contrib.openstack.templating.get_loader')
    @mock.patch('charmhelpers.core.templating.render')
    def test_send_config_dhss_enabled(self, _render, _get_loader,
                                      _send_backend_config, _install_pkgs):
        _render.return_value = 'test-rendered-manila-backend-config'
        _get_loader.return_value = 'test-loader'
        config = copy.deepcopy(self.REQUIRED_CHARM_CONFIG_BY_DEFAULT)
        config['driver-handles-share-servers'] = True
        config['root-volume-aggregate-name'] = 'test_cluster_01_VM_DISK_1'
        self.harness.update_config(config)
        self.harness.begin_with_initial_hooks()

        # Validate workflow with incomplete relation data
        self.assertFalse(self.harness.charm.state.is_started)
        _render.assert_not_called()
        _get_loader.assert_not_called()
        _send_backend_config.assert_not_called()
        _install_pkgs.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status, UnknownStatus())

        # Validate workflow with complete relation data
        rel_id = self.harness.add_relation('manila-plugin', 'manila')
        self.harness.add_relation_unit(rel_id, 'manila/0')
        self.harness.update_relation_data(rel_id, 'manila/0', {
            '_authentication_data':
            json.dumps({'data': 'test-manila-auth-data'})
        })
        self.assertTrue(self.harness.charm.state.is_started)
        _render.assert_called_once_with(source='manila.conf',
                                        template_loader='test-loader',
                                        target=None,
                                        context=self.harness.charm.adapters)
        _get_loader.assert_called_once_with('templates/', 'default')
        _send_backend_config.assert_called_once_with(
            'netapp-ontap', 'test-rendered-manila-backend-config')
        self.assertEqual(self.harness.charm.unit.status,
                         ActiveStatus('Unit is ready'))
Example #10
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""
    def setUp(self) -> NoReturn:
        """Test setup"""
        self.harness = Harness(Upf1Charm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with any relation."""
        annot = {
            "annotations": {
                "k8s.v1.cni.cncf.io/networks":
                '[\n{\n"name" : "n6-network",'
                '\n"interface": "eth1",\n"ips": []\n}\n]'
            },
            "securityContext": {
                "runAsUser": 0000,
                "runAsGroup": 0000
            },
        }
        service = [{
            "name": "upf-e",
            "labels": {
                "juju-app": "upf1"
            },
            "spec": {
                "selector": {
                    "juju-app": "upf1"
                },
                "ports": [{
                    "protocol": "TCP",
                    "port": 80,
                    "targetPort": 80
                }],
                "type": "ClusterIP",
            },
        }]

        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "upf1",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "upf1",
                    "containerPort": 2152,
                    "protocol": "UDP",
                }],
                "envConfig": {
                    "UE_RANGE": "60.60.0.0/24",
                    "STATIC_IP": "192.168.70.15",
                },
                "command": ["./upf_start.sh", "&"],
                "kubernetes": {
                    "securityContext": {
                        "privileged": True
                    }
                },
            }],
            "kubernetesResources": {
                "services": service,
                "pod": annot,
            },
        }
        # Check if natapp is initialized
        self.assertIsNone(self.harness.charm.state.natapp_ip)
        self.assertIsNone(self.harness.charm.state.natapp_host)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "natapp")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        self.harness.update_relation_data(
            natapp_relation_id,
            "natapp",
            {
                "hostname": "natapp",
                "static_ip": "192.168.70.15"
            },
        )
        # Checking if natapp data is stored
        self.assertEqual(self.harness.charm.state.natapp_ip, "192.168.70.15")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_config_change(self) -> NoReturn:
        """Test installation without any relation."""

        expected_result = {
            "version":
            3,
            "containers": [{
                "name":
                "upf1",
                "imageDetails":
                self.harness.charm.image.fetch(),
                "imagePullPolicy":
                "Always",
                "ports": [{
                    "name": "upf1",
                    "containerPort": 2152,
                    "protocol": "UDP"
                }],
                "envConfig": {
                    "UE_RANGE": "60.60.0.0/24",
                    "STATIC_IP": "192.168.70.15",
                },
                "command": ["./upf_start.sh", "&"],
                "kubernetes": {
                    "securityContext": {
                        "privileged": True
                    }
                },
            }],
            "kubernetesResources": {
                "pod": {
                    "annotations": {
                        "k8s.v1.cni.cncf.io/networks":
                        '[\n{\n"name" : "n6-network",'
                        '\n"interface": "eth1",\n"ips": []\n}\n]'
                    },
                    "securityContext": {
                        "runAsUser": 0,
                        "runAsGroup": 0
                    },
                },
                "services": [{
                    "name": "upf-e",
                    "labels": {
                        "juju-app": "upf1"
                    },
                    "spec": {
                        "selector": {
                            "juju-app": "upf1"
                        },
                        "ports": [{
                            "protocol": "TCP",
                            "port": 80,
                            "targetPort": 80
                        }],
                        "type":
                        "ClusterIP",
                    },
                }],
            },
        }

        # Check if natapp is initialized
        self.assertIsNone(self.harness.charm.state.natapp_ip)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "natapp")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        self.harness.update_relation_data(
            natapp_relation_id,
            "natapp",
            {
                "hostname": "natapp",
                "static_ip": "192.168.70.15"
            },
        )

        # Checking if natapp data is stored
        self.assertEqual(self.harness.charm.state.natapp_ip, "192.168.70.15")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_natapp_app_relation_changed(self) -> NoReturn:
        """Test to see if upf app relation is updated."""
        self.harness.charm.on.config_changed.emit()

        self.assertIsNone(self.harness.charm.state.natapp_ip)

        # Initializing the natapp relation
        natapp_relation_id = self.harness.add_relation("natapp", "upf")
        self.harness.add_relation_unit(natapp_relation_id, "natapp/0")
        relation_data = {"static_ip": "192.168.70.15"}
        self.harness.update_relation_data(natapp_relation_id, "natapp/0",
                                          relation_data)

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_publish_upf_info(self) -> NoReturn:
        """Test to see if upf relation is updated."""
        expected_result = {
            "private_address": "127.1.1.1",
        }
        relation_id = self.harness.add_relation("upf", "smf")
        relation_data = {"private_address": "127.1.1.1"}
        self.harness.update_relation_data(relation_id, "upf1", relation_data)
        relation_data = self.harness.get_relation_data(relation_id, "upf1")
        self.assertDictEqual(expected_result, relation_data)
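
Note on the natapp tests above: updating relation data against the remote application name ("natapp") writes the application databag, while updating against a unit name ("natapp/0") writes that unit's databag. A small self-contained sketch of that distinction, using a plain CharmBase instead of Upf1Charm, so the metadata and values here are illustrative assumptions only:

from ops.charm import CharmBase
from ops.testing import Harness

harness = Harness(CharmBase, meta="""
name: upf1
requires:
  natapp:
    interface: natapp
""")
harness.begin()
rel_id = harness.add_relation("natapp", "natapp")
harness.add_relation_unit(rel_id, "natapp/0")
# Written against the remote application name -> application databag
harness.update_relation_data(rel_id, "natapp", {"static_ip": "192.168.70.15"})
# Written against the remote unit name -> that unit's databag
harness.update_relation_data(rel_id, "natapp/0", {"hostname": "natapp"})
rel = harness.charm.model.get_relation("natapp")
assert rel.data[rel.app]["static_ip"] == "192.168.70.15"
assert rel.data[list(rel.units)[0]]["hostname"] == "natapp"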
Example #11
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(MongoDBCharm)
        self.addCleanup(self.harness.cleanup)
        mongo_resource = {
            "registrypath": "mongodb:4.4.1",
            "username": "******",
            "password": "******"
        }
        self.harness.add_oci_resource("mongodb-image", mongo_resource)
        self.harness.begin()
        self.peer_rel_id = self.harness.add_relation('mongodb', 'mongodb')

    @patch('ops.testing._TestingPebbleClient.pull')
    def test_replica_set_name_can_be_changed(self, _):
        self.harness.set_leader(True)
        self.harness.container_pebble_ready("mongodb")

        # check default replica set name
        plan = self.harness.get_container_pebble_plan("mongodb")
        self.assertEqual(replica_set_name(plan), "rs0")

        # check replica set name can be changed
        self.harness.update_config({"replica_set_name": "new_name"})
        plan = self.harness.get_container_pebble_plan("mongodb")
        self.assertEqual(replica_set_name(plan), "new_name")

    @patch("mongoserver.MongoDB.reconfigure_replica_set")
    def test_replica_set_is_reconfigured_when_peer_joins(self, mock_reconf):
        self.harness.set_leader(True)
        self.harness.add_relation_unit(self.peer_rel_id, 'mongodb/1')
        self.harness.update_relation_data(self.peer_rel_id, 'mongodb/1',
                                          {'private-address': '10.0.0.1'})
        peers = [
            'mongodb-k8s-0.mongodb-k8s-endpoints',
            'mongodb-k8s-1.mongodb-k8s-endpoints'
        ]
        mock_reconf.assert_called_once_with(peers)

    def test_replica_set_uri_data_is_generated_correctly(self):
        self.harness.set_leader(True)
        replica_set_uri = self.harness.charm.mongo.replica_set_uri()
        data = self.harness.get_relation_data(self.peer_rel_id,
                                              self.harness.model.app.name)
        cred = "root:{}".format(data['root_password'])
        self.assertEqual(
            replica_set_uri,
            'mongodb://{}@mongodb-k8s-0.mongodb-k8s-endpoints:27017/admin'.
            format(cred))

    def test_leader_sets_key_and_root_credentials(self):
        self.harness.set_leader(False)
        self.harness.set_leader(True)
        data = self.harness.get_relation_data(self.peer_rel_id,
                                              self.harness.model.app.name)
        self.assertIsNotNone(data['root_password'])
        self.assertIsNotNone(data['security_key'])

    @patch('mongoserver.MongoDB.version')
    def test_charm_provides_version(self, mock_version):
        self.harness.set_leader(True)
        mock_version.return_value = "4.4.1"
        version = self.harness.charm.mongo.version()
        self.assertEqual(version, "4.4.1")

    @patch('mongoserver.MongoDB.is_ready')
    def test_start_is_deferred_if_mongo_is_not_ready(self, is_ready):
        is_ready.return_value = False
        self.harness.set_leader(True)
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.charm.on.start.emit()
            is_ready.assert_called()
            for message in sorted(logger.output):
                if "DEBUG:ops.framework:Deferring" in message:
                    self.assertIn("StartEvent", message)

    @patch('mongoserver.MongoDB.initialize_replica_set')
    @patch('mongoserver.MongoDB.is_ready')
    def test_start_is_deferred_if_mongo_is_not_initialized(
            self, is_ready, initialize):
        is_ready.return_value = True
        initialize.side_effect = RuntimeError("Not Initialized")
        self.harness.set_leader(True)
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.charm.on.start.emit()
            is_ready.assert_called()
            self.assertIn(
                "INFO:charm:Deferring on_start since : error=Not Initialized",
                sorted(logger.output))
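
Note: the two deferral tests above assert on the "Deferring ... StartEvent" debug message that ops.framework emits when a handler calls event.defer(). A self-contained sketch of that pattern, using a toy charm rather than MongoDBCharm and assuming the same ops.framework log message those tests rely on:

import logging
import unittest

from ops.charm import CharmBase
from ops.testing import Harness

logger = logging.getLogger(__name__)


class DeferringCharm(CharmBase):
    """Toy charm (not MongoDBCharm) that always defers its start event."""

    def __init__(self, *args):
        super().__init__(*args)
        self.framework.observe(self.on.start, self._on_start)

    def _on_start(self, event):
        logger.info("Deferring on_start since : error=Not Initialized")
        event.defer()


class TestDeferLogging(unittest.TestCase):
    def test_defer_is_logged(self):
        harness = Harness(DeferringCharm, meta="name: defer-tester")
        self.addCleanup(harness.cleanup)
        harness.begin()
        with self.assertLogs(level="DEBUG") as logs:
            harness.charm.on.start.emit()
        self.assertTrue(
            any("Deferring" in line and "StartEvent" in line
                for line in logs.output))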
Example #12
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(RedisCharm)
        self.addCleanup(self.harness.cleanup)
        redis_resource = {
            "registrypath": "redis:6.0",
            # "username" and "password" are useless, but oci-resource
            # library fetch() fails if we do not provide them ...
            "username": "",
            "password": ""
        }
        self.harness.add_oci_resource("redis-image", redis_resource)
        self.harness.begin()

    def test_on_start_when_unit_is_not_leader(self):
        # Given
        self.harness.set_leader(False)
        # When
        self.harness.charm.on.start.emit()
        # Then
        self.assertEqual(self.harness.charm.unit.status, ActiveStatus())

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_start_when_redis_is_not_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = False
        # When
        self.harness.charm.on.start.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status,
                         WaitingStatus("Waiting for Redis ..."))

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_start_when_redis_is_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = True
        # When
        self.harness.charm.on.start.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status, ActiveStatus())

    def test_on_stop(self):
        # When
        self.harness.charm.on.stop.emit()
        # Then
        self.assertEqual(self.harness.charm.unit.status,
                         MaintenanceStatus('Pod is terminating.'))

    def test_on_config_changed_when_unit_is_not_leader(self):
        # Given
        self.harness.set_leader(False)
        # When
        self.harness.charm.on.config_changed.emit()
        # Then
        self.assertEqual(self.harness.charm.unit.status, ActiveStatus())

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_config_changed_when_unit_is_leader_and_redis_is_ready(
            self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = True
        # When
        self.harness.charm.on.config_changed.emit()
        # Then
        self.assertIsNotNone(self.harness.charm.state.pod_spec)
        self.assertEqual(self.harness.charm.unit.status, ActiveStatus())

    @mock.patch.object(OCIImageResource, 'fetch')
    def test_on_config_changed_when_unit_is_leader_but_image_fetch_breaks(
            self, fetch):
        # Given
        self.harness.set_leader(True)
        fetch.side_effect = OCIImageResourceError("redis-image")
        # When
        self.harness.charm.on.config_changed.emit()
        # Then
        fetch.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status,
                         BlockedStatus("Error fetching image information."))

    def test_on_update_status_when_unit_is_not_leader(self):
        # Given
        self.harness.set_leader(False)
        # When
        self.harness.charm.on.update_status.emit()
        # Then
        self.assertEqual(self.harness.charm.unit.status, ActiveStatus())

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_update_status_when_redis_is_not_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = False
        # When
        self.harness.charm.on.update_status.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status,
                         WaitingStatus("Waiting for Redis ..."))

    @mock.patch.object(RedisClient, 'is_ready')
    def test_on_update_status_when_redis_is_ready(self, is_ready):
        # Given
        self.harness.set_leader(True)
        is_ready.return_value = True
        # When
        self.harness.charm.on.update_status.emit()
        # Then
        is_ready.assert_called_once_with()
        self.assertEqual(self.harness.charm.unit.status, ActiveStatus())
Example #13
class TestDashboardConsumer(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(ConsumerCharm, meta=CONSUMER_META)
        self.harness._backend.model_name = "testing"
        self.harness._backend.model_uuid = "abcdefgh-1234"
        self.addCleanup(self.harness.cleanup)
        self.harness.set_leader(True)
        self.harness.begin()

    def test_consumer_does_not_set_dashboard_without_monitoring(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)

    def test_consumer_sets_dashboard_data(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)

    def test_consumer_can_remove_dashboard(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)
        self.harness.charm.consumer.remove_dashboard()
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": True,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        # Re-read the databag after removal and compare it with the expected data.
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        self.assertEqual(return_data, data)

    def test_consumer_resends_dashboard_after_monitoring_established(self):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)

        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": False,
            "invalidated_reason": "",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)

    def test_consumer_invalidates_dashboard_after_monitoring_established_then_broken(
        self, ):
        rel_id = self.harness.add_relation("grafana-dashboard", "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.charm.consumer.add_dashboard(DASHBOARD_TMPL)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)

        mon_rel_id = self.harness.add_relation("monitoring", "consumer")
        self.harness.add_relation_unit(mon_rel_id, "monitoring/0")
        self.harness.remove_relation(mon_rel_id)
        data = json.loads(
            self.harness.get_relation_data(
                rel_id, self.harness.model.app.name)["dashboards"])
        return_data = {
            "monitoring_identifier": "testing_abcdefgh-1234_monitoring",
            "monitoring_target": "Consumer-tester [ testing / abcdefgh-1234 ]",
            "monitoring_query":
            "juju_model='testing',juju_model_uuid='abcdefgh-1234',juju_application='consumer-tester'",
            "template": "\n\n",
            "removed": False,
            "invalidated": True,
            "invalidated_reason":
            "Waiting for a monitoring relation to send dashboard data",
            "uuid": "12345678",
        }
        self.assertEqual(return_data, data)
        self.assertEqual(self.harness.charm._stored.invalid_events, 1)
Example #14
class TestEndpointConsumer(unittest.TestCase):
    def setUp(self):
        metadata_file = open("metadata.yaml")
        self.harness = Harness(EndpointConsumerCharm, meta=metadata_file)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def setup_charm_relations(self, multi=False):
        """Create relations used by test cases.

        Args:
            multi: a boolean indicating if multiple relations must be
            created.
        """
        rel_ids = []
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        rel_ids.append(rel_id)
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "scrape_jobs": json.dumps(SCRAPE_JOBS),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id, "consumer/0", {"prometheus_scrape_host": "1.1.1.1"})
        self.assertEqual(self.harness.charm._stored.num_events, 2)

        if multi:
            rel_id = self.harness.add_relation(RELATION_NAME, "other-consumer")
            rel_ids.append(rel_id)
            self.harness.update_relation_data(
                rel_id,
                "other-consumer",
                {
                    "scrape_metadata": json.dumps(OTHER_SCRAPE_METADATA),
                    "scrape_jobs": json.dumps(OTHER_SCRAPE_JOBS),
                },
            )
            self.harness.add_relation_unit(rel_id, "other-consumer/0")
            self.harness.update_relation_data(
                rel_id, "other-consumer/0",
                {"prometheus_scrape_host": "2.2.2.2"})
            self.assertEqual(self.harness.charm._stored.num_events, 4)

        return rel_ids

    def validate_jobs(self, jobs):
        """Valdiate that a list of jobs has the expected fields.

        Existence for unit labels is not checked since these do not
        exist for all jobs.

        Args:
            jobs: list of jobs where each job is a dictionary.

        Raises:
            assertion failures if any job is not as expected.
        """
        for job in jobs:
            self.assertIn("job_name", job)
            self.assertIn("static_configs", job)
            static_configs = job["static_configs"]
            for static_config in static_configs:
                self.assertIn("targets", static_config)
                self.assertIn("labels", static_config)
                labels = static_config["labels"]
                self.assertIn("juju_model", labels)
                self.assertIn("juju_model_uuid", labels)
                self.assertIn("juju_application", labels)

            relabel_configs = job["relabel_configs"]
            self.assertEqual(len(relabel_configs), 1)

            relabel_config = relabel_configs[0]
            self.assertEqual(
                relabel_config.get("source_labels"),
                [
                    "juju_model", "juju_model_uuid", "juju_application",
                    "juju_unit"
                ],
            )

    def test_consumer_notifies_on_new_scrape_relation(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id, "consumer",
            {"scrape_metadata": json.dumps(SCRAPE_METADATA)})
        self.assertEqual(self.harness.charm._stored.num_events, 1)

    def test_consumer_notifies_on_new_scrape_target(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id, "consumer/0", {"prometheus_scrape_host": "1.1.1.1"})
        self.assertEqual(self.harness.charm._stored.num_events, 1)

    def test_consumer_returns_all_static_scrape_labeled_jobs(self):
        self.setup_charm_relations()

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        self.validate_jobs(jobs)

    def test_consumer_does_not_unit_label_fully_qualified_targets(self):
        self.setup_charm_relations()

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        for job in jobs:
            for static_config in job["static_configs"]:
                if FULL_TARGET in static_config.get("targets"):
                    self.assertNotIn("juju_unit", static_config.get("labels"))

    def test_consumer_does_attach_unit_labels_to_wildcard_hosts(self):
        self.setup_charm_relations()

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        for job in jobs:
            for static_config in job["static_configs"]:
                if FULL_TARGET not in static_config.get("targets"):
                    self.assertIn("juju_unit", static_config.get("labels"))

    def test_consumer_allows_custom_metrics_paths(self):
        rel_ids = self.setup_charm_relations()
        self.assertEqual(len(rel_ids), 1)
        rel_id = rel_ids[0]

        jobs = self.harness.charm.prometheus_consumer.jobs()
        for job in jobs:
            if job.get("metrics_path"):
                name_suffix = job_name_suffix(job["job_name"],
                                              juju_job_labels(job), rel_id)
                path = named_job_attribute(name_suffix, "metrics_path",
                                           "/metrics")
                self.assertEqual(job["metrics_path"], path)

    def test_consumer_sanitizes_jobs(self):
        self.setup_charm_relations()

        jobs = self.harness.charm.prometheus_consumer.jobs()
        for job in jobs:
            job_keys = set(job.keys())
            self.assertTrue(job_keys.issubset(ALLOWED_KEYS))

    def test_consumer_returns_jobs_for_all_relations(self):
        self.setup_charm_relations(multi=True)

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS) + len(OTHER_SCRAPE_JOBS))

    def test_consumer_scrapes_each_port_for_wildcard_hosts(self):
        rel_ids = self.setup_charm_relations()
        self.assertEqual(len(rel_ids), 1)
        rel_id = rel_ids[0]

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), len(SCRAPE_JOBS))
        ports = wildcard_target_ports(SCRAPE_JOBS)
        targets = wildcard_targets(jobs, ports)
        consumers = self.harness.charm.model.get_relation(
            RELATION_NAME, rel_id)
        self.assertEqual(len(targets), len(ports) * len(consumers.units))

    def test_consumer_handles_default_scrape_job(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "scrape_jobs": json.dumps(DEFAULT_JOBS),
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 1)
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id, "consumer/0", {"prometheus_scrape_host": "1.1.1.1"})
        self.assertEqual(self.harness.charm._stored.num_events, 2)

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.validate_jobs(jobs)

    def test_consumer_overwrites_juju_topology_labels(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)
        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "scrape_jobs": json.dumps(BAD_JOBS),
            },
        )
        self.assertEqual(self.harness.charm._stored.num_events, 1)
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.harness.update_relation_data(
            rel_id, "consumer/0", {"prometheus_scrape_host": "1.1.1.1"})
        self.assertEqual(self.harness.charm._stored.num_events, 2)

        jobs = self.harness.charm.prometheus_consumer.jobs()
        self.assertEqual(len(jobs), 1)
        self.validate_jobs(jobs)
        bad_labels = juju_job_labels(BAD_JOBS[0])
        labels = juju_job_labels(jobs[0])
        for label_name, label_value in labels.items():
            self.assertNotEqual(label_value, bad_labels[label_name])

    def test_consumer_returns_alerts_indexed_by_group_name(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(SCRAPE_METADATA),
                "alert_rules": json.dumps(ALERT_RULES),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.assertEqual(self.harness.charm._stored.num_events, 1)

        alerts = self.harness.charm.prometheus_consumer.alerts()
        self.assertEqual(len(alerts), 1)
        for name, alert_group in alerts.items():
            group = next(
                (group
                 for group in ALERT_RULES["groups"] if group["name"] == name),
                None)
            self.assertDictEqual(alert_group, group)

    def test_consumer_logs_an_error_on_missing_alerting_data(self):
        self.assertEqual(self.harness.charm._stored.num_events, 0)

        bad_metadata = {"bad": "metadata"}
        bad_rules = {"bad": "rule"}

        rel_id = self.harness.add_relation(RELATION_NAME, "consumer")
        self.harness.update_relation_data(
            rel_id,
            "consumer",
            {
                "scrape_metadata": json.dumps(bad_metadata),
                "alert_rules": json.dumps(bad_rules),
            },
        )
        self.harness.add_relation_unit(rel_id, "consumer/0")
        self.assertEqual(self.harness.charm._stored.num_events, 1)
        with self.assertLogs(level="ERROR") as logger:
            _ = self.harness.charm.prometheus_consumer.alerts()
            messages = sorted(logger.output)
            self.assertEqual(len(messages), 1)
            self.assertIn(f"Relation {rel_id} has invalid data", messages[0])
Example #15
    def test_on_install(self):
        harness = Harness(OPAAuditCharm)
        self.addCleanup(harness.cleanup)
        harness.begin()

        assert harness.charm._on_install({}) is None
Example #16
class TestDatabase(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(MySQLOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.peer_relation_id = self.harness.add_relation(
            "database-peers", "database-peers")
        self.harness.add_relation_unit(self.peer_relation_id, "mysql/1")
        self.database_relation_id = self.harness.add_relation(
            DB_RELATION_NAME, "app")
        self.harness.add_relation_unit(self.database_relation_id, "app/0")
        self.charm = self.harness.charm

    @patch_network_get(private_address="1.1.1.1")
    @patch("mysqlsh_helpers.MySQL.get_mysql_version",
           return_value="8.0.29-0ubuntu0.20.04.3")
    @patch(
        "mysqlsh_helpers.MySQL.get_cluster_members_addresses",
        return_value={"2.2.2.1:3306", "2.2.2.3:3306", "2.2.2.2:3306"},
    )
    @patch("mysqlsh_helpers.MySQL.get_cluster_primary_address",
           return_value="2.2.2.2:3306")
    @patch("mysqlsh_helpers.MySQL.create_application_database_and_scoped_user")
    @patch("relations.database.generate_random_password",
           return_value="super_secure_password")
    def test_database_requested(
        self,
        _generate_random_password,
        _create_application_database_and_scoped_user,
        _get_cluster_primary_address,
        _get_cluster_members_addresses,
        _get_mysql_version,
    ):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        # confirm that the relation databag is empty
        database_relation_databag = self.harness.get_relation_data(
            self.database_relation_id, self.harness.charm.app)
        database_relation = self.charm.model.get_relation(DB_RELATION_NAME)
        app_unit = list(database_relation.units)[0]

        # simulate cluster initialized by editing the flag
        self.harness.update_relation_data(self.peer_relation_id,
                                          self.charm.app.name,
                                          {"units-added-to-cluster": "1"})

        self.assertEqual(database_relation_databag, {})
        self.assertEqual(database_relation.data.get(app_unit), {})
        self.assertEqual(database_relation.data.get(self.charm.unit), {})

        # update the app leader unit data to trigger database_requested event
        self.harness.update_relation_data(self.database_relation_id, "app",
                                          {"database": "test_db"})

        self.assertEqual(
            database_relation_databag,
            {
                "data": '{"database": "test_db"}',
                "password": "******",
                "username": f"relation-{self.database_relation_id}",
                "endpoints": "2.2.2.2:3306",
                "version": "8.0.29-0ubuntu0.20.04.3",
                "read-only-endpoints": "2.2.2.1:3306,2.2.2.3:3306",
            },
        )

        _generate_random_password.assert_called_once()
        _create_application_database_and_scoped_user.assert_called_once()
        _get_cluster_primary_address.assert_called_once()
        _get_cluster_members_addresses.assert_called_once()
        _get_mysql_version.assert_called_once()

    @patch_network_get(private_address="1.1.1.1")
    @patch("mysqlsh_helpers.MySQL.delete_user_for_relation")
    def test_database_broken(self, _delete_user_for_relation):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        self.harness.remove_relation(self.database_relation_id)

        _delete_user_for_relation.assert_called_once_with(
            self.database_relation_id)

    @patch_network_get(private_address="1.1.1.1")
    @patch("mysqlsh_helpers.MySQL.delete_user_for_relation")
    def test_database_broken_failure(self, _delete_user_for_relation):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        _delete_user_for_relation.side_effect = MySQLDeleteUserForRelationError()

        self.harness.remove_relation(self.database_relation_id)

        _delete_user_for_relation.assert_called_once()
Example #17
    def test_configure_pod(self):
        harness = Harness(OPAAuditCharm)
        self.addCleanup(harness.cleanup)
        harness.begin()

        assert harness.charm._configure_pod() is None
Example #18
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(GitlabRunnerCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
Example #19
class TestCephISCSIGatewayPeers(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(CharmBase,
                               meta='''
            name: ceph-iscsi
            peers:
              cluster:
                interface: ceph-iscsi-peer
        ''')

    @mock.patch.object(CephISCSIGatewayPeers,
                       'cluster_bind_address',
                       new_callable=PropertyMock)
    @mock.patch('socket.getfqdn')
    def test_on_changed(self, _getfqdn, _cluster_bind_address):
        our_fqdn = 'ceph-iscsi-0.example'
        _getfqdn.return_value = our_fqdn
        # TODO: Replace this with calls to the test harness once
        # https://github.com/canonical/operator/issues/222 is fixed.
        _cluster_bind_address.return_value = '192.0.2.1'

        class TestReceiver(framework.Object):
            def __init__(self, parent, key):
                super().__init__(parent, key)
                self.observed_events = []

            def on_ready_peers(self, event):
                self.observed_events.append(event)

        self.harness.begin()
        self.peers = CephISCSIGatewayPeers(self.harness.charm, 'cluster')

        receiver = TestReceiver(self.harness.framework, 'receiver')
        self.harness.framework.observe(self.peers.on.ready_peers, receiver)
        relation_id = self.harness.add_relation('cluster', 'ceph-iscsi')
        self.harness.add_relation_unit(
            relation_id, 'ceph-iscsi/1', {
                'ingress-address': '192.0.2.2',
                'gateway_ready': 'True',
                'gateway_fqdn': 'ceph-iscsi-1.example'
            })
        self.assertEqual(len(receiver.observed_events), 1)
        self.assertIsInstance(receiver.observed_events[0], ReadyPeersEvent)

    def test_set_admin_password(self):
        self.harness.set_leader()
        self.harness.begin()
        self.peers = CephISCSIGatewayPeers(self.harness.charm, 'cluster')
        self.harness.add_relation('cluster', 'ceph-iscsi')

        self.peers.set_admin_password('s3cr3t')
        rel_data = self.harness.charm.model.get_relation('cluster').data
        our_app = self.harness.charm.app
        self.assertEqual(rel_data[our_app]['admin_password'], 's3cr3t')

    @mock.patch('socket.getfqdn')
    def test_announce_ready(self, _getfqdn):
        our_fqdn = 'ceph-iscsi-0.example'
        _getfqdn.return_value = our_fqdn
        self.harness.begin()
        self.peers = CephISCSIGatewayPeers(self.harness.charm, 'cluster')
        self.harness.add_relation('cluster', 'ceph-iscsi')

        self.peers.announce_ready()
        rel_data = self.harness.charm.model.get_relation('cluster').data
        our_unit = self.harness.charm.unit
        self.assertEqual(rel_data[our_unit]['gateway_fqdn'], our_fqdn)
        self.assertEqual(rel_data[our_unit]['gateway_ready'], 'True')

    @mock.patch.object(CephISCSIGatewayPeers,
                       'cluster_bind_address',
                       new_callable=PropertyMock)
    @mock.patch('socket.getfqdn')
    def test_ready_peer_details(self, _getfqdn, _cluster_bind_address):
        _getfqdn.return_value = 'ceph-iscsi-0.example'
        # TODO: Replace this with calls to the test harness once
        # https://github.com/canonical/operator/issues/222 is fixed.
        _cluster_bind_address.return_value = '192.0.2.1'

        self.harness.begin()
        self.peers = CephISCSIGatewayPeers(self.harness.charm, 'cluster')
        relation_id = self.harness.add_relation('cluster', 'ceph-iscsi')

        self.harness.add_relation_unit(
            relation_id, 'ceph-iscsi/1', {
                'ingress-address': '192.0.2.2',
                'gateway_ready': 'True',
                'gateway_fqdn': 'ceph-iscsi-1.example'
            })
        self.harness.add_relation_unit(
            relation_id, 'ceph-iscsi/2', {
                'ingress-address': '192.0.2.3',
                'gateway_ready': 'True',
                'gateway_fqdn': 'ceph-iscsi-2.example',
            })
        self.harness.add_relation_unit(relation_id, 'ceph-iscsi/3',
                                       {'ingress-address': '192.0.2.4'})

        self.peers.ready_peer_details

    @mock.patch.object(interface_ceph_iscsi_peer.CephISCSIGatewayPeers,
                       'cluster_bind_address',
                       new_callable=PropertyMock)
    def test_ready_peer_addresses(self, _cluster_bind_address):
        # TODO: Replace this with calls to the test harness once
        # https://github.com/canonical/operator/issues/222 is fixed.
        _cluster_bind_address.return_value = '192.0.2.1'

        self.harness.begin()
        self.peers = CephISCSIGatewayPeers(self.harness.charm, 'cluster')
        relation_id = self.harness.add_relation('cluster', 'ceph-iscsi')

        self.harness.add_relation_unit(
            relation_id, 'ceph-iscsi/1', {
                'ingress-address': '192.0.2.2',
                'gateway_ready': 'True',
                'gateway_fqdn': 'ceph-iscsi-1.example'
            })
        self.harness.add_relation_unit(
            relation_id, 'ceph-iscsi/2', {
                'ingress-address': '192.0.2.3',
                'gateway_ready': 'True',
                'gateway_fqdn': 'ceph-iscsi-2.example',
            })
        self.assertEqual(['192.0.2.1', '192.0.2.2', '192.0.2.3'],
                         self.peers.peer_addresses)
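
Note: the ceph-iscsi tests above pass a data dict as the third argument to add_relation_unit. Current ops releases take only the relation id and unit name there, with relation data set separately via update_relation_data; a hedged sketch of the equivalent calls, reusing the names from the example above:

        relation_id = self.harness.add_relation('cluster', 'ceph-iscsi')
        self.harness.add_relation_unit(relation_id, 'ceph-iscsi/1')
        self.harness.update_relation_data(
            relation_id, 'ceph-iscsi/1', {
                'ingress-address': '192.0.2.2',
                'gateway_ready': 'True',
                'gateway_fqdn': 'ceph-iscsi-1.example'
            })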
Example #20
class TestDBRouter(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(MySQLOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.peer_relation_id = self.harness.add_relation(
            "database-peers", "database-peers")
        self.harness.add_relation_unit(self.peer_relation_id, "mysql/1")
        self.db_router_relation_id = self.harness.add_relation(
            "db-router", "app")
        self.harness.add_relation_unit(self.db_router_relation_id, "app/0")
        self.charm = self.harness.charm

    @patch_network_get(private_address="1.1.1.1")
    @patch("relations.db_router.generate_random_password",
           return_value="super_secure_password")
    @patch("mysqlsh_helpers.MySQL.get_cluster_primary_address",
           return_value="2.2.2.2")
    @patch("mysqlsh_helpers.MySQL.does_mysql_user_exist", return_value=False)
    @patch("mysqlsh_helpers.MySQL.configure_mysqlrouter_user")
    @patch("mysqlsh_helpers.MySQL.create_application_database_and_scoped_user")
    def test_db_router_relation_changed(
        self,
        _create_application_database_and_scoped_user,
        _configure_mysqlrouter_user,
        _does_mysql_user_exist,
        _get_cluster_primary_address,
        _generate_random_password,
    ):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        # confirm that the relation databag is empty
        db_router_relation_databag = self.harness.get_relation_data(
            self.db_router_relation_id, self.harness.charm.app)
        db_router_relation = self.charm.model.get_relation("db-router")
        app_unit = list(db_router_relation.units)[0]

        self.assertEqual(db_router_relation_databag, {})
        self.assertEqual(db_router_relation.data.get(app_unit), {})
        self.assertEqual(db_router_relation.data.get(self.charm.unit), {})

        # update the app leader unit data to trigger db_router_relation_changed event
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertEqual(_generate_random_password.call_count, 2)
        self.assertEqual(_does_mysql_user_exist.call_count, 2)
        self.assertEqual(
            sorted(_does_mysql_user_exist.mock_calls),
            sorted([
                call("mysqlrouteruser", "1.1.1.3"),
                call("keystone_user", "1.1.1.2"),
            ]),
        )

        _configure_mysqlrouter_user.assert_called_once_with(
            "mysqlrouteruser", "super_secure_password", "1.1.1.3", "app/0")
        _create_application_database_and_scoped_user.assert_called_once_with(
            "keystone_database", "keystone_user", "super_secure_password",
            "1.1.1.2", "app/0")

        # confirm that credentials in the mysql leader unit databag is set correctly
        self.assertEqual(
            db_router_relation.data.get(app_unit),
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertEqual(
            db_router_relation.data.get(self.charm.unit),
            {
                "db_host": '"2.2.2.2"',
                "mysqlrouter_password": '******',
                "mysqlrouter_allowed_units": '"app/0"',
                "MRUP_password": '******',
                "MRUP_allowed_units": '"app/0"',
            },
        )

    @patch_network_get(private_address="1.1.1.1")
    @patch("relations.db_router.generate_random_password",
           return_value="super_secure_password")
    @patch("mysqlsh_helpers.MySQL.does_mysql_user_exist", return_value=False)
    @patch("mysqlsh_helpers.MySQL.configure_mysqlrouter_user")
    @patch("mysqlsh_helpers.MySQL.create_application_database_and_scoped_user")
    def test_db_router_relation_changed_exceptions(
        self,
        _create_application_database_and_scoped_user,
        _configure_mysqlrouter_user,
        _does_mysql_user_exist,
        _generate_random_password,
    ):
        # run start-up events to enable usage of the helper class
        self.harness.set_leader(True)
        self.charm.on.config_changed.emit()

        # confirm that the relation databag is empty
        db_router_relation_databag = self.harness.get_relation_data(
            self.db_router_relation_id, self.harness.charm.app)
        db_router_relation = self.charm.model.get_relation("db-router")
        app_unit = list(db_router_relation.units)[0]

        self.assertEqual(db_router_relation_databag, {})
        self.assertEqual(db_router_relation.data.get(app_unit), {})
        self.assertEqual(db_router_relation.data.get(self.charm.unit), {})

        # test an exception while configuring mysql users
        _does_mysql_user_exist.side_effect = MySQLCheckUserExistenceError
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertTrue(
            isinstance(self.harness.model.unit.status, BlockedStatus))

        _does_mysql_user_exist.reset_mock()

        # test an exception while creating the mysql router user
        _configure_mysqlrouter_user.side_effect = MySQLConfigureRouterUserError
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertTrue(
            isinstance(self.harness.model.unit.status, BlockedStatus))

        _configure_mysqlrouter_user.reset_mock()

        # test an exception while creating the application database and scoped user
        _create_application_database_and_scoped_user.side_effect = (
            MySQLCreateApplicationDatabaseAndScopedUserError)
        self.harness.update_relation_data(
            self.db_router_relation_id,
            "app/0",
            {
                "MRUP_database": "keystone_database",
                "MRUP_hostname": "1.1.1.2",
                "MRUP_username": "******",
                "mysqlrouter_hostname": "1.1.1.3",
                "mysqlrouter_username": "******",
            },
        )

        self.assertTrue(
            isinstance(self.harness.model.unit.status, BlockedStatus))

        _create_application_database_and_scoped_user.reset_mock()
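
The same five-key payload is written to the "app/0" databag three times in the exception test above. Purely as an illustration of how that repetition could be factored out, the helper below is hypothetical and not part of the charm's test suite; it relies only on Harness.update_relation_data, which emits the relation-changed event for the db-router relation.

SAMPLE_DB_ROUTER_DATA = {
    "MRUP_database": "keystone_database",
    "MRUP_hostname": "1.1.1.2",
    "MRUP_username": "******",
    "mysqlrouter_hostname": "1.1.1.3",
    "mysqlrouter_username": "******",
}


def trigger_db_router_changed(harness, relation_id):
    # Hypothetical helper: write the sample payload to the remote unit's
    # databag, which makes the Harness emit relation-changed for the
    # db-router relation. Callers set the desired mock side_effect first.
    harness.update_relation_data(relation_id, "app/0", dict(SAMPLE_DB_ROUTER_DATA))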
Example No. 21
class TestCharm(unittest.TestCase):
    def setUp(self) -> None:
        self.harness = Harness(NextcloudOperatorCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test__pod_spec(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        pod_spec, _ = self.harness.get_pod_spec()

        self.assertEqual(
            BASE_CONFIG['port'],
            pod_spec.get('containers')[0].get('ports')[0].get('containerPort'))

        self.assertEqual(
            BASE_CONFIG['image'],
            pod_spec.get('containers')[0].get('imageDetails').get('imagePath'))

    def test__database_relation_data(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)
        self.assertEqual(self.harness.charm.state.database, {})

        # add relation and update relation data
        rel_id = self.harness.add_relation('database', 'mysql')
        rel = self.harness.model.get_relation('database')
        self.harness.add_relation_unit(rel_id, 'mysql/0')
        test_relation_data = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'my-test-db',
            'user': '******',
            'password': '******',
        }
        self.harness.update_relation_data(rel_id, 'mysql/0',
                                          test_relation_data)

        # check that charm datastore was properly set
        self.assertEqual(dict(self.harness.charm.state.database),
                         test_relation_data)

        # now depart this relation and ensure the datastore is emptied
        self.harness.charm.on.database_relation_broken.emit(rel)
        self.assertEqual({}, dict(self.harness.charm.state.database))

    def test__update_pod_env_config(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # test mysql
        self.harness.charm.state.database = {
            'type': 'mysql',
            'host': '0.1.2.3:3306',
            'name': 'mysql-test-db',
            'user': '******',
            'password': '******'
        }

        expected_config = {
            'MYSQL_DATABASE': 'mysql-test-db',
            'MYSQL_USER': '******',
            'MYSQL_PASSWORD': '******',
            'MYSQL_HOST': '0.1.2.3:3306'
        }
        pod_spec, _ = self.harness.get_pod_spec()
        self.harness.charm._update_pod_env_config(pod_spec)
        self.assertEqual(pod_spec['containers'][0]['envConfig'],
                         expected_config)

        # test postgresql
        self.harness.charm.state.database = {
            'type': 'postgres',
            'host': '0.1.2.3:5432',
            'name': 'pg-test-db',
            'user': '******',
            'password': '******'
        }

        expected_config = {
            'POSTGRES_DB': 'pg-test-db',
            'POSTGRES_USER': '******',
            'POSTGRES_PASSWORD': '******',
            'POSTGRES_HOST': '0.1.2.3:5432'
        }

        pod_spec, _ = self.harness.get_pod_spec()
        self.harness.charm._update_pod_env_config(pod_spec)
        self.assertEqual(pod_spec['containers'][0]['envConfig'],
                         expected_config)
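
The two envConfig assertions above fully determine the mapping from the stored database relation data to the pod spec. A minimal sketch of that mapping, reconstructed only from the expected_config dicts in the test (the real _update_pod_env_config in the Nextcloud charm is not shown here), could look like this:

def update_pod_env_config(pod_spec, database):
    # Hypothetical reconstruction based solely on the expected_config dicts
    # in the test above; the key prefix is chosen by the database 'type'.
    if database['type'] == 'mysql':
        env = {
            'MYSQL_DATABASE': database['name'],
            'MYSQL_USER': database['user'],
            'MYSQL_PASSWORD': database['password'],
            'MYSQL_HOST': database['host'],
        }
    else:  # assumed to be 'postgres'
        env = {
            'POSTGRES_DB': database['name'],
            'POSTGRES_USER': database['user'],
            'POSTGRES_PASSWORD': database['password'],
            'POSTGRES_HOST': database['host'],
        }
    pod_spec['containers'][0]['envConfig'] = env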
Example No. 22
class TestCharm(unittest.TestCase):
    def setUp(self):
        self.harness = Harness(PrometheusCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()

    def test_image_path_is_required(self):
        missing_image_config = {
            'prometheus-image-path': '',
            'prometheus-image-username': '',
            'prometheus-image-password': ''
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_image_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-path']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-path']
        self.assertEqual(missing, expected)

    def test_password_is_required_when_username_is_set(self):
        missing_password_config = {
            'prometheus-image-path': 'prom/prometheus:latest',
            'prometheus-image-username': '******',
            'prometheus-image-password': '',
        }
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(missing_password_config)
            expected_logs = [
                "ERROR:charm:Incomplete Configuration : ['prometheus-image-password']. "
                "Application will be blocked."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        missing = self.harness.charm._check_config()
        expected = ['prometheus-image-password']
        self.assertEqual(missing, expected)

    def test_alerting_config_is_updated_by_alertmanager_relation(self):
        self.harness.set_leader(True)

        # check alerting config is empty without alertmanager relation
        self.harness.update_config(MINIMAL_CONFIG)

        self.assertEqual(self.harness.charm._stored.alertmanagers, [])
        rel_id = self.harness.add_relation('alertmanager', 'alertmanager')

        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'alertmanager/0')
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), None)

        # check alerting config is updated when an alertmanager joins
        self.harness.update_relation_data(rel_id, 'alertmanager',
                                          {'port': '9093'})
        self.harness.update_relation_data(rel_id, 'alertmanager/0',
                                          {'ingress-address': '192.169.0.1'})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SAMPLE_ALERTING_CONFIG)

    def test_alerting_config_is_removed_when_alertmanager_departs(self):
        self.harness.set_leader(True)

        # ensure there is a non-empty alerting config
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('alertmanager', 'alertmanager')
        rel = self.harness.model.get_relation('alertmanager')
        self.assertIsInstance(rel_id, int)
        self.harness.add_relation_unit(rel_id, 'alertmanager/0')
        self.harness.update_relation_data(rel_id, 'alertmanager',
                                          {'port': '9093'})
        self.harness.update_relation_data(rel_id, 'alertmanager/0',
                                          {'ingress-address': '192.169.0.1'})
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), SAMPLE_ALERTING_CONFIG)

        # check alerting config is removed when relation departs
        self.harness.charm.on.alerting_relation_departed.emit(rel)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(alerting_config(pod_spec), None)

    def test_grafana_is_provided_port_and_source(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        rel_id = self.harness.add_relation('grafana-source', 'grafana')
        self.harness.add_relation_unit(rel_id, 'grafana/0')
        self.harness.update_relation_data(rel_id, 'grafana/0', {})
        data = self.harness.get_relation_data(rel_id,
                                              self.harness.model.unit.name)

        self.assertEqual(int(data['port']), MINIMAL_CONFIG['port'])
        self.assertEqual(data['source-type'], 'prometheus')

    def test_default_cli_log_level_is_info(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'info')

    def test_invalid_log_level_defaults_to_debug(self):
        self.harness.set_leader(True)
        bad_log_config = MINIMAL_CONFIG.copy()
        bad_log_config['log-level'] = 'bad-level'
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(bad_log_config)
            expected_logs = [
                "ERROR:root:Invalid loglevel: bad-level given, "
                "debug/info/warn/error/fatal allowed. "
                "defaulting to DEBUG loglevel."
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'debug')

    def test_valid_log_level_is_accepted(self):
        self.harness.set_leader(True)
        valid_log_config = MINIMAL_CONFIG.copy()
        valid_log_config['log-level'] = 'warn'
        self.harness.update_config(valid_log_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--log.level'), 'warn')

    def test_tsdb_compression_is_not_enabled_by_default(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         None)

    def test_tsdb_compression_can_be_enabled(self):
        self.harness.set_leader(True)
        compress_config = MINIMAL_CONFIG.copy()
        compress_config['tsdb-wal-compression'] = True
        self.harness.update_config(compress_config)
        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.wal-compression'),
                         '--storage.tsdb.wal-compression')

    def test_valid_tsdb_retention_times_can_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            retention_time = '{}{}'.format(1, unit)
            retention_time_config['tsdb-retention-time'] = retention_time
            self.harness.update_config(retention_time_config)
            pod_spec = self.harness.get_pod_spec()
            self.assertEqual(
                cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                retention_time)

    def test_invalid_tsdb_retention_times_can_not_be_set(self):
        self.harness.set_leader(True)
        retention_time_config = MINIMAL_CONFIG.copy()

        # invalid unit
        retention_time = '{}{}'.format(1, 'x')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = ["ERROR:charm:Invalid unit x in time spec"]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

        # invalid time value
        retention_time = '{}{}'.format(0, 'd')
        retention_time_config['tsdb-retention-time'] = retention_time
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(retention_time_config)
            expected_logs = [
                "ERROR:charm:Expected positive time spec but got 0"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        self.assertEqual(cli_arg(pod_spec, '--storage.tsdb.retention.time'),
                         None)

    def test_global_scrape_interval_can_be_set(self):
        self.harness.set_leader(True)
        scrapeint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapeint_config['scrape-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapeint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_interval'],
                             scrapeint_config['scrape-interval'])

    def test_global_scrape_timeout_can_be_set(self):
        self.harness.set_leader(True)
        scrapetime_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            scrapetime_config['scrape-timeout'] = '{}{}'.format(1, unit)
            self.harness.update_config(scrapetime_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['scrape_timeout'],
                             scrapetime_config['scrape-timeout'])

    def test_global_evaluation_interval_can_be_set(self):
        self.harness.set_leader(True)
        evalint_config = MINIMAL_CONFIG.copy()
        acceptable_units = ['y', 'w', 'd', 'h', 'm', 's']
        for unit in acceptable_units:
            evalint_config['evaluation-interval'] = '{}{}'.format(1, unit)
            self.harness.update_config(evalint_config)
            pod_spec = self.harness.get_pod_spec()
            gconfig = global_config(pod_spec)
            self.assertEqual(gconfig['evaluation_interval'],
                             evalint_config['evaluation-interval'])

    def test_valid_external_labels_can_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        labels = {'name1': 'value1', 'name2': 'value2'}
        label_config['external-labels'] = json.dumps(labels)
        self.harness.update_config(label_config)
        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNotNone(gconfig['external_labels'])
        self.assertEqual(labels, gconfig['external_labels'])

    def test_invalid_external_labels_can_not_be_set(self):
        self.harness.set_leader(True)
        label_config = MINIMAL_CONFIG.copy()
        # label value must be string
        labels = {'name': 1}
        label_config['external-labels'] = json.dumps(labels)
        with self.assertLogs(level='ERROR') as logger:
            self.harness.update_config(label_config)
            expected_logs = [
                "ERROR:charm:External label keys/values must be strings"
            ]
            self.assertEqual(sorted(logger.output), expected_logs)

        pod_spec = self.harness.get_pod_spec()
        gconfig = global_config(pod_spec)
        self.assertIsNone(gconfig.get('external_labels'))

    def test_default_scrape_config_is_always_set(self):
        self.harness.set_leader(True)
        self.harness.update_config(MINIMAL_CONFIG)
        pod_spec = self.harness.get_pod_spec()
        prometheus_scrape_config = scrape_config(pod_spec, 'prometheus')
        self.assertIsNotNone(prometheus_scrape_config,
                             'No default config found')
Example No. 23
class TestKubernetesInfluxdbCharm(unittest.TestCase):
    def setUp(self) -> None:
        """Setup the harness object."""
        self.harness = Harness(KubernetesInfluxdbCharm)
        self.harness.begin()
        self.harness.add_oci_resource("influxdb2-image")

    def tearDown(self):
        """Cleanup the harness."""
        self.harness.cleanup()

    #
    # Hooks
    #
    def test__on_config_changed(self) -> None:
        self.harness.update_config({"port": "9999"})
        self.assertEqual(self.harness.charm.unit.status,
                         ActiveStatus("Pod is ready"))

    def test__on_config_changed_pebble_api_connection_error_1(self) -> None:
        self.harness.charm.unit.get_container = mock.MagicMock()
        self.harness.charm.unit.get_container.return_value.get_plan = mock.MagicMock(
            side_effect=ConnectionError("connection timeout"))
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.update_config({"port": "9999"})
            self.assertIn(
                "DEBUG:charm:The Pebble API is not ready yet. Error message: connection timeout",
                logger.output,
            )
            self.assertNotIn(
                "DEBUG:charm:Pebble plan has already been loaded. No need to update the config.",
                logger.output,
            )

    def test__on_config_changed_pebble_api_connection_error_2(self) -> None:
        self.harness.charm.unit.get_container = mock.MagicMock()
        self.harness.charm.unit.get_container.return_value.get_plan.return_value.to_dict = (
            mock.MagicMock(return_value={}))
        self.harness.charm.unit.get_container.return_value.add_layer = mock.MagicMock(
            side_effect=ConnectionError("connection timeout"))
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.update_config({"port": "9999"})
            self.assertIn(
                "DEBUG:charm:The Pebble API is not ready yet. Error message: connection timeout",
                logger.output,
            )
            self.assertNotIn(
                "DEBUG:charm:Pebble plan has already been loaded. No need to update the config.",
                logger.output,
            )

    def test__on_config_changed_same_plan(self) -> None:
        self.harness.charm.unit.get_container = mock.MagicMock()
        self.harness.charm.unit.get_container.return_value.get_plan.return_value.to_dict = (
            mock.MagicMock(return_value=self.harness.charm._influxdb2_layer()))
        with self.assertLogs(level="DEBUG") as logger:
            self.harness.update_config({"port": "9999"})
            self.assertIn(
                "DEBUG:charm:Pebble plan has already been loaded. No need to update the config.",
                logger.output,
            )
            self.assertNotIn(
                "DEBUG:charm:The Pebble API is not ready yet. Error message: connection timeout",
                logger.output,
            )

    #
    # Test Relations
    #
    def test__grafana_source_data(self,
                                  expected_reldata: Optional[Dict] = None
                                  ) -> None:
        # Initialize values
        interface: str = "grafana-source"
        rel_app: str = "grafana"
        rel_unit: str = "grafana/0"
        rel_data: Dict[str, str] = {}

        if expected_reldata is None:
            # relation not initialized
            expected_reldata = {
                key: ""
                for key in ["private-address", "port", "source-type"]
            }

        # Initialize unit state (related to grafana)
        rel_id = self.harness.add_relation(interface, rel_app)
        self.harness.add_relation_unit(rel_id, rel_unit)

        # Trigger the -relation-changed hook, which will call the observed event
        self.harness.update_relation_data(rel_id, rel_app, rel_data)

        self.assertIsInstance(rel_id, int)

        # Verify the relation data set by the influxdb2 charm
        relation = self.harness.model.get_relation(interface)

        for key, expected_val in expected_reldata.items():
            self.assertEqual(
                relation.data[self.harness.charm.unit].get(key, ""),
                expected_val)

    @mock.patch("subprocess.check_output")
    def test__grafana_source_data_leader(
            self, mock_check_output: mock.MagicMock) -> None:
        mock_check_output.return_value = b"10.0.0.1"
        expected_reldata: Dict[str, str] = {
            "private-address": "10.0.0.1",
            "port": "8086",
            "source-type": "influxdb",
        }
        self.harness.set_leader(True)
        self.test__grafana_source_data(expected_reldata=expected_reldata)

    #
    # Test Helpers
    #
    def test__influxdb2_layer(self) -> None:
        expected = {
            "summary": "influxdb2 layer",
            "description": "pebble config layer for influxdb2",
            "services": {
                "influxdb2": {
                    "override": "replace",
                    "summary": "influxdb2 service",
                    "command": "/entrypoint.sh influxd",
                    "startup": "enabled",
                    "environment": {
                        "DOCKER_INFLUXDB_INIT_MODE": "setup",
                        "DOCKER_INFLUXDB_INIT_USERNAME": "******",
                        "DOCKER_INFLUXDB_INIT_PASSWORD": "******",
                        "DOCKER_INFLUXDB_INIT_ORG": "influxdata",
                        "DOCKER_INFLUXDB_INIT_BUCKET": "default",
                        "DOCKER_INFLUXDB_INIT_RETENTION": "0s",
                        "DOCKER_INFLUXDB_INIT_ADMIN_TOKEN": "asdfasdfasdf",
                        "INFLUXD_BOLT_PATH":
                        "/var/lib/influxdbv2/influxd.bolt",
                        "INFLUXD_ENGINE_PATH": "/var/lib/influxdbv2",
                        "INFLUXD_HTTP_BIND_ADDRESS": ":8086",
                    },
                }
            },
        }

        self.assertEqual(set(self.harness.charm._influxdb2_layer()),
                         set(expected))

    def test__is_running(self) -> None:
        container = self.harness.charm.unit.get_container(WORKLOAD_CONTAINER)
        service_not_running = self.harness.charm._is_running(
            container, "influxd")
        self.assertFalse(service_not_running)
Example No. 24
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""

    def setUp(self) -> NoReturn:
        """Test setup"""
        self.harness = Harness(WebuiCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for ")
        )

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with any relation."""
        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "webui",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "webui",
                            "containerPort": 5000,
                            "protocol": "TCP",
                        }
                    ],
                    "envConfig": {
                        "ALLOW_ANONYMOUS_LOGIN": "******",
                        "GIN_MODE": "release",
                        "MONGODB_URI": "mongodb://mongodb:27017",
                        "MONGODB_HOST": "mongodb",
                    },
                    "command": ["./webui_start.sh", "&"],
                }
            ],
        }

        self.harness.charm.on.start.emit()
        # Check that the mongodb host is not yet initialized
        self.assertIsNone(self.harness.charm.state.mongodb_host)

        # Initializing the mongodb relation
        mongodb_relation_id = self.harness.add_relation("mongodb", "mongodb")
        self.harness.add_relation_unit(mongodb_relation_id, "mongodb/0")
        self.harness.update_relation_data(
            mongodb_relation_id,
            "mongodb",
            {"hostname": "mongodb", "mongodb_uri": "mongodb://mongodb:27017"},
        )

        # Checking if mongodb data is stored
        self.assertEqual(self.harness.charm.state.mongodb_host, "mongodb")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_mongodb_app_relation_changed(self) -> NoReturn:
        """Test to see if mongo relation is updated."""

        self.assertIsNone(self.harness.charm.state.mongodb_host)

        relation_id = self.harness.add_relation("mongodb", "mongodb")
        self.harness.add_relation_unit(relation_id, "mongodb/0")
        self.harness.update_relation_data(
            relation_id,
            "mongodb",
            {"hostname": "mongodb", "mongodb_uri": "mongodb://mongodb:27017"},
        )

        self.assertEqual(self.harness.charm.state.mongodb_host, "mongodb")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertFalse(
            self.harness.charm.unit.status.message.startswith("Waiting for ")
        )
Example No. 25
@pytest.fixture
def harness():
    _harness = Harness(SplunkCharm)
    _harness.set_model_name("testing")
    _harness.begin()
    yield _harness
    _harness.cleanup()
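
A minimal sketch of how a pytest test might consume the fixture above; it only touches generic Harness and model APIs, since SplunkCharm's own attributes are not shown in this example.

def test_model_name_is_set(harness):
    # The fixture calls set_model_name("testing") before begin(), so the
    # model name is visible from the charm's model.
    assert harness.model.name == "testing"

    # Leadership can be toggled on the same Harness instance.
    harness.set_leader(True)
    assert harness.charm.unit.is_leader()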
Example No. 26
class TestCharm(unittest.TestCase):
    """Test script for checking pod spec and relations"""
    def setUp(self) -> NoReturn:
        """Test setup."""
        self.harness = Harness(SmfCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_start_without_relations(self) -> NoReturn:
        """Test installation without any relation."""
        self.harness.charm.on.config_changed.emit()

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_start_with_relations(self) -> NoReturn:
        """Test installation with relation."""
        self.harness.charm.on.start.emit()
        expected_result = {
            "version": 3,
            "containers": [{
                "name": "smf",
                "imageDetails": self.harness.charm.image.fetch(),
                "imagePullPolicy": "Always",
                "ports": [{
                    "name": "smf",
                    "containerPort": 29502,
                    "protocol": "TCP",
                }],
                "envConfig": {
                    "ALLOW_ANONYMOUS_LOGIN": "******",
                    "GIN_MODE": "release",
                    "IPADDR1": "10.45.30.27",
                    "NRF_HOST": "nrf",
                },
                "command": ["./ipscript.sh", "&"],
            }],
        }

        # Check if nrf,upf is initialized
        self.assertIsNone(self.harness.charm.state.nrf_host)
        self.assertIsNone(self.harness.charm.state.upf_host)

        # Initializing the nrf relation
        nrf_relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(nrf_relation_id, "nrf/0")
        self.harness.update_relation_data(nrf_relation_id, "nrf",
                                          {"hostname": "nrf"})

        # Initializing the upf relation
        upf_relation_id = self.harness.add_relation("upf", "upf")
        self.harness.add_relation_unit(upf_relation_id, "upf/0")
        self.harness.update_relation_data(upf_relation_id, "upf/0",
                                          {"private_address": "10.45.30.27"})

        # Checking if nrf,upf data is stored
        self.assertEqual(self.harness.charm.state.nrf_host, "nrf")
        self.assertEqual(self.harness.charm.state.upf_host, "10.45.30.27")

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_on_upf_app_relation_changed(self) -> NoReturn:
        """Test to see if upf app relation is updated."""
        self.harness.charm.on.start.emit()

        self.assertIsNone(self.harness.charm.state.upf_host)

        # Initializing the upf relation
        upf_relation_id = self.harness.add_relation("upf", "upf")
        self.harness.add_relation_unit(upf_relation_id, "upf/0")
        self.harness.update_relation_data(upf_relation_id, "upf/0",
                                          {"private_address": "upf"})

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))

    def test_on_nrf_app_relation_changed(self) -> NoReturn:
        """Test to see if nfr relation is updated."""
        self.harness.charm.on.start.emit()

        self.assertIsNone(self.harness.charm.state.nrf_host)

        relation_id = self.harness.add_relation("nrf", "nrf")
        self.harness.add_relation_unit(relation_id, "nrf/0")
        self.harness.update_relation_data(relation_id, "nrf",
                                          {"hostname": "nrf"})

        # Verifying status
        self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        self.assertTrue(
            self.harness.charm.unit.status.message.startswith("Waiting for "))
Example No. 27
class CharmTest(unittest.TestCase):

    def setUp(self):
        # Setup
        self.harness = Harness(charm.Charm)
        self.harness.begin()

    def test__init__works_without_a_hitch(self):
        # Setup
        harness = Harness(charm.Charm)

        # Exercise
        harness.begin()

    def test__mysql_on_new_relation_calls_handler(self):
        with patch.object(charm, 'on_server_new_relation_handler',
                          spec_set=True) as mocked_on_new_server_relation_handler:
            # Setup
            server_details = MySQLServerDetails(dict(
                host=str(uuid4()),
                port=random.randint(1, 65535),
                database=str(uuid4()),
                user=str(uuid4()),
                password=str(uuid4()),
            ))

            # Exercise
            self.harness.charm.mysql.on.new_relation.emit(server_details)

            # Assert
            assert mocked_on_new_server_relation_handler.call_count == 1

            args, kwargs = mocked_on_new_server_relation_handler.call_args
            assert isinstance(args[0], NewMySQLRelationEvent)
            assert hasattr(args[0], 'server_details')
            assert args[0].server_details.address == server_details.address
            assert args[0].server_details.username == server_details.username
            assert args[0].server_details.database == server_details.database
            assert args[0].server_details.password == server_details.password
            assert isinstance(args[1], BoundStoredState)
            assert isinstance(args[2], adapters.framework.FrameworkAdapter)

    def test__on_config_changed_calls_handler(self):
        with patch.object(charm, 'on_config_changed_handler',
                          spec_set=True) as mocked_on_config_changed_handler:
            # Exercise
            self.harness.update_config()

            # Assert
            assert mocked_on_config_changed_handler.call_count == 1

            args, kwargs = mocked_on_config_changed_handler.call_args
            assert isinstance(args[0], ConfigChangedEvent)
            assert isinstance(args[1], adapters.framework.FrameworkAdapter)

    def test__prometheus_client_on_new_server_available_calls_handler(self):
        with patch.object(charm, 'on_server_new_relation_handler',
                          spec_set=True) as mocked_on_new_server_relation_handler:
            # Setup
            server_details = PostgresServerDetails(
                host=str(uuid4()),
                port=random.randint(1, 65535),
            )

            # Exercise
            self.harness.charm.prometheus_client.on.server_available.emit(
                server_details)

            # Assert
            assert mocked_on_new_server_relation_handler.call_count == 1

            args, kwargs = mocked_on_new_server_relation_handler.call_args
            assert isinstance(args[0], ServerAvailableEvent)
            assert hasattr(args[0], 'server_details')
            assert args[0].server_details.host == server_details.host
            assert args[0].server_details.port == server_details.port
            assert isinstance(args[1], BoundStoredState)
            assert isinstance(args[2], adapters.framework.FrameworkAdapter)
Example No. 28
class TestCharm(unittest.TestCase):
    def test_on_config_changed(self):
        harness = Harness(OPAAuditCharm)
        self.addCleanup(harness.cleanup)
        harness.begin()

        assert harness.charm._on_config_changed({}) is None
Example No. 29
class TestCharm(unittest.TestCase):
    """Test script for checking relations"""

    def setUp(self) -> NoReturn:
        """Test setup."""
        self.harness = Harness(NatappCharm)
        self.harness.set_leader(is_leader=True)
        self.harness.begin()

    def test_on_configure_change(self) -> NoReturn:
        """Test installation with any relation."""
        self.harness.charm.on.config_changed.emit()
        config_data = "192.168.1.216"
        second_interface = [
            {"name": "n6-network", "interface": "eth1", "ips": [config_data]}
        ]

        annot = {
            "annotations": {"k8s.v1.cni.cncf.io/networks": json.dumps(second_interface)}
        }
        custom_resource_def = [
            {
                "name": "network-attachment-definitions.k8s.cni.cncf.io",
                "spec": {
                    "group": "k8s.cni.cncf.io",
                    "scope": "Namespaced",
                    "names": {
                        "kind": "NetworkAttachmentDefinition",
                        "singular": "network-attachment-definition",
                        "plural": "network-attachment-definitions",
                    },
                    "versions": [{"name": "v1", "served": True, "storage": True}],
                },
            }
        ]
        pdn_subnet = "192.168.0.0/16"
        pdn_ip_range_start = "192.168.1.100"
        pdn_ip_range_end = "192.168.1.250"
        pdn_gateway_ip = "192.168.1.1"
        ipam_body = {
            "type": "host-local",
            "subnet": pdn_subnet,
            "rangeStart": pdn_ip_range_start,
            "rangeEnd": pdn_ip_range_end,
            "gateway": pdn_gateway_ip,
        }
        config_body = {
            "cniVersion": "0.3.1",
            "name": "n6-network",
            "type": "macvlan",
            "master": "ens3",
            "mode": "bridge",
            "ipam": ipam_body,
        }

        custom_resource = {
            "network-attachment-definitions.k8s.cni.cncf.io": [
                {
                    "apiVersion": "k8s.cni.cncf.io/v1",
                    "kind": "NetworkAttachmentDefinition",
                    "metadata": {"name": "n6-network"},
                    "spec": {"config": json.dumps(config_body)},
                }
            ]
        }

        expected_result = {
            "version": 3,
            "containers": [
                {
                    "name": "natapp",
                    "imageDetails": self.harness.charm.image.fetch(),
                    "imagePullPolicy": "Always",
                    "ports": [
                        {
                            "name": "natapp",
                            "containerPort": 2601,
                            "protocol": "UDP",
                        }
                    ],
                    "command": ["./start.sh", "&"],
                    "kubernetes": {"securityContext": {"privileged": True}},
                }
            ],
            "kubernetesResources": {
                "customResourceDefinitions": custom_resource_def,
                "customResources": custom_resource,
                "pod": annot,
            },
        }

        # Verifying status
        self.assertNotIsInstance(self.harness.charm.unit.status, BlockedStatus)

        # Verifying status message
        self.assertGreater(len(self.harness.charm.unit.status.message), 0)
        pod_spec, _ = self.harness.get_pod_spec()
        self.assertDictEqual(expected_result, pod_spec)

    def test_publish_natapp_info(self) -> NoReturn:
        """Test to see if upf relation is updated."""
        expected_result = {
            "hostname": "natapp",
            "static_ip": "192.168.70.15",
        }
        relation_id = self.harness.add_relation("natapp", "upf1")
        self.harness.add_relation_unit(relation_id, "upf1/0")
        relation_data = {"hostname": "natapp", "static_ip": "192.168.70.15"}
        self.harness.update_relation_data(relation_id, "natapp", relation_data)
        relation_data = self.harness.get_relation_data(relation_id, "natapp")
        self.assertDictEqual(expected_result, relation_data)
Example No. 30
class TestCharm(unittest.TestCase):
    def setUp(self) -> None:
        # charm setup
        self.harness = Harness(GraylogCharm)
        self.addCleanup(self.harness.cleanup)
        self.harness.begin()
        self.harness.add_oci_resource('graylog-image')

        # patches
        self.mock_bind_address = \
            mock.patch('charm.GraylogCharm.bind_address', new_callable=mock.PropertyMock)
        self.mock_external_uri = \
            mock.patch('charm.GraylogCharm.external_uri', new_callable=mock.PropertyMock)

        self.mock_bind_address.start()
        self.mock_external_uri.start()

        # cleanup
        self.addCleanup(self.mock_bind_address.stop)
        self.addCleanup(self.mock_external_uri.stop)

    def test_pod_spec_port(self):
        self.harness.set_leader(True)
        # pretend to have mongo and elasticsearch
        self.harness.charm._stored.mongodb_uri = 'mongo://test_uri/'
        self.harness.charm._stored.elasticsearch_uri = 'http://test_es_uri'

        self.harness.update_config(BASE_CONFIG)
        self.harness.charm.on.config_changed.emit()

        spec, _ = self.harness.get_pod_spec()
        expected_port = 9000
        actual_port = spec['containers'][0]['ports'][0]['containerPort']
        self.assertEqual(expected_port, actual_port)

    def test_elasticsearch_and_mongodb_conn_strings(self):
        self.harness.set_leader(True)
        self.harness.update_config(BASE_CONFIG)

        # add the elasticsearch relation
        es_rel_id = self.harness.add_relation('elasticsearch', 'elasticsearch')
        mongo_rel_id = self.harness.add_relation('mongodb', 'mongodb')
        self.harness.add_relation_unit(es_rel_id, 'elasticsearch/0')
        self.harness.add_relation_unit(mongo_rel_id, 'mongodb/0')

        # add elasticsearch relation data
        es_rel_data = {
            'ingress-address': '10.183.1.2',
            'port': 9200,
        }
        self.harness.update_relation_data(es_rel_id, 'elasticsearch/0',
                                          es_rel_data)
        self.assertTrue(self.harness.charm.has_elasticsearch)

        # add mongodb relation data
        mongo_rel_data = {
            'replica_set_uri': 'mongo://10.0.0.2:14001,10.0.0.3:14002',
            'replicated': 'True',
            'replica_set_name': 'rs0',
        }
        self.harness.update_relation_data(mongo_rel_id, 'mongodb/0',
                                          mongo_rel_data)
        self.assertTrue(self.harness.charm.has_mongodb)

        # test that elasticsearch-uri properly made it to the _stored variable
        expected_uri = 'http://10.183.1.2:9200'
        self.assertEqual(expected_uri,
                         self.harness.charm._stored.elasticsearch_uri)

        # now emit the relation broken events and make sure the _stored variables are cleared
        es_rel = self.harness.model.get_relation('elasticsearch')
        mongo_rel = self.harness.model.get_relation('mongodb')
        self.harness.charm.on.elasticsearch_relation_broken.emit(es_rel)
        self.harness.charm.on.mongodb_relation_broken.emit(mongo_rel)
        self.assertEqual(str(), self.harness.charm._stored.elasticsearch_uri)
        self.assertEqual(str(), self.harness.charm._stored.mongodb_uri)

    def test_blocking_without_mongodb_and_elasticsearch(self):
        self.harness.set_leader(True)
        with self.assertLogs(level='WARNING') as logger:
            self.harness.update_config(BASE_CONFIG)
            msg = 'WARNING:charm:Need both mongodb and Elasticsearch ' \
                  'relation for Graylog to function properly. Blocking.'
            self.assertEqual(sorted(logger.output), [msg])

    def test_check_config_with_missing_option(self):
        self.harness.set_leader(True)
        missing_password_config = {'port': 9000, 'admin-password': ''}
        with self.assertLogs(level='WARNING') as logger:
            self.harness.update_config(missing_password_config)
            msg = 'ERROR:charm:Need admin-password config option before setting pod spec.'
            self.assertEqual(sorted(logger.output), [msg])
            self.assertEqual(
                self.harness.model.unit.status,
                BlockedStatus("Need 'admin-password' config option."))