def test_script_alert(self):
  definition_json = self._get_script_alert_definition()

  # normally set by AlertSchedulerHandler
  definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
  definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
  definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')

  configuration = {'foo-site': {'bar': 'rendered-bar', 'baz': 'rendered-baz'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = ScriptAlert(definition_json, definition_json['source'], MagicMock())
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  self.assertEquals(definition_json['source']['path'], alert.path)
  self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir)
  self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir)
  self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir)

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('bar is rendered-bar, baz is rendered-baz', alerts[0]['text'])
def test_port_alert_complex_uri(self, socket_connect_mock):
  definition_json = self._get_port_alert_definition()

  configuration = {'hdfs-site': {
    'my-key': 'c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181,c6403.ambari.apache.org:2181'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = PortAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6402.ambari.apache.org")

  # use a URI that has commas to verify that we properly parse it
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")
  self.assertEquals(6, alert.interval())

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('OK', alerts[0]['state'])
  self.assertTrue('(Unit Tests)' in alerts[0]['text'])
  self.assertTrue('response time on port 2181' in alerts[0]['text'])
def test_skipped_alert(self):
  definition_json = self._get_script_alert_definition()

  # normally set by AlertSchedulerHandler
  definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
  definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
  definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')

  configuration = {'foo-site': {'skip': 'true'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = ScriptAlert(definition_json, definition_json['source'], None)  # instruct the test alert script to be skipped
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  self.assertEquals(definition_json['source']['path'], alert.path)
  self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir)
  self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir)
  self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir)

  # ensure that it was skipped
  self.assertEquals(0, len(collector.alerts()))
def test_alerts(self):
  alert1 = {'name': 'AlertName1', 'uuid': '11'}
  alert2 = {'name': 'AlertName2', 'uuid': '12'}
  alert3 = {'name': 'AlertName3', 'uuid': '13'}
  alert4 = {'name': 'AlertName4', 'uuid': '14'}

  controller = AlertCollector()
  controller._AlertCollector__buckets = {
    'TestCluster1': {'AlertName1': alert1, 'AlertName2': alert2},
    'TestCluster2': {'AlertName3': alert3, 'AlertName4': alert4}
  }

  alerts = controller.alerts()

  self.assertEquals(controller._AlertCollector__buckets, {})
  # list.sort() returns None, so compare sorted copies to actually check the contents
  self.assertEquals(sorted(alerts), sorted([alert1, alert2, alert3, alert4]))
def test_put_clusterExists(self):
  cluster = 'TestCluster'
  alert = {'name': 'AlertName', 'uuid': '12'}

  collector = AlertCollector()
  collector._AlertCollector__buckets = {'TestCluster': {}}
  collector.put(cluster, alert)

  self.assertEquals(collector._AlertCollector__buckets,
                    {'TestCluster': {'AlertName': alert}})
def test_remove_noAlert(self):
  alert2 = {'name': 'AlertName2', 'uuid': '12'}

  controller = AlertCollector()
  controller._AlertCollector__buckets = {'TestCluster2': {'AlertName2': alert2}}
  controller.remove('TestCluster', 'AlertName1')

  self.assertEquals(controller._AlertCollector__buckets,
                    {'TestCluster2': {'AlertName2': alert2}})
def test_port_alert(self):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "{{hdfs-site/my-key}}",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "Could not load process info: {0}"}
      }
    }
  }

  collector = AlertCollector()

  pa = PortAlert(json, json['source'])
  pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
  self.assertEquals(6, pa.interval())

  res = pa.collect()
def test_port_alert_no_sub(self):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "http://c6401.ambari.apache.org",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "Could not load process info: {0}"}
      }
    }
  }

  pa = PortAlert(json, json['source'])
  pa.set_helpers(AlertCollector(), '')
  self.assertEquals('http://c6401.ambari.apache.org', pa.uri)

  res = pa.collect()
def test_port_alert_no_sub(self):
  definition_json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "http://c6401.ambari.apache.org",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "(Unit Tests) Could not load process info: {0}"}
      }
    }
  }

  cluster_configuration = self.__get_cluster_configuration()

  alert = PortAlert(definition_json, definition_json['source'])
  alert.set_helpers(AlertCollector(), cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  self.assertEquals('http://c6401.ambari.apache.org', alert.uri)

  alert.collect()
def test_port_alert_complex_uri(self, socket_connect_mock):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "{{hdfs-site/my-key}}",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "(Unit Tests) Could not load process info: {0}"}
      }
    }
  }

  collector = AlertCollector()
  pa = PortAlert(json, json['source'])

  # use a URI that has commas to verify that we properly parse it
  pa.set_helpers(collector, {
    'hdfs-site/my-key': 'c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181,c6403.ambari.apache.org:2181'
  })
  pa.host_name = 'c6402.ambari.apache.org'
  self.assertEquals(6, pa.interval())

  pa.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('OK', alerts[0]['state'])
  self.assertTrue('(Unit Tests)' in alerts[0]['text'])
  self.assertTrue('response time on port 2181' in alerts[0]['text'])
def test_script_alert(self):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "SCRIPT",
      "path": "test_script.py",
    }
  }

  # normally set by AlertSchedulerHandler
  json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
  json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
  json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')

  collector = AlertCollector()
  sa = ScriptAlert(json, json['source'], MagicMock())
  sa.set_helpers(collector, {
    'foo-site/bar': 'rendered-bar',
    'foo-site/baz': 'rendered-baz'
  })

  self.assertEquals(json['source']['path'], sa.path)
  self.assertEquals(json['source']['stacks_directory'], sa.stacks_dir)
  self.assertEquals(json['source']['common_services_directory'], sa.common_services_dir)
  self.assertEquals(json['source']['host_scripts_directory'], sa.host_scripts_dir)

  sa.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('bar is rendered-bar, baz is rendered-baz', alerts[0]['text'])
def test_alert_collector_purge(self):
  definition_json = self._get_port_alert_definition()

  configuration = {'hdfs-site': {'my-key': 'value1'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = PortAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  self.assertEquals(6, alert.interval())

  res = alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertTrue(alerts[0] is not None)
  self.assertEquals('CRITICAL', alerts[0]['state'])

  collector.remove_by_uuid('c1f73191-4481-4435-8dae-fd380e4c0be1')
  self.assertEquals(0, len(collector.alerts()))
def test_metric_alert(self, ma_load_jmx_mock):
  json = {
    "name": "cpu_check",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "METRIC",
      "uri": "http://myurl:8633",
      "jmx": {
        "property_list": ["someJmxObject/value", "someOtherJmxObject/value"],
        "value": "{0} * 100 + 123"
      },
      "reporting": {
        "ok": {"text": "ok_arr: {0} {1} {2}"},
        "warning": {"text": "", "value": 13},
        "critical": {"text": "crit_arr: {0} {1} {2}", "value": 72}
      }
    }
  }

  ma_load_jmx_mock.return_value = [1, 3]

  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, '')
  ma.collect()

  self.assertEquals('CRITICAL', collector.alerts()[0]['state'])
  self.assertEquals('crit_arr: 1 3 223', collector.alerts()[0]['text'])

  del json['source']['jmx']['value']
  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, '')
  ma.collect()

  self.assertEquals('OK', collector.alerts()[0]['state'])
  self.assertEquals('ok_arr: 1 3 None', collector.alerts()[0]['text'])
def test_remove_by_uuid_absent(self):
  alert1 = {'name': 'AlertName1', 'uuid': '11'}
  alert2 = {'name': 'AlertName2', 'uuid': '12'}

  controller = AlertCollector()
  controller._AlertCollector__buckets = {
    'TestCluster': {'AlertName1': alert1, 'AlertName2': alert2}
  }
  controller.remove_by_uuid('13')

  self.assertEquals(controller._AlertCollector__buckets,
                    {'TestCluster': {'AlertName1': alert1, 'AlertName2': alert2}})
def test_uri_structure_parsing_without_namespace(self):
  """
  Tests that we can parse an HA URI that only includes an alias and not a namespace
  :return:
  """
  uri_structure = {
    "http": "{{yarn-site/yarn.resourcemanager.webapp.address}}",
    "https": "{{yarn-site/yarn.resourcemanager.webapp.http.address}}",
    "https_property": "{{yarn-site/yarn.http.policy}}",
    "https_property_value": "HTTPS_ONLY",
    "high_availability": {
      "alias_key": "{{yarn-site/yarn.resourcemanager.ha.rm-ids}}",
      "http_pattern": "{{yarn-site/yarn.resourcemanager.webapp.address.{{alias}}}}",
      "https_pattern": "{{yarn-site/yarn.resourcemanager.webapp.https.address.{{alias}}}}"
    }
  }

  configuration = {
    'yarn-site': {
      'yarn.http.policy': 'HTTPS_ONLY',
      'yarn.resourcemanager.webapp.address': 'c6401.ambari.apache.org:80',
      'yarn.resourcemanager.webapp.http.address': 'c6401.ambari.apache.org:443',
      'yarn.resourcemanager.webapp.address.rm1': 'c6401.ambari.apache.org:8080',
      'yarn.resourcemanager.webapp.https.address.rm1': 'c6401.ambari.apache.org:8443',
      'yarn.resourcemanager.webapp.address.rm2': 'c6402.ambari.apache.org:8080',
      'yarn.resourcemanager.webapp.https.address.rm2': 'c6402.ambari.apache.org:8443',
      'yarn.resourcemanager.ha.rm-ids': 'rm1, rm2'
    }
  }

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = MockAlert()
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6402.ambari.apache.org")

  uri_keys = alert._lookup_uri_property_keys(uri_structure)
  self.assertTrue(alert._check_uri_ssl_property(uri_keys))

  uri = alert._get_uri_from_structure(uri_keys)
  self.assertEqual('c6402.ambari.apache.org:8443', uri.uri)
  self.assertEqual(True, uri.is_ssl_enabled)
def test_configuration_updates(self):
  definition_json = self._get_script_alert_definition()

  # normally set by AlertSchedulerHandler
  definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
  definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
  definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')

  configuration = {'foo-site': {'bar': 'rendered-bar', 'baz': 'rendered-baz'}}

  # populate the configuration cache with the initial configs
  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  # run the alert and verify the output
  alert = ScriptAlert(definition_json, definition_json['source'], MagicMock())
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('bar is rendered-bar, baz is rendered-baz', alerts[0]['text'])

  # now update only the configs and run the same alert again and check
  # for different output
  configuration = {'foo-site': {'bar': 'rendered-bar2', 'baz': 'rendered-baz2'}}

  # populate the configuration cache with the updated configs
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('bar is rendered-bar2, baz is rendered-baz2', alerts[0]['text'])
def test_skipped_alert(self):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "SCRIPT",
      "path": "test_script.py",
    }
  }

  # normally set by AlertSchedulerHandler
  json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files')
  json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services')
  json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts')

  collector = AlertCollector()
  sa = ScriptAlert(json, json['source'], None)  # instruct the test alert script to be skipped
  sa.set_helpers(collector, {'foo-site/skip': 'true'})

  self.assertEquals(json['source']['path'], sa.path)
  self.assertEquals(json['source']['stacks_directory'], sa.stacks_dir)
  self.assertEquals(json['source']['common_services_directory'], sa.common_services_dir)
  self.assertEquals(json['source']['host_scripts_directory'], sa.host_scripts_dir)

  # ensure that it was skipped
  self.assertEquals(0, len(collector.alerts()))
def test_script_alert(self):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "SCRIPT",
      "path": "test_script.py",
      "reporting": {
        "ok": {"text": "TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "Could not load process info: {0}"}
      }
    }
  }

  # normally set by AlertSchedulerHandler
  json['source']['stacks_dir'] = os.path.join('ambari_agent', 'dummy_files')

  collector = AlertCollector()
  sa = ScriptAlert(json, json['source'])
  sa.set_helpers(collector, '')
  self.assertEquals(json['source']['path'], sa.path)
  self.assertEquals(json['source']['stacks_dir'], sa.stacks_dir)

  sa.collect()

  self.assertEquals('WARNING', collector.alerts()[0]['state'])
  self.assertEquals('all is not well', collector.alerts()[0]['text'])
def test_metric_alert_uses_refresh_processor(self, http_response_mock, http_connection_mock):
  """
  Tests that the RefreshHeaderProcessor is correctly chained and called
  :param http_response_mock:
  :param http_connection_mock:
  :return:
  """
  http_conn = http_connection_mock.return_value
  http_conn.getresponse.return_value = MagicMock(status=200)
  http_response_mock.return_value = MagicMock(code=200)

  url_opener = urllib2.build_opener(RefreshHeaderProcessor())
  response = url_opener.open("http://foo.bar.baz/jmx")

  self.assertFalse(response is None)
  self.assertTrue(http_conn.request.called)
  self.assertTrue(http_conn.getresponse.called)
  self.assertTrue(http_response_mock.called)

  # now we know that the refresh header is intercepting, reset the mocks
  # and try with a METRIC alert
  MagicMock.reset_mock(http_response_mock)
  MagicMock.reset_mock(http_connection_mock)

  definition_json = self._get_metric_alert_definition()

  configuration = {'hdfs-site': {
    'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()

  self.assertFalse(response is None)
  self.assertTrue(http_conn.request.called)
  self.assertTrue(http_conn.getresponse.called)
  self.assertTrue(http_response_mock.called)
def test_port_alert(self, socket_connect_mock, time_mock):
  definition_json = self._get_port_alert_definition()

  configuration = {'hdfs-site': {'my-key': 'value1'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  # called 3x with 3 calls per alert
  # - 900ms and then a time.time() for the date from base_alert
  # - 2000ms and then a time.time() for the date from base_alert
  # - socket.timeout to simulate a timeout and then a time.time() for the date from base_alert
  time_mock.side_effect = [0, 900, 336283000000,
                           0, 2000, 336283100000,
                           socket.timeout, 336283200000]

  alert = PortAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  self.assertEquals(6, alert.interval())

  # 900ms is OK
  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('OK', alerts[0]['state'])

  # 2000ms is WARNING
  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('WARNING', alerts[0]['state'])

  # throws a socket.timeout exception, causes a CRITICAL
  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('CRITICAL', alerts[0]['state'])
def test_alert_collector_purge(self):
  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "{{hdfs-site/my-key}}",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "(Unit Tests) Could not load process info: {0}"}
      }
    }
  }

  collector = AlertCollector()

  pa = PortAlert(json, json['source'])
  pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
  self.assertEquals(6, pa.interval())

  res = pa.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertTrue(alerts[0] is not None)
  self.assertEquals('CRITICAL', alerts[0]['state'])

  collector.remove_by_uuid('c1f73191-4481-4435-8dae-fd380e4c0be1')
  self.assertEquals(0, len(collector.alerts()))
def test_metric_alert(self, ma_load_jmx_mock):
  definition_json = self._get_metric_alert_definition()

  configuration = {'hdfs-site': {
    'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  # trip an OK
  ma_load_jmx_mock.return_value = [1, 25]

  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('OK', alerts[0]['state'])
  self.assertEquals('(Unit Tests) OK: 1 25 125', alerts[0]['text'])

  # trip a warning
  ma_load_jmx_mock.return_value = [1, 75]

  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('(Unit Tests) Warning: 1 75 175', alerts[0]['text'])

  # trip a critical now
  ma_load_jmx_mock.return_value = [1, 150]

  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('CRITICAL', alerts[0]['state'])
  self.assertEquals('(Unit Tests) Critical: 1 150 250', alerts[0]['text'])

  del definition_json['source']['jmx']['value']
  collector = AlertCollector()

  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  # now try without any jmx value to compare to
  ma_load_jmx_mock.return_value = [1, 25]

  alert.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('OK', alerts[0]['state'])
  self.assertEquals('(Unit Tests) OK: 1 25 None', alerts[0]['text'])
def test_web_alert(self, wa_make_web_request_mock):
  json = {
    "name": "webalert_test",
    "service": "HDFS",
    "component": "DATANODE",
    "label": "WebAlert Test",
    "interval": 1,
    "scope": "HOST",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "WEB",
      "uri": {
        "http": "{{hdfs-site/dfs.datanode.http.address}}",
        "https": "{{hdfs-site/dfs.datanode.https.address}}",
        "https_property": "{{hdfs-site/dfs.http.policy}}",
        "https_property_value": "HTTPS_ONLY"
      },
      "reporting": {
        "ok": {"text": "(Unit Tests) ok: {0}"},
        "warning": {"text": "(Unit Tests) warning: {0}"},
        "critical": {"text": "(Unit Tests) critical: {1}. {3}"}
      }
    }
  }

  WebResponse = namedtuple('WebResponse', 'status_code time_millis error_msg')
  wa_make_web_request_mock.return_value = WebResponse(200, 1.234, None)

  # run the alert and check HTTP 200
  collector = AlertCollector()
  alert = WebAlert(json, json['source'])
  alert.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('(Unit Tests) ok: 200', alerts[0]['text'])
  self.assertEquals('OK', alerts[0]['state'])

  # run the alert and check HTTP 500
  wa_make_web_request_mock.return_value = WebResponse(500, 1.234, None)
  collector = AlertCollector()
  alert = WebAlert(json, json['source'])
  alert.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('(Unit Tests) warning: 500', alerts[0]['text'])

  # run the alert and check critical
  wa_make_web_request_mock.return_value = WebResponse(0, 0, 'error message')

  collector = AlertCollector()
  alert = WebAlert(json, json['source'])
  alert.set_helpers(collector, {'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80'})
  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  # http assertion indicating that we properly determined non-SSL
  self.assertEquals('CRITICAL', alerts[0]['state'])
  self.assertEquals('(Unit Tests) critical: http://1.2.3.4:80. error message', alerts[0]['text'])

  collector = AlertCollector()
  alert = WebAlert(json, json['source'])
  alert.set_helpers(collector, {
    'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80',
    'hdfs-site/dfs.datanode.https.address': '1.2.3.4:8443',
    'hdfs-site/dfs.http.policy': 'HTTPS_ONLY'
  })
  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  # SSL assertion
  self.assertEquals('CRITICAL', alerts[0]['state'])
  self.assertEquals('(Unit Tests) critical: https://1.2.3.4:8443. error message', alerts[0]['text'])
def test_uri_structure_parsing(self):
  uri_structure = {
    "http": "{{hdfs-site/dfs.namenode.http.address}}",
    "https": "{{hdfs-site/dfs.namenode.https.address}}",
    "https_property": "{{hdfs-site/dfs.http.policy}}",
    "https_property_value": "HTTPS_ONLY",
    "high_availability": {
      "nameservice": "{{hdfs-site/dfs.nameservices}}",
      "alias_key": "{{hdfs-site/dfs.ha.namenodes.{{ha-nameservice}}}}",
      "http_pattern": "{{hdfs-site/dfs.namenode.http-address.{{ha-nameservice}}.{{alias}}}}",
      "https_pattern": "{{hdfs-site/dfs.namenode.https-address.{{ha-nameservice}}.{{alias}}}}"
    }
  }

  configuration = {
    'hdfs-site': {
      'dfs.namenode.http.address': 'c6401.ambari.apache.org:80',
      'dfs.namenode.https.address': 'c6401.ambari.apache.org:443'
    }
  }

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = MockAlert()
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  uri_keys = alert._lookup_uri_property_keys(uri_structure)
  self.assertFalse(alert._check_uri_ssl_property(uri_keys))

  uri = alert._get_uri_from_structure(uri_keys)
  self.assertEqual('c6401.ambari.apache.org:80', uri.uri)
  self.assertEqual(False, uri.is_ssl_enabled)

  configuration = {
    'hdfs-site': {
      'dfs.http.policy': 'HTTP_ONLY',
      'dfs.namenode.http.address': 'c6401.ambari.apache.org:80',
      'dfs.namenode.https.address': 'c6401.ambari.apache.org:443'
    }
  }

  self.__update_cluster_configuration(cluster_configuration, configuration)

  uri_keys = alert._lookup_uri_property_keys(uri_structure)
  self.assertFalse(alert._check_uri_ssl_property(uri_keys))

  uri = alert._get_uri_from_structure(uri_keys)
  self.assertEqual('c6401.ambari.apache.org:80', uri.uri)
  self.assertEqual(False, uri.is_ssl_enabled)

  # switch to SSL
  configuration = {
    'hdfs-site': {
      'dfs.http.policy': 'HTTPS_ONLY',
      'dfs.namenode.http.address': 'c6401.ambari.apache.org:80',
      'dfs.namenode.https.address': 'c6401.ambari.apache.org:443'
    }
  }

  self.__update_cluster_configuration(cluster_configuration, configuration)

  uri_keys = alert._lookup_uri_property_keys(uri_structure)
  self.assertTrue(alert._check_uri_ssl_property(uri_keys))

  uri = alert._get_uri_from_structure(uri_keys)
  self.assertEqual('c6401.ambari.apache.org:443', uri.uri)
  self.assertEqual(True, uri.is_ssl_enabled)

  # test HA
  configuration = {
    'hdfs-site': {
      'dfs.http.policy': 'HTTP_ONLY',
      'dfs.namenode.http.address': 'c6401.ambari.apache.org:80',
      'dfs.namenode.https.address': 'c6401.ambari.apache.org:443',
      'dfs.nameservices': 'c1ha',
      'dfs.ha.namenodes.c1ha': 'nn1, nn2',
      'dfs.namenode.http-address.c1ha.nn1': 'c6401.ambari.apache.org:8080',
      'dfs.namenode.http-address.c1ha.nn2': 'c6402.ambari.apache.org:8080',
    }
  }

  self.__update_cluster_configuration(cluster_configuration, configuration)

  uri_keys = alert._lookup_uri_property_keys(uri_structure)
  self.assertFalse(alert._check_uri_ssl_property(uri_keys))

  uri = alert._get_uri_from_structure(uri_keys)
  self.assertEqual('c6401.ambari.apache.org:8080', uri.uri)
  self.assertEqual(False, uri.is_ssl_enabled)

  # test HA SSL
  configuration = {
    'hdfs-site': {
      'dfs.http.policy': 'HTTPS_ONLY',
      'dfs.namenode.http.address': 'c6401.ambari.apache.org:80',
      'dfs.namenode.https.address': 'c6401.ambari.apache.org:443',
      'dfs.nameservices': 'c1ha',
      'dfs.ha.namenodes.c1ha': 'nn1, nn2',
      'dfs.namenode.http-address.c1ha.nn1': 'c6401.ambari.apache.org:8080',
      'dfs.namenode.http-address.c1ha.nn2': 'c6402.ambari.apache.org:8080',
      'dfs.namenode.https-address.c1ha.nn1': 'c6401.ambari.apache.org:8443',
      'dfs.namenode.https-address.c1ha.nn2': 'c6402.ambari.apache.org:8443',
    }
  }

  self.__update_cluster_configuration(cluster_configuration, configuration)

  uri_keys = alert._lookup_uri_property_keys(uri_structure)
  self.assertTrue(alert._check_uri_ssl_property(uri_keys))

  uri = alert._get_uri_from_structure(uri_keys)
  self.assertEqual('c6401.ambari.apache.org:8443', uri.uri)
  self.assertEqual(True, uri.is_ssl_enabled)
def test_web_alert(self, wa_make_web_request_mock):
  definition_json = self._get_web_alert_definition()

  WebResponse = namedtuple('WebResponse', 'status_code time_millis error_msg')
  wa_make_web_request_mock.return_value = WebResponse(200, 1.234, None)

  # run the alert and check HTTP 200
  configuration = {'hdfs-site': {
    'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = WebAlert(definition_json, definition_json['source'], None)
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('(Unit Tests) ok: 200', alerts[0]['text'])
  self.assertEquals('OK', alerts[0]['state'])

  # run the alert and check HTTP 500
  wa_make_web_request_mock.return_value = WebResponse(500, 1.234, "Internal Server Error")

  collector = AlertCollector()
  alert = WebAlert(definition_json, definition_json['source'], None)
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  self.assertEquals('WARNING', alerts[0]['state'])
  self.assertEquals('(Unit Tests) warning: 500 (Internal Server Error)', alerts[0]['text'])

  # run the alert and check critical
  wa_make_web_request_mock.return_value = WebResponse(0, 0, 'error message')

  collector = AlertCollector()
  alert = WebAlert(definition_json, definition_json['source'], None)
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  # http assertion indicating that we properly determined non-SSL
  self.assertEquals('CRITICAL', alerts[0]['state'])
  self.assertEquals('(Unit Tests) critical: http://c6401.ambari.apache.org:80. error message',
                    alerts[0]['text'])

  configuration = {'hdfs-site': {
    'dfs.http.policy': 'HTTPS_ONLY',
    'dfs.datanode.http.address': 'c6401.ambari.apache.org:80',
    'dfs.datanode.https.address': 'c6401.ambari.apache.org:443'}}

  self.__update_cluster_configuration(cluster_configuration, configuration)

  collector = AlertCollector()
  alert = WebAlert(definition_json, definition_json['source'], None)
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()

  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))

  # SSL assertion
  self.assertEquals('CRITICAL', alerts[0]['state'])
  self.assertEquals('(Unit Tests) critical: https://c6401.ambari.apache.org:443. error message',
                    alerts[0]['text'])
def test_alert_uri_structure(self, ma_load_jmx_mock):
  json = {
    "name": "cpu_check",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "METRIC",
      "uri": {
        "http": "{{hdfs-site/dfs.datanode.http.address}}",
        "https": "{{hdfs-site/dfs.datanode.https.address}}",
        "https_property": "{{hdfs-site/dfs.http.policy}}",
        "https_property_value": "HTTPS_ONLY"
      },
      "jmx": {
        "property_list": ["someJmxObject/value", "someOtherJmxObject/value"],
        "value": "{0}"
      },
      "reporting": {
        "ok": {"text": "(Unit Tests) ok_arr: {0} {1} {2}"},
        "warning": {"text": "", "value": 10},
        "critical": {"text": "(Unit Tests) crit_arr: {0} {1} {2}", "value": 20}
      }
    }
  }

  ma_load_jmx_mock.return_value = [1, 1]

  # run the alert without specifying any keys; an exception should be thrown
  # indicating that there was no URI and the result is UNKNOWN
  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, '')
  ma.collect()

  self.assertEquals('UNKNOWN', collector.alerts()[0]['state'])

  # set 2 properties that make no sense without the main URI properties
  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, {'hdfs-site/dfs.http.policy': 'HTTP_ONLY'})
  ma.collect()

  self.assertEquals('UNKNOWN', collector.alerts()[0]['state'])

  # set an actual property key (http)
  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, {
    'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80',
    'hdfs-site/dfs.http.policy': 'HTTP_ONLY'
  })
  ma.collect()

  self.assertEquals('OK', collector.alerts()[0]['state'])

  # set an actual property key (https)
  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, {
    'hdfs-site/dfs.datanode.https.address': '1.2.3.4:443',
    'hdfs-site/dfs.http.policy': 'HTTP_ONLY'
  })
  ma.collect()

  self.assertEquals('OK', collector.alerts()[0]['state'])

  # set both (http and https)
  collector = AlertCollector()
  ma = MetricAlert(json, json['source'])
  ma.set_helpers(collector, {
    'hdfs-site/dfs.datanode.http.address': '1.2.3.4:80',
    'hdfs-site/dfs.datanode.https.address': '1.2.3.4:443',
    'hdfs-site/dfs.http.policy': 'HTTP_ONLY'
  })
  ma.collect()

  self.assertEquals('OK', collector.alerts()[0]['state'])
def test_port_alert(self, socket_connect_mock, time_mock):
  # called 3x with 3 calls per alert
  # - 900ms and then a time.time() for the date from base_alert
  # - 2000ms and then a time.time() for the date from base_alert
  # - socket.timeout to simulate a timeout and then a time.time() for the date from base_alert
  time_mock.side_effect = [0, 900, 336283200000,
                           0, 2000, 336283200000,
                           socket.timeout, 336283200000]

  json = {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "{{hdfs-site/my-key}}",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"},
        "warning": {"text": "(Unit Tests) TCP WARN - {0:.4f} response time on port {1}", "value": 1.5},
        "critical": {"text": "(Unit Tests) Could not load process info: {0}", "value": 5.0}
      }
    }
  }

  collector = AlertCollector()

  pa = PortAlert(json, json['source'])
  pa.set_helpers(collector, {'hdfs-site/my-key': 'value1'})
  self.assertEquals(6, pa.interval())

  # 900ms is OK
  pa.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('OK', alerts[0]['state'])

  # 2000ms is WARNING
  pa.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('WARNING', alerts[0]['state'])

  # throws a socket.timeout exception, causes a CRITICAL
  pa.collect()
  alerts = collector.alerts()
  self.assertEquals(0, len(collector.alerts()))
  self.assertEquals('CRITICAL', alerts[0]['state'])
def test_alert_uri_structure(self, ma_load_jmx_mock):
  definition_json = self._get_metric_alert_definition()

  ma_load_jmx_mock.return_value = [0, 0]

  # run the alert without specifying any keys; an exception should be thrown
  # indicating that there was no URI and the result is UNKNOWN
  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()

  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()
  self.assertEquals('UNKNOWN', collector.alerts()[0]['state'])

  # set properties that make no sense without the main URI properties
  configuration = {'hdfs-site': {'dfs.http.policy': 'HTTP_ONLY'}}

  collector = AlertCollector()
  cluster_configuration = self.__get_cluster_configuration()
  self.__update_cluster_configuration(cluster_configuration, configuration)

  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()
  self.assertEquals('UNKNOWN', collector.alerts()[0]['state'])

  # set an actual property key (http)
  configuration = {'hdfs-site': {
    'dfs.http.policy': 'HTTP_ONLY',
    'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'}}

  self.__update_cluster_configuration(cluster_configuration, configuration)

  collector = AlertCollector()
  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()
  self.assertEquals('OK', collector.alerts()[0]['state'])

  # set an actual property key (https)
  configuration = {'hdfs-site': {
    'dfs.http.policy': 'HTTP_ONLY',
    'dfs.datanode.https.address': 'c6401.ambari.apache.org:443'}}

  self.__update_cluster_configuration(cluster_configuration, configuration)

  collector = AlertCollector()
  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()
  self.assertEquals('OK', collector.alerts()[0]['state'])

  # set both (http and https)
  configuration = {'hdfs-site': {
    'dfs.http.policy': 'HTTP_ONLY',
    'dfs.datanode.http.address': 'c6401.ambari.apache.org:80',
    'dfs.datanode.https.address': 'c6401.ambari.apache.org:443'}}

  self.__update_cluster_configuration(cluster_configuration, configuration)

  collector = AlertCollector()
  alert = MetricAlert(definition_json, definition_json['source'])
  alert.set_helpers(collector, cluster_configuration)
  alert.set_cluster("c1", "c6401.ambari.apache.org")

  alert.collect()
  self.assertEquals('OK', collector.alerts()[0]['state'])