def test_immediate_alert(self):
  test_file_path = os.path.join('ambari_agent', 'dummy_files')
  test_stack_path = os.path.join('ambari_agent', 'dummy_files')
  test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
  test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')
  cluster_configuration = self.__get_cluster_configuration()

  ash = AlertSchedulerHandler(test_file_path, test_stack_path,
    test_common_services_path, test_host_scripts_path, cluster_configuration,
    None)

  ash.start()

  self.assertEquals(1, ash.get_job_count())
  self.assertEquals(0, len(ash._collector.alerts()))

  execution_commands = [{
    "clusterName": "c1",
    "hostName": "c6401.ambari.apache.org",
    "alertDefinition": self._get_port_alert_definition()
  }]

  # execute the alert immediately and verify that the collector has the result
  ash.execute_alert(execution_commands)
  self.assertEquals(1, len(ash._collector.alerts()))
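# Note: _get_port_alert_definition() is a helper defined elsewhere in this test
# class. A plausible sketch of the PORT-type definition it returns, modeled on
# the inline definition used in the older test_immediate_alert variant further
# below (the exact dict returned by the real helper is an assumption):
def _get_port_alert_definition(self):
  return {
    "name": "namenode_process",
    "service": "HDFS",
    "component": "NAMENODE",
    "label": "NameNode process",
    "interval": 6,
    "scope": "host",
    "enabled": True,
    "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
    "source": {
      "type": "PORT",
      "uri": "{{hdfs-site/my-key}}",
      "default_port": 50070,
      "reporting": {
        "ok": {"text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"},
        "critical": {"text": "(Unit Tests) Could not load process info: {0}"}
      }
    }
  }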
def test_execute_alert_from_extension(self):
  execution_commands = [{
    'clusterName': 'cluster',
    'hostName': 'host',
    'publicHostName': 'host',
    'alertDefinition': {
      'name': 'alert1'
    }
  }]

  scheduler = AlertSchedulerHandler('wrong_path', 'wrong_path', 'wrong_path',
    TEST_PATH, 'wrong_path', None, self.config, None)
  alert_mock = MagicMock()
  alert_mock.collect = Mock()
  alert_mock.set_helpers = Mock()
  scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
  scheduler._AlertSchedulerHandler__config_maps = {'cluster': {}}

  scheduler.execute_alert(execution_commands)

  scheduler._AlertSchedulerHandler__json_to_callable.assert_called_with(
    'cluster', 'host', 'host', {'name': 'alert1'})
  self.assertTrue(alert_mock.collect.called)
def test_execute_alert_emptyCommands(self):
  execution_commands = []

  scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH,
    5, None, None, None)
  alert_mock = Mock()
  scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)

  scheduler.execute_alert(execution_commands)

  self.assertFalse(alert_mock.collect.called)
def test_execute_alert_emptyCommands(self):
  execution_commands = []

  scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH,
    None, None)
  alert_mock = Mock()
  scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)

  scheduler.execute_alert(execution_commands)

  self.assertFalse(alert_mock.collect.called)
def test_execute_alert_emptyCommands(self):
  execution_commands = []

  initializer_module = InitializerModule()
  initializer_module.init()

  scheduler = AlertSchedulerHandler(initializer_module)
  # was: TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, None, self.config, None
  alert_mock = Mock()
  scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)

  scheduler.execute_alert(execution_commands)

  self.assertFalse(alert_mock.collect.called)
def test_immediate_alert(self):
  test_file_path = os.path.join('ambari_agent', 'dummy_files')
  test_stack_path = os.path.join('ambari_agent', 'dummy_files')
  test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
  test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

  ash = AlertSchedulerHandler(test_file_path, test_stack_path,
    test_common_services_path, test_host_scripts_path, None)

  ash.start()

  self.assertEquals(1, ash.get_job_count())
  self.assertEquals(0, len(ash._collector.alerts()))

  execution_commands = [{
    "clusterName": "c1",
    "hostName": "c6401.ambari.apache.org",
    "alertDefinition": {
      "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
      "interval": 6,
      "scope": "host",
      "enabled": True,
      "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
      "source": {
        "type": "PORT",
        "uri": "{{hdfs-site/my-key}}",
        "default_port": 50070,
        "reporting": {
          "ok": {
            "text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"
          },
          "critical": {
            "text": "(Unit Tests) Could not load process info: {0}"
          }
        }
      }
    }
  }]

  # execute the alert immediately and verify that the collector has the result
  ash.execute_alert(execution_commands)
  self.assertEquals(1, len(ash._collector.alerts()))
def test_execute_alert(self):
  execution_commands = [
    {
      'clusterName': 'cluster',
      'hostName': 'host',
      'alertDefinition': {
        'name': 'alert1'
      }
    }
  ]

  scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH,
    5, None, None, None)
  alert_mock = MagicMock()
  alert_mock.collect = Mock()
  alert_mock.set_helpers = Mock()
  scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
  scheduler._AlertSchedulerHandler__config_maps = {'cluster': {}}

  scheduler.execute_alert(execution_commands)

  scheduler._AlertSchedulerHandler__json_to_callable.assert_called_with(
    'cluster', 'host', {'name': 'alert1'})
  self.assertTrue(alert_mock.collect.called)
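# A minimal harness the snippets above assume; module paths, the mock import
# style, and the TEST_PATH value are assumptions based on the Ambari agent
# test layout, not taken from this listing:
import os
import unittest

from mock.mock import MagicMock, Mock  # or: from unittest.mock import MagicMock, Mock

from ambari_agent.AlertSchedulerHandler import AlertSchedulerHandler  # assumed module path

TEST_PATH = os.path.join('ambari_agent', 'dummy_files')  # assumption


class TestAlertSchedulerHandler(unittest.TestCase):
  # the test methods above are defined here, alongside helpers such as
  # _get_port_alert_definition() and __get_cluster_configuration()
  pass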