Example #1
    def test_start(self):
        # note: this payload is never used below; the test exercises start()
        # purely through the mocked scheduler internals
        execution_commands = [{
            'clusterName': 'cluster',
            'hostName': 'host',
            'alertDefinition': {
                'name': 'alert1'
            }
        }]

        scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH,
                                          TEST_PATH, None, None)
        alert_mock = MagicMock()
        alert_mock.interval = Mock(return_value=5)
        alert_mock.collect = Mock()
        alert_mock.set_helpers = Mock()
        scheduler.schedule_definition = MagicMock()
        scheduler._AlertSchedulerHandler__scheduler = MagicMock()
        scheduler._AlertSchedulerHandler__scheduler.running = False
        scheduler._AlertSchedulerHandler__scheduler.start = Mock()
        scheduler._AlertSchedulerHandler__json_to_callable = Mock(
            return_value=alert_mock)
        scheduler._AlertSchedulerHandler__config_maps = {'cluster': {}}

        scheduler.start()

        self.assertTrue(
            scheduler._AlertSchedulerHandler__scheduler.start.called)
        scheduler.schedule_definition.assert_called_with(alert_mock)
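The `_AlertSchedulerHandler__scheduler` attributes above are Python's name mangling at work: inside a class body, `__name` is rewritten to `_ClassName__name`, which is why the test can swap the handler's private scheduler for a mock from the outside. A minimal standalone sketch of the same trick (the `Widget` class is hypothetical, not from the Ambari code):

from mock import MagicMock  # the standalone mock package these Python 2-era tests use


class Widget(object):
    def __init__(self):
        self.__engine = "real engine"  # stored as _Widget__engine due to name mangling

    def engine_name(self):
        return self.__engine


w = Widget()
w._Widget__engine = MagicMock()  # same pattern as scheduler._AlertSchedulerHandler__scheduler
print(w.engine_name())           # now returns the MagicMock, proving the private attribute was replaced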
Example #2
    def test_immediate_alert(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        cluster_configuration = self.__get_cluster_configuration()
        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path,
                                    cluster_configuration, None)

        ash.start()

        self.assertEquals(1, ash.get_job_count())
        self.assertEquals(0, len(ash._collector.alerts()))

        execution_commands = [{
            "clusterName": "c1",
            "hostName": "c6401.ambari.apache.org",
            "alertDefinition": self._get_port_alert_definition()
        }]

        # execute the alert immediately and verify that the collector has the result
        ash.execute_alert(execution_commands)
        self.assertEquals(1, len(ash._collector.alerts()))
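The `_get_port_alert_definition()` helper is not shown in this excerpt. Judging by the inline definitions in Examples #6 and #9, it plausibly returns the same port-alert dictionary; the body below is an assumption reconstructed from those examples, not the fixture's verbatim source:

    def _get_port_alert_definition(self):
        # assumed shape, mirroring the inline JSON in Examples #6 and #9
        return {
            "name": "namenode_process",
            "service": "HDFS",
            "component": "NAMENODE",
            "label": "NameNode process",
            "interval": 6,
            "scope": "host",
            "enabled": True,
            "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
            "source": {
                "type": "PORT",
                "uri": "{{hdfs-site/my-key}}",
                "default_port": 50070,
                "reporting": {
                    "ok": {"text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"},
                    "critical": {"text": "(Unit Tests) Could not load process info: {0}"}
                }
            }
        }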
Example #3
  def test_start(self):
    execution_commands = [
      {
        'clusterName': 'cluster',
        'hostName': 'host',
        'alertDefinition': {
          'name': 'alert1'
        }
      }
    ]

    scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH, TEST_PATH, 5, None, None, None)
    alert_mock = MagicMock()
    alert_mock.interval = Mock(return_value=5)
    alert_mock.collect = Mock()
    alert_mock.set_helpers = Mock()
    scheduler.schedule_definition = MagicMock()
    scheduler._AlertSchedulerHandler__scheduler = MagicMock()
    scheduler._AlertSchedulerHandler__scheduler.running = False
    scheduler._AlertSchedulerHandler__scheduler.start = Mock()
    scheduler._AlertSchedulerHandler__json_to_callable = Mock(return_value=alert_mock)
    scheduler._AlertSchedulerHandler__config_maps = {
      'cluster': {}
    }

    scheduler.start()

    self.assertTrue(scheduler._AlertSchedulerHandler__scheduler.start.called)
    scheduler.schedule_definition.assert_called_with(alert_mock)
Example #4
  def test_immediate_alert(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()
    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertEquals(1, ash.get_job_count())
    self.assertEquals(0, len(ash._collector.alerts()))

    execution_commands = [{
      "clusterName": "c1",
      "hostName": "c6401.ambari.apache.org",
      "alertDefinition": self._get_port_alert_definition()
    }]

    # execute the alert immediately and verify that the collector has the result
    ash.execute_alert(execution_commands)
    self.assertEquals(1, len(ash._collector.alerts()))
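Condensed, the immediate-execution pattern of Examples #2 and #4 separates two observables: `get_job_count()` reports what is scheduled, while the collector reports what has actually run. Restated with the same names from the examples (no new API assumed):

    ash.start()
    assert ash.get_job_count() == 1            # one definition was scheduled...
    assert len(ash._collector.alerts()) == 0   # ...but nothing has produced a result yet

    ash.execute_alert(execution_commands)      # run now, bypassing the scheduler
    assert len(ash._collector.alerts()) == 1   # the collector holds the outcome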
Example #5
    def test_start(self, aps_add_interval_job_mock, aps_start_mock):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path)
        ash.start()

        self.assertTrue(aps_add_interval_job_mock.called)
        self.assertTrue(aps_start_mock.called)
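The two mock parameters in Example #5's signature imply `@patch` decorators that the excerpt omits. A plausible reconstruction follows; the APScheduler 2.x target (`Scheduler.add_interval_job` and `Scheduler.start`) is an assumption based on the parameter names and on the fact that stacked decorators inject mocks bottom-up:

from mock import patch
from apscheduler.scheduler import Scheduler  # APScheduler 2.x import path; an assumption

@patch.object(Scheduler, 'start')
@patch.object(Scheduler, 'add_interval_job')
def test_start(self, aps_add_interval_job_mock, aps_start_mock):
    # the bottom decorator supplies the first mock argument, so
    # add_interval_job arrives before start
    ...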
Example #6
    def test_disabled_definitions(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path, None)
        ash.start()

        self.assertEquals(1, ash.get_job_count())

        definition_json = {
            "name": "namenode_process",
            "service": "HDFS",
            "component": "NAMENODE",
            "label": "NameNode process",
            "interval": 6,
            "scope": "host",
            "enabled": True,
            "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
            "source": {
                "type": "PORT",
                "uri": "{{hdfs-site/my-key}}",
                "default_port": 50070,
                "reporting": {
                    "ok": {
                        "text":
                        "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"
                    },
                    "critical": {
                        "text": "(Unit Tests) Could not load process info: {0}"
                    }
                }
            }
        }

        pa = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(pa)

        self.assertEquals(2, ash.get_job_count())

        definition_json['enabled'] = False
        pa = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(pa)

        # verify disabled alert not scheduled
        self.assertEquals(2, ash.get_job_count())

        definition_json['enabled'] = True
        pa = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(pa)

        # verify enabled alert was scheduled
        self.assertEquals(3, ash.get_job_count())
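What Example #6 exercises is the guard inside `schedule_definition`: a disabled definition must not reach the underlying scheduler. The method body is not shown anywhere in this excerpt; the sketch below, written as it would sit inside AlertSchedulerHandler, is an assumption that would produce the job counts asserted above (only `interval()` is corroborated, by the `alert_mock.interval` stub in Example #1):

    def schedule_definition(self, definition):
        # assumed guard: a disabled definition is skipped outright,
        # leaving the job count untouched
        if not definition.is_enabled():  # is_enabled() assumed to expose the 'enabled' flag
            return
        # __job_function is a hypothetical wrapper around definition.collect()
        self.__scheduler.add_interval_job(self.__job_function(definition),
                                          seconds=definition.interval())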
Example #7
    def test_reschedule(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path)
        ash.start()

        self.assertEquals(1, ash.get_job_count())
        ash.reschedule()
        self.assertEquals(1, ash.get_job_count())
Example #8
  def test_start(self, aps_add_interval_job_mock, aps_start_mock):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()

    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertTrue(aps_add_interval_job_mock.called)
    self.assertTrue(aps_start_mock.called)
Example #9
    def test_immediate_alert(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path, None)
        ash.start()

        self.assertEquals(1, ash.get_job_count())
        self.assertEquals(0, len(ash._collector.alerts()))

        execution_commands = [{
            "clusterName": "c1",
            "hostName": "c6401.ambari.apache.org",
            "alertDefinition": {
                "name": "namenode_process",
                "service": "HDFS",
                "component": "NAMENODE",
                "label": "NameNode process",
                "interval": 6,
                "scope": "host",
                "enabled": True,
                "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
                "source": {
                    "type": "PORT",
                    "uri": "{{hdfs-site/my-key}}",
                    "default_port": 50070,
                    "reporting": {
                        "ok": {
                            "text":
                            "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"
                        },
                        "critical": {
                            "text":
                            "(Unit Tests) Could not load process info: {0}"
                        }
                    }
                }
            }
        }]

        # execute the alert immediately and verify that the collector has the result
        ash.execute_alert(execution_commands)
        self.assertEquals(1, len(ash._collector.alerts()))
Example #10
  def test_reschedule(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()

    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertEquals(1, ash.get_job_count())
    ash.reschedule()
    self.assertEquals(1, ash.get_job_count())
Example #11
    def test_update_configurations(self, json_mock):
        def open_side_effect(file, mode):
            if mode == 'w':
                file_mock = MagicMock()
                return file_mock
            else:
                return self.original_open(file, mode)

        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        commands = [{
            "clusterName": "c1",
            "configurations": {
                "hdfs-site": {
                    "dfs.namenode.http-address":
                    "c6401.ambari.apache.org:50071"
                }
            }
        }]
        with open(os.path.join(test_stack_path, "definitions.json"), "r") as fp:
            all_commands = json.load(fp)
        all_commands[0]['configurations']['hdfs-site'].update(
            {"dfs.namenode.http-address": "c6401.ambari.apache.org:50071"})

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path, None)
        ash.start()

        with patch("__builtin__.open") as open_mock:
            open_mock.side_effect = open_side_effect
            ash.update_configurations(commands)

        self.assertTrue(json_mock.called)
        # NB: called_with() is not a Mock assertion method; Mock auto-creates
        # it and returns a truthy child mock, so this check always passes.
        # The intended call is json_mock.assert_called_with(all_commands).
        self.assertTrue(json_mock.called_with(all_commands))
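The `open_side_effect` idiom in Examples #11 and #12 is worth isolating: writes are handed a throwaway `MagicMock` so the test leaves no files behind, while reads fall through to the real builtin. A standalone sketch of the same pattern (file names here are illustrative):

from mock import MagicMock, patch

original_open = open  # capture the real builtin before patching

def open_side_effect(name, mode):
    if mode == 'w':
        return MagicMock()            # swallow writes from the code under test
    return original_open(name, mode)  # reads still hit the real filesystem

with patch("__builtin__.open") as open_mock:  # target is "builtins.open" on Python 3
    open_mock.side_effect = open_side_effect
    f = open("anything.txt", "w")     # returns the MagicMock, not a real file
    f.write("discarded")              # recorded on the mock, never written to disk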
Example #12
    def test_update_configurations_without_reschedule(self, json_mock,
                                                      reschedule_mock):
        def open_side_effect(file, mode):
            if mode == 'w':
                file_mock = MagicMock()
                return file_mock
            else:
                return self.original_open(file, mode)

        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        with open(os.path.join(test_stack_path, "definitions.json"), "r") as fp:
            all_commands = json.load(fp)

        # create a copy of the configurations from definitions.json, then add
        # a brand new property - this should not cause a restart since there are
        # no alerts that use this new property
        commands = [{"clusterName": "c1"}]
        commands[0]['configurations'] = all_commands[0]['configurations']
        commands[0]['configurations'].update({"foo": "bar"})

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path, None)

        ash.start()

        with patch("__builtin__.open") as open_mock:
            open_mock.side_effect = open_side_effect
            ash.update_configurations(commands)

        self.assertTrue(json_mock.called)
        # same caveat as in Example #11: called_with() always passes
        self.assertTrue(json_mock.called_with(all_commands))
        self.assertFalse(reschedule_mock.called)
Example #13
    def test_disabled_definitions(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        cluster_configuration = self.__get_cluster_configuration()

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path,
                                    cluster_configuration, None)

        ash.start()

        self.assertEquals(1, ash.get_job_count())

        definition_json = self._get_port_alert_definition()

        alert = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(alert)

        self.assertEquals(2, ash.get_job_count())

        definition_json['enabled'] = False
        alert = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(alert)

        # verify disabled alert not scheduled
        self.assertEquals(2, ash.get_job_count())

        definition_json['enabled'] = True
        alert = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(alert)

        # verify enabled alert was scheduled
        self.assertEquals(3, ash.get_job_count())
Example #14
  def test_disabled_definitions(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()

    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertEquals(1, ash.get_job_count())

    definition_json = self._get_port_alert_definition()

    alert = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(alert)

    self.assertEquals(2, ash.get_job_count())

    definition_json['enabled'] = False
    alert = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(alert)

    # verify disabled alert not scheduled
    self.assertEquals(2, ash.get_job_count())

    definition_json['enabled'] = True
    alert = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(alert)

    # verify enabled alert was scheduled
    self.assertEquals(3, ash.get_job_count())