Example #1
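Schedules a PORT alert built from an inline definition, then verifies that scheduling it again with enabled set to False leaves the job count unchanged, while re-enabling it adds a new job.
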
    def test_disabled_definitions(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path, None)
        ash.start()

        self.assertEquals(1, ash.get_job_count())

        json = {
            "name": "namenode_process",
            "service": "HDFS",
            "component": "NAMENODE",
            "label": "NameNode process",
            "interval": 6,
            "scope": "host",
            "enabled": True,
            "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
            "source": {
                "type": "PORT",
                "uri": "{{hdfs-site/my-key}}",
                "default_port": 50070,
                "reporting": {
                    "ok": {
                        "text":
                        "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"
                    },
                    "critical": {
                        "text": "(Unit Tests) Could not load process info: {0}"
                    }
                }
            }
        }

        pa = PortAlert(json, json['source'])
        ash.schedule_definition(pa)

        self.assertEquals(2, ash.get_job_count())

        json['enabled'] = False
        pa = PortAlert(json, json['source'])
        ash.schedule_definition(pa)

        # verify disabled alert not scheduled
        self.assertEquals(2, ash.get_job_count())

        json['enabled'] = True
        pa = PortAlert(json, json['source'])
        ash.schedule_definition(pa)

        # verify enabled alert was scheduled
        self.assertEquals(3, ash.get_job_count())
Example #2
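Verifies that reschedule() preserves the single loaded job; this variant uses an older, two-argument AlertSchedulerHandler constructor.
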
    def test_reschedule(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path)
        ash.start()

        self.assertEquals(1, ash.get_job_count())
        ash.reschedule()
        self.assertEquals(1, ash.get_job_count())
Example #3
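Runs an alert on demand through execute_alert() and checks that the handler's collector holds the result.
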
    def test_immediate_alert(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        cluster_configuration = self.__get_cluster_configuration()
        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path,
                                    cluster_configuration, None)

        ash.start()

        self.assertEquals(1, ash.get_job_count())
        self.assertEquals(0, len(ash._collector.alerts()))

        execution_commands = [{
            "clusterName": "c1",
            "hostName": "c6401.ambari.apache.org",
            "alertDefinition": self._get_port_alert_definition()
        }]

        # execute the alert immediately and verify that the collector has the result
        ash.execute_alert(execution_commands)
        self.assertEquals(1, len(ash._collector.alerts()))
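Examples #3, #4, #6, and #7 call a self._get_port_alert_definition() helper that this listing does not show; self.__get_cluster_configuration() is likewise defined elsewhere in the test class and supplies the cluster configuration object passed to the handler. Judging from the inline definitions in Examples #1 and #8, the port-alert helper presumably returns the same dictionary. A minimal sketch under that assumption (the real helper may differ):

    def _get_port_alert_definition(self):
        # Sketch reconstructed from the inline definitions in Examples #1
        # and #8; not necessarily the helper's actual body.
        return {
            "name": "namenode_process",
            "service": "HDFS",
            "component": "NAMENODE",
            "label": "NameNode process",
            "interval": 6,
            "scope": "host",
            "enabled": True,
            "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
            "source": {
                "type": "PORT",
                "uri": "{{hdfs-site/my-key}}",
                "default_port": 50070,
                "reporting": {
                    "ok": {
                        "text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"
                    },
                    "critical": {
                        "text": "(Unit Tests) Could not load process info: {0}"
                    }
                }
            }
        }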
Example #4
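The same immediate-execution test as Example #3, drawn from a source that uses two-space indentation.
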
  def test_immediate_alert(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()
    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertEquals(1, ash.get_job_count())
    self.assertEquals(0, len(ash._collector.alerts()))

    execution_commands = [ {
      "clusterName": "c1",
      "hostName": "c6401.ambari.apache.org",
      "alertDefinition": self._get_port_alert_definition()
    } ]

    # execute the alert immediately and verify that the collector has the result
    ash.execute_alert(execution_commands)
    self.assertEquals(1, len(ash._collector.alerts()))
Example #5
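A reschedule() test against the fuller constructor, which also takes common-services, host-scripts, and cluster-configuration arguments.
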
  def test_reschedule(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()

    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertEquals(1, ash.get_job_count())
    ash.reschedule()
    self.assertEquals(1, ash.get_job_count())
Example #6
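The disabled-definition test from Example #1, rewritten to build the alert from the shared _get_port_alert_definition() helper.
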
    def test_disabled_definitions(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        cluster_configuration = self.__get_cluster_configuration()

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path,
                                    cluster_configuration, None)

        ash.start()

        self.assertEquals(1, ash.get_job_count())

        definition_json = self._get_port_alert_definition()

        alert = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(alert)

        self.assertEquals(2, ash.get_job_count())

        definition_json['enabled'] = False
        alert = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(alert)

        # verify disabled alert not scheduled
        self.assertEquals(2, ash.get_job_count())

        definition_json['enabled'] = True
        alert = PortAlert(definition_json, definition_json['source'])
        ash.schedule_definition(alert)

        # verify enabled alert was scheduled
        self.assertEquals(3, ash.get_job_count())
Example #7
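A two-space-indented variant of the disabled-definition test from Example #6.
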
  def test_disabled_definitions(self):
    test_file_path = os.path.join('ambari_agent', 'dummy_files')
    test_stack_path = os.path.join('ambari_agent', 'dummy_files')
    test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
    test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

    cluster_configuration = self.__get_cluster_configuration()

    ash = AlertSchedulerHandler(test_file_path, test_stack_path,
      test_common_services_path, test_host_scripts_path, cluster_configuration,
      None)

    ash.start()

    self.assertEquals(1, ash.get_job_count())

    definition_json = self._get_port_alert_definition()

    alert = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(alert)

    self.assertEquals(2, ash.get_job_count())

    definition_json['enabled'] = False
    alert = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(alert)

    # verify disabled alert not scheduled
    self.assertEquals(2, ash.get_job_count())

    definition_json['enabled'] = True
    alert = PortAlert(definition_json, definition_json['source'])
    ash.schedule_definition(alert)

    # verify enabled alert was scheduled
    self.assertEquals(3, ash.get_job_count())
Example #8
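An immediate-execution test with the full port alert definition inlined into the execution command rather than loaded from the helper.
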
    def test_immediate_alert(self):
        test_file_path = os.path.join('ambari_agent', 'dummy_files')
        test_stack_path = os.path.join('ambari_agent', 'dummy_files')
        test_common_services_path = os.path.join('ambari_agent', 'dummy_files')
        test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files')

        ash = AlertSchedulerHandler(test_file_path, test_stack_path,
                                    test_common_services_path,
                                    test_host_scripts_path, None)
        ash.start()

        self.assertEquals(1, ash.get_job_count())
        self.assertEquals(0, len(ash._collector.alerts()))

        execution_commands = [{
            "clusterName": "c1",
            "hostName": "c6401.ambari.apache.org",
            "alertDefinition": {
                "name": "namenode_process",
                "service": "HDFS",
                "component": "NAMENODE",
                "label": "NameNode process",
                "interval": 6,
                "scope": "host",
                "enabled": True,
                "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1",
                "source": {
                    "type": "PORT",
                    "uri": "{{hdfs-site/my-key}}",
                    "default_port": 50070,
                    "reporting": {
                        "ok": {
                            "text":
                            "(Unit Tests) TCP OK - {0:.4f} response time on port {1}"
                        },
                        "critical": {
                            "text":
                            "(Unit Tests) Could not load process info: {0}"
                        }
                    }
                }
            }
        }]

        # execute the alert immediately and verify that the collector has the result
        ash.execute_alert(execution_commands)
        self.assertEquals(1, len(ash._collector.alerts()))
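
For context, a PORT-type alert like the one above amounts to a timed TCP connect against the configured (or default) port. A rough standalone illustration of the idea, not Ambari's implementation, using a hypothetical target host and port:

    import socket
    import time

    def check_tcp_port(host, port, timeout=5.0):
        # Illustrative sketch only: time a TCP connect and format the outcome
        # in the style of the alert definition's "ok"/"critical" reporting text.
        start = time.time()
        try:
            connection = socket.create_connection((host, port), timeout=timeout)
            connection.close()
            elapsed = time.time() - start
            return "OK", "(Unit Tests) TCP OK - {0:.4f} response time on port {1}".format(elapsed, port)
        except socket.error as error:
            return "CRITICAL", "(Unit Tests) Could not load process info: {0}".format(error)

    # state, text = check_tcp_port("c6401.ambari.apache.org", 50070)  # hypothetical target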