Example #1
 def test_get_biggest_num_of_dict_array(self):
     test_target_array = [{"test_value": 3},
                          {"test_value": 7},
                          {"test_value": 9}]
     result = hapcommon.get_biggest_num_of_dict_array(test_target_array,
                                                      "test_value")
     self.assertEquals(result, 9)
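The implementation of hapcommon.get_biggest_num_of_dict_array is not shown on this page. Judging only from the test above, it returns the largest value stored under the given key across a list of dicts; a minimal sketch of that behaviour (an assumption, not the Hatohol source) could look like this:

    # Sketch inferred from the test above, not the actual hapcommon code:
    # return the largest value found under `key` in a list of dicts.
    def get_biggest_num_of_dict_array(dict_array, key):
        values = [element[key] for element in dict_array if key in element]
        if not values:
            return None
        return max(values)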
Example #2
    def update_triggers(self, host_ids=None, fetch_id=None):
        if self.__trigger_last_info is None:
            self.__trigger_last_info = self.get_last_info("trigger")

        put_empty_contents = False
        if fetch_id is not None:
            update_type = "ALL"
            triggers = self.__api.get_triggers(host_ids=host_ids)
            put_empty_contents = True
        else:
            update_type = "UPDATED"
            triggers = self.__api.get_triggers(self.__trigger_last_info,
                                               host_ids)

        if not len(triggers):
            return

        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")

        self.divide_and_put_data(self.put_triggers,
                                 triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
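The example above delegates the actual sending to divide_and_put_data, whose body is not shown on this page. The calls imply a signature of (put_func, data, put_empty_contents, **kwargs); the following is only a rough sketch of that pattern, with a hypothetical chunk size and behaviour that has not been checked against the Hatohol implementation:

    # Rough sketch only: split `data` into chunks and hand each chunk to
    # `put_func`; when `put_empty_contents` is set, call `put_func` even if
    # there is nothing to send. CHUNK_SIZE is a hypothetical value.
    CHUNK_SIZE = 1000

    def divide_and_put_data(put_func, data, put_empty_contents=False, **kwargs):
        if not data:
            if put_empty_contents:
                put_func([], **kwargs)
            return
        for start in range(0, len(data), CHUNK_SIZE):
            put_func(data[start:start + CHUNK_SIZE], **kwargs)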
Example #3
    def update_triggers(self, host_ids=None, fetch_id=None):
        if self.__trigger_last_info is None:
            self.__trigger_last_info = self.get_last_info("trigger")

        put_empty_contents = False
        if fetch_id is not None:
            update_type = "ALL"
            triggers = self.__api.get_triggers(host_ids=host_ids)
            put_empty_contents = True
        else:
            update_type = "UPDATED"
            triggers = self.__api.get_triggers(self.__trigger_last_info, host_ids)

        if not len(triggers):
            return

        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")

        self.divide_and_put_data(self.put_triggers, triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
Example #4
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):
        query = self.__socket.services.columns("plugin_output",
                                               "description",
                                               "last_state_change",
                                               "host_alias",
                                               "host_name",
                                               "state")

        if host_ids is not None:
            # Build a Livestatus regular-expression filter of the form
            # "host_name ~ id1|id2|...".
            filter_condition = "host_name ~ "
            for index, host_id in enumerate(host_ids):
                if index == 0:
                    filter_condition += host_id
                else:
                    filter_condition += "|" + host_id

            query = query.filter(filter_condition)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                unix_timestamp = hapcommon.translate_hatohol_time_to_unix_time(self.__trigger_last_info)
                query = query.filter("last_state_change >= %s" % unix_timestamp)
                update_type = "UPDATED"

        result = query.call()

        triggers = []
        for service in result:
            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(service["state"])

            last_state_change = datetime.datetime.fromtimestamp(service["last_state_change"])
            hapi_time = hapcommon.conv_to_hapi_time(last_state_change,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": service["description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": service["host_name"],
                "hostName": service["host_alias"],
                "brief": service["plugin_output"],
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        self.divide_and_put_data(self.put_triggers, triggers,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
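The collect_triggers_and_put examples rely on a private __parse_status_and_severity helper that is not shown here. A plausible sketch, assuming the standard Nagios/Livestatus service-state codes (0=OK, 1=WARNING, 2=CRITICAL, 3=UNKNOWN); the HAPI status and severity strings below are assumptions, not taken from the Hatohol source:

    # Sketch only: map a service state code to a (status, severity) pair.
    # The exact HAPI constant strings are assumed, not verified.
    def parse_status_and_severity(state):
        status_severity_map = {
            0: ("OK", "INFO"),          # OK
            1: ("NG", "WARNING"),       # WARNING
            2: ("NG", "CRITICAL"),      # CRITICAL
            3: ("UNKNOWN", "UNKNOWN"),  # UNKNOWN
        }
        return status_severity_map.get(state, ("UNKNOWN", "UNKNOWN"))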
Example #5
 def test_get_biggest_num_of_dict_array(self):
     test_target_array = [{
         "test_value": 3
     }, {
         "test_value": 7
     }, {
         "test_value": 9
     }]
     result = hapcommon.get_biggest_num_of_dict_array(
         test_target_array, "test_value")
     self.assertEquals(result, 9)
Example #6
    def update_triggers(self, host_ids=None, fetch_id=None):
        if self.__trigger_last_info is None:
            self.__trigger_last_info = self.get_last_info("trigger")

        triggers = self.__api.get_triggers(self.__trigger_last_info, host_ids)
        if not len(triggers):
            return

        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        update_type = "ALL" if fetch_id is not None else "UPDATED"

        self.put_triggers(triggers, update_type=update_type,
                          last_info=self.__trigger_last_info,
                          fetch_id=fetch_id)
Example #7
 def generate_event_last_info(self, events):
     return hapcommon.get_biggest_num_of_dict_array(events, "eventId")
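A quick illustration of what the helper above returns, using made-up event dicts with integer IDs (the real plugins on this page use whatever ID type the monitored backend provides):

    # Hypothetical input: generate_event_last_info picks the biggest eventId,
    # so for this list it would return 205.
    events = [{"eventId": 101}, {"eventId": 205}, {"eventId": 130}]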
Example #8
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):

        if host_ids is not None and not self.__validate_object_ids(host_ids):
            logger.error("Invalid: host_ids: %s" % host_ids)
            # TODO by 15.09 (*1): send error
            # There's no definition to send error in HAPI 2.0.
            # We have to extend the specification to enable this.
            return

        t0 = "nagios_services"
        t1 = "nagios_servicestatus"
        t2 = "nagios_hosts"
        sql = "SELECT " \
              + "%s.service_object_id, " % t0 \
              + "%s.current_state, " % t1 \
              + "%s.status_update_time, " % t1 \
              + "%s.output, " % t1 \
              + "%s.host_object_id, " % t2 \
              + "%s.display_name " % t2 \
              + "FROM %s INNER JOIN %s " % (t0, t1) \
              + "ON %s.service_object_id=%s.service_object_id " % (t0, t1) \
              + "INNER JOIN %s " % t2 \
              + "ON %s.host_object_id=%s.host_object_id" % (t0, t2)

        if host_ids is not None:
            in_cond = "','".join(host_ids)
            sql += " WHERE %s.host_object_id in ('%s')" % (t2, in_cond)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                nag_time = self.__convert_to_nagios_time(self.__trigger_last_info)
                sql += " WHERE status_update_time >= '%s'" % nag_time
                update_type = "UPDATED"

        self.__cursor.execute(sql)
        result = self.__cursor.fetchall()

        triggers = []
        for row in result:
            (trigger_id, state, update_time, msg, host_id, host_name) = row

            hapi_status, hapi_severity = \
              self.__parse_status_and_severity(state)

            hapi_time = hapcommon.conv_to_hapi_time(update_time,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": str(trigger_id),
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": str(host_id),
                "hostName": host_name,
                "brief": msg,
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        self.put_triggers(triggers, update_type=update_type,
                          last_info=self.__trigger_last_info,
                          fetch_id=fetch_id)
Example #9
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):

        if host_ids is not None and not self.__validate_object_ids(host_ids):
            logger.error("Invalid: host_ids: %s" % host_ids)
            # TODO by 15.09 (*1): send error
            # There's no definition to send error in HAPI2.
            # We have to extend the specification to enable this.
            return

        t0 = "nagios_services"
        t1 = "nagios_servicestatus"
        t2 = "nagios_hosts"
        sql = "SELECT " \
              + "%s.service_object_id, " % t0 \
              + "%s.current_state, " % t1 \
              + "%s.status_update_time, " % t1 \
              + "%s.output, " % t1 \
              + "%s.host_object_id, " % t2 \
              + "%s.display_name " % t2 \
              + "FROM %s INNER JOIN %s " % (t0, t1) \
              + "ON %s.service_object_id=%s.service_object_id " % (t0, t1) \
              + "INNER JOIN %s " % t2 \
              + "ON %s.host_object_id=%s.host_object_id" % (t0, t2)

        if host_ids is not None:
            in_cond = "','".join(host_ids)
            sql += " WHERE %s.host_object_id in ('%s')" % (t2, in_cond)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                nag_time = self.__convert_to_nagios_time(
                    self.__trigger_last_info)
                sql += " WHERE status_update_time >= '%s'" % nag_time
                update_type = "UPDATED"

        self.__cursor.execute(sql)
        result = self.__cursor.fetchall()

        triggers = []
        for row in result:
            (trigger_id, state, update_time, msg, host_id, host_name) = row

            hapi_status, hapi_severity = \
              self.__parse_status_and_severity(state)

            hapi_time = hapcommon.conv_to_hapi_time(update_time,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": str(trigger_id),
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": str(host_id),
                "hostName": host_name,
                "brief": msg,
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        # Put the data even when the trigger list is empty, but only for an
        # explicit fetch request (i.e. when fetch_id is given).
        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_triggers,
                                 triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
Example #10
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):
        query = self.__socket.services.columns("plugin_output",
                                               "description",
                                               "last_state_change",
                                               "host_alias",
                                               "host_name",
                                               "state")

        if host_ids is not None:
            # Build a Livestatus regular-expression filter of the form
            # "host_name ~ id1|id2|...".
            filter_condition = "host_name ~ "
            for index, host_id in enumerate(host_ids):
                if index == 0:
                    filter_condition += host_id
                else:
                    filter_condition += "|" + host_id

            query = query.filter(filter_condition)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                unix_timestamp = hapcommon.translate_hatohol_time_to_unix_time(self.__trigger_last_info)
                query = query.filter("last_state_change >= %s" % unix_timestamp)
                update_type = "UPDATED"

        result = query.call()

        triggers = []
        for service in result:
            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(service["state"])

            last_state_change = datetime.datetime.fromtimestamp(service["last_state_change"])
            hapi_time = hapcommon.conv_to_hapi_time(last_state_change,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": service["description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": service["host_name"],
                "hostName": service["host_alias"],
                "brief": service["plugin_output"],
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")

        # Put the data even when the trigger list is empty, but only for an
        # explicit fetch request (i.e. when fetch_id is given).
        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_triggers, triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
Example #11
 def generate_event_last_info(self, events):
     return hapcommon.get_biggest_num_of_dict_array(events, "eventId")