Example #1
    def __collect_items_and_put(self, host_id):
        url = "%s/v2/resources/%s" % (self.__ceilometer_ep, host_id)
        response = self.__request(url)

        items = []
        for link in response["links"]:
            rel = link.get("rel")
            if rel not in self.__target_items:
                continue
            href = link.get("href")
            if href is None:
                continue
            rc = self.__get_resource(rel, href)
            if rc is None:
                continue

            timestamp = self.parse_time(rc["timestamp"])
            hapi_time = hapcommon.conv_to_hapi_time(timestamp)
            counter_name = rc["counter_name"]
            items.append({
                # Item ID must be unique so we generate it with the host ID
                # and the counter name.
                "itemId": host_id + "." + counter_name,
                "hostId": host_id,
                "brief": counter_name,
                "lastValueTime": hapi_time,
                "lastValue": str(rc["counter_volume"]),
                "itemGroupName": "",
                "unit": rc["counter_unit"],
            })
        return items
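For illustration, with hypothetical values the item ID scheme above produces IDs like this:

# Hypothetical values, only to illustrate the itemId scheme used above:
# joining host_id and counter_name with "." keeps IDs unique per host.
host_id = "host-1234"
counter_name = "cpu_util"
item_id = host_id + "." + counter_name
assert item_id == "host-1234.cpu_util"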
Example #2
    def __put_event(self, timestamp, tag, raw_msg):
        event_id = self.__generate_event_id()
        try:
            msg = json.loads(raw_msg)
        except (ValueError, TypeError):  # raw_msg is not valid JSON
            msg = {}

        brief = msg.get(self.__message_key, raw_msg)
        host = msg.get(self.__host_key, self.__default_host)

        hapi_event_type = self.__get_parameter(msg, self.__type_key,
                                               self.__default_type,
                                               haplib.EVENT_TYPES)
        hapi_status = self.__get_parameter(msg, self.__status_key,
                                           self.__default_status,
                                           haplib.TRIGGER_STATUS)
        hapi_severity = self.__get_parameter(msg, self.__severity_key,
                                             self.__default_severity,
                                             haplib.TRIGGER_SEVERITY)

        events = []
        events.append({
            "eventId": event_id,
            "time": hapcommon.conv_to_hapi_time(timestamp),
            "type": hapi_event_type,
            "status": hapi_status,
            "severity": hapi_severity,
            "hostId": host,
            "hostName": host,
            "brief": brief,
            "extendedInfo": ""
        })
        self.divide_and_put_data(self.put_events, events,
                                 last_info_generator=lambda x: None)
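__get_parameter itself is not shown here; a minimal sketch of the contract these three calls assume (an assumption, not the real haplib helper) is: read the key from the parsed message, fall back to the configured default, and reject values outside the allowed set.

def get_parameter(msg, key, default, candidates):
    # Sketch of the assumed __get_parameter behavior: accept the message
    # value only when it is one of the allowed candidates.
    value = msg.get(key, default)
    if value not in candidates:
        return default
    return value

# Example: an unknown type falls back to the default.
assert get_parameter({"type": "BOGUS"}, "type", "BAD", ["GOOD", "BAD"]) == "BAD"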
Example #3
    def __collect_items_and_put(self, host_id):
        url = "%s/v2/resources/%s" % (self.__ceilometer_ep, host_id)
        response = self.__request(url)

        items = []
        for link in response["links"]:
            rel = link.get("rel")
            if rel not in self.__target_items:
                continue
            href = link.get("href")
            if href is None:
                continue
            rc = self.__get_resource(rel, href)
            if rc is None:
                continue

            timestamp = self.parse_time(rc["timestamp"])
            hapi_time = hapcommon.conv_to_hapi_time(timestamp)
            counter_name = rc["counter_name"]
            items.append({
                # Item ID must be unique so we generate it with the host ID
                # and the counter name.
                "itemId": host_id + "." + counter_name,
                "hostId": host_id,
                "brief": counter_name,
                "lastValueTime": hapi_time,
                "lastValue": str(rc["counter_volume"]),
                "itemGroupName": "",
                "unit": rc["counter_unit"],
            })
        return items
Example #4
    def __put_event(self, timestamp, tag, raw_msg):
        event_id = self.__generate_event_id()
        try:
            msg = json.loads(raw_msg)
        except (ValueError, TypeError):  # raw_msg is not valid JSON
            msg = {}

        brief = msg.get(self.__message_key, raw_msg)
        host = msg.get(self.__host_key, self.__default_host)

        hapi_event_type = self.__get_parameter(msg, self.__type_key,
                                               self.__default_type,
                                               haplib.EVENT_TYPES)
        hapi_status = self.__get_parameter(msg, self.__status_key,
                                           self.__default_status,
                                           haplib.TRIGGER_STATUS)
        hapi_severity = self.__get_parameter(msg, self.__severity_key,
                                             self.__default_severity,
                                             haplib.TRIGGER_SEVERITY)

        events = []
        events.append({
            "eventId": event_id,
            "time": hapcommon.conv_to_hapi_time(timestamp),
            "type": hapi_event_type,
            "status": hapi_status,
            "severity": hapi_severity,
            "hostId": host,
            "hostName": host,
            "brief": brief,
            "extendedInfo": ""
        })
        self.divide_and_put_data(self.put_events,
                                 events,
                                 last_info_generator=lambda x: None)
Example #5
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):
        query = self.__socket.services.columns("plugin_output",
                                               "description",
                                               "last_state_change",
                                               "host_alias",
                                               "host_name",
                                               "state")

        if host_ids is not None:
            filter_condition = "host_name ~ " + "|".join(host_ids)
            query = query.filter(filter_condition)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                unix_timestamp = hapcommon.translate_hatohol_time_to_unix_time(
                    self.__trigger_last_info)
                query = query.filter("last_state_change >= %s" % unix_timestamp)
                update_type = "UPDATED"

        result = query.call()

        triggers = []
        for service in result:
            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(service["state"])

            last_state_change = datetime.datetime.fromtimestamp(service["last_state_change"])
            hapi_time = hapcommon.conv_to_hapi_time(last_state_change,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": service["description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": service["host_name"],
                "hostName": service["host_alias"],
                "brief": service["plugin_output"],
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        self.divide_and_put_data(self.put_triggers, triggers,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
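With hypothetical host IDs, the filter built above is a Livestatus regular-expression match against any of the given hosts:

# Hypothetical host IDs, to illustrate the Livestatus filter string.
host_ids = ["web01", "db01"]
filter_condition = "host_name ~ " + "|".join(host_ids)
assert filter_condition == "host_name ~ web01|db01"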
Example #6
    def __collect_events_and_put(self, alarm_id, last_alarm_time, fetch_id):
        query_option = self.__get_history_query_option(last_alarm_time)
        url = self.__ceilometer_ep + \
              "/v2/alarms/%s/history%s" % (alarm_id, query_option)
        response = self.__request(url)

        # host_id, host_name and brief
        alarm_cache = self.__alarm_cache.get(alarm_id)
        if alarm_cache is not None:
            host_id = alarm_cache["host_id"]
            brief = alarm_cache["brief"]
        else:
            host_id = "N/A"
            brief = "N/A"
        host_name = self.__host_cache.get(host_id, "N/A")

        # make the events to put
        events = []
        for history in response:
            hapi_status = self.alarm_to_hapi_status(history["type"],
                                                    history.get("detail"))
            hapi_event_type = self.status_to_hapi_event_type(hapi_status)
            timestamp = self.parse_time(history["timestamp"])

            events.append({
                "eventId": history["event_id"],
                "time": hapcommon.conv_to_hapi_time(timestamp),
                "type": hapi_event_type,
                "triggerId": alarm_id,
                "status": hapi_status,
                "severity": "ERROR",
                "hostId": host_id,
                "hostName": host_name,
                "brief": brief,
                "extendedInfo": ""
            })
        sorted_events = sorted(events, key=lambda evt: evt["time"])

        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(
            self.put_events,
            sorted_events,
            put_empty_contents,
            fetch_id=fetch_id,
            last_info_generator=self.__last_info_generator)
Example #7
    def __collect_events_and_put(self, alarm_id, last_alarm_time, fetch_id):
        query_option = self.__get_history_query_option(last_alarm_time)
        url = self.__ceilometer_ep + \
              "/v2/alarms/%s/history%s" % (alarm_id, query_option)
        response = self.__request(url)

        # host_id, host_name and brief
        alarm_cache = self.__alarm_cache.get(alarm_id)
        if alarm_cache is not None:
            host_id = alarm_cache["host_id"]
            brief = alarm_cache["brief"]
        else:
            host_id = "N/A"
            brief = "N/A"
        host_name = self.__host_cache.get(host_id, "N/A")

        # make the events to put
        events = []
        for history in response:
            hapi_status = self.alarm_to_hapi_status(
                history["type"], history.get("detail"))
            hapi_event_type = self.status_to_hapi_event_type(hapi_status)
            timestamp = self.parse_time(history["timestamp"])

            events.append({
                "eventId": history["event_id"],
                "time": hapcommon.conv_to_hapi_time(timestamp),
                "type": hapi_event_type,
                "triggerId": alarm_id,
                "status": hapi_status,
                "severity": "ERROR",
                "hostId": host_id,
                "hostName": host_name,
                "brief": brief,
                "extendedInfo": ""
            })
        sorted_events = sorted(events, key=lambda evt: evt["time"])

        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_events, sorted_events,
                                 put_empty_contents, fetch_id=fetch_id,
                                 last_info_generator=self.__last_info_generator)
Example #8
    def collect_history_and_put(self, fetch_id, host_id, item_id,
                                begin_time, end_time):
        meter_name = item_id.split(".", 1)[1]
        base_url = "%s/v2/meters/%s" % (self.__ceilometer_ep, meter_name)
        query1 = "?q.field=resource_id&q.field=timestamp&q.field=timestamp"
        query2 = "&q.op=eq&q.op=gt&q.op=lt"
        t_beg = self.hapi_time_to_url_enc_openstack_time(begin_time)
        t_end = self.hapi_time_to_url_enc_openstack_time(end_time)
        query3 = "&q.value=%s&q.value=%s&q.value=%s" % (host_id, t_beg, t_end)

        url = base_url + query1 + query2 + query3
        response = self.__request(url)

        samples = []
        for history in response:
            timestamp = self.parse_time(history["timestamp"])
            hapi_time = hapcommon.conv_to_hapi_time(timestamp)
            samples.append({
                "time": hapi_time,
                "value": str(history["counter_volume"]),
            })
        sorted_samples = sorted(samples, key=lambda s: s["time"])
        self.put_history(sorted_samples, item_id, fetch_id)
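The q.field, q.op, and q.value lists are parallel, so the assembled URL asks Ceilometer for samples whose resource_id equals the host and whose timestamp lies strictly between begin_time and end_time. With a hypothetical endpoint and values it looks like this:

# Hypothetical endpoint and values, showing the parallel q.* triplets
# (field[i] op[i] value[i]) that the code above assembles.
url = ("http://ceilometer.example.org:8777/v2/meters/cpu_util"
       "?q.field=resource_id&q.field=timestamp&q.field=timestamp"
       "&q.op=eq&q.op=gt&q.op=lt"
       "&q.value=host-1234"
       "&q.value=2015-06-28T09%3A00%3A00"
       "&q.value=2015-06-28T10%3A00%3A00")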
Example #9
    def collect_history_and_put(self, fetch_id, host_id, item_id, begin_time,
                                end_time):
        meter_name = item_id.split(".", 1)[1]
        base_url = "%s/v2/meters/%s" % (self.__ceilometer_ep, meter_name)
        query1 = "?q.field=resource_id&q.field=timestamp&q.field=timestamp"
        query2 = "&q.op=eq&q.op=gt&q.op=lt"
        t_beg = self.hapi_time_to_url_enc_openstack_time(begin_time)
        t_end = self.hapi_time_to_url_enc_openstack_time(end_time)
        query3 = "&q.value=%s&q.value=%s&q.value=%s" % (host_id, t_beg, t_end)

        url = base_url + query1 + query2 + query3
        response = self.__request(url)

        samples = []
        for history in response:
            timestamp = self.parse_time(history["timestamp"])
            hapi_time = hapcommon.conv_to_hapi_time(timestamp)
            samples.append({
                "time": hapi_time,
                "value": str(history["counter_volume"]),
            })
        sorted_samples = sorted(samples, key=lambda s: s["time"])
        self.divide_and_put_data(self.put_history, sorted_samples, True,
                                 item_id, fetch_id)
Example #10
    def test_conv_to_hapi_time(self):
        dt = datetime.datetime(2015, 6, 28, 9, 35, 11, 123456)
        self.assertEqual(hapcommon.conv_to_hapi_time(dt),
                         "20150628093511.123456")
Example #11
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):
        query = self.__socket.services.columns("plugin_output",
                                               "description",
                                               "last_state_change",
                                               "host_alias",
                                               "host_name",
                                               "state")

        if host_ids is not None:
            filter_condition = "host_name ~ " + "|".join(host_ids)
            query = query.filter(filter_condition)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                unix_timestamp = hapcommon.translate_hatohol_time_to_unix_time(
                    self.__trigger_last_info)
                query = query.filter("last_state_change >= %s" % unix_timestamp)
                update_type = "UPDATED"

        result = query.call()

        triggers = []
        for service in result:
            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(service["state"])

            last_state_change = datetime.datetime.fromtimestamp(service["last_state_change"])
            hapi_time = hapcommon.conv_to_hapi_time(last_state_change,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": service["description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": service["host_name"],
                "hostName": service["host_alias"],
                "brief": service["plugin_output"],
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")

        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_triggers, triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
Example #12
    def collect_events_and_put(self, fetch_id=None, last_info=None,
                               count=None, direction="ASC"):
        query = self.__socket.statehist.columns("log_output",
                                                "state",
                                                "time",
                                                "current_host_name",
                                                "current_host_alias",
                                                "service_description")

        if last_info is None:
            last_info = self.get_cached_event_last_info()

        if last_info:
            last_info = json.loads(last_info)

        if not last_info:
            pass
        elif direction == "ASC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time >= %s" % unix_timestamp)
        elif direction == "DESC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time <= %s" % unix_timestamp)

        result = query.call()
        logger.debug(query)

        try:
            latest_state_index = result.index(last_info)
            result = result[:latest_state_index]
        except ValueError:
            pass

        events = []
        for event in result:
            if not len(event["current_host_name"]):
                continue

            hapi_event_type = self.EVENT_TYPE_MAP.get(event["state"])
            if hapi_event_type is None:
                logger.warning("Unknown state: " + str(event["state"]))
                hapi_event_type = "UNKNOWN"

            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(event["state"])

            event_time = datetime.datetime.fromtimestamp(event["time"])
            hapi_time = hapcommon.conv_to_hapi_time(event_time,
                                                    self.__time_offset)
            events.append({
                "eventId": str(uuid.uuid1()),
                "time": hapi_time,
                "type": hapi_event_type,
                "triggerId": event["service_description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "hostId": event["current_host_name"],
                "hostName": event["current_host_alias"],
                "brief": event["log_output"],
                "extendedInfo": ""
            })

        if len(result):
            # livestatus returns a sorted list;
            # result[0] is the latest statehist entry.
            self.__latest_statehist = json.dumps(result[0])

        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_events, events, put_empty_contents,
                                 fetch_id=fetch_id,
                                 last_info_generator=self.return_latest_statehist)
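return_latest_statehist is not shown; given how __latest_statehist is cached above, the last_info generator is presumably little more than the following sketch:

    def return_latest_statehist(self, events):
        # Assumed sketch: ignore the chunk that was just sent and report
        # the cached newest statehist row as the new last_info.
        return self.__latest_statehist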
Example #13
    def test_conv_to_hapi_time(self):
        dt = datetime.datetime(2015, 6, 28, 9, 35, 11, 123456)
        self.assertEqual(hapcommon.conv_to_hapi_time(dt),
                         "20150628093511.123456")
Example #14
    def test_conv_to_hapi_time_with_offset(self):
        dt = datetime.datetime(2015, 6, 28, 9, 35, 11, 123456)
        ofs = -datetime.timedelta(hours=1, minutes=35, seconds=1)
        self.assertEqual(hapcommon.conv_to_hapi_time(dt, ofs),
                         "20150628080010.123456")
Example #15
    def collect_events_and_put(self, fetch_id=None, last_info=None,
                               count=None, direction="ASC"):
        query = self.__socket.statehist.columns("log_output",
                                                "state",
                                                "time",
                                                "current_host_name",
                                                "current_host_alias",
                                                "service_description")

        if last_info is None:
            last_info = self.get_cached_event_last_info()

        if last_info:
            last_info = json.loads(last_info)

        if not last_info:
            pass
        elif direction == "ASC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time >= %s" % unix_timestamp)
        elif direction == "DESC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time <= %s" % unix_timestamp)

        result = query.call()
        logger.debug(query)

        try:
            latest_state_index = result.index(last_info)
            result = result[:latest_state_index]
        except ValueError:
            pass

        events = []
        for event in result:
            if not len(event["current_host_name"]):
                continue

            hapi_event_type = self.EVENT_TYPE_MAP.get(event["state"])
            if hapi_event_type is None:
                logger.warning("Unknown state: " + str(event["state"]))
                hapi_event_type = "UNKNOWN"

            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(event["state"])

            event_time = datetime.datetime.fromtimestamp(event["time"])
            hapi_time = hapcommon.conv_to_hapi_time(event_time,
                                                    self.__time_offset)
            events.append({
                "eventId": str(uuid.uuid1()),
                "time": hapi_time,
                "type": hapi_event_type,
                "triggerId": event["service_description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "hostId": event["current_host_name"],
                "hostName": event["current_host_alias"],
                "brief": event["log_output"],
                "extendedInfo": ""
            })

        if len(result):
            # livestatus returns a sorted list;
            # result[0] is the latest statehist entry.
            self.__latest_statehist = json.dumps(result[0])
        self.divide_and_put_data(self.put_events, events, fetch_id=fetch_id,
                                 last_info_generator=self.return_latest_statehist)
Example #16
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):

        if host_ids is not None and not self.__validate_object_ids(host_ids):
            logger.error("Invalid: host_ids: %s" % host_ids)
            # TODO by 15.09 (*1): send error
            # There's no definition to send error in HAPI2.
            # We have to extend the specification to enable this.
            return

        t0 = "nagios_services"
        t1 = "nagios_servicestatus"
        t2 = "nagios_hosts"
        sql = "SELECT " \
              + "%s.service_object_id, " % t0 \
              + "%s.current_state, " % t1 \
              + "%s.status_update_time, " % t1 \
              + "%s.output, " % t1 \
              + "%s.host_object_id, " % t2 \
              + "%s.display_name " % t2 \
              + "FROM %s INNER JOIN %s " % (t0, t1) \
              + "ON %s.service_object_id=%s.service_object_id " % (t0, t1) \
              + "INNER JOIN %s " % t2 \
              + "ON %s.host_object_id=%s.host_object_id" % (t0, t2)

        if host_ids is not None:
            in_cond = "','".join(host_ids)
            sql += " WHERE %s.host_object_id in ('%s')" % (t2, in_cond)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                nag_time = self.__convert_to_nagios_time(
                    self.__trigger_last_info)
                # Use AND when the host_ids condition already added a WHERE.
                cond = " AND " if host_ids is not None else " WHERE "
                sql += cond + "status_update_time >= '%s'" % nag_time
                update_type = "UPDATED"

        self.__cursor.execute(sql)
        result = self.__cursor.fetchall()

        triggers = []
        for row in result:
            (trigger_id, state, update_time, msg, host_id, host_name) = row

            hapi_status, hapi_severity = \
              self.__parse_status_and_severity(state)

            hapi_time = hapcommon.conv_to_hapi_time(update_time,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": str(trigger_id),
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": str(host_id),
                "hostName": host_name,
                "brief": msg,
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_triggers,
                                 triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
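__validate_object_ids is not shown. Since host_ids are interpolated into the SQL statement, a plausible sketch (hypothetical, mirroring the strict validation the comments call for) accepts only purely numeric IDs:

def validate_object_ids(object_ids):
    # Hypothetical sketch of __validate_object_ids: the IDs end up inside
    # an SQL IN clause, so only plain numeric strings are accepted.
    return all(obj_id.isdigit() for obj_id in object_ids)

assert validate_object_ids(["1001", "1002"])
assert not validate_object_ids(["1001", "1; DROP TABLE nagios_hosts"])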
Example #17
    def test_conv_to_hapi_time_with_offset(self):
        dt = datetime.datetime(2015, 6, 28, 9, 35, 11, 123456)
        ofs = -datetime.timedelta(hours=1, minutes=35, seconds=1)
        self.assertEqual(hapcommon.conv_to_hapi_time(dt, ofs),
                         "20150628080010.123456")
Example #18
    def collect_events_and_put(self, fetch_id=None, last_info=None,
                               count=None, direction="ASC"):
        t0 = "nagios_statehistory"
        t1 = "nagios_services"
        t2 = "nagios_hosts"

        sql = "SELECT " \
              + "%s.statehistory_id, " % t0 \
              + "%s.state, " % t0 \
              + "%s.state_time, " % t0 \
              + "%s.output, " % t0 \
              + "%s.service_object_id, " % t1 \
              + "%s.host_object_id, " % t2 \
              + "%s.display_name " % t2 \
              + "FROM %s INNER JOIN %s " % (t0, t1) \
              + "ON %s.statehistory_id=%s.service_object_id " % (t0, t1) \
              + "INNER JOIN %s " % t2 \
              + "ON %s.host_object_id=%s.host_object_id" % (t1, t2)

        # Event range to select
        if last_info is not None:
            raw_last_info = last_info
        else:
            raw_last_info = self.get_cached_event_last_info()

        if raw_last_info is not None \
            and raw_last_info != self.INITIAL_LAST_INFO:
            # The form of 'last_info' depends on a plugin. So the validation
            # of it cannot be completed in haplib.Receiver.__validate_arguments().
            # Since it is inserted into the SQL statement, we have to strictly
            # validate it here.
            last_cond = self.__extract_validated_event_last_info(raw_last_info)
            if last_cond is None:
                logger.error("Malformed last_info: '%s'",
                              str(raw_last_info))
                logger.error("Getting events was aborted.")
                # TODO by 15.09: notify error to the caller
                # See also TODO (*1)
                return
            # TODO: Fix when direction is 'DESC'
            sql += " WHERE %s.statehistory_id>%s" % (t0, last_cond)

        # Direction
        if direction in ["ASC", "DESC"]:
            sql += " ORDER BY %s.statehistory_id %s" % (t0, direction)

        # The number of records
        if count is not None:
            sql += " LIMIT %d" % count

        logger.debug(sql)
        self.__cursor.execute(sql)
        result = self.__cursor.fetchall()

        events = []
        for (event_id, state, event_time, msg, \
             trigger_id, host_id, host_name) in result:

            hapi_event_type = self.EVENT_TYPE_MAP.get(state)
            if hapi_event_type is None:
                logger.warning("Unknown state: " + str(state))
                hapi_event_type = "UNKNOWN"

            hapi_status, hapi_severity = \
              self.__parse_status_and_severity(state)

            hapi_time = hapcommon.conv_to_hapi_time(event_time,
                                                    self.__time_offset)
            events.append({
                "eventId": str(event_id),
                "time": hapi_time,
                "type": hapi_event_type,
                "triggerId": trigger_id,
                "status": hapi_status,
                "severity": hapi_severity,
                "hostId": str(host_id),
                "hostName": host_name,
                "brief": msg,
                "extendedInfo": ""
            })
        self.put_events(events, fetch_id=fetch_id)
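__extract_validated_event_last_info is likewise not shown; following the comment above about strict validation before SQL interpolation, a plausible sketch (hypothetical) is:

def extract_validated_event_last_info(raw_last_info):
    # Hypothetical sketch: last_info is embedded in the SQL statement,
    # so return it only when it is a plain integer string; None means
    # the value is malformed and must not be used.
    s = str(raw_last_info)
    return s if s.isdigit() else None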
Example #19
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):

        if host_ids is not None and not self.__validate_object_ids(host_ids):
            logger.error("Invalid: host_ids: %s" % host_ids)
            # TODO by 15.09 (*1): send error
            # There's no definition to send error in HAPI 2.0.
            # We have to extend the specification to enable this.
            return

        t0 = "nagios_services"
        t1 = "nagios_servicestatus"
        t2 = "nagios_hosts"
        sql = "SELECT " \
              + "%s.service_object_id, " % t0 \
              + "%s.current_state, " % t1 \
              + "%s.status_update_time, " % t1 \
              + "%s.output, " % t1 \
              + "%s.host_object_id, " % t2 \
              + "%s.display_name " % t2 \
              + "FROM %s INNER JOIN %s " % (t0, t1) \
              + "ON %s.service_object_id=%s.service_object_id " % (t0, t1) \
              + "INNER JOIN %s " % t2 \
              + "ON %s.host_object_id=%s.host_object_id" % (t0, t2)

        if host_ids is not None:
            in_cond = "','".join(host_ids)
            sql += " WHERE %s.host_object_id in ('%s')" % (t2, in_cond)

        all_triggers_should_send = lambda: fetch_id is None
        update_type = "ALL"
        if all_triggers_should_send():
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                nag_time = self.__convert_to_nagios_time(self.__trigger_last_info)
                # Use AND when the host_ids condition already added a WHERE.
                cond = " AND " if host_ids is not None else " WHERE "
                sql += cond + "status_update_time >= '%s'" % nag_time
                update_type = "UPDATED"

        self.__cursor.execute(sql)
        result = self.__cursor.fetchall()

        triggers = []
        for row in result:
            (trigger_id, state, update_time, msg, host_id, host_name) = row

            hapi_status, hapi_severity = \
              self.__parse_status_and_severity(state)

            hapi_time = hapcommon.conv_to_hapi_time(update_time,
                                                    self.__time_offset)
            triggers.append({
                "triggerId": str(trigger_id),
                "status": hapi_status,
                "severity": hapi_severity,
                "lastChangeTime": hapi_time,
                "hostId": str(host_id),
                "hostName": host_name,
                "brief": msg,
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        self.put_triggers(triggers, update_type=update_type,
                          last_info=self.__trigger_last_info,
                          fetch_id=fetch_id)
Example #20
    def collect_events_and_put(self,
                               fetch_id=None,
                               last_info=None,
                               count=None,
                               direction="ASC"):
        t0 = "nagios_statehistory"
        t1 = "nagios_services"
        t2 = "nagios_hosts"

        sql = "SELECT " \
              + "%s.statehistory_id, " % t0 \
              + "%s.state, " % t0 \
              + "%s.state_time, " % t0 \
              + "%s.output, " % t0 \
              + "%s.service_object_id, " % t1 \
              + "%s.host_object_id, " % t2 \
              + "%s.display_name " % t2 \
              + "FROM %s INNER JOIN %s " % (t0, t1) \
              + "ON %s.object_id=%s.service_object_id " % (t0, t1) \
              + "INNER JOIN %s " % t2 \
              + "ON %s.host_object_id=%s.host_object_id" % (t1, t2)

        # Event range to select
        if last_info is not None:
            raw_last_info = last_info
        else:
            raw_last_info = self.get_cached_event_last_info()
            if not raw_last_info:
                latest_id = self.__get_latest_statehistory_id()
                if latest_id:
                    self.set_event_last_info(latest_id)
                else:
                    self.set_event_last_info("0")

        if raw_last_info is not None \
            and raw_last_info != self.INITIAL_LAST_INFO:
            # The form of 'last_info' depends on a plugin. So the validation
            # of it cannot be completed in haplib.Receiver.__validate_arguments().
            # Since it is inserted into the SQL statement, we have to strictly
            # validate it here.
            last_cond = self.__extract_validated_event_last_info(raw_last_info)
            if last_cond is None:
                logger.error("Malformed last_info: '%s'", str(raw_last_info))
                logger.error("Getting events was aborted.")
                # TODO by 15.09: notify error to the caller
                # See also TODO (*1)
                return
            # TODO: Fix when direction is 'DESC'
            sql += " WHERE %s.statehistory_id>%s" % (t0, last_cond)

        # Direction
        if direction in ["ASC", "DESC"]:
            sql += " ORDER BY %s.statehistory_id %s" % (t0, direction)

        # The number of records
        if count is not None:
            sql += " LIMIT %d" % count

        logger.debug(sql)
        self.__cursor.execute(sql)
        result = self.__cursor.fetchall()

        events = []
        for (event_id, state, event_time, msg, \
             trigger_id, host_id, host_name) in result:

            hapi_event_type = self.EVENT_TYPE_MAP.get(state)
            if hapi_event_type is None:
                logger.warning("Unknown state: " + str(state))
                hapi_event_type = "UNKNOWN"

            hapi_status, hapi_severity = \
              self.__parse_status_and_severity(state)

            hapi_time = hapcommon.conv_to_hapi_time(event_time,
                                                    self.__time_offset)
            events.append({
                "eventId": str(event_id),
                "time": hapi_time,
                "type": hapi_event_type,
                "triggerId": str(trigger_id),
                "status": hapi_status,
                "severity": hapi_severity,
                "hostId": str(host_id),
                "hostName": host_name,
                "brief": msg,
                "extendedInfo": ""
            })

        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_events,
                                 events,
                                 put_empty_contents,
                                 fetch_id=fetch_id)
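divide_and_put_data appears throughout these examples; a minimal sketch of the contract the call sites rely on (an assumption, the real haplib implementation is more involved) is: send the data to the putter in fixed-size chunks, forwarding the putter's extra arguments, and call the putter with empty data only when put_empty_contents is set.

def divide_and_put_data(putter, data, put_empty_contents=False,
                        *putter_args, **putter_kwargs):
    CHUNK_SIZE = 1000  # assumed; the real chunk size lives in haplib
    if not data:
        if put_empty_contents:
            putter([], *putter_args, **putter_kwargs)
        return
    for i in range(0, len(data), CHUNK_SIZE):
        putter(data[i:i + CHUNK_SIZE], *putter_args, **putter_kwargs)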