Ejemplo n.º 1
0
    def get_history(self, item_id, begin_time, end_time):
        """Fetch the value history of one Zabbix item within a time range.

        Returns a list of {"value", "time"} dicts in ascending clock
        order, or None when the server response fails validation.
        """
        unix_begin = hapcommon.translate_hatohol_time_to_unix_time(begin_time)
        unix_end = hapcommon.translate_hatohol_time_to_unix_time(end_time)
        params = {"output": "extend",
                  "itemids": item_id,
                  "history": self.get_item_value_type(item_id),
                  "sortfield": "clock",
                  "sortorder": "ASC",
                  "time_from": unix_begin,
                  "time_till": unix_end}
        response = self.get_response_dict("history.get", params,
                                          self.auth_token)

        self.result = check_response(response)
        if not self.result:
            return

        collected = []

        def append_entry(entry):
            hatohol_time = hapcommon.translate_unix_time_to_hatohol_time(
                entry["clock"], entry["ns"])
            collected.append({"value": entry["value"], "time": hatohol_time})

        self.__iterate_in_try_block(response["result"], append_entry)

        return collected
Ejemplo n.º 2
0
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):
        """Collect trigger data from livestatus services and send it upstream.

        When fetch_id is None (a periodic poll) only services whose
        last_state_change is at or after the stored trigger last-info are
        requested, and the update type becomes "UPDATED".
        """
        query = self.__socket.services.columns("plugin_output",
                                               "description",
                                               "last_state_change",
                                               "host_alias",
                                               "host_name",
                                               "state")

        if host_ids is not None:
            # One regex alternation matches any of the requested host names.
            query = query.filter("host_name ~ " + "|".join(host_ids))

        update_type = "ALL"
        if fetch_id is None:
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                since = hapcommon.translate_hatohol_time_to_unix_time(
                    self.__trigger_last_info)
                query = query.filter("last_state_change >= %s" % since)
                update_type = "UPDATED"

        services = query.call()

        triggers = []
        for svc in services:
            status, severity = \
                self.__parse_status_and_severity(svc["state"])

            changed_at = datetime.datetime.fromtimestamp(
                svc["last_state_change"])
            triggers.append({
                "triggerId": svc["description"],
                "status": status,
                "severity": severity,
                "lastChangeTime": hapcommon.conv_to_hapi_time(
                    changed_at, self.__time_offset),
                "hostId": svc["host_name"],
                "hostName": svc["host_alias"],
                "brief": svc["plugin_output"],
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")
        self.divide_and_put_data(self.put_triggers, triggers,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
Ejemplo n.º 3
0
    def get_history(self, item_id, begin_time, end_time):
        """Return an item's history between two Hatohol-format times.

        Returns None when the response fails validation; otherwise a list
        of {"value", "time"} dicts sorted by clock ascending.
        """
        t_from = hapcommon.translate_hatohol_time_to_unix_time(begin_time)
        t_till = hapcommon.translate_hatohol_time_to_unix_time(end_time)
        value_type = self.get_item_value_type(item_id)
        params = dict(output="extend", itemids=item_id, history=value_type,
                      sortfield="clock", sortorder="ASC",
                      time_from=t_from, time_till=t_till)
        res = self.get_response_dict("history.get", params, self.auth_token)

        self.result = check_response(res)
        if not self.result:
            return

        history_entries = []

        def to_hapi(record):
            stamp = hapcommon.translate_unix_time_to_hatohol_time(
                record["clock"], record["ns"])
            history_entries.append({"value": record["value"], "time": stamp})

        self.__iterate_in_try_block(res["result"], to_hapi)

        return history_entries
Ejemplo n.º 4
0
    def get_triggers(self, requestSince=None, host_ids=None):
        """Fetch active triggers from Zabbix, optionally filtered.

        requestSince limits results to triggers changed since that
        Hatohol-format time; host_ids limits them to specific hosts.
        Returns None when the response fails validation.
        """
        params = {"output": "extend", "selectHosts": ["name"], "active": True}
        last_change_since = 0
        if requestSince:
            last_change_since = \
                hapcommon.translate_hatohol_time_to_unix_time(requestSince)
            params["lastChangeSince"] = last_change_since
        if host_ids is not None:
            params["hostids"] = host_ids

        res_dict = self.get_response_dict("trigger.get", params,
                                          self.auth_token)
        expanded_descriptions = self.get_trigger_expand_description(
            last_change_since, host_ids)

        self.result = check_response(res_dict)
        if not self.result:
            return

        triggers = []
        _missing = object()

        def find_description(triggerid):
            # Expanded descriptions arrive in a parallel result set;
            # look up the matching entry by trigger id.
            matches = (entry["description"]
                       for entry in expanded_descriptions["result"]
                       if entry["triggerid"] == triggerid)
            found = next(matches, _missing)
            if found is _missing:
                logger.warning("Not found description: triggerid: %s"
                               % triggerid)
                return ""
            return found

        def build(trigger):
            expanded = find_description(trigger["triggerid"])
            changed = hapcommon.translate_unix_time_to_hatohol_time(
                trigger["lastchange"])
            host = trigger["hosts"][0]
            triggers.append({
                "triggerId": trigger["triggerid"],
                "status": TRIGGER_STATUS[trigger["value"]],
                "severity": TRIGGER_SEVERITY[trigger["priority"]],
                "lastChangeTime": changed,
                "hostId": host["hostid"],
                "hostName": host["name"],
                "brief": trigger["description"],
                "extendedInfo": expanded
            })

        self.__iterate_in_try_block(res_dict["result"], build)
        return triggers
Ejemplo n.º 5
0
    def get_triggers(self, requestSince=None, host_ids=None):
        """Collect active triggers and their expanded descriptions.

        Returns a list of Hatohol trigger dicts, or None when the server
        response fails validation.
        """
        params = {"output": "extend", "selectHosts": ["name"], "active": True}
        last_change_since = 0
        if requestSince:
            last_change_since = \
                hapcommon.translate_hatohol_time_to_unix_time(requestSince)
            params["lastChangeSince"] = last_change_since
        if host_ids is not None:
            params["hostids"] = host_ids

        res_dict = self.get_response_dict("trigger.get", params,
                                          self.auth_token)
        expanded_descriptions = \
            self.get_trigger_expand_description(last_change_since, host_ids)

        self.result = check_response(res_dict)
        if not self.result:
            return

        triggers = []

        def find_description(triggerid):
            # Linear scan over the companion result set; warn and fall
            # back to an empty string when no entry matches.
            for candidate in expanded_descriptions["result"]:
                if candidate["triggerid"] == triggerid:
                    return candidate["description"]
            logger.warning("Not found description: triggerid: %s" % triggerid)
            return ""

        def collect(trigger):
            expanded = find_description(trigger["triggerid"])
            hatohol_time = hapcommon.translate_unix_time_to_hatohol_time(
                trigger["lastchange"])
            first_host = trigger["hosts"][0]
            entry = {"triggerId": trigger["triggerid"],
                     "status": TRIGGER_STATUS[trigger["value"]],
                     "severity": TRIGGER_SEVERITY[trigger["priority"]],
                     "lastChangeTime": hatohol_time,
                     "hostId": first_host["hostid"],
                     "hostName": first_host["name"],
                     "brief": trigger["description"],
                     "extendedInfo": expanded}
            triggers.append(entry)

        self.__iterate_in_try_block(res_dict["result"], collect)
        return triggers
Ejemplo n.º 6
0
    def collect_events_and_put(self, fetch_id=None, last_info=None,
                               count=None, direction="ASC"):
        """Collect state-history events from livestatus and put them upstream.

        Args:
            fetch_id: identifier of an explicit fetch request, or None for
                a periodic poll.
            last_info: JSON-encoded statehist record used as the resume
                point; when None the cached event last-info is used.
            count: accepted for interface compatibility; not used here.
            direction: "ASC" fetches events at/after last_info's time,
                "DESC" fetches events at/before it.
        """
        query = self.__socket.statehist.columns("log_output",
                                                "state",
                                                "time",
                                                "current_host_name",
                                                "current_host_alias",
                                                "service_description")

        if last_info is None:
            last_info = self.get_cached_event_last_info()

        if last_info:
            last_info = json.loads(last_info)

        if not last_info:
            pass
        elif direction == "ASC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time >= %s" % unix_timestamp)
        elif direction == "DESC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time <= %s" % unix_timestamp)

        result = query.call()
        logger.debug(query)

        # The boundary record itself was already delivered; drop it and
        # everything after it if it is still present in the result.
        try:
            latest_state_index = result.index(last_info)
            result = result[:latest_state_index]
        except ValueError:
            pass

        events = []
        for event in result:
            # Records without a host name cannot be mapped to a host; skip.
            if not len(event["current_host_name"]):
                continue

            hapi_event_type = self.EVENT_TYPE_MAP.get(event["state"])
            if hapi_event_type is None:
                # Fix: `log` was an undefined name (NameError); use the
                # module logger, and %s formatting so a non-string state
                # cannot raise TypeError on concatenation.
                logger.warning("Unknown status: %s" % event["state"])
                hapi_event_type = "UNKNOWN"

            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(event["state"])

            event_time = datetime.datetime.fromtimestamp(event["time"])
            hapi_time = hapcommon.conv_to_hapi_time(event_time,
                                                    self.__time_offset)
            events.append({
                "eventId": str(uuid.uuid1()),
                "time": hapi_time,
                "type": hapi_event_type,
                "triggerId": event["service_description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "hostId": event["current_host_name"],
                "hostName": event["current_host_alias"],
                "brief": event["log_output"],
                "extendedInfo": ""
            })

        if len(result):
            # livestatus return a sorted list.
            # result[0] is latest statehist.
            self.__latest_statehist = json.dumps(result[0])
        self.divide_and_put_data(self.put_events, events, fetch_id=fetch_id,
                           last_info_generator=self.return_latest_statehist)
Ejemplo n.º 7
0
 def test_translate_hatohol_time_to_unix_time(self):
     """The Hatohol epoch string should map to UTC unix time 0 plus ns."""
     result = hapcommon.translate_hatohol_time_to_unix_time(
         "19700101000000.123456789")
     # This result is utc time
     # Fix: assertAlmostEquals is a deprecated alias of assertAlmostEqual.
     self.assertAlmostEqual(result, 0.123456789, delta=0.000000001)
Ejemplo n.º 8
0
    def collect_events_and_put(self, fetch_id=None, last_info=None,
                               count=None, direction="ASC"):
        """Collect state-history events from livestatus and put them upstream.

        Args:
            fetch_id: identifier of an explicit fetch request, or None for
                a periodic poll (an empty result set is then not sent).
            last_info: JSON-encoded statehist record used as the resume
                point; when None the cached event last-info is used.
            count: accepted for interface compatibility; not used here.
            direction: "ASC" fetches events at/after last_info's time,
                "DESC" fetches events at/before it.
        """
        query = self.__socket.statehist.columns("log_output",
                                                "state",
                                                "time",
                                                "current_host_name",
                                                "current_host_alias",
                                                "service_description")

        if last_info is None:
            last_info = self.get_cached_event_last_info()

        if last_info:
            last_info = json.loads(last_info)

        if not last_info:
            pass
        elif direction == "ASC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time >= %s" % unix_timestamp)
        elif direction == "DESC":
            unix_timestamp = \
                hapcommon.translate_hatohol_time_to_unix_time(last_info["time"])
            query = query.filter("time <= %s" % unix_timestamp)

        result = query.call()
        logger.debug(query)

        # The boundary record itself was already delivered; drop it and
        # everything after it if it is still present in the result.
        try:
            latest_state_index = result.index(last_info)
            result = result[:latest_state_index]
        except ValueError:
            pass

        events = []
        for event in result:
            # Records without a host name cannot be mapped to a host; skip.
            if not len(event["current_host_name"]):
                continue

            hapi_event_type = self.EVENT_TYPE_MAP.get(event["state"])
            if hapi_event_type is None:
                # Fix: `log` was an undefined name (NameError); use the
                # module logger, and %s formatting so a non-string state
                # cannot raise TypeError on concatenation.
                logger.warning("Unknown status: %s" % event["state"])
                hapi_event_type = "UNKNOWN"

            hapi_status, hapi_severity = \
                self.__parse_status_and_severity(event["state"])

            event_time = datetime.datetime.fromtimestamp(event["time"])
            hapi_time = hapcommon.conv_to_hapi_time(event_time,
                                                    self.__time_offset)
            events.append({
                "eventId": str(uuid.uuid1()),
                "time": hapi_time,
                "type": hapi_event_type,
                "triggerId": event["service_description"],
                "status": hapi_status,
                "severity": hapi_severity,
                "hostId": event["current_host_name"],
                "hostName": event["current_host_alias"],
                "brief": event["log_output"],
                "extendedInfo": ""
            })

        if len(result):
            # livestatus return a sorted list.
            # result[0] is latest statehist.
            self.__latest_statehist = json.dumps(result[0])

        # Only an explicit fetch request should deliver empty contents.
        put_empty_contents = True
        if fetch_id is None:
            put_empty_contents = False

        self.divide_and_put_data(self.put_events, events, put_empty_contents,
                           fetch_id=fetch_id,
                           last_info_generator=self.return_latest_statehist)
Ejemplo n.º 9
0
    def collect_triggers_and_put(self, fetch_id=None, host_ids=None):
        """Collect trigger data from livestatus services and send it upstream.

        When fetch_id is None (a periodic poll) only services whose
        last_state_change is at or after the stored trigger last-info are
        requested, the update type becomes "UPDATED", and an empty result
        set is not sent.
        """
        query = self.__socket.services.columns("plugin_output",
                                               "description",
                                               "last_state_change",
                                               "host_alias",
                                               "host_name",
                                               "state")

        if host_ids is not None:
            # One regex alternation matches any of the requested host names.
            query = query.filter("host_name ~ " + "|".join(host_ids))

        update_type = "ALL"
        if fetch_id is None:
            if self.__trigger_last_info is None:
                self.__trigger_last_info = self.get_last_info("trigger")

            if len(self.__trigger_last_info):
                since = hapcommon.translate_hatohol_time_to_unix_time(
                    self.__trigger_last_info)
                query = query.filter("last_state_change >= %s" % since)
                update_type = "UPDATED"

        services = query.call()

        triggers = []
        for svc in services:
            status, severity = \
                self.__parse_status_and_severity(svc["state"])

            changed_at = datetime.datetime.fromtimestamp(
                svc["last_state_change"])
            triggers.append({
                "triggerId": svc["description"],
                "status": status,
                "severity": severity,
                "lastChangeTime": hapcommon.conv_to_hapi_time(
                    changed_at, self.__time_offset),
                "hostId": svc["host_name"],
                "hostName": svc["host_alias"],
                "brief": svc["plugin_output"],
                "extendedInfo": ""
            })
        self.__trigger_last_info = \
            hapcommon.get_biggest_num_of_dict_array(triggers,
                                                    "lastChangeTime")

        # Only an explicit fetch request should deliver empty contents.
        put_empty_contents = fetch_id is not None

        self.divide_and_put_data(self.put_triggers, triggers,
                                 put_empty_contents,
                                 update_type=update_type,
                                 last_info=self.__trigger_last_info,
                                 fetch_id=fetch_id)
Ejemplo n.º 10
0
 def test_translate_hatohol_time_to_unix_time(self):
     """The Hatohol epoch string should map to UTC unix time 0 plus ns."""
     result = hapcommon.translate_hatohol_time_to_unix_time(
         "19700101000000.123456789")
     # This result is utc time
     # Fix: assertAlmostEquals is a deprecated alias of assertAlmostEqual.
     self.assertAlmostEqual(result, 0.123456789, delta=0.000000001)