Beispiel #1
0
 def to_hint(self, srv_reply):
     """Turn an ovirt Host reply into a list of monitoring hints.

     Emits one passive string hint per host plus one compressed
     "overview" blob hint; returns an empty list when srv_reply is None.
     """
     hints = []
     if srv_reply is not None:
         overview = {"run_ids": [], "run_names": []}
         for host in ovirt_mod.Host.deserialize(srv_reply):
             host_id = host.get("id")
             host_name = host.findtext("name")
             overview["run_ids"].append(host_id)
             overview["run_names"].append(host_name)
             hints.append(
                 monitoring_hint(
                     key="host_{}".format(host_id),
                     v_type="s",
                     info="ovirt Host {}".format(host_name),
                     value_string=host_name,
                     persistent=True,
                     is_active=False,
                 )
             )
         # single active overview hint carrying the id/name lists
         hints.append(
             monitoring_hint(
                 key="overview",
                 v_type="B",
                 info="Host overview",
                 persistent=True,
                 value_blob=process_tools.compress_struct(overview),
                 is_active=True,
             )
         )
     return hints
Beispiel #2
0
 def to_hint(self, srv_reply):
     """Turn an ovirt StorageDomain reply into a list of monitoring hints.

     Emits one passive string hint per storage domain plus one compressed
     "overview" blob hint; returns an empty list when srv_reply is None.
     """
     # fix: dropped unused local VALID_STATES (copy/paste leftover from
     # the VM variant of this method)
     _hints = []
     if srv_reply is not None:
         info_dict = {
             "run_ids": [],
             "run_names": [],
         }
         doms = ovirt_mod.StorageDomain.deserialize(srv_reply)
         for sd in doms:
             _dom_id = sd.get("id")
             _dom_name = sd.findtext("name")
             info_dict["run_ids"].append(_dom_id)
             info_dict["run_names"].append(_dom_name)
             _hints.append(
                 monitoring_hint(
                     key="storage_domain_{}".format(_dom_id),
                     v_type="s",
                     info="ovirt StorageDomain {}".format(_dom_name),
                     value_string=_dom_name,
                     persistent=True,
                     is_active=False,
                 ))
         # single active overview hint carrying the id/name lists
         _hints.append(
             monitoring_hint(
                 key="overview",
                 v_type="B",
                 info="StorageDomain overview",
                 persistent=True,
                 value_blob=process_tools.compress_struct(info_dict),
                 is_active=True,
             ))
     return _hints
Beispiel #3
0
 def entry(self, result, ref_dict):
     """Build a single compound display node from a match result.

     result is a tuple (m_list, gd): m_list holds (key, xml) pairs of
     matched entries, gd the expansion dict for the compound key/info
     templates. ref_dict maps keys to attribute-dict lists of the
     top-level nodes. Returns a one-element list with the compound node.

     Bugfix: the attribute merge used ``ref_dict[key]``, which relied on
     the Python-2 list-comprehension variable leak and therefore always
     read the attributes of the *last* key (NameError under Python 3);
     it now correctly uses each key of m_list.
     """
     m_list, gd = result
     # update dict with "info" / "ti" attributes from each top-level node
     for _key, _xml in m_list:
         gd.update({
             _sk: _sv
             for _sk, _sv in ref_dict[_key][0].iteritems()
             if _sk in ["info", "ti"]
         })
     # set default values for required expansion keys
     for _key, _default in [
         ("key", "KEY???"),
         ("ti", "TI???"),
     ]:
         if _key not in gd:
             gd[_key] = _default
     # overwrite empty ti with the key
     if not gd["ti"]:
         gd["ti"] = gd["key"]
     # expand templates according to dict
     compound_key = self.__key.format(**gd)
     _info = "{} ({:d})".format(self.__info.format(**gd), len(m_list))
     # per-entry draw info for the graph frontend
     _build_info = [
         {
             "key": _s_key,
             "color": _xml.attrib["color"],
             "draw_type": _xml.get("draw_type", "LINE1"),
             "invert": _xml.get("invert", "0"),
         } for _s_key, _xml in m_list
     ]
     _node = [{
         # "type": "compound" should not be needed for display
         "fn": "",
         "ti": "",
         "key": compound_key,
         "is_active": True,
         "is_compound": True,
         "mvvs": [{
             "unit": "",
             "info": _info,
             "key": "",
             "build_info": process_tools.compress_struct(_build_info),
         }],
     }]
     return _node
Beispiel #4
0
 def __call__(self, srv_com, cur_ns):
     """Store core count and the full compressed process tree in srv_com.

     Marks the reply as psutil-based, records the logical core count and
     attaches the compressed process list with a fixed attribute set.
     """
     srv_com["psutil"] = "yes"
     srv_com["num_cores"] = psutil.cpu_count(logical=True)
     # fix: "ppid" was listed twice in the attribute list
     srv_com["process_tree"] = process_tools.compress_struct(
         process_tools.get_proc_list(attrs=[
             "pid",
             "ppid",
             "uids",
             "gids",
             "name",
             "exe",
             "cmdline",
             "status",
             "cpu_affinity",
         ]))
Beispiel #5
0
 def __call__(self, srv_com, cur_ns):
     """Collect an optionally name-filtered process tree into srv_com.

     cur_ns.arguments may hold process names to filter for; "cron" is
     transparently extended to also match "crond". The resulting dict is
     stored compressed under "process_tree" with format attribute "2".
     """
     if cur_ns.arguments:
         # fix: copy the list so the caller-supplied argument list is
         # not mutated by the "crond" extension below
         name_list = list(cur_ns.arguments)
         if "cron" in name_list:
             name_list.append("crond")
     else:
         name_list = []
     _p_dict = {}
     for key, value in process_tools.get_proc_list(
             proc_name_list=name_list).iteritems():
         try:
             if value.is_running():
                 # fix: "ppid" was listed twice in the attribute list
                 _p_dict[key] = value.as_dict(attrs=[
                     "pid",
                     "ppid",
                     "uids",
                     "gids",
                     "name",
                     "exe",
                     "cmdline",
                     "status",
                     "cpu_affinity",
                 ])
         except psutil.NoSuchProcess:
             # process vanished between enumeration and inspection
             pass
     if cur_ns.arguments:
         # try to be smart about cron / crond
         t_dict = {
             key: value
             for key, value in _p_dict.iteritems()
             if value["name"] in cur_ns.arguments
         }
         if not t_dict and cur_ns.arguments[0] == "cron":
             # nothing matched "cron" directly, fall back to "crond"
             t_dict = {
                 key: value
                 for key, value in _p_dict.iteritems()
                 if value["name"] in ["crond"]
             }
         _p_dict = t_dict
     srv_com["process_tree"] = process_tools.compress_struct(_p_dict)
     srv_com["process_tree"].attrib["format"] = "2"
Beispiel #6
0
 def get_syslog(srv_com):
     """Attach recent syslog lines (and log rates) to every device node.

     For each ns:device element in srv_com, the attributes "lines"
     (maximum line count) and "minutes" (time window) select how much
     log history to fetch from the device's filewatcher; the result is
     appended as a compressed <lines> child (plus an optional <rates>
     child) and reflected in the element's attributes.
     """
     # print srv_com.pretty_print()
     for _dev in srv_com.xpath(".//ns:devices/ns:device"):
         # preset error state / result; NOTE(review): these are not reset
         # when the device IS found - presumably rewritten by a later
         # stage, confirm against the caller
         _dev.attrib.update({
             "state":
             "{:d}".format(logging_tools.LOG_LEVEL_ERROR),
             "result":
             "Device not found",
         })
         _pk = int(_dev.attrib["pk"])
         if Machine.has_device(_pk):
             dev = Machine.get_device(_pk)
             # defaults of 0 presumably mean "no limit" / "no window" -
             # verify against filewatcher.get_logs
             _to_read = int(_dev.attrib.get("lines", "0"))
             _minutes_to_cover = int(_dev.attrib.get("minutes", "0"))
             lines = dev.filewatcher.get_logs(to_read=_to_read,
                                              minutes=_minutes_to_cover)
             dev.log("lines found: {:d} (of {:d} / {:d})".format(
                 len(lines),
                 _to_read,
                 _minutes_to_cover,
             ))
             _dev.attrib.update({
                 "read": "{:d}".format(len(lines)),
                 # for line version
                 "version": "1",
             })
             rates = dev.filewatcher.get_rates()
             if rates:
                 # one <rate> element per (timeframe seconds, rate) pair
                 _dev.append(
                     srv_com.builder(
                         "rates", *[
                             srv_com.builder(
                                 "rate",
                                 timeframe="{:d}".format(_seconds),
                                 rate="{:.4f}".format(_rate))
                             for _seconds, _rate in rates.iteritems()
                         ]))
             # compressed payload with all fetched log lines
             _dev.append(
                 srv_com.builder(
                     "lines",
                     process_tools.compress_struct(
                         [_line.get_xml_format() for _line in lines])))
Beispiel #7
0
 def to_hint(self, srv_reply):
     """Translate an ovirt VM reply into monitoring hints.

     Counts VMs per state ("up" / "down"), records id and name of every
     running domain, emits one passive hint per running domain and one
     compressed overview hint. Returns an empty list for a None reply.
     """
     VALID_STATES = {"up", "down"}
     hint_list = []
     if srv_reply is None:
         return hint_list
     # print srv_reply.pretty_print()
     overview = {state: 0 for state in VALID_STATES}
     overview["run_ids"] = []
     overview["run_names"] = []
     if "vms" in srv_reply:
         for vm_entry in srv_reply.xpath(".//ns:vms")[0]:
             # each entry carries a compressed XML blob describing one VM
             vm_xml = etree.fromstring(
                 process_tools.decompress_struct(vm_entry.text))
             state = vm_xml.xpath(".//status/state/text()")[0]
             if state in VALID_STATES:
                 overview[state] += 1
             if state == "up":
                 dom_id = vm_xml.get("id")
                 dom_name = vm_xml.findtext("name")
                 overview["run_ids"].append(dom_id)
                 overview["run_names"].append(dom_name)
                 hint_list.append(
                     monitoring_hint(
                         key="domain_{}".format(dom_id),
                         v_type="s",
                         info="ovirt Domain {}".format(dom_name),
                         value_string=dom_name,
                         persistent=True,
                         is_active=False,
                     )
                 )
     # single active overview hint with counters and id/name lists
     hint_list.append(
         monitoring_hint(
             key="overview",
             v_type="B",
             info="Domain overview",
             persistent=True,
             value_blob=process_tools.compress_struct(overview),
             is_active=True,
         )
     )
     return hint_list
Beispiel #8
0
 def run(self, srv_com, args):
     """Run the configured syslog checks against a single device.

     Loads the SyslogCheck records named in args.checks (comma-separated
     pk list), scans the device's recent log lines with each check's
     expressions, forwards the per-check results as passive check
     results to the monitoring server and stores the overall command
     result into srv_com via res.set_result().
     """
     _dev = args.device
     # get checks
     check_pks = sorted(
         [int(_val) for _val in args.checks.strip().split(",")])
     checks = SyslogCheck.objects.filter(Q(pk__in=check_pks))
     found_pks = sorted([_check.pk for _check in checks])
     res = CheckResult()
     # warn about requested pks with no matching database record
     if check_pks != found_pks:
         res.warn("Some checks are missing: {}".format(", ".join([
             "{:d}".format(_mis) for _mis in set(check_pks) - set(found_pks)
         ])))
     if not check_pks:
         res.warn("No checks defined")
     else:
         # fetch enough history to cover the longest check window
         max_minutes = max(
             [_check.minutes_to_consider for _check in checks])
         _log = _dev.filewatcher.get_logs(minutes=max_minutes)
         # NOTE(review): mon_info is built but never used below -
         # confirm whether it is still needed
         mon_info = E.monitor_info(
             uuid=_dev.device.uuid,
             name=_dev.device.name,
             time="{:d}".format(int(time.time())),
         )
         if not _log:
             res.error(
                 "no logs found (max_minutes={:d})".format(max_minutes))
             # nothing to scan: every check goes critical
             _res_list = [("syslog check {}".format(_check.name),
                           limits.mon_STATE_CRITICAL, "no logs found")
                          for _check in checks]
         else:
             res.ok(
                 "lines to scan: {:d}, checks: {:d}, minutes: {:d}".format(
                     len(_log), len(checks), max_minutes))
             _res_list = []
             _now = datetime.datetime.now()
             for _check in checks:
                 # expressions are stored as a JSON list on the check
                 expressions = [
                     SyslogCheckExpression(_obj)
                     for _obj in json.loads(_check.expressions)
                 ]
                 if expressions:
                     # reuse the full log when the window matches,
                     # otherwise restrict to the check's own window
                     if _check.minutes_to_consider == max_minutes:
                         _check_lines = _log
                     else:
                         _td = datetime.timedelta(
                             seconds=_check.minutes_to_consider * 60)
                         _check_lines = [
                             _line for _line in _log
                             if _now - _line.pd < _td
                         ]
                     _matches = []
                     for _expr in expressions:
                         _expr.feed(_check_lines)
                         if _expr.found:
                             _matches.append(_expr.match_str)
                     # reported state is the worst over all expressions
                     _res_list.append((
                         "slc {}".format(_check.name),
                         max(_expr.ret_state for _expr in expressions),
                         "{} / {} [{}], {}".format(
                             logging_tools.get_plural(
                                 "expression", len(expressions)),
                             logging_tools.get_plural(
                                 "line", len(_check_lines)),
                             logging_tools.get_plural(
                                 "minute", _check.minutes_to_consider),
                             ", ".join(_matches)
                             if _matches else "no expressions matched"),
                     ))
                 else:
                     _res_list.append((
                         "slc {}".format(_check.name),
                         limits.mon_STATE_WARNING,
                         "no expressions defined",
                     ))
         # push the per-check results as a passive check-result chunk
         _result_chunk = {
             "source": "logcheck-server check",
             "prefix": args.key,
             "list": _res_list
         }
         self.send_to_remote_server(
             icswServiceEnum.monitor_server,
             server_command.srv_command(
                 command="passive_check_results_as_chunk",
                 ascii_chunk=process_tools.compress_struct(_result_chunk),
             ))
     # print srv_com, _dev, args
     res.set_result(srv_com)
Beispiel #9
0
 def interpret(self, srv_com, ns, *args, **kwargs):
     """Interpret an ovirt Host reply and build the check result.

     Feeds host count, names and state/type counters into an ExtReturn;
     when ns.reference carries a compressed reference struct, also
     builds one passive check result per referenced host and attaches
     them as a compressed ascii_chunk.
     """
     hosts = Host.deserialize(srv_com)
     # print etree.tostring(hosts, pretty_print=True)
     # print etree.tostring(sds)
     ret = ExtReturn()
     ret.feed_str(
         logging_tools.get_plural("Host", len(hosts.findall(".//host"))))
     ret.feed_str(
         logging_tools.reduce_list(hosts.xpath(".//host/name/text()")))
     # state / external-status / type counters with their OK values
     ret.feed_str_state(
         *SimpleCounter(hosts.xpath(".//host/status/state/text()"),
                        ok=["up"],
                        prefix="State").result)
     ret.feed_str_state(
         *SimpleCounter(hosts.xpath(".//host/external_status/state/text()"),
                        ok=["ok"],
                        prefix="ExtStatus").result)
     ret.feed_str_state(*SimpleCounter(hosts.xpath(".//host/type/text()"),
                                       ok=["rhel"],
                                       prefix="Type").result)
     # sum the per-host VM summary counters over all hosts
     count_dict = {
         _key: sum([
             int(_val) for _val in hosts.xpath(
                 ".//host/summary/{}/text()".format(_key))
         ])
         for _key in [
             "active",
             "migrating",
             "total",
         ]
     }
     if ns.reference not in ["", "-"]:
         _ref = process_tools.decompress_struct(ns.reference)
         _passive_dict = {
             "source": "ovirt_overview",
             "prefix": ns.passive_check_prefix,
             "list": [],
         }
         # one passive result per host recorded in the reference struct
         for run_id, run_name in zip(_ref["run_ids"], _ref["run_names"]):
             _prefix = "ovirt Host {}".format(run_name)
             _host = hosts.xpath(".//host[@id='{}']".format(run_id))
             if len(_host):
                 _host = _host[0]
                 _state = _host.findtext(".//status/state")
                 _htype = _host.findtext("type")
                 # only "up" counts as OK, everything else is critical
                 if _state in ["up"]:
                     _nag_state = limits.mon_STATE_OK
                 else:
                     _nag_state = limits.mon_STATE_CRITICAL
                 _ret_f = [
                     "state is {}".format(_state),
                     "type is {}".format(_htype),
                 ]
                 if _host.find("summary") is not None:
                     _ret_f.extend([
                         "{}={:d}".format(
                             _key,
                             int(_host.findtext("summary/{}".format(_key))))
                         for _key in ["active", "migrating", "total"]
                     ])
                 if _host.find("memory") is not None:
                     _ret_f.append("mem {}".format(
                         logging_tools.get_size_str(
                             int(_host.findtext("memory")))))
                 _passive_dict["list"].append((
                     _prefix,
                     _nag_state,
                     ", ".join(_ret_f),
                 ))
             else:
                 # referenced host is no longer present in the reply
                 _passive_dict["list"].append((
                     _prefix,
                     limits.mon_STATE_CRITICAL,
                     "Host {} not found".format(run_name),
                 ))
         # print _passive_dict
         ret.ascii_chunk = process_tools.compress_struct(_passive_dict)
     ret.feed_str(", ".join([
         "{}: {}".format(_key, count_dict[_key])
         for _key in sorted(count_dict.keys())
     ]))
     return ret
Beispiel #10
0
 def interpret(self, srv_com, ns, *args, **kwargs):
     """Interpret an ovirt StorageDomain reply and build the check result.

     Feeds domain count, state/type counters and aggregated size info
     into an ExtReturn; when ns.reference carries a compressed reference
     struct, also builds one passive check result per referenced domain
     and attaches them as a compressed ascii_chunk.

     Fixes: narrowed the bare ``except:`` around the size evaluation to
     (TypeError, ValueError) — the expected failures are ``int(None)``
     for missing fields and malformed numeric text; the bare clause also
     swallowed unrelated errors. Corrected the "commited" typo in the
     size message.
     """
     sds = StorageDomain.deserialize(srv_com)
     # print etree.tostring(sds)
     ret = ExtReturn()
     ret.feed_str(
         logging_tools.get_plural("Storagedomain",
                                  len(sds.findall(".//storage_domain"))))
     # external-status / domain-type / storage-type counters
     ret.feed_str_state(
         *SimpleCounter(sds.xpath(".//external_status/state/text()"),
                        ok=["ok"],
                        prefix="State").result)
     ret.feed_str_state(
         *SimpleCounter(sds.xpath(".//storage_domain/type/text()"),
                        ok=["data", "export", "image", "iso"],
                        prefix="Domain Type").result)
     ret.feed_str_state(
         *SimpleCounter(sds.xpath(".//storage_domain/storage/type/text()"),
                        ok=["glance", "iscsi", "nfs", "fcp"],
                        prefix="Storage Type").result)
     # aggregate the byte counters over all storage domains
     size_dict = {
         _key: sum([
             int(_val) for _val in sds.xpath(
                 ".//storage_domain/{}/text()".format(_key))
         ])
         for _key in [
             "used",
             "available",
             "committed",
         ]
     }
     size_dict["size"] = size_dict["used"] + size_dict["available"]
     if ns.reference not in ["", "-"]:
         _ref = process_tools.decompress_struct(ns.reference)
         _passive_dict = {
             "source": "ovirt_overview",
             "prefix": ns.passive_check_prefix,
             "list": [],
         }
         # one passive result per domain recorded in the reference struct
         for run_id, run_name in zip(_ref["run_ids"], _ref["run_names"]):
             _prefix = "ovirt StorageDomain {}".format(run_name)
             _sd = sds.xpath(".//storage_domain[@id='{}']".format(run_id))
             if len(_sd):
                 _sd = _sd[0]
                 _state = _sd.findtext(".//external_status/state")
                 # only "ok" counts as OK, everything else is critical
                 if _state in ["ok"]:
                     _nag_state = limits.mon_STATE_OK
                 else:
                     _nag_state = limits.mon_STATE_CRITICAL
                 _stype = _sd.findtext("type")
                 _ret_f = [
                     "state is {}".format(_state),
                     "type is {}".format(_stype),
                     "storage type is {}".format(
                         _sd.findtext("storage/type")),
                 ]
                 if _stype in ["data", "iso", "export"]:
                     try:
                         _avail = int(_sd.findtext("available"))
                         _used = int(_sd.findtext("used"))
                         _committed = int(_sd.findtext("committed"))
                         # usage in percent, guarded against division by 0
                         _pused = 100. * _used / max(1, _avail + _used)
                         _size_str = "size is {} (used {} [{:.2f}%], avail {}), committed {}".format(
                             logging_tools.get_size_str(_avail + _used),
                             logging_tools.get_size_str(_used),
                             _pused,
                             logging_tools.get_size_str(_avail),
                             logging_tools.get_size_str(_committed),
                         )
                         if _pused > 95:
                             _nag_state = max(_nag_state,
                                              limits.mon_STATE_CRITICAL)
                         elif _pused > 90:
                             _nag_state = max(_nag_state,
                                              limits.mon_STATE_WARNING)
                     except (TypeError, ValueError):
                         # missing (None) or malformed size fields
                         _ret_f.append("cannot evaluate size")
                         _nag_state = max(_nag_state,
                                          limits.mon_STATE_WARNING)
                     else:
                         _ret_f.append(_size_str)
                 _passive_dict["list"].append((
                     _prefix,
                     _nag_state,
                     ", ".join(_ret_f),
                 ))
             else:
                 # referenced domain is no longer present in the reply
                 _passive_dict["list"].append((
                     _prefix,
                     limits.mon_STATE_CRITICAL,
                     "StorageDomain not found",
                 ))
         ret.ascii_chunk = process_tools.compress_struct(_passive_dict)
     ret.feed_str(", ".join([
         "{}: {}".format(_key, logging_tools.get_size_str(size_dict[_key]))
         for _key in sorted(size_dict.keys())
     ]))
     return ret
Beispiel #11
0
    def interpret(self, srv_com, ns, *args, **kwargs):
        """Interpret an ovirt VM reply and build the check result.

        Reports the VM count and per-state counts; when ns.reference
        carries a compressed reference struct, also emits one passive
        check result per referenced domain (attached as a compressed
        ascii_chunk) and warns when the current up/down counts differ
        from the reference.

        Fixes: corrected the typo in the mismatch message ("should by"
        -> "should be"); removed a redundant re-initialisation of
        ret_state inside the ``if _ref`` branch.
        """
        if ns.reference not in ["", "-"]:
            _ref = process_tools.decompress_struct(ns.reference)
            _passive_dict = {
                "source": "ovirt_overview",
                "prefix": ns.passive_check_prefix,
                "list": [],
            }
        else:
            _ref = None
            _passive_dict = {}
        _vms = VM.deserialize(srv_com)
        _num_vms = len(_vms)
        _states = _vms.xpath(".//vm/status/state/text()", smart_strings=False)
        # state -> occurrence count
        _state_dict = {
            _state: _states.count(_state)
            for _state in set(_states)
        }
        if _ref:
            # one passive result per domain recorded in the reference
            for run_name in _ref["run_names"]:
                _vm = _vms.xpath(".//vm[name[text()='{}']]".format(run_name))
                _prefix = "ovirt Domain {}".format(run_name)
                if len(_vm):
                    _vm = _vm[0]
                    _memory = int(_vm.findtext("memory"))
                    _sockets = int(_vm.find("cpu/topology").get("sockets"))
                    _cores = int(_vm.find("cpu/topology").get("cores"))
                    _state = _vm.findtext("status/state")
                    _ret_f = [
                        "state is {}".format(_state), "memory {}".format(
                            logging_tools.get_size_str(_memory,
                                                       long_format=True)),
                        "CPU info: {}, {}".format(
                            logging_tools.get_plural("socket", _sockets),
                            logging_tools.get_plural("core", _cores),
                        )
                    ]
                    # only "up" counts as OK, everything else is critical
                    if _state in ["up"]:
                        _nag_state = limits.mon_STATE_OK
                    else:
                        _nag_state = limits.mon_STATE_CRITICAL
                    _passive_dict["list"].append((
                        _prefix,
                        _nag_state,
                        ", ".join(_ret_f),
                    ))
                else:
                    _passive_dict["list"].append((
                        _prefix,
                        limits.mon_STATE_CRITICAL,
                        "domain not found",
                    ))
        _error_list = []
        ret_state = limits.mon_STATE_OK
        if _ref:
            # compare current up/down counts against the reference
            for _state in ["up", "down"]:
                _current = _state_dict.get(_state, 0)
                if _current != _ref[_state]:
                    _error_list.append("{} should be {:d}".format(
                        _state,
                        _ref[_state],
                    ))
                    ret_state = max(ret_state, limits.mon_STATE_WARNING)

        if _ref is None:
            ascii_chunk = ""
        else:
            ascii_chunk = process_tools.compress_struct(_passive_dict)
        return ExtReturn(
            ret_state,
            "{}, {}".format(
                logging_tools.get_plural("VM", _num_vms),
                ", ".join([
                    "{:d} {}".format(_state_dict[_key], _key)
                    for _key in sorted(_state_dict)
                ] + _error_list),
            ),
            ascii_chunk=ascii_chunk,
        )
Beispiel #12
0
 def send_data(self):
     """Serialize self.xml and return it as a compressed struct."""
     serialized = etree.tostring(self.xml)
     return process_tools.compress_struct(serialized)