Example 1
def _lic_show(opts, act_conf):
    elo = sge_license_tools.ExternalLicenses(opts.base,
                                             opts.site,
                                             log_com=_log)
    _xml = sge_license_tools.LicenseCheck(
        log_com=_log,
        lmutil_path=act_conf["LMUTIL_PATH"],
        license_file=act_conf["LICENSE_FILE"],
        verbose=opts.verbose,
    ).check()
    elo.read()
    elo.feed_xml_result(_xml)
    out_list = logging_tools.NewFormList()
    for _t_type in ["simple", "complex"]:
        for _name in sorted(elo.licenses.keys()):
            _lic = elo.licenses[_name]
            if _lic.license_type == _t_type:
                out_list.append(_lic.get_info_line())
    print(str(out_list))
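
The snippets collected here all build their output the same way; the sketch below distills that shared idiom on its own. It is a minimal, hypothetical example: the import path and the sample data are assumptions (the snippets do not show their imports), while the NewFormList, form_entry and form_entry_right calls mirror the usage seen in Example 1 and the later examples.

# Minimal sketch of the shared table-building pattern (import path assumed).
from initat.tools import logging_tools

def print_table(rows):
    out_list = logging_tools.NewFormList()
    for name, count in rows:
        out_list.append([
            logging_tools.form_entry(name, header="name"),
            logging_tools.form_entry_right(count, header="count"),
        ])
    # str() renders all appended rows as an aligned text table with a header line
    print(str(out_list))

print_table([("simple", 3), ("complex", 12)])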
Example 2
 def _log_limits(self):
     # read limits
     r_dict = {}
     try:
         import resource
     except ImportError:
         self.log("cannot import resource",
                  logging_tools.LOG_LEVEL_CRITICAL)
     else:
         available_resources = [
             key for key in dir(resource) if key.startswith("RLIMIT")
         ]
         for av_r in available_resources:
             try:
                 r_dict[av_r] = resource.getrlimit(getattr(resource, av_r))
             except ValueError:
                 r_dict[av_r] = "invalid resource"
             except:
                 r_dict[av_r] = None
         if r_dict:
             res_keys = sorted(r_dict.keys())
             self.log("{} defined".format(
                 logging_tools.get_plural("limit", len(res_keys))))
             res_list = logging_tools.NewFormList()
             for key in res_keys:
                 val = r_dict[key]
                 if isinstance(val, str):
                     info_str = val
                 elif isinstance(val, tuple):
                     info_str = "{:8d} (hard), {:8d} (soft)".format(*val)
                 else:
                     info_str = "None (error?)"
                 res_list.append([
                     logging_tools.form_entry(key, header="key"),
                     logging_tools.form_entry(info_str, header="value")
                 ])
             for line in str(res_list).split("\n"):
                 self.log(line)
         else:
             self.log("no limits found, strange ...",
                      logging_tools.LOG_LEVEL_WARN)
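
A hedged sketch of the render-then-log variant used in _log_limits above: instead of printing the table, the rendered NewFormList is split into lines and handed to a per-line log callable. The helper name and the import path are assumptions, not part of the original code.

# Hypothetical helper mirroring _log_limits: emit a rendered table line by line.
from initat.tools import logging_tools  # import path assumed

def log_table(form_list, log=print):
    for line in str(form_list).split("\n"):
        log(line)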
Example 3
def do_list(cur_opts, log_com):
    users = get_users(cur_opts, log_com)
    out_list = logging_tools.NewFormList()
    for _user in users:
        out_list.append(
            [
                logging_tools.form_entry(_user.login, header="login"),
                logging_tools.form_entry(_user.uid, header="uid"),
                logging_tools.form_entry(_user.active, header="active"),
                logging_tools.form_entry(_user.group.groupname, header="group"),
                logging_tools.form_entry(_user.group.gid, header="gid"),
                logging_tools.form_entry(_user.group.active, header="gactive"),
                logging_tools.form_entry(_user.first_name, header="first name"),
                logging_tools.form_entry(_user.last_name, header="last name"),
                logging_tools.form_entry(_user.email, header="email"),
                logging_tools.form_entry(_user.login_count, header="logincount"),
                logging_tools.form_entry(_user.login_fail_count, header="failedcount"),
                logging_tools.form_entry(_user.user_variable_set.all().count(), header="#vars"),
                logging_tools.form_entry(_user.comment, header="comment"),
            ]
        )
    print(str(out_list))
Example 4
 def get_config_info(self):
     gk = sorted(self.keys())
     if gk:
         f_obj = logging_tools.NewFormList()
         for key in gk:
             if self.get_type(key) in ["a", "d"]:
                 pv = self.pretty_print(key)
                 f_obj.append([
                     logging_tools.form_entry(key),
                     logging_tools.form_entry("list with {}:".format(
                         logging_tools.get_plural("entry", len(pv)))),
                     logging_tools.form_entry(self.get_type(key)),
                     logging_tools.form_entry(self.get_source(key)),
                 ])
                 for idx, entry in enumerate(pv):
                     f_obj.append([
                         logging_tools.form_entry(""),
                         logging_tools.form_entry(entry),
                         logging_tools.form_entry(str(idx)),
                         logging_tools.form_entry("---"),
                     ])
             else:
                 f_obj.append([
                     logging_tools.form_entry(key, header="key"),
                     logging_tools.form_entry(self.pretty_print(key),
                                              header="value"),
                     logging_tools.form_entry(self.get_type(key),
                                              pre_str=", (",
                                              post_str=" from ",
                                              header="type"),
                     logging_tools.form_entry(self.get_source(key),
                                              post_str=")",
                                              header="source"),
                 ])
         ret_str = str(f_obj).split("\n")
     else:
         ret_str = []
     return ret_str
Example 5
def _domain_enum_show_command(options):
    from initat.cluster.backbone.domain_enum import icswDomainEnum
    from initat.cluster.backbone.models import DomainTypeEnum
    print("")
    print("DomainEnums defined: {:d}".format(len(icswDomainEnum)))
    _list = logging_tools.NewFormList()
    _c_dict = {
        entry.enum_name: entry
        for entry in DomainTypeEnum.objects.all()
    }
    for entry in icswDomainEnum:
        if entry.name not in _c_dict:
            if options.sync:
                new_entry = DomainTypeEnum.create_db_entry(entry)
                _c_dict[new_entry.enum_name] = new_entry
            else:
                _db_str = "no"
        if entry.name in _c_dict:
            # if options.sync:
            #     _c_dict[entry.name].update_values(entry)
            _db_str = "yes ({:d})".format(_c_dict[entry.name].pk)
        if entry.value.default_enum:
            _default_info = entry.value.default_enum.name
        else:
            _default_info = "---"
        if entry.value.domain_enum:
            _domain_info = entry.value.domain_enum.name
        else:
            _domain_info = "---"
        _list.append([
            logging_tools.form_entry(entry.name, header="EnumName"),
            logging_tools.form_entry(entry.value.name, header="Name"),
            logging_tools.form_entry(entry.value.info, header="Info"),
            logging_tools.form_entry_center(_db_str, header="DB info"),
            logging_tools.form_entry(_default_info, header="Default Enum"),
            logging_tools.form_entry(_domain_info, header="Domain Enum"),
        ])
    print(str(_list))
Example 6
 def interpret(self, srv_com, cur_ns):
     if cur_ns.arguments:
         re_list = [re.compile(_arg) for _arg in cur_ns.arguments]
     else:
         re_list = []
     cur_vector = srv_com["data:machine_vector"]
     if cur_ns.raw:
         return limits.mon_STATE_OK, etree.tostring(cur_vector, encoding="unicode")
     else:
         vector_keys = sorted(srv_com.xpath(".//ns:mve/@name", start_el=cur_vector, smart_strings=False))
         used_keys = [key for key in vector_keys if any([cur_re.search(key) for cur_re in re_list]) or not re_list]
         ret_array = [
             "MachineVector id {}, {}, {} shown{}".format(
                 cur_vector.attrib["version"],
                 logging_tools.get_plural("key", len(vector_keys)),
                 logging_tools.get_plural("key", len(used_keys)),
                 ":" if used_keys else "",
             )
         ]
         if used_keys:
             out_list = logging_tools.NewFormList()
             max_num_keys = 0
             _list = []
             for mv_num, mv_key in enumerate(vector_keys):
                 if mv_key in used_keys:
                     cur_xml = srv_com.xpath("//ns:mve[@name='{}']".format(mv_key), start_el=cur_vector, smart_strings=False)[0]
                     _mv = hm_classes.MachineVectorEntry(
                         cur_xml.attrib.pop("name"),
                         **cur_xml.attrib
                     )
                     _list.append((mv_num, _mv))
                     max_num_keys = max(max_num_keys, _mv.num_keys)
             for mv_num, entry in _list:
                 out_list.append(entry.get_form_entry(mv_num, max_num_keys))
             ret_array.extend(str(out_list).split("\n"))
         return limits.mon_STATE_OK, "\n".join(ret_array)
Example 7
    def interpret(self, srv_com, cur_ns):
        _fe = logging_tools.form_entry

        def proc_line(_ps, **kwargs):
            nest = kwargs.get("nest", 0)
            if _psutil:
                _affinity = _ps["cpu_affinity"]
                if len(_affinity) == num_cores:
                    _affinity = "-"
                else:
                    _affinity = ",".join(
                        ["{:d}".format(_core) for _core in _affinity])
                pass
            else:
                _affinity = _ps.get("affinity", "-")
            return [
                _fe("{}{:d}".format(" " * nest, _ps["pid"]), header="pid"),
                _fe(_ps["ppid"], header="ppid"),
                _fe(_ps["uids"][0] if _psutil else proc_stuff["uid"],
                    header="uid"),
                _fe(_ps["gids"][0] if _psutil else proc_stuff["gid"],
                    header="gid"),
                _fe(_ps["state"], header="state"),
                _fe(_ps.get("last_cpu", -1), header="cpu"),
                _fe(_affinity, header="aff"),
                _fe(_ps["out_name"], header="process"),
            ]

        def draw_tree(m_pid, nest=0):
            proc_stuff = result[m_pid]
            r_list = [proc_line(proc_stuff, nest=nest)]
            # _fe("%s%s" % (" " * nest, m_pid), header="pid"),
            for dt_entry in [
                    draw_tree(y, nest + 2) for y in result[m_pid]["childs"]
            ]:
                r_list.extend([z for z in dt_entry])
            return r_list

        tree_view = cur_ns.tree
        comline_view = cur_ns.comline
        if cur_ns.filter:
            name_re = re.compile("^.*%s.*$" % ("|".join(cur_ns.filter)),
                                 re.IGNORECASE)
            tree_view = False
        else:
            name_re = re.compile(".*")
        result = srv_com["process_tree"]
        _psutil = "psutil" in srv_com
        if _psutil:
            num_cores = srv_com["*num_cores"]
            # unpack and cast pid to integer
            result = {
                int(key): value
                for key, value in server_command.decompress(result.text,
                                                            json=True).items()
            }
            for _val in result.values():
                _val["state"] = process_tools.PROC_STATUSES_REV[_val["status"]]
        # print etree.tostring(srv_com.tree, pretty_print=True)
        ret_state = limits.mon_STATE_CRITICAL
        pids = sorted([
            key for key, value in result.items()
            if name_re.match(value["name"])
        ])
        for act_pid in pids:
            proc_stuff = result[act_pid]
            proc_name = proc_stuff["name"] if proc_stuff["exe"] else "[%s]" % (
                proc_stuff["name"])
            if comline_view:
                proc_name = " ".join(proc_stuff.get("cmdline")) or proc_name
            proc_stuff["out_name"] = proc_name
        ret_a = [
            "found {} matching {}".format(
                logging_tools.get_plural("process", len(pids)),
                name_re.pattern)
        ]
        form_list = logging_tools.NewFormList()
        if tree_view:
            for act_pid in pids:
                result[act_pid]["childs"] = [
                    pid for pid in pids if result[pid]["ppid"] == int(act_pid)
                ]
            for init_pid in [pid for pid in pids if not result[pid]["ppid"]]:
                form_list.extend(
                    [add_line for add_line in draw_tree(init_pid)])
        else:
            form_list.extend([proc_line(result[_pid]) for _pid in pids])
        if form_list:
            ret_a.extend(str(form_list).split("\n"))
        return ret_state, "\n".join(ret_a)
Example 8
def do_info(cur_opts, log_com):
    if not cur_opts.username:
        print("No user name given")
        return 1
    _user = _get_user(cur_opts.username)
    _ret_state = 0
    if _user is None:
        _ret_state = 1
    else:
        from initat.cluster.backbone.models import user_quota_setting, user_variable
        from django.db.models import Q
        print("")
        print(
            "User with loginname '{}' (user {}), uid={:d}, group={} (gid={:d})".format(
                _user.login,
                str(_user),
                _user.uid,
                str(_user.group),
                _user.group.gid,
            )
        )
        num_qs = _user.user_quota_setting_set.all().count()
        if num_qs and cur_opts.system_wide_quota:
            print("")
            print(
                "{} found:".format(
                    logging_tools.get_plural("system-wide quota setting", num_qs)
                )
            )
            for _qs in _user.user_quota_setting_set.all():
                _bd = _qs.quota_capable_blockdevice
                print(
                    "    device {} ({} on {}): {}".format(
                        str(_bd.device.full_name),
                        _bd.block_device_path,
                        _bd.mount_path,
                        get_quota_str(_qs),
                    )
                )
        try:
            _cmd = "quota --show-mntpoint -wp -u {}".format(
                _user.login,
            )
            _res = subprocess.check_output(
                _cmd.split(),
                stderr=subprocess.STDOUT,
            ).decode("utf-8")
        except subprocess.CalledProcessError as sb_exc:
            _res = sb_exc.output.decode("utf-8")
            # print("error calling '{}': {}".format(_cmd, process_tools.get_except_info()))
            _ret_state = 1
        except OSError as sb_exc:
            # quota command not found
            _res = "denied: {}".format(sb_exc)
            # print("error calling '{}': {}".format(_cmd, process_tools.get_except_info()))
            _ret_state = 1
        else:
            _ret_state = 0
        if _res.lower().count("denied"):
            print("    error getting local quotas for {}: {}".format(_user.login, _res))
        else:
            # print _res
            _lines = [_line.strip().split() for _line in _res.split("\n") if _line.strip()]
            _lines = [_line for _line in _lines if len(_line) == 10]
            if _lines:
                print("", "local quota:", sep="\n")
                _line = _lines[-1]
                _bytes_violate = _line[2].count("*") > 0
                _local = user_quota_setting(
                    bytes_used=int(_line[2].replace("*", "")) * 1024,
                    bytes_soft=int(_line[3]) * 1024,
                    bytes_hard=int(_line[4]) * 1024,
                    bytes_gracetime=int(_line[5]),
                )
                print(
                    "    local mountpoint: {}".format(
                        get_quota_str(_local),
                    )
                )
        if cur_opts.delete_var:
            print("")
            try:
                _cv = user_variable.objects.get(Q(user=_user) & Q(idx=cur_opts.delete_var))
            except user_variable.DoesNotExist:
                print("Variable to delete does not exist")
            else:
                print("Deleting '{}'".format(str(_cv)))
                _cv.delete()
        if cur_opts.show_vars:
            out_list = logging_tools.NewFormList()
            for _var in _user.user_variable_set.all().order_by("name"):
                out_list.append(
                    [
                        logging_tools.form_entry(_var.idx, header="idx"),
                        logging_tools.form_entry(_var.name, header="name"),
                        logging_tools.form_entry(_var.var_type, header="type"),
                        logging_tools.form_entry_right(_var.value if _var.var_type != "j" else "{:d} Bytes".format(len(_var.json_value)), header="value"),
                        logging_tools.form_entry_center("yes" if _var.editable else "no", header="editable"),
                        logging_tools.form_entry_center("yes" if _var.hidden else "no", header="hidden"),
                        logging_tools.form_entry(_var.date.strftime("%H:%M:%S %a, %d. %b %Y"), header="created"),
                        logging_tools.form_entry(_var.description, header="description"),
                    ]
                )
            print(str(out_list))

    return _ret_state
Example 9
 def interpret(self, srv_com, cur_ns):
     r_dict = server_command.decompress(srv_com["pkg_list"].text,
                                        pickle=True)
     root_dir = srv_com["root_dir"].text
     in_format = srv_com["format"].text
     out_f = logging_tools.NewFormList()
     keys = sorted(r_dict.keys())
     header_line = "{} found, system is {} (root is {})".format(
         logging_tools.get_plural("package", len(keys)),
         in_format,
         root_dir,
     )
     if keys:
         if in_format == "rpm":
             for key in keys:
                 for value in r_dict[key]:
                     if isinstance(value, tuple):
                         if len(value) == 4:
                             ver, rel, arch, summary = value
                             size = 0
                         else:
                             ver, rel, arch, size, summary = value
                     else:
                         ver, rel, arch, size, summary = (value["version"],
                                                          value["release"],
                                                          value["arch"],
                                                          value["size"],
                                                          value["summary"])
                     out_f.append([
                         logging_tools.form_entry(key, header="name"),
                         logging_tools.form_entry_right(ver,
                                                        header="version"),
                         logging_tools.form_entry(rel, header="release"),
                         logging_tools.form_entry(arch, header="arch"),
                         logging_tools.form_entry_right(size,
                                                        header="size"),
                         logging_tools.form_entry(summary,
                                                  header="summary"),
                     ])
         elif in_format == "debian":
             for key in keys:
                 for value in r_dict[key]:
                     d_flag, s_flag, e_flag = value["flags"]
                     ver, rel = (value["version"], value["release"])
                     summary = value["summary"]
                     out_f.append([
                         logging_tools.form_entry(key, header="name"),
                         logging_tools.form_entry_right(d_flag,
                                                        header="d_flag"),
                         logging_tools.form_entry_right(s_flag,
                                                        header="s_flag"),
                         logging_tools.form_entry_right(e_flag,
                                                        header="e_flag"),
                         logging_tools.form_entry_right(ver,
                                                        header="version"),
                         logging_tools.form_entry(rel, header="release"),
                         logging_tools.form_entry(summary,
                                                  header="summary"),
                     ])
         return limits.mon_STATE_OK, "{}\n{}".format(
             header_line, str(out_f))
     else:
         return limits.mon_STATE_CRITICAL, "{}, nothing found".format(
             header_line)
Example 10
def _service_enum_show_command(options):

    from initat.cluster.backbone.server_enums import icswServiceEnum
    from initat.cluster.backbone.models import ConfigServiceEnum, config
    from initat.cluster.backbone import factories
    from django.core.exceptions import ValidationError

    _c_dict = {
        entry.enum_name: entry
        for entry in ConfigServiceEnum.objects.all()
    }
    print("")
    print("ServiceEnums defined: {:d}".format(len(icswServiceEnum)))
    _list = logging_tools.NewFormList()
    for entry in icswServiceEnum:
        if entry.name not in _c_dict:
            if options.sync and (entry.value.server_service
                                 or entry.value.relayer_service
                                 ) and entry.value.sync_config:
                new_entry = ConfigServiceEnum.create_db_entry(entry)
                _c_dict[new_entry.enum_name] = new_entry
            else:
                _db_str = "no"
        if entry.name in _c_dict:
            if options.sync:
                _c_dict[entry.name].update_values(entry)
            _db_str = "yes ({:d})".format(_c_dict[entry.name].pk)
        if entry.value.server_service:
            _egg_action = ", ".join(
                [str(_action)
                 for _action in entry.value.egg_actions]) or "none"
        else:
            _egg_action = "---"
        _list.append([
            logging_tools.form_entry(entry.name, header="EnumName"),
            logging_tools.form_entry(entry.value.name, header="Name"),
            logging_tools.form_entry_center(
                "yes" if entry.value.root_service else "no",
                header="Root Service"),
            logging_tools.form_entry_center(
                "yes" if entry.value.server_service else "no",
                header="Server"),
            logging_tools.form_entry_center(
                "yes" if entry.value.relayer_service else "no",
                header="Relayer"),
            logging_tools.form_entry(entry.value.info, header="Info"),
            logging_tools.form_entry_center(_db_str, header="DB info"),
            logging_tools.form_entry(_egg_action, header="Egg actions"),
        ])
    print(str(_list))
    if options.sync:
        _change_list = []
        # compat dict
        comp_dict = {
            "rrd_grapher": icswServiceEnum.grapher_server.name,
            "rrd_server": icswServiceEnum.collectd_server.name,
            "rrd_collector": icswServiceEnum.collectd_server.name,
            "server": icswServiceEnum.cluster_server.name,
            "ldap_server": icswServiceEnum.ldap_server.name,
        }
        for c_con in config.objects.all():
            if not c_con.config_service_enum_id:
                _check_names = [c_con.name]
                if c_con.name in comp_dict:
                    _check_names.append(comp_dict[c_con.name])
                for _check_name in _check_names:
                    if _check_name in _c_dict:
                        c_con.config_service_enum = _c_dict[_check_name]
                        try:
                            c_con.save(update_fields=["config_service_enum"])
                        except ValidationError:
                            print("cannot save {}: {}".format(
                                str(c_con), process_tools.get_except_info()))
                        else:
                            _change_list.append(c_con)
                            break
        _create_list = []
        for db_enum in _c_dict.values():
            if not db_enum.config_set.all().count():
                _create_list.append(
                    factories.Config(
                        name=db_enum.name,
                        description=db_enum.info,
                        config_service_enum=db_enum,
                        server_config=True,
                    ))

        if len(_change_list):
            print("")
            print("{} moved to ConfigServiceEnum:".format(
                logging_tools.get_plural("Config", len(_change_list))))
            for entry in _change_list:
                print("    {} ({})".format(entry.name,
                                           str(entry.config_service_enum)))
        if len(_create_list):
            print("")
            print("{} created:".format(
                logging_tools.get_plural("Config", len(_create_list))))
            for entry in _create_list:
                print("    {} ({})".format(entry.name,
                                           str(entry.config_service_enum)))
Example 11
def main(options):
    options.overview = True if (not options.stat and not options.index and not options.num) else False
    options.index = [int(cur_idx) for cur_idx in options.index]
    err_file_name = os.path.join(LOG_ROOT, "logging-server", "err_py")
    if not os.path.isfile(err_file_name):
        print("{} does not exist".format(err_file_name))
        sys.exit(1)
    if options.clear:
        new_file_name = "{}_{}.tar".format(
            err_file_name,
            time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime())
        )
        if process_tools.find_file("xz"):
            _pf = ".xz"
            _compr = "J"
            c_stat, out = subprocess.getstatusoutput(
                "tar cpJf {}{} {}".format(
                    new_file_name,
                    _pf,
                    err_file_name
                )
            )
        elif process_tools.find_file("bzip2"):
            _pf = ".bz2"
            _compr = "j"
            c_stat, out = subprocess.getstatusoutput(
                "tar cpjf {}{} {}".format(
                    new_file_name,
                    _pf,
                    err_file_name
                )
            )
        else:
            _pf = ""
            _compr = ""
        print(
            "taring {} to {}{} ...".format(
                err_file_name,
                new_file_name,
                _pf
            )
        )
        c_stat, out = subprocess.getstatusoutput(
            "tar cp{}f {}{} {}".format(
                _compr,
                new_file_name,
                _pf,
                err_file_name
            )
        )
        if c_stat:
            print("*** error (%d): %s" % (c_stat, out))
        else:
            os.unlink(err_file_name)
        sys.exit(c_stat)
    try:
        err_lines = [
            line.strip() for line in open(
                err_file_name,
                "r"
            ).read().split("\n") if line.count("[ES ")
        ]
    except IOError:
        print(
            "Cannot read '{}': {}".format(
                err_file_name,
                process_tools.get_except_info()
            )
        )
        sys.exit(1)
    print(
        "Found error_file {} with {}".format(
            err_file_name,
            logging_tools.get_plural("line", len(err_lines))
        )
    )
    errs_found, act_err = ([], None)
    act_idx, idx_dict, prev_dt = (0, {}, None)
    for line in err_lines:
        line_parts = line.split(":")
        _chunk = line_parts[-1].split()[-1]
        # the date always spans the first three colon-separated parts
        line_date = ":".join(line_parts[0:3]).strip()
        info_part = line_parts[3].strip()
        err_line = ":".join(line_parts[4:])
        # parse info_part
        try:
            if info_part.startswith("("):
                line_state = ""
            else:
                line_state = info_part.split()[0]
                info_part = info_part[len(line_state):].strip()
            info_parts = info_part.split()
            # skip error-thread name and "from pid" string
            info_parts.pop(0)
            info_parts.pop(0)
            info_parts.pop(0)
        except:
            print(
                "Error pre-parsing line '{}': {}".format(
                    line,
                    process_tools.get_except_info()
                )
            )
        else:
            try:
                _struct = json.loads(bz2.decompress(base64.b64decode(_chunk)))
                # get pid
                line_pid = _struct["pid"]
                # unknown or full source
                line_s_name = _struct["name"]
                line_uid = _struct["uid"]
                line_gid = _struct["gid"]
                line_uname = _struct["uname"]
                line_gname = _struct["gname"]
                err_line = _struct["line"]
                cur_dt = datetime.datetime.strptime(line_date, "%a %b %d %H:%M:%S %Y")
                if prev_dt:
                    dt_change = abs(cur_dt - prev_dt).seconds > 5
                else:
                    dt_change = False
                prev_dt = cur_dt
                if not act_err or act_err.pid != line_pid or dt_change or line.count("<type"):
                    act_idx += 1
                    act_err = ErrorRecord(
                        line_pid,
                        line_s_name,
                        line_uid,
                        line_uname,
                        line_gid,
                        line_gname,
                    )
                    act_err.set_idx(act_idx)
                    idx_dict[act_idx] = act_err
                    errs_found.append(act_err)
                if err_line.strip() or not options.noempty:
                    act_err.add_line(line_date, line_state, err_line)
            except:
                print(
                    "Error parsing line '{}': {}".format(
                        line,
                        process_tools.get_except_info()
                    )
                )

    print(
        "Found {}".format(
            logging_tools.get_plural("error record", len(errs_found))
        )
    )
    if options.overview:
        if errs_found:
            out_list = logging_tools.NewFormList()
            for err in errs_found:
                out_list.append(err.get_form_parts())
            print(str(out_list))
    elif options.stat:
        uid_dict = {}
        for err in errs_found:
            uid_dict.setdefault(err.uid, []).append(err)
        all_uids = list(uid_dict.keys())
        all_uids.sort()
        out_list = logging_tools.NewFormList()
        for uid in all_uids:
            uid_stuff = uid_dict[uid]
            diff_sources = []
            for err in uid_stuff:
                if err.source_name not in diff_sources:
                    diff_sources.append(err.source_name)
            diff_sources.sort()
            out_list.append(
                [
                    logging_tools.form_entry(uid, header="uid"),
                    logging_tools.form_entry(uid_stuff[0].uname, header="uname"),
                    logging_tools.form_entry(len(uid_stuff), header="# err"),
                    logging_tools.form_entry(len(diff_sources), header="# sources"),
                    logging_tools.form_entry(", ".join(diff_sources), header="sources"),
                ]
            )
        print(str(out_list))
    elif options.num:
        idx_l = list(idx_dict.keys())
        idx_l.sort()
        idx_show = []
        while options.num and idx_l:
            options.num -= 1
            idx_show.append(idx_l.pop(-1))
        idx_show.reverse()
        options.index = idx_show
    if options.index:
        for idx in options.index:
            if idx in idx_dict:
                act_err = idx_dict[idx]
                print(act_err.get_header())
                print(act_err.show_lines())
            else:
                print(
                    "Index {:d} not in index_list {}".format(
                        idx,
                        logging_tools.compress_num_list(list(idx_dict.keys()))
                    )
                )
Example 12
    def _call(self, cur_inst):
        file_list = []
        server_idxs = [self.server_idx]
        # get additional idx if host is virtual server

        sc_result = config_tools.icswServerCheck(service_type_enum=icswServiceEnum.cluster_server).get_result()
        if sc_result.effective_device is not None and sc_result.effective_device.idx != self.server_idx:
            server_idxs.append(sc_result.effective_device.idx)
        # recognize for which devices i am responsible
        dev_r = cluster_location.DeviceRecognition()
        server_idxs = list(set(server_idxs) | set(dev_r.device_dict.keys()))
        # get all peers to local machine and local netdevices
        my_idxs = netdevice.objects.exclude(
            Q(enabled=False)
        ).filter(
            Q(device__in=server_idxs) &
            Q(device__enabled=True) &
            Q(device__device_group__enabled=True)
        ).values_list("pk", flat=True)
        # ref_table
        route_obj = config_tools.RouterObject(cur_inst.log)
        all_paths = []
        for s_ndev in my_idxs:
            all_paths.extend(list(networkx.shortest_path(route_obj.nx, s_ndev, weight="weight").values()))
        # pprint.pprint(all_paths)
        nd_lut = {
            cur_nd.pk: cur_nd for cur_nd in netdevice.objects.all().select_related(
                "device"
            ).prefetch_related(
                "net_ip_set", "net_ip_set__network", "net_ip_set__domain_tree_node"
            )
        }
        # fetch key-information
        ssh_vars = device_variable.objects.filter(Q(name="ssh_host_rsa_key_pub")).select_related("device")
        rsa_key_dict = {}
        for _db_rec in ssh_vars:
            pass
        # read pre/post lines from /etc/hosts
        pre_host_lines, post_host_lines = ([], [])
        # parse pre/post host_lines
        try:
            host_lines = [line.strip() for line in codecs.open(ETC_HOSTS_FILENAME, "r", "utf-8").read().split("\n")]
        except:
            self.log(
                "error reading / parsing {}: {}".format(
                    ETC_HOSTS_FILENAME,
                    process_tools.get_except_info()),
                logging_tools.LOG_LEVEL_ERROR)
        else:
            mode, any_modes_found = (0, False)
            for line in host_lines:
                if line.lower().startswith("### aeh-start-pre"):
                    mode, any_modes_found = (1, True)
                elif line.lower().startswith("### aeh-start-post"):
                    mode, any_modes_found = (2, True)
                elif line.lower().startswith("### aeh-end"):
                    mode, any_modes_found = (0, True)
                else:
                    if mode == 1:
                        pre_host_lines.append(line)
                    elif mode == 2:
                        post_host_lines.append(line)
            if not any_modes_found:
                self.log(
                    "no ### aeh-.* stuff found in {}, copying to {}.orig".format(
                        ETC_HOSTS_FILENAME, ETC_HOSTS_FILENAME
                    )
                )
                try:
                    pass
                except:
                    self.log(
                        "error writing {}.orig: {}".format(
                            ETC_HOSTS_FILENAME,
                            process_tools.get_except_info()
                        )
                    )
        # mapping from device_name to all names for ssh_host_keys
        name_dict = {}
        # ip dictionary
        ip_dict = {}
        # min_target_dict
        min_target_dict = {}
        for cur_path in all_paths:
            min_value = route_obj.get_penalty(cur_path)
            target_nd = nd_lut[cur_path[-1]]
            min_target_dict[target_nd] = min(min_target_dict.get(target_nd, 999999999), min_value)
        tl_dtn = domain_tree_node.objects.get(Q(depth=0))
        for cur_path in all_paths:
            target_nd = nd_lut[cur_path[-1]]
            min_value = min_target_dict[target_nd]
            for cur_ip in nd_lut[cur_path[-1]].net_ip_set.all():
                # get names
                host_names = []
                cur_dtn = cur_ip.domain_tree_node or tl_dtn
                if not (cur_ip.alias.strip() and cur_ip.alias_excl):
                    host_names.append("{}{}".format(target_nd.device.name, cur_dtn.node_postfix))
                host_names.extend(["{}".format(cur_entry) for cur_entry in cur_ip.alias.strip().split()])
                if "localhost" in [x.split(".")[0] for x in host_names]:
                    host_names = [host_name for host_name in host_names if host_name.split(".")[0] == "localhost"]
                if cur_dtn.full_name:
                    if cur_dtn.create_short_names:
                        # also create short_names
                        out_names = (
                            " ".join(
                                [
                                    "{}.{} {}".format(host_name, cur_dtn.full_name, host_name) for host_name in host_names if not host_name.count(".")
                                ]
                            )
                        ).split()
                    else:
                        # only print the long names
                        out_names = ["{}.{}".format(host_name, cur_dtn.full_name) for host_name in host_names if not host_name.count(".")]
                else:
                    if cur_dtn.create_short_names:
                        # also create short_names
                        out_names = (" ".join(["{}".format(host_name) for host_name in host_names if not host_name.count(".")])).split()
                    else:
                        # only print the long names
                        out_names = ["{}".format(host_name) for host_name in host_names if not host_name.count(".")]
                # add names with dot
                out_names.extend([host_name for host_name in host_names if host_name.count(".")])
                # name_dict without localhost
                name_dict.setdefault(
                    target_nd.device.name, []
                ).extend(
                    [
                        out_name for out_name in out_names if out_name not in name_dict[target_nd.device.name] and not out_name.startswith("localhost")
                    ]
                )
                ip_dict.setdefault(cur_ip.ip, [])
                if out_names not in [entry[1] for entry in ip_dict[cur_ip.ip]]:
                    if cur_ip.ip != "0.0.0.0":
                        ip_dict[cur_ip.ip].append((min_value, out_names))
        # out_list
        loc_dict = {}
        for ip, h_list in ip_dict.items():
            all_values = sorted([entry[0] for entry in h_list])
            if all_values:
                min_value = all_values[0]
                out_names = []
                for val in all_values:
                    for _act_val, act_list in [(x_value, x_list) for x_value, x_list in h_list if x_value == val]:
                        out_names.extend([value for value in act_list if value not in out_names])
                # print min_value, ip, out_names
                loc_dict.setdefault(min_value, []).append([ipvx_tools.IPv4(ip)] + out_names)
        pen_list = sorted(loc_dict.keys())
        out_file = []
        for pen_value in pen_list:
            act_out_list = logging_tools.NewFormList()
            for entry in sorted(loc_dict[pen_value]):
                act_out_list.append(
                    [
                        logging_tools.form_entry(entry[0])
                    ] + [
                        logging_tools.form_entry(cur_e) for cur_e in entry[1:]
                    ]
                )
            host_lines = str(act_out_list).split("\n")
            out_file.extend(
                [
                    "# penalty {:d}, {}".format(
                        pen_value,
                        logging_tools.get_plural("host entry", len(host_lines))
                    ),
                    ""
                ] + host_lines + [""]
            )
        if not os.path.isdir(GROUP_DIR):
            try:
                os.makedirs(GROUP_DIR)
            except:
                pass
        if os.path.isdir(GROUP_DIR):
            # remove old files
            for file_name in os.listdir(GROUP_DIR):
                try:
                    os.unlink(os.path.join(GROUP_DIR, file_name))
                except:
                    pass
            # get all devices with netips
            all_devs = device.objects.filter(
                Q(enabled=True) &
                Q(device_group__enabled=True) &
                Q(netdevice__net_ip__ip__contains=".")
            ).values_list(
                "name",
                "device_group__name"
            ).order_by(
                "device_group__name",
                "name"
            )
            dg_dict = {}
            for dev_name, dg_name in all_devs:
                dg_dict.setdefault(dg_name, []).append(dev_name)
            for file_name, content in dg_dict.items():
                codecs.open(
                    os.path.join(GROUP_DIR, file_name),
                    "w",
                    "utf-8"
                ).write("\n".join(sorted(set(content)) + [""]))
        file_list.append(ETC_HOSTS_FILENAME)
        codecs.open(ETC_HOSTS_FILENAME, "w+", "utf-8").write(
            "\n".join(
                [
                    "### AEH-START-PRE insert pre-host lines below"
                ] + pre_host_lines +
                [
                    "### AEH-END-PRE insert pre-host lines above",
                    ""
                ] + out_file +
                [
                    "",
                    "### AEH-START-POST insert post-host lines below"
                ] + post_host_lines +
                [
                    "### AEH-END-POST insert post-host lines above",
                    ""
                ]
            )
        )
        # write known_hosts_file
        if os.path.isdir(os.path.dirname(SSH_KNOWN_HOSTS_FILENAME)):
            skh_f = open(SSH_KNOWN_HOSTS_FILENAME, "w")
            for ssh_key_node in sorted(rsa_key_dict.keys()):
                skh_f.write(
                    "{} {}\n".format(
                        ",".join(name_dict.get(ssh_key_node, [ssh_key_node])), rsa_key_dict[ssh_key_node]
                    )
                )
            skh_f.close()
            file_list.append(SSH_KNOWN_HOSTS_FILENAME)
        cur_inst.srv_com.set_result(
            "wrote {}".format(", ".join(sorted(file_list)))
        )
Example 13
    def instance_to_form_list(self, opt_ns, res_xml):
        def numeric_output(in_dict, state):
            if opt_ns.numeric:
                _out = "{} [{:3d}]".format(
                    in_dict[state][0],
                    state,
                )
            else:
                _out = in_dict[state][0]
            return _out

        prc_dict = {
            SERVICE_OK: ("running", "ok"),
            SERVICE_DEAD: ("error", "critical"),
            SERVICE_INCOMPLETE: ("incomplete", "critical"),
            SERVICE_NOT_INSTALLED: ("not installed", "warning"),
            SERVICE_NOT_CONFIGURED: ("not configured", "warning"),
        }
        crc_dict = {
            CONF_STATE_RUN: ("run", "ok"),
            CONF_STATE_STOP: ("stop", "critical"),
            CONF_STATE_IP_MISMATCH: ("ip mismatch", "critical"),
        }
        meta_dict = {
            "t": {
                TARGET_STATE_RUNNING: ("run", "ok"),
                TARGET_STATE_STOPPED: ("stop", "critical"),
            },
            "i": {
                0: ("monitor", "ok"),
                1: ("ignore", "warning"),
            }

        }
        if License is not None:
            lic_dict = {
                -1: ("-", ""),
                LicenseState.none: ("no license", "critical"),
                LicenseState.violated: ("parameter violated", "critical"),
                LicenseState.valid: ("valid", "ok"),
                LicenseState.grace: ("in grace", "warning"),
                LicenseState.expired: ("expired", "critical"),
                LicenseState.fp_mismatch: ("wrong fingerprint", "critical"),
                # LicenseState.ip_mismatch: ("ip mismatch", "critical"),
            }
        else:
            lic_dict = None
        out_bl = logging_tools.NewFormList()
        types = sorted(list(set(res_xml.xpath(".//instance/@runs_on", smart_strings=False))))
        _list = sum(
            [
                res_xml.xpath("instance[result and @runs_on='{}']".format(_type)) for _type in types
            ],
            []
        )
        for act_struct in _list:
            _res = act_struct.find("result")
            p_state = int(act_struct.find(".//process_state_info").get("state", SERVICE_DEAD))
            c_state = int(act_struct.find(".//configured_state_info").get("state", CONF_STATE_STOP))
            if not opt_ns.failed or (opt_ns.failed and p_state not in [SERVICE_OK]):
                cur_line = [logging_tools.form_entry(act_struct.attrib["name"], header="Name")]
                cur_line.append(logging_tools.form_entry(act_struct.attrib["runs_on"], header="runson"))
                cur_line.append(logging_tools.form_entry(_res.find("process_state_info").get("check_source", "N/A"), header="source"))
                if opt_ns.process:
                    s_info = act_struct.find(".//process_state_info")
                    if "num_started" not in s_info.attrib:
                        cur_line.append(logging_tools.form_entry(s_info.text))
                    else:
                        num_diff, any_ok = (
                            int(s_info.get("num_diff")),
                            True if int(act_struct.attrib["any-processes-ok"]) else False
                        )
                        # print etree.tostring(act_struct, pretty_print=True)
                        num_pids = len(_res.findall(".//pids/pid"))
                        da_name = ""
                        if any_ok:
                            pass
                        else:
                            if num_diff < 0:
                                da_name = "critical"
                            elif num_diff > 0:
                                da_name = "warning"
                        cur_line.append(logging_tools.form_entry(s_info.attrib["proc_info_str"], header="Process info", display_attribute=da_name))
                    pid_dict = {}
                    for cur_pid in act_struct.findall(".//pids/pid"):
                        pid_dict[int(cur_pid.text)] = int(cur_pid.get("count", "1"))
                    if pid_dict:
                        p_list = sorted(pid_dict.keys())
                        if max(pid_dict.values()) == 1:
                            cur_line.append(logging_tools.form_entry(logging_tools.compress_num_list(p_list), header="pids"))
                        else:
                            cur_line.append(
                                logging_tools.form_entry(
                                    ",".join(["{:d}{}".format(
                                        key,
                                        " ({:d})".format(pid_dict[key]) if pid_dict[key] > 1 else "") for key in p_list]
                                             ),
                                    header="pids"
                                )
                            )
                    else:
                        cur_line.append(logging_tools.form_entry("no PIDs", header="pids"))
                if opt_ns.started:
                    start_time = int(act_struct.find(".//process_state_info").get("start_time", "0"))
                    if start_time:
                        diff_time = max(0, time.mktime(time.localtime()) - start_time)
                        diff_days = int(diff_time / (3600 * 24))
                        diff_hours = int((diff_time - 3600 * 24 * diff_days) / 3600)
                        diff_mins = int((diff_time - 3600 * (24 * diff_days + diff_hours)) / 60)
                        diff_secs = int(diff_time - 60 * (60 * (24 * diff_days + diff_hours) + diff_mins))
                        ret_str = "{}".format(
                            time.strftime("%a, %d. %b %Y, %H:%M:%S", time.localtime(start_time))
                        )
                    else:
                        ret_str = "no start info found"
                    cur_line.append(logging_tools.form_entry(ret_str, header="started"))
                if opt_ns.config:
                    cur_line.append(logging_tools.form_entry(act_struct.find(".//config_info").text, header="config info"))
                if opt_ns.memory:
                    cur_mem = act_struct.find(".//memory_info")
                    if cur_mem is not None:
                        mem_str = process_tools.beautify_mem_info(int(cur_mem.text))
                    else:
                        # no pids hence no memory info
                        mem_str = ""
                    cur_line.append(logging_tools.form_entry_right(mem_str, header="Memory"))
                if opt_ns.version:
                    if "version" in _res.attrib:
                        _version = _res.attrib["version"]
                    else:
                        _version = ""
                    cur_line.append(logging_tools.form_entry_right(_version, header="Version"))
                _lic_info = _res.find("license_info")
                _lic_state = int(_lic_info.attrib["state"])
                if lic_dict is None:
                    cur_line.append(
                        logging_tools.form_entry(
                            "---",
                            header="License",
                        )
                    )
                else:
                    cur_line.append(
                        logging_tools.form_entry(
                            numeric_output(lic_dict, _lic_state),
                            header="License",
                            left=not opt_ns.numeric,
                            display_attribute=lic_dict[_lic_state][1],
                        )
                    )

                cur_line.append(
                    logging_tools.form_entry(
                        numeric_output(prc_dict, p_state),
                        header="PState",
                        left=not opt_ns.numeric,
                        display_attribute=prc_dict[p_state][1],
                    )
                )
                cur_line.append(
                    logging_tools.form_entry(
                        numeric_output(crc_dict, c_state),
                        header="CState",
                        left=not opt_ns.numeric,
                        display_attribute=crc_dict[c_state][1],
                    )
                )
                if opt_ns.meta:
                    _meta_res = act_struct.find(".//meta_result")
                    if _meta_res is not None:
                        t_state = int(_meta_res.get("target_state"))
                        ignore = int(_meta_res.get("ignore"))
                        cur_line.append(
                            logging_tools.form_entry(
                                numeric_output(meta_dict["t"], t_state),
                                header="TargetState",
                                left=not opt_ns.numeric,
                                display_attribute=meta_dict["t"][t_state][1],
                            )
                        )
                        cur_line.append(
                            logging_tools.form_entry(
                                meta_dict["i"][ignore][0],
                                header="Ignore",
                                display_attribute=meta_dict["i"][ignore][1],
                            )
                        )
                    else:
                        cur_line.append(
                            logging_tools.form_entry(
                                "N/A",
                                header="TargetState",
                                display_attribute="warning",
                            )
                        )
                        cur_line.append(
                            logging_tools.form_entry(
                                "N/A",
                                header="Ignore",
                                display_attribute="warning",
                            )
                        )
                out_bl.append(cur_line)
        return out_bl
Example 14
 def interpret(self, srv_com, cur_ns):
     l_dict = {}
     for key in IPMI_LONG_LIMITS:
         try:
             l_dict[key] = float(getattr(cur_ns, key))
         except:
             l_dict[key] = None
     s_list = srv_com.xpath(".//ns:sensor_list", smart_strings=False)
     if s_list:
         s_list = s_list[0]
         if cur_ns.arguments:
             el = s_list[0]
             cur_value = float(el.attrib["value"])
             ret_state = limits.mon_STATE_OK
             for t_name, log, t_state in [
                 ("lowern", False, limits.mon_STATE_CRITICAL),
                 ("lowerc", False, limits.mon_STATE_CRITICAL),
                 ("lowerw", False, limits.mon_STATE_WARNING),
                 ("upperw", True, limits.mon_STATE_WARNING),
                 ("upperc", True, limits.mon_STATE_CRITICAL),
                 ("uppern", True, limits.mon_STATE_CRITICAL),
             ]:
                 if l_dict[t_name] is not None:
                     if (log and cur_value >= l_dict[t_name]) or (
                             not log and cur_value <= l_dict[t_name]):
                         ret_state = max(ret_state, t_state)
             return ret_state, "{}: {} is {:.2f} {}".format(
                 el.attrib["key"],
                 el.attrib["info"],
                 cur_value,
                 el.attrib["unit"],
             )
         else:
             # list mode
             keys = s_list.xpath(".//@key", smart_strings=False)
             out_list = logging_tools.NewFormList()
             for key in keys:
                 el = s_list.xpath("*[@key='{}']".format(key),
                                   smart_strings=False)[0]
                 v_list = [
                     logging_tools.form_entry(key, header="key"),
                     logging_tools.form_entry_right(el.attrib["value"],
                                                    header="value"),
                     logging_tools.form_entry_right(el.attrib["base"],
                                                    header="base"),
                     logging_tools.form_entry(el.attrib["unit"],
                                              header="unit"),
                     logging_tools.form_entry(el.attrib["info"],
                                              header="info"),
                 ]
                 for l_key in IPMI_LIMITS:
                     x_key = "limit_{}".format(l_key)
                     v_list.append(
                         logging_tools.form_entry(el.attrib.get(x_key, "-"),
                                                  header=x_key))
                 out_list.append(v_list)
             return limits.mon_STATE_OK, "found {}:\n{}".format(
                 logging_tools.get_plural("IPMI sensor", len(keys)),
                 str(out_list))
     else:
         return limits.mon_STATE_WARNING, "no IPMI sensors found"
Example 15
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--port",
                        type=int,
                        default=1055,
                        help="license server [%(default)d]")
    parser.add_argument("--server",
                        type=str,
                        default="localhost",
                        help="license port [%(default)s]")
    parser.add_argument(
        "--license-file",
        type=str,
        default="",
        help=
        "to query multiple servers, format is {PORT@ADDR}:{PORT@ADDR} [%(default)s]"
    )
    parser.add_argument("--mode",
                        type=str,
                        default="xml",
                        choices=["xml", "check", "csv", "list"],
                        help="output mode [%(default)s]")
    parser.add_argument("--check-eval",
                        type=str,
                        default="true",
                        help="check string, should return true or false")
    opts = parser.parse_args()
    if opts.license_file:
        my_lc = sge_license_tools.LicenseCheck(license_file=opts.license_file)
    else:
        my_lc = sge_license_tools.LicenseCheck(
            server=opts.server,
            port=opts.port,
        )
    xml_res = my_lc.check()
    ret_code = 0
    if opts.mode == "xml":
        print(
            etree.tostring(xml_res, pretty_print=True,
                           encoding="utf-8").decode("utf-8"))
    elif opts.mode == "check":
        glob_dict = {}
        for cur_lic in xml_res.findall(".//license"):
            lic_name = cur_lic.attrib["name"]
            for attr_name in ["issued", "used", "free", "reserved"]:
                glob_dict["{}_{}".format(lic_name, attr_name)] = int(
                    cur_lic.attrib[attr_name])
        ret_val = eval(opts.check_eval, glob_dict)
        if not ret_val:
            ret_code = 1
    elif opts.mode == "csv":
        print(",".join(["name", "issued", "used", "free", "reserved"]))
        for cur_lic in xml_res.findall(".//license"):
            print(",".join([
                cur_lic.attrib["name"],
                cur_lic.attrib["issued"],
                cur_lic.attrib["used"],
                cur_lic.attrib["free"],
                cur_lic.attrib["reserved"],
            ]))
    elif opts.mode == "list":
        out_form = logging_tools.NewFormList()
        for cur_lic in xml_res.findall(".//license"):
            out_form.append([
                logging_tools.form_entry(cur_lic.attrib["name"],
                                         header="name"),
                logging_tools.form_entry_right(cur_lic.attrib["issued"],
                                               header="issued"),
                logging_tools.form_entry_right(cur_lic.attrib["used"],
                                               header="used"),
                logging_tools.form_entry_right(cur_lic.attrib["free"],
                                               header="free"),
                logging_tools.form_entry_right(cur_lic.attrib["reserved"],
                                               header="reserved"),
            ])
        print(str(out_form))
    sys.exit(ret_code)
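The "check" mode above exposes one integer per "<license>_<attribute>" name and lets eval() decide the exit code. A small self-contained sketch of that pattern using the standard-library ElementTree; the license entry and the expression are invented for illustration, and as in the original the expression is eval()'d, so it must come from a trusted caller:

import xml.etree.ElementTree as ET

xml_res = ET.fromstring(
    '<licinfo><license name="matlab" issued="10" used="7" free="3" reserved="0"/></licinfo>'
)
glob_dict = {}
for cur_lic in xml_res.findall(".//license"):
    lic_name = cur_lic.attrib["name"]
    for attr_name in ["issued", "used", "free", "reserved"]:
        glob_dict["{}_{}".format(lic_name, attr_name)] = int(cur_lic.attrib[attr_name])
# this string plays the role of --check-eval
print(eval("matlab_free > 0 and matlab_used <= matlab_issued", glob_dict))  # True -> exit code 0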
Example no. 16
0
def device_syslog(opt_ns, cur_dev, j_logs):
    print(
        "Information about device '{}' (full name {}, devicegroup {})".format(
            str(cur_dev), str(cur_dev.full_name), str(cur_dev.device_group)))
    print("UUID is '{}', database-ID is {:d}".format(cur_dev.uuid, cur_dev.pk))
    _cr = routing.SrvTypeRouting(force=True, ignore_errors=True)
    _ST = "logcheck-server"
    if _ST in _cr.service_types:
        _inst_xml = InstanceXML(quiet=True)
        # get logcheck-server IP
        _ls_ip = _cr[_ST][0][1]
        # get logcheck-server Port
        _ls_port = _inst_xml.get_port_dict(_ST, ptype="command")
        _sc = server_command.srv_command(command="get_syslog")
        _sc["devices"] = _sc.builder(
            "devices", *[
                _sc.builder(
                    "device",
                    pk="{:d}".format(cur_dev.pk),
                    lines="{:d}".format(opt_ns.loglines),
                    minutes="{:d}".format(opt_ns.minutes),
                )
            ])
        _conn_str = "tcp://{}:{:d}".format(_ls_ip, _ls_port)
        _result = net_tools.ZMQConnection("icsw_state_{:d}".format(
            os.getpid())).add_connection(
                _conn_str,
                _sc,
            )
        if _result is not None:
            _dev = _result.xpath(".//ns:devices/ns:device[@pk]")[0]
            _lines = _result.xpath("ns:lines", start_el=_dev)[0]
            _rates = _result.xpath("ns:rates", start_el=_dev)
            if _rates:
                _rates = {
                    int(_el.get("timeframe")): float(_el.get("rate"))
                    for _el in _rates[0]
                }
                print("rate info: {}".format(", ".join([
                    "{:.2f} lines/sec in {}".format(
                        _rates[_seconds],
                        logging_tools.get_diff_time_str(_seconds))
                    for _seconds in sorted(_rates)
                ])))
            else:
                print("no rate info found")
                print(_rates)
            _out_lines = logging_tools.NewFormList()
            for _entry in server_command.decompress(_lines.text, json=True):
                _out_lines.append([
                    logging_tools.form_entry(_entry["line_id"], header="idx"),
                    logging_tools.form_entry(
                        "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}".format(
                            *_entry["line_datetime_parsed"]),
                        header="Timestamp",
                    ),
                ] + [
                    logging_tools.form_entry(_entry[_key], header=_key)
                    for _key in ["hostname", "priority", "facility", "tag"]
                ] + [
                    logging_tools.form_entry(_entry["text"], header="text"),
                ])
            print(str(_out_lines))
        else:
            print("got no result from {} ({})".format(_conn_str, _ST))
    else:
        print("No logcheck-server found, skipping syslog display")
Example no. 17
0
def show_overview(local_mc, valid_names):
    mod_list = logging_tools.NewFormList()
    cmd_list = logging_tools.NewFormList()
    # iterate over modules
    for _idx, mod in enumerate(local_mc.module_list, 1):
        # commands of this module that are in the valid set
        local_valid_names = sorted(
            name for name in valid_names
            if local_mc.command_dict[name].module == mod
        )
        # show module overview
        mod_list.append(
            [
                logging_tools.form_entry_right(_idx, header="#"),
                logging_tools.form_entry(mod.name, header="Module name"),
                logging_tools.form_entry(mod.Meta.uuid, header="uuid"),
                logging_tools.form_entry(mod.checksum, header="Checksum"),
                logging_tools.form_entry_center(mod.Meta.required_access.name, header="Access"),
                logging_tools.form_entry_center(
                    ",".join(
                        [
                            _platform.name for _platform in mod.Meta.required_platform
                        ]
                    ),
                    header="Platform",
                ),
                logging_tools.form_entry_right(mod.Meta.priority, header="priority"),
                logging_tools.form_entry_right(
                    "yes" if hasattr(mod, "init_machine_vector") else "no",
                    header="MachineVector"
                ),
                logging_tools.form_entry_right(len(local_valid_names), header="#coms"),
                logging_tools.form_entry(", ".join(local_valid_names), header="commands"),
            ]
        )
    # iterate over commands
    for _idx, cmd_name in enumerate(sorted(local_mc.command_dict.keys()), 1):
        cmd = local_mc[cmd_name]
        # print(cmd)
        # print(inspect.getsource(cmd.__class__))
        cmd_list.append(
            [
                logging_tools.form_entry_right(_idx, header="#"),
                logging_tools.form_entry(cmd_name, header="Name"),
                logging_tools.form_entry(cmd.module.name, header="Module name"),
                logging_tools.form_entry(cmd.Meta.uuid, header="uuid"),
                logging_tools.form_entry(cmd.checksum, header="Checksum"),
                logging_tools.form_entry(cmd.Meta.check_instance.name, header="Server"),
                logging_tools.form_entry_center(cmd.Meta.required_access.name, header="Access"),
                logging_tools.form_entry_center(
                    ",".join(
                        [
                            _platform.name for _platform in cmd.Meta.required_platform
                        ]
                    ),
                    header="Platform",
                ),
                logging_tools.form_entry_center(
                    "yes" if cmd.Meta.has_perfdata else "no",
                    header="perfdata",
                ),
                logging_tools.form_entry_center(
                    "yes" if cmd.Meta.create_mon_check_command else "no",
                    header="create MCC",
                ),
                logging_tools.form_entry(
                    ", ".join(
                        cmd.Meta.alternate_names
                    ) if cmd.Meta.alternate_names else "---",
                    header="Alternate names",
                ),
                logging_tools.form_entry(
                    cmd.Meta.ports.get_port_spec(),
                    header="PortSpec",
                ),
                logging_tools.form_entry(
                    cmd.Meta.description,
                    header="description",
                ),
            ]
        )
    print("\nModule overview:\n{}".format(str(mod_list)))
    print("\nCommand overview:\n{}".format(str(cmd_list)))
Example no. 18
0
def main(args):
    ignore_re = re.compile(args.ignore)
    coffefiles = []
    htmlfiles = []
    for root, dirs, files in os.walk(args.path, topdown=False):
        coffefiles.extend([
            os.path.join(root, f) for f in files
            if f.endswith("coffee") and not ignore_re.search(f)
        ])
        htmlfiles.extend([
            os.path.join(root, f) for f in files
            if f.endswith("html") and not ignore_re.search(f)
        ])

    print("{:d} Coffee and {:d} HTML files".format(len(coffefiles),
                                                   len(htmlfiles)))

    def_matcher = re.compile(
        r".*\.(?P<type>(directive|service|controller|factory))\((\'|\")(?P<name>(.*?))(\'|\").*"
    )
    html_matcher = re.compile(
        r".*script type=.text/ng-template. id=(\'|\")(?P<name>.*)(\'|\").")

    my_sink = DataSink(args.path)

    print("Getting defs...")

    # get definitions

    for name in coffefiles:
        # open in text mode: the regexes below use str patterns
        for line_num, line in enumerate(open(name, "r"), 1):
            match = def_matcher.match(line)
            if match:
                _gd = match.groupdict()
                my_sink.feed(name, line_num, line, _gd["type"], _gd["name"])
    print("done (found {:d})".format(len(my_sink._defs)))

    # find refs in HTML to services

    dir_defs = my_sink.get_type_defs("directive") + my_sink.get_type_defs(
        "controller")
    dir_dict = {}
    for _def in dir_defs:
        dir_dict[_def.camel_name] = _def
        dir_dict[_def.hyphen_name] = _def
    dir_matcher = set(dir_dict.keys())

    _refs = 0
    s_time = time.time()
    for name in htmlfiles:
        for line_num, line in enumerate(open(name, "r"), 1):
            match = html_matcher.match(line)
            if match:
                _gd = match.groupdict()
                my_sink.feed(name, line_num, line, "html", _gd["name"])
            else:
                # print line
                _add_dict = {}
                for word in re.split("([^a-zA-Z\-])+", line):
                    if word in dir_matcher:
                        # skip lines with only closing tags
                        if "</{}".format(word) in line and "<{}".format(
                                word) not in line:
                            continue
                        # only one match per line
                        _add_dict[word] = True
                for word in _add_dict.keys():
                    dir_dict[word].add_reference(name, line_num, line)
                    _refs += 1
    e_time = time.time()
    print("Reference from HTML to directive took {} (found: {:d})".format(
        logging_tools.get_diff_time_str(e_time - s_time),
        _refs,
    ))

    # find refs to Services and Factories in coffee
    sf_refs = my_sink.get_type_defs("factory") + my_sink.get_type_defs(
        "service")
    sf_dict = {_sf.camel_name: _sf for _sf in sf_refs}
    sf_matcher = set(sf_dict.keys())
    # also find refs to html templates in coffee
    html_ref_re = re.compile(
        r".*(template|templateUrl)\s*:\s*.*(\'|\")(?P<temp_name>.*?)(\'|\").*")
    html_dict = {
        _html.hyphen_name: _html
        for _html in my_sink.get_type_defs("html")
    }
    html_matcher = set(html_dict.keys())
    # print html_matcher
    _refs = 0
    s_time = time.time()
    for name in coffefiles:
        for line_num, line in enumerate(open(name, "r"), 1):
            # print line
            for word in re.split("([^a-zA-Z])+", line):
                if word in sf_matcher:
                    # check if reference is by literal
                    if "'{}'".format(word) in line or "\"{}\"".format(
                            word) in line:
                        sf_dict[word].add_reference(name, line_num, line)
                        _refs += 1
            _html_match = html_ref_re.match(line)
            if _html_match:
                _temp_ref = _html_match.groupdict()["temp_name"]
                if _temp_ref in html_matcher:
                    html_dict[_temp_ref].add_reference(name, line_num, line)
    e_time = time.time()
    print("Reference from coffee to service / factory took {} (found: {:d})".
          format(
              logging_tools.get_diff_time_str(e_time - s_time),
              _refs,
          ))

    # generate output
    # raw list
    _list = sum(
        [my_sink.get_type_defs(_type) for _type in my_sink.get_types()], [])

    # filter
    if args.ignore_valid:
        _list = [entry for entry in _list if not entry.is_valid]

    name_re = re.compile(args.filter, re.IGNORECASE)
    _list = [entry for entry in _list if name_re.search(entry.name)]

    if _list:
        print("{} in result list:".format(
            logging_tools.get_plural("entry", len(_list)), ))

        if args.order_by == "name":
            _list = sorted(_list, key=attrgetter("name"))
        if args.order_by == "toplevel":
            _list = sorted(_list, key=attrgetter("top_level_dir"))
        out_list = logging_tools.NewFormList()

        files_referenced = {}
        for _def in _list:
            files_referenced.setdefault(_def.file_name, []).append(_def)
            out_list.append([
                logging_tools.form_entry(_def.type, header="Type"),
                logging_tools.form_entry(_def.name, header="Name"),
                logging_tools.form_entry(_def.file_name, header="File"),
                logging_tools.form_entry_right(_def.line_num, header="line"),
                logging_tools.form_entry_right(len(_def.refs), header="#refs"),
                logging_tools.form_entry_center(
                    "yes" if _def.namespace_ok else "no", header="NS ok"),
                logging_tools.form_entry_center(
                    "yes" if _def.name_valid else "no", header="valid"),
            ])
            if args.show_refs:
                out_list.extend(_def.get_ref_list())
                _def.add_file_refs(files_referenced)
        print(str(out_list))

        if args.show_refs and files_referenced:
            print()
            print("Referenced files:")
            print()
            pprint.pprint(files_referenced)
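The def_matcher regular expression is the heart of the definition scan above: it pulls the AngularJS component type and name out of a CoffeeScript line. A quick self-contained check against a fabricated line (module and service names are invented; the pattern is written as a raw string so the backslashes stay literal):

import re

def_matcher = re.compile(
    r".*\.(?P<type>(directive|service|controller|factory))\((\'|\")(?P<name>(.*?))(\'|\").*"
)
sample = "angular.module('icsw.tools').service('icswSimpleAjaxCall', ['$q', ($q) ->])"
match = def_matcher.match(sample)
if match:
    print(match.groupdict()["type"], match.groupdict()["name"])  # service icswSimpleAjaxCall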
Example no. 19
0
    def _recv_command(self, zmq_sock):
        body = zmq_sock.recv().decode()
        if zmq_sock.getsockopt(zmq.RCVMORE):
            _src_id = body
            # the second frame carries the actual payload; decode it like the first
            body = zmq_sock.recv().decode()
        parameter_ok = False
        xml_input = body.startswith("<")
        if global_config["DEBUG_LEVEL"] > 3:
            self.log("received {:d} bytes, xml_input is {}".format(len(body), str(xml_input)))
        if xml_input:
            srv_com = server_command.srv_command(source=body)
            srv_com.set_result("no reply set", server_command.SRV_REPLY_STATE_UNSET)
            try:
                host = srv_com.xpath(".//ns:host", smart_strings=False)[0].text
                snmp_version = int(srv_com.xpath(".//ns:snmp_version", smart_strings=False)[0].text)
                snmp_community = srv_com.xpath(".//ns:snmp_community", smart_strings=False)[0].text
                comline = srv_com.xpath(".//ns:command", smart_strings=False)[0].text
                timeout = int(srv_com.get(".//ns:timeout", "10"))
            except:
                self._send_return(body, limits.mon_STATE_CRITICAL, "message format error: {}".format(process_tools.get_except_info()))
            else:
                envelope = srv_com["identity"].text
                parameter_ok = True
                if len(srv_com.xpath(".//ns:arg_list/text()", smart_strings=False)):
                    comline = " ".join([comline] + srv_com.xpath(".//ns:arg_list/text()", smart_strings=False)[0].strip().split())
        else:
            srv_com = None
            if body.count(";") >= 3:
                if body.startswith(";"):
                    # new format
                    proto_version, body = body[1:].split(";", 1)
                else:
                    proto_version, body = ("0", body)
                proto_version = int(proto_version)
                if proto_version == 0:
                    parts = body.split(";", 4)
                    parts.insert(4, "10")
                else:
                    parts = body.split(";", 5)
                envelope = parts.pop(0)
                # parse new format
                if parts[4].endswith(";"):
                    com_part = parts[4][:-1]
                else:
                    com_part = parts[4]  # no trailing ";" to strip
                # iterative parser
                try:
                    arg_list = []
                    while com_part.count(";"):
                        cur_size, cur_str = com_part.split(";", 1)
                        cur_size = int(cur_size)
                        com_part = cur_str[cur_size + 1:]
                        arg_list.append(cur_str[:cur_size])  # body was already decoded to str
                    if com_part:
                        raise ValueError("not fully parsed ({})".format(com_part))
                except:
                    self.log("error parsing {}".format(body), logging_tools.LOG_LEVEL_ERROR)
                    arg_list = []
                host, snmp_version, snmp_community, timeout = parts[0:4]
                timeout = int(timeout)
                comline = " ".join(arg_list)
                # print host, snmp_version, snmp_community, timeout, arg_list
                parameter_ok = True
                # envelope, host, snmp_version, snmp_community, comline = body.split(";", 4)
        if parameter_ok:
            try:
                snmp_version = int(snmp_version)
                comline_split = comline.split()
                scheme = comline_split.pop(0)
            except:
                self._send_return(envelope, limits.mon_STATE_CRITICAL, "message format error: {}".format(process_tools.get_except_info()))
            else:
                self.__ret_dict[envelope] = time.time()
                if scheme in self.__local_schemes:
                    act_scheme, s_type = (self.__local_schemes[scheme], "L")
                elif scheme in self.__gen_schemes:
                    act_handler, s_type = (self.__gen_schemes[scheme], "G")
                elif scheme == "list_schemes":
                    _out_list = logging_tools.NewFormList()
                    for _s_name in sorted(self.__local_schemes):
                        try:
                            _scheme = self.__local_schemes[_s_name](dummy_init=True, options="")
                        except:
                            pass
                        else:
                            _out_list.append(
                                [
                                    logging_tools.form_entry("local", header="type"),
                                    logging_tools.form_entry(_s_name, header="name"),
                                    logging_tools.form_entry(len(_scheme.requests), header="#reqs"),
                                    logging_tools.form_entry(", ".join(sorted([repr(_req) for _req in _scheme.requests])), header="requests"),
                                ]
                            )
                    for _s_name in sorted(self.__gen_schemes):
                        _scheme = self.__gen_schemes[_s_name]
                        _out_list.append(
                            [
                                logging_tools.form_entry("generic", header="type"),
                                logging_tools.form_entry(_s_name, header="name"),
                            ]
                        )
                    self._send_return(envelope, limits.mon_STATE_OK, str(_out_list))
                    s_type = None
                else:
                    guess_list = ", ".join(difflib.get_close_matches(scheme, list(self.__local_schemes.keys()) + list(self.__gen_schemes.keys())))
                    err_str = "got unknown scheme '{}'{}".format(
                        scheme,
                        ", maybe one of {}".format(guess_list) if guess_list else ", no similar scheme found"
                    )
                    self._send_return(envelope, limits.mon_STATE_CRITICAL, err_str)
                    s_type = None
                if s_type:
                    host = self._resolve_address(host)
                    host_obj = self._get_host_object(host, snmp_community, snmp_version)
                    if global_config["DEBUG_LEVEL"]:
                        self.log(
                            "got request for scheme {} (host {}, community {}, version {:d}, envelope  {}, timeout {:d})".format(
                                scheme,
                                host,
                                snmp_community,
                                snmp_version,
                                envelope,
                                timeout,
                            )
                        )
                    try:
                        if s_type == "L":
                            act_scheme = act_scheme(
                                net_obj=host_obj,
                                envelope=envelope,
                                options=comline_split,
                                xml_input=xml_input,
                                srv_com=srv_com,
                                init_time=time.time(),
                                timeout=timeout,
                            )
                        else:
                            act_scheme = SNMPGeneralScheme(
                                net_obj=host_obj,
                                envelope=envelope,
                                options=comline_split,
                                xml_input=xml_input,
                                srv_com=srv_com,
                                init_time=time.time(),
                                timeout=timeout,
                                handler=act_handler,
                            )
                    except IOError:
                        err_str = "error while creating scheme {}: {}".format(
                            scheme,
                            process_tools.get_except_info()
                        )
                        self._send_return(envelope, limits.mon_STATE_CRITICAL, err_str)
                    else:
                        if act_scheme.get_errors():
                            err_str = "problem in creating scheme {}: {}".format(
                                scheme,
                                ", ".join(act_scheme.get_errors())
                            )
                            self._send_return(envelope, limits.mon_STATE_CRITICAL, err_str)
                        else:
                            self._start_snmp_fetch(act_scheme)

        elif not xml_input:
            self._send_return(envelope, limits.mon_STATE_CRITICAL, "message format error")
        self.__num_messages += 1
        if global_config["DEBUG_LEVEL"] > 3:
            self.log("recv() done")
        if not self.__num_messages % 100:
            cur_mem = process_tools.get_mem_info(self.CC.msi_block.get_unique_pids() if self.CC.msi_block else 0)
            self.log(
                "memory usage is {} after {}".format(
                    logging_tools.get_size_str(cur_mem),
                    logging_tools.get_plural("message", self.__num_messages)
                )
            )
        if not self.__num_messages % 50:
            # log process usage
            self.log(self.spc.get_usage())
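The non-XML branch of _recv_command parses a legacy wire format in which each argument appears to be length-prefixed ("<size>;<payload>;..."). That format is inferred from the parser above, not from a protocol document; a small round-trip sketch with invented argument values:

def pack_args(args):
    # encode each argument as "<length>;<payload>;"
    return "".join("{:d};{};".format(len(arg), arg) for arg in args)

def unpack_args(com_part):
    # mirror of the iterative parser in _recv_command
    arg_list = []
    while com_part.count(";"):
        cur_size, cur_str = com_part.split(";", 1)
        cur_size = int(cur_size)
        arg_list.append(cur_str[:cur_size])
        com_part = cur_str[cur_size + 1:]
    if com_part:
        raise ValueError("not fully parsed ({})".format(com_part))
    return arg_list

print(unpack_args(pack_args(["-C", "public", "192.168.1.1"])))  # ['-C', 'public', '192.168.1.1']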