def get_form_entry(self, idx, max_num_keys):
    """Return the list of form entries (one table row) for this value.

    idx: numerical index shown in the "idx" column.
    max_num_keys: number of key columns; the dotted name is split and
    padded with empty strings so every row has the same column count.
    """
    # pad the dotted name so every row has exactly max_num_keys key columns
    sub_keys = (self.name.split(".") + [""] * max_num_keys)[0:max_num_keys]
    act_line = []
    # BUGFIX: was "zip(xrange(max_num_keys), sub_keys)" - xrange is
    # Python-2 only; enumerate() is equivalent (sub_keys has exactly
    # max_num_keys elements) and matches the sibling get_form_entry()
    for key_idx, sub_key in enumerate(sub_keys):
        act_line.append(
            logging_tools.form_entry(
                "{}{}".format(
                    "" if (key_idx == 0 or sub_key == "") else ".",
                    sub_key
                ),
                header="key{:d}".format(key_idx)
            )
        )
    # check for unknown value
    if self.value is None:
        # unknown value
        act_pf, val_str = ("", "<unknown>")
    else:
        act_pf, val_str = self._get_val_str(self.value * self.factor)
    act_line.extend(
        [
            logging_tools.form_entry_right(val_str, header="value"),
            logging_tools.form_entry_right(act_pf, header=" "),
            logging_tools.form_entry(self.unit, header="unit"),
            logging_tools.form_entry("({:3d})".format(idx), header="idx"),
            logging_tools.form_entry(
                "{:d}".format(self.valid_until) if self.valid_until else "---",
                header="valid_until"
            ),
            logging_tools.form_entry(self._build_info_string(), header="info"),
        ]
    )
    return act_line
def get_info_line(self):
    """Return the form entries (one table row) for this license."""
    fe = logging_tools.form_entry
    fer = logging_tools.form_entry_right
    # expiry column: formatted date or placeholder
    expiry_str = self.expires.strftime(EXPIRY_DT) if self.expires else "---"
    return [
        fe(self.name, header="name"),
        fe(self.license_type, header="type"),
        fe("yes" if self.is_used else "no", header="for SGE"),
        fe("yes" if self.show else "no", header="show"),
        fer(self.total, header="total"),
        fer(self.reserved, header="reserved"),
        fer(self.limit, header="limit"),
        fer(self.used, header="used"),
        fer(self.total - self.limit, header="avail"),
        fer(self.sge_used, header="cluster"),
        fer(self.sge_used_requested, header="cluster(requested)"),
        fer(self.sge_used_issued, header="cluster(issued)"),
        fer(self.external_used, header="external"),
        fer(self.free, header="free"),
        fe(expiry_str, header="expires"),
    ]
def get_info_line(self):
    """Return the form entries (one table row) for this consumer."""
    consumer_count = self.get_num_consumers()
    # precompute the optional columns for readability
    if self.timeframe_secs:
        timeframe_str = logging_tools.get_diff_time_str(self.timeframe_secs)
    else:
        timeframe_str = "---"
    if consumer_count:
        mean_str = "{:.2f}".format(float(self.consumed) / float(consumer_count))
    else:
        mean_str = "-"
    return [
        logging_tools.form_entry(self.action, header="action"),
        logging_tools.form_entry(str(self.config_service_enum), header="ConfigService"),
        logging_tools.form_entry_right(self.multiplier, header="Weight"),
        logging_tools.form_entry_center("yes" if self.ghost else "no", header="Ghost"),
        logging_tools.form_entry_center(str(self.content_type), header="ContentType"),
        logging_tools.form_entry_center(self.license_id_name or "global", header="License"),
        logging_tools.form_entry_right(timeframe_str, header="timeframe"),
        logging_tools.form_entry_right(consumer_count, header="entries"),
        logging_tools.form_entry_right(self.consumed, header="consumed"),
        logging_tools.form_entry_right(mean_str, header="mean"),
    ]
def show(self, **options):
    """Print a table of all defined permissions.

    Keyword arguments are accepted for interface compatibility but unused.
    (Parameter name fixed from the original typo "opions".)
    """
    present_perms = self._get_perms()
    out_list = logging_tools.new_form_list()
    for perm in present_perms:
        out_list.append(
            [
                logging_tools.form_entry(perm.content_type.app_label, header="App Label"),
                logging_tools.form_entry(perm.content_type.model, header="Model"),
                logging_tools.form_entry(perm.codename, header="Code"),
                logging_tools.form_entry(perm.name, header="Info"),
                # "G/O": valid globally and on object level, "G": global only
                logging_tools.form_entry_center(
                    "G/O" if perm.valid_for_object_level else "G",
                    header="Scope"
                ),
                logging_tools.form_entry(
                    perm.created.strftime("%Y-%m-%d %H:%M:%S"),
                    header="Created"
                ),
                logging_tools.form_entry_right(
                    perm.rolepermission_set.all().count(),
                    header="RolePerms"
                ),
                logging_tools.form_entry_right(
                    perm.csw_object_permission_set.all().count(),
                    header="RoleObjPerms"
                ),
            ]
        )
    print(
        "{} defined:".format(
            logging_tools.get_plural("Permission", len(out_list)),
        )
    )
    # BUGFIX: was a Python-2 print *statement* ("print unicode(out_list)"),
    # a syntax error under Python 3 and inconsistent with the print()
    # calls used everywhere else in this file
    print(unicode(out_list))
def get_form_entry(self, max_num_keys):
    """Return the form entries (one table row) for this entry.

    max_num_keys: number of key columns; the dotted key is split and
    padded with empty strings so every row has the same column count.
    """
    fe = logging_tools.form_entry
    row = [
        fe(self.wm_type.value.name, header="Type"),
        logging_tools.form_entry_center(
            "yes" if self.wm_type.value.has_sub_values else "no",
            header="SubValue"
        ),
        fe(self.db_idx, header="db_idx"),
        fe(self.spec, header="Spec"),
    ]
    # split the dotted key and pad to exactly max_num_keys columns
    padded_keys = (self.key.split(".") + [""] * max_num_keys)[:max_num_keys]
    for col_idx, key_part in enumerate(padded_keys):
        # prefix non-first, non-empty parts with the separating dot
        dot = "." if (col_idx and key_part) else ""
        row.append(
            fe("{}{}".format(dot, key_part), header="key{:d}".format(col_idx))
        )
    # unknown value -> placeholder, otherwise format via helper
    if self.value is None:
        prefix_str, value_str = "", "<unknown>"
    else:
        prefix_str, value_str = self._get_val_str(self.value)
    row.append(logging_tools.form_entry_right(value_str, header="value"))
    row.append(logging_tools.form_entry_right(prefix_str, header=" "))
    return row
def get_info(self, short=True):
    """Return LVM info from self.lv_dict.

    short=True: one-line summary string of all volume groups.
    short=False: a rendered form list with one row per VG and its LVs.
    """
    vg_names = sorted(self.lv_dict.get("vg", {}).keys())
    vg_info = {}
    for vg_name in vg_names:
        vg_stuff = self.lv_dict.get("vg", {})[vg_name]
        # NOTE: dropped the unused extent_size / extent_count locals
        vg_info[vg_name] = (
            self._get_size_str(vg_stuff["size"]),
            self._get_size_str(vg_stuff["free"])
        )
    lv_names = sorted(self.lv_dict.get("lv", {}).keys())
    lv_info = {}
    for lv_name in lv_names:
        lv_stuff = self.lv_dict.get("lv", {})[lv_name]
        vg_name = lv_stuff["vg_name"]
        lv_size = lv_stuff["size"]
        # attr[5] == "o" marks an open logical volume; was the
        # "cond and a or b" hack, now an explicit conditional expression
        lv_info.setdefault(vg_name, []).append(
            "{}{} ({})".format(
                lv_name,
                "[open]" if lv_stuff["attr"][5] == "o" else "",
                self._get_size_str(lv_size)
            )
        )
    if short:
        ret_info = []
        for vg_name in vg_names:
            ret_info.append(
                "{} ({}, {} free, {}: {})".format(
                    vg_name,
                    vg_info[vg_name][0],
                    vg_info[vg_name][1],
                    logging_tools.get_plural("LV", len(lv_info.get(vg_name, []))),
                    ", ".join(lv_info.get(vg_name, [])) or "NONE"
                )
            )
        return "{}: {}".format(
            logging_tools.get_plural("VG", len(ret_info)),
            "; ".join(ret_info)
        )
    else:
        ret_info = logging_tools.new_form_list()
        for vg_name in vg_names:
            ret_info.append([
                logging_tools.form_entry("VG", header="type"),
                logging_tools.form_entry(vg_name, header="name"),
                logging_tools.form_entry_right(vg_info[vg_name][0], header="size"),
                logging_tools.form_entry_right(vg_info[vg_name][1], header="free"),
                logging_tools.form_entry("", header="options"),
            ])
            # list the LVs belonging to this VG directly below it
            for lv_name in lv_names:
                lv_stuff = self.lv_dict.get("lv", {})[lv_name]
                if lv_stuff["vg_name"] == vg_name:
                    ret_info.append([
                        logging_tools.form_entry(" LV", header="type"),
                        logging_tools.form_entry(lv_name, header="name"),
                        logging_tools.form_entry_right(
                            self._get_size_str(lv_stuff["size"]),
                            header="size"
                        ),
                        logging_tools.form_entry(""),
                        logging_tools.form_entry(lv_stuff["attr"]),
                    ])
        return unicode(ret_info)
def main():
    """CLI entry point: query the license server and print the result.

    Exits with code 0, or 1 when --mode check evaluates to a falsy value.
    """
    parser = argparse.ArgumentParser()
    # BUGFIX: the --port / --server help texts were swapped in the original
    parser.add_argument("--port", type=int, default=1055,
                        help="license port [%(default)d]")
    parser.add_argument("--server", type=str, default="localhost",
                        help="license server [%(default)s]")
    parser.add_argument("--mode", type=str, default="xml",
                        choices=["xml", "check", "csv", "list"],
                        help="output mode [%(default)s]")
    parser.add_argument("--check-eval", type=str, default="true",
                        help="check string, should return true or false")
    opts = parser.parse_args()
    my_lc = sge_license_tools.license_check(
        server=opts.server,
        port=opts.port,
    )
    xml_res = my_lc.check()
    ret_code = 0
    if opts.mode == "xml":
        print(etree.tostring(xml_res, pretty_print=True))  # @UndefinedVariable
    elif opts.mode == "check":
        # expose <name>_<attr> integers to the check expression
        glob_dict = {}
        for cur_lic in xml_res.findall(".//license"):
            lic_name = cur_lic.attrib["name"]
            for attr_name in ["issued", "used", "free", "reserved"]:
                glob_dict["{}_{}".format(lic_name, attr_name)] = int(cur_lic.attrib[attr_name])
        # SECURITY NOTE: eval() of an operator-supplied expression; this is
        # a local CLI tool, but never feed untrusted input to --check-eval
        ret_val = eval(opts.check_eval, glob_dict)
        if not ret_val:
            ret_code = 1
    elif opts.mode == "csv":
        print(",".join(["name", "issued", "used", "free", "reserved"]))
        for cur_lic in xml_res.findall(".//license"):
            print(
                ",".join(
                    [
                        cur_lic.attrib["name"],
                        cur_lic.attrib["issued"],
                        cur_lic.attrib["used"],
                        cur_lic.attrib["free"],
                        cur_lic.attrib["reserved"],
                    ]
                )
            )
    elif opts.mode == "list":
        out_form = logging_tools.new_form_list()
        for cur_lic in xml_res.findall(".//license"):
            out_form.append(
                [
                    logging_tools.form_entry(cur_lic.attrib["name"], header="name"),
                    logging_tools.form_entry_right(cur_lic.attrib["issued"], header="issued"),
                    logging_tools.form_entry_right(cur_lic.attrib["used"], header="used"),
                    logging_tools.form_entry_right(cur_lic.attrib["free"], header="free"),
                    logging_tools.form_entry_right(cur_lic.attrib["reserved"], header="reserved"),
                ]
            )
        print(unicode(out_form))
    sys.exit(ret_code)
def get_info_line(self):
    """Return the form entries (one table row) for this license state."""
    fe = logging_tools.form_entry
    fec = logging_tools.form_entry_center
    fer = logging_tools.form_entry_right
    return [
        fec("yes" if self.dummy else "no", header="Dummy"),
        fe(str(self.valid_from), header="valid from"),
        fe(str(self.valid_to), header="valid to"),
        fe(self.license_id_name or "global", header="License"),
        fec("yes" if self.is_valid else "no", header="valid"),
        fer(self.installed, header="installed"),
        fer(self.available, header="available"),
        fer(self.available_ghost, header="ghost"),
    ]
def get_info_line(self):
    """Return the form entries (one table row) for this consumer."""
    # timeframe column: human readable duration or placeholder
    if self.timeframe_secs:
        timeframe_str = logging_tools.get_diff_time_str(self.timeframe_secs)
    else:
        timeframe_str = "---"
    return [
        logging_tools.form_entry(self.action, header="action"),
        logging_tools.form_entry(unicode(self.config_service_enum), header="ConfigService"),
        logging_tools.form_entry_right(self.multiplier, header="Weight"),
        logging_tools.form_entry_center(unicode(self.content_type), header="ContentType"),
        logging_tools.form_entry_right(timeframe_str, header="timeframe"),
        logging_tools.form_entry_right(self.get_num_consumers(), header="entries"),
        logging_tools.form_entry_right(self.get_all_consumed(), header="consumed"),
    ]
def raw_license_info(opts):
    """List LicenseFile entries; optionally delete one and update validity.

    opts flags used: delete (idx to remove), only_valid, mark_error,
    unmark_all. Entries whose validity flag changed are saved afterwards.
    """
    if opts.delete:
        print("Deleting LicenseFile Entry from database with idx {:d}".format(
            opts.delete))
        try:
            License.objects.get(Q(idx=opts.delete)).delete()
        except License.DoesNotExist:
            # already gone, nothing to do
            pass
    out_list = logging_tools.new_form_list()
    _to_save = []
    _query = License.objects.all()
    if opts.only_valid:
        _query = _query.filter(Q(valid=True))
    for lic in _query:
        try:
            _info = License.objects.get_license_info(lic)
        except Exception:
            # BUGFIX: was a bare "except:" which also swallowed
            # KeyboardInterrupt / SystemExit; any parsing error marks
            # the entry as erroneous and shows the exception info
            _info = process_tools.get_except_info()
            _raw_info = None
            _error = True
        else:
            _raw_info = License.objects.get_raw_license_info(lic)
            _error = False
        # decide the new validity flag
        if _error:
            if opts.mark_error:
                _valid = False
            elif opts.unmark_all:
                _valid = True
            else:
                _valid = lic.valid
        else:
            _valid = True
        if lic.valid != _valid:
            lic.valid = _valid
            _to_save.append(lic)
        # todo, extract fingerprint info from raw_license_info
        # import pprint
        # pprint.pprint(_raw_info)
        out_list.append([
            logging_tools.form_entry(lic.file_name, header="Filename"),
            logging_tools.form_entry(lic.date.isoformat(), header="created"),
            logging_tools.form_entry_right(lic.idx, header="idx"),
            logging_tools.form_entry_center(
                "valid" if lic.valid else "invalid",
                header="validity"),
            logging_tools.form_entry_center("error" if _error else "ok",
                                            header="error"),
            logging_tools.form_entry(_info, header="Info"),
        ])
    print(str(out_list))
    if _to_save:
        print("")
        print("Updating LicenseFile states ({:d})".format(len(_to_save)))
        for lic_to_save in _to_save:
            lic_to_save.save(update_fields=["valid"])
        print("...done")
def show_cs_help(options):
    """List cluster-server commands, optionally filtered by substring args."""
    import initat.cluster_server.modules
    com_names = initat.cluster_server.modules.command_names
    # keep a command when any filter argument occurs in its name;
    # without filter arguments every command is shown
    if options.args:
        to_show = [
            name for name in com_names
            if any(arg in name for arg in options.args)
        ]
    else:
        to_show = [name for name in com_names]
    print(
        "cluster-server commands defined / to show : {:d} / {:d}".format(
            len(com_names),
            len(to_show),
        )
    )
    if to_show:
        out_list = logging_tools.NewFormList()
        # note: the table lists all commands, the filter only gates output
        for row_idx, com_name in enumerate(com_names, 1):
            com = initat.cluster_server.modules.command_dict[com_name]
            out_list.append(
                [
                    logging_tools.form_entry_right(row_idx, header="#"),
                    logging_tools.form_entry(com_name, header="Command"),
                    logging_tools.form_entry(
                        "yes" if com.Meta.disabled else "no",
                        header="disabled",
                    ),
                    logging_tools.form_entry(
                        ", ".join(
                            _cfg.name for _cfg in com.Meta.needed_configs
                        ) or "---",
                        header="configs"
                    ),
                    logging_tools.form_entry(
                        ", ".join(com.Meta.needed_option_keys) or "---",
                        header="options"
                    ),
                ]
            )
        print("\n{}".format(str(out_list)))
def do_info(cur_opts, log_com):
    """Print information about one user: quota settings, local quota and
    (optionally) user variables; can also delete a single user variable.

    Returns 0 on success, 1 on missing/unknown user or quota-call failure.
    """
    if not cur_opts.username:
        print("No user name given")
        return 1
    _user = _get_user(cur_opts.username)
    _ret_state = 0
    if _user is None:
        _ret_state = 1
    else:
        from initat.cluster.backbone.models import user_quota_setting, user_variable
        from django.db.models import Q
        print("")
        print(
            "User with loginname '{}' (user {}), uid={:d}, group={} (gid={:d})".format(
                _user.login,
                str(_user),
                _user.uid,
                str(_user.group),
                _user.group.gid,
            )
        )
        num_qs = _user.user_quota_setting_set.all().count()
        if num_qs and cur_opts.system_wide_quota:
            print("")
            print(
                "{} found:".format(
                    logging_tools.get_plural("system-wide quota setting", num_qs)
                )
            )
            for _qs in _user.user_quota_setting_set.all():
                _bd = _qs.quota_capable_blockdevice
                print(
                    " device {} ({} on {}): {}".format(
                        str(_bd.device.full_name),
                        _bd.block_device_path,
                        _bd.mount_path,
                        get_quota_str(_qs),
                    )
                )
        try:
            _cmd = "quota --show-mntpoint -wp -u {}".format(
                _user.login,
            )
            _res = subprocess.check_output(
                _cmd.split(),
                stderr=subprocess.STDOUT,
            ).decode("utf-8")
        except subprocess.CalledProcessError as sb_exc:
            # quota exits non-zero when quotas are exceeded; keep its output
            _res = sb_exc.output.decode("utf-8")
            _ret_state = 1
        except OSError as sb_exc:
            # quota command not found
            _res = "denied: {}".format(sb_exc)
            _ret_state = 1
        else:
            _ret_state = 0
        if _res.lower().count("denied"):
            print(" error getting local quotas for {}: {}".format(_user.login, _res))
        else:
            # quota -wp output: one 10-column line per filesystem
            _lines = [_line.strip().split() for _line in _res.split("\n") if _line.strip()]
            _lines = [_line for _line in _lines if len(_line) == 10]
            if _lines:
                print("", "local quota:", sep="\n")
                _line = _lines[-1]
                # a trailing "*" on the usage column marks a soft-limit violation
                _bytes_violate = _line[2].count("*") > 0
                _local = user_quota_setting(
                    bytes_used=int(_line[2].replace("*", "")) * 1024,
                    bytes_soft=int(_line[3]) * 1024,
                    bytes_hard=int(_line[4]) * 1024,
                    bytes_gracetime=int(_line[5]),
                )
                # NOTE(review): label reconstructed from a mangled source
                # line - confirm exact wording / layout of this output
                print(" local mountpoint: {}".format(get_quota_str(_local)))
        if cur_opts.delete_var:
            print("")
            try:
                _cv = user_variable.objects.get(Q(user=_user) & Q(idx=cur_opts.delete_var))
            except user_variable.DoesNotExist:
                print("Variable to delete does not exist")
            else:
                print("Deleting '{}'".format(str(_cv)))
                _cv.delete()
        if cur_opts.show_vars:
            out_list = logging_tools.NewFormList()
            for _var in _user.user_variable_set.all().order_by("name"):
                out_list.append(
                    [
                        logging_tools.form_entry(_var.idx, header="idx"),
                        logging_tools.form_entry(_var.name, header="name"),
                        logging_tools.form_entry(_var.var_type, header="type"),
                        logging_tools.form_entry_right(
                            _var.value if _var.var_type != "j" else "{:d} Bytes".format(len(_var.json_value)),
                            header="value"
                        ),
                        logging_tools.form_entry_center("yes" if _var.editable else "no", header="editable"),
                        logging_tools.form_entry_center("yes" if _var.hidden else "no", header="hidden"),
                        # BUGFIX: was "%H:%m:%S ..." - %m is the month
                        # directive, %M is minutes
                        logging_tools.form_entry(_var.date.strftime("%H:%M:%S %a, %d. %b %Y"), header="created"),
                        logging_tools.form_entry(_var.description, header="description"),
                    ]
                )
            print(str(out_list))
    return _ret_state
def instance_to_form_list(self, opt_ns, res_xml):
    # Build a form list (table) describing every service instance found in
    # the XML result res_xml. opt_ns toggles the optional columns
    # (process, started, config, memory, version, meta) and the
    # failed-only filter; returns the filled new_form_list.
    #
    # process state -> (label, display attribute)
    prc_dict = {
        SERVICE_OK: ("running", "ok"),
        SERVICE_DEAD: ("error", "critical"),
        SERVICE_INCOMPLETE: ("incomplete", "critical"),
        SERVICE_NOT_INSTALLED: ("not installed", "warning"),
        SERVICE_NOT_CONFIGURED: ("not configured", "warning"),
    }
    # configured state -> (label, display attribute)
    crc_dict = {
        CONF_STATE_RUN: ("run", "ok"),
        CONF_STATE_STOP: ("stop", "critical"),
        CONF_STATE_IP_MISMATCH: ("ip mismatch", "critical"),
    }
    # meta result mapping: "t" == target state, "i" == ignore flag
    meta_dict = {
        "t": {
            TARGET_STATE_RUNNING: ("run", "ok"),
            TARGET_STATE_STOPPED: ("stop", "critical"),
        },
        "i": {
            0: ("monitor", "ok"),
            1: ("ignore", "warning"),
        }
    }
    if License is not None:
        # license state -> (label, display attribute); -1 means "no info"
        lic_dict = {
            -1: ("-", ""),
            LicenseState.none: ("no license", "critical"),
            LicenseState.violated: ("parameter violated", "critical"),
            LicenseState.valid: ("valid", "ok"),
            LicenseState.grace: ("in grace", "warning"),
            LicenseState.expired: ("expired", "critical"),
            LicenseState.fp_mismatch: ("wrong fingerprint", "critical"),
            # LicenseState.ip_mismatch: ("ip mismatch", "critical"),
        }
    else:
        # License model not importable: License column shows "---"
        lic_dict = None
    out_bl = logging_tools.new_form_list()
    # all distinct runs_on values, sorted, then all instances grouped by type
    types = sorted(
        list(
            set(res_xml.xpath(".//instance/@runs_on", start_strings=False))))
    _list = sum([
        res_xml.xpath("instance[result and @runs_on='{}']".format(_type))
        for _type in types
    ], [])
    for act_struct in _list:
        _res = act_struct.find("result")
        p_state = int(
            act_struct.find(".//process_state_info").get(
                "state", SERVICE_DEAD))
        c_state = int(
            act_struct.find(".//configured_state_info").get(
                "state", CONF_STATE_STOP))
        # with --failed only non-running services are listed
        if not opt_ns.failed or (opt_ns.failed and p_state not in [SERVICE_OK]):
            cur_line = [
                logging_tools.form_entry(act_struct.attrib["name"], header="Name")
            ]
            cur_line.append(
                logging_tools.form_entry(act_struct.attrib["runs_on"],
                                         header="runson"))
            cur_line.append(
                logging_tools.form_entry(
                    _res.find("process_state_info").get(
                        "check_source", "N/A"),
                    header="source"))
            if opt_ns.process:
                s_info = act_struct.find(".//process_state_info")
                if "num_started" not in s_info.attrib:
                    # no per-process counters, show the plain text info
                    cur_line.append(logging_tools.form_entry(s_info.text))
                else:
                    num_diff, any_ok = (int(
                        s_info.get("num_diff")), True if int(
                            act_struct.attrib["any-processes-ok"]) else False)
                    # print etree.tostring(act_struct, pretty_print=True)
                    # NOTE(review): num_pids is computed but never used below
                    num_pids = len(_res.findall(".//pids/pid"))
                    # highlight missing (critical) or surplus (warning)
                    # processes unless any process count is acceptable
                    da_name = ""
                    if any_ok:
                        pass
                    else:
                        if num_diff < 0:
                            da_name = "critical"
                        elif num_diff > 0:
                            da_name = "warning"
                    cur_line.append(
                        logging_tools.form_entry(
                            s_info.attrib["proc_info_str"],
                            header="Process info",
                            display_attribute=da_name))
                    # pid -> occurrence count
                    pid_dict = {}
                    for cur_pid in act_struct.findall(".//pids/pid"):
                        pid_dict[int(cur_pid.text)] = int(
                            cur_pid.get("count", "1"))
                    if pid_dict:
                        p_list = sorted(pid_dict.keys())
                        if max(pid_dict.values()) == 1:
                            # all pids unique: compressed range display
                            cur_line.append(
                                logging_tools.form_entry(
                                    logging_tools.compress_num_list(p_list),
                                    header="pids"))
                        else:
                            # show per-pid counts where > 1
                            cur_line.append(
                                logging_tools.form_entry(",".join([
                                    "{:d}{}".format(
                                        key,
                                        " ({:d})".format(pid_dict[key])
                                        if pid_dict[key] > 1 else "")
                                    for key in p_list
                                ]),
                                                         header="pids"))
                    else:
                        cur_line.append(
                            logging_tools.form_entry("no PIDs", header="pids"))
            if opt_ns.started:
                start_time = int(
                    act_struct.find(".//process_state_info").get(
                        "start_time", "0"))
                if start_time:
                    # NOTE(review): diff_days/hours/mins/secs are computed
                    # but unused; only the absolute start time is shown
                    diff_time = max(
                        0,
                        time.mktime(time.localtime()) - start_time)
                    diff_days = int(diff_time / (3600 * 24))
                    diff_hours = int(
                        (diff_time - 3600 * 24 * diff_days) / 3600)
                    diff_mins = int((diff_time - 3600 *
                                     (24 * diff_days + diff_hours)) / 60)
                    diff_secs = int(
                        diff_time - 60 *
                        (60 * (24 * diff_days + diff_hours) + diff_mins))
                    # format string rejoined from a mangled source line
                    ret_str = "{}".format(
                        time.strftime("%a, %d. %b %Y, %H:%M:%S",
                                      time.localtime(start_time)))
                else:
                    ret_str = "no start info found"
                cur_line.append(
                    logging_tools.form_entry(ret_str, header="started"))
            if opt_ns.config:
                cur_line.append(
                    logging_tools.form_entry(
                        act_struct.find(".//config_info").text,
                        header="config info"))
            if opt_ns.memory:
                cur_mem = act_struct.find(".//memory_info")
                if cur_mem is not None:
                    mem_str = process_tools.beautify_mem_info(
                        int(cur_mem.text))
                else:
                    # no pids hence no memory info
                    mem_str = ""
                cur_line.append(
                    logging_tools.form_entry_right(mem_str, header="Memory"))
            if opt_ns.version:
                if "version" in _res.attrib:
                    _version = _res.attrib["version"]
                else:
                    _version = ""
                cur_line.append(
                    logging_tools.form_entry_right(_version, header="Version"))
            _lic_info = _res.find("license_info")
            _lic_state = int(_lic_info.attrib["state"])
            if lic_dict is None:
                cur_line.append(
                    logging_tools.form_entry(
                        "---",
                        header="License",
                    ))
            else:
                cur_line.append(
                    logging_tools.form_entry(
                        lic_dict[_lic_state][0],
                        header="License",
                        display_attribute=lic_dict[_lic_state][1],
                    ))
            cur_line.append(
                logging_tools.form_entry(
                    prc_dict[p_state][0],
                    header="PState",
                    display_attribute=prc_dict[p_state][1],
                ))
            cur_line.append(
                logging_tools.form_entry(
                    crc_dict[c_state][0],
                    header="CState",
                    display_attribute=crc_dict[c_state][1],
                ))
            if opt_ns.meta:
                _meta_res = act_struct.find(".//meta_result")
                if _meta_res is not None:
                    t_state = int(_meta_res.get("target_state"))
                    ignore = int(_meta_res.get("ignore"))
                    cur_line.append(
                        logging_tools.form_entry(
                            meta_dict["t"][t_state][0],
                            header="TargetState",
                            display_attribute=meta_dict["t"][t_state][1],
                        ))
                    cur_line.append(
                        logging_tools.form_entry(
                            meta_dict["i"][ignore][0],
                            header="Ignore",
                            display_attribute=meta_dict["i"][ignore][1],
                        ))
                else:
                    # no meta result available: warn in both columns
                    cur_line.append(
                        logging_tools.form_entry(
                            "N/A",
                            header="TargetState",
                            display_attribute="warning",
                        ))
                    cur_line.append(
                        logging_tools.form_entry(
                            "N/A",
                            header="Ignore",
                            display_attribute="warning",
                        ))
            out_bl.append(cur_line)
    return out_bl
def interpret(self, srv_com, cur_ns):
    """Interpret an IPMI sensor-list reply.

    With cur_ns.arguments: check the first sensor against the configured
    limits and return (state, one-line info). Without arguments: return
    (OK, table of all sensors). Returns WARNING when no sensors are found.
    """
    l_dict = {}
    for key in IPMI_LONG_LIMITS:
        # missing or unparsable limits are treated as "no limit set";
        # BUGFIX: was a bare "except:" (also caught KeyboardInterrupt)
        try:
            l_dict[key] = float(getattr(cur_ns, key))
        except Exception:
            l_dict[key] = None
    s_list = srv_com.xpath(".//ns:sensor_list", smart_strings=False)
    if s_list:
        s_list = s_list[0]
        if cur_ns.arguments:
            el = s_list[0]
            cur_value = float(el.attrib["value"])
            ret_state = limits.mon_STATE_OK
            # (limit name, is-upper-bound, state on violation);
            # *n are the non-recoverable limits, hence critical
            for t_name, log, t_state in [
                ("lowern", False, limits.mon_STATE_CRITICAL),
                ("lowerc", False, limits.mon_STATE_CRITICAL),
                ("lowerw", False, limits.mon_STATE_WARNING),
                ("upperw", True, limits.mon_STATE_WARNING),
                ("upperc", True, limits.mon_STATE_CRITICAL),
                ("uppern", True, limits.mon_STATE_CRITICAL),
            ]:
                if l_dict[t_name] is not None:
                    if (log and cur_value >= l_dict[t_name]) or (
                            not log and cur_value <= l_dict[t_name]):
                        ret_state = max(ret_state, t_state)
            return ret_state, "{}: {} is {:.2f} {}".format(
                el.attrib["key"],
                el.attrib["info"],
                cur_value,
                el.attrib["unit"],
            )
        else:
            # list mode: one table row per sensor key
            keys = s_list.xpath(".//@key", smart_strings=False)
            out_list = logging_tools.new_form_list()
            for key in keys:
                el = s_list.xpath("*[@key='{}']".format(key),
                                  smart_strings=False)[0]
                v_list = [
                    logging_tools.form_entry(key, header="key"),
                    logging_tools.form_entry_right(el.attrib["value"],
                                                   header="value"),
                    logging_tools.form_entry_right(el.attrib["base"],
                                                   header="base"),
                    logging_tools.form_entry(el.attrib["unit"], header="unit"),
                    logging_tools.form_entry(el.attrib["info"], header="info"),
                ]
                for l_key in IPMI_LIMITS:
                    x_key = "limit_{}".format(l_key)
                    v_list.append(
                        logging_tools.form_entry(el.attrib.get(x_key, "-"),
                                                 header=x_key))
                out_list.append(v_list)
            return limits.mon_STATE_OK, "found {}:\n{}".format(
                logging_tools.get_plural("IPMI sensor", len(keys)),
                unicode(out_list))
    else:
        return limits.mon_STATE_WARNING, "no IPMI sensors found"
def show_overview(local_mc, valid_names):
    """Print module and command overview tables for the given module cache."""
    module_table = logging_tools.NewFormList()
    command_table = logging_tools.NewFormList()
    # one row per module
    for mod_idx, module in enumerate(local_mc.module_list, 1):
        # commands of this module, restricted to the valid names, sorted
        commands_of_module = sorted(
            name for name in valid_names
            if local_mc.command_dict[name].module == module
        )
        module_table.append(
            [
                logging_tools.form_entry_right(mod_idx, header="#"),
                logging_tools.form_entry(module.name, header="Module name"),
                logging_tools.form_entry(module.Meta.uuid, header="uuid"),
                logging_tools.form_entry(module.checksum, header="Checksum"),
                logging_tools.form_entry_center(module.Meta.required_access.name, header="Access"),
                logging_tools.form_entry_center(
                    ",".join(
                        _platform.name for _platform in module.Meta.required_platform
                    ),
                    header="Platform",
                ),
                logging_tools.form_entry_right(module.Meta.priority, header="priority"),
                logging_tools.form_entry_right(
                    "yes" if hasattr(module, "init_machine_vector") else "no",
                    header="MachineVector"
                ),
                logging_tools.form_entry_right(len(commands_of_module), header="#coms"),
                logging_tools.form_entry(", ".join(commands_of_module), header="commands"),
            ]
        )
    # one row per command, sorted by name
    for cmd_idx, cmd_name in enumerate(sorted(local_mc.command_dict.keys()), 1):
        command = local_mc[cmd_name]
        command_table.append(
            [
                logging_tools.form_entry_right(cmd_idx, header="#"),
                logging_tools.form_entry(cmd_name, header="Name"),
                logging_tools.form_entry(command.module.name, header="Module name"),
                logging_tools.form_entry(command.Meta.uuid, header="uuid"),
                logging_tools.form_entry(command.checksum, header="Checksum"),
                logging_tools.form_entry(command.Meta.check_instance.name, header="Server"),
                logging_tools.form_entry_center(command.Meta.required_access.name, header="Access"),
                logging_tools.form_entry_center(
                    ",".join(
                        _platform.name for _platform in command.Meta.required_platform
                    ),
                    header="Platform",
                ),
                logging_tools.form_entry_center(
                    "yes" if command.Meta.has_perfdata else "no",
                    header="perfdata",
                ),
                logging_tools.form_entry_center(
                    "yes" if command.Meta.create_mon_check_command else "no",
                    header="create MCC",
                ),
                logging_tools.form_entry(
                    ", ".join(command.Meta.alternate_names) if command.Meta.alternate_names else "---",
                    header="Alternate names",
                ),
                logging_tools.form_entry(
                    command.Meta.ports.get_port_spec(),
                    header="PortSpec",
                ),
                logging_tools.form_entry(
                    command.Meta.description,
                    header="description",
                ),
            ]
        )
    print("\nModule overview:\n{}".format(str(module_table)))
    print("\nCommand overview:\n{}".format(str(command_table)))
def main(args):
    """Cross-reference AngularJS definitions (coffee) with their usages
    (coffee + HTML templates) below args.path and print an overview.

    args: path, ignore (filename regex), filter, ignore_valid, order_by,
    show_refs.
    """
    ignore_re = re.compile(args.ignore)
    coffefiles = []
    htmlfiles = []
    for root, dirs, files in os.walk(args.path, topdown=False):
        coffefiles.extend([
            os.path.join(root, f) for f in files
            if f.endswith("coffee") and not ignore_re.search(f)
        ])
        htmlfiles.extend([
            os.path.join(root, f) for f in files
            if f.endswith("html") and not ignore_re.search(f)
        ])
    print("{:d} Coffee and {:d} HTML files".format(len(coffefiles), len(htmlfiles)))
    # regex literals are now raw strings (same characters, no escape
    # ambiguity / DeprecationWarnings for \. and \-)
    def_matcher = re.compile(
        r".*\.(?P<type>(directive|service|controller|factory))\((\'|\")(?P<name>(.*?))(\'|\").*"
    )
    html_matcher = re.compile(
        r".*script type=.text/ng-template. id=(\'|\")(?P<name>.*)(\'|\").")
    my_sink = DataSink(args.path)
    print("Getting defs...")
    # pass 1: collect definitions from the coffee files
    for name in coffefiles:
        for line_num, line in enumerate(open(name, "rb"), 1):
            match = def_matcher.match(line)
            if match:
                _gd = match.groupdict()
                my_sink.feed(name, line_num, line, _gd["type"], _gd["name"])
    print("done (found {:d})".format(len(my_sink._defs)))
    # pass 2: references from HTML to directives / controllers
    dir_defs = my_sink.get_type_defs("directive") + my_sink.get_type_defs(
        "controller")
    dir_dict = {}
    for _def in dir_defs:
        # directives appear in HTML both camelCased and hyphenated
        dir_dict[_def.camel_name] = _def
        dir_dict[_def.hyphen_name] = _def
    dir_matcher = set(dir_dict.keys())
    _refs = 0
    s_time = time.time()
    for name in htmlfiles:
        for line_num, line in enumerate(open(name, "rb"), 1):
            match = html_matcher.match(line)
            if match:
                # inline ng-template definition
                _gd = match.groupdict()
                my_sink.feed(name, line_num, line, "html", _gd["name"])
            else:
                _add_dict = {}
                for word in re.split(r"([^a-zA-Z\-])+", line):
                    if word in dir_matcher:
                        # skip lines with only closing tags
                        if "</{}".format(word) in line and "<{}".format(
                                word) not in line:
                            continue
                        # only one match per line
                        _add_dict[word] = True
                for word in _add_dict.keys():
                    dir_dict[word].add_reference(name, line_num, line)
                    _refs += 1
    e_time = time.time()
    print("Reference from HTML to directive took {} (found: {:d})".format(
        logging_tools.get_diff_time_str(e_time - s_time),
        _refs,
    ))
    # pass 3: references to Services and Factories in coffee
    sf_refs = my_sink.get_type_defs("factory") + my_sink.get_type_defs(
        "service")
    sf_dict = {_sf.camel_name: _sf for _sf in sf_refs}
    sf_matcher = set(sf_dict.keys())
    # also find refs to html templates in coffee
    html_ref_re = re.compile(
        r".*(template|templateUrl)\s*:\s*.*(\'|\")(?P<temp_name>.*?)(\'|\").*")
    html_dict = {
        _html.hyphen_name: _html
        for _html in my_sink.get_type_defs("html")
    }
    # BUGFIX(naming): this set previously shadowed the html_matcher regex
    # defined above, which still had to stay usable in the HTML pass
    html_names = set(html_dict.keys())
    _refs = 0
    s_time = time.time()
    for name in coffefiles:
        for line_num, line in enumerate(open(name, "rb"), 1):
            for word in re.split(r"([^a-zA-Z])+", line):
                if word in sf_matcher:
                    # check if reference is by literal
                    if "'{}'".format(word) in line or "\"{}\"".format(
                            word) in line:
                        sf_dict[word].add_reference(name, line_num, line)
                        _refs += 1
            _html_match = html_ref_re.match(line)
            if _html_match:
                _temp_ref = _html_match.groupdict()["temp_name"]
                if _temp_ref in html_names:
                    html_dict[_temp_ref].add_reference(name, line_num, line)
    e_time = time.time()
    print("Reference from coffee to service / factory took {} (found: {:d})".format(
        logging_tools.get_diff_time_str(e_time - s_time),
        _refs,
    ))
    # generate output: raw list of all definitions, then filter
    _list = sum(
        [my_sink.get_type_defs(_type) for _type in my_sink.get_types()], [])
    if args.ignore_valid:
        _list = [entry for entry in _list if not entry.is_valid]
    name_re = re.compile(args.filter, re.IGNORECASE)
    _list = [entry for entry in _list if name_re.search(entry.name)]
    if _list:
        print("{} in result list:".format(
            logging_tools.get_plural("entry", len(_list)),
        ))
        if args.order_by == "name":
            _list = sorted(_list, key=attrgetter("name"))
        if args.order_by == "toplevel":
            _list = sorted(_list, key=attrgetter("top_level_dir"))
        out_list = logging_tools.NewFormList()
        files_referenced = {}
        for _def in _list:
            files_referenced.setdefault(_def.file_name, []).append(_def)
            out_list.append([
                logging_tools.form_entry(_def.type, header="Type"),
                logging_tools.form_entry(_def.name, header="Name"),
                logging_tools.form_entry(_def.file_name, header="File"),
                logging_tools.form_entry_right(_def.line_num, header="line"),
                logging_tools.form_entry_right(len(_def.refs), header="#refs"),
                logging_tools.form_entry_center(
                    "yes" if _def.namespace_ok else "no", header="NS ok"),
                logging_tools.form_entry_center(
                    "yes" if _def.name_valid else "no", header="valid"),
            ])
            if args.show_refs:
                out_list.extend(_def.get_ref_list())
                _def.add_file_refs(files_referenced)
        print(str(out_list))
        if args.show_refs and files_referenced:
            print()
            print("Referenced files:")
            print()
            pprint.pprint(files_referenced)
def interpret(self, srv_com, cur_ns):
    """Interpret a package-list reply (rpm or debian format).

    Returns (state, text): OK with a header line plus table when packages
    were found, CRITICAL otherwise.
    """
    r_dict = server_command.decompress(srv_com["pkg_list"].text, pickle=True)
    root_dir = srv_com["root_dir"].text
    in_format = srv_com["format"].text
    out_f = logging_tools.NewFormList()
    keys = sorted(r_dict.keys())
    header_line = "{} found, system is {} (root is {})".format(
        logging_tools.get_plural("package", len(keys)),
        in_format,
        root_dir,
    )
    if not keys:
        return limits.mon_STATE_CRITICAL, "{}, nothing found".format(
            header_line)
    if in_format == "rpm":
        for key in keys:
            for value in r_dict[key]:
                if isinstance(value, tuple):
                    # legacy tuple formats: the 4-tuple lacks the size field
                    if len(value) == 4:
                        ver, rel, arch, summary = value
                        size = 0
                    else:
                        ver, rel, arch, size, summary = value
                else:
                    ver, rel, arch, size, summary = (value["version"],
                                                     value["release"],
                                                     value["arch"],
                                                     value["size"],
                                                     value["summary"])
                out_f.append([
                    logging_tools.form_entry(key, header="name"),
                    logging_tools.form_entry_right(ver, header="version"),
                    logging_tools.form_entry(rel, header="release"),
                    logging_tools.form_entry(arch, header="arch"),
                    logging_tools.form_entry_right(size, header="size"),
                    logging_tools.form_entry(summary, header="summary"),
                ])
    elif in_format == "debian":
        for key in keys:
            for value in r_dict[key]:
                d_flag, s_flag, e_flag = value["flags"]
                ver, rel = (value["version"], value["release"])
                summary = value["summary"]
                out_f.append([
                    logging_tools.form_entry(key, header="name"),
                    logging_tools.form_entry_right(d_flag, header="d_flag"),
                    logging_tools.form_entry_right(s_flag, header="s_flag"),
                    logging_tools.form_entry_right(e_flag, header="e_flag"),
                    logging_tools.form_entry_right(ver, header="version"),
                    logging_tools.form_entry(rel, header="release"),
                    logging_tools.form_entry(summary, header="summary"),
                ])
                # BUGFIX: removed the stray "out_f.add_line((key, ...))"
                # call that followed the append(); it emitted the same row
                # a second time through a legacy API while every other
                # branch adds rows via append() only
    return limits.mon_STATE_OK, "{}\n{}".format(header_line, str(out_f))