def get_info_line(self):
    """Return the form entries describing this consumer for tabular output."""
    num_consumers = self.get_num_consumers()
    # mean consumption per entry, "-" when there are no entries
    if num_consumers:
        mean_str = "{:.2f}".format(float(self.consumed) / float(num_consumers))
    else:
        mean_str = "-"
    if self.timeframe_secs:
        tf_str = logging_tools.get_diff_time_str(self.timeframe_secs)
    else:
        tf_str = "---"
    return [
        logging_tools.form_entry(self.action, header="action"),
        logging_tools.form_entry(str(self.config_service_enum), header="ConfigService"),
        logging_tools.form_entry_right(self.multiplier, header="Weight"),
        logging_tools.form_entry_center("yes" if self.ghost else "no", header="Ghost"),
        logging_tools.form_entry_center(str(self.content_type), header="ContentType"),
        logging_tools.form_entry_center(self.license_id_name or "global", header="License"),
        logging_tools.form_entry_right(tf_str, header="timeframe"),
        logging_tools.form_entry_right(num_consumers, header="entries"),
        logging_tools.form_entry_right(self.consumed, header="consumed"),
        logging_tools.form_entry_right(mean_str, header="mean"),
    ]
def get_form_entry(self, idx, max_num_keys):
    """Build a list of form entries for this value.

    The dotted ``self.name`` is split over at most ``max_num_keys`` key
    columns (padded with empty strings); ``idx`` is only displayed in the
    "idx" column.  Unknown values render as ``<unknown>``.
    """
    act_line = []
    # pad the dotted name so every row has exactly max_num_keys key columns
    sub_keys = (self.name.split(".") + [""] * max_num_keys)[0:max_num_keys]
    # FIX: xrange is Python-2 only; enumerate matches the sibling
    # get_form_entry implementation which already uses range()
    for key_idx, sub_key in enumerate(sub_keys):
        act_line.append(
            logging_tools.form_entry(
                "{}{}".format(
                    "" if (key_idx == 0 or sub_key == "") else ".",
                    sub_key
                ),
                header="key{:d}".format(key_idx)
            )
        )
    if self.value is None:
        # unknown value
        act_pf, val_str = ("", "<unknown>")
    else:
        act_pf, val_str = self._get_val_str(self.value * self.factor)
    act_line.extend(
        [
            logging_tools.form_entry_right(val_str, header="value"),
            logging_tools.form_entry_right(act_pf, header=" "),
            logging_tools.form_entry(self.unit, header="unit"),
            logging_tools.form_entry("({:3d})".format(idx), header="idx"),
            logging_tools.form_entry(
                "{:d}".format(self.valid_until) if self.valid_until else "---",
                header="valid_until"
            ),
            logging_tools.form_entry(self._build_info_string(), header="info")
        ]
    )
    return act_line
def get_form_entry(self, max_num_keys):
    """Render this entry as one line of form entries.

    The dotted key is split over ``max_num_keys`` key columns, padding
    with empty strings when the key has fewer components.
    """
    line = [
        logging_tools.form_entry(self.wm_type.value.name, header="Type"),
        logging_tools.form_entry_center(
            "yes" if self.wm_type.value.has_sub_values else "no",
            header="SubValue"
        ),
        logging_tools.form_entry(self.db_idx, header="db_idx"),
        logging_tools.form_entry(self.spec, header="Spec"),
    ]
    padded_keys = (self.key.split(".") + [""] * max_num_keys)[:max_num_keys]
    for key_idx, sub_key in enumerate(padded_keys):
        # leading dot for every non-empty component after the first
        prefix = "" if (key_idx == 0 or sub_key == "") else "."
        line.append(
            logging_tools.form_entry(
                "{}{}".format(prefix, sub_key),
                header="key{:d}".format(key_idx)
            )
        )
    if self.value is None:
        # value not known (yet)
        pf_str, val_str = ("", "<unknown>")
    else:
        pf_str, val_str = self._get_val_str(self.value)
    line.extend(
        [
            logging_tools.form_entry_right(val_str, header="value"),
            logging_tools.form_entry_right(pf_str, header=" "),
        ]
    )
    return line
def show(self, **options):
    """Print a table of all defined permissions.

    Accepts (and ignores) arbitrary keyword arguments; the parameter was
    previously misspelled ``opions``.
    """
    present_perms = self._get_perms()
    out_list = logging_tools.new_form_list()
    for perm in present_perms:
        out_list.append([
            logging_tools.form_entry(perm.content_type.app_label, header="App Label"),
            logging_tools.form_entry(perm.content_type.model, header="Model"),
            logging_tools.form_entry(perm.codename, header="Code"),
            logging_tools.form_entry(perm.name, header="Info"),
            logging_tools.form_entry_center(
                "G/O" if perm.valid_for_object_level else "G",
                header="Scope"),
            logging_tools.form_entry(
                perm.created.strftime("%Y-%m-%d %H:%M:%S"),
                header="Created"),
            logging_tools.form_entry_right(
                perm.rolepermission_set.all().count(),
                header="RolePerms"),
            logging_tools.form_entry_right(
                perm.csw_object_permission_set.all().count(),
                header="RoleObjPerms"),
        ])
    print("{} defined:".format(
        logging_tools.get_plural("Permission", len(out_list)),
    ))
    # FIX: was the Python-2-only statement "print unicode(out_list)",
    # a SyntaxError under Python 3; the rest of the file uses print()/str()
    print(str(out_list))
def get_info_line(self):
    """Return one table row describing this license."""
    fe = logging_tools.form_entry
    fer = logging_tools.form_entry_right
    expiry_str = self.expires.strftime(EXPIRY_DT) if self.expires else "---"
    return [
        fe(self.name, header="name"),
        fe(self.license_type, header="type"),
        fe("yes" if self.is_used else "no", header="for SGE"),
        fe("yes" if self.show else "no", header="show"),
        fer(self.total, header="total"),
        fer(self.reserved, header="reserved"),
        fer(self.limit, header="limit"),
        fer(self.used, header="used"),
        # licenses still available for the cluster
        fer(self.total - self.limit, header="avail"),
        fer(self.sge_used, header="cluster"),
        fer(self.sge_used_requested, header="cluster(requested)"),
        fer(self.sge_used_issued, header="cluster(issued)"),
        fer(self.external_used, header="external"),
        fer(self.free, header="free"),
        fe(expiry_str, header="expires"),
    ]
def raw_license_info(opts):
    """Show raw LicenseFile database entries and update their validity flags.

    Honours ``opts.delete`` (remove entry by idx), ``opts.only_valid``
    (filter), ``opts.mark_error`` / ``opts.unmark_all`` (how erroneous
    licenses affect the valid flag).  Changed entries are saved at the end.
    """
    if opts.delete:
        print("Deleting LicenseFile Entry from database with idx {:d}".format(
            opts.delete))
        try:
            License.objects.get(Q(idx=opts.delete)).delete()
        except License.DoesNotExist:
            # already gone, ignore
            pass
    out_list = logging_tools.new_form_list()
    _to_save = []
    _query = License.objects.all()
    if opts.only_valid:
        _query = _query.filter(Q(valid=True))
    for lic in _query:
        try:
            _info = License.objects.get_license_info(lic)
        except Exception:
            # FIX: was a bare "except:" which also swallowed
            # KeyboardInterrupt / SystemExit
            _info = process_tools.get_except_info()
            _raw_info = None
            _error = True
        else:
            _raw_info = License.objects.get_raw_license_info(lic)
            _error = False
        if _error:
            if opts.mark_error:
                _valid = False
            elif opts.unmark_all:
                _valid = True
            else:
                # keep current state
                _valid = lic.valid
        else:
            _valid = True
        if lic.valid != _valid:
            lic.valid = _valid
            _to_save.append(lic)
        # todo, extract fingerprint info from raw_license_info
        out_list.append([
            logging_tools.form_entry(lic.file_name, header="Filename"),
            logging_tools.form_entry(lic.date.isoformat(), header="created"),
            logging_tools.form_entry_right(lic.idx, header="idx"),
            logging_tools.form_entry_center(
                "valid" if lic.valid else "invalid", header="validity"),
            logging_tools.form_entry_center("error" if _error else "ok",
                                            header="error"),
            logging_tools.form_entry(_info, header="Info"),
        ])
    print(str(out_list))
    if _to_save:
        print("")
        print("Updating LicenseFile states ({:d})".format(len(_to_save)))
        for lic_to_save in _to_save:
            lic_to_save.save(update_fields=["valid"])
        print("...done")
def list_images(opt_ns):
    """List all defined images with version, release and lock state."""
    images = image.objects.all().order_by("name")
    print("{} defined:".format(logging_tools.get_plural("image", len(images))))
    form_list = logging_tools.NewFormList()
    for cur_image in images:
        form_list.append(
            [
                logging_tools.form_entry(str(cur_image), header="info"),
                logging_tools.form_entry(cur_image.version, header="version"),
                logging_tools.form_entry(cur_image.release, header="release"),
                logging_tools.form_entry(
                    "yes" if cur_image.build_lock else "--",
                    header="locked"
                ),
            ]
        )
    print(str(form_list))
def sjs(s_info, opt_dict):
    """Show running and waiting SGE jobs.

    Returns the formatted text in interactive mode, prints it otherwise.
    The running/waiting handling was duplicated; it is now shared via
    ``_format_jobs``.
    """
    def _format_jobs(job_list, job_kind, left_justified):
        # build (table, summary line) for one job element list;
        # job_list carries a "total" attribute with the unfiltered count
        out_list = logging_tools.NewFormList()
        for job in job_list:
            out_list.append([
                logging_tools.form_entry(
                    cur_el.text,
                    header=cur_el.tag,
                    left=cur_el.tag in left_justified
                ) for cur_el in job
            ])
        total = int(job_list.get("total"))
        if len(job_list) == total:
            summary = "{}".format(logging_tools.get_plural(job_kind, len(job_list)))
        else:
            summary = "{}, showing only {:d} (due to filter)".format(
                logging_tools.get_plural(job_kind, total),
                len(job_list))
        return out_list, summary

    s_info.update()
    ret_list = [time.ctime()]
    s_info.build_luts()
    # running jobs
    run_list = sge_tools.build_running_list(s_info, opt_dict)
    r_out_list, r_summary = _format_jobs(
        run_list, "running job", {"id", "task", "nodelist"})
    ret_list.append(r_summary)
    if r_out_list:
        ret_list.append(str(r_out_list))
    # waiting jobs
    wait_list = sge_tools.build_waiting_list(s_info, opt_dict)
    w_out_list, w_summary = _format_jobs(
        wait_list, "waiting job", {"id", "task", "depends"})
    ret_list.append(w_summary)
    if w_out_list:
        ret_list.append(str(w_out_list))
    if opt_dict.interactive:
        return "\n".join(ret_list)
    else:
        print("\n".join(ret_list))
def get_info_line(self):
    """Return one table row describing this license pool."""
    dummy_str = "yes" if self.dummy else "no"
    valid_str = "yes" if self.is_valid else "no"
    return [
        logging_tools.form_entry_center(dummy_str, header="Dummy"),
        logging_tools.form_entry(str(self.valid_from), header="valid from"),
        logging_tools.form_entry(str(self.valid_to), header="valid to"),
        logging_tools.form_entry(self.license_id_name or "global", header="License"),
        logging_tools.form_entry_center(valid_str, header="valid"),
        logging_tools.form_entry_right(self.installed, header="installed"),
        logging_tools.form_entry_right(self.available, header="available"),
        logging_tools.form_entry_right(self.available_ghost, header="ghost"),
    ]
def sns(s_info, opt_dict):
    """Render the node/queue status table.

    Returns the text in interactive mode, prints it otherwise.
    """
    s_info.update()
    ret_list = [time.ctime()]
    s_info.build_luts()
    node_list = sge_tools.build_node_list(s_info, opt_dict)
    # columns rendered left-justified
    left_justified = {
        "host", "queue", "queues", "node", "seqno", "state", "type",
        "complex", "pe_list", "userlists", "projects", "jobs"
    }
    # short header names for the merged slot columns
    short_dict = {
        # one queue per line
        # "slot_info": "si",
        # for merged info
        "slots_used": "su",
        "slots_reserved": "sr",
        "slots_total": "st",
    }
    out_list = logging_tools.NewFormList()
    for cur_node in node_list:
        row = []
        for cur_el in cur_node:
            row.append(
                logging_tools.form_entry(
                    cur_el.text,
                    header=short_dict.get(cur_el.tag, cur_el.tag),
                    left=cur_el.tag in left_justified
                )
            )
        out_list.append(row)
    if out_list:
        ret_list.append(str(out_list))
    if opt_dict.interactive:
        return "\n".join(ret_list)
    else:
        print("\n".join(ret_list))
def get_info_line(self):
    """Return one table row for this consumer (short form)."""
    return [
        logging_tools.form_entry(self.action, header="action"),
        # FIX: str() instead of the Python-2-only unicode(); the sibling
        # get_info_line implementation already uses str()
        logging_tools.form_entry(str(self.config_service_enum),
                                 header="ConfigService"),
        logging_tools.form_entry_right(self.multiplier, header="Weight"),
        logging_tools.form_entry_center(str(self.content_type),
                                        header="ContentType"),
        logging_tools.form_entry_right(
            logging_tools.get_diff_time_str(self.timeframe_secs)
            if self.timeframe_secs else "---",
            header="timeframe",
        ),
        logging_tools.form_entry_right(self.get_num_consumers(), header="entries"),
        logging_tools.form_entry_right(self.get_all_consumed(), header="consumed"),
    ]
def show_cs_help(options):
    """List all cluster-server commands, optionally filtered by substrings."""
    import initat.cluster_server.modules
    com_names = initat.cluster_server.modules.command_names
    to_show = []
    for com_name in com_names:
        _show = True
        if options.args:
            # show the command when any given argument is a substring of
            # its name (was any([com_name.count(arg) ...]) — same result)
            _show = any(arg in com_name for arg in options.args)
        if _show:
            to_show.append(com_name)
    print(
        "cluster-server commands defined / to show : {:d} / {:d}".format(
            len(com_names),
            len(to_show),
        )
    )
    if to_show:
        out_list = logging_tools.NewFormList()
        # NOTE(review): this iterates com_names, not to_show, so the filter
        # only affects the count above, not the table — confirm intent
        for _idx, com_name in enumerate(com_names, 1):
            com = initat.cluster_server.modules.command_dict[com_name]
            out_list.append(
                [
                    logging_tools.form_entry_right(_idx, header="#"),
                    logging_tools.form_entry(com_name, header="Command"),
                    logging_tools.form_entry(
                        "yes" if com.Meta.disabled else "no",
                        header="disabled",
                    ),
                    logging_tools.form_entry(
                        ", ".join(
                            [
                                _cfg.name for _cfg in com.Meta.needed_configs
                            ]
                        ) or "---",
                        header="configs"
                    ),
                    logging_tools.form_entry(
                        ", ".join(com.Meta.needed_option_keys) or "---",
                        header="options"
                    ),
                ]
            )
        print("\n{}".format(str(out_list)))
def _log_limits(self):
    """Log all resource (rlimit) settings of the current process."""
    # read limits
    r_dict = {}
    try:
        import resource
    except ImportError:
        self.log("cannot import resource", logging_tools.LOG_LEVEL_CRITICAL)
    else:
        available_resources = [
            key for key in dir(resource) if key.startswith("RLIMIT")
        ]
        for av_r in available_resources:
            try:
                r_dict[av_r] = resource.getrlimit(getattr(resource, av_r))
            except ValueError:
                r_dict[av_r] = "invalid resource"
            except Exception:
                # FIX: was a bare "except:"; keep the best-effort behaviour
                # but do not swallow SystemExit / KeyboardInterrupt
                r_dict[av_r] = None
        if r_dict:
            res_keys = sorted(r_dict.keys())
            self.log("{} defined".format(
                logging_tools.get_plural("limit", len(res_keys))))
            res_list = logging_tools.NewFormList()
            for key in res_keys:
                val = r_dict[key]
                if isinstance(val, str):
                    info_str = val
                elif isinstance(val, tuple):
                    info_str = "{:8d} (hard), {:8d} (soft)".format(*val)
                else:
                    info_str = "None (error?)"
                res_list.append([
                    logging_tools.form_entry(key, header="key"),
                    logging_tools.form_entry(info_str, header="value")
                ])
            for line in str(res_list).split("\n"):
                self.log(line)
        else:
            self.log("no limits found, strange ...",
                     logging_tools.LOG_LEVEL_WARN)
def get_form_parts(self):
    """Return the form entries for this error record."""
    return [
        logging_tools.form_entry(self.__idx, header="idx"),
        logging_tools.form_entry(self.get_err_time_str(), header="time"),
        logging_tools.form_entry(self.__pid, header="PID"),
        logging_tools.form_entry(len(self.__lines), header="lines"),
        # "{:d} ({})" produces the same text as the previous "%d (%s)"
        # formatting; switched for consistency with the rest of the file
        logging_tools.form_entry(
            "{:d} ({})".format(self.__uid, self.__uname), header="user"),
        logging_tools.form_entry(
            "{:d} ({})".format(self.__gid, self.__gname), header="group"),
        logging_tools.form_entry(self.__source_name, header="source")
    ]
def get_info(self, short=True):
    """Return LVM info.

    With ``short=True`` a one-line summary string is returned, otherwise a
    rendered form list (one row per VG with its LVs indented below).
    """
    vg_names = sorted(self.lv_dict.get("vg", {}).keys())
    vg_info = {}
    for vg_name in vg_names:
        vg_stuff = self.lv_dict.get("vg", {})[vg_name]
        # extent_size / extent_count were read into unused locals; dropped
        vg_info[vg_name] = (
            self._get_size_str(vg_stuff["size"]),
            self._get_size_str(vg_stuff["free"])
        )
    lv_names = sorted(self.lv_dict.get("lv", {}).keys())
    lv_info = {}
    for lv_name in lv_names:
        lv_stuff = self.lv_dict.get("lv", {})[lv_name]
        vg_name = lv_stuff["vg_name"]
        lv_size = lv_stuff["size"]
        # attr[5] == "o" marks the LV as currently open
        lv_info.setdefault(vg_name, []).append("{}{} ({})".format(
            lv_name,
            lv_stuff["attr"][5] == "o" and "[open]" or "",
            self._get_size_str(lv_size)))
    if short:
        ret_info = []
        for vg_name in vg_names:
            ret_info.append("{} ({}, {} free, {}: {})".format(
                vg_name,
                vg_info[vg_name][0],
                vg_info[vg_name][1],
                logging_tools.get_plural("LV", len(lv_info.get(vg_name, []))),
                ", ".join(lv_info.get(vg_name, [])) or "NONE"))
        return "{}: {}".format(
            logging_tools.get_plural("VG", len(ret_info)),
            "; ".join(ret_info))
    else:
        ret_info = logging_tools.new_form_list()
        for vg_name in vg_names:
            ret_info.append([
                logging_tools.form_entry("VG", header="type"),
                logging_tools.form_entry(vg_name, header="name"),
                logging_tools.form_entry_right(vg_info[vg_name][0], header="size"),
                logging_tools.form_entry_right(vg_info[vg_name][1], header="free"),
                logging_tools.form_entry("", header="options"),
            ])
            for lv_name in lv_names:
                lv_stuff = self.lv_dict.get("lv", {})[lv_name]
                if lv_stuff["vg_name"] == vg_name:
                    ret_info.append([
                        logging_tools.form_entry("  LV", header="type"),
                        logging_tools.form_entry(lv_name, header="name"),
                        logging_tools.form_entry_right(
                            self._get_size_str(lv_stuff["size"]), header="size"),
                        logging_tools.form_entry(""),
                        logging_tools.form_entry(lv_stuff["attr"]),
                    ])
        # FIX: str() instead of the Python-2-only unicode()
        return str(ret_info)
def main():
    """Query a license server and emit the result as XML, CSV, a list or a check value."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--port", type=int, default=1055,
                        help="license server [%(default)d]")
    parser.add_argument("--server", type=str, default="localhost",
                        help="license port [%(default)s]")
    parser.add_argument("--mode", type=str, default="xml",
                        choices=["xml", "check", "csv", "list"],
                        help="output mode [%(default)s]")
    parser.add_argument("--check-eval", type=str, default="true",
                        help="check string, should return true or false")
    opts = parser.parse_args()
    my_lc = sge_license_tools.license_check(
        server=opts.server,
        port=opts.port,
    )
    xml_res = my_lc.check()
    ret_code = 0
    if opts.mode == "xml":
        print(etree.tostring(xml_res, pretty_print=True))  # @UndefinedVariable
    elif opts.mode == "check":
        glob_dict = {}
        for cur_lic in xml_res.findall(".//license"):
            lic_name = cur_lic.attrib["name"]
            for attr_name in ["issued", "used", "free", "reserved"]:
                glob_dict["{}_{}".format(lic_name, attr_name)] = int(
                    cur_lic.attrib[attr_name])
        # SECURITY: opts.check_eval is eval()ed with the license counters in
        # scope; this comes from the local command line — never feed it
        # untrusted input
        ret_val = eval(opts.check_eval, glob_dict)
        if not ret_val:
            ret_code = 1
    elif opts.mode == "csv":
        print(",".join(["name", "issued", "used", "free", "reserved"]))
        for cur_lic in xml_res.findall(".//license"):
            print(
                ",".join(
                    [
                        cur_lic.attrib["name"],
                        cur_lic.attrib["issued"],
                        cur_lic.attrib["used"],
                        cur_lic.attrib["free"],
                        cur_lic.attrib["reserved"],
                    ]
                )
            )
    elif opts.mode == "list":
        out_form = logging_tools.new_form_list()
        for cur_lic in xml_res.findall(".//license"):
            out_form.append(
                [
                    logging_tools.form_entry(cur_lic.attrib["name"], header="name"),
                    logging_tools.form_entry_right(cur_lic.attrib["issued"], header="issued"),
                    logging_tools.form_entry_right(cur_lic.attrib["used"], header="used"),
                    logging_tools.form_entry_right(cur_lic.attrib["free"], header="free"),
                    logging_tools.form_entry_right(cur_lic.attrib["reserved"], header="reserved"),
                ]
            )
        # FIX: str() instead of the Python-2-only unicode()
        print(str(out_form))
    sys.exit(ret_code)
def main(opts):
    """List or create DeviceLogEntry records depending on ``opts.mode``.

    Exits the process with 0 on success, -1 on failure / unknown mode.
    """
    ret_code = -1
    if opts.mode == "list":
        def_query = Q()
        # narrow the query by any given filter option
        if opts.level:
            def_query &= Q(level__identifier=opts.level)
        if opts.source:
            def_query &= Q(source__identifier=opts.source)
        if opts.device:
            def_query &= Q(device__name=opts.device)
        all_logs = DeviceLogEntry.objects.filter(def_query).select_related(
            "source", "source__device", "level", "user",
            "device").order_by("-date")
        print("{} found:".format(
            logging_tools.get_plural("Log entry", all_logs.count())))
        new_entry = logging_tools.NewFormList()
        for cur_dl in all_logs:
            new_entry.append([
                logging_tools.form_entry(str(cur_dl.date), header="date"),
                logging_tools.form_entry(str(cur_dl.device), header="device"),
                logging_tools.form_entry(str(cur_dl.source), header="source"),
                logging_tools.form_entry(str(cur_dl.source.device or "---"),
                                         header="sdevice"),
                logging_tools.form_entry(str(cur_dl.level), header="level"),
                logging_tools.form_entry(str(cur_dl.user or "---"), header="user"),
                logging_tools.form_entry(str(cur_dl.text), header="text"),
            ])
        print(str(new_entry))
        ret_code = 0
    elif opts.mode == "create":
        if opts.user:
            _user = user.objects.get(Q(login=opts.user))
        else:
            _user = None
        if not opts.text:
            print("no text entered")
        else:
            log_dev = device.objects.get(Q(name=opts.device))
            new_log_entry = DeviceLogEntry.new(
                device=log_dev,
                # todo, fixme
                # source=def_source,
                user=_user,
                level=LogLevel.objects.get(Q(identifier=opts.level)),
                text=" ".join(opts.text),
            )
            ret_code = 0
            print("created '{}'".format(str(new_log_entry)))
    else:
        # FIX: typo "Uknown" -> "Unknown"
        print("Unknown mode '{}'".format(opts.mode))
    sys.exit(ret_code)
def _domain_enum_show_command(options):
    """Show all icswDomainEnum members and their database representation.

    With ``options.sync`` set, DomainTypeEnum rows missing in the database
    are created on the fly and immediately reported as present.
    NOTE(review): reconstructed with the ``else`` attached to the inner
    ``if options.sync`` (otherwise ``_db_str`` could be read before
    assignment) — confirm against VCS history.
    """
    from initat.cluster.backbone.domain_enum import icswDomainEnum
    from initat.cluster.backbone.models import DomainTypeEnum
    print("")
    print("DomainEnums defined: {:d}".format(len(icswDomainEnum)))
    _list = logging_tools.NewFormList()
    # enum_name -> existing database entry
    _c_dict = {
        entry.enum_name: entry for entry in DomainTypeEnum.objects.all()
    }
    for entry in icswDomainEnum:
        if entry.name not in _c_dict:
            if options.sync:
                # create the missing database entry and register it so the
                # lookup below reports it as present
                new_entry = DomainTypeEnum.create_db_entry(entry)
                _c_dict[new_entry.enum_name] = new_entry
            else:
                _db_str = "no"
        if entry.name in _c_dict:
            # if options.sync:
            #     _c_dict[entry.name].update_values(entry)
            _db_str = "yes ({:d})".format(_c_dict[entry.name].pk)
        if entry.value.default_enum:
            _default_info = entry.value.default_enum.name
        else:
            _default_info = "---"
        if entry.value.domain_enum:
            _domain_info = entry.value.domain_enum.name
        else:
            _domain_info = "---"
        _list.append([
            logging_tools.form_entry(entry.name, header="EnumName"),
            logging_tools.form_entry(entry.value.name, header="Name"),
            logging_tools.form_entry(entry.value.info, header="Info"),
            logging_tools.form_entry_center(_db_str, header="DB info"),
            logging_tools.form_entry(_default_info, header="Default Enum"),
            logging_tools.form_entry(_domain_info, header="Domain Enum"),
        ])
    print(str(_list))
def interpret(self, srv_com, cur_ns):
    """Interpret an IPMI sensor reply.

    With arguments given, the first sensor is checked against the
    lower/upper warn/crit limits from ``cur_ns`` and a (state, info)
    tuple is returned; otherwise all sensors are listed in a table.
    """
    # parse the limit options; unparseable / missing values become None
    l_dict = {}
    for key in IPMI_LONG_LIMITS:
        try:
            l_dict[key] = float(getattr(cur_ns, key))
        except Exception:
            # FIX: was a bare "except:"
            l_dict[key] = None
    s_list = srv_com.xpath(".//ns:sensor_list", smart_strings=False)
    if s_list:
        s_list = s_list[0]
        if cur_ns.arguments:
            el = s_list[0]
            cur_value = float(el.attrib["value"])
            ret_state = limits.mon_STATE_OK
            # log=False -> lower bound (value must stay above),
            # log=True -> upper bound (value must stay below)
            for t_name, log, t_state in [
                ("lowern", False, limits.mon_STATE_CRITICAL),
                ("lowerc", False, limits.mon_STATE_CRITICAL),
                ("lowerw", False, limits.mon_STATE_WARNING),
                ("upperw", True, limits.mon_STATE_WARNING),
                ("upperc", True, limits.mon_STATE_CRITICAL),
                ("uppern", True, limits.mon_STATE_CRITICAL),
            ]:
                if l_dict[t_name] is not None:
                    if (log and cur_value >= l_dict[t_name]) or (
                            not log and cur_value <= l_dict[t_name]):
                        ret_state = max(ret_state, t_state)
            return ret_state, "{}: {} is {:.2f} {}".format(
                el.attrib["key"],
                el.attrib["info"],
                cur_value,
                el.attrib["unit"],
            )
        else:
            # list mode
            keys = s_list.xpath(".//@key", smart_strings=False)
            out_list = logging_tools.new_form_list()
            for key in keys:
                el = s_list.xpath("*[@key='{}']".format(key),
                                  smart_strings=False)[0]
                v_list = [
                    logging_tools.form_entry(key, header="key"),
                    logging_tools.form_entry_right(el.attrib["value"], header="value"),
                    logging_tools.form_entry_right(el.attrib["base"], header="base"),
                    logging_tools.form_entry(el.attrib["unit"], header="unit"),
                    logging_tools.form_entry(el.attrib["info"], header="info"),
                ]
                for l_key in IPMI_LIMITS:
                    x_key = "limit_{}".format(l_key)
                    v_list.append(
                        logging_tools.form_entry(el.attrib.get(x_key, "-"),
                                                 header=x_key))
                out_list.append(v_list)
            # FIX: str() instead of the Python-2-only unicode()
            return limits.mon_STATE_OK, "found {}:\n{}".format(
                logging_tools.get_plural("IPMI sensor", len(keys)),
                str(out_list))
    else:
        return limits.mon_STATE_WARNING, "no IPMI sensors found"
def _service_enum_show_command(options):
    """Show all icswServiceEnum members and their ConfigServiceEnum DB state.

    With ``options.sync`` set: missing syncable DB entries are created,
    existing ones updated, legacy-named configs are relinked to their
    enum (via ``comp_dict``), and configs are created for unused enums.
    NOTE(review): reconstructed with the ``else`` attached to the inner
    sync condition (otherwise ``_db_str`` could be read before
    assignment) — confirm against VCS history.
    """
    from initat.cluster.backbone.server_enums import icswServiceEnum
    from initat.cluster.backbone.models import ConfigServiceEnum, config
    from initat.cluster.backbone import factories
    from django.core.exceptions import ValidationError
    # enum_name -> existing database entry
    _c_dict = {
        entry.enum_name: entry for entry in ConfigServiceEnum.objects.all()
    }
    print("")
    print("ServiceEnums defined: {:d}".format(len(icswServiceEnum)))
    _list = logging_tools.NewFormList()
    for entry in icswServiceEnum:
        if entry.name not in _c_dict:
            # only server/relayer services flagged for syncing get a DB row
            if options.sync and (entry.value.server_service
                                 or entry.value.relayer_service
                                 ) and entry.value.sync_config:
                new_entry = ConfigServiceEnum.create_db_entry(entry)
                _c_dict[new_entry.enum_name] = new_entry
            else:
                _db_str = "no"
        if entry.name in _c_dict:
            if options.sync:
                _c_dict[entry.name].update_values(entry)
            _db_str = "yes ({:d})".format(_c_dict[entry.name].pk)
        if entry.value.server_service:
            _egg_action = ", ".join(
                [str(_action) for _action in entry.value.egg_actions]) or "none"
        else:
            _egg_action = "---"
        _list.append([
            logging_tools.form_entry(entry.name, header="EnumName"),
            logging_tools.form_entry(entry.value.name, header="Name"),
            logging_tools.form_entry_center(
                "yes" if entry.value.root_service else "no",
                header="Root Service"),
            logging_tools.form_entry_center(
                "yes" if entry.value.server_service else "no", header="Server"),
            logging_tools.form_entry_center(
                "yes" if entry.value.relayer_service else "no",
                header="Relayer"),
            logging_tools.form_entry(entry.value.info, header="Info"),
            logging_tools.form_entry_center(_db_str, header="DB info"),
            logging_tools.form_entry(_egg_action, header="Egg actions"),
        ])
    print(str(_list))
    if options.sync:
        _change_list = []
        # compat dict: legacy config names -> current enum names
        comp_dict = {
            "rrd_grapher": icswServiceEnum.grapher_server.name,
            "rrd_server": icswServiceEnum.collectd_server.name,
            "rrd_collector": icswServiceEnum.collectd_server.name,
            "server": icswServiceEnum.cluster_server.name,
            "ldap_server": icswServiceEnum.ldap_server.name,
        }
        for c_con in config.objects.all():
            if not c_con.config_service_enum_id:
                # try the config's own name first, then any legacy alias
                _check_names = [c_con.name]
                if c_con.name in comp_dict:
                    _check_names.append(comp_dict[c_con.name])
                for _check_name in _check_names:
                    if _check_name in _c_dict:
                        c_con.config_service_enum = _c_dict[_check_name]
                        try:
                            c_con.save(update_fields=["config_service_enum"])
                        except ValidationError:
                            print("cannot save {}: {}".format(
                                str(c_con), process_tools.get_except_info()))
                        else:
                            _change_list.append(c_con)
                        break
        _create_list = []
        for db_enum in _c_dict.values():
            # create a server config for enums without any linked config
            if not db_enum.config_set.all().count():
                _create_list.append(
                    factories.Config(
                        name=db_enum.name,
                        description=db_enum.info,
                        config_service_enum=db_enum,
                        server_config=True,
                    ))
        if len(_change_list):
            print("")
            print("{} moved to ConfigServiceEnum:".format(
                logging_tools.get_plural("Config", len(_change_list))))
            for entry in _change_list:
                print(" {} ({})".format(entry.name,
                                        str(entry.config_service_enum)))
        if len(_create_list):
            print("")
            print("{} created:".format(
                logging_tools.get_plural("Config", len(_create_list))))
            for entry in _create_list:
                print(" {} ({})".format(entry.name,
                                        str(entry.config_service_enum)))
def device_syslog(opt_ns, cur_dev, j_logs):
    """Fetch and display recent syslog lines for one device.

    Contacts the logcheck-server via a ZMQ srv_command, prints per-timeframe
    line rates and a table of the returned log lines.  ``j_logs`` is
    accepted but not used here.
    """
    print(
        "Information about device '{}' (full name {}, devicegroup {})".format(
            str(cur_dev), str(cur_dev.full_name), str(cur_dev.device_group)))
    print("UUID is '{}', database-ID is {:d}".format(cur_dev.uuid, cur_dev.pk))
    _cr = routing.SrvTypeRouting(force=True, ignore_errors=True)
    _ST = "logcheck-server"
    if _ST in _cr.service_types:
        _inst_xml = InstanceXML(quiet=True)
        # get logcheck-server IP
        _ls_ip = _cr[_ST][0][1]
        # get logcheck-server Port
        _ls_port = _inst_xml.get_port_dict(_ST, ptype="command")
        _sc = server_command.srv_command(command="get_syslog", )
        # request loglines / minutes for exactly this device
        _sc["devices"] = _sc.builder(
            "devices",
            *[
                _sc.builder(
                    "device",
                    pk="{:d}".format(cur_dev.pk),
                    lines="{:d}".format(opt_ns.loglines),
                    minutes="{:d}".format(opt_ns.minutes),
                )
            ])
        _conn_str = "tcp://{}:{:d}".format(_ls_ip, _ls_port)
        _result = net_tools.ZMQConnection("icsw_state_{:d}".format(
            os.getpid())).add_connection(
                _conn_str,
                _sc,
            )
        if _result is not None:
            _dev = _result.xpath(".//ns:devices/ns:device[@pk]")[0]
            _lines = _result.xpath("ns:lines", start_el=_dev)[0]
            _rates = _result.xpath("ns:rates", start_el=_dev)
            if _rates:
                # timeframe (seconds) -> lines/sec
                _rates = {
                    int(_el.get("timeframe")): float(_el.get("rate"))
                    for _el in _rates[0]
                }
                print("rate info: {}".format(", ".join([
                    "{:.2f} lines/sec in {}".format(
                        _rates[_seconds],
                        logging_tools.get_diff_time_str(_seconds))
                    for _seconds in sorted(_rates)
                ])))
            else:
                print("no rate info found")
                print(_rates)
            _out_lines = logging_tools.NewFormList()
            # lines are transferred compressed as JSON
            for _entry in server_command.decompress(_lines.text, json=True):
                _out_lines.append([
                    logging_tools.form_entry(_entry["line_id"], header="idx"),
                    logging_tools.form_entry(
                        "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}".format(
                            *_entry["line_datetime_parsed"]),
                        header="Timestamp",
                    ),
                ] + [
                    logging_tools.form_entry(_entry[_key], header=_key)
                    for _key in ["hostname", "priority", "facility", "tag"]
                ] + [
                    logging_tools.form_entry(_entry["text"], header="text"),
                ])
            print(str(_out_lines))
        else:
            print("got no result from {} ({})".format(_conn_str, _ST))
    else:
        print("No logcheck-server found, skipping syslog display")
def get_config_info(self):
    """Return the configuration as a list of rendered text lines."""
    gk = sorted(self.keys())
    if gk:
        f_obj = logging_tools.new_form_list()
        for key in gk:
            if self.get_type(key) in ["a", "d"]:
                # array / dict type: one header row plus one row per element
                pv = self.pretty_print(key)
                f_obj.append(
                    [
                        logging_tools.form_entry(key),
                        logging_tools.form_entry("list with {}:".format(
                            logging_tools.get_plural("entry", len(pv)))),
                        logging_tools.form_entry(self.get_type(key)),
                        logging_tools.form_entry(self.get_source(key)),
                    ]
                )
                for idx, entry in enumerate(pv):
                    f_obj.append(
                        [
                            logging_tools.form_entry(""),
                            logging_tools.form_entry(""),
                            logging_tools.form_entry(entry),
                            logging_tools.form_entry(str(idx)),
                            logging_tools.form_entry("---"),
                        ]
                    )
            else:
                f_obj.append(
                    [
                        logging_tools.form_entry(key, header="key"),
                        logging_tools.form_entry(
                            self.is_global(key) and "global" or "local",
                            post_str=" : ", header="global"),
                        logging_tools.form_entry(
                            self.pretty_print(key), header="value"),
                        logging_tools.form_entry(
                            self.get_type(key), pre_str=", (",
                            post_str=" from ", header="type"),
                        logging_tools.form_entry(
                            self.get_source(key), post_str=")",
                            header="source"),
                    ]
                )
        # FIX: str() instead of the Python-2-only unicode()
        ret_str = str(f_obj).split("\n")
    else:
        ret_str = []
    return ret_str
def do_info(cur_opts, log_com):
    """Show detailed information about a single user.

    Prints login/uid/group data, system-wide quota settings (optional),
    the local quota via the external ``quota`` binary, optionally deletes
    a user variable and lists all user variables.  Returns 0 on success,
    1 on error (missing/unknown user, quota call failure).
    """
    if not cur_opts.username:
        print("No user name given")
        return 1
    _user = _get_user(cur_opts.username)
    _ret_state = 0
    if _user is None:
        _ret_state = 1
    else:
        from initat.cluster.backbone.models import user_quota_setting, user_variable
        from django.db.models import Q
        print("")
        print(
            "User with loginname '{}' (user {}), uid={:d}, group={} (gid={:d})".format(
                _user.login,
                str(_user),
                _user.uid,
                str(_user.group),
                _user.group.gid,
            )
        )
        num_qs = _user.user_quota_setting_set.all().count()
        if num_qs and cur_opts.system_wide_quota:
            print("")
            print(
                "{} found:".format(
                    logging_tools.get_plural("system-wide quota setting", num_qs)
                )
            )
            for _qs in _user.user_quota_setting_set.all():
                _bd = _qs.quota_capable_blockdevice
                print(
                    " device {} ({} on {}): {}".format(
                        str(_bd.device.full_name),
                        _bd.block_device_path,
                        _bd.mount_path,
                        get_quota_str(_qs),
                    )
                )
        try:
            # query the local quota settings via the external quota binary
            _cmd = "quota --show-mntpoint -wp -u {}".format(
                _user.login,
            )
            _res = subprocess.check_output(
                _cmd.split(),
                stderr=subprocess.STDOUT,
            ).decode("utf-8")
        except subprocess.CalledProcessError as sb_exc:
            # quota exits non-zero even for partial info; keep its output
            _res = sb_exc.output.decode("utf-8")
            # print("error calling '{}': {}".format(_cmd, process_tools.get_except_info()))
            _ret_state = 1
        except OSError as sb_exc:
            # quota command not found
            _res = "denied: {}".format(sb_exc)
            # print("error calling '{}': {}".format(_cmd, process_tools.get_except_info()))
            _ret_state = 1
        else:
            _ret_state = 0
        if _res.lower().count("denied"):
            print(" error getting local quotas for {}: {}".format(_user.login, _res))
        else:
            # print _res
            # quota -wp output: 10 whitespace-separated columns per data line
            _lines = [_line.strip().split() for _line in _res.split("\n") if _line.strip()]
            _lines = [_line for _line in _lines if len(_line) == 10]
            if _lines:
                print("", "local quota:", sep="\n")
                _line = _lines[-1]
                # a trailing "*" on the used-blocks column marks a soft-limit violation
                _bytes_violate = _line[2].count("*") > 0
                # unsaved model instance, only used for pretty-printing
                _local = user_quota_setting(
                    bytes_used=int(_line[2].replace("*", "")) * 1024,
                    bytes_soft=int(_line[3]) * 1024,
                    bytes_hard=int(_line[4]) * 1024,
                    bytes_gracetime=int(_line[5]),
                )
                print(
                    " local mountpoint: {}".format(
                        get_quota_str(_local),
                    )
                )
        if cur_opts.delete_var:
            print("")
            try:
                _cv = user_variable.objects.get(Q(user=_user) & Q(idx=cur_opts.delete_var))
            except user_variable.DoesNotExist:
                print("Variable to delete does not exist")
            else:
                print("Deleting '{}'".format(str(_cv)))
                _cv.delete()
        if cur_opts.show_vars:
            out_list = logging_tools.NewFormList()
            for _var in _user.user_variable_set.all().order_by("name"):
                out_list.append(
                    [
                        logging_tools.form_entry(_var.idx, header="idx"),
                        logging_tools.form_entry(_var.name, header="name"),
                        logging_tools.form_entry(_var.var_type, header="type"),
                        logging_tools.form_entry_right(_var.value if _var.var_type != "j" else "{:d} Bytes".format(len(_var.json_value)), header="value"),
                        logging_tools.form_entry_center("yes" if _var.editable else "no", header="editable"),
                        logging_tools.form_entry_center("yes" if _var.hidden else "no", header="hidden"),
                        # NOTE(review): "%H:%m:%S" formats the month (%m)
                        # where minutes (%M) look intended — confirm
                        logging_tools.form_entry(_var.date.strftime("%H:%m:%S %a, %d. %b %Y"), header="created"),
                        logging_tools.form_entry(_var.description, header="description"),
                    ]
                )
            print(str(out_list))
    return _ret_state
def do_list(cur_opts, log_com):
    """Print a table with one row per user returned by get_users()."""
    fe = logging_tools.form_entry
    table = logging_tools.NewFormList()
    for cur_user in get_users(cur_opts, log_com):
        table.append(
            [
                fe(cur_user.login, header="login"),
                fe(cur_user.uid, header="uid"),
                fe(cur_user.active, header="active"),
                fe(cur_user.group.groupname, header="group"),
                fe(cur_user.group.gid, header="gid"),
                fe(cur_user.group.active, header="gactive"),
                fe(cur_user.first_name, header="first name"),
                fe(cur_user.last_name, header="last name"),
                fe(cur_user.email, header="email"),
                fe(cur_user.login_count, header="logincount"),
                fe(cur_user.login_fail_count, header="failedcount"),
                fe(cur_user.user_variable_set.all().count(), header="#vars"),
                fe(cur_user.comment, header="comment"),
            ]
        )
    print(str(table))
def interpret(self, srv_com, cur_ns):
    """Interpret a package-list reply and render it as a table.

    Supports the "rpm" and "debian" formats; returns the usual
    (state, output-string) monitoring tuple.
    """
    # NOTE(security): pickle-decompressing data from the server command;
    # acceptable only because the peer is a trusted cluster component
    r_dict = server_command.decompress(srv_com["pkg_list"].text, pickle=True)
    root_dir = srv_com["root_dir"].text
    in_format = srv_com["format"].text
    out_f = logging_tools.NewFormList()
    keys = sorted(r_dict.keys())
    header_line = "{} found, system is {} (root is {})".format(
        logging_tools.get_plural("package", len(keys)),
        in_format,
        root_dir,
    )
    if keys:
        if in_format == "rpm":
            for key in keys:
                for value in r_dict[key]:
                    if isinstance(value, tuple):
                        # legacy tuple format, with or without size field
                        if len(value) == 4:
                            ver, rel, arch, summary = value
                            size = 0
                        else:
                            ver, rel, arch, size, summary = value
                    else:
                        ver, rel, arch, size, summary = (
                            value["version"], value["release"], value["arch"],
                            value["size"], value["summary"])
                    out_f.append([
                        logging_tools.form_entry(key, header="name"),
                        logging_tools.form_entry_right(ver, header="version"),
                        logging_tools.form_entry(rel, header="release"),
                        logging_tools.form_entry(arch, header="arch"),
                        logging_tools.form_entry_right(size, header="size"),
                        logging_tools.form_entry(summary, header="summary"),
                    ])
        elif in_format == "debian":
            for key in keys:
                for value in r_dict[key]:
                    d_flag, s_flag, e_flag = value["flags"]
                    ver, rel = (value["version"], value["release"])
                    summary = value["summary"]
                    out_f.append([
                        logging_tools.form_entry(key, header="name"),
                        logging_tools.form_entry_right(d_flag, header="d_flag"),
                        logging_tools.form_entry_right(s_flag, header="s_flag"),
                        logging_tools.form_entry_right(e_flag, header="e_flag"),
                        logging_tools.form_entry_right(ver, header="version"),
                        logging_tools.form_entry(rel, header="release"),
                        logging_tools.form_entry(summary, header="summary"),
                    ])
                    # FIX: removed stray "out_f.add_line((key, d_flag, ...))"
                    # left over from an older API — rows are added via
                    # append(); the extra call duplicated the row or raised
                    # AttributeError on NewFormList
        return limits.mon_STATE_OK, "{}\n{}".format(header_line, str(out_f))
    else:
        return limits.mon_STATE_CRITICAL, "{}, nothing found".format(
            header_line)
def main(args):
    """Cross-reference AngularJS definitions (coffee) with their usages.

    Walks args.path collecting *.coffee and *.html files, extracts
    directive/service/controller/factory definitions plus ng-template
    blocks into a DataSink, then records references from HTML to
    directives/controllers and from coffee to services/factories and
    templates.  Finally prints a filtered, optionally sorted overview.

    NOTE(review): files are opened in "rb" mode but matched with str
    regexes — presumably this runs under Python 2; verify before porting.
    """
    ignore_re = re.compile(args.ignore)
    coffefiles = []
    htmlfiles = []
    # collect candidate files, skipping anything matching the ignore pattern
    for root, dirs, files in os.walk(args.path, topdown=False):
        coffefiles.extend([
            os.path.join(root, f) for f in files
            if f.endswith("coffee") and not ignore_re.search(f)
        ])
        htmlfiles.extend([
            os.path.join(root, f) for f in files
            if f.endswith("html") and not ignore_re.search(f)
        ])
    print("{:d} Coffee and {:d} HTML files".format(len(coffefiles), len(htmlfiles)))
    # matches e.g.  angular.module(...).directive('fooBar', ...)
    def_matcher = re.compile(
        ".*\.(?P<type>(directive|service|controller|factory))\((\'|\")(?P<name>(.*?))(\'|\").*"
    )
    # matches inline ng-template script tags in HTML
    html_matcher = re.compile(
        ".*script type=.text/ng-template. id=(\'|\")(?P<name>.*)(\'|\").")
    my_sink = DataSink(args.path)
    print("Getting defs...")
    # get definitions
    for name in coffefiles:
        for line_num, line in enumerate(open(name, "rb"), 1):
            match = def_matcher.match(line)
            if match:
                _gd = match.groupdict()
                my_sink.feed(name, line_num, line, _gd["type"], _gd["name"])
    print("done (found {:d})".format(len(my_sink._defs)))
    # find refs in HTML to services
    dir_defs = my_sink.get_type_defs("directive") + my_sink.get_type_defs(
        "controller")
    # index every definition under both its camelCase and hyphen-ated name
    dir_dict = {}
    for _def in dir_defs:
        dir_dict[_def.camel_name] = _def
        dir_dict[_def.hyphen_name] = _def
    dir_matcher = set(dir_dict.keys())
    _refs = 0
    s_time = time.time()
    for name in htmlfiles:
        for line_num, line in enumerate(open(name, "rb"), 1):
            match = html_matcher.match(line)
            if match:
                # an ng-template definition, not a reference
                _gd = match.groupdict()
                my_sink.feed(name, line_num, line, "html", _gd["name"])
            else:
                # print line
                _add_dict = {}
                for word in re.split("([^a-zA-Z\-])+", line):
                    if word in dir_matcher:
                        # skip lines with only closing tags
                        if "</{}".format(word) in line and "<{}".format(
                                word) not in line:
                            continue
                        # only one match per line
                        _add_dict[word] = True
                for word in _add_dict.keys():
                    dir_dict[word].add_reference(name, line_num, line)
                    _refs += 1
    e_time = time.time()
    print("Reference from HTML to directive took {} (found: {:d})".format(
        logging_tools.get_diff_time_str(e_time - s_time),
        _refs,
    ))
    # find refs to Services and Factories in coffee
    sf_refs = my_sink.get_type_defs("factory") + my_sink.get_type_defs(
        "service")
    sf_dict = {_sf.camel_name: _sf for _sf in sf_refs}
    sf_matcher = set(sf_dict.keys())
    # also find refs to html templates in coffee
    html_ref_re = re.compile(
        ".*(template|templateUrl)\s*:\s*.*(\'|\")(?P<temp_name>.*?)(\'|\").*")
    html_dict = {
        _html.hyphen_name: _html
        for _html in my_sink.get_type_defs("html")
    }
    # NOTE: html_matcher is deliberately rebound here from a regex to a
    # set of template names; the regex is no longer needed at this point
    html_matcher = set(html_dict.keys())
    # print html_matcher
    _refs = 0
    s_time = time.time()
    for name in coffefiles:
        for line_num, line in enumerate(open(name, "rb"), 1):
            # print line
            for word in re.split("([^a-zA-Z])+", line):
                if word in sf_matcher:
                    # check if reference is by literal
                    if "'{}'".format(word) in line or "\"{}\"".format(
                            word) in line:
                        sf_dict[word].add_reference(name, line_num, line)
                        _refs += 1
            _html_match = html_ref_re.match(line)
            if _html_match:
                _temp_ref = _html_match.groupdict()["temp_name"]
                if _temp_ref in html_matcher:
                    html_dict[_temp_ref].add_reference(name, line_num, line)
    e_time = time.time()
    print("Reference from coffee to service / factory took {} (found: {:d})".format(
        logging_tools.get_diff_time_str(e_time - s_time),
        _refs,
    ))
    # generate output
    # raw list
    _list = sum(
        [my_sink.get_type_defs(_type) for _type in my_sink.get_types()], [])
    # filter
    if args.ignore_valid:
        _list = [entry for entry in _list if not entry.is_valid]
    name_re = re.compile(args.filter, re.IGNORECASE)
    _list = [entry for entry in _list if name_re.search(entry.name)]
    if _list:
        print("{} in result list:".format(
            logging_tools.get_plural("entry", len(_list)),
        ))
        if args.order_by == "name":
            _list = sorted(_list, key=attrgetter("name"))
        if args.order_by == "toplevel":
            _list = sorted(_list, key=attrgetter("top_level_dir"))
        out_list = logging_tools.NewFormList()
        files_referenced = {}
        for _def in _list:
            files_referenced.setdefault(_def.file_name, []).append(_def)
            out_list.append([
                logging_tools.form_entry(_def.type, header="Type"),
                logging_tools.form_entry(_def.name, header="Name"),
                logging_tools.form_entry(_def.file_name, header="File"),
                logging_tools.form_entry_right(_def.line_num, header="line"),
                logging_tools.form_entry_right(len(_def.refs), header="#refs"),
                logging_tools.form_entry_center(
                    "yes" if _def.namespace_ok else "no", header="NS ok"),
                logging_tools.form_entry_center(
                    "yes" if _def.name_valid else "no", header="valid"),
            ])
            if args.show_refs:
                # expand each definition with its reference lines
                out_list.extend(_def.get_ref_list())
            _def.add_file_refs(files_referenced)
        print(str(out_list))
        if args.show_refs and files_referenced:
            print()
            print("Referenced files:")
            print()
            pprint.pprint(files_referenced)
def instance_to_form_list(self, opt_ns, res_xml):
    """Build a form list (one row per service instance) from a result XML.

    Maps process / configuration / license / meta states to
    (display text, display attribute) pairs and appends optional columns
    depending on the flags in opt_ns (process, started, config, memory,
    version, meta, failed).

    NOTE(review): assumes res_xml is an etree-like object with an
    ``xpath(..., start_strings=False)`` extension — verify against the
    server_command XML wrapper.
    """
    # process-state -> (text, display attribute)
    prc_dict = {
        SERVICE_OK: ("running", "ok"),
        SERVICE_DEAD: ("error", "critical"),
        SERVICE_INCOMPLETE: ("incomplete", "critical"),
        SERVICE_NOT_INSTALLED: ("not installed", "warning"),
        SERVICE_NOT_CONFIGURED: ("not configured", "warning"),
    }
    # configured-state -> (text, display attribute)
    crc_dict = {
        CONF_STATE_RUN: ("run", "ok"),
        CONF_STATE_STOP: ("stop", "critical"),
        CONF_STATE_IP_MISMATCH: ("ip mismatch", "critical"),
    }
    # meta info: "t" == target state, "i" == ignore flag
    meta_dict = {
        "t": {
            TARGET_STATE_RUNNING: ("run", "ok"),
            TARGET_STATE_STOPPED: ("stop", "critical"),
        },
        "i": {
            0: ("monitor", "ok"),
            1: ("ignore", "warning"),
        }
    }
    # license state mapping is only available when the License model is
    # importable (lic_dict stays None otherwise and "---" is shown)
    if License is not None:
        lic_dict = {
            -1: ("-", ""),
            LicenseState.none: ("no license", "critical"),
            LicenseState.violated: ("parameter violated", "critical"),
            LicenseState.valid: ("valid", "ok"),
            LicenseState.grace: ("in grace", "warning"),
            LicenseState.expired: ("expired", "critical"),
            LicenseState.fp_mismatch: ("wrong fingerprint", "critical"),
            # LicenseState.ip_mismatch: ("ip mismatch", "critical"),
        }
    else:
        lic_dict = None
    out_bl = logging_tools.new_form_list()
    # group instances by their runs_on attribute, sorted for stable output
    types = sorted(
        list(
            set(res_xml.xpath(".//instance/@runs_on", start_strings=False))))
    _list = sum([
        res_xml.xpath("instance[result and @runs_on='{}']".format(_type))
        for _type in types
    ], [])
    for act_struct in _list:
        _res = act_struct.find("result")
        p_state = int(
            act_struct.find(".//process_state_info").get(
                "state", SERVICE_DEAD))
        c_state = int(
            act_struct.find(".//configured_state_info").get(
                "state", CONF_STATE_STOP))
        # with --failed only show instances that are not running OK
        if not opt_ns.failed or (opt_ns.failed and p_state not in [SERVICE_OK]):
            cur_line = [
                logging_tools.form_entry(act_struct.attrib["name"], header="Name")
            ]
            cur_line.append(
                logging_tools.form_entry(act_struct.attrib["runs_on"],
                                         header="runson"))
            cur_line.append(
                logging_tools.form_entry(
                    _res.find("process_state_info").get(
                        "check_source", "N/A"),
                    header="source"))
            if opt_ns.process:
                s_info = act_struct.find(".//process_state_info")
                if "num_started" not in s_info.attrib:
                    # no detailed process info, show the raw text
                    cur_line.append(logging_tools.form_entry(s_info.text))
                else:
                    num_diff, any_ok = (int(
                        s_info.get("num_diff")), True if int(
                            act_struct.attrib["any-processes-ok"]) else False)
                    # print etree.tostring(act_struct, pretty_print=True)
                    num_pids = len(_res.findall(".//pids/pid"))
                    # pick a display attribute from the process-count delta
                    da_name = ""
                    if any_ok:
                        pass
                    else:
                        if num_diff < 0:
                            da_name = "critical"
                        elif num_diff > 0:
                            da_name = "warning"
                    cur_line.append(
                        logging_tools.form_entry(
                            s_info.attrib["proc_info_str"],
                            header="Process info",
                            display_attribute=da_name))
                    # pid -> occurrence count
                    pid_dict = {}
                    for cur_pid in act_struct.findall(".//pids/pid"):
                        pid_dict[int(cur_pid.text)] = int(
                            cur_pid.get("count", "1"))
                    if pid_dict:
                        p_list = sorted(pid_dict.keys())
                        if max(pid_dict.values()) == 1:
                            # all pids unique, show compressed ranges
                            cur_line.append(
                                logging_tools.form_entry(
                                    logging_tools.compress_num_list(p_list),
                                    header="pids"))
                        else:
                            # annotate pids that occur more than once
                            cur_line.append(
                                logging_tools.form_entry(",".join([
                                    "{:d}{}".format(
                                        key,
                                        " ({:d})".format(pid_dict[key])
                                        if pid_dict[key] > 1 else "")
                                    for key in p_list
                                ]), header="pids"))
                    else:
                        cur_line.append(
                            logging_tools.form_entry("no PIDs", header="pids"))
            if opt_ns.started:
                start_time = int(
                    act_struct.find(".//process_state_info").get(
                        "start_time", "0"))
                if start_time:
                    # NOTE(review): diff_days/hours/mins/secs are computed
                    # but never used below (only the strftime string is
                    # shown) — looks like leftover code; confirm and drop
                    diff_time = max(
                        0, time.mktime(time.localtime()) - start_time)
                    diff_days = int(diff_time / (3600 * 24))
                    diff_hours = int(
                        (diff_time - 3600 * 24 * diff_days) / 3600)
                    diff_mins = int((diff_time - 3600 *
                                     (24 * diff_days + diff_hours)) / 60)
                    diff_secs = int(
                        diff_time - 60 *
                        (60 * (24 * diff_days + diff_hours) + diff_mins))
                    ret_str = "{}".format(
                        time.strftime("%a, %d. %b %Y, %H:%M:%S",
                                      time.localtime(start_time)))
                else:
                    ret_str = "no start info found"
                cur_line.append(
                    logging_tools.form_entry(ret_str, header="started"))
            if opt_ns.config:
                cur_line.append(
                    logging_tools.form_entry(
                        act_struct.find(".//config_info").text,
                        header="config info"))
            if opt_ns.memory:
                cur_mem = act_struct.find(".//memory_info")
                if cur_mem is not None:
                    mem_str = process_tools.beautify_mem_info(
                        int(cur_mem.text))
                else:
                    # no pids hence no memory info
                    mem_str = ""
                cur_line.append(
                    logging_tools.form_entry_right(mem_str, header="Memory"))
            if opt_ns.version:
                if "version" in _res.attrib:
                    _version = _res.attrib["version"]
                else:
                    _version = ""
                cur_line.append(
                    logging_tools.form_entry_right(_version, header="Version"))
            _lic_info = _res.find("license_info")
            _lic_state = int(_lic_info.attrib["state"])
            if lic_dict is None:
                # License model not importable, no mapping available
                cur_line.append(
                    logging_tools.form_entry(
                        "---",
                        header="License",
                    ))
            else:
                cur_line.append(
                    logging_tools.form_entry(
                        lic_dict[_lic_state][0],
                        header="License",
                        display_attribute=lic_dict[_lic_state][1],
                    ))
            cur_line.append(
                logging_tools.form_entry(
                    prc_dict[p_state][0],
                    header="PState",
                    display_attribute=prc_dict[p_state][1],
                ))
            cur_line.append(
                logging_tools.form_entry(
                    crc_dict[c_state][0],
                    header="CState",
                    display_attribute=crc_dict[c_state][1],
                ))
            if opt_ns.meta:
                _meta_res = act_struct.find(".//meta_result")
                if _meta_res is not None:
                    t_state = int(_meta_res.get("target_state"))
                    ignore = int(_meta_res.get("ignore"))
                    cur_line.append(
                        logging_tools.form_entry(
                            meta_dict["t"][t_state][0],
                            header="TargetState",
                            display_attribute=meta_dict["t"][t_state][1],
                        ))
                    cur_line.append(
                        logging_tools.form_entry(
                            meta_dict["i"][ignore][0],
                            header="Ignore",
                            display_attribute=meta_dict["i"][ignore][1],
                        ))
                else:
                    # no meta_result element present
                    cur_line.append(
                        logging_tools.form_entry(
                            "N/A",
                            header="TargetState",
                            display_attribute="warning",
                        ))
                    cur_line.append(
                        logging_tools.form_entry(
                            "N/A",
                            header="Ignore",
                            display_attribute="warning",
                        ))
            out_bl.append(cur_line)
    return out_bl
def show_overview(local_mc, valid_names):
    """Print two overview tables: one per module, one per command.

    local_mc is the module container (provides module_list and
    command_dict); valid_names is the subset of command names to count
    per module.
    """
    mod_list = logging_tools.NewFormList()
    cmd_list = logging_tools.NewFormList()
    # iterate over modules
    for _idx, mod in enumerate(local_mc.module_list, 1):
        c_names = [name for name in valid_names if local_mc.command_dict[name].module == mod]
        # fix: previous code copied c_names element-by-element into a new
        # list before sorting; sorted() does both in one step
        local_valid_names = sorted(c_names)
        # show module overview
        mod_list.append(
            [
                logging_tools.form_entry_right(_idx, header="#"),
                logging_tools.form_entry(mod.name, header="Module name"),
                logging_tools.form_entry(mod.Meta.uuid, header="uuid"),
                logging_tools.form_entry(mod.checksum, header="Checksum"),
                logging_tools.form_entry_center(mod.Meta.required_access.name, header="Access"),
                logging_tools.form_entry_center(
                    ",".join(
                        [
                            _platform.name for _platform in mod.Meta.required_platform
                        ]
                    ),
                    header="Platform",
                ),
                logging_tools.form_entry_right(mod.Meta.priority, header="priority"),
                logging_tools.form_entry_right(
                    "yes" if hasattr(mod, "init_machine_vector") else "no", header="MachineVector"
                ),
                logging_tools.form_entry_right(len(local_valid_names), header="#coms"),
                logging_tools.form_entry(", ".join(local_valid_names), header="commands"),
            ]
        )
    # iterate over command
    for _idx, cmd_name in enumerate(sorted(local_mc.command_dict.keys()), 1):
        cmd = local_mc[cmd_name]
        # print(cmd)
        # print(inspect.getsource(cmd.__class__))
        cmd_list.append(
            [
                logging_tools.form_entry_right(_idx, header="#"),
                logging_tools.form_entry(cmd_name, header="Name"),
                logging_tools.form_entry(cmd.module.name, header="Module name"),
                logging_tools.form_entry(cmd.Meta.uuid, header="uuid"),
                logging_tools.form_entry(cmd.checksum, header="Checksum"),
                logging_tools.form_entry(cmd.Meta.check_instance.name, header="Server"),
                logging_tools.form_entry_center(cmd.Meta.required_access.name, header="Access"),
                logging_tools.form_entry_center(
                    ",".join(
                        [
                            _platform.name for _platform in cmd.Meta.required_platform
                        ]
                    ),
                    header="Platform",
                ),
                logging_tools.form_entry_center(
                    "yes" if cmd.Meta.has_perfdata else "no",
                    header="perfdata",
                ),
                logging_tools.form_entry_center(
                    "yes" if cmd.Meta.create_mon_check_command else "no",
                    header="create MCC",
                ),
                logging_tools.form_entry(
                    ", ".join(
                        cmd.Meta.alternate_names
                    ) if cmd.Meta.alternate_names else "---",
                    header="Alternate names",
                ),
                logging_tools.form_entry(
                    cmd.Meta.ports.get_port_spec(),
                    header="PortSpec",
                ),
                logging_tools.form_entry(
                    cmd.Meta.description,
                    header="description",
                ),
            ]
        )
    print("\nModule overview:\n{}".format(str(mod_list)))
    print("\nCommand overview:\n{}".format(str(cmd_list)))
def get_ref_list(self):
    """Return one single-entry form row per recorded reference.

    Each row shows the reference as "line_num@file_name line_content".
    """
    rows = []
    for ref_file, ref_line_num, ref_line in self.refs:
        entry = logging_tools.form_entry(
            " {:6d}@{:<30s} {}".format(ref_line_num, ref_file, ref_line))
        rows.append([entry])
    return rows
def main(options):
    """Inspect / archive the logging-server python error file.

    Modes (mutually exclusive, derived from options): overview table,
    per-uid statistics, show the last N records, or show records by
    index.  With --clear the error file is tarred away and removed.

    NOTE(review): this code is Python-2-only (``file()``, ``unicode()``,
    ``commands.getstatusoutput``, list-returning ``dict.keys()``) —
    confirm the interpreter before touching it.
    """
    # overview is the default mode when no explicit mode was requested
    options.overview = True if (not options.stat and not options.index and not options.num) else False
    options.index = [int(cur_idx) for cur_idx in options.index]
    err_file_name = os.path.join(LOG_ROOT, "logging-server", "err_py")
    if not os.path.isfile(err_file_name):
        print("{} does not exist".format(err_file_name))
        sys.exit(1)
    if options.clear:
        # archive the error file with the best available compressor
        new_file_name = "{}_{}.tar".format(
            err_file_name, time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime()))
        if process_tools.find_file("xz"):
            _pf = ".xz"
            _compr = "J"
            c_stat, out = commands.getstatusoutput("tar cpJf {}{} {}".format(
                new_file_name, _pf, err_file_name))
        elif process_tools.find_file("bzip2"):
            _pf = ".bz2"
            _compr = "j"
            c_stat, out = commands.getstatusoutput("tar cpjf {}{} {}".format(
                new_file_name, _pf, err_file_name))
        else:
            _pf = ""
            _compr = ""
        # NOTE(review): this generic tar call repeats the work already
        # done in the xz/bzip2 branches above (the archive is created
        # twice) — looks like leftover refactoring; confirm intent
        print("taring {} to {}{} ...".format(err_file_name, new_file_name, _pf))
        c_stat, out = commands.getstatusoutput("tar cp{}f {}{} {}".format(
            _compr, new_file_name, _pf, err_file_name))
        if c_stat:
            print("*** error (%d): %s" % (c_stat, out))
        else:
            os.unlink(err_file_name)
        sys.exit(c_stat)
    try:
        # only keep lines that carry a "from pid" marker
        err_lines = [
            line.strip() for line in file(err_file_name, "r").read().split("\n")
            if line.count("from pid")
        ]
    except IOError:
        print("Cannot read '{}': {}".format(err_file_name,
                                            process_tools.get_except_info()))
        sys.exit(1)
    print("Found error_file {} with {}".format(
        err_file_name, logging_tools.get_plural("line", len(err_lines))))
    errs_found, act_err = ([], None)
    act_idx, idx_dict, prev_dt = (0, {}, None)
    for line in err_lines:
        line_parts = line.split(":")
        # date is always the first 4 parts
        line_date = ":".join(line_parts[0:3]).strip()
        info_part = line_parts[3].strip()
        err_line = ":".join(line_parts[4:])
        # parse info_part
        try:
            if info_part.startswith("("):
                line_state = ""
            else:
                line_state = info_part.split()[0]
            # strip the state token (no-op when line_state is empty)
            info_part = info_part[len(line_state):].strip()
            info_parts = info_part.split()
            # skip error-thread name and "from pid" string
            info_parts.pop(0)
            info_parts.pop(0)
            info_parts.pop(0)
        except:
            # NOTE(review): bare except hides real errors; narrow to
            # (IndexError, ValueError) when this gets reworked
            print("Error pre-parsing line '{}': {}".format(
                line, process_tools.get_except_info()))
        else:
            try:
                # get pid
                line_pid = int(info_parts.pop(0))
                # unknown or full source
                if len(info_parts) == 7:
                    # full source
                    line_s_name = info_parts[0][1:]
                    line_uid = int(info_parts[2])
                    line_uname = info_parts[3][1:-2]
                    line_gid = int(info_parts[5])
                    line_gname = info_parts[6][1:-3]
                else:
                    line_s_name = info_parts[0][1:-1]
                    line_uid, line_gid = (-1, -1)
                    line_uname, line_gname = ("unknown", "unknown")
                cur_dt = datetime.datetime.strptime(line_date,
                                                    "%a %b %d %H:%M:%S %Y")
                # a gap of more than 5 seconds starts a new error record
                if prev_dt:
                    dt_change = abs(cur_dt - prev_dt).seconds > 5
                else:
                    dt_change = False
                prev_dt = cur_dt
                if not act_err or act_err.pid != line_pid or dt_change or line.count(
                        "<type"):
                    act_idx += 1
                    act_err = ErrorRecord(
                        line_pid,
                        line_s_name,
                        line_uid,
                        line_uname,
                        line_gid,
                        line_gname,
                    )
                    act_err.set_idx(act_idx)
                    idx_dict[act_idx] = act_err
                    errs_found.append(act_err)
                if err_line.strip() or not options.noempty:
                    act_err.add_line(line_date, line_state, err_line)
            except:
                print("Error parsing line '%s': %s" % (line,
                                                       process_tools.get_except_info()))
    print("Found {}".format(
        logging_tools.get_plural("error record", len(errs_found))))
    if options.overview:
        if errs_found:
            out_list = logging_tools.new_form_list()
            for err in errs_found:
                out_list.append(err.get_form_parts())
            print(unicode(out_list))
    elif options.stat:
        # aggregate records per uid
        uid_dict = {}
        for err in errs_found:
            uid_dict.setdefault(err.uid, []).append(err)
        all_uids = uid_dict.keys()
        all_uids.sort()
        out_list = logging_tools.new_form_list()
        for uid in all_uids:
            uid_stuff = uid_dict[uid]
            diff_sources = []
            for err in uid_stuff:
                if err.source_name not in diff_sources:
                    diff_sources.append(err.source_name)
            diff_sources.sort()
            out_list.append((
                logging_tools.form_entry(uid, header="uid"),
                logging_tools.form_entry(uid_stuff[0].uname, header="uname"),
                logging_tools.form_entry(len(uid_stuff), header="# err"),
                logging_tools.form_entry(len(diff_sources), header="# sources"),
                logging_tools.form_entry(", ".join(diff_sources), header="sources"),
            ))
        print(unicode(out_list))
    elif options.num:
        # translate --num N into the N highest record indices
        idx_l = idx_dict.keys()
        idx_l.sort()
        idx_show = []
        while options.num and idx_l:
            options.num -= 1
            idx_show.append(idx_l.pop(-1))
        idx_show.reverse()
        options.index = idx_show
    if options.index:
        for idx in options.index:
            if idx in idx_dict:
                act_err = idx_dict[idx]
                print(act_err.get_header())
                print(act_err.show_lines())
            else:
                print("Index {:d} not in index_list {}".format(
                    idx, logging_tools.compress_num_list(idx_dict.keys())))