def _handle_ls_for_system(self, enable):
    """Enable or disable the listed services at boot time.

    Uses the first init-management tool found on the system, in order:
    insserv, update-rc.d, chkconfig.

    :param enable: True to enable the services, False to disable them
    """
    _insserv_bin = process_tools.find_file("insserv")
    _update_rc_bin = process_tools.find_file("update-rc.d")
    _chkconfig_bin = process_tools.find_file("chkconfig")
    _srvs = ["meta-server"]
    for _srv in _srvs:
        if _insserv_bin:
            _cmdline = "{} {} {}".format(
                _insserv_bin,
                "" if enable else "-r",
                _srv,
            )
        elif _update_rc_bin:
            _cmdline = "{} {} {}".format(
                _update_rc_bin,
                _srv,
                "enable" if enable else "disable",
            )
        elif _chkconfig_bin:
            _cmdline = "{} {} {}".format(
                _chkconfig_bin,
                _srv,
                "on" if enable else "off",
            )
        else:
            # bugfix: previously _cmdline stayed unbound here, raising a
            # NameError on systems with none of the three tools installed
            self.log(
                "found no tool to {} service {}".format(
                    "enable" if enable else "disable",
                    _srv,
                ),
                logging_tools.LOG_LEVEL_ERROR
            )
            continue
        _stat, _out = commands.getstatusoutput(_cmdline)
        _lines = _out.split("\n")
        self.log(
            "{} gave [{:d}] {}".format(
                _cmdline,
                _stat,
                logging_tools.get_plural("line", len(_lines)),
            )
        )
        for _l_num, _line in enumerate(_lines, 1):
            self.log(" {:3d} {}".format(_l_num, _line))
def _call(self, cur_time, builder):
    """Periodic background task.

    On the first invocation only, synchronize the database association of
    virtual desktop protocols and window managers with the binaries that
    are actually installed on this host. Afterwards (every run) check all
    pending virtual desktop user settings.
    """
    # handle first run locally
    is_first_run = self.__first_run
    self.__first_run = False
    if is_first_run:
        # only check for services on first run; both object types follow
        # the exact same add/remove pattern, so use one helper
        self._sync_device_membership(
            virtual_desktop_protocol.objects.all(),
            "virtual desktop proto",
        )
        self._sync_device_membership(
            window_manager.objects.all(),
            "window manager",
        )
    for vdus in virtual_desktop_user_setting.objects.filter(
            device=self.__effective_device, to_delete=False):
        self.check_vdus_running(vdus, ignore_last_start_attempt=is_first_run)

def _sync_device_membership(self, objects, kind):
    """Add/remove the effective device from each object's device set.

    Membership is driven by whether the object's binary is present on
    this host. *kind* is the human-readable object type used in logs.
    """
    for obj in objects:
        _update = False
        available = process_tools.find_file(obj.binary)
        if obj.devices.filter(pk=self.__effective_device.pk):
            if not available:
                # binary vanished -> drop the association
                _update = True
                obj.devices.remove(self.__effective_device)
                self.log(
                    "Removing {} {} from {}".format(
                        kind, obj.name, self.__effective_device.name))
        else:
            if available:
                # binary showed up -> create the association
                _update = True
                obj.devices.add(self.__effective_device)
                self.log(
                    "Adding {} {} to {}".format(
                        kind, obj.name, self.__effective_device.name))
        if _update:
            obj.save()
def check_ipmi_settings(self):
    """Locate ipmitool and, when found, load the IPMI kernel modules.

    Sets self.it_command to the ipmitool path (or None).
    """
    cmd_name = "ipmitool"
    self.it_command = process_tools.find_file(cmd_name)
    if self.it_command:
        mp_command = process_tools.find_file("modprobe")
        self.log("found {} at {}".format(cmd_name, self.it_command))
        if mp_command:
            self.log("trying to load ipmi kernel modules")
            for kern_mod in ["ipmi_si", "ipmi_devintf"]:
                cmd = "{} {}".format(mp_command, kern_mod)
                c_stat, c_out = commands.getstatusoutput(cmd)
                self.log("calling '{}' gave ({:d}): {}".format(
                    cmd, c_stat, c_out))
        else:
            # bugfix: without this guard we would execute "None ipmi_si"
            self.log(
                "modprobe not found, cannot load ipmi kernel modules",
                logging_tools.LOG_LEVEL_WARN)
    else:
        self.log("cmd {} not found".format(cmd_name),
                 logging_tools.LOG_LEVEL_WARN)
def __init__(self, name):
    """Monitoring command wrapping the Supermicro SMCIPMITool binary.

    Locates the binary and parses its version string from the tool's
    bare (argument-less) output; unknown versions are mapped to a known
    default version.
    """
    hm_classes.MonitoringCommand.__init__(self, name, positional_arguments=True)
    self.parser.add_argument("--user", dest="user", type=str, default="ADMIN")
    self.parser.add_argument("--passwd", dest="passwd", type=str, default="ADMIN")
    self.parser.add_argument("--ip", dest="ip", type=str)
    self.parser.add_argument("--passive-check-prefix", type=str, default="-")
    self.__smcipmi_binary = process_tools.find_file(SMCIPMI_BIN)
    # version as an int, e.g. "V2.110" -> 2110; None when binary missing
    self.__smcipmi_version = None
    _KNOWN_VERSIONS = {2110, 2140}
    _DEF_VERSION = 2110
    if self.__smcipmi_binary:
        self.log("found {} at {}".format(SMCIPMI_BIN, self.__smcipmi_binary))
        _stat, _out = subprocess.getstatusoutput("{}".format(self.__smcipmi_binary))
        # bugfix: use a raw string for the regex; "\s"/"\d" in a plain
        # string are invalid escape sequences
        vers_re = re.compile(r"^smc\s*ipmi\s*tool\s*(?P<version>v\d+[^(]+).*$", re.IGNORECASE)
        for _line in _out.split("\n"):
            _match = vers_re.match(_line.strip())
            if _match:
                _vers_str = _match.group("version").replace("V", "")
                # "2.110" -> 2110
                _vers_int = int(_vers_str.replace(".", ""))
                if _vers_int in _KNOWN_VERSIONS:
                    self.log("found known version '{}' -> {:d}".format(_vers_str, _vers_int))
                else:
                    self.log(
                        "found unknown version '{}' -> {:d}, mapping to {:d}".format(
                            _vers_str,
                            _vers_int,
                            _DEF_VERSION,
                        ),
                        logging_tools.LOG_LEVEL_WARN
                    )
                    _vers_int = _DEF_VERSION
                self.__smcipmi_version = _vers_int
    else:
        self.log("no SMCIPMI binary found", logging_tools.LOG_LEVEL_WARN)
def _call(self, cur_time, builder):
    """Collect infiniband error counters and return machine vectors.

    Returns an empty list when no ibqueryerrors binary was found during
    init_bg_stuff().
    """
    self.__dl.check_freshness()
    if self._ibq_bin:
        # bugfix: use the binary cached in init_bg_stuff instead of
        # resolving "ibqueryerrors" again via find_file on every call
        _cmd = "{} --counters --errors --details -k -K 2>/dev/null".format(
            self._ibq_bin)
        _stat, _out = commands.getstatusoutput(_cmd)
        self.ibd.feed(_out)
        m_vectors = self.ibd.build_vectors(self.__dl)
    else:
        m_vectors = []
    # note: a long-dead "if False:" debug block (building a dummy
    # machine_vector entry) was removed here
    return m_vectors
def _call(self, cur_time, builder):
    """Run a quota check via repquota.

    Scans the repquota output, records usage, mails violations and
    optionally returns a machine vector (None otherwise).
    """
    separator = "-" * 64
    result_vector = None
    repquota_bin = process_tools.find_file("repquota")
    if repquota_bin is None:
        self.log("No repquota binary found", logging_tools.LOG_LEVEL_ERROR)
        return result_vector
    self.log(separator)
    self.log("starting quotacheck")
    quota_cmd = "{} -aniugp".format(repquota_bin)
    quota_stat, quota_out = subprocess.getstatusoutput(quota_cmd)
    if quota_stat:
        self.log(
            "Cannot call '{}' (stat={:d}): {}".format(
                quota_cmd, quota_stat, str(quota_out)),
            logging_tools.LOG_LEVEL_ERROR)
    else:
        q_dict, dev_dict = self._scan_repquota_output(quota_out)
        qcb_dict = self._create_base_db_entries(dev_dict)
        prob_devs, prob_objs, quota_cache = self._check_for_violations(q_dict)
        self._write_quota_usage(qcb_dict, quota_cache)
        if prob_devs:
            self._send_quota_mails(prob_devs, prob_objs, dev_dict)
        if self.Meta.creates_machvector:
            result_vector = self._create_machvector(builder, cur_time, quota_cache)
    self.log("quotacheck took {}".format(
        logging_tools.get_diff_time_str(time.time() - cur_time)))
    self.log(separator)
    return result_vector
def decompress_dmi_info(in_str):
    """Decompress a transported DMI dump and return it parsed as XML.

    The dump is written to a temporary file and fed through
    "dmidecode --from-dump".
    """
    _dmi_bin = process_tools.find_file("dmidecode")
    with tempfile.NamedTemporaryFile() as tmp_file:
        # bugfix: use a context manager so the dump is flushed and closed
        # before dmidecode reads it; the old file().write() left closing
        # to garbage-collection timing
        with open(tmp_file.name, "w") as dump_file:
            dump_file.write(server_command.decompress(in_str))
        _dmi_stat, dmi_result = commands.getstatusoutput(
            "{} --from-dump {}".format(_dmi_bin, tmp_file.name))
        _xml = dmi_struct_to_xml(parse_dmi_output(dmi_result.split("\n")))
    return _xml
def selinux_enabled():
    """Return True when SELinux is in enforcing mode.

    Queries the getenforce binary; returns False when the binary is
    missing or fails.
    """
    _bin = process_tools.find_file("getenforce")
    if _bin:
        try:
            c_out = subprocess.check_output(_bin)
        except subprocess.CalledProcessError:
            # bugfix: a failing getenforce previously raised out of here
            return False
        if isinstance(c_out, bytes):
            # bugfix: under python3 check_output returns bytes, which
            # would never compare equal to the "Enforcing" str literal
            c_out = c_out.decode("utf-8", "replace")
        return c_out.strip() == "Enforcing"
    else:
        return False
def _check_for_systemd(self):
    """Detect the init system and configure the matching service helpers.

    Sets self._method to "s" (systemd) or "i" (classic SysV init) and
    enumerates the available services.
    """
    # PID 1's cmdline contains "systemd" when systemd is the init system;
    # bugfix: use a context manager instead of an unclosed file() handle
    with open("/proc/1/cmdline", "r") as pid1_file:
        _has_systemd = pid1_file.read().count("system")
    if _has_systemd:
        self._method = "s"
        self._service_command = self._service_command_s
        self._systemctl = process_tools.find_file("systemctl")
        self.log("systemd detected, systemctl at {}".format(
            self._systemctl))
        self._get_systemd_services()
    else:
        self._method = "i"
        self._service_command = self._service_command_i
        self._chkconfig = process_tools.find_file("chkconfig")
        self._service = process_tools.find_file("service")
        self.log(
            "classic init detected, chkconfig at {}, service at {}".format(
                self._chkconfig, self._service))
        self._get_init_services()
    self.log("found {}".format(
        logging_tools.get_plural("service", len(self.__services))))
def _interpret_dmiinfo(self, dmi_dump):
    """Run dmidecode on a raw DMI dump and return the parsed XML."""
    with tempfile.NamedTemporaryFile() as tmp_file:
        # bugfix: close (and thereby flush) the dump before dmidecode
        # reads the file; the old open().write() relied on CPython GC
        # closing the handle immediately
        with open(tmp_file.name, "w") as dump_file:
            dump_file.write(dmi_dump)
        _dmi_stat, dmi_result = subprocess.getstatusoutput(
            "{} --from-dump {}".format(
                process_tools.find_file("dmidecode"),
                tmp_file.name,
            ))
    _dict = dmi_tools.parse_dmi_output(dmi_result.split("\n"))
    _xml = dmi_tools.dmi_struct_to_xml(_dict)
    return _xml
def selinux_enabled():
    """Return True when the selinuxenabled helper exists and exits cleanly."""
    _bin = process_tools.find_file("selinuxenabled")
    if not _bin:
        return False
    try:
        subprocess.check_output(_bin)
    except subprocess.CalledProcessError:
        return False
    return True
def init_bg_stuff(self):
    """Prepare infiniband monitoring: device lookup, binary, data store."""
    self.__dl = DeviceLookup(self.log)
    found_bin = process_tools.find_file("ibqueryerrors")
    if found_bin is None:
        self.log("no ibqueryerrors binary found, disabling",
                 logging_tools.LOG_LEVEL_ERROR)
        self._ibq_bin = None
    else:
        self.log("found ibqueryerrors at {}".format(found_bin))
        self._ibq_bin = found_bin
    self.ibd = IBDataStore()
def __init__(self, name, log_com):
    """Set up the certificate authority named *name* below CA_DIR.

    Reads existing certificates when the CA directory already exists.
    Note: _check_dir() must run before the CA paths are used.
    """
    self.log_com = log_com
    self.name = name
    # NOTE(review): the format() call has no placeholder, so the password
    # is always the literal mask "******" -- looks like a leftover;
    # confirm whether the name was meant to be part of the password
    self.password = "******".format(self.name)
    self._check_dir()
    self.ca_dir = os.path.join(CA_DIR, self.name)
    self.ssl_config_name = os.path.join(self.ca_dir, "openssl.cnf")
    # may be None when openssl is not installed
    self.openssl_bin = process_tools.find_file("openssl")
    self.log("openssl command found at {}".format(self.openssl_bin))
    self.ca_ok = os.path.isdir(self.ca_dir)
    self.certs = []
    if self.ca_ok:
        self._read_certs()
def load_kernel_modules(self):
    """Load the configured kernel modules via modprobe, exactly once."""
    if not self._modules_loaded:
        self._modules_loaded = True
        if self.kernel_modules:
            self.log("trying to load {}: {}".format(
                logging_tools.get_plural("kernel module",
                                         len(self.kernel_modules)),
                ", ".join(self.kernel_modules)))
            mp_command = process_tools.find_file("modprobe")
            if mp_command:
                for kern_mod in self.kernel_modules:
                    cmd = "{} {}".format(mp_command, kern_mod)
                    c_stat, c_out = subprocess.getstatusoutput(cmd)
                    self.log("calling '{}' gave ({:d}): {}".format(
                        cmd, c_stat, c_out))
            else:
                # bugfix: without this guard we would call "None <module>"
                self.log(
                    "modprobe not found, cannot load kernel modules",
                    logging_tools.LOG_LEVEL_WARN)
def __init__(self):
    """Main process pool of the image build server.

    Initialization order matters: process pool first, then the zmq
    logger, then the server check; any failed precondition triggers an
    internal error which requests shutdown before builders are spawned.
    """
    self.__start_time = time.time()
    self.__verbose = global_config["VERBOSE"]
    # log lines emitted before the logger exists are cached
    self.__log_cache, self.__log_template = ([], None)
    threading_tools.process_pool.__init__(
        self,
        "main",
        zmq=True,
    )
    self.register_exception("int_error", self._int_error)
    self.register_exception("term_error", self._int_error)
    self.register_func("compress_done", self._compress_done)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context
    )
    # log config
    self._log_config()
    # effective_device is falsy when this host is not an image server
    self.device = config_tools.server_check(service_type_enum=icswServiceEnum.image_server).effective_device
    if not self.device:
        self.log("not an image server", logging_tools.LOG_LEVEL_ERROR)
        self._int_error("not an image server")
    elif not process_tools.find_file("xmllint"):
        # xmllint is a hard requirement for the build
        self.log("xmllint not found", logging_tools.LOG_LEVEL_ERROR)
        self._int_error("xmllint not found")
    elif global_config["CLEAR_LOCK"] or global_config["SET_LOCK"]:
        # lock maintenance mode: toggle the build lock and exit
        cur_img = self._get_image()
        if global_config["CLEAR_LOCK"]:
            _info_str = "lock cleared"
            cur_img.build_lock = False
        else:
            _info_str = "lock set"
            cur_img.build_lock = True
        cur_img.save()
        self._int_error("{} on image {}".format(_info_str, unicode(cur_img)))
    else:
        self.log("image server is '{}'".format(unicode(self.device) if self.device else "---"))
        # spawn one builder subprocess per configured builder slot
        self.__builder_names = []
        for cur_num in xrange(global_config["BUILDERS"]):
            builder_name = "builder_{:d}".format(cur_num)
            self.__builder_names.append(builder_name)
            self.add_process(BuildProcess(builder_name), start=True)
        # close the inherited DB connection in the parent process
        db_tools.close_connection()
    self.__build_lock = False
    if not self["exit_requested"]:
        self.init_build()
def __init__(self, mv, log_com):
    """Detect pcp_proc_info and enable pgpool monitoring when configured.

    Monitoring stays disabled when either the binary or the config
    store is missing.
    """
    self.__log_com = log_com
    self.mv = mv
    self.mv_keys = set()
    self._pcp_proc_info = process_tools.find_file("pcp_proc_info")
    self.enabled = False
    if self._pcp_proc_info:
        self.log("found pcp_proc_info at {}".format(self._pcp_proc_info))
        if config_store.ConfigStore.exists(CSTORE_NAME):
            self.enabled = True
            self._pgpool_config = config_store.ConfigStore(
                CSTORE_NAME, log_com=self.log)
        else:
            self.log("no config_store named {} found".format(CSTORE_NAME),
                     logging_tools.LOG_LEVEL_WARN)
    else:
        # bugfix: corrected typo "foudn" in the log message
        self.log("found no pcp_proc_info, disabled monitoring",
                 logging_tools.LOG_LEVEL_WARN)
def __init__(self, log_com, srv_com, hp_com):
    """Build the hpasm command line for *hp_com* and init the subprocess.

    When the binary is missing, an error result is stored on *srv_com*
    and an empty command line is used.
    """
    self.__log_com = log_com
    self.__hp_com = hp_com
    _bin = process_tools.find_file(HPASM_BIN)
    if _bin:
        command_line = ["{} -s '{}'".format(
            _bin,
            hp_com.Meta.command,
        )]
    else:
        srv_com.set_result(
            "Failed to locate a binary named \"{}\"".format(HPASM_BIN),
            server_command.SRV_REPLY_STATE_ERROR)
        command_line = []
    hm_classes.subprocess_struct.__init__(
        self,
        srv_com,
        command_line,
    )
def _setup_rsync(self):
    """Decide whether RRD files must be rsynced from a RAM disk to disk.

    rsync is enabled only when RRD_DIR is RAM-backed, the rsync binary
    exists and RRD_DISK_CACHE differs from RRD_DIR.
    """
    ram_backed = any(
        part.mountpoint == global_config["RRD_DIR"] and part.fstype in [
            "tmpfs", "ramdisk"
        ]
        for part in psutil.disk_partitions(all=True)
    )
    self._rsync_bin = process_tools.find_file("rsync")
    self.log("{} is{} a RAM-disk, rsync binary is at {} ...".format(
        global_config["RRD_DIR"],
        "" if ram_backed else " not",
        self._rsync_bin,
    ))
    self.do_rsync = bool(
        ram_backed and self._rsync_bin and
        global_config["RRD_DISK_CACHE"] != global_config["RRD_DIR"]
    )
    if self.do_rsync:
        self.log("rsync for RRDs is enabled")
    else:
        self.log("rsync for RRDs is disabled")
def init_module(self):
    """Cache the path of the lsmod binary (None when not installed)."""
    lsmod_path = process_tools.find_file("lsmod")
    self.lsmod_command = lsmod_path
def check_for_smartctl(self):
    """(Re-)locate smartctl; scan devices when it just became available."""
    # idiom: bool(x) instead of "True if x else False"
    _was_there = bool(self.smartctl_bin)
    self.smartctl_bin = process_tools.find_file("smartctl")
    if not _was_there and self.smartctl_bin:
        self._check_devices()
def _find_smi_command(self):
    """Locate the SMI binary; in debug mode fall back to /bin/true."""
    target = "true" if _DEBUG else COM_NAME
    self.__smi_command = process_tools.find_file(target)
def _database_backup(self, bu_dir):
    """Dump the default Django database into *bu_dir*.

    Currently only postgresql is supported (via pg_dump); a full dump
    and a schema-only dump are created. Credentials are passed through
    a temporary /root/.pgpass file which is restored afterwards.
    """
    bu_name = datetime.datetime.now().strftime(
        "db_bu_database_%Y%m%d_%H:%M:%S")
    full_path = os.path.join(
        bu_dir,
        bu_name,
    )
    _def_db = settings.DATABASES.get("default", None)
    if not _def_db:
        self.log("no default database found", logging_tools.LOG_LEVEL_ERROR)
    else:
        self.log("found default database, keys:")
        for _key in sorted(_def_db.keys()):
            self.log(" {}={}".format(_key, _def_db[_key]))
        _engine = _def_db.get("ENGINE", "unknown").split(".")[-1]
        # map old to new values
        _engine = {
            "postgresql_psycopg2": "postgresql"
        }.get(_engine, _engine)
        # per-engine backup recipe: dump binary + command templates
        bu_dict = {
            "postgresql": {
                "dump_bin": "pg_dump",
                "cmdlines": [
                    "{DUMP} -c -f {FILENAME}.psql -F c -Z 4 -h {HOST} -U {USER} {NAME} -w {EXCLUDE}",
                    "{DUMP} -f {FILENAME}.schema.psql -F c -Z 4 -h {HOST} -U {USER} {NAME} --schema-only -w {SCHEMA_ONLY}",
                ],
                "pgpass": True
            }
        }
        if _engine in bu_dict:
            _bu_info = bu_dict[_engine]
            _bin = process_tools.find_file(_bu_info["dump_bin"])
            if not _bin:
                self.log(
                    "cannot find dump binary {}".format(
                        _bu_info["dump_bin"]),
                    logging_tools.LOG_LEVEL_ERROR)
            else:
                self.log("found dump binary {} in {}".format(
                    _bu_info["dump_bin"], _bin))
                for _line in _bu_info["cmdlines"]:
                    # fill in the template: binary, target file, the
                    # ignore list (as -T excludes / -t schema-only
                    # selection) plus the DB connection settings
                    cmdline = _line.format(
                        DUMP=_bin,
                        FILENAME=full_path,
                        EXCLUDE=" ".join([
                            "-T {}".format(_ignore)
                            for _ignore in self.get_ignore_list(True)
                        ]),
                        SCHEMA_ONLY=" ".join([
                            "-t {}".format(_ignore)
                            for _ignore in self.get_ignore_list(True)
                        ]),
                        **_def_db)
                    start_time = time.time()
                    _pgpass = _bu_info.get("pgpass", False)
                    if _pgpass:
                        # save any pre-existing pgpass file, then write
                        # our own credentials line
                        _pgpassfile = "/root/.pgpass"
                        if os.path.exists(_pgpassfile):
                            _passcontent = open(_pgpassfile, "r").read()
                        else:
                            _passcontent = None
                        open(_pgpassfile, "w").write(
                            "{HOST}:*:{NAME}:{USER}:{PASSWORD}\n".format(
                                **_def_db))
                        os.chmod(_pgpassfile, 0o600)
                    try:
                        _output = subprocess.check_output(
                            cmdline.split(), stderr=subprocess.PIPE)
                    except subprocess.CalledProcessError:
                        self.log(
                            "error calling {}: {}".format(
                                cmdline,
                                process_tools.get_except_info(),
                            ),
                            logging_tools.LOG_LEVEL_ERROR)
                    else:
                        end_time = time.time()
                        self.log("successfully called {} in {}: {}".format(
                            cmdline,
                            logging_tools.get_diff_time_str(end_time - start_time),
                            _output,
                        ))
                    if _pgpass:
                        # restore the previous pgpass file (or remove ours)
                        if _passcontent:
                            open(_pgpassfile, "w").write(_passcontent)
                            os.chmod(_pgpassfile, 0o600)
                        else:
                            os.unlink(_pgpassfile)
        else:
            self.log(
                "unsupported engine '{}' for database backup".format(
                    _engine),
                logging_tools.LOG_LEVEL_WARN)
def init_module(self):
    """Parse the mail log once and locate the mailq binary."""
    log_obj = MailLogObject(self)
    log_obj.parse_lines()
    self.__maillog_object = log_obj
    self.__mailq_command = process_tools.find_file("mailq")
import codecs import email import os import re import subprocess import sys import tempfile from StringIO import StringIO from email.parser import FeedParser from lxml import etree from initat.tools import logging_tools, process_tools SENDMAIL_BIN = process_tools.find_file("sendmail") SPAMC_BIN = process_tools.find_file("spamc") class disclaimer_handler(object): def __init__(self): self._log_template = logging_tools.get_logger( "disclaimer", "uds:/var/lib/logging-server/py_log_zmq", zmq=True ) self.args = sys.argv self.log("{:d} args: {}".format(len(self.args), ", ".join(self.args))) self.log("sendmail is at {}".format(SENDMAIL_BIN)) self._read_user_info()
def __init__(self, name):
    """Locate openssl, then initialise the base monitoring command."""
    openssl_path = process_tools.find_file("openssl")
    self._openssl_command = openssl_path
    hm_classes.hm_command.__init__(self, name, positional_arguments=True)
def mc_init__(self):
    """Cache the path of the openssl binary for later checks."""
    openssl_path = process_tools.find_file("openssl")
    self._openssl_command = openssl_path
def main(options):
    """Inspect (or archive) the logging-server error file.

    Modes (from *options*): --clear tars and removes the error file;
    otherwise the file is parsed into ErrorRecord groups which are shown
    as an overview, per-uid statistics (--stat), the last N records
    (--num) or by explicit indices (--index).
    """
    # overview is the default mode when no other mode was requested
    options.overview = True if (not options.stat and not options.index and not options.num) else False
    options.index = [int(cur_idx) for cur_idx in options.index]
    err_file_name = os.path.join(LOG_ROOT, "logging-server", "err_py")
    if not os.path.isfile(err_file_name):
        print("{} does not exist".format(err_file_name))
        sys.exit(1)
    if options.clear:
        # archive the error file with the best available compressor,
        # then delete the original
        new_file_name = "{}_{}.tar".format(
            err_file_name,
            time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime()))
        if process_tools.find_file("xz"):
            _pf = ".xz"
            _compr = "J"
            c_stat, out = commands.getstatusoutput("tar cpJf {}{} {}".format(
                new_file_name, _pf, err_file_name))
        elif process_tools.find_file("bzip2"):
            _pf = ".bz2"
            _compr = "j"
            c_stat, out = commands.getstatusoutput("tar cpjf {}{} {}".format(
                new_file_name, _pf, err_file_name))
        else:
            _pf = ""
            _compr = ""
            print("taring {} to {}{} ...".format(err_file_name, new_file_name, _pf))
            c_stat, out = commands.getstatusoutput("tar cp{}f {}{} {}".format(
                _compr, new_file_name, _pf, err_file_name))
        if c_stat:
            print("*** error (%d): %s" % (c_stat, out))
        else:
            os.unlink(err_file_name)
        sys.exit(c_stat)
    try:
        # only lines carrying "from pid" are real error lines
        err_lines = [
            line.strip() for line in file(err_file_name, "r").read().split("\n")
            if line.count("from pid")
        ]
    except IOError:
        print("Cannot read '{}': {}".format(err_file_name, process_tools.get_except_info()))
        sys.exit(1)
    print("Found error_file {} with {}".format(
        err_file_name,
        logging_tools.get_plural("line", len(err_lines))))
    errs_found, act_err = ([], None)
    act_idx, idx_dict, prev_dt = (0, {}, None)
    for line in err_lines:
        line_parts = line.split(":")
        # date is always the first 4 parts
        line_date = ":".join(line_parts[0:3]).strip()
        info_part = line_parts[3].strip()
        err_line = ":".join(line_parts[4:])
        # parse info_part
        try:
            if info_part.startswith("("):
                line_state = ""
            else:
                line_state = info_part.split()[0]
                info_part = info_part[len(line_state):].strip()
            info_parts = info_part.split()
            # skip error-thread name and "from pid" string
            info_parts.pop(0)
            info_parts.pop(0)
            info_parts.pop(0)
        except:
            print("Error pre-parsing line '{}': {}".format(
                line,
                process_tools.get_except_info()))
        else:
            try:
                # get pid
                line_pid = int(info_parts.pop(0))
                # unknown or full source
                if len(info_parts) == 7:
                    # full source: service name plus uid/uname gid/gname
                    line_s_name = info_parts[0][1:]
                    line_uid = int(info_parts[2])
                    line_uname = info_parts[3][1:-2]
                    line_gid = int(info_parts[5])
                    line_gname = info_parts[6][1:-3]
                else:
                    line_s_name = info_parts[0][1:-1]
                    line_uid, line_gid = (-1, -1)
                    line_uname, line_gname = ("unknown", "unknown")
                cur_dt = datetime.datetime.strptime(line_date, "%a %b %d %H:%M:%S %Y")
                # a >5 second gap starts a new error record
                if prev_dt:
                    dt_change = abs(cur_dt - prev_dt).seconds > 5
                else:
                    dt_change = False
                prev_dt = cur_dt
                # start a new record on pid change, time gap or traceback marker
                if not act_err or act_err.pid != line_pid or dt_change or line.count(
                        "<type"):
                    act_idx += 1
                    act_err = ErrorRecord(
                        line_pid,
                        line_s_name,
                        line_uid,
                        line_uname,
                        line_gid,
                        line_gname,
                    )
                    act_err.set_idx(act_idx)
                    idx_dict[act_idx] = act_err
                    errs_found.append(act_err)
                if err_line.strip() or not options.noempty:
                    act_err.add_line(line_date, line_state, err_line)
            except:
                print("Error parsing line '%s': %s" % (line, process_tools.get_except_info()))
    print("Found {}".format(
        logging_tools.get_plural("error record", len(errs_found))))
    if options.overview:
        if errs_found:
            out_list = logging_tools.new_form_list()
            for err in errs_found:
                out_list.append(err.get_form_parts())
            print(unicode(out_list))
    elif options.stat:
        # group records per uid and show aggregate counts
        uid_dict = {}
        for err in errs_found:
            uid_dict.setdefault(err.uid, []).append(err)
        all_uids = uid_dict.keys()
        all_uids.sort()
        out_list = logging_tools.new_form_list()
        for uid in all_uids:
            uid_stuff = uid_dict[uid]
            diff_sources = []
            for err in uid_stuff:
                if err.source_name not in diff_sources:
                    diff_sources.append(err.source_name)
            diff_sources.sort()
            out_list.append((
                logging_tools.form_entry(uid, header="uid"),
                logging_tools.form_entry(uid_stuff[0].uname, header="uname"),
                logging_tools.form_entry(len(uid_stuff), header="# err"),
                logging_tools.form_entry(len(diff_sources), header="# sources"),
                logging_tools.form_entry(", ".join(diff_sources), header="sources"),
            ))
        print(unicode(out_list))
    elif options.num:
        # show the most recent N records by converting --num into indices
        idx_l = idx_dict.keys()
        idx_l.sort()
        idx_show = []
        while options.num and idx_l:
            options.num -= 1
            idx_show.append(idx_l.pop(-1))
        idx_show.reverse()
        options.index = idx_show
    if options.index:
        for idx in options.index:
            if idx in idx_dict:
                act_err = idx_dict[idx]
                print(act_err.get_header())
                print(act_err.show_lines())
            else:
                print("Index {:d} not in index_list {}".format(
                    idx,
                    logging_tools.compress_num_list(idx_dict.keys())))
def _find_ipsec_command(self):
    """Locate the ipsec binary and remember its path (None when missing)."""
    ipsec_path = process_tools.find_file("ipsec")
    self.__ipsec_command = ipsec_path