def process_init(self):
    # rrd-cache process init: logging, RRD paths, coverage list and the
    # optional periodic RAM-to-disk sync.
    # global_config is closed first because this runs in a forked process.
    global_config.close()
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context,
        init_logger=True)
    # drop the inherited DB connection so this process opens its own
    db_tools.close_connection()
    self.rrd_cache_socket = global_config["RRD_CACHED_SOCKET"]
    self.rrd_root = global_config["RRD_DIR"]
    # collect all RRD_COVERAGE* keys; values are coverage pattern strings
    cov_keys = [
        _key for _key in global_config.keys() if _key.startswith("RRD_COVERAGE")
    ]
    self.rrd_coverage = [global_config[_key] for _key in cov_keys]
    self.log("RRD coverage: {}".format(", ".join(self.rrd_coverage)))
    # check RRD sizes every 6 hours, first check almost immediately
    self.register_timer(self.check_size, 6 * 3600, first_timeout=1)
    self.__verbose = global_config["VERBOSE"]
    # _setup_rsync() is expected to set self.do_rsync (TODO confirm)
    self._setup_rsync()
    if self.do_rsync:
        self.log(
            "enabling periodic RAM-to-disk sync from {} to {} every {}".format(
                global_config["RRD_DIR"],
                global_config["RRD_DISK_CACHE"],
                logging_tools.get_diff_time_str(
                    global_config["RRD_DISK_CACHE_SYNC"]),
            ))
        self.register_timer(self.sync_from_ram_to_disk, global_config["RRD_DISK_CACHE_SYNC"])
def process_init(self):
    # aggregation process init: logging, memcache address, sockets and timer
    global_config.close()
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context,
        init_logger=True
    )
    db_tools.close_connection()
    self.__debug = global_config["DEBUG"]
    # cache address: host part of MEMCACHE_ADDRESS (any port suffix there
    # is dropped), port taken from MEMCACHE_PORT
    self.__memcache_address = [
        "{}:{:d}".format(
            global_config["MEMCACHE_ADDRESS"].split(":")[0],
            global_config["MEMCACHE_PORT"],
        )
    ]
    # last update of aggregation structure
    self.__struct_update = None
    # cache for filtered values
    self.__vector_filter_cache = {}
    self.init_sockets()
    self.init_ag_xml()
    # aggregate every 30 seconds; first run after one second, not instantly
    self.register_timer(self.aggregate, 30, instant=False, first_timeout=1)
def process_init(self):
    # RMS control process init: logging, SGE info, network and command hooks
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context,
    )
    db_tools.close_connection()
    self._init_cache()
    # default display options for node / running-job queries
    self.__node_options = sge_tools.get_empty_node_options()
    self.__run_options = sge_tools.get_empty_job_options(
        suppress_times=True,
        suppress_nodelist=True,
        show_stdoutstderr=False,
    )
    self._init_network()
    self._init_sge_info()
    # job content dict
    self.__job_content_dict = {}
    # pinning dict
    self.__job_pinning_dict = {}
    self.register_func("get_config", self._get_config)
    self.register_func("job_control", self._job_control)
    self.register_func("queue_control", self._queue_control)
    self.register_func("file_watch_content", self._file_watch_content)
    self.register_func("affinity_info", self._affinity_info)
    self.register_func("job_ended", self._job_ended)
    self.register_func("full_reload", self._full_reload)
    # job stop/start info
    self.register_timer(self._update_nodes, 30, first_timeout=5)
    if global_config["TRACE_FAIRSHARE"]:
        self.log("register fairshare tracer")
        self.register_timer(self._update_fairshare, 60, instant=True)
def main():
    """Read syslog lines from stdin and forward the message part via ZMQ.

    Lines have the form '<timestamp> <host> <message>'; an empty line
    terminates the loop.  The send socket is opened lazily on the first
    successfully parsed line and closed on shutdown.
    """
    zmq_context = zmq.Context()
    log_template = logging_tools.get_logger(
        "syslog_scan",
        get_log_path(icswLogHandleTypes.log_py),
        context=zmq_context
    )
    send_sock = None
    log_template.log(logging_tools.LOG_LEVEL_OK, "starting syslog_scan")
    while True:
        line = sys.stdin.readline().strip()
        if not line:
            break
        try:
            _timestamp, host, msg = line.split(None, 2)
        except ValueError:
            # was a bare except; unpacking a short line raises ValueError
            log_template.log(
                logging_tools.LOG_LEVEL_ERROR,
                "error parsing line {}: {}".format(line, process_tools.get_except_info())
            )
        else:
            log_template.log("got line from {}: {}".format(host, msg))
            if not send_sock:
                # open the forwarding socket lazily on first valid line
                send_sock = open_socket(zmq_context)
            send_sock.send_unicode(msg)
    if send_sock:
        send_sock.close()
    log_template.log(logging_tools.LOG_LEVEL_OK, "received empty line, exiting")
    log_template.close()
    zmq_context.term()
def process_init(self):
    """Set up logging, register handlers/timers and detect the repository type."""
    global_config.close()
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context,
        init_logger=True
    )
    # close database connection
    db_tools.close_connection()
    for _name, _handler in [
        ("rescan_repos", self._rescan_repos),
        ("reload_searches", self._reload_searches),
        ("clear_cache", self._clear_cache),
        ("search", self._search),
    ]:
        self.register_func(_name, _handler)
    self._correct_search_states()
    self.__background_commands = []
    self.register_timer(self._check_delayed, 1)
    # pick the repository flavour from the distribution marker files
    if os.path.isfile("/etc/centos-release") or os.path.isfile("/etc/redhat-release"):
        self.repo_type = RepoTypeRpmYum(self)
    elif os.path.isfile("/etc/debian_version"):
        self.repo_type = RepoTypeDebDebian(self)
    else:
        self.repo_type = RepoTypeRpmZypper(self)
def process_init(self):
    # hm ... keep a reference so helpers can reach the config via self
    self.global_config = global_config
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context)
    db_tools.close_connection()
    # discovery entry points callable from other processes
    self.register_func("fetch_partition_info", self._fetch_partition_info)
    self.register_func("scan_network_info", self._scan_network_info)
    self.register_func("scan_system_info", self._scan_system_info)
    self.register_func("snmp_basic_scan", self._snmp_basic_scan)
    self.register_func("snmp_result", self._snmp_result)
    self.register_func("base_scan", self._base_scan)
    self.register_func("wmi_scan", self._wmi_scan)
    self.register_func("ext_con_result", self._ext_con_result)
    self.register_func("host_monitor_result", self._host_monitor_result)
    self.EC.init(global_config)
    # resolve this server's device / config rows; a missing row raises
    # DoesNotExist (deliberately unhandled — misconfiguration is fatal)
    self._server = device.objects.get(Q(pk=global_config["SERVER_IDX"]))
    self._config = config.objects.get(Q(pk=global_config["CONFIG_IDX"]))
    self.__run_idx = 0
    # global job list
    self.__job_list = []
    self.__pending_commands = {}
    self._init_subsys()
def process_init(self):
    """Create the process logger and attach the client configuration store."""
    log_name = global_config["LOG_NAME"]
    log_dest = global_config["LOG_DESTINATION"]
    self.__log_template = logging_tools.get_logger(
        log_name,
        log_dest,
        zmq=True,
        context=self.zmq_context,
        init_logger=True,
    )
    self.CS = config_store.ConfigStore("client", self.log)
def process_init(self):
    """Init logging, drop the inherited DB connection and register the backup hook."""
    global_config.enable_pm(self)
    log_name = global_config["LOG_NAME"]
    log_dest = global_config["LOG_DESTINATION"]
    self.__log_template = logging_tools.get_logger(
        log_name,
        log_dest,
        context=self.zmq_context
    )
    db_tools.close_connection()
    self.register_func("start_backup", self._start_backup)
def create_logger(self):
    """Create the per-client logger (LOG_NAME.<client>) and log the creation.

    Dots in the client name are escaped so the logging server does not
    treat them as hierarchy separators.
    """
    self.__log_template = logging_tools.get_logger(
        "{}.{}".format(global_config["LOG_NAME"], self.name.replace(".", r"\.")),
        global_config["LOG_DESTINATION"],
        context=build_client.srv_process.zmq_context,
    )
    # fixed: the log message had unbalanced brackets ("[{:d})" -> "[{:d}])")
    self.log("added client ({} [{:d}])".format(self.name, self.pk))
def __init__(self):
    """Create the per-UID JSV logger and reset state, environment and parameters."""
    uid = os.getuid()
    self.__log_template = logging_tools.get_logger(
        "jsv_{:d}".format(uid),
        get_log_path(icswLogHandleTypes.log_py),
    )
    self.__state = "initialized"
    self.env, self.param = {}, {}
def process_init(self):
    """Create the logger, register the command handlers and detach from the DB."""
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context)
    for _cmd, _handler in [
        ("set_option_dict", self._set_option_dict),
        ("set_srv_com", self._set_srv_com),
        ("start_command", self._start_command),
    ]:
        self.register_func(_cmd, _handler)
    db_tools.close_connection()
def process_init(self):
    """Enable process monitoring, create the logger and register the status hook."""
    global_config.enable_pm(self)
    log_name = global_config["LOG_NAME"]
    log_dest = global_config["LOG_DESTINATION"]
    self.__log_template = logging_tools.get_logger(
        log_name,
        log_dest,
        context=self.zmq_context,
    )
    self.register_func("get_node_status", self._get_node_status)
    # no socket opened yet
    self.__socket = None
def process_init(self):
    """Store verbosity, create the logger and register the compress handler."""
    self.__verbose = global_config["VERBOSE"]
    log_name = global_config["LOG_NAME"]
    log_dest = global_config["LOG_DESTINATION"]
    self.__log_template = logging_tools.get_logger(
        log_name,
        log_dest,
        context=self.zmq_context,
    )
    db_tools.close_connection()
    self.register_func("compress", self._compress)
def __init__(self):
    """Create the per-UID JSV logger (via the logging-server UDS) and reset state."""
    uid = os.getuid()
    self.__log_template = logging_tools.get_logger(
        "jsv_{:d}".format(uid),
        "uds:/var/lib/logging-server/py_log_zmq",
        zmq=True,
    )
    self.__state = "initialized"
    self.env, self.param = {}, {}
def process_init(self):
    """Configure the logger from the parsed options and build the test log string."""
    opts = self.__options
    self.__log_template = logging_tools.get_logger(
        opts.log_name,
        get_log_path(icswLogHandleTypes(opts.handle)),
        context=self.zmq_context
    )
    self.__log_template.log_command("set_max_line_length {:d}".format(256))
    # repeat the joined argument string 'mult' times
    self.__log_str = opts.mult * (" ".join(opts.args))
    self.log("log_str has {}".format(logging_tools.get_plural("byte", len(self.__log_str))))
    self.register_func("start_logging", self._start_logging)
def create_logger(self):
    """Lazily create the per-client logger (no-op if it already exists)."""
    if self.__log_template is not None:
        return
    self.__log_template = logging_tools.get_logger(
        "{}.{}".format(global_config["LOG_NAME"], self.name.replace(".", r"\.")),
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=Client.srv_process.zmq_context,
        init_logger=True,
    )
    self.log("added client")
def process_init(self):
    """Init logging and schedule the periodic graph / RRD housekeeping timers."""
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context,
    )
    db_tools.close_connection()
    # housekeeping: purge old graphs every minute, stale RRDs every hour
    for _callback, _interval in [
        (self._clear_old_graphs, 60),
        (self._check_for_stale_rrds, 3600),
    ]:
        self.register_timer(_callback, _interval, instant=True)
def __init__(self):
    # disclaimer handler: log the invocation arguments and load user info
    self._log_template = logging_tools.get_logger(
        "disclaimer",
        "uds:/var/lib/logging-server/py_log_zmq",
        zmq=True
    )
    # full argv, including the script name itself
    self.args = sys.argv
    self.log("{:d} args: {}".format(len(self.args), ", ".join(self.args)))
    self.log("sendmail is at {}".format(SENDMAIL_BIN))
    self._read_user_info()
def create_logger(self):
    # Lazily create the per-device logger; dots in the device name are
    # escaped so the logging server does not treat them as separators.
    if self.__log_template is None:
        self.__log_template = logging_tools.get_logger(
            "{}.{}".format(
                global_config["LOG_NAME"],
                self.device.full_name.replace(".", r"\.")
            ),
            global_config["LOG_DESTINATION"],
            context=ConfigControl.srv_process.zmq_context,
        )
        self.log("added client %s (%s)" % (str(self.device), self.device.uuid))
def create_logger(self):
    """Lazily create the per-device logger and log device name plus uuid.

    Dots in the device name are escaped so the logging server does not
    treat them as hierarchy separators.
    """
    if self.__log_template is None:
        self.__log_template = logging_tools.get_logger(
            "%s.%s" % (global_config["LOG_NAME"], self.device.full_name.replace(".", r"\.")),
            global_config["LOG_DESTINATION"],
            zmq=True,
            context=config_control.srv_process.zmq_context,
            init_logger=True)
        # fixed: unicode() does not exist on Python 3 — use str(), matching
        # the parallel create_logger implementation in this codebase
        self.log("added client %s (%s)" % (str(self.device), self.device.uuid))
def process_init(self):
    """Create the logger, register the SNMP control hooks and init the dispatcher."""
    self.__log_template = logging_tools.get_logger(
        self.__log_name,
        self.__log_destination,
        context=self.zmq_context)
    self.__return_proc_name = None
    for _name, _handler in [
        ("fetch_snmp", self._fetch_snmp),
        ("register_return", self._register_return),
        ("trigger_timeout", self._trigger_timeout),
    ]:
        self.register_func(_name, _handler)
    self._init_dispatcher()
    # per-job bookkeeping structures
    self.__job_dict = {}
    self.__envelope_dict = {}
    self.__req_id_lut = {}
def process_init(self):
    """Set up logging and register the asset-processing entry points."""
    global_config.close()
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context,
    )
    # self.add_process(BuildProcess("build"), start=True)
    db_tools.close_connection()
    for _name, _handler in [
        ("process_assets", self._process_assets),
        ("process_batch_assets", self._process_batch_assets),
    ]:
        self.register_func(_name, _handler)
def process_init(self):
    """Init logging and register the monitoring / passive-check handlers."""
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context,
    )
    db_tools.close_connection()
    for _name, _handler in [
        ("monitoring_info", self._monitoring_info),
        ("passive_check_result", self._pcr),
        ("passive_check_results_as_chunk", self._pcrs_as_chunk),
    ]:
        self.register_func(_name, _handler)
def process_init(self):
    # resolver process init: logging plus a greedy "resolve" handler
    global_config.close()
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context)
    # log.startLoggingWithObserver(my_observer, setStdout=False)
    self.__debug = global_config["DEBUG"]
    # greedy: the handler may consume all queued "resolve" messages at once
    self.register_func("resolve", self._resolve, greedy=True)
    # clear flag for extra twisted thread
    self.__cache = {}
def process_init(self):
    # kernel-sync process init
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context,
    )
    # close database connection
    db_tools.close_connection()
    self.register_func("rescan_kernels", self._rescan_kernels)
    # server-check result for the kernel_server service
    self.kernel_dev = config_tools.icswServerCheck(
        service_type_enum=icswServiceEnum.kernel_server).get_result()
def process_init(self):
    """Init logging and schedule the periodic update (faster cadence in debug mode)."""
    global_config.close()
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        zmq=True,
        context=self.zmq_context,
        init_logger=True)
    db_tools.close_connection()
    # 30 s while debugging, 5 minutes otherwise; never fire instantly
    if global_config["DEBUG"]:
        update_interval = 30
    else:
        update_interval = 300
    self.register_timer(self.update, update_interval, instant=False)
def get_logger(name, options, **kwargs):
    """Return a log callable: stdout-based when requested, file logger otherwise.

    Verbosity for the stdout logger comes from options.logall or the
    'all' keyword argument.
    """
    if options.logger == "stdout":
        verbose = bool(options.logall or kwargs.get("all", False))
        return _get_logger(verbose)
    return logging_tools.get_logger(
        "icsw_{}".format(name),
        get_log_path(icswLogHandleTypes.log_py),
    ).log
def process_init(self):
    """Init logging, register the generate hook and reset the job bookkeeping."""
    global_config.enable_pm(self)
    log_name = global_config["LOG_NAME"]
    log_dest = global_config["LOG_DESTINATION"]
    self.__log_template = logging_tools.get_logger(
        log_name,
        log_dest,
        context=self.zmq_context
    )
    db_tools.close_connection()
    self.register_func("generate", self._generate)
    # job bookkeeping: run counter, global job list, commands in flight
    self.__run_idx = 0
    self.__job_list = []
    self.__pending_commands = {}
def process_init(self):
    # license-usage tracker init: SGE info, network and periodic update
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context,
    )
    db_tools.close_connection()
    self._init_sge_info()
    self._init_network()
    # job stop/start info
    self.__elo_obj = None
    # update every 30 seconds, first run immediately
    self.register_timer(self._update, 30, instant=True)
    self.register_func("get_license_usage", self.get_license_usage)
def process_init(self):
    """Init logging, schedule the periodic update and register the KPI hooks."""
    global_config.enable_pm(self)
    self.__log_template = logging_tools.get_logger(
        global_config["LOG_NAME"],
        global_config["LOG_DESTINATION"],
        context=self.zmq_context,
    )
    db_tools.close_connection()
    # fast cadence while debugging, 5 minutes in production
    update_interval = 60 if global_config["DEBUG"] else 300
    self.register_timer(self.periodic_update, update_interval, instant=True)
    for _name, _handler in [
        ("get_kpi_source_data", self._get_kpi_source_data),
        ("calculate_kpi_preview", self._calculate_kpi_preview),
        ("calculate_kpi_db", self._calculate_kpi_db),
    ]:
        self.register_func(_name, _handler)