def remove_logger(config, found_workers):
    """Remove the highest-numbered logger worker and its config entry.

    Deletes the worker's .py file and its hidden .position file, and drops
    its section from the logging-config yaml. A backup copy of the yaml is
    taken first so any failure rolls the config back.

    Params:
        config: app config dict; uses 'loggers_abspath' and
                config['logger']['logging_config'].
        found_workers: existing worker names, e.g. ['logger', 'logger-1'].
    """
    import yaml

    NN = len(found_workers) - 1
    if NN == 0:
        # never remove the last remaining worker
        elog.warn("no logger can be removed")
        return

    loggerPrefix = found_workers[0].split('-')[0]
    delLoggerName = "%s-%d" % (loggerPrefix, NN)
    # scan upward for an existing numbered worker to delete
    while delLoggerName not in found_workers and NN < LOGGER_WORKERS_MAX:
        NN = NN + 1
        delLoggerName = "%s-%d" % (loggerPrefix, NN)

    if delLoggerName not in found_workers:
        elog.warn("no logger can be removed")
        return

    # remove file loggerNN:
    loggerNN = os.path.join(config['loggers_abspath'], "%s.py" % delLoggerName)
    loggerNNPosition = os.path.join(config['loggers_abspath'], ".%s.position" % delLoggerName)
    elog.info("%s: %s", delLoggerName, loggerNN)

    # hoisted out of try so the rollback path always has valid names
    loggingConfigYaml = config['logger']['logging_config']
    loggingConfigYamlDefault = "%s.0" % loggingConfigYaml
    try:
        # backup first so the except-branch can always roll back
        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)

        # fix: safe_load instead of unsafe/deprecated yaml.load; 'with'
        # guarantees the handle is closed even when parsing raises
        with open(loggingConfigYaml) as fr:
            cfg = yaml.safe_load(fr)

        del cfg['loggers'][delLoggerName]

        fd = util.open_file(loggingConfigYaml)
        try:
            yaml.dump(cfg, fd, default_flow_style=False)
        finally:
            fd.close()

        os.remove(loggerNN)
        os.remove(loggerNNPosition)

        # refresh the backup to match the new good state
        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)
        elog.info("success: %s", delLoggerName)
    except Exception:
        # fix: bare 'except:' also swallowed SystemExit/KeyboardInterrupt
        shutil.copy(loggingConfigYamlDefault, loggingConfigYaml)
        elog.error("failed: %s", delLoggerName)
def onSigChld(signo, frame):
    """SIGCHLD handler: reap one exited child and signal shutdown.

    Non-blocking waitpid; when a child was actually reaped, push an EXIT
    message onto the shared queue and set the shared exit event.
    """
    global exit_queue, exit_event
    pid, status = os.waitpid(-1, os.WNOHANG)
    if not pid:
        return
    elog.error("child#%d on signal: SIGCHLD.", pid)
    exit_queue.put(('EXIT', "child#%d on signal: SIGCHLD." % pid))
    exit_event.set()
def add_logger(config, found_workers):
    """Add a new numbered logger worker cloned from the base worker.

    Picks the first unused "<prefix>-<N>" name, copies the base worker's
    .py file and duplicates its section in the logging-config yaml. A
    backup copy of the yaml is taken first so any failure rolls back.

    Params:
        config: app config dict; uses 'loggers_abspath' and
                config['logger']['logging_config'].
        found_workers: existing worker names, e.g. ['logger', 'logger-1'].
    """
    import yaml
    from copy import deepcopy

    NN = len(found_workers)
    if NN > LOGGER_WORKERS_MAX:
        # NOTE(review): '>' allows reaching LOGGER_WORKERS_MAX + 1 workers;
        # '>=' may be intended — confirm the limit's semantics.
        elog.warn("too many loggers(>%d) to add", LOGGER_WORKERS_MAX)
        return

    loggerPrefix = found_workers[0].split('-')[0]
    newLoggerName = "%s-%d" % (loggerPrefix, NN)
    # scan upward for the first unused numbered name
    while newLoggerName in found_workers:
        NN = NN + 1
        newLoggerName = "%s-%d" % (loggerPrefix, NN)

    # add loggerNN:
    logger0 = os.path.join(config['loggers_abspath'], "%s.py" % loggerPrefix)
    loggerNN = os.path.join(config['loggers_abspath'], "%s.py" % newLoggerName)
    elog.info("%s: %s", newLoggerName, loggerNN)

    # hoisted out of try so the rollback path always has valid names
    loggingConfigYaml = config['logger']['logging_config']
    loggingConfigYamlDefault = "%s.0" % loggingConfigYaml
    try:
        # backup first so the except-branch can always roll back
        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)

        # fix: safe_load instead of unsafe/deprecated yaml.load; 'with'
        # guarantees the handle is closed even when parsing raises
        with open(loggingConfigYaml) as fr:
            cfg = yaml.safe_load(fr)

        # clone the base worker's logging section for the new name
        cfg['loggers'][newLoggerName] = deepcopy(cfg['loggers'][loggerPrefix])

        fd = util.open_file(loggingConfigYaml)
        try:
            yaml.dump(cfg, fd, default_flow_style=False)
        finally:
            fd.close()

        shutil.copy(logger0, loggerNN)
        # refresh the backup to match the new good state
        shutil.copy(loggingConfigYaml, loggingConfigYamlDefault)
        elog.info("success: %s", newLoggerName)
    except Exception:
        # fix: bare 'except:' also swallowed SystemExit/KeyboardInterrupt
        shutil.copy(loggingConfigYamlDefault, loggingConfigYaml)
        elog.error("failed: %s", newLoggerName)
def sweep_dir(path, srcs, results):
    """Recursively scan *path* for files whose lines contain any of *srcs*.

    Matching file paths are appended (deduplicated) to *results*; every
    matching line is logged. Directories in ignore_dirs, files in
    ignore_files, and extensions in ignore_exts are skipped; when
    only_exts is non-empty, only those extensions are scanned.

    Params:
        path: directory to scan.
        srcs: list of substrings to search for.
        results: output list of matching file paths (mutated in place).
    """
    dname = os.path.basename(path)
    if dname in ignore_dirs:
        elog.warn("ignore path: %s", path)
        return

    filelist = os.listdir(path)
    # sort by name prefix for deterministic traversal order
    filelist.sort(key=lambda x: x[0:20])

    for f in filelist:
        pf = os.path.join(path, f)
        if util.dir_exists(pf):
            sweep_dir(pf, srcs, results)
        elif util.file_exists(pf):
            _, ext = os.path.splitext(f)
            passed_filters = f not in ignore_files and ext not in ignore_exts
            if passed_filters and len(only_exts) > 0 and ext not in only_exts:
                passed_filters = False
            if not passed_filters:
                #elog.warn("ignore file: %s", pf)
                continue
            try:
                # fix: 'with' closes the handle on all paths (was manual
                # open + close_file_nothrow in finally)
                with open(pf, 'r') as fd:
                    lines = fd.readlines()
                # fix: enumerate instead of a hand-maintained counter
                for lineno, line in enumerate(lines, 1):
                    for src in srcs:
                        if line.find(src) != -1:
                            elog.info("found '%s': [%s:%d]", src,
                                      os.path.relpath(pf, APPPATH), lineno)
                            elog.force_clean("%s", line)
                            if pf not in results:
                                results.append(pf)
            except Exception:
                # fix: bare 'except:' also swallowed KeyboardInterrupt
                elog.error("%r: %s", sys.exc_info(), pf)
def load_logger_workers(loggers_dir, workers, loggerConfig):
    """Import every worker module and pair it with the shared config.

    Returns a dict mapping "loggers_dir.worker" to a tuple of
    (imported module, loggerConfig). Exits the process on ImportError.
    """
    loggers = {}
    try:
        for name in workers:
            modpath = "%s.%s" % (loggers_dir, name)
            elog.debug("import %s", modpath)
            loggers[modpath] = (importlib.import_module(modpath), loggerConfig)
        return loggers
    except ImportError as ie:
        elog.error("%r", ie)
        sys.exit(-1)
def main(parser):
    """Scan a directory tree for source strings and optionally sed-replace them.

    Parses CLI options, installs signal handlers, sweeps --path for --srcs
    occurrences, and with --replace rewrites each match to the matching
    --dsts entry (supporting $(mtime)/$(ctime) substitution) via sed.
    Exits with status 1 when --srcs is missing.
    """
    (options, args) = parser.parse_args(args=None, values=None)

    # signal sent to the parent when a child process exits
    ## signal.signal(signal.SIGCHLD, util.sig_chld)

    # process-exit signals
    signal.signal(signal.SIGINT, util.sig_int)
    signal.signal(signal.SIGTERM, util.sig_term)

    # absolute path of the current script
    abspath = util.script_abspath(inspect.currentframe())

    if not options.path:
        options.path = os.getcwd()
        elog.warn(
            "No path specified. using current working dir or using: --path='PATH'"
        )

    if not options.srcs:
        elog.error("No source strings specified. using: --srcs='STRINGS'")
        sys.exit(1)

    if not options.dsts:
        elog.warn("No destigation strings specified. using: --dsts='STRINGS'")

    # resolve options.path to an absolute path
    root_path = util.source_abspath(APPFILE, options.path, abspath)

    srcs = parse_strarr(options.srcs)
    dsts = parse_strarr(options.dsts)

    elog.force("path: %s", root_path)
    elog.force("sour = %r", srcs)
    elog.force("dest = %r", dsts)

    founds = []
    sweep_dir(root_path, srcs, founds)
    elog.force("Total %d files found", len(founds))

    if len(founds) > 0:
        if options.replace:
            if len(srcs) == len(dsts):
                for pf in founds:
                    ctime, mtime = None, None
                    fts = file_times(pf)
                    if fts:
                        ctime, mtime = fts
                    else:
                        elog.warn("missing file: %s", pf)
                        continue

                    for i in range(0, len(srcs)):
                        srcstr = srcs[i]
                        dststr = dsts[i] if i < len(dsts) else None
                        if dststr:
                            # substitute file timestamps into the destination
                            ds = dststr.replace('$(mtime)', mtime).replace(
                                '$(ctime)', ctime)
                            if options.whole_line:
                                cmd = "sed -i 's/%s.*/%s/g' '%s'" % (srcstr, ds, pf)
                            else:
                                cmd = "sed -i 's/%s/%s/g' '%s'" % (srcstr, ds, pf)
                            elog.debug(cmd)
                            (status, output) = commands.getstatusoutput(cmd)
                            if status != 0:
                                # fix: was undefined name 'sed' here, which
                                # raised NameError whenever sed failed
                                elog.error(
                                    "failed to command: \"%s\", output: %r",
                                    cmd, output)
                elog.force("Total %d files replaced", len(founds))
            else:
                elog.error(
                    "Failed to replace for srcs(%r) mismatched with dsts(%r)",
                    srcs, dsts)
        else:
            elog.warn("No files to be replaced. Using: --replace")
def main(config, parser):
    """Entry point: configure logging, load logger workers, dispatch options.

    Order matters: the logging config is built first, then workers named in
    the config are imported; --list/--add/--remove each return early, and
    --reset-position / --startup require at least one loaded worker (or
    --force to load every worker found on disk).

    Params:
        config: app config dict; uses 'logger', 'loggers', 'loggers_abspath'.
        parser: an optparse-style parser; options drive the dispatch below.
    """
    import utils.logger as logger
    (options, args) = parser.parse_args(args=None, values=None)

    # build the logging dict-config from file config + CLI overrides
    logConfigDict = logger.set_logger(config['logger'], options.log_path, options.log_level)

    loggers = {}
    if config['loggers'] and len(config['loggers']):
        # import the workers declared in the config file
        loggers = load_logger_workers('loggers', config['loggers'], {
            'logger_config' : logConfigDict,
            'logger_stash' : options.logger_stash,
            'batch_rows' : options.batch_rows,
            'end_time' : options.end_time,
            'end_rowid' : options.end_rowid
        })
        if len(loggers) > LOGGER_WORKERS_MAX:
            elog.error("too many logger workers. please increase LOGGER_WORKERS_MAX and try!")
            exit(-1)

    # workers present on disk (may differ from those loaded above)
    found_workers = list_logger_workers(logConfigDict, config['loggers_abspath'])

    if options.list_logger_workers:
        for logger_worker in found_workers:
            elog.info("found worker: %s (%s/%s.py)", logger_worker, config['loggers_abspath'], logger_worker)
        elog.force("total %d workers: %r", len(found_workers), found_workers)
        return

    if options.add_logger:
        add_logger(config, found_workers)
        return

    if options.remove_logger:
        remove_logger(config, found_workers)
        return

    # --force: fall back to loading every worker found on disk
    if len(loggers) == 0 and options.force:
        loggers = load_logger_workers('loggers', found_workers, {
            'logger_config' : logConfigDict,
            'logger_stash' : options.logger_stash,
            'batch_rows' : options.batch_rows,
            'end_time' : options.end_time,
            'end_rowid' : options.end_rowid
        })

    if options.reset_logger_position:
        if len(loggers):
            reset_logger_position(loggers, config['loggers_abspath'], options.start_time, options.start_rowid)
        else:
            elog.error("--reset-position ignored: logger worker not found. use --force for all.")
        pass

    if options.startup:
        if len(loggers):
            startup(loggers, config)
        else:
            elog.error("--startup ignored: logger worker not found. use --force for all.")
        pass
    pass
def exception_handler_log(errlog):
    """Serialize *errlog* to JSON, emit it as an error event, then exit(-1)."""
    import utils.evntlog as elog
    serialized = json.dumps(errlog)
    elog.error(serialized)
    sys.exit(-1)