def cleanup(self):
    # Detach every handler from this logger, closing its stream first so the
    # log file is flushed and released.
    removehandlers = []
    for handler in self.logger.handlers:
        removehandlers.append(handler)

    for handler in removehandlers:
        # not every handler type has a stream attribute
        if getattr(handler, 'stream', None):
            elog.debug("close stream: %r", self.logfile)
            handler.stream.close()
        self.logger.removeHandler(handler)
    pass
def load_logger_workers(loggers_dir, workers, loggerConfig):
    # Import each worker module under loggers_dir and map its dotted module
    # name to a (module, config) pair. Exit on the first import failure.
    loggers = {}
    worker_modules = ["%s.%s" % (loggers_dir, worker) for worker in workers]
    try:
        for worker in worker_modules:
            elog.debug("import %s", worker)
            module = importlib.import_module(worker)
            loggers[worker] = (module, loggerConfig)
            pass
        return loggers
    except ImportError as ie:
        elog.error("%r", ie)
        sys.exit(-1)
    pass
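# --- Usage sketch (hypothetical, not part of the original module) ---
# Shows how load_logger_workers() might be wired into startup. The package
# name "loggers", the worker names, and the per-worker start() entry point
# are assumptions made for illustration only.
def start_logger_workers(loggerConfig):
    workers = ["weblogger", "dblogger"]        # hypothetical worker module names
    loggers = load_logger_workers("loggers", workers, loggerConfig)
    for name, (module, config) in loggers.items():
        elog.info("starting logger worker: %s", name)
        module.start(config)                   # assumed per-worker entry point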
def cleanup(self):
    # Same teardown as above, but tolerate a handler that cannot be removed
    # cleanly (removeHandler raising TypeError is swallowed).
    removehandlers = []
    for handler in self.logger.handlers:
        removehandlers.append(handler)

    for handler in removehandlers:
        if getattr(handler, 'stream', None):
            elog.debug("close stream: %r", self.logfile)
            handler.stream.close()
        try:
            self.logger.removeHandler(handler)
        except TypeError:
            pass
        pass
    pass
def log_messages(loggerClass):
    # Emit one batch of messages starting from the saved (tstamp, rowid)
    # position, after persisting the advanced position for the next batch.
    tstamp = loggerClass.start_tstamp
    rowid = loggerClass.start_rowid
    time_str = timestamp2datetimestr(tstamp, WebLogger.DATE_FORMAT)

    loggerClass.save_position((tstamp + WebLogger.TIME_DELTA,
                               rowid + loggerClass.batch_rows))

    elog.debug("%s: save position (%d, %d)", loggerClass.getlogfilename(),
               loggerClass.start_tstamp, loggerClass.start_rowid)

    random.seed(loggerClass.start_tstamp)

    # write messages up to the newly saved rowid
    while rowid < loggerClass.start_rowid:
        loggerClass.log_message(rowid, tstamp, time_str)
        rowid = rowid + 1
        pass
    pass
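# --- Driver sketch (assumption, not part of the original worker) ---
# Illustrates how log_messages() could be called repeatedly: each call emits
# one batch and save_position() advances the (tstamp, rowid) cursor for the
# next call. run_batches() and num_batches are hypothetical names.
def run_batches(loggerClass, num_batches):
    for _ in range(num_batches):
        log_messages(loggerClass)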
def main(parser):
    (options, args) = parser.parse_args(args=None, values=None)

    # Signal sent to the parent when a child process exits
    ## signal.signal(signal.SIGCHLD, util.sig_chld)

    # Process exit signals
    signal.signal(signal.SIGINT, util.sig_int)
    signal.signal(signal.SIGTERM, util.sig_term)

    # Absolute path of the current script
    abspath = util.script_abspath(inspect.currentframe())

    if not options.path:
        options.path = os.getcwd()
        elog.warn("No path specified. Using current working dir. Specify with: --path='PATH'")

    if not options.srcs:
        elog.error("No source strings specified. Use: --srcs='STRINGS'")
        sys.exit(1)

    if not options.dsts:
        elog.warn("No destination strings specified. Use: --dsts='STRINGS'")

    # Resolve options.path to an absolute path
    root_path = util.source_abspath(APPFILE, options.path, abspath)

    srcs = parse_strarr(options.srcs)
    dsts = parse_strarr(options.dsts)

    elog.force("path: %s", root_path)
    elog.force("sour = %r", srcs)
    elog.force("dest = %r", dsts)

    founds = []
    sweep_dir(root_path, srcs, founds)

    elog.force("Total %d files found", len(founds))

    if len(founds) > 0:
        if options.replace:
            if len(srcs) == len(dsts):
                for pf in founds:
                    ctime, mtime = None, None
                    fts = file_times(pf)
                    if fts:
                        ctime, mtime = fts
                    else:
                        elog.warn("missing file: %s", pf)
                        continue

                    for i in range(0, len(srcs)):
                        srcstr = srcs[i]
                        dststr = None
                        if i < len(dsts):
                            dststr = dsts[i]

                        if dststr:
                            # expand $(mtime) and $(ctime) placeholders in the replacement
                            ds = dststr.replace('$(mtime)', mtime).replace('$(ctime)', ctime)

                            if options.whole_line:
                                cmd = "sed -i 's/%s.*/%s/g' '%s'" % (srcstr, ds, pf)
                            else:
                                cmd = "sed -i 's/%s/%s/g' '%s'" % (srcstr, ds, pf)

                            elog.debug(cmd)
                            (status, output) = commands.getstatusoutput(cmd)
                            if status != 0:
                                elog.error("failed to run command: \"%s\", output: %r", cmd, output)

                elog.force("Total %d files replaced", len(founds))
            else:
                elog.error("Failed to replace: srcs(%r) mismatched with dsts(%r)", srcs, dsts)
            pass
        else:
            elog.warn("No files to be replaced. Use: --replace")
        pass
    pass
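# --- Worked example (illustration only, never called by the tool) ---
# Demonstrates how a $(mtime)/$(ctime) placeholder in a --dsts string expands
# into the sed command that main() builds. The file name, timestamps and
# source/destination strings below are made up for this sketch.
def _example_build_sed_command():
    srcstr = "@version:"                       # hypothetical --srcs entry
    dststr = "@version: $(mtime)"              # hypothetical --dsts entry
    ctime, mtime = "2017-03-01 08:00:00", "2017-03-01 12:00:00"
    pf = "app.py"                              # hypothetical matched file
    ds = dststr.replace('$(mtime)', mtime).replace('$(ctime)', ctime)
    # whole-line mode replaces everything from the match to the end of line
    cmd = "sed -i 's/%s.*/%s/g' '%s'" % (srcstr, ds, pf)
    return cmd  # -> sed -i 's/@version:.*/@version: 2017-03-01 12:00:00/g' 'app.py'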
def init_data(self, logfile):
    self.restore_position()

    if not util.dir_exists(self.log_prefix):
        elog.warn("create dir for stash log: %s", self.log_prefix)
        os.makedirs(self.log_prefix)

    elog.debug('log config: %r', self.dictcfg)
    elog.info('stash prefix: %s', self.log_prefix)
    elog.info('start tstamp: %d', self.start_tstamp)
    elog.info('start rowid: %d', self.start_rowid)
    elog.info('batch rows: %d', self.batch_rows)

    file_dests = os.path.join(self.plugins_dir, 'config', 'dests.csv')
    file_proxys = os.path.join(self.plugins_dir, 'config', 'proxys.csv')
    file_keywds = os.path.join(self.plugins_dir, 'config', 'keywds.csv')

    elog.info("dests file: %s", file_dests)
    elog.info("proxys file: %s", file_proxys)
    elog.info("keywds file: %s", file_keywds)

    with open(file_dests, 'r') as fd:
        dests = fd.readlines()
    with open(file_proxys, 'r') as fd:
        proxys = fd.readlines()
    with open(file_keywds, 'r') as fd:
        keywds = fd.readlines()

    # dests.csv: id, ip, port, host
    # e.g. 100005,67.64.46.91,80,www.zhibo8.cc
    self.dests = [tuple(line.strip('\n').split(',')) for line in dests]
    del dests

    # proxys.csv: ip, port, type
    # e.g. 121.232.144.158,9000,HTTP
    self.proxys = [tuple(line.strip('\n').split(',')) for line in proxys]
    del proxys

    # keywds.csv: id, word
    self.keywds = [tuple(line.strip('\n').split(',')) for line in keywds]
    del keywds

    self.max_dests = len(self.dests) - 1
    self.max_proxys = len(self.proxys) - 1
    self.max_keywds = len(self.keywds) - 1

    # update dictcfg with logfile
    elog.update_log_config(self.dictcfg, self.logger_name, logfile, 'INFO')

    # reload config
    logging.config.dictConfig(self.dictcfg)

    # update logger
    self.logger = logging.getLogger(self.logger_name)
    self.logfile = logfile

    (self.a, self.b, self.c, self.d, self.p) = ((1, 220), (10, 230),
        (20, 240), (30, 250), (10000, 60000))

    self.fields = ('rowid', 'timestr', 'timeint', 'destid',
        'sourip', 'sourport', 'destip', 'destport', 'desturl',
        'proxyip', 'proxyport', 'proxytype', 'keywdid')
    pass
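# --- Illustration (assumption about how the loaded tables are consumed) ---
# init_data() keeps max_dests/max_proxys/max_keywds as the last valid index of
# each CSV table, which suggests rows are drawn by random index when a log
# record is generated. pick_random_row() is a hypothetical helper, not a
# method of the original class; it assumes the random module is imported.
def pick_random_row(rows, max_index):
    # Return one CSV row tuple chosen uniformly at random.
    return rows[random.randint(0, max_index)]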