def getQueueQuality():
    collectorTime = collectTime()
    METRIC = "svr.fault"
    senderInfoList = []
    i = 2
    httpRequestList = []
    httpErrorList = []
    wntRequestList = []
    wntErrorList = []
    httpErrorPercent = 0
    wntErrorPercent = 0
    SERVERNUM = SERVERNUMBER
    # The file exists and is a regular file
    if OLD_SHELLTYPE_FLAG:
        sQualityLog = "/myshell/pubpathquality.txt"
        #sQualityLog = "/home/dy1/gs" + SERVERNUM + "/log/pubpathquality.txt"
    else:
        sQualityLog = "/home/dy1/gs" + SERVERNUM + "/log/oslog/pubpathquality.txt"
    if os.path.isfile(sQualityLog):
        # Fetch the most recently recorded channel info
        info = tailer.tail(open(sQualityLog), 1)
        print(info)
        lastLogTime = info[0].split(']')[0].split("[")[1]
        while True:
            findLogTime = tailer.tail(open(sQualityLog), i)[0].split(']')[0].split("[")[1]
            if findLogTime == lastLogTime:
                i = i + 1
            else:
                # i is the number of trailing lines that belong to the last record
                i = i - 1
                break
        # All lines written by the most recent log record
        checkLog = tailer.tail(open(sQualityLog), i)
        for line in checkLog:
            # Tally http and wnt channel fault rates separately
            qtype = line.split("]")[1].split(" ")[0]
            if qtype == "http":
                httpRequestList.append(float(line.split("]")[1].split(" ")[2]))
                httpErrorList.append(float(line.split("]")[1].split(" ")[3]))
                resultInfo1 = str(round(sum(httpErrorList) / sum(httpRequestList), 2))
                senderInfoList.append(
                    addString(collectorTime, resultInfo1, METRIC, qType="http"))
            else:
                wntRequestList.append(float(line.split("]")[1].split(" ")[2]))
                wntErrorList.append(float(line.split("]")[1].split(" ")[3]))
                resultInfo2 = str(round(sum(wntErrorList) / sum(wntRequestList), 2))
                senderInfoList.append(
                    addString(collectorTime, resultInfo2, METRIC, qType="wnt"))
    sendToLog(collectorTime, senderInfoList)
    return senderInfoList
def read_last_line(filename: str) -> str:
    """Read the last line of the log at `filename`."""
    with open(filename) as f:
        lines = tailer.tail(f, 1)
    if lines:
        return lines[0]
    return ""
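# A minimal usage sketch for read_last_line above; the log path is a
# hypothetical example, not from the original source.
import tailer  # read_last_line relies on tailer being importable

last = read_last_line("/var/log/app.log")  # hypothetical path
print("last line:", last if last else "<empty log>")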
def _full_log(self, full):
    if not full:
        self.refreshLog(self.currentLogFile, self.numbLogLine, False)
        return
    self.displayLog.clear()
    t = '\n'.join(tailer.tail(open(self.currentLogFile), 1000))
    self.displayLog.setText(t)
def show_file_contents(fname, head=0, tail=0):
    for line in tailer.head(open(fname), head):
        print(line)
    print('\t...')
    print('\t...')
    for line in tailer.tail(open(fname), tail):
        print(line)
def get_logs(self, path, last_clock):
    import portalocker
    import tailer
    import time

    pn = self.get_path(path)
    with open(pn, "r+") as log:
        while True:
            try:
                portalocker.lock(log, portalocker.LOCK_EX)
                break
            except:
                dbg.dbg("lock failed")
                time.sleep(0.1)
        curtime = int(util.current_sec())
        lines = tailer.tail(log, 20)
        ret = []
        if last_clock is None:
            last_clock = 0
        for line in lines:
            sp = line.strip().split("\t")
            if int(sp[0]) < last_clock:
                continue
            #log = {
            #    'time': eval(sp[0]),
            #    'message': sp[1]
            #}
            #ret.append(log)
            ret.append(sp[1])
        return ret, curtime
def getScrapyStatusFromScrapyLog(logPath):
    '''
    Parse a scrapy log in the format below and return the dumped stats as a
    dict; the log must be the output of a scrapy run that has just finished.

    2016-05-03 17:07:24 [scrapy] INFO: Dumping Scrapy stats:
    {'downloader/request_bytes': 227847,
     'downloader/request_count': 427,
     'downloader/request_method_count/GET': 427,
     'downloader/response_bytes': 799168,
     'downloader/response_count': 427,
     'downloader/response_status_count/200': 427,
     'finish_reason': 'finished',
     'finish_time': datetime.datetime(2016, 5, 3, 9, 7, 24, 34782),
     'item_scraped_count': 6882,
     'log_count/DEBUG': 7310,
     'log_count/INFO': 16,
     'request_depth_max': 3,
     'response_received_count': 427,
     'scheduler/dequeued': 427,
     'scheduler/dequeued/memory': 427,
     'scheduler/enqueued': 427,
     'scheduler/enqueued/memory': 427,
     'start_time': datetime.datetime(2016, 5, 3, 8, 58, 42, 954245)}
    2016-05-03 17:07:24 [scrapy] INFO: Spider closed (finished)

    :param logPath:
    :return:
    '''
    lines = tailer.tail(open(logPath), 50)
    nu = 0
    for i, line in enumerate(lines):
        if line.endswith('Dumping Scrapy stats:'):
            nu = i
    # join everything between the stats header and the closing INFO line
    jsonStr = ''.join(lines[nu + 1:-1])
    jsonDict = eval(jsonStr)
    return jsonDict
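# Hedged note on getScrapyStatusFromScrapyLog above: eval() of the dumped
# stats needs `datetime` to be importable in this module, since the dict
# embeds datetime.datetime(...) literals. A defensive wrapper sketch, with
# getScrapyStatsOrEmpty a hypothetical helper not in the original source:
import datetime  # so eval() can resolve datetime.datetime(...)

def getScrapyStatsOrEmpty(logPath):
    try:
        return getScrapyStatusFromScrapyLog(logPath)
    except (NameError, SyntaxError) as e:
        # NameError: datetime missing from the eval scope;
        # SyntaxError: the 50-line tail window truncated the stats dict.
        print("could not parse scrapy stats: %s" % e)
        return {}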
def logslastn(chan, n):
    mess = ""
    lines = tailer.tail(open(chan + ".txt", errors='ignore'), n)
    for line in lines:
        mess += line + '\n'
    return postlog(mess)
def on_modified(event):
    # If the file that's been modified is a csv file we have some logging
    # work to do; scope-limit to csv files we care about for now.
    if event.src_path.endswith(".csv") and island_str in event.src_path:
        print(f"{event.src_path} has been modified")
        logpath = event.src_path
        logname = str(event.src_path.split("/")[-1])
        print(log_state)
        print(logname)
        # If this is the first time this file has been read, get the column
        # names to attach to the neptune log_metrics.
        if log_state[logname] == 0:
            csvlog = pd.read_csv(logpath)
            log_metrics[logname] = list(csvlog.columns)
            log_state[logname] = 1
        lt = [t for t in log_type if t in logpath]
        if len(lt) == 1:
            logline = tailer.tail(open(logpath), 1)[0]
            stats = [float(l) for l in logline.strip().split(",")]
            triton.log_stats_to_experiment(lt[0], log_metrics[logname], stats,
                                           logpath, island_str)
def _main(filepath, options):
    try:
        if options.lines > 0:
            with open(filepath, 'rb') as f:
                if options.head:
                    if options.follow:
                        sys.stderr.write('Cannot follow from top of file.\n')
                        sys.exit(1)
                    lines = head(f, options.lines)
                else:
                    lines = tail(f, options.lines)
            encoding = locale.getpreferredencoding()
            for line in lines:
                print(line.decode(encoding))
        if options.follow:
            for line in follow_path(filepath):
                if line is not None:
                    print(line)
                else:
                    time.sleep(options.sleep)
    except KeyboardInterrupt:
        # Escape silently
        pass
def animate(i):
    ftemp = 'temp.csv'
    fh = tailer.tail(open(ftemp), 10)
    temp = list()
    humid = list()
    time = list()
    for line in fh:
        pis = line.split(',')
        degree = pis[0]
        humidity = pis[1]
        timeC = pis[2]
        timeB = timeC[:8]
        time_string = datetime.strptime(timeB, '%H:%M:%S')
        temp.append(float(degree))
        humid.append(float(humidity))
        time.append(time_string)
    plt.xticks(rotation=-45)
    ax1 = plt.subplot(211)
    graph1 = plt.plot(temp, 'c')
    ax1.set_title('Temperature', fontproperties=fontprop)
    ax1.get_xaxis().set_visible(False)
    ax1.set_ylim([10, 50])
    ax2 = plt.subplot(212)
    graph2 = plt.plot(time, humid)
    ax2.set_ylim([0, 100])
    ax2.set_title('Humidity', fontproperties=fontprop)
def parse(file, verbose, lines):
    inserted, not_inserted = 0, 0
    try:
        if lines is None:
            fb = open(file)
            content = fb.read().splitlines()
        else:
            nlines = int(lines)
            content = tailer.tail(open(file), nlines)
        print('Reading file %s ...' % file)
    except IOError:
        print('File %s does not exist or cannot be read.' % file)
        sys.exit(1)
    for idx, line in enumerate(content):
        record = line.split('|')
        if len(record) < 4:
            continue
        if not exist_record(record) and insert_record(record):
            inserted += 1
            if verbose:
                print(('Insert record ', record))
        else:
            if verbose:
                print(('Not insert record ', record))
            not_inserted += 1
    print('Inserted records: %i\nNot inserted records: %i' % (inserted, not_inserted))
def getCmdHandler():
    # wait for cmd to be written completely
    id = request.args.get('id')
    filename = str(dir) + 'to' + str(id) + '.txt'
    cmd = str(tailer.tail(open(filename), 1))
    cmd_json = cmd[2:-2]
    return cmd_json
def readLog():
    # Take the file and read the last line; open for reading ('r'), since
    # opening with 'w' would truncate the file.
    file = tailer.tail(open('myLogFile.txt', 'r'), 1)
    # Follow the file as it grows
    for line in tailer.follow(open('myLogFile.txt')):
        print(line)
def tail(file_path, lines=20):
    try:
        import tailer
    except ImportError:
        return read_file(file_path)
    with open(file_path) as f:
        return tailer.tail(f, lines)
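# Usage sketch for the tail() wrapper above. read_file is assumed to be the
# project's whole-file fallback; this stand-in just mirrors the list-of-lines
# shape that tailer.tail returns.
def read_file(file_path):
    with open(file_path) as f:
        return f.read().splitlines()

print(tail("/tmp/example.log", lines=5))  # hypothetical path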
def tail_log_file(item, lines=1000):
    path = Logger._log_path(item)
    if not os.path.isfile(path):
        return 'No Log'
    with open(path) as fi:
        return '\n'.join(tailer.tail(fi, lines))
def __init__(self):
    self.current_price = float(
        json.loads(
            urllib.request.urlopen("https://api.coinone.co.kr/trades")
            .read().decode('utf-8')).get('completeOrders')[0]['price'])
    self.time = time.time()
    # The last trade record stored on the server
    self.order_last = tailer.tail(open('./orders/orders.txt'), 1)[0].split(':')
def display_logs(algo):
    # TODO
    logs = " \n".join(
        tailer.tail(
            open(
                os.path.join(config['GENERAL']['LOGS_DIR'],
                             "{}.log".format(algo)), "r"), 100))
    print(logs)
    return logs
def get_log(logpath, lines=1000):
    if os.path.exists(logpath):
        with open(logpath) as log:
            logfile = os.linesep.join(tailer.tail(log, lines))
    else:
        logfile = None
        logpath = None
    return logfile, logpath
def get_cli_output(user, filename):
    try:
        output = '\n'.join(
            tail(open(get_output_file(user, filename)),
                 app.config['NO_OF_OUTPUT_LINES']))
    except FileNotFoundError:
        # if the output file has not yet been created, return a string saying so
        output = 'No output has been produced.'
    return jsonify(output=output)
def log(self):
    """Return the last 10 lines of the system logs."""
    lines = tailer.tail(open('logs/status.log'), 10)
    statement = ""
    for line in lines:
        statement += (line + "<br />")
    return statement
def log_n(self, n):
    """Return n lines from the system logs."""
    lines = tailer.tail(open('logs/status.log'), n)
    statement = ""
    for line in lines:
        statement += (line + "<br />")
    return statement
def get_last_log_date(self, logfile):
    if not os.path.exists(logfile):
        return None
    last_line = tailer.tail(open(logfile), 1)
    if last_line:
        last_line = last_line[0]
        return last_line.split(', ')[0].split('T')[0]
    else:
        return None
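# Sketch of the line layout get_last_log_date above appears to assume: an
# ISO-ish timestamp first, comma-separated from the rest. The example line
# is hypothetical, not taken from the original source.
line = "2021-04-05T12:00:00, INFO, started"
assert line.split(', ')[0].split('T')[0] == "2021-04-05"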
async def logs(self, context: commands.Context, lines: int = 20, offset: int = 0):
    '''Output last lines of log. Usage: !logs [lines=20] [offset=0]'''
    with open(self.log_filename) as f:
        log_tail = tailer.tail(f, lines=lines + offset)[:lines]
    formatted_log_tail = '\n'.join(log_tail)[-1994:]  # 2k char limit
    await context.send(f'```{formatted_log_tail}```')
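# The slicing above tails (lines + offset) lines and keeps the oldest `lines`
# of that window, i.e. the `offset` newest lines are skipped. A quick check
# with stand-in data (tailer.tail returns oldest-to-newest within the window):
window = ["l1", "l2", "l3", "l4", "l5"]  # pretend tail of lines + offset == 5
lines, offset = 3, 2
assert window[:lines] == ["l1", "l2", "l3"]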
def ticker_time_series(self, limit=-1, now_ts=0, interval=60, time_range=3600,
                       field='last', operations=['first', 'last', 'max', 'min'],
                       as_datetime=False):
    if limit < 0:
        limit = self._limit_for_time_range(time_range, 'ticker')
    events = []
    for line in tailer.tail(open(const.DATA_DIR + "/ticker.jsons"), limit):
        events.append(json.loads(line))
    now = int(math.ceil(time.time()))
    if now_ts > 0:
        now = int(math.ceil(now_ts / 1000.0))
    return self._compute_time_series(events, now - time_range, now, interval,
                                     field, operations, as_datetime)
def get_last_log_lines(self, lines):
    """Return the last log messages."""
    try:
        current_tail = tailer.tail(
            open("/opt/splunk/var/log/splunk/SplunkAppForWazuh.log"), lines)
        result = list(reversed(current_tail))
    except Exception as e:
        self.error('[log.py][get_last_log_lines] %s' % (e))
        raise e
    return result
def on_modified(event):
    if event.src_path.endswith(".csv") and island in event.src_path:
        print(f"{event.src_path} has been modified")
        logpath = event.src_path
        #for w in watchlist:
        #    if w in logpath:
        if "mean" in logpath:
            logline = tailer.tail(open(logpath), 1)[0]
            stats = [float(l) for l in logline.strip().split(",")]
            triton.log_stats_to_experiment(stats, logpath)
def home():
    """
    This function just responds to the browser URL localhost:5000/
    :return: the last 20 queries
    """
    last_lines = tailer.tail(open('/data/queries.txt'), 20)
    last_lines = ''.join([x + "\n" for x in last_lines])
    return Response(last_lines, mimetype='text/plain')
def log_short(bot, update):
    user = update.message.from_user
    logger.info("%s(%s) wants the short log." % (user.first_name, user.id))
    if update.message.chat_id != MANAGER_ID:
        update.message.reply_text('This command is for the manager only and is not allowed for regular users.')
        return  # do not send the log to non-managers
    lines = tailer.tail(open(LOG_DIR + '/log.txt', 'r'), 10)
    string = ""
    for line in lines:
        string = string + line + "\n"
    update.message.reply_text(string)
def _get_one_java_run_and_return_last_line_date(self, querysearch, until, maxtweets,
                                                all_tweets=True, since=None,
                                                return_line=True):
    """
    Create one java csv using a java jar (either Top Tweets or All Tweets, as
    specified by the all_tweets flag) and return the date string from the last
    tweet collected.

    querysearch: (string) query string, usually one word - multiple words
        imply an "AND" between them
    maxtweets: (int) number of tweets to return
    since: (string of form '2015-09-30') date to search since; optional, and
        not used by the create_java_tweets function
    until: (string of form '2015-09-30') date to search until, since the
        search is conducted backwards in time
    return_line: (bool) whether to return the date from the last line; if
        true, the date from the last line in the csv is returned
    """
    start_time = time.time()
    # choose which jar file to use
    jar_string = self.jar_folder_path + '/got_top_tweets.jar'
    if all_tweets:
        jar_string = self.jar_folder_path + '/got_all_tweets.jar'
    # create search string
    quotation_mark = '"'
    query_string = 'querysearch=' + quotation_mark + querysearch + quotation_mark
    until_string = 'until=' + until
    maxtweets_string = 'maxtweets=' + str(maxtweets)
    # create output_got.csv file of tweets with these search parameters
    if since is None:
        subprocess.call(['java', '-jar', jar_string, query_string, until_string,
                         maxtweets_string])
    else:
        since_string = 'since=' + since
        subprocess.call(['java', '-jar', jar_string, query_string, since_string,
                         until_string, maxtweets_string])
    # find date of the last tweet in this file (in the last line of the file)
    last_line = tailer.tail(open('output_got.csv'), 1)[0]
    date_position = last_line.find(';')
    date_string = last_line[date_position + 1:date_position + 11]
    date_string = self._convert_date_to_standard(date_string)
    print("Time to collect", str(maxtweets), "tweets:",
          round((time.time() - start_time) / 60., 1), "minutes")
    if return_line:
        return date_string
def test_ner(args):
    if args.year == 2015 or args.year == 2016:
        if args.dev:
            run_command(
                "java -cp " + args.stanford_jar +
                " edu.stanford.nlp.ie.crf.CRFClassifier -ner.useSUTime false -loadClassifier " +
                os.path.abspath("datasets/NEEL" + str(args.year) + "/train/NEEL" +
                                str(args.year) + ".ser.gz") +
                " -testFile " +
                os.path.abspath("datasets/NEEL" + str(args.year) + "/dev/NEEL" +
                                str(args.year) + "-dev.conll") +
                " > " +
                os.path.abspath("datasets/NEEL" + str(args.year) + "/dev/stanford.conll"))
            print('\n'.join([
                str(element) for element in tailer.tail(
                    open(os.path.abspath("datasets/NEEL" + str(args.year) +
                                         "/dev/stanford.conll")), 8)
            ]))
        else:
            run_command(
                "java -cp " + args.stanford_jar +
                " edu.stanford.nlp.ie.crf.CRFClassifier -ner.useSUTime false -loadClassifier " +
                os.path.abspath("datasets/NEEL" + str(args.year) + "/train/NEEL" +
                                str(args.year) + ".ser.gz") +
                " -testFile " +
                os.path.abspath("datasets/NEEL" + str(args.year) + "/test/NEEL" +
                                str(args.year) + "-short-test.conll") +
                " > " +
                os.path.abspath("datasets/NEEL" + str(args.year) + "/test/stanford.conll"))
            print('\n'.join([
                str(element) for element in tailer.tail(
                    open(os.path.abspath("datasets/NEEL" + str(args.year) +
                                         "/test/stanford.conll")), 8)
            ]))
    else:
        print("Testing a NER is only available for NEEL2015 and NEEL2016")
def send_msg(self, msg, send_slack=False):
    logfile = self.get_log_fname()
    print(msg)
    with open(logfile, 'a') as f:
        f.write(msg + '\n')
    if send_slack and self.slack_obj is not None:
        self.slack_obj.chat.post_message(
            '@' + self.slack_usr, msg, username='******')
        content = tailer.tail(open(logfile), 10)
        self.slack_obj.files.upload(
            content='\n'.join(content), channels='@' + self.slack_usr,
            title=get_formatted_datetime() + '_tail_of_log')
def send_msg(msg, slack=None, slack_usr=None):
    print(msg)
    with open('log.txt', 'a') as f:
        f.write(msg + '\n')
    if slack is not None and slack_usr is not None:
        slack.chat.post_message('@' + slack_usr, msg, username='******')
        content = tailer.tail(open('log.txt'), 10)
        slack.files.upload(content='\n'.join(content), channels='@' + slack_usr,
                           title=get_formatted_datetime() + '_tail_of_log')
def __init__(self, theFile):
    self.theFile = theFile
    self.first = tl.head(open(theFile), 1)[0].split()
    print("the top left corner has position: " + self.first[0] + "\t" + self.first[1])
    self.last = tl.tail(open(theFile), 1)[0].split()
    print("the bottom right corner has position: " + self.last[0] + "\t" + self.last[1])
    self.yrange = abs((float(self.last[1]) - float(self.first[1])) / DELTAS)
    self.height = self.yrange * DELTAS
    print("the y-range of our map in boxes: " + str(self.yrange) +
          " and the height in WGS84 is " + str(self.height))
    self.xrange = abs((float(self.last[0]) - float(self.first[0])) / DELTAS)
    self.length = self.xrange * DELTAS
    print("the x-range of our map in boxes: " + str(self.xrange) +
          " and the length in WGS84 is " + str(self.length))
def volume_time_series(self, limit=-1, now_ts=0, interval=60, trade_type='ask',
                       time_range=3600, field='amount', operations=['sum'],
                       min_amount=0, as_datetime=False):
    if limit < 0:
        limit = self._limit_for_time_range(time_range, 'trades')
    events = []
    for line in tailer.tail(open(const.DATA_DIR + "/trades.jsons"), limit):
        event = json.loads(line)
        events.append(event)
    now = int(math.ceil(time.time()))
    if now_ts > 0:
        now = int(math.ceil(now_ts / 1000.0))
    time_series = []
    from_ts = now - time_range
    to_ts = now
    for ts in range(from_ts, to_ts, interval):
        values = []
        for event in events:
            event_ts = event['_ts'] / 1000
            data = event.get('data', None)
            if data is None:
                continue
            if trade_type != 'all' and data['type'] != trade_type:
                continue
            if event_ts >= ts and event_ts <= (ts + interval):
                value = float(data.get(field, 0.0))
                if value < min_amount:
                    continue
                # each qualifying trade contributes 1, so 'sum' counts trades
                value = 1
                values.append(value)
        data_points = []
        for operation in operations:
            data_point = 0
            if len(values) == 0:
                data_point = 0
            elif operation == 'avg':
                data_point = sum(values) / len(values)
            elif operation == 'sum':
                data_point = sum(values)
            elif operation == 'max':
                data_point = max(values)
            elif operation == 'min':
                data_point = min(values)
            elif operation == 'first':
                data_point = values[0]
            elif operation == 'last':
                data_point = values[len(values) - 1]
            data_points.append(float(data_point))
        if as_datetime:
            time_series.append([datetime.fromtimestamp(ts)] + data_points)
        else:
            time_series.append([ts] + data_points)
    return time_series
def get_last_log_lines(self, lines):
    """Return the last log messages."""
    try:
        current_tail = tailer.tail(
            open(
                make_splunkhome_path(
                    ['var', 'log', 'splunk', 'SplunkAppForWazuh.log'])), lines)
        result = list(reversed(current_tail))
    except Exception as e:
        self.error('[log.py][get_last_log_lines] %s' % (e))
        raise e
    return result
def get_current_temperatures(filename=None, separator=','):
    if filename is None:
        _, filenames = get_temperature_log_file_list()
        filename = filenames[-1]
    with open(filename) as f:
        header = [s.strip() for s in tailer.head(f, 1).pop().split(separator)]
        current_strings = [s.strip() for s in tailer.tail(f, 1).pop().split(separator)]
    current = []
    for s in current_strings:
        try:
            current.append(float(s))
        except ValueError:
            current.append(s)
    return dict(zip(header, current))
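# Usage sketch for get_current_temperatures above; the file content is a
# hypothetical example of the header-plus-rows layout the function assumes
# (first line is the header, last line holds the newest readings).
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.csv', delete=False) as tmp:
    tmp.write("time,sensor_a,sensor_b\n12:00:00,21.5,22.0\n")
print(get_current_temperatures(tmp.name))
# -> {'time': '12:00:00', 'sensor_a': 21.5, 'sensor_b': 22.0}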
def logslasth(chan, h):
    mess = ""
    amlines = 60 * round(h) * 5
    if amlines > logmax:
        amlines = logmax
    if amlines < 50:
        amlines = 50
    lines = tailer.tail(open(chan + ".txt", errors='ignore'), amlines)
    n = datetime.datetime.now()
    # for some reason the first element is borked
    del lines[0]
    line = lines[0]
    # for people who ask for ridiculous amounts of lines
    ti = getdate(line)
    ti = n - ti
    mess = ""
    if ti.total_seconds() < h * 3600:
        for line in lines:
            mess += line + '\n'
    else:
        lines.reverse()
        for line in lines:
            line = line.strip('\n').strip()
            if line != '':
                try:
                    ti = getdate(line)
                    ti = n - ti
                    if ti.total_seconds() > round(h * 3600):
                        break
                    else:
                        mess = line + '\n' + mess
                except Exception:
                    pass
    return postlog(mess)
def run(self):
    for line in tailer.tail(open(self.page.file_path), self.page.plugin.lines):
        g15screen.run_on_redraw(self._add_line, line)
    self.fd = open(self.page.file_path)
    try:
        for line in tailer.follow(self.fd):
            if self._stopped:
                break
            g15screen.run_on_redraw(self._add_line, line)
            if self._stopped:
                break
    except ValueError as e:
        logger.debug("Error while reading", exc_info=e)
        if not self._stopped:
            raise e
    self.page.redraw()
def deferred_check():
    # Get the last 3 lines of the file
    s = tailer.tail(open('C:\\ProgramData\\CrashPlan\\log\\history.log.0'), 3)
    log = []
    tmp = []
    for i, l in enumerate(s):
        tmp = re_record.match(l)
        if tmp:
            log = tmp
    if not log:
        return False
    # log is now the latest log line for a complete backup;
    # check if it was in the last ~5 minutes
    # 02/09/16 11:41AM
    timedata = re_datetime.match(log.group(1))
    nowtime = datetime.datetime.now()
    logtime = datetime.datetime(
        int("20" + timedata.group(3)),  # year
        int(timedata.group(1)),         # month (damn silly month/day mixup)
        int(timedata.group(2)),         # day
        int(timedata.group(4)),         # hour
        int(timedata.group(5)),         # minute
        tzinfo=nowtime.tzinfo)
    diff = (nowtime - logtime).seconds
    print("log time:", logtime)
    print("now time:", nowtime)
    # was the latest completed backup in the last 10 minutes?
    if diff < TIME_INTERVAL:
        print("Backup completed " + str(diff) + " seconds ago. Shutting down...")
        subprocess.call(["shutdown", "/s", "/t", "60"])
        return True
    print("Fail: Last backup was", diff, "seconds ago, greater than",
          TIME_INTERVAL, "seconds ago.")
    return False
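# deferred_check above relies on re_record and re_datetime defined elsewhere;
# here is a hypothetical pattern consistent with the sample timestamp
# "02/09/16 11:41AM" in the comment, to show what the groups would hold:
import re

re_datetime_sketch = re.compile(r"(\d{2})/(\d{2})/(\d{2}) (\d{1,2}):(\d{2})([AP]M)")
m = re_datetime_sketch.match("02/09/16 11:41AM")
assert m and m.group(3) == "16"  # group(3) is the two-digit year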
def tail(self, irc, msg, args, optlist, optlog):
    """[--singleline --n=# of lines] <logfile>

    Tails the last 10 messages from a logfile.
    Execute the listlogs command for a list of available logs. Ex: main
    """
    # first, lowercase optlog to match.
    optlog = optlog.lower()
    # next, handle optlist.
    singleline, lines = False, 10  # defaults.
    if optlist:
        for (k, v) in optlist:
            if k == 'singleline':
                singleline = True
            if k == 'n':
                if v > 50:
                    irc.reply("Sorry, I won't display more than 50 lines.")
                elif v < 1:
                    irc.reply("Sorry, I need a positive integer here.")
                else:  # under 50, so let's go.
                    lines = v
    # next, grab our list of logs.
    ll = self._listlogs()
    if not ll:
        irc.reply("ERROR: No logs found to display.")
        return
    else:  # found logs. verify it works.
        if optlog not in ll:  # we didn't find it. display a list.
            irc.reply("ERROR: '{0}' is not a valid log. These are: {1}".format(
                optlog, " | ".join([i for i in ll.keys()])))
            return
    # we're here if things worked.
    # let's display the last `lines` lines.
    lf = tailer.tail(open(ll[optlog]), lines)
    # let's display.
    if singleline:
        irc.reply("{0} :: {1}".format(optlog, " ".join([i for i in lf])))
    else:  # one per line.
        for l in lf:
            irc.reply("{0}".format(l))
def logslastseen(chan, user):
    mess = ""
    lines = tailer.tail(open(chan + ".txt", errors='ignore'), logmax)
    lines.reverse()
    n = datetime.datetime.now()
    try:
        user = user[0:4]
    except Exception:
        pass
    pinged = False
    for line in lines:
        line = line.strip('\n').strip()
        if line != '':
            if (user in line and "ping timeout" in line.lower()
                    and "quit" in line.lower()
                    and '<' not in line.lower() and '*' not in line.lower()):
                pinged = True
            if pinged:
                if "<" + user in line:
                    mess = line + '\n' + mess
                    break
                else:
                    mess = line + '\n' + mess
            else:
                if (user in line and ("quit" in line.lower() or "left" in line)
                        and '<' not in line.lower() and '*' not in line.lower()):
                    mess = line + '\n' + mess
                    break
                else:
                    mess = line + '\n' + mess
    return postlog(mess)
def _create(log_file):
    errors = []
    with open(log_file) as f:
        for line in f.readlines():
            if 'error' in line.lower():
                errors.append(line)
    last_lines = tailer.tail(open(log_file), 1)
    status = 'inprogress'
    build_time = None
    if len(last_lines) == 1:
        last_line = last_lines[0]
        time_prefix = 'Build time: '
        if last_line.startswith(time_prefix):
            status = 'good'
            build_time = last_line.replace(time_prefix, '').strip()
    path_head, path_tail = os.path.split(log_file)
    tz = get_localzone()
    file_time = tz.localize(datetime.datetime.strptime(
        time.ctime(os.path.getmtime(log_file)), "%a %b %d %H:%M:%S %Y"))
    return Job(path_tail, status, log_file, build_time,
               file_time.isoformat(), len(errors))
def prepare_pdf_export(request, config_id):
    if request.method == "POST":
        fd, filename = tempfile.mkstemp(suffix=".log")
        fp = open(filename, 'w')
        fp.write("Ready to start PDF generation\n")
        fp.close()
        # Strip off the path and the suffix
        filename = filename[len(tempfile.gettempdir()):]
        filename = filename[:-4]
        filename = filename.lstrip(os.path.sep)
        assert os.sep not in filename and filename.isalnum()  # lazy security checking
        return HttpResponse(filename, content_type="text/plain")
    filename = request.GET.get("key")
    assert filename and os.sep not in filename and filename.isalnum()  # lazy security checking
    filename = os.path.join(tempfile.gettempdir(), filename + ".log")
    fp = open(filename)
    try:
        body = tailer.tail(fp, 1)[0]
    except IndexError:
        body = ''
    fp.close()
    return HttpResponse(body, content_type="text/plain")
def basic_cmnd_log(self, message):
    """ """
    plugin = None
    if 'plugin' in message.data:
        plugin = message.data['plugin']
    if plugin != self._parent._name:
        return False
    mess = XplMessage()
    mess.set_type("xpl-trig")
    mess.set_schema("helper.basic")
    mess.add_data({"plugin": self._parent._name})
    mess.add_data({"command": "log"})
    try:
        # default to 50 lines so `lines` is always defined
        lines = 50
        try:
            if 'lines' in message.data:
                lines = int(message.data['lines'])
        except:
            lines = 50
        # TODO: retrieve the filepath from the logger
        filename = os.path.join("/var/log/domogik", self._parent._name + ".log")
        result = tailer.tail(open(filename), lines)
        i = 1
        for line in result:
            if line != "":
                mess.add_data({"scr%s" % (i): "%s" % (line)})
                i = i + 1
        mess.add_data({"scr-count": i - 1})
        mess.add_data({"status": "ok"})
    except:
        error = "Exception : %s" % (traceback.format_exc())
        self._parent.log.error("pluginHelper.basic_cmnd_log : " + error)
        mess.add_data({"scr1": "An error has occurred. Look at the log."})
        mess.add_data({"scr-count": 1})
        mess.add_data({"status": "ok"})
    self._parent.myxpl.send(mess)
def log():
    import tailer
    lines = tailer.tail(open(app.config['LOG']), 50)
    return render_template("log.html", title="Logging", log=lines)
# -*- coding: utf-8 -*-
import tailer
from sys import argv

script, filename = argv

for original in tailer.tail(open(filename), 50):
    print(original)

for line in tailer.follow(open(filename)):
    print(line)
def getLastOrder():
    return tailer.tail(OrderFile, 3)[-2].split(",")

def getLastMatch():
    return tailer.tail(MatchFile, 3)[-2].split(",")

def getLastUpDn5():
    return tailer.tail(UpDn5File, 3)[-2].split(",")
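# The three getters above take tail(..., 3)[-2], the second-to-last line of
# the file, presumably skipping a final line that may still be mid-write.
# Hypothetical stand-in for what tailer.tail would return:
window = ["old,1", "complete,2", "partial"]
assert window[-2].split(",") == ["complete", "2"]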
def view_log(name, n=50):
    with open("../log/{}.log".format(name)) as f:
        lines = tailer.tail(f, n)
    return lines
while not startdir.endswith('workspace'):
    startdir = os.path.dirname(startdir)
    if len(startdir) < 2:
        raise Exception("workspace directory not found")

# get the git revision number
commit = open(os.path.join(startdir, 'git.revision'), 'r').read().strip()
print('commit: ', commit)

# locate the log inside the newest build dir
buildsdir = os.path.join(os.path.dirname(startdir), 'builds')
builddir = find_latest_dir(buildsdir)
log = os.path.join(builddir, 'log')

# see if this log shows a success
tail = tailer.tail(open(log), 500)
logfile = None
error = None
if 'BUILD SUCCEEDED' not in ''.join(tail):
    # not successful; get more lines to write to the output log
    error = []
    for line in tail:
        if 'error' in line.lower() or 'fail' in line.lower():
            error.append(line.strip())
    if error:
        error = '\n'.join(error)
    else:
        error = "build failed, see log"
    logfile = os.path.join(os.path.dirname(log), 'autolog_log')
    f = open(logfile, 'w')
    f.writelines(["%s\n" % x for x in tail])
def tail_log_file():
    try:
        return tailer.tail(open(BOT_LOGFILE), 10)
    except FileNotFoundError:
        return ["Log file not found"]
# -*- coding: utf-8 -*-
import tailer
from os import listdir
from os.path import isfile, join, sep, expanduser

path = expanduser("~") + sep + "logutil" + sep + "testFolder"
files = [f for f in listdir(path) if isfile(join(path, f))]
sorted_files = sorted(files)
last_log_file = sorted_files.pop()
file_with_path = path + sep + last_log_file

for original in tailer.tail(open(file_with_path), 50):
    print(original)

for line in tailer.follow(open(file_with_path)):
    print(line)