def run(self):
    """Main watcher loop: tail self.log_file_path and forward new lines.

    Drains any pre-existing lines first (so only lines written after start
    are reported), then polls the file every 0.5s while self.running is
    true, passing each stripped line to self.send_log_callback.
    """
    self.logger.debug('Thread started')
    try:
        # purge new lines: consume what already exists so we only
        # report lines appended after the thread starts
        Pygtail(self.log_file_path).readlines()
        # handle new lines
        while self.running:
            try:
                for log_line in Pygtail(self.log_file_path):
                    # BUGFIX: decode only bytes. The original tested
                    # isinstance(log_line, str) and then called .decode(),
                    # which raises AttributeError on Python 3 (str has no
                    # decode). bytes is an alias of str on Python 2, so
                    # this check behaves identically there.
                    if isinstance(log_line, bytes):
                        log_line = log_line.decode('utf-8')
                    log_line = log_line.strip()
                    self.logger.debug('New log line: %s' % log_line)
                    self.send_log_callback(log_line)
                # pause between polls
                time.sleep(0.5)
            except Exception:
                # narrowed from bare `except:` so KeyboardInterrupt and
                # SystemExit still propagate
                self.logger.exception(u'Exception on log watcher:')
    except Exception:
        self.logger.exception(u'Fatal exception on log watcher:')
    self.logger.debug(u'Thread stopped')
def test_subsequent_read_with_new_data(self):
    """A fresh Pygtail instance resumes from the stored offset."""
    first = Pygtail(self.logfile.name)
    self.assertEqual(first.read(), self.test_str)
    appended = "4\n5\n"
    self.append(appended)
    second = Pygtail(self.logfile.name)
    self.assertEqual(second.read(), appended)
def test_read_from_the_file_end(self):
    """With read_from_end=True only data appended after open is returned."""
    tail = Pygtail(self.logfile.name, read_from_end=True)
    self.assertEqual(tail.read(), None)
    appended = "4\n5\n"
    self.append(appended)
    fresh_tail = Pygtail(self.logfile.name, read_from_end=True)
    self.assertEqual(fresh_tail.read(), appended)
def test_timed_rotating_file_handler(self):
    """Rotation to a '.YYYY-MM-DD' suffixed file must not lose lines."""
    chunks = ["4\n5\n", "6\n7\n"]
    tail = Pygtail(self.logfile.name)
    tail.read()
    self.append(chunks[0])
    # simulate TimedRotatingFileHandler renaming the active log
    os.rename(self.logfile.name, "%s.2016-06-16" % self.logfile.name)
    self.append(chunks[1])
    tail = Pygtail(self.logfile.name)
    self.assertEqual(tail.read(), "".join(chunks))
def test_logrotate_with_dateext_with_delaycompress(self):
    """logrotate dateext + delaycompress (uncompressed '-YYYYMMDD' file) keeps all lines."""
    chunks = ["4\n5\n", "6\n7\n"]
    tail = Pygtail(self.logfile.name)
    tail.read()
    self.append(chunks[0])
    # simulate logrotate's dateext rename (not yet compressed)
    os.rename(self.logfile.name, "%s-20160616" % self.logfile.name)
    self.append(chunks[1])
    tail = Pygtail(self.logfile.name)
    self.assertEqual(tail.read(), "".join(chunks))
def test_custom_rotating_file_handler_with_prepend(self):
    """A user-supplied rotated-filename pattern (prefix style) is honored."""
    chunks = ["4\n5\n", "6\n7\n"]
    tail = Pygtail(self.logfile.name)
    tail.read()
    self.append(chunks[0])
    file_dir, rel_filename = os.path.split(self.logfile.name)
    rotated = os.path.join(file_dir, "custom_log_pattern.%s" % rel_filename)
    os.rename(self.logfile.name, rotated)
    self.append(chunks[1])
    tail = Pygtail(self.logfile.name,
                   rotated_filename_patterns=["custom_log_pattern.%s"])
    self.assertEqual(tail.read(), "".join(chunks))
def check(self):
    """Scan new log lines for a respend (double-spend) pair.

    tx2 is logged before tx1, so tx2 is filled first. Whenever both
    records are complete, on_respend is fired and the records are reset
    so further pairs in the same batch are also caught.

    Returns True if at least one complete tx1/tx2 pair was found.
    """
    tx1 = RespendTx(
        time_regex=r'.*tx1: (\d{4}-\d{2}-\d{2} [0-9:]{8})',
        hex_regex=r'.*tx1 hex: ([0-9a-f]+)')
    tx2 = RespendTx(
        time_regex=r'(\d{4}-\d{2}-\d{2} [0-9:]{8}) Respend tx2',
        hex_regex=r'.*tx2 hex: ([0-9a-f]+)')
    detected = False
    for entry in Pygtail(self.logpath):
        # tx2 is logged before tx1
        if not tx2.done():
            tx2.parse_line(entry)
        elif not tx1.done():
            tx1.parse_line(entry)
        if tx1.done() and tx2.done():
            print("found double spend")
            self.on_respend(tx1, tx2)
            tx1.clear()
            tx2.clear()
            detected = True
    return detected
def _consume_loop(self):
    """Poll the log file once per second and fan new lines out to subscribers."""
    while self._is_running:
        sleep(1)  # throttle polling for new logs
        tail = Pygtail(self._expanded_log_path,
                       read_from_end=True,
                       offset_file=self._offset_path)
        for entry in tail:
            self._notify_subscribers(entry)
def PygtailLogger(logger: logging.Logger, filename: str, prefix: str = "2| ") -> Iterator[Callable[[], None]]:
    """
    Helper for streaming task stderr into logger using pygtail. Context manager yielding
    a function which reads the latest lines from the file and writes them into logger
    at verbose level. This function also runs automatically on context exit.

    Truncates lines at 4KB in case writer goes haywire.
    """
    pygtail = Pygtail(filename, full_lines=True)
    pygtail_ok = True

    def poll() -> None:
        nonlocal pygtail_ok
        if pygtail_ok:
            try:
                for line in pygtail:
                    logger.verbose((prefix + line.rstrip())[:4096])  # pyre-ignore
            except Exception:
                # BUGFIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; Exception keeps those fatal.
                pygtail_ok = False
                # cf. https://github.com/bgreenlee/pygtail/issues/48
                logger.verbose(  # pyre-ignore
                    "incomplete log stream due to the following exception; see %s",
                    filename,
                    exc_info=sys.exc_info(),
                )

    try:
        yield poll
    finally:
        # flush any remaining lines on context exit
        poll()
def test_renamecreate_unknown_rotated_name(self):
    """
    Tests "renamecreate" semantics where the currently processed file gets
    renamed and the original file gets recreated. Rolled file has unknown
    name to pygtail. logrotate from Linux has this behaviour when rotating
    into separate directory.
    """
    chunks = ["4\n5\n", "6\n7\n"]
    tail = Pygtail(self.logfile.name)
    tail.read()
    os.rename(self.logfile.name, "%s.unknown-name" % self.logfile.name)
    # append will recreate the original log file
    self.append(chunks[0])
    self.append(chunks[1])
    # reopen using Pygtail
    tail = Pygtail(self.logfile.name)
    self.assertEqual(tail.read(), "".join(chunks))
def generate():
    """SSE generator: yield each new line of LOG_FILE as a 'data:' event.

    Loops forever; a fresh Pygtail is created on each pass so lines
    appended between passes are picked up via the offset file.
    """
    while True:
        # renamed from `file`, which shadowed the builtin; the unused
        # enumerate() index was also dropped
        tail = Pygtail(LOG_FILE, every_n=1)
        for line in tail:
            yield "data:" + str(line) + "\n\n"
            time.sleep(0.1)  # pace the event stream
        time.sleep(1)  # idle wait before polling the file again
def main():
    """Tail METRICS_SH forever, pushing parsed 'name: value' samples onto METRICS_QUEUE."""
    Thread(target=put_metrics).start()
    # Block until the metrics file appears.
    while not os.path.exists(METRICS_SH):
        print('Waiting for {}...'.format(METRICS_SH))
        time.sleep(1)
    while True:
        for entry in Pygtail(METRICS_SH):
            try:
                name, raw = entry.split(': ')
                number = float(raw.strip())
                if ' ' in name:
                    raise ValueError
            except ValueError:
                print('invalid line', entry.strip())
                continue
            sample = {
                'timestamp': time.time(),
                'value': number,
                'metric': name,
            }
            METRICS_QUEUE.put(sample)
            print('queued', entry.strip())
        time.sleep(5)
        print('no new data...')
def test_copytruncate_on_smaller(self):
    """After copytruncate, only lines appended post-truncate are returned."""
    self.test_readlines()
    self.copytruncate()
    appended = "4\n5\n"
    self.append(appended)
    tail = Pygtail(self.logfile.name, copytruncate=True)
    self.assertEqual(tail.read(), appended)
def tail_file(self, remove_offset: bool = True) -> Iterable:
    """Return a Pygtail over the server log, tracking position in '.log_offset'.

    :param remove_offset: when True (default), delete the stored offset
        first so the tail restarts from the beginning of the file.
    """
    server_log = get_server_path(self.config.server_log)
    offset = get_server_path(".log_offset")
    if remove_offset:
        self.delete_offset()
    return Pygtail(server_log, offset_file=offset)
def _test_copytruncate_larger(self, onoff):
    """Shared driver: behaviour when the file grows past its pre-truncate size.

    :param onoff: value passed through as Pygtail's copytruncate flag.
    """
    self.test_readlines()
    self.copytruncate()
    # regrow the file beyond its previous size before appending new data
    self.append(self.test_str)
    appended = "4\n5\n"
    self.append(appended)
    tail = Pygtail(self.logfile.name, copytruncate=onoff)
    self.assertEqual(tail.read(), appended)
def ss_web_log_monitor(self): self.logger.debug("Doorbird.ss_web_log_monitor() called") #Get isDaylight variable ID isDaylightId = None for var in indigo.variables: if var.name == "isDaylight": isDaylightId = var.id lastTriggered = None lastError = None self.logger.info( indigo.devices[self.indigoID].name + ": Monitoring of the Security Spy web log has started") while self.monitorSSWeblogs: try: if os.path.isfile(indigo.devices[ self.indigoID].pluginProps["ssWebLogPath"]): if indigo.variables[isDaylightId].value == "false": for line in Pygtail(indigo.devices[ self.indigoID].pluginProps["ssWebLogPath"]): if (("cameraNum=" + indigo.devices[ self.indigoID].pluginProps["ssCameraNum"]) in line) and (indigo.devices[self.indigoID] .states["doorbirdOnOffState"] == True): if (lastTriggered == None) or ( time.time() - lastTriggered > 30): self.logger.debug( indigo.devices[self.indigoID].name + ": Security Spy web log criteria detected" ) self.turn_light_on() lastTriggered = time.time() else: if (lastError == None) or ( time.time() - lastError > 300 ): # only send error messages every 5 minutes so as not to spam the log lastError = time.time() self.logger.error( indigo.devices[self.indigoID].name + ": Invalid Security Spy web log path: " + indigo.devices[ self.indigoID].pluginProps["ssWebLogPath"]) except: if (lastError == None) or ( time.time() - lastError > 300 ): # only send error messages every 5 minutes so as not to spam the log lastError = time.time() self.logger.error(indigo.devices[self.indigoID].name + ": Error reading Security Spy web log") time.sleep(.5) self.logger.info( indigo.devices[self.indigoID].name + ": Monitoring of the Security Spy web log has stopped")
def parse_log():
    '''Parses ESS Log Data to store for the App.

    Tails the configured ESS log, extracts the JSON payload from each line,
    skips connection tests and already-stored messages, and persists the
    account/domain/message/recipient/attachment records in one transaction
    per line (rolled back on any failure).
    '''
    app = create_app(config.JobConfig)
    app_context = app.app_context()
    app_context.push()
    _detect_rotated_log(app)
    with app.app_context():
        try:
            for line in Pygtail(app.config['ESS_LOG'], paranoid=True,
                                full_lines=True,
                                offset_file=app.config['ESS_LOG_OFFSET']):
                try:
                    data = re.findall(r'\{.*\}', line)
                    data = json.loads(data[0])
                except Exception as r:
                    app.logger.error(r)
                    # BUGFIX: without this `continue`, a line that failed to
                    # parse fell through to the code below with `data`
                    # unbound (NameError) or stale from the previous
                    # iteration, silently re-processing the wrong message.
                    continue
                if _is_connection_test(data['account_id'], data['domain_id']):
                    app.logger.info('Conncetion Test Detected. Skipping...')
                    continue
                if _message_exists(app.logger, data['message_id']):
                    app.logger.info('Message ID FOUND. Skipping...')
                    continue
                app.logger.info('Message ID NOT FOUND. Processing...')
                try:
                    _store_account(app.logger, data)
                    _store_domain(app.logger, data)
                    _store_message(app.logger, data)
                    if data['recipients']:
                        for recipient in data['recipients']:
                            _store_recipient(app.logger, recipient,
                                             data['message_id'])
                    if data['attachments']:
                        for attachment in data['attachments']:
                            _store_attachment(app.logger, attachment,
                                              data['message_id'])
                except Exception as e:
                    # any failure voids the whole message's inserts
                    db.session.rollback()
                    app.logger.error("Failed to Process Message ({})".format(
                        data['message_id']))
                    app.logger.error(e)
                else:
                    db.session.commit()
        except Exception as f:
            app.logger.error(f)
    app.logger.info('Closing app context for parse_log')
    app_context.pop()
def run(self): print 'Starting Thread:' + self.objectName() self.started = True while self.started: for line in Pygtail(self.logger_path): try: self.emit(SIGNAL('Activated( QString )'), line.rstrip().split(' : ')[1]) except IndexError: pass
def test_logrotate_without_delay_compress(self):
    """Rotation straight to a gzipped '.1.gz' file must not lose lines.

    Simulates logrotate without delaycompress: the current log is gzipped
    to <name>.1.gz and the original file is truncated in place.
    """
    new_lines = ["4\n5\n", "6\n7\n"]
    pygtail = Pygtail(self.logfile.name)
    pygtail.read()
    self.append(new_lines[0])
    # put content to gzip file; `with` guarantees both handles are closed
    # even if the copy fails (the original leaked gzip_handle on error)
    with gzip.open("%s.1.gz" % self.logfile.name, 'wb') as gzip_handle, \
            open(self.logfile.name, 'rb') as logfile:
        gzip_handle.write(logfile.read())
    with open(self.logfile.name, 'w'):
        # truncate file
        pass
    self.append(new_lines[1])
    pygtail = Pygtail(self.logfile.name)
    self.assertEqual(pygtail.read(), ''.join(new_lines))
def read_raw_logs(self, log_file="", start=False, offset_path=""):
    """Yield unprocessed lines from *log_file*, tracking position in an offset file.

    :param log_file: path to tail; nothing is yielded when empty.
    :param start: when True, delete any existing offset so reading restarts
        from the beginning of the file.
    :param offset_path: explicit offset file; defaults to
        <self.offset_path>/<self.conn_log_file>.offset.
    """
    if not offset_path:
        offset_path = os.path.join(
            self.offset_path, self.conn_log_file + ".offset")
    if start and os.path.exists(offset_path):
        os.remove(offset_path)
    if not log_file:
        return
    for line in Pygtail(log_file, offset_file=offset_path):
        if line is not None:
            yield line
def main(): stream = file('config.yaml', 'r') config = yaml.load(stream) tailer = Pygtail(config.get('tail').get('aof_path'), "offset_file") parser = Parser() for line in tailer: command = parser.parse_command(line) if not command: continue print command
def _tail_lines(self, filepath):
    """Worker loop: follow *filepath* and push each new line into the log buffer.

    Blocks while the worker is disabled (enabled event cleared); exits when
    the kill event is set. Sleeps 0.5s whenever the tail is exhausted.
    """
    tail = Pygtail(str(filepath))
    while True:
        try:
            self._tail_worker_enabled.wait()
            if self._tail_worker_kill.is_set():
                return
            # use the builtin next() instead of the Python-2-style
            # tail.next() method call
            line = next(tail)
            # hand the line to the GTK main loop thread
            GLib.idle_add(self._add_line_to_log_buffer, line)
        except StopIteration:
            # no new data yet; back off briefly
            time.sleep(0.5)
def pygtail_check_logs():
    """Scan new realtime-log lines for key phrases and hand matches to process_log."""
    from pygtail import Pygtail
    # Real-time events we act on; add entries here to trigger on more phrases.
    key_phrases = ["new-host-alert"]
    for line in Pygtail(REALTIME_LOG_PATH):
        # 'nnm:' filter in case we're looking at /var/log/messages instead
        if 'nnm:' not in line:
            continue
        for phrase in key_phrases:
            if phrase in line:
                process_log(line)
def test_on_update_without_paranoid(self):
    """Without paranoid mode the on_update callback fires once, after iteration ends."""
    updates = [0]

    def bump():
        updates[0] += 1

    tail = Pygtail(self.logfile.name, on_update=bump)
    self.assertEqual(updates[0], 0)
    # callback must NOT fire per line...
    for _ in tail:
        self.assertEqual(updates[0], 0)
    # ...only once the iterator is exhausted
    self.assertEqual(updates[0], 1)
def read_eventrec(path):
    '''Follow lsb.stream at *path*, decoding and displaying each event record.

    Polls once per second forever; offsets persist in "lsb.stream.pygtail".
    '''
    while True:
        tail = Pygtail(path, offset_file="lsb.stream.pygtail", paranoid=True)
        for line in tail:
            record = lsf.eventRec()
            # abandon this pass on the first line that fails to decode
            if lsf.lsb_geteventrecbyline(line, record) != 0:
                break
            display(record)
        time.sleep(1)
def test_full_lines(self):
    """
    Tests lines are logged only when they have a new line at the end.
    This is useful to ensure that log lines aren't unintentionally split up.
    """
    tail = Pygtail(self.logfile.name, full_lines=True)
    partial = "4\n5,"
    completion = "5.5\n6\n"
    self.append(partial)
    tail.read()  # "5," has no trailing newline, so it must be held back here
    self.append(completion)
    # the held-back fragment is joined with its completion
    self.assertEqual(tail.read(), "5,5.5\n6\n")
def test_copytruncate_off_smaller(self):
    """With copytruncate disabled, a shrunken file yields a warning and no data."""
    self.test_readlines()
    self.copytruncate()
    self.append("4\n5\n")
    # capture stderr so the shrink warning can be inspected
    captured = io.BytesIO() if PY2 else io.StringIO()
    sys.stderr = captured
    tail = Pygtail(self.logfile.name, copytruncate=False)
    warning_text = captured.getvalue()
    sys.stderr = sys.__stderr__
    self.assertRegexpMatches(warning_text, r".*?\bWARN\b.*?\bshrank\b.*")
    self.assertEqual(tail.read(), None)
def run(self):
    """Follow the apache log forever, handing each line to self.connections
    on its own thread. Waits and retries if the log file is missing."""
    while True:
        try:
            for entry in Pygtail(self.conf['apache_log']):
                worker = Thread(target=self.connections, args=(entry,))
                worker.start()
                # Prevent processing overflow
        except IOError:
            print('[-] Log not found: {}, waiting...'.format(self.conf['apache_log']))
            sleep(5)
        except:
            # best-effort: swallow any other tailing error and keep looping
            pass
        finally:
            sleep(0.01)
def test_renamecreate(self):
    """
    Tests "renamecreate" semantics where the currently processed file gets
    renamed and the original file gets recreated. This is the behavior of
    certain logfile rollers such as TimeBasedRollingPolicy in Java's
    Logback library.
    """
    chunks = ["4\n5\n", "6\n7\n"]
    tail = Pygtail(self.logfile.name)
    tail.read()
    os.rename(self.logfile.name, "%s.2018-03-10" % self.logfile.name)
    # append will recreate the original log file
    self.append(chunks[0])
    self.append(chunks[1])
    # the SAME instance must pick up both chunks from the recreated file
    self.assertEqual(tail.read(), "".join(chunks))
def test_on_update_with_paranoid(self):
    """In paranoid mode the on_update callback fires after every single line."""
    updates = [0]

    def bump():
        updates[0] += 1

    tail = Pygtail(self.logfile.name, paranoid=True, on_update=bump)
    self.assertEqual(updates[0], 0)
    # each next() must bump the counter exactly once
    for expected in (1, 2, 3):
        next(tail)
        self.assertEqual(updates[0], expected)