def _main(filepath, options):
    try:
        if options.lines > 0:
            with open(filepath, 'rb') as f:
                if options.head:
                    if options.follow:
                        sys.stderr.write('Cannot follow from top of file.\n')
                        sys.exit(1)
                    lines = head(f, options.lines)
                else:
                    lines = tail(f, options.lines)

            encoding = locale.getpreferredencoding()
            for line in lines:
                print(line.decode(encoding))

        if options.follow:
            for line in follow_path(filepath):
                if line is not None:
                    print(line)
                else:
                    time.sleep(options.sleep)
    except KeyboardInterrupt:
        # Escape silently
        pass
async def my_background_task():
    await client.wait_until_ready()
    while not client.is_closed():
        print("The bot is ready!")
        while True:
            for line in tailhead.follow_path(log_loc):
                if line is not None:
                    if "ENCOUNTER_START" in line:
                        if debug:
                            print('\rfound encounter start: ' + line)
                    elif "ENCOUNTER_END" in line:
                        if debug:
                            print('\rfound encounter end: ' + line)
                        if int(line[-1]) == 0:
                            await client.get_channel(channel_id).send(
                                "You wiped :(")
                        else:
                            await client.get_channel(channel_id).send(
                                "You did the thing! :D")
                            await client.get_channel(channel_id).send(str(command))
                        if debug:
                            await client.get_channel(channel_id).send(line)
                else:
                    if debug:
                        sys.stdout.write('\r' + "waiting for log event " + next(spinner))
                    # Yield to the event loop while waiting for new log lines
                    # rather than blocking it with time.sleep().
                    await asyncio.sleep(1)
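`spinner` is defined elsewhere in that bot; a hypothetical definition consistent with the `next(spinner)` call above would be a simple `itertools.cycle`:

import itertools

# Hypothetical helper (assumed, not taken from the original bot): an endless
# character sequence so that next(spinner) animates a console spinner.
spinner = itertools.cycle(['-', '\\', '|', '/'])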
def run(self):
    for line in tailhead.follow_path(self.path):
        if line is None:
            time.sleep(0.1)
            if self.should_exit:
                return
            continue
        self.update_metrics(line)
def run(self):
    for line in tailhead.follow_path(self.path):
        if line is None:
            time.sleep(0.1)
            if self.should_exit:
                return
            continue
        try:
            payload = json.loads(line)['Payload']
        except json.decoder.JSONDecodeError:
            print("Line not parsable as JSON: {}".format(line))
            continue
        self.update_metrics(payload)
def run(self):
    while True:
        # Send the last few lines of the log once, then follow it for new ones.
        with open('/var/log/mycroft/audio.log', 'rb') as f:
            line1 = tailhead.tail(f, 3)
        line1sant = str(line1[0].decode("utf-8"))
        emit('my_response', {'data': line1sant})
        for line2 in tailhead.follow_path('/var/log/mycroft/audio.log'):
            if line2 is not None:
                emit('my_response', {'data': line2})
            else:
                if self._should_kill:
                    break
                time.sleep(1)
        if self._should_kill:
            break
    self.reset_kill()
def follow_tail(self):
    """
    Read (tail and follow) the log file, parse entries and send
    messages to Sentry using Raven.
    """
    try:
        follower = tailhead.follow_path(self.filepath)
    except (FileNotFoundError, PermissionError) as err:
        raise SystemExit("Error: Can't read logfile %s (%s)" % (self.filepath, err))

    for line in follower:
        self.message = None
        self.params = None
        self.site = None
        if line is not None:
            self.parse(line)
            send_message(self.message, self.params, self.site, self.logger)
def logging_thread(file):
    container_id = file.split('/')[-2]
    container_obj = next(item for item in running_containers_data
                         if item['id'] == container_id)
    container_name = container_obj['name']
    for newlines in tailhead.follow_path(file):
        if newlines is not None:
            new_logs_batch = ''
            initial_logs_raw = []
            log_line_list = newlines.split("{\"log")
            for i in log_line_list:
                results = line_formater("{\"log" + i, container_name)
                if results[0] == '':
                    continue
                initial_logs_raw.append({"log": results[0], "ms": results[1]})
            for i in initial_logs_raw:
                new_logs_batch += i['log']
            print(new_logs_batch)
            container_obj['current_logs'] = new_logs_batch
            send_log_to_listener(new_logs_batch)
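`line_formater` and `send_log_to_listener` come from elsewhere in that project; a minimal, hypothetical sketch of what `line_formater` might look like, assuming Docker's json-file log format (`{"log": ..., "stream": ..., "time": ...}`), is:

import json
from datetime import datetime, timezone

def line_formater(raw_entry, container_name):
    # Hypothetical sketch (assumed, not the project's real implementation):
    # parse one json-file log entry and return (formatted text, timestamp in ms).
    try:
        entry = json.loads(raw_entry)
    except json.JSONDecodeError:
        return '', 0
    text = entry.get('log', '')
    if not text.strip():
        return '', 0
    try:
        # Docker timestamps are nanosecond-precision RFC 3339; keep microseconds.
        ts = datetime.strptime(entry['time'][:26], '%Y-%m-%dT%H:%M:%S.%f')
        ms = int(ts.replace(tzinfo=timezone.utc).timestamp() * 1000)
    except (KeyError, ValueError):
        ms = 0
    return '[%s] %s' % (container_name, text), ms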
def follow_tail(self):
    """
    Read (tail and follow) the log file, parse entries and send
    messages to Sentry using Raven.
    """
    try:
        follower = tailhead.follow_path(self.filepath)
    except (FileNotFoundError, PermissionError) as err:
        raise SystemExit("Error: Can't read logfile %s (%s)" % (self.filepath, err))

    for line in follower:
        self.clear_attributes()
        if line is not None:
            self.parse(line)
            send_message(
                self.message,
                self.level,
                self.data,
            )
        else:
            time.sleep(1)
def run(self):
    """
    This function is used for tailing the recording log and finding out
    when a new recording has taken place.
    """
    logging.debug('ENTERING FUNCTION: run()')
    list_cameras()
    for line in tailhead.follow_path(self.unifi_record_log):
        if line is not None:
            if 'STOPPING' in line and 'motionRecording' in line:
                split_row = line.split()
                # Debug to output line captured from recording.log
                #logging.debug('Capture: %s', split_row)
                rec_time = split_row[2].split('.')[0]
                rec_camera_id, rec_camera_name = split_row[4][6:].strip('[]').split('|')
                rec_duration = split_row[9]
                logging.info('---------- New recording ----------')
                logging.info('---------- Camera: %s ----------', rec_camera_name)
                rec_id = split_row[7].split(':')[1]
                logging.info(
                    '---------- Camera ID: %s Time: %s Rec ID: %s %s----------',
                    rec_camera_name, rec_time, rec_id, rec_duration)
                d1 = datetime.datetime.now().strftime("%H:%M:%S")
                d1 = str(d1)
                d2 = rec_time
                FMT = "%H:%M:%S"
                offby = datetime.datetime.strptime(
                    d1, FMT) - datetime.datetime.strptime(d2, FMT)
                offby = offby.total_seconds()
                logging.info('---------- Recording time: %s ----------', rec_time)
                logging.info('---------- Process time: %s ----------', d1)
                logging.info('---------- Detection behind by %s ----------', offby)
                rec_timestamp = rec_time.replace(":", "_")
                if offby > 9000:
                    continue
                # Download the recording.
                rec_file = self.download_recording(rec_id)
                if not rec_file:
                    continue
                # Run detection on the recording.
                self.run_detection(rec_file)
                if self.get_detection_result():
                    self.copy_result_movie(rec_camera_name, rec_timestamp)
                    self.copy_results_output(rec_camera_name, rec_timestamp)
                    notification_image = self.get_notification_image(
                        rec_camera_id, rec_id)
                    self.send_discord_notification(notification_image,
                                                   rec_camera_name, rec_timestamp)
                else:
                    logging.info('Person NOT FOUND in recording.')
                # Destroy recording
                os.remove(rec_file)
        else:
            time.sleep(1)
def follow_file(file, logger):
    for line in tailhead.follow_path(file):
        if line:
            extract = re.search(regex, line)
            level = extract.group("level") if extract and extract.groupdict().get("level") else "info"
            # Fall back to logger.info when the captured level has no matching method.
            getattr(logger, level.lower(), logger.info)(line)
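The `regex` used above is defined outside the excerpt; a hypothetical pattern with the named `level` group the function expects, assuming lines like `2024-01-01 12:00:00 ERROR something broke`, could be:

import re

# Hypothetical pattern (assumed, not from the original project): capture a
# severity word into the named group "level" that follow_file() looks up.
regex = re.compile(r'^\S+\s+\S+\s+(?P<level>DEBUG|INFO|WARNING|ERROR|CRITICAL)\b')

`getattr(logger, level.lower(), logger.info)` then dispatches each line to the matching `logging.Logger` method, falling back to `info` for anything unrecognized.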
except ImportError:
    log("Checkrune failed - keeping Rune")
    return True


def makehash(xy):
    string2hash = xy
    hash_object = hashlib.md5(string2hash.encode())
    hex_dig = hash_object.hexdigest()
    return hex_dig


def write_file(msg):
    localtime = time.localtime(time.time())
    timestring = time.strftime('%d.%m.%Y - %H:%M:%S')
    hash = makehash(msg)
    keep_rune = rune_eval(msg)
    with open("runesell.txt", 'w') as f:
        f.write("%s %s\n" % (hash, keep_rune))
    print(msg)


for line in tailhead.follow_path(runfilepath):
    if line is not None:
        print(line)
        write_file(line)
    else:
        time.sleep(5)
import os
import tailhead
import time
import re
from subprocess import run

keywords = ["emerg", "alert", "crit", "err", "info"]
regexes = []

#
# assert logfile or even use argparse to get it in
#

for key in keywords:
    expr = r'\]\s{1,}' + key + r'\:(.*)$'
    regexes.append(re.compile(expr))

for line in tailhead.follow_path('/var/log/syslog'):
    if line is not None:
        for expr in regexes:
            exists = re.match(expr, line)
            if exists is not None:
                print(exists)
                # run(['notify-send', text])
    else:
        time.sleep(1)
parser.add_argument("--api-url", type=str, required=True, help="URL of API to forward logs to") parser.add_argument("--proxy", type=str, required=False, help="HTTP proxies") parsed = parser.parse_args() file_path = parsed.export_file api_url = parsed.api_url proxy = parsed.proxy # ------------------------------------------------------------------------- # MAIN for line in tailhead.follow_path(file_path): if line is None: time.sleep(0.1) continue try: proxies_dict = {} if proxy: proxies_dict = { "http": proxy, "https": proxy, } resp = requests.post(api_url, data=line, headers={'Content-Type': 'application/json'},
#!/usr/bin/python3

# imports
import time
import tailhead
import telegram_send

# blocklist
blocklist = open("blocklist.txt", "r").read().split()

# Follow the file as it grows
for line in tailhead.follow_path('logfile.txt'):
    # follow_path yields None while no new data is available
    if line is None:
        time.sleep(1)
        continue
    # check against blocklist
    for unwantedUrl in blocklist:
        # if there exists a line with an entry on the blocklist, do 'telegram-send'
        if unwantedUrl in line:
            # disable potential links from being clickable by putting the dot between brackets
            safeUrl = line.replace(".", "[.]")
            telegram_send.send(messages=[safeUrl], conf=None, disable_web_page_preview="true")