def parse(self, event_line, previous_lines, next_lines):
    """Feed one log line into this in-progress event.

    Returns one of the EVENT_* sentinels: EVENT_MORE while the event is
    still accumulating lines, EVENT_DONE when this line ends it, and
    EVENT_DONE_REDO when the line belongs to a different event entirely.
    """
    # The very first line goes through the dedicated bootstrap path.
    if self.first_line:
        return self._parse_first_line(
            event_line, previous_lines, next_lines)

    state = event_line["url_query"]["state"]

    if state == "playing":
        gap = datetime_diff(event_line['datetime'], self.last['datetime'])
        if gap > 600:
            # Too much of a time difference, making this a different event.
            self.debug_final = event_line
            return EVENT_DONE_REDO
        self.debug_info.append(event_line)
        self.last = event_line
        return EVENT_MORE

    # "paused" keeps the event open; any other state closes it.
    # Either way the line is recorded.
    self.last = event_line
    self.debug_info.append(event_line)
    return EVENT_MORE if state == "paused" else EVENT_DONE
def parse_dump(self, last_datetime):
    """Clear out null events, returns done_events.

    Call this before you serialize this object.
    Events returned here are stable, and shouldn't change.
    """
    finished = self.done_events
    self.done_events = []

    # Snapshot the items so entries can be dropped while we walk them.
    for key, parser in list(self.event_parsers.items()):
        if parser.first_line:
            # Parser never consumed a real line: discard it outright.
            del self.event_parsers[key]
        elif datetime_diff(last_datetime, parser.last['datetime']) > 600:
            # Parser has gone stale: close it out and collect its event.
            parser.finish()
            finished.append(parser.event)
            del self.event_parsers[key]

    self.debug_stream = None
    return finished
def main():
    """Collect new Plex log lines and append them to dated JSON log files.

    Reads configuration from logs/config.cfg, parses every
    'Plex Media Server.log*' file for entries newer than the recorded
    checkpoint, writes each entry as one JSON line (optionally gzipped),
    then advances the checkpoint in the config.
    """
    logging.info('{0:#^40}'.format('[ Plex Log Saver ]'))

    if not os.path.isdir('logs'):
        os.mkdir('logs')

    config_file = os.path.join('logs', 'config.cfg')
    config = config_load(config_file)

    if config['plex_log_dir'] == '':
        logging.info('Config missing "plex_log_dir", Exiting!')
        print('Config missing "plex_log_dir", Exiting!')
        return

    log_file_template = os.path.join(
        'logs', config['log_file_name'])

    if config['log_save_mode'] == 'gzip':
        import gzip
        log_open = gzip.open
    else:
        log_open = open

    # Checkpoint is stored as a '-'-joined tuple of ints, e.g. Y-M-D-h-m-s.
    last_datetime = tuple(map(int, config['plex_last_datetime'].split('-')))

    log_parser = PlexSuperLogParser(last_datetime)
    all_lines = []

    # We're only interested in 'Plex Media Server.log' log files
    # I've been able to so far get all of the info i need from those logs
    log_file_glob = os.path.join(
        config['plex_log_dir'], 'Plex Media Server.log*')
    for log_file in file_glob(log_file_glob):
        all_lines.extend(log_parser.parse_file(log_file))

    if not all_lines:
        logging.info('No new lines, finishing.')
        return

    # Sort the logs based on datetime
    all_lines.sort(key=lambda line_body: line_body['datetime'])

    time_diff = datetime_diff(all_lines[0]['datetime'], last_datetime)
    logging.info((
        ' Last entry last run:'
        ' {0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}').format(
            *last_datetime))
    logging.info((
        'Earliest entry this run:'
        ' {0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}').format(
            *all_lines[0]['datetime']))
    if time_diff > 60:
        # logging.warn is deprecated (removed in Python 3.13); use warning.
        logging.warning((
            'Possibly missing {0} seconds of log files').format(time_diff))

    logging.info('{0} new log lines added'.format(len(all_lines)))

    # TODO: replace this! No longer needed...
    # BasketOfHandles handles our open files for us,
    # keeping only 5 open at a time.
    with BasketOfHandles(log_open, 5) as basket:
        for line_body in all_lines:
            log_file_name = log_file_template.format(**line_body)
            file_handle = basket.open(log_file_name, 'at')
            json.dump(line_body, file_handle, sort_keys=True)
            file_handle.write('\n')
            # Track the newest entry so the checkpoint only moves forward.
            if line_body['datetime'] > last_datetime:
                last_datetime = line_body['datetime']

    config['plex_last_datetime'] = '-'.join(map(str, last_datetime))
    config_save(config_file, config)
    logging.info('Finished.')
def get_duration(self):
    """Return the event length in seconds, or None if either endpoint is unset."""
    if self.start is not None and self.end is not None:
        return datetime_diff(self.end, self.start)
    return None