def test_prefix(self):
    """Verify that a ComposedWriter applies its transforms in order
    (p1 first, then p2) before fanning each record out to both
    TextFileWriters.
    """
    prefix_1 = PrefixTransform('p1')
    prefix_2 = PrefixTransform('p2')
    f1_name = self.tmpdirname + '/f1'
    f2_name = self.tmpdirname + '/f2'
    writer = ComposedWriter(
        transforms=[prefix_1, prefix_2],
        writers=[TextFileWriter(f1_name), TextFileWriter(f2_name)])

    # Use context managers so the read handles are closed even if an
    # assertion fails partway through the loop (the original leaked them).
    with open(f1_name, 'r') as f1, open(f2_name, 'r') as f2:
        for line in SAMPLE_DATA:
            writer.write(line)
            time.sleep(0.1)  # give the file writers a moment to flush
            f1_line = f1.readline().rstrip()
            f2_line = f2.readline().rstrip()
            logging.info('wrote: "%s", f1: "%s", f2: "%s"',
                         line, f1_line, f2_line)
            # Transforms are applied in series: p1 first, then p2 on top.
            self.assertEqual('p2 p1 ' + line, f1_line)
            self.assertEqual('p2 p1 ' + line, f2_line)
def __init__(self, readers=None, transforms=None, writers=None,
             stderr_writers=None, host_id='', interval=0, name=None,
             check_format=False):
    """listener = Listener(readers, transforms=[], writers=[],
                           interval=0, check_format=False)

    readers        A single Reader or a list of Readers.

    transforms     A single Transform or a list of zero or more Transforms

    writers        A single Writer or a list of zero or more Writers

    stderr_writers A single Writer or a list of zero or more Writers to
                   which the logger's stderr should be written.

    host_id        Optional host_id on which the Listener is to be run.
                   Accepted so that configs that contain it can be passed
                   in wholesale as keyword arguments.

    interval       How long to sleep before reading sequential records

    name           Optional human-readable short name to be used in displays

    check_format   If True, attempt to check that Reader/Transform/Writer
                   formats are compatible, and throw a ValueError if they
                   are not. If check_format is False (the default) the
                   output_format() of the whole reader will be
                   formats.Unknown.

    Sample use:

    listener = Listener(readers=[NetworkReader(':6221'),
                                 NetworkReader(':6223')],
                        transforms=[TimestampTransform()],
                        writers=[TextFileWriter('/logs/network_recs'),
                                 TextFileWriter(None)],
                        interval=0.2)
    listener.run()

    Calling listener.quit() from another thread will cause the run() loop
    to exit.
    """
    # Use None sentinels instead of mutable [] defaults so a single list
    # object is never shared across Listener instances.
    readers = readers if readers is not None else []
    transforms = transforms if transforms is not None else []
    writers = writers if writers is not None else []
    stderr_writers = stderr_writers if stderr_writers is not None else []

    logging.info('Instantiating %s logger', name or 'unnamed')

    ###########
    # Create readers, writers, etc.
    # NOTE(review): stderr_writers is accepted but not referenced in this
    # body — presumably consumed by a subclass or caller; confirm.
    self.reader = ComposedReader(readers=readers, check_format=check_format)
    self.writer = ComposedWriter(transforms=transforms, writers=writers,
                                 check_format=check_format)
    self.interval = interval
    self.name = name or 'Unnamed listener'
    self.last_read = 0

    self.quit_signalled = False
def __init__(self, readers, transforms=None, writers=None, host_id='',
             interval=0, name=None, check_format=False):
    """
    listener = Listener(readers, transforms=[], writers=[],
                        interval=0, check_format=False)

    readers        A single Reader or a list of Readers.

    transforms     A single Transform or a list of zero or more Transforms

    writers        A single Writer or a list of zero or more Writers

    host_id        Optional host_id on which Listener is to be run. Ignored
                   here, but it may show up as part of a config, so we need
                   to be able to handle is in kwargs.

    interval       How long to sleep before reading sequential records

    name           Optional human-readable short name to be used in displays

    check_format   If True, attempt to check that Reader/Transform/Writer
                   formats are compatible, and throw a ValueError if they
                   are not. If check_format is False (the default) the
                   output_format() of the whole reader will be
                   formats.Unknown.

    Sample use:

    listener = Listener(readers=[NetworkReader(':6221'),
                                 NetworkReader(':6223')],
                        transforms=[TimestampTransform()],
                        writers=[TextFileWriter('/logs/network_recs'),
                                 TextFileWriter(None)],
                        interval=0.2)
    listener.run()

    Calling listener.quit() from another thread will cause the run() loop
    to exit.
    """
    # None sentinels instead of mutable [] defaults: a shared default list
    # would be the same object for every Listener instance.
    transforms = transforms if transforms is not None else []
    writers = writers if writers is not None else []

    self.reader = ComposedReader(readers=readers, check_format=check_format)
    self.writer = ComposedWriter(transforms=transforms, writers=writers,
                                 check_format=check_format)
    self.interval = interval
    self.name = name or 'Unnamed listener'
    self.last_read = 0

    self.quit_signalled = False
def test_check_format(self):
    """Format checking: a compatible transform/writer chain should
    construct cleanly with check_format=True, while an incompatible one
    should log an error and raise ValueError.
    """
    file_one = self.tmpdirname + '/f1'
    file_two = self.tmpdirname + '/f2'

    # Compatible formats - construction should succeed without complaint
    ComposedWriter(transforms=[PrefixTransform('prefix')],
                   writers=[TextFileWriter(file_one),
                            TextFileWriter(file_two)],
                   check_format=True)

    # Incompatible formats - expect an ERROR-level log and a ValueError
    with self.assertLogs(logging.getLogger(), logging.ERROR):
        with self.assertRaises(ValueError):
            ComposedWriter(transforms=[ParseNMEATransform()],
                           writers=[TextFileWriter(file_one),
                                    TextFileWriter(file_two)],
                           check_format=True)
def run_logger(logger, config, stderr_filename=None, stderr_data_server=None,
               log_level=logging.INFO):
    """Run a logger, sending its stderr to a cached data server if so indicated

    logger - Name of logger

    config - Config dict

    stderr_filename - If not None, send stderr to this file.

    stderr_data_server - If not None, host:port of cached data server to
          send stderr messages to.

    log_level - Level at which logger should be logging (e.g
          logging.WARNING, logging.INFO, etc.
    """
    # Reset logging to its freshly-imported state so handlers installed
    # by a parent process don't leak into this logger's process.
    reload(logging)

    # Only build the rotating file handler when we actually have a
    # filename; constructing RotatingFileHandler(None, ...) raises, which
    # contradicted the "If not None" contract in the docstring.
    if stderr_filename:
        stderr_handler = RotatingFileHandler(stderr_filename,
                                             maxBytes=STDERR_MAX_BYTES,
                                             backupCount=STDERR_BACKUP_COUNT)
        logging.basicConfig(handlers=[stderr_handler], level=log_level,
                            format=DEFAULT_LOGGING_FORMAT)
    else:
        logging.basicConfig(level=log_level, format=DEFAULT_LOGGING_FORMAT)

    # If requested, also echo this logger's stderr to the cached data server
    if stderr_data_server:
        field_name = 'stderr:logger:' + logger
        cds_writer = ComposedWriter(
            transforms=[ToDASRecordTransform(data_id='stderr',
                                             field_name=field_name)],
            writers=[CachedDataWriter(data_server=stderr_data_server)])
        logging.getLogger().addHandler(StdErrLoggingHandler(cds_writer))

    # Set the name of the process for ps
    config_name = config.get('name', 'no_name')
    setproctitle('openrvdas/server/logger_runner.py:' + config_name)

    logging.info(f'Starting logger {logger} config {config_name}')
    try:
        if config_is_runnable(config):
            listener = ListenerFromLoggerConfig(config=config)
            try:
                listener.run()
            except KeyboardInterrupt:
                logging.warning(f'Received quit for {config_name}')
    except Exception as e:
        # Top-level boundary: log fatally rather than crash the runner
        logging.fatal(e)

    # Allow a moment for stderr_writers to finish up
    time.sleep(0.25)
def test_all_files(self):
    """With no transforms, every record should be written verbatim to
    both output files.
    """
    f1_name = self.tmpdirname + '/f1'
    f2_name = self.tmpdirname + '/f2'
    writer = ComposedWriter(
        transforms=[],
        writers=[TextFileWriter(f1_name), TextFileWriter(f2_name)])

    # Context managers close the read handles even if an assertion fails
    # mid-loop (the original left them open).
    with open(f1_name, 'r') as f1, open(f2_name, 'r') as f2:
        for line in SAMPLE_DATA:
            writer.write(line)
            time.sleep(0.1)  # give the file writers a moment to flush
            f1_line = f1.readline().rstrip()
            f2_line = f2.readline().rstrip()
            logging.info('wrote: "%s", f1: "%s", f2: "%s"',
                         line, f1_line, f2_line)
            self.assertEqual(line, f1_line)
            self.assertEqual(line, f2_line)
# Optionally spin up the cached data server in its own daemon process so
# it dies with the logger manager.
if args.start_data_server:
    data_server_proc = multiprocessing.Process(
        name='openrvdas_data_server',
        target=run_data_server,
        args=(args.data_server_websocket,
              args.data_server_back_seconds,
              args.data_server_cleanup_interval,
              args.data_server_interval),
        daemon=True)
    data_server_proc.start()

############################
# If we do have a data server, add a handler that will echo all
# logger_manager stderr output to it
if args.data_server_websocket:
    stderr_writer = ComposedWriter(
        transforms=ToDASRecordTransform(field_name='stderr:logger_manager'),
        writers=[CachedDataWriter(data_server=args.data_server_websocket)])
    logging.getLogger().addHandler(
        StdErrLoggingHandler(stderr_writer, parse_to_json=True))

############################
# Instantiate API - are we using an in-memory store or Django
# database as our backing store? Do our imports conditionally, so
# they don't actually have to have Django if they're not using it.
if args.database == 'django':
    from django_gui.django_server_api import DjangoServerAPI
    api = DjangoServerAPI()
elif args.database == 'memory':
    from server.in_memory_server_api import InMemoryServerAPI
    api = InMemoryServerAPI()
elif args.database == 'hapi':
class Listener:
    """Listener is a simple, yet relatively self-contained class that takes
    a list of one or more Readers, a list of zero or more Transforms, and a
    list of zero or more Writers. It calls the Readers (in parallel) to
    acquire records, passes those records through the Transforms (in
    series), and sends the resulting records to the Writers (in parallel).
    """
    ############################

    def __init__(self, readers=None, transforms=None, writers=None,
                 stderr_writers=None, host_id='', interval=0, name=None,
                 check_format=False):
        """listener = Listener(readers, transforms=[], writers=[],
                               interval=0, check_format=False)

        readers        A single Reader or a list of Readers.

        transforms     A single Transform or a list of zero or more Transforms

        writers        A single Writer or a list of zero or more Writers

        stderr_writers A single Writer or a list of zero or more Writers to
                       which the logger's stderr should be written.

        host_id        Optional host_id on which the Listener is to be run.
                       Accepted so a full config can be passed as kwargs.

        interval       How long to sleep before reading sequential records

        name           Optional human-readable short name to be used in
                       displays

        check_format   If True, attempt to check that Reader/Transform/Writer
                       formats are compatible, and throw a ValueError if they
                       are not. If check_format is False (the default) the
                       output_format() of the whole reader will be
                       formats.Unknown.

        Sample use:

        listener = Listener(readers=[NetworkReader(':6221'),
                                     NetworkReader(':6223')],
                            transforms=[TimestampTransform()],
                            writers=[TextFileWriter('/logs/network_recs'),
                                     TextFileWriter(None)],
                            interval=0.2)
        listener.run()

        Calling listener.quit() from another thread will cause the run()
        loop to exit.
        """
        # None sentinels instead of mutable [] defaults so a single list
        # object is never shared across instances.
        readers = readers if readers is not None else []
        transforms = transforms if transforms is not None else []
        writers = writers if writers is not None else []
        stderr_writers = stderr_writers if stderr_writers is not None else []

        logging.info('Instantiating %s logger', name or 'unnamed')

        ###########
        # Create readers, writers, etc.
        # NOTE(review): stderr_writers is accepted but not referenced in
        # this body — presumably consumed elsewhere; confirm.
        self.reader = ComposedReader(readers=readers,
                                     check_format=check_format)
        self.writer = ComposedWriter(transforms=transforms, writers=writers,
                                     check_format=check_format)
        self.interval = interval
        self.name = name or 'Unnamed listener'
        self.last_read = 0

        self.quit_signalled = False

    ############################
    def quit(self):
        """Signal 'quit' to all the readers."""
        self.quit_signalled = True
        logging.info('Shutting down %s', self.name)

    ############################
    def run(self):
        """Read/transform/write until either quit() is called in a separate
        thread, or ComposedReader returns None, indicating that all its
        component readers have returned EOF.
        """
        logging.info('Running %s', self.name)
        if not self.reader and not self.writer:
            logging.info('No readers or writers defined - exiting.')
            return

        # Non-None seed so the loop condition admits the first iteration;
        # a None record from the reader means all readers hit EOF.
        record = ''
        try:
            while not self.quit_signalled and record is not None:
                record = self.reader.read()
                self.last_read = time.time()
                logging.debug('ComposedReader read: "%s"', record)
                if record:
                    self.writer.write(record)
                if self.interval:
                    # Sleep only for whatever remains of the interval after
                    # accounting for the time spent reading/writing.
                    time_to_sleep = self.interval - (time.time() -
                                                     self.last_read)
                    time.sleep(max(time_to_sleep, 0))

        # Exit in an orderly fashion if someone hits Ctl-C
        except KeyboardInterrupt:
            logging.info('Listener %s received KeyboardInterrupt - exiting.',
                         self.name or '')
        except Exception as e:
            logging.info('Listener %s received exception: %s',
                         self.name, traceback.format_exc())
            raise e