def run(args):
    """Main beaver entry point: tail files and push events onto a consumer queue.

    Installs signal handlers that perform an orderly shutdown, then loops
    forever restarting the Worker after interrupts.
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    worker = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # Map the numeric signal back to its name purely for logging.
        sig_name = [name for name, num in signal.__dict__.iteritems() if num == signalnum][0]
        logger.info('{0} detected'.format(sig_name))
        logger.info('Shutting down. Please wait...')

        if worker is not None:
            logger.info('Closing worker...')
            try:
                worker.close()
            except RuntimeError:
                pass

        # Best-effort: tell the consumer process to drain and exit.
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        logger.info('Shutdown complete.')
        # os._exit skips interpreter teardown; exit code is the raw signal number.
        return os._exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        consumer_args = (queue, beaver_config, logger)
        consumer = multiprocessing.Process(target=run_queue, args=consumer_args)
        logger.info('Starting queue consumer')
        consumer.start()
        return consumer

    while True:
        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')
            logger.info('Starting worker...')
            worker = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)
            logger.info('Working...')
            worker.loop()
        except KeyboardInterrupt:
            # SIGINT is handled by cleanup(); anything reaching here just restarts.
            pass
def run(args):
    """Beaver entry point: start a Worker feeding a multiprocessing queue.

    Registers SIGTERM/SIGINT/SIGQUIT handlers that shut everything down via
    sys.exit, then restarts the Worker indefinitely.
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    worker = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def shutdown(signalnum, frame):
        # Resolve signal number -> name for the log message only.
        matches = [n for n, v in signal.__dict__.iteritems() if v == signalnum]
        logger.info('{0} detected'.format(matches[0]))
        logger.info('Shutting down. Please wait...')

        if worker is not None:
            logger.info('Closing worker...')
            try:
                worker.close()
            except RuntimeError:
                pass

        # Signal the consumer process to exit; ignore a full queue.
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        logger.info('Shutdown complete.')
        # Exit code is the signal number; sys.exit unwinds normally.
        return sys.exit(signalnum)

    for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
        signal.signal(sig, shutdown)

    def create_queue_consumer():
        consumer = multiprocessing.Process(target=run_queue, args=(queue, beaver_config, logger))
        logger.info('Starting queue consumer')
        consumer.start()
        return consumer

    while True:
        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')
            logger.info('Starting worker...')
            worker = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)
            logger.info('Working...')
            worker.loop()
        except KeyboardInterrupt:
            # Actual shutdown happens in the SIGINT handler above.
            pass
def __init__(self, args):
    """Build config, queue and ssh tunnel, and register shutdown signals.

    The logger is created twice on purpose: a bootstrap logger is needed to
    load BeaverConfig, then it is rebuilt so the config file can override
    logging settings.
    """
    self.logger = setup_custom_logger('beaver', args)
    self.beaver_config = BeaverConfig(args, logger=self.logger)
    # so the config file can override the logger
    self.logger = setup_custom_logger('beaver', args, config=self.beaver_config)

    if self.beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    self.queue = multiprocessing.Queue(self.beaver_config.get('max_queue_size'))
    self.manager_proc = None
    self.ssh_tunnel = create_ssh_tunnel(self.beaver_config)

    signal.signal(signal.SIGTERM, self.cleanup)
    signal.signal(signal.SIGINT, self.cleanup)
    # SIGQUIT does not exist on Windows.
    if os.name != 'nt':
        signal.signal(signal.SIGQUIT, self.cleanup)
def run(args):
    """Beaver entry point (FileConfig variant): tail files into a queue.

    Loops forever restarting the Worker; a KeyboardInterrupt triggers an
    orderly shutdown (close worker, close ssh tunnel, exit 0).

    Fix: ``worker`` was only bound inside the try body, so a
    KeyboardInterrupt arriving before the first ``Worker(...)`` assignment
    (e.g. during the startup log calls) raised UnboundLocalError in the
    shutdown path. It is now initialized to None and guarded.
    """
    logger = setup_custom_logger('beaver', args)
    file_config = FileConfig(args, logger=logger)
    beaver_config = BeaverConfig(args, file_config=file_config, logger=logger)
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)
    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    def create_queue_consumer():
        process_args = (queue, beaver_config, file_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)
        logger.info("Starting queue consumer")
        proc.start()
        return proc

    worker = None  # guard: may be interrupted before the first Worker is built
    while 1:
        try:
            if REOPEN_FILES:
                logger.debug("Detected non-linux platform. Files will be reopened for tailing")
            logger.info("Starting worker...")
            worker = Worker(beaver_config, file_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)
            logger.info("Working...")
            worker.loop()
        except KeyboardInterrupt:
            logger.info("Shutting down. Please wait.")
            if worker is not None:
                worker.close()
            if ssh_tunnel is not None:
                logger.info("Closing ssh tunnel.")
                ssh_tunnel.close()
            logger.info("Shutdown complete.")
            sys.exit(0)
def run(args=None):
    """Beaver entry point with worker-process supervision.

    Runs the Worker in a child process, restarting it when it dies and
    recycling it when 'refresh_worker_process' seconds have elapsed.
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    if beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))
    worker_proc = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # signalnum is None when invoked for a worker refresh rather than a signal.
        if signalnum is not None:
            sig_name = [n for n, v in signal.__dict__.iteritems() if v == signalnum][0]
            logger.info('{0} detected'.format(sig_name))
            logger.info('Shutting down. Please wait...')
        else:
            logger.info('Worker process cleanup in progress...')

        # Best-effort exit sentinel for the consumer process.
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if worker_proc is not None:
            try:
                worker_proc.terminate()
                worker_proc.join()
            except RuntimeError:
                pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        # Only a real signal terminates the whole program.
        if signalnum is not None:
            logger.info('Shutdown complete.')
            return os._exit(signalnum)

    for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
        signal.signal(sig, cleanup)

    def create_queue_consumer():
        consumer = multiprocessing.Process(target=run_queue, args=(queue, beaver_config, logger))
        logger.info('Starting queue consumer')
        consumer.start()
        return consumer

    def create_queue_producer():
        producer = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)
        producer.loop()

    while True:
        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')
            started_at = time.time()
            while True:
                if worker_proc is None or not worker_proc.is_alive():
                    logger.info('Starting worker...')
                    started_at = time.time()
                    worker_proc = multiprocessing.Process(target=create_queue_producer)
                    worker_proc.start()
                    logger.info('Working...')
                worker_proc.join(10)
                # Recycle a long-lived worker once it exceeds the refresh limit.
                if beaver_config.get('refresh_worker_process'):
                    if beaver_config.get('refresh_worker_process') < time.time() - started_at:
                        logger.info('Worker has exceeded refresh limit. Terminating process...')
                        cleanup(None, None)
        except KeyboardInterrupt:
            pass
def run(args=None):
    """Beaver entry point: supervised worker process with config-driven logger.

    Same supervision loop as the plain variant, but the logger is rebuilt
    after the config loads so the config file can override logging.
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    # so the config file can override the logger
    logger = setup_custom_logger('beaver', args, config=beaver_config)

    if beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))
    worker_proc = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # None signalnum means "refresh the worker", not "exit the program".
        if signalnum is not None:
            sig_name = [n for n, v in signal.__dict__.iteritems() if v == signalnum][0]
            logger.info('{0} detected'.format(sig_name))
            logger.info('Shutting down. Please wait...')
        else:
            logger.info('Worker process cleanup in progress...')

        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if worker_proc is not None:
            try:
                worker_proc.terminate()
                worker_proc.join()
            except RuntimeError:
                pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        if signalnum is not None:
            logger.info('Shutdown complete.')
            return os._exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        consumer = multiprocessing.Process(target=run_queue, args=(queue, beaver_config, logger))
        logger.info('Starting queue consumer')
        consumer.start()
        return consumer

    def create_queue_producer():
        # Runs inside the child process: drive the Worker's tail loop.
        worker = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)
        worker.loop()

    while True:
        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')
            started_at = time.time()
            while True:
                if worker_proc is None or not worker_proc.is_alive():
                    logger.info('Starting worker...')
                    started_at = time.time()
                    worker_proc = multiprocessing.Process(target=create_queue_producer)
                    worker_proc.start()
                    logger.info('Working...')
                worker_proc.join(10)
                if beaver_config.get('refresh_worker_process'):
                    if beaver_config.get('refresh_worker_process') < time.time() - started_at:
                        logger.info('Worker has exceeded refresh limit. Terminating process...')
                        cleanup(None, None)
        except KeyboardInterrupt:
            pass
def run(args=None):
    """Beaver entry point (TailManager variant).

    Builds the joinable queue and ssh tunnel, installs shutdown handlers,
    then hands control to TailManager.run().
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.JoinableQueue(beaver_config.get('max_queue_size'))

    manager = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    # Thin wrappers so callbacks never hold the queue object directly.
    def queue_put(*args):
        return queue.put(*args)

    def queue_put_nowait(*args):
        return queue.put_nowait(*args)

    def cleanup(signalnum, frame):
        # Signal number -> name, for logging only.
        sig_name = [n for n, v in signal.__dict__.iteritems() if v == signalnum][0]
        logger.info("{0} detected".format(sig_name))
        logger.info("Shutting down. Please wait...")

        if manager is not None:
            logger.info("Closing worker...")
            try:
                manager.close()
            except RuntimeError:
                pass

        # Best-effort exit sentinel for the consumer process.
        try:
            queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info("Closing ssh tunnel...")
            ssh_tunnel.close()

        logger.info("Shutdown complete.")
        return sys.exit(signalnum)

    for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
        signal.signal(sig, cleanup)

    def create_queue_consumer():
        consumer = multiprocessing.Process(target=run_queue, args=(queue, beaver_config, logger))
        logger.info("Starting queue consumer")
        consumer.start()
        return consumer

    if REOPEN_FILES:
        logger.debug("Detected non-linux platform. Files will be reopened for tailing")

    logger.info("Starting worker...")
    manager = TailManager(
        beaver_config=beaver_config,
        queue_consumer_function=create_queue_consumer,
        callback=queue_put,
        logger=logger
    )
    logger.info("Working...")
    manager.run()
def run(args=None):
    """Beaver entry point (TailManager variant with explicit paths).

    NOTE(review): the tailed path list is hard-coded to
    /var/log/system.log — presumably a placeholder; verify whether it
    should come from beaver_config instead.
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.JoinableQueue(beaver_config.get('max_queue_size'))

    manager = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    # Indirection so callbacks never hold the queue object directly.
    def queue_put(*args):
        return queue.put(*args)

    def queue_put_nowait(*args):
        return queue.put_nowait(*args)

    def cleanup(signalnum, frame):
        sig_name = [n for n, v in signal.__dict__.iteritems() if v == signalnum][0]
        logger.info("{0} detected".format(sig_name))
        logger.info("Shutting down. Please wait...")

        if manager is not None:
            logger.info("Closing worker...")
            try:
                manager.close()
            except RuntimeError:
                pass

        try:
            queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info("Closing ssh tunnel...")
            ssh_tunnel.close()

        logger.info("Shutdown complete.")
        return sys.exit(signalnum)

    for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
        signal.signal(sig, cleanup)

    def create_queue_consumer():
        consumer = multiprocessing.Process(target=run_queue, args=(queue, beaver_config, logger))
        logger.info("Starting queue consumer")
        consumer.start()
        return consumer

    if REOPEN_FILES:
        logger.debug("Detected non-linux platform. Files will be reopened for tailing")

    logger.info("Starting worker...")
    manager = TailManager(
        paths=["/var/log/system.log"],
        beaver_config=beaver_config,
        queue_consumer_function=create_queue_consumer,
        callback=queue_put,
        logger=logger
    )
    logger.info("Working...")
    manager.run()
def run(args=None):
    """Beaver entry point (Event-driven shutdown variant).

    Supervises a TailManager process, recycling it on the configured refresh
    interval, and exits cleanly once a termination signal sets the shared
    Event.
    """
    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    if beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    queue = multiprocessing.JoinableQueue(beaver_config.get('max_queue_size'))
    manager_proc = None
    termination_requested = multiprocessing.Event()
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    # Indirection so callbacks never hold the queue object directly.
    def queue_put(*args):
        return queue.put(*args)

    def queue_put_nowait(*args):
        return queue.put_nowait(*args)

    def request_shutdown(signalnum, frame):
        # Signal handler: flag the main loop to stop; cleanup happens there.
        termination_requested.set()
        if signalnum is not None:
            sig_name = [n for n, v in signal.__dict__.iteritems() if v == signalnum][0]
            logger.info("{0} detected".format(sig_name))
            logger.info("Shutting down. Please wait...")
        else:
            logger.info('Worker process cleanup in progress...')

    def cleanup():
        # Best-effort exit sentinel for the consumer process.
        try:
            queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if manager_proc is not None:
            try:
                manager_proc.close()
                manager_proc.join()
            except RuntimeError:
                pass

        if ssh_tunnel is not None:
            logger.info("Closing ssh tunnel...")
            ssh_tunnel.close()

    for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT):
        signal.signal(sig, request_shutdown)

    def create_queue_consumer():
        consumer = multiprocessing.Process(target=run_queue, args=(queue, beaver_config, logger))
        logger.info("Starting queue consumer")
        consumer.start()
        return consumer

    def create_queue_producer():
        return TailManager(beaver_config=beaver_config,
                           queue_consumer_function=create_queue_consumer,
                           callback=queue_put,
                           logger=logger)

    last_start = None
    while not termination_requested.is_set():
        try:
            if REOPEN_FILES:
                logger.debug("Detected non-linux platform. Files will be reopened for tailing")

            if manager_proc is None or not manager_proc.is_alive():
                logger.info('Starting worker...')
                manager_proc = create_queue_producer()
                manager_proc.start()
                last_start = time.time()
                logger.info('Working...')

            if beaver_config.get('refresh_worker_process') and manager_proc.is_alive():
                if last_start and beaver_config.get('refresh_worker_process') < time.time() - last_start:
                    logger.info('Worker has exceeded refresh limit. Terminating process...')
                    cleanup()
            else:
                # Workaround for fact that multiprocessing.Event.wait() deadlocks on main thread
                # And blocks SIGINT signals from getting through.
                while not termination_requested.is_set():
                    time.sleep(0.5)
        except KeyboardInterrupt:
            pass

    cleanup()