示例#1
0
def run(args):
    """Entry point: tail configured files and ship events via a queue.

    Builds the logger, config, IPC queue and an optional ssh tunnel,
    installs SIGTERM/SIGINT/SIGQUIT handlers for graceful shutdown, then
    loops forever, restarting the Worker after any KeyboardInterrupt.

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only, and
    `Queue.Full` here refers to the Python 2 `Queue` module.
    """
    logger = setup_custom_logger('beaver', args)

    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    worker = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # Reverse-map the numeric signal back to its SIG* name for logging
        sig_name = tuple((v) for v, k in signal.__dict__.iteritems() if k == signalnum)[0]

        logger.info('{0} detected'.format(sig_name))
        logger.info('Shutting down. Please wait...')

        # `worker` is the closure variable assigned in the loop below
        if worker is not None:
            logger.info('Closing worker...')
            try:
                worker.close()
            except RuntimeError:
                pass

        # Best effort: ask the consumer process to exit; ignore a full queue
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        logger.info('Shutdown complete.')
        # sys.exit raises SystemExit, allowing finally blocks to run
        return sys.exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        # The consumer drains `queue` in its own process via run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info('Starting queue consumer')
        proc.start()
        return proc

    # Loop forever: KeyboardInterrupt only restarts the worker; real
    # shutdown happens through the signal handler above.
    while 1:
        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')

            logger.info('Starting worker...')
            worker = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)

            logger.info('Working...')
            worker.loop()

        except KeyboardInterrupt:
            pass
示例#2
0
def run(args):
    """Entry point: tail configured files and ship events via a queue.

    Identical in structure to the sibling variant that calls sys.exit,
    except that shutdown uses os._exit, which terminates the process
    immediately without running cleanup handlers or flushing buffers.

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
    """
    logger = setup_custom_logger('beaver', args)

    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    worker = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # Map the signal number back to its SIG* name for the log message
        sig_name = tuple((v) for v, k in signal.__dict__.iteritems() if k == signalnum)[0]

        logger.info('{0} detected'.format(sig_name))
        logger.info('Shutting down. Please wait...')

        if worker is not None:
            logger.info('Closing worker...')
            try:
                worker.close()
            except RuntimeError:
                pass

        # Tell the consumer process to exit; a full queue is non-fatal
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        logger.info('Shutdown complete.')
        # os._exit skips atexit/finally handlers — hard process termination
        return os._exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        # Spawn a separate process that drains `queue` through run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info('Starting queue consumer')
        proc.start()
        return proc

    # Restart the worker forever; KeyboardInterrupt is deliberately ignored
    while 1:
        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')

            logger.info('Starting worker...')
            worker = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)

            logger.info('Working...')
            worker.loop()

        except KeyboardInterrupt:
            pass
示例#3
0
    def setUpClass(cls):
        """Build a shared BeaverConfig for the kinesis transport tests."""
        cls.logger = logging.getLogger(__name__)

        # delete=True: the temp file disappears once the handle is collected
        conf_file = tempfile.NamedTemporaryFile(delete=True)
        config = BeaverConfig(mock.Mock(config=conf_file.name))
        config.set('transport', 'kinesis')
        config.set('logstash_version', 1)
        cls.beaver_config = config
示例#4
0
    def __init__(self, args):
        """Wire up logging, config, the event queue and shutdown signals.

        :param args: parsed command-line options object
        :raises LookupError: if the configured logstash_version is not 0 or 1
        """
        self.logger = setup_custom_logger('beaver', args)
        self.beaver_config = BeaverConfig(args, logger=self.logger)

        # so the config file can override the logger
        self.logger = setup_custom_logger('beaver', args, config=self.beaver_config)

        if self.beaver_config.get('logstash_version') not in [0, 1]:
            raise LookupError("Invalid logstash_version")

        self.queue = multiprocessing.Queue(self.beaver_config.get('max_queue_size'))

        # manager_proc is created lazily by the run loop
        self.manager_proc = None
        self.ssh_tunnel = create_ssh_tunnel(self.beaver_config)
        signal.signal(signal.SIGTERM, self.cleanup)
        signal.signal(signal.SIGINT, self.cleanup)
        if os.name != 'nt':
            # SIGQUIT does not exist on Windows
            signal.signal(signal.SIGQUIT, self.cleanup)
示例#5
0
    def setUpClass(cls):
        """Shared fixtures: kinesis transport config plus a local distribution."""
        cls.logger = logging.getLogger(__name__)

        conf = tempfile.NamedTemporaryFile(delete=True)
        cls.beaver_config = BeaverConfig(mock.Mock(config=conf.name))
        for option, value in (('transport', 'kinesis'),
                              ('logstash_version', 1)):
            cls.beaver_config.set(option, value)

        archive = Fixture.download_official_distribution()
        Fixture.extract_distribution(archive)
    def setUpClass(cls):
        """Download a distribution and boot Zookeeper + Kafka test fixtures."""
        cls.logger = logging.getLogger(__name__)

        conf_handle = tempfile.NamedTemporaryFile(delete=True)
        cls.beaver_config = BeaverConfig(mock.Mock(config=conf_handle.name))

        dist = Fixture.download_official_distribution()
        Fixture.extract_distribution(dist)
        # Kafka needs a running Zookeeper; wire the broker to its host/port
        cls.zk = ZookeeperFixture.instance()
        cls.server = KafkaFixture.instance(0, cls.zk.host, cls.zk.port)
class ConfigTests(unittest.TestCase):
    """Config parsing tests for ini files with square-bracket section names."""

    def setUp(self):
        # A bare callable stands in for a parsed argparse namespace
        namespace = lambda: None
        namespace.config = 'tests/square_bracket_sections.ini'
        namespace.mode = 'bind'
        self.config = namespace
        self.beaver_config = BeaverConfig(namespace)

    def test_globs(self):
        """Every configured file path must match the glob fixture set."""
        expected = set(os.path.realpath(p) for p in glob.glob('tests/logs/0x[0-9]*.log'))
        for path in self.beaver_config.getfilepaths():
            self.assertTrue(path in expected)
示例#8
0
class ConfigTests(unittest.TestCase):
    """Tests for configs whose ini section names contain square brackets."""

    def setUp(self):
        opts = lambda: None  # minimal stand-in for parsed CLI options
        opts.config = 'tests/square_bracket_sections.ini'
        opts.mode = 'bind'
        self.config = opts
        self.beaver_config = BeaverConfig(opts)

    def test_globs(self):
        """Paths reported by the config are all real glob matches."""
        matches = [
            os.path.realpath(name)
            for name in glob.glob('tests/logs/0x[0-9]*.log')
        ]
        for candidate in self.beaver_config.getfilepaths():
            self.assertTrue(candidate in matches)
示例#9
0
def run(args):
    """Entry point: tail configured files and ship events via a queue.

    Builds logging, file/beaver config, an optional ssh tunnel and the
    IPC queue, then loops forever restarting the Worker.  Ctrl-C triggers
    an orderly shutdown and exits the process.
    """
    logger = setup_custom_logger('beaver', args)

    file_config = FileConfig(args, logger=logger)
    beaver_config = BeaverConfig(args, file_config=file_config, logger=logger)
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    # BUG FIX: initialize `worker` so the KeyboardInterrupt handler cannot
    # hit a NameError when the interrupt arrives before Worker() is bound
    # (e.g. during Worker construction on the first loop iteration).
    worker = None

    def create_queue_consumer():
        # The consumer drains `queue` in a separate process via run_queue
        process_args = (queue, beaver_config, file_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info("Starting queue consumer")
        proc.start()
        return proc

    while 1:
        try:
            if REOPEN_FILES:
                logger.debug("Detected non-linux platform. Files will be reopened for tailing")

            logger.info("Starting worker...")
            worker = Worker(beaver_config, file_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)

            logger.info("Working...")
            worker.loop()

        except KeyboardInterrupt:
            logger.info("Shutting down. Please wait.")
            if worker is not None:
                worker.close()
            if ssh_tunnel is not None:
                logger.info("Closing ssh tunnel.")
                ssh_tunnel.close()

            logger.info("Shutdown complete.")
            sys.exit(0)
示例#10
0
class ZmqTests(unittest.TestCase):
    """Smoke tests for ZmqTransport in pub and bind modes."""

    def setUp(self):
        fake_args = mock.Mock(config=None)
        self.beaver_config = BeaverConfig(fake_args)

    def test_pub(self):
        """A transport in the default (connect) mode can be interrupted."""
        self.beaver_config.set('zeromq_address', 'tcp://localhost:2120')
        transport = ZmqTransport(self.beaver_config)
        transport.interrupt()
        #assert not transport.zeromq_bind

    def test_bind(self):
        """A transport constructs cleanly when asked to bind."""
        for option, value in (('zeromq_bind', 'bind'),
                              ('zeromq_address', 'tcp://localhost:2120')):
            self.beaver_config.set(option, value)
        ZmqTransport(self.beaver_config)
示例#11
0
class ZmqTests(unittest.TestCase):
    """Exercise ZmqTransport construction and interruption."""

    def setUp(self):
        self.beaver_config = BeaverConfig(mock.Mock(config=None))

    def _set(self, key, value):
        # Convenience wrapper around the shared test config
        self.beaver_config.set(key, value)

    def test_pub(self):
        self._set('zeromq_address', 'tcp://localhost:2120')
        zmq_transport = ZmqTransport(self.beaver_config)
        zmq_transport.interrupt()
        #assert not transport.zeromq_bind

    def test_bind(self):
        self._set('zeromq_bind', 'bind')
        self._set('zeromq_address', 'tcp://localhost:2120')
        ZmqTransport(self.beaver_config)
示例#12
0
class ZmqTests(unittest.TestCase):
    """ZmqTransport tests backed by an empty on-disk config file."""

    def setUp(self):
        handle = tempfile.NamedTemporaryFile(delete=True)
        self.beaver_config = BeaverConfig(mock.Mock(config=handle.name))

    def test_pub(self):
        """interrupt() succeeds on a freshly-built pub transport."""
        self.beaver_config.set('zeromq_address', 'tcp://localhost:2120')
        zmq_transport = ZmqTransport(self.beaver_config)
        zmq_transport.interrupt()
        #assert not transport.zeromq_bind

    def test_bind(self):
        """Construction succeeds in bind mode."""
        config = self.beaver_config
        config.set('zeromq_bind', 'bind')
        config.set('zeromq_address', 'tcp://localhost:2120')
        ZmqTransport(config)
class ZmqTests(unittest.TestCase):
    """ZmqTransport tests using list-valued zeromq_address settings."""

    def setUp(self):
        conf_file = tempfile.NamedTemporaryFile(delete=True)
        self.beaver_config = BeaverConfig(mock.Mock(config=conf_file.name))

    def test_pub(self):
        """Interrupting a pub-mode transport with a list of addresses."""
        self.beaver_config.set('zeromq_address', ['tcp://localhost:2120'])
        transport = ZmqTransport(self.beaver_config)
        transport.interrupt()
        #assert not transport.zeromq_bind

    def test_bind(self):
        """Bind-mode construction with a list of addresses."""
        settings = [('zeromq_bind', 'bind'),
                    ('zeromq_address', ['tcp://localhost:2120'])]
        for key, value in settings:
            self.beaver_config.set(key, value)
        ZmqTransport(self.beaver_config)
示例#14
0
 def setUp(self):
     """Create a BeaverConfig from mocked args with no config file."""
     mocked_args = mock.Mock(config=None)
     self.beaver_config = BeaverConfig(mocked_args)
 def setUp(self):
     """Create a BeaverConfig backed by an empty temporary config file."""
     scratch_conf = tempfile.NamedTemporaryFile(delete=True)
     self.beaver_config = BeaverConfig(mock.Mock(config=scratch_conf.name))
示例#16
0
def run(args=None):
    """Entry point: supervise a worker process that tails and ships events.

    Unlike the in-process variants, the Worker runs in a child process so
    it can be terminated and periodically refreshed
    ('refresh_worker_process' config).  Signal handlers route all three
    shutdown signals through cleanup(), which hard-exits via os._exit.

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
    """

    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)
    # so the config file can override the logger
    logger = setup_custom_logger('beaver', args, config=beaver_config)

    if beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    worker_proc = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # signalnum is None when called internally (worker refresh);
        # otherwise it is the delivered signal number.
        if signalnum is not None:
            sig_name = tuple((v) for v, k in signal.__dict__.iteritems() if k == signalnum)[0]
            logger.info('{0} detected'.format(sig_name))
            logger.info('Shutting down. Please wait...')
        else:
            logger.info('Worker process cleanup in progress...')

        # Best effort: tell the consumer to exit; a full queue is non-fatal
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if worker_proc is not None:
            try:
                worker_proc.terminate()
                worker_proc.join()
            except RuntimeError:
                pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        # Only exit the process for a real signal; refresh falls through
        if signalnum is not None:
            logger.info('Shutdown complete.')
            return os._exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        # The consumer drains `queue` in its own process via run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info('Starting queue consumer')
        proc.start()
        return proc

    def create_queue_producer():
        # Runs inside the child worker process
        worker = Worker(beaver_config, queue_consumer_function=create_queue_consumer, callback=queue.put, logger=logger)
        worker.loop()

    while 1:

        try:
            if REOPEN_FILES:
                logger.debug('Detected non-linux platform. Files will be reopened for tailing')

            # `t` tracks when the current worker process was started
            t = time.time()
            while True:
                if worker_proc is None or not worker_proc.is_alive():
                    logger.info('Starting worker...')
                    t = time.time()
                    worker_proc = multiprocessing.Process(target=create_queue_producer)
                    worker_proc.start()
                    logger.info('Working...')
                worker_proc.join(10)

                # Periodically recycle the worker if a refresh interval is set
                if beaver_config.get('refresh_worker_process'):
                    if beaver_config.get('refresh_worker_process') < time.time() - t:
                        logger.info('Worker has exceeded refresh limit. Terminating process...')
                        cleanup(None, None)

        except KeyboardInterrupt:
            pass
示例#17
0
def run(args=None):
    """Entry point: supervise a TailManager until termination is requested.

    Shutdown is two-phase: signal handlers only set a multiprocessing
    Event (request_shutdown); the main loop observes it, exits, and then
    performs the actual teardown in cleanup().

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
    """

    logger = setup_custom_logger('beaver', args)
    beaver_config = BeaverConfig(args, logger=logger)

    if beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    queue = multiprocessing.JoinableQueue(beaver_config.get('max_queue_size'))

    manager_proc = None
    termination_requested = multiprocessing.Event()
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    # Thin wrappers so the queue methods can be handed around as callbacks
    def queue_put(*args):
        return queue.put(*args)

    def queue_put_nowait(*args):
        return queue.put_nowait(*args)

    def request_shutdown(signalnum, frame):
        # Signal handler: only flag the event; teardown happens in cleanup()
        termination_requested.set()
        if signalnum is not None:
            sig_name = tuple((v) for v, k in signal.__dict__.iteritems()
                             if k == signalnum)[0]
            logger.info("{0} detected".format(sig_name))
            logger.info("Shutting down. Please wait...")
        else:
            logger.info('Worker process cleanup in progress...')

    def cleanup():
        # Best effort: tell the consumer to exit; a full queue is non-fatal
        try:
            queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if manager_proc is not None:
            try:
                manager_proc.close()
                manager_proc.join()
            except RuntimeError:
                pass

        if ssh_tunnel is not None:
            logger.info("Closing ssh tunnel...")
            ssh_tunnel.close()

    signal.signal(signal.SIGTERM, request_shutdown)
    signal.signal(signal.SIGINT, request_shutdown)
    signal.signal(signal.SIGQUIT, request_shutdown)

    def create_queue_consumer():
        # The consumer drains `queue` in its own process via run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info("Starting queue consumer")
        proc.start()
        return proc

    def create_queue_producer():
        return TailManager(beaver_config=beaver_config,
                           queue_consumer_function=create_queue_consumer,
                           callback=queue_put,
                           logger=logger)

    # last_start tracks when the current manager was (re)started, for the
    # optional 'refresh_worker_process' recycling below.
    last_start = None
    while not termination_requested.is_set():

        try:

            if REOPEN_FILES:
                logger.debug(
                    "Detected non-linux platform. Files will be reopened for tailing"
                )

            if manager_proc is None or not manager_proc.is_alive():
                logger.info('Starting worker...')
                manager_proc = create_queue_producer()
                manager_proc.start()
                last_start = time.time()
                logger.info('Working...')

            if beaver_config.get(
                    'refresh_worker_process') and manager_proc.is_alive():
                if last_start and beaver_config.get(
                        'refresh_worker_process') < time.time() - last_start:
                    logger.info(
                        'Worker has exceeded refresh limit. Terminating process...'
                    )
                    cleanup()
            else:
                # Workaround for fact that multiprocessing.Event.wait() deadlocks on main thread
                # And blocks SIGINT signals from getting through.
                while not termination_requested.is_set():
                    time.sleep(0.5)

        except KeyboardInterrupt:
            pass
    cleanup()
示例#18
0
class TailRunner(object):
    """Owns the tail manager process, queue consumer and shutdown plumbing.

    BUG FIX: queue_put, queue_put_nowait, cleanup, create_queue_consumer
    and create_queue_producer were defined without a `self` parameter even
    though they reference `self` and are invoked as bound methods
    (e.g. `self.cleanup` registered as a signal handler), which guaranteed
    a TypeError/NameError at runtime.  Each now takes `self`.
    """

    def __init__(self, args):
        """Wire up logging, config, the event queue and shutdown signals.

        :raises LookupError: if the configured logstash_version is not 0 or 1
        """
        self.logger = setup_custom_logger('beaver', args)
        self.beaver_config = BeaverConfig(args, logger=self.logger)

        # so the config file can override the logger
        self.logger = setup_custom_logger('beaver', args, config=self.beaver_config)

        if self.beaver_config.get('logstash_version') not in [0, 1]:
            raise LookupError("Invalid logstash_version")

        self.queue = multiprocessing.Queue(self.beaver_config.get('max_queue_size'))

        self.manager_proc = None
        self.ssh_tunnel = create_ssh_tunnel(self.beaver_config)
        signal.signal(signal.SIGTERM, self.cleanup)
        signal.signal(signal.SIGINT, self.cleanup)
        if os.name != 'nt':
            # SIGQUIT does not exist on Windows
            signal.signal(signal.SIGQUIT, self.cleanup)

    def __getstate__(self):
        # Loggers are not picklable: ship only the logger's name across
        # process boundaries and rebuild it in __setstate__.
        orig_dict = self.__dict__.copy()
        orig_dict['logger'] = self.logger.name
        return orig_dict

    def __setstate__(self, orig_dict):
        self.__dict__.update(orig_dict)
        self.logger = setup_custom_logger(orig_dict['logger'])

    def queue_put(self, *args):
        """Blocking put onto the shared event queue."""
        return self.queue.put(*args)

    def queue_put_nowait(self, *args):
        """Non-blocking put; raises Queue.Full when the queue is full."""
        return self.queue.put_nowait(*args)

    def cleanup(self, signalnum, frame):
        """Tear everything down; hard-exit if triggered by a real signal.

        ``signalnum`` is None when called internally for a worker refresh.
        NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
        """
        if signalnum is not None:
            sig_name = tuple((v) for v, k in signal.__dict__.iteritems() if k == signalnum)[0]
            self.logger.info("{0} detected".format(sig_name))
            self.logger.info("Shutting down. Please wait...")
        else:
            self.logger.info('Worker process cleanup in progress...')

        # Best effort: tell the consumer to exit; a full queue is non-fatal
        try:
            self.queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if self.manager_proc is not None:
            try:
                self.manager_proc.terminate()
                self.manager_proc.join()
            except RuntimeError:
                pass

        if self.ssh_tunnel is not None:
            self.logger.info("Closing ssh tunnel...")
            self.ssh_tunnel.close()

        if signalnum is not None:
            self.logger.info("Shutdown complete.")
            # os._exit skips atexit/finally handlers — hard termination
            return os._exit(signalnum)

    def create_queue_consumer(self):
        """Spawn the process that drains the queue via run_queue."""
        # Pass the logger's *name* — the logger object is not picklable
        process_args = (self.queue, self.beaver_config, self.logger.name)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        self.logger.info("Starting queue consumer")
        proc.start()
        return proc

    def create_queue_producer(self):
        """Run a TailManager that feeds events into the queue (child side)."""
        manager = TailManager(
            beaver_config=self.beaver_config,
            queue_consumer_function=self.create_queue_consumer,
            callback=self.queue_put,
            logger=self.logger
        )
        manager.run()

    def run(self):
        """Supervise the producer process, restarting or refreshing it."""
        while 1:
            try:
                if REOPEN_FILES:
                    self.logger.debug("Detected non-linux platform. Files will be reopened for tailing")

                # `t` tracks when the current manager process was started
                t = time.time()
                while True:
                    if self.manager_proc is None or not self.manager_proc.is_alive():
                        self.logger.info('Starting worker...')
                        t = time.time()
                        self.manager_proc = multiprocessing.Process(target=self.create_queue_producer)
                        self.manager_proc.start()
                        self.logger.info('Working...')
                    self.manager_proc.join(10)

                    # Optionally recycle long-lived worker processes
                    if self.beaver_config.get('refresh_worker_process'):
                        if self.beaver_config.get('refresh_worker_process') < time.time() - t:
                            self.logger.info('Worker has exceeded refresh limit. Terminating process...')
                            self.cleanup(None, None)
            except KeyboardInterrupt:
                pass
 def setUp(self):
     """Provide a spec'd FileConfig mock and a config-less BeaverConfig."""
     fake_file_config = mock.Mock(spec=FileConfig)
     fake_args = mock.Mock(config=None)
     self.file_config = fake_file_config
     self.beaver_config = BeaverConfig(fake_args)
示例#20
0
文件: tail.py 项目: needle/beaver
def run(args=None):
    """Entry point: run a TailManager pinned to /var/log/system.log.

    Sets up config, queue, optional ssh tunnel and signal handlers, then
    runs the manager in-process (no supervision loop in this variant).

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
    """
    logger = setup_custom_logger('beaver', args)

    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.JoinableQueue(beaver_config.get('max_queue_size'))

    manager = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    # Thin wrappers so queue methods can be passed around as callbacks
    def queue_put(*args):
        return queue.put(*args)

    def queue_put_nowait(*args):
        return queue.put_nowait(*args)

    def cleanup(signalnum, frame):
        # Map the signal number back to its SIG* name for logging
        sig_name = tuple((v) for v, k in signal.__dict__.iteritems() if k == signalnum)[0]
        logger.info("{0} detected".format(sig_name))
        logger.info("Shutting down. Please wait...")

        if manager is not None:
            logger.info("Closing worker...")
            try:
                manager.close()
            except RuntimeError:
                pass

        # Best effort: tell the consumer to exit; ignore a full queue
        try:
            queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info("Closing ssh tunnel...")
            ssh_tunnel.close()

        logger.info("Shutdown complete.")
        return sys.exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        # The consumer drains `queue` in its own process via run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info("Starting queue consumer")
        proc.start()
        return proc

    if REOPEN_FILES:
        logger.debug("Detected non-linux platform. Files will be reopened for tailing")

    logger.info("Starting worker...")
    # NOTE(review): path list is hard-coded here rather than config-driven
    manager = TailManager(
        paths=["/var/log/system.log"],
        beaver_config=beaver_config,
        queue_consumer_function=create_queue_consumer,
        callback=queue_put,
        logger=logger
    )

    logger.info("Working...")
    manager.run()
示例#21
0
文件: tail.py 项目: Katafalkas/beaver
def run(args=None):
    """Entry point: run a config-driven TailManager in-process.

    Same shape as the hard-coded-path variant but lets TailManager derive
    its paths from the beaver config.

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
    """
    logger = setup_custom_logger('beaver', args)

    beaver_config = BeaverConfig(args, logger=logger)
    queue = multiprocessing.JoinableQueue(beaver_config.get('max_queue_size'))

    manager = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    # Thin wrappers so queue methods can be handed around as callbacks
    def queue_put(*args):
        return queue.put(*args)

    def queue_put_nowait(*args):
        return queue.put_nowait(*args)

    def cleanup(signalnum, frame):
        # Map the signal number back to its SIG* name for logging
        sig_name = tuple((v) for v, k in signal.__dict__.iteritems() if k == signalnum)[0]
        logger.info("{0} detected".format(sig_name))
        logger.info("Shutting down. Please wait...")

        if manager is not None:
            logger.info("Closing worker...")
            try:
                manager.close()
            except RuntimeError:
                pass

        # Best effort: tell the consumer to exit; ignore a full queue
        try:
            queue_put_nowait(("exit", ()))
        except Queue.Full:
            pass

        if ssh_tunnel is not None:
            logger.info("Closing ssh tunnel...")
            ssh_tunnel.close()

        logger.info("Shutdown complete.")
        return sys.exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        # The consumer drains `queue` in its own process via run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info("Starting queue consumer")
        proc.start()
        return proc

    if REOPEN_FILES:
        logger.debug("Detected non-linux platform. Files will be reopened for tailing")

    logger.info("Starting worker...")
    manager = TailManager(
        beaver_config=beaver_config,
        queue_consumer_function=create_queue_consumer,
        callback=queue_put,
        logger=logger
    )

    logger.info("Working...")
    manager.run()
示例#22
0
 def setUp(self):
     """Load the square-bracket-sections fixture config in bind mode."""
     namespace = lambda: None  # minimal stand-in for parsed CLI args
     namespace.config = 'tests/square_bracket_sections.ini'
     namespace.mode = 'bind'
     self.config = namespace
     self.beaver_config = BeaverConfig(namespace)
示例#23
0
 def setUp(self):
     """Build a BeaverConfig from mocked args without a config file."""
     fake_args = mock.Mock(config=None)
     self.beaver_config = BeaverConfig(fake_args)
示例#24
0
 def setUp(self):
     """Build a BeaverConfig backed by an empty temporary file."""
     temp_conf = tempfile.NamedTemporaryFile(delete=True)
     self.beaver_config = BeaverConfig(mock.Mock(config=temp_conf.name))
示例#25
0
def run(args=None):
    """Entry point: supervise a Worker child process with optional refresh.

    The Worker runs in its own process so it can be terminated and
    recycled via the 'refresh_worker_process' config.  Signals route
    through cleanup(), which hard-exits the process via os._exit.

    NOTE(review): `signal.__dict__.iteritems()` is Python 2 only.
    """
    logger = setup_custom_logger('beaver', args)

    beaver_config = BeaverConfig(args, logger=logger)
    if beaver_config.get('logstash_version') not in [0, 1]:
        raise LookupError("Invalid logstash_version")

    queue = multiprocessing.Queue(beaver_config.get('max_queue_size'))

    worker_proc = None
    ssh_tunnel = create_ssh_tunnel(beaver_config, logger=logger)

    def cleanup(signalnum, frame):
        # signalnum is None when invoked internally for a worker refresh
        if signalnum is not None:
            sig_name = tuple((v) for v, k in signal.__dict__.iteritems()
                             if k == signalnum)[0]
            logger.info('{0} detected'.format(sig_name))
            logger.info('Shutting down. Please wait...')
        else:
            logger.info('Worker process cleanup in progress...')

        # Best effort: tell the consumer to exit; a full queue is non-fatal
        try:
            queue.put_nowait(('exit', ()))
        except Queue.Full:
            pass

        if worker_proc is not None:
            try:
                worker_proc.terminate()
                worker_proc.join()
            except RuntimeError:
                pass

        if ssh_tunnel is not None:
            logger.info('Closing ssh tunnel...')
            ssh_tunnel.close()

        # Only exit for a real signal; worker refresh falls through
        if signalnum is not None:
            logger.info('Shutdown complete.')
            return os._exit(signalnum)

    signal.signal(signal.SIGTERM, cleanup)
    signal.signal(signal.SIGINT, cleanup)
    signal.signal(signal.SIGQUIT, cleanup)

    def create_queue_consumer():
        # The consumer drains `queue` in its own process via run_queue
        process_args = (queue, beaver_config, logger)
        proc = multiprocessing.Process(target=run_queue, args=process_args)

        logger.info('Starting queue consumer')
        proc.start()
        return proc

    def create_queue_producer():
        # Runs inside the child worker process
        worker = Worker(beaver_config,
                        queue_consumer_function=create_queue_consumer,
                        callback=queue.put,
                        logger=logger)
        worker.loop()

    while 1:

        try:
            if REOPEN_FILES:
                logger.debug(
                    'Detected non-linux platform. Files will be reopened for tailing'
                )

            # `t` tracks when the current worker process was started
            t = time.time()
            while True:
                if worker_proc is None or not worker_proc.is_alive():
                    logger.info('Starting worker...')
                    t = time.time()
                    worker_proc = multiprocessing.Process(
                        target=create_queue_producer)
                    worker_proc.start()
                    logger.info('Working...')
                worker_proc.join(10)

                # Optionally recycle long-lived worker processes
                if beaver_config.get('refresh_worker_process'):
                    if beaver_config.get(
                            'refresh_worker_process') < time.time() - t:
                        logger.info(
                            'Worker has exceeded refresh limit. Terminating process...'
                        )
                        cleanup(None, None)

        except KeyboardInterrupt:
            pass
示例#26
0
 def _get_config(self, **kwargs):
     """Build a BeaverConfig from mocked args with no config file."""
     mocked_args = mock.Mock(config=None, **kwargs)
     return BeaverConfig(mocked_args)
示例#27
0
 def _get_config(self, **kwargs):
     """Build a BeaverConfig backed by an empty temporary config file."""
     scratch = tempfile.NamedTemporaryFile(delete=True)
     mocked_args = mock.Mock(config=scratch.name, **kwargs)
     return BeaverConfig(mocked_args)
 def setUp(self):
     """Point the config at the square-bracket-section fixture, bind mode."""
     args = lambda: None  # minimal stand-in for parsed CLI options
     args.config = 'tests/square_bracket_sections.ini'
     args.mode = 'bind'
     self.config = args
     self.beaver_config = BeaverConfig(self.config)