class WorkerManagement(object):
    """Alamo worker management object.

    Parses command-line arguments, reads the worker configuration file
    and loads the plugins declared in its ``[default]`` section.
    """

    # filled in by __init__ / parse_config
    parser = None
    config = None

    def __init__(self):
        self.parser = self.build_args()
        self.manager = PluginManager()

    @staticmethod
    def build_args():
        """Build management arguments.

        :return: an ``argparse.ArgumentParser`` exposing ``--config``/``-c``.
        """
        parser = argparse.ArgumentParser()
        parser.add_argument(
            '--config', '-c', type=str, required=False,
            help=(
                'Provide config file for Alamo worker. '
                'If not provided default config file will be taken.'
            )
        )
        return parser

    def parse_config(self):
        """Parse config file.

        Falls back to the ``config.cfg`` bundled next to this module when
        ``--config`` is not given on the command line.
        """
        args = self.parser.parse_args()
        config_file = args.config or os.path.join(
            os.path.dirname(__file__), 'config.cfg'
        )
        self.config = ConfigParser()
        self.config.read(config_file)

    def execute(self):
        """Read configuration and load every configured plugin."""
        self.parse_config()
        try:
            plugins = self.config.get('default', 'plugins').split(',')
        except NoOptionError:
            plugins = []
        # list could have "empty" string ...
        plugins = [plugin for plugin in plugins if plugin]
        if not plugins:
            # Explicit raise instead of `assert`: assertions are stripped
            # when Python runs with -O, which would silently skip this
            # validation.  AssertionError is kept for backward compatibility.
            raise AssertionError(
                'At least one plugin should be defined in config file.'
            )
        self.manager.load(self.config, plugins)

        zero_mq_host = self.config.get('zero_mq', 'remote_host')
        zero_mq_port = self.config.get('zero_mq', 'remote_port')
        logger.info(zero_mq_host)
        logger.info(zero_mq_port)
class PluginManagerTest(TestCase): def setUp(self): self.test_manager = PluginManager() self.config = MagicMock() self.config.items = MagicMock(return_value=[('a', 'a')]) def test_singletons(self): b = PluginManager() self.assertEqual(id(self.test_manager), id(b)) def test_plugin_instantiation(self): # test plugin already registered self.test_manager._instantiate_plugins(self.config) self.assertTrue( isinstance(self.test_manager._plugins['test'], TestPlugin)) self.assertFalse( isinstance(self.test_manager._classes['test'], TestPlugin)) @patch('alamo_worker.plugins.logger.warn') def test_re_register(self, logger_mock): self.test_manager.register(TestPlugin) self.assertTrue(logger_mock.called) @patch('alamo_worker.plugins.logger.error') def test_plugin_subclass(self, logger_mock): class Fake(object): pass self.test_manager.register(Fake) self.assertTrue(logger_mock.called) @patch('alamo_worker.plugins.tests.test_plugin_manager.TestPlugin.execute') def test_dispatch(self, execute): payload = { 'type': 'test', 'foo': 'foo', 'bar': 'bar' } self.test_manager._instantiate_plugins(self.config) self.test_manager.dispatch(payload) self.assertTrue(execute.called)
def __init__(self, loop=None):
    """Bootstrap the worker.

    :param loop: optional event loop; defaults to a fresh ``ZMQEventLoop``.
    """
    self.loop = loop or ZMQEventLoop()
    self.scheduler_pool = []
    self.manager = PluginManager()
    # Fetch remote environment definitions and push them into the global
    # settings BEFORE plugins are loaded — ordering matters here.
    settings.reconfigure(
        AttributeDict(**{'ENVIRONMENTS': self._get_environments()})
    )
    self._load_plugins()
    self._connect_to_queue()
    # open() registers the scheduler sockets last, once everything else
    # is ready to consume from them
    self.open()
def setUp(self):
    """Prepare an event loop, the manager singleton and statsd options."""
    self.loop = asyncio.get_event_loop()
    self.test_manager = PluginManager()
    self.statsd_config = [
        ('statsd_' + option, value)
        for option, value in (
            ('host', 'localhost'),
            ('port', 8051),
            ('prefix', 'stats'),
            ('maxudpsize', 512),
        )
    ]
class PluginManagerTest(TestCase): def setUp(self): self.test_manager = PluginManager() self.config = MagicMock() self.config.items = MagicMock(return_value=[('a', 'a')]) def test_singletons(self): b = PluginManager() self.assertEqual(id(self.test_manager), id(b)) def test_plugin_instantiation(self): # test plugin already registered self.test_manager._instantiate_plugins(self.config) self.assertTrue( isinstance(self.test_manager._plugins['test'], TestPlugin)) self.assertFalse( isinstance(self.test_manager._classes['test'], TestPlugin)) @patch('alamo_worker.plugins.logger.warn') def test_re_register(self, logger_mock): self.test_manager.register(TestPlugin) self.assertTrue(logger_mock.called) @patch('alamo_worker.plugins.logger.error') def test_plugin_subclass(self, logger_mock): class Fake(object): pass self.test_manager.register(Fake) self.assertTrue(logger_mock.called) @patch('alamo_worker.plugins.tests.test_plugin_manager.TestPlugin.execute') def test_dispatch(self, execute): payload = {'type': 'test', 'foo': 'foo', 'bar': 'bar'} self.test_manager._instantiate_plugins(self.config) self.test_manager.dispatch(payload) self.assertTrue(execute.called)
def setUp(self):
    """Create the manager singleton and a mocked config object."""
    manager = PluginManager()
    config = MagicMock()
    config.items = MagicMock(return_value=[('a', 'a')])
    self.test_manager = manager
    self.config = config
def __init__(self):
    """Set up the CLI argument parser and the plugin manager."""
    arg_parser = self.build_args()
    self.parser = arg_parser
    self.manager = PluginManager()
class PluginManagerTest(TestCase): def setUp(self): self.loop = asyncio.get_event_loop() self.test_manager = PluginManager() self.statsd_config = [ ('statsd_host', 'localhost'), ('statsd_port', 8051), ('statsd_prefix', 'stats'), ('statsd_maxudpsize', 512) ] def tearDown(self): self.loop.close() def test_singletons(self): b = PluginManager() self.assertEqual(id(self.test_manager), id(b)) def test_plugin_instantiation_with_types(self): self.test_manager._instantiate_plugins(['test']) self.assertTrue( isinstance(self.test_manager._plugins['test'], TestPlugin)) self.assertFalse( isinstance(self.test_manager._classes['test'], TestPlugin)) def test_plugin_instantiation(self): # test plugin already registered self.test_manager._instantiate_plugins(['test']) self.assertTrue( isinstance(self.test_manager._plugins['test'], TestPlugin)) self.assertFalse( isinstance(self.test_manager._classes['test'], TestPlugin)) @patch('alamo_worker.plugins.logger.warn') def test_re_register(self, logger_mock): self.test_manager.register(TestPlugin) self.assertTrue(logger_mock.called) @patch('alamo_worker.plugins.logger.error') def test_plugin_subclass(self, logger_mock): class Fake(object): pass self.test_manager.register(Fake) self.assertTrue(logger_mock.called) @patch('alamo_worker.plugins.tests.test_plugin_manager.TestPlugin.execute') def test_dispatch(self, execute): payload = { 'type': 'test', 'foo': 'foo', 'bar': 'bar', 'triggers': [] } self.test_manager._instantiate_plugins(['test']) self.loop.run_until_complete( asyncio.gather(*[self.test_manager.dispatch(self.loop, payload)]) ) self.assertTrue(execute.called)
class WorkerManagement(object):
    """Alamo worker management object.

    Owns the event loop, the scheduler connections, the plugin manager
    and the Redis/Sentinel-backed alerter queue.  Usable as a context
    manager so open sockets are always closed on exit.
    """

    # used to tell coroutines when to exit
    soft_exit = False
    open_monitor_socket = False
    # scheduler connections
    scheduler_pool = None

    def __init__(self, loop=None):
        """Bootstrap the worker.

        :param loop: optional event loop; defaults to a new ``ZMQEventLoop``.
        """
        self.loop = loop or ZMQEventLoop()
        self.scheduler_pool = []
        self.manager = PluginManager()
        # fetch remote environment definitions before plugins are loaded
        settings.reconfigure(
            AttributeDict(**{'ENVIRONMENTS': self._get_environments()})
        )
        self._load_plugins()
        self._connect_to_queue()
        self.open()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def open(self):
        """Open sockets and register them in poller object."""
        for address in settings.SCHEDULERS:
            self.scheduler_pool.append(SchedulerConnection(address))

    def close(self):
        """Unregister open sockets from poller objects and close them."""
        logger.info('Closing open sockets ...')
        for scheduler in self.scheduler_pool:
            scheduler.close()
        logger.info('Worker was closed successfully ...')

    def send(self, result):
        """Process check with received result to alerter."""
        self.queue.enqueue('alamo_alerter.job.analyze', args=(result,),
                           result_ttl=settings.WORKER_RESULT_TTL)

    def _get_environments(self):
        """Fetch environment definitions from ``settings.ENVIRONMENT_URL``.

        Best effort: connection and payload errors are logged and an
        empty dict is returned instead of raising.
        """
        data = {}
        try:
            session = Session()
            session.verify = False
            response = session.get(settings.ENVIRONMENT_URL)
            response.raise_for_status()
            data = response.json()
        except ConnectionError as e:
            logger.critical('{}'.format(e))
        except (ValueError, HTTPError) as e:
            logger.error('{}'.format(e))
        return data

    def _connect_to_queue(self):
        """Establish connection to ALAMO-Scheduler ZeroMQ."""
        # should be taken from service discovery...
        nodes = settings.SENTINEL_NODES
        sentinels = [(node, settings.SENTINEL_PORT) for node in nodes]
        sentinel = Sentinel(
            sentinels,
            socket_timeout=settings.SENTINEL_SOCKET_TIMEOUT,
            db=settings.REDIS_DB
        )
        conn = sentinel.master_for(settings.SENTINEL_CLUSTER_NAME)
        self.queue = Queue(
            settings.ALERTER_QUEUE, connection=conn
        )

    def _load_plugins(self):
        """Load every plugin listed in ``settings.PLUGINS``."""
        plugins = settings.PLUGINS
        # list could have "empty" string ...
        plugins = [plugin for plugin in plugins if plugin]
        if not plugins:
            # explicit raise instead of `assert`: assertions are stripped
            # under ``python -O``; AssertionError kept for compatibility
            raise AssertionError('At least one plugin should be defined.')
        self.manager.load(plugins)

    @asyncio.coroutine
    def wait_and_kill(self, sig):
        """Flag all coroutines to finish after receiving signal *sig*."""
        logger.warning('Got `%s` signal. Preparing worker to exit ...', sig)
        self.soft_exit = True

    def register_exit_signals(self):
        """Install handlers for QUIT/INT/TERM on the event loop."""
        for sig in ['SIGQUIT', 'SIGINT', 'SIGTERM']:
            logger.info('Registering handler for `%s` signal '
                        'in current event loop ...', sig)
            # Create the coroutine lazily inside the callback:
            # * ``asyncio.async`` is a SyntaxError on Python 3.7+ (``async``
            #   became a keyword); ``asyncio.ensure_future`` is the
            #   documented replacement since 3.4.4.
            # * scheduling a pre-built coroutine object would raise
            #   RuntimeError if the same signal were delivered twice, and
            #   leaves a never-awaited coroutine when it never fires.
            self.loop.add_signal_handler(
                getattr(signal, sig),
                lambda s=sig: asyncio.ensure_future(self.wait_and_kill(s))
            )

    def generate_futures(self):
        """Yield one receiver (and optionally one monitor) per scheduler."""
        for scheduler in self.scheduler_pool:
            yield self.receiver(scheduler)
            if settings.SCHEDULER_MONITOR:
                yield self.monitor(scheduler)

    def execute(self):
        """Run all worker coroutines until completion, then close the loop."""
        self.register_exit_signals()
        futures = list(self.generate_futures())
        self.loop.run_until_complete(asyncio.gather(*futures))
        self.loop.close()

    @asyncio.coroutine
    def receiver(self, scheduler):
        """Receive checks from *scheduler*, dispatch them to plugins and
        forward results to the alerter queue."""
        logger.info('Starting check receiver coroutine for %s ...', scheduler)
        while True:
            if self.soft_exit:
                logger.info(
                    'Soft exit registered, closing check receiver '
                    'connection to %s ...', repr(scheduler)
                )
                break
            check = yield from scheduler.receive()
            if check is not None:
                result = yield from self.manager.dispatch(self.loop, check)
                if result:
                    # enqueueing is blocking I/O; keep it off the event loop
                    yield from self.loop.run_in_executor(
                        None, self.send, result
                    )
                else:
                    logger.error(
                        'Check with id %s was not dispatched properly!',
                        check.get('id', '<unknown>')
                    )

    @asyncio.coroutine
    def monitor(self, scheduler):
        """Watch connection events from *scheduler* until stopped."""
        if not settings.SCHEDULER_MONITOR:
            return
        logger.info('Starting monitor coroutine for %s ...', scheduler)
        while True:
            if self.soft_exit:
                logger.info(
                    'Soft exit registered, '
                    'closing monitor connection to %s ...', repr(scheduler))
                break
            event = yield from scheduler.receive_event()
            if event is not None:
                if event['event'] == EVENT_MONITOR_STOPPED:
                    break
                elif event['event'] == EVENT_DISCONNECTED:
                    logger.warning(
                        'Retrieve disconnect event from %s.', repr(scheduler)
                    )
                else:
                    logger.info('Retrieve unknown event %s', event['event'])
def test_singletons(self):
    """A second construction must yield the very same instance."""
    duplicate = PluginManager()
    self.assertEqual(id(self.test_manager), id(duplicate))