def __init__(self, config, db=None, stackdistiller=None, trigger_defs=None,
             time_sync=None):
    """Wire up a TriggerManager from config.

    :param config: dict or ConfigManager with winchester settings; it is
        wrapped, validated, and its config_path entries registered.
    :param db: optional pre-built DBInterface (else built from the
        'database' config section).
    :param stackdistiller: optional pre-built Distiller (else built from
        the optional 'distiller_config' file).
    :param trigger_defs: optional list of TriggerDefinition objects (else
        loaded from the optional 'trigger_definitions' config file).
    :param time_sync: optional TimeSync instance (else a default one).
    """
    config = ConfigManager.wrap(config, self.config_description())
    self.config = config
    self.debug_manager = debugging.DebugManager()
    self.trigger_definitions = []
    config.check_config()
    config.add_config_path(*config['config_path'])
    if time_sync is None:
        time_sync = ts.TimeSync()
    self.time_sync = time_sync
    if db is not None:
        self.db = db
    else:
        self.db = DBInterface(config['database'])
    if stackdistiller is not None:
        self.distiller = stackdistiller
    else:
        # distiller_config is optional
        # NOTE(review): if 'distiller_config' is absent here, self.distiller
        # is never assigned — presumably the config description supplies a
        # default; confirm before relying on self.distiller existing.
        if config.contains('distiller_config'):
            dist_config = config.load_file(config['distiller_config'])
            plugmap = self._load_plugins(config['distiller_trait_plugins'],
                                         distiller.DEFAULT_PLUGINMAP)
            self.distiller = distiller.Distiller(
                dist_config,
                trait_plugin_map=plugmap,
                catchall=config['catch_all_notifications'])
    if trigger_defs is not None:
        self.trigger_definitions = trigger_defs
        # Caller-supplied definitions still get our debug manager attached.
        for t in self.trigger_definitions:
            t.set_debugger(self.debug_manager)
    else:
        # trigger_definition config file is optional
        if config.contains('trigger_definitions'):
            defs = config.load_file(config['trigger_definitions'])
            self.trigger_definitions = [
                TriggerDefinition(conf, self.debug_manager)
                for conf in defs
            ]
    # trigger_map is used to quickly access existing trigger_defs
    self.trigger_map = dict(
        (tdef.name, tdef) for tdef in self.trigger_definitions)
    # Simple counters for periodic status reporting.
    self.saved_events = 0
    self.received = 0
    self.last_status = self.current_time()
def main():
    """Entry point for the winchester pipeline worker.

    Parses command-line options, loads the winchester config file, sets up
    logging (optionally from a logging config file), then runs a
    PipelineManager — in the foreground or as a detached daemon.
    """
    parser = argparse.ArgumentParser(description="Winchester pipeline worker")
    parser.add_argument('--config', '-c', default='winchester.yaml',
                        help='The name of the winchester config file')
    parser.add_argument('--name', '-n', default='pipeline_worker',
                        help='The name of this process for logging purposes')
    # FIX: declared as a true flag. Previously this argument had no
    # action, so argparse demanded a value and a bare `-d` was an error,
    # contradicting the help text.
    parser.add_argument('--daemon', '-d', action='store_true',
                        help='Run in daemon mode.')
    args = parser.parse_args()
    conf = ConfigManager.load_config_file(args.config)
    proc_name = args.name
    if 'log_level' in conf:
        level = conf['log_level']
        level = getattr(logging, level.upper())
    else:
        level = logging.INFO
    if 'log_file' in conf:
        log_file = conf['log_file'] % dict(proc_name=proc_name)
    else:
        log_file = '%(proc_name)s.log' % dict(proc_name=proc_name)
    # This is a hack, but it's needed to pass the logfile name & default
    # loglevel into log handlers configured with a config file. (mdragon)
    logging.LOCAL_LOG_FILE = log_file
    logging.LOCAL_DEFAULT_LEVEL = level
    if 'logging_config' in conf:
        fileConfig(conf['logging_config'])
    else:
        logging.basicConfig()
        logging.getLogger('winchester').setLevel(level)
    timesync = time_sync.TimeSync(conf)
    pipe = PipelineManager(conf, time_sync=timesync, proc_name=proc_name)
    if args.daemon:
        print("Backgrounding for daemon mode.")
        with daemon.DaemonContext():
            pipe.run()
    else:
        pipe.run()
def __init__(self, app=None, queue_name=None):
    """Initialize the handler and its TriggerManager.

    :param app: passed through to the base handler unchanged.
    :param queue_name: passed through to the base handler unchanged.
    """
    super(WinchesterHandler, self).__init__(app=app, queue_name=queue_name)
    # The winchester config file path comes from this handler's own config.
    conf_file = self.config_get("config_file")
    config = ConfigManager.load_config_file(conf_file)
    # publishes=True — presumably marks this process as the time-sync
    # publisher rather than a consumer; confirm in the time_sync module.
    self.time_sync = time_sync.TimeSync(config, publishes=True)
    self.trigger_manager = TriggerManager(config, time_sync=self.time_sync)
def __init__(self, config, db=None, pipeline_handlers=None,
             pipeline_config=None, trigger_defs=None, time_sync=None,
             proc_name='pipeline_worker'):
    """Wire up a pipeline-worker manager from config.

    :param config: dict or ConfigManager with winchester settings; it is
        wrapped, validated, and its config_path entries registered.
    :param db: optional pre-built DBInterface (else built from the
        'database' config section).
    :param pipeline_handlers: optional handler plugin map (else loaded
        from the 'pipeline_handlers' config entry).
    :param pipeline_config: optional pipeline definition mapping (else
        loaded from the file named by 'pipeline_config').
    :param trigger_defs: optional list of TriggerDefinition objects (else
        loaded from the optional 'trigger_definitions' config file).
    :param time_sync: optional TimeSync instance (else a default one).
    :param proc_name: label used to distinguish workers in log output.
    """
    # name used to distinguish worker processes in logs
    self.proc_name = proc_name
    logger.debug("PipelineManager(%s): Using config: %s"
                 % (self.proc_name, str(config)))
    config = ConfigManager.wrap(config, self.config_description())
    self.config = config
    self.trigger_definitions = []
    config.check_config()
    config.add_config_path(*config['config_path'])
    if time_sync is None:
        time_sync = ts.TimeSync()
    self.time_sync = time_sync
    if db is not None:
        self.db = db
    else:
        self.db = DBInterface(config['database'])
    if pipeline_handlers is not None:
        self.pipeline_handlers = pipeline_handlers
    else:
        self.pipeline_handlers = self._load_plugins(
            config['pipeline_handlers'])
    logger.debug("Pipeline handlers: %s" % str(self.pipeline_handlers))
    if pipeline_config is not None:
        self.pipeline_config = pipeline_config
    else:
        self.pipeline_config = config.load_file(config['pipeline_config'])
    logger.debug("Pipeline config: %s" % str(self.pipeline_config))
    # Validate/normalize each pipeline's handler list. Replacing values
    # for existing keys while iterating items() is safe — no dict resize.
    for pipeline, handler_configs in self.pipeline_config.items():
        self.pipeline_config[pipeline] = [
            Pipeline.check_handler_config(conf, self.pipeline_handlers)
            for conf in handler_configs
        ]
    if trigger_defs is not None:
        self.trigger_definitions = trigger_defs
    else:
        # trigger_definition config file is optional
        if config.contains('trigger_definitions'):
            defs = config.load_file(config['trigger_definitions'])
            logger.debug("Loaded trigger definitions %s" % str(defs))
            # None debug manager — the pipeline worker runs trigger
            # definitions without debugging, unlike TriggerManager.
            self.trigger_definitions = [
                TriggerDefinition(conf, None) for conf in defs
            ]
    self.trigger_manager = TriggerManager(
        self.config, db=self.db,
        trigger_defs=self.trigger_definitions,
        time_sync=time_sync)
    self.pipeline_worker_batch_size = config['pipeline_worker_batch_size']
    self.pipeline_worker_delay = config['pipeline_worker_delay']
    self.statistics_period = config['statistics_period']
    self.purge_completed_streams = config['purge_completed_streams']
    self.trim_events = config['trim_events']
    self.trim_events_batch_size = config['trim_events_batch_size']
    try:
        self.trim_events_age = timex.parse(str(config['trim_events_age']))
    except timex.TimexError:
        # Unparsable age expression: log it and disable event trimming
        # instead of refusing to start.
        logger.error("Invalid trim event expression: %s Event trimming "
                     "disabled." % config['trim_events_age'])
        self.trim_events_age = None
        self.trim_events = False
    # Simple counters for periodic status reporting.
    self.streams_fired = 0
    self.streams_expired = 0
    self.streams_loaded = 0
    self.last_status = self.current_time()
def setUp(self):
    """Create a publishing TimeSync aimed at a stub endpoint."""
    super(TestTimeSyncEndpointPublisher, self).setUp()
    conf = {"time_sync_endpoint": "example.com"}
    self.time_sync = time_sync.TimeSync(conf, publishes=True)
def setUp(self):
    """Create a TimeSync with no endpoint configured."""
    super(TestTimeSyncNoEndpoint, self).setUp()
    conf = {}
    self.time_sync = time_sync.TimeSync(conf)
def setUp(self):
    """Create a consuming TimeSync aimed at a stub endpoint."""
    super(TestTimeSyncEndpointConsumer, self).setUp()
    conf = {"time_sync_endpoint": "example.com"}
    self.time_sync = time_sync.TimeSync(conf)