def __init__(self, conf):
    super(EventProcessor, self).__init__(conf)
    self._winchester_config = conf.winchester.winchester_config
    self._config_mgr = ConfigManager.load_config_file(self._winchester_config)
    self._trigger_manager = TriggerManager(self._config_mgr)
    self._group = conf.kafka.stream_def_group
    self._tm_lock = threading.Lock()
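The _tm_lock here suggests the shared TriggerManager is fed from more than one consumer thread. A minimal sketch of that locking pattern, assuming a TriggerManager-style object with an add_event method (the method name and event shape are assumptions, not taken from the example above):

import threading

class LockedEventFeeder(object):
    """Sketch: serialize access to a shared trigger manager."""

    def __init__(self, trigger_manager):
        self._trigger_manager = trigger_manager
        self._tm_lock = threading.Lock()

    def process(self, event):
        # Only one consumer thread at a time may feed the manager.
        with self._tm_lock:
            self._trigger_manager.add_event(event)  # assumed API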
Example #2
def main():
    parser = argparse.ArgumentParser(description="Winchester pipeline worker")
    parser.add_argument('--config',
                        '-c',
                        default='winchester.yaml',
                        help='The name of the winchester config file')
    parser.add_argument('--daemon', '-d', action='store_true',
                        help='Run in daemon mode.')
    args = parser.parse_args()
    conf = ConfigManager.load_config_file(args.config)

    if 'logging_config' in conf:
        fileConfig(conf['logging_config'])
    else:
        logging.basicConfig()
        if 'log_level' in conf:
            level = conf['log_level']
            level = getattr(logging, level.upper())
            logging.getLogger('winchester').setLevel(level)
    pipe = PipelineManager(conf)
    if args.daemon:
        print "Backgrounding for daemon mode."
        with daemon.DaemonContext():
            pipe.run()
    else:
        pipe.run()
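For orientation, the winchester.yaml file loaded above becomes a plain mapping before ConfigManager.wrap is applied. A hedged sketch of the keys the examples on this page read; every value is a placeholder, and real deployments will need more:

# Illustrative only -- keys collected from the examples on this page.
conf = {
    'log_level': 'info',
    'config_path': ['/etc/winchester'],
    'database': {'url': 'sqlite:///winchester.db'},
    'trigger_definitions': 'triggers.yaml',
    'pipeline_config': 'pipelines.yaml',
    'pipeline_handlers': {},  # handler name -> module, per Example #5
    'pipeline_worker_batch_size': 1000,
    'pipeline_worker_delay': 10,
    'statistics_period': 10,
    'purge_completed_streams': True,
}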
Example #3
    def __init__(self, config, db=None, stackdistiller=None, trigger_defs=None):
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        self.debug_manager = debugging.DebugManager()
        config.check_config()
        config.add_config_path(*config['config_path'])

        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])
        if stackdistiller is not None:
            self.distiller = stackdistiller
        else:
            dist_config = config.load_file(config['distiller_config'])
            plugmap = self._load_plugins(config['distiller_trait_plugins'],
                                         distiller.DEFAULT_PLUGINMAP)
            self.distiller = distiller.Distiller(dist_config,
                                                 trait_plugin_map=plugmap,
                                                 catchall=config['catch_all_notifications'])
        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
            for t in self.trigger_definitions:
                t.set_debugger(self.debug_manager)
        else:
            defs = config.load_file(config['trigger_definitions'])
            self.trigger_definitions = [TriggerDefinition(conf, self.debug_manager)
                                        for conf in defs]
        self.saved_events = 0
        self.received = 0
        self.last_status = self.current_time()
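The db=, stackdistiller=, and trigger_defs= keywords let callers inject collaborators instead of having them built from config, which is convenient in tests. A sketch, assuming this __init__ belongs to winchester's TriggerManager and that check_config() accepts the minimal mapping shown (both are assumptions):

class Fake(object):
    """Minimal stand-in for injected collaborators."""

tm = TriggerManager(
    {'config_path': [], 'database': {}},  # placeholder config
    db=Fake(),              # bypasses DBInterface(config['database'])
    stackdistiller=Fake(),  # bypasses loading distiller_config
    trigger_defs=[],        # bypasses loading trigger_definitions
)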
Example #4
def __init__(self, conf):
    super(PipelineProcessor, self).__init__(conf)
    self._winchester_config = conf.winchester.winchester_config
    self._config_mgr = ConfigManager.load_config_file(
        self._winchester_config)
    self._group = conf.kafka.stream_def_pipe_group
    self._pm_lock = threading.Lock()
    self._pipe = PipelineManager(self._config_mgr)
Example #5
    def __init__(self,
                 config,
                 db=None,
                 pipeline_handlers=None,
                 pipeline_config=None,
                 trigger_defs=None):
        logger.debug("PipelineManager: Using config: %s" % str(config))
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        config.check_config()
        config.add_config_path(*config['config_path'])
        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])

        if pipeline_handlers is not None:
            self.pipeline_handlers = pipeline_handlers
        else:
            self.pipeline_handlers = self._load_plugins(
                config['pipeline_handlers'])
        logger.debug("Pipeline handlers: %s" % str(self.pipeline_handlers))

        if pipeline_config is not None:
            self.pipeline_config = pipeline_config
        else:
            self.pipeline_config = config.load_file(config['pipeline_config'])

        logger.debug("Pipeline config: %s" % str(self.pipeline_config))
        for pipeline, handler_configs in self.pipeline_config.items():
            self.pipeline_config[pipeline] = [
                Pipeline.check_handler_config(conf, self.pipeline_handlers)
                for conf in handler_configs
            ]

        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
        else:
            defs = config.load_file(config['trigger_definitions'])
            logger.debug("Loaded trigger definitions %s" % str(defs))
            self.trigger_definitions = [
                TriggerDefinition(conf, None) for conf in defs
            ]
        self.trigger_map = dict(
            (tdef.name, tdef) for tdef in self.trigger_definitions)

        self.trigger_manager = TriggerManager(
            self.config, db=self.db, trigger_defs=self.trigger_definitions)

        self.pipeline_worker_batch_size = config['pipeline_worker_batch_size']
        self.pipeline_worker_delay = config['pipeline_worker_delay']
        self.statistics_period = config['statistics_period']
        self.purge_completed_streams = config['purge_completed_streams']
        self.streams_fired = 0
        self.streams_expired = 0
        self.streams_loaded = 0
        self.last_status = self.current_time()
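The trigger_map built from (tdef.name, tdef) pairs gives constant-time lookup of a trigger definition by name. The same construction on stand-in objects (TriggerDef here is a hypothetical substitute for TriggerDefinition):

from collections import namedtuple

TriggerDef = namedtuple('TriggerDef', 'name')
defs = [TriggerDef('instance_boot'), TriggerDef('instance_delete')]
trigger_map = dict((d.name, d) for d in defs)
assert trigger_map['instance_boot'] is defs[0]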
Example #6
    def __init__(self, config, scratchpad):
        """config is a ConfigParser object.

           Use the scratchpad to ensure we don't create multiple
           connections to the db.
        """

        if 'quincy_config' not in scratchpad:
            target = config.get('quince', 'winchester_config')
            logger.debug("Quince is using Winchester config from %s" % target)
            quincy_config = ConfigManager.load_config_file(target)
            quincy_config = ConfigManager.wrap(quincy_config,
                                               self.config_description())

            scratchpad['quincy_config'] = quincy_config
            scratchpad['quincy_driver'] = DBInterface(
                quincy_config['database'])

        self.winchester_config = scratchpad['quincy_config']
        self.driver = scratchpad['quincy_driver']
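The scratchpad is a plain dict shared across handler instances, used as a memo so the DBInterface is constructed only once. The pattern in isolation (names are illustrative):

def cached_driver(scratchpad, build):
    # Build the expensive object on first use; later calls reuse it.
    if 'quincy_driver' not in scratchpad:
        scratchpad['quincy_driver'] = build()
    return scratchpad['quincy_driver']

shared = {}
first = cached_driver(shared, object)
second = cached_driver(shared, object)
assert first is second  # one DB connection shared by all handlers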
Example #7
    def __init__(self,
                 config,
                 db=None,
                 stackdistiller=None,
                 trigger_defs=None,
                 time_sync=None):
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        self.debug_manager = debugging.DebugManager()
        self.trigger_definitions = []
        config.check_config()
        config.add_config_path(*config['config_path'])
        if time_sync is None:
            time_sync = ts.TimeSync()
        self.time_sync = time_sync

        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])
        if stackdistiller is not None:
            self.distiller = stackdistiller
        else:
            # distiller_config is optional
            if config.contains('distiller_config'):
                dist_config = config.load_file(config['distiller_config'])
                plugmap = self._load_plugins(config['distiller_trait_plugins'],
                                             distiller.DEFAULT_PLUGINMAP)
                self.distiller = distiller.Distiller(
                    dist_config,
                    trait_plugin_map=plugmap,
                    catchall=config['catch_all_notifications'])
        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
            for t in self.trigger_definitions:
                t.set_debugger(self.debug_manager)
        else:
            # trigger_definition config file is optional
            if config.contains('trigger_definitions'):
                defs = config.load_file(config['trigger_definitions'])
                self.trigger_definitions = [
                    TriggerDefinition(conf, self.debug_manager)
                    for conf in defs
                ]
        # trigger_map is used to quickly access existing trigger_defs
        self.trigger_map = dict(
            (tdef.name, tdef) for tdef in self.trigger_definitions)
        self.saved_events = 0
        self.received = 0
        self.last_status = self.current_time()
Example #8
    def __init__(self, config, db=None, pipeline_handlers=None,
                 pipeline_config=None, trigger_defs=None):
        logger.debug("PipelineManager: Using config: %s" % str(config))
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        config.check_config()
        config.add_config_path(*config['config_path'])
        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])

        if pipeline_handlers is not None:
            self.pipeline_handlers = pipeline_handlers
        else:
            self.pipeline_handlers = self._load_plugins(config['pipeline_handlers'])
        logger.debug("Pipeline handlers: %s" % str(self.pipeline_handlers))

        if pipeline_config is not None:
            self.pipeline_config = pipeline_config
        else:
            self.pipeline_config = config.load_file(config['pipeline_config'])

        logger.debug("Pipeline config: %s" % str(self.pipeline_config))
        for pipeline, handler_configs in self.pipeline_config.items():
            self.pipeline_config[pipeline] = [
                Pipeline.check_handler_config(conf, self.pipeline_handlers)
                for conf in handler_configs]

        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
        else:
            defs = config.load_file(config['trigger_definitions'])
            logger.debug("Loaded trigger definitions %s" % str(defs))
            self.trigger_definitions = [TriggerDefinition(conf, None) for conf in defs]
        self.trigger_map = dict((tdef.name, tdef) for tdef in self.trigger_definitions)

        self.trigger_manager = TriggerManager(self.config, db=self.db,
                                              trigger_defs=self.trigger_definitions)

        self.pipeline_worker_batch_size = config['pipeline_worker_batch_size']
        self.pipeline_worker_delay = config['pipeline_worker_delay']
        self.statistics_period = config['statistics_period']
        self.purge_completed_streams = config['purge_completed_streams']
        self.streams_fired = 0
        self.streams_expired = 0
        self.streams_loaded = 0
        self.last_status = self.current_time()
Example #9
    def __init__(self, config, db=None, stackdistiller=None, trigger_defs=None,
                 time_sync=None):
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        self.debug_manager = debugging.DebugManager()
        self.trigger_definitions = []
        config.check_config()
        config.add_config_path(*config['config_path'])
        if time_sync is None:
            time_sync = ts.TimeSync()
        self.time_sync = time_sync

        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])
        if stackdistiller is not None:
            self.distiller = stackdistiller
        else:
            # distiller_config is optional
            if config.contains('distiller_config'):
                dist_config = config.load_file(config['distiller_config'])
                plugmap = self._load_plugins(config['distiller_trait_plugins'],
                                             distiller.DEFAULT_PLUGINMAP)
                self.distiller = distiller.Distiller(
                    dist_config,
                    trait_plugin_map=plugmap,
                    catchall=config['catch_all_notifications'])
        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
            for t in self.trigger_definitions:
                t.set_debugger(self.debug_manager)
        else:
            # trigger_definition config file is optional
            if config.contains('trigger_definitions'):
                defs = config.load_file(config['trigger_definitions'])
                self.trigger_definitions = [
                    TriggerDefinition(conf, self.debug_manager)
                    for conf in defs]
        # trigger_map is used to quickly access existing trigger_defs
        self.trigger_map = dict(
            (tdef.name, tdef) for tdef in self.trigger_definitions)
        self.saved_events = 0
        self.received = 0
        self.last_status = self.current_time()
Example #10
def main():
    parser = argparse.ArgumentParser(description="Winchester pipeline worker")
    parser.add_argument('--config',
                        '-c',
                        default='winchester.yaml',
                        help='The name of the winchester config file')
    parser.add_argument('--name',
                        '-n',
                        default='pipeline_worker',
                        help='The name of this process for logging purposes')
    parser.add_argument('--daemon', '-d', action='store_true',
                        help='Run in daemon mode.')
    args = parser.parse_args()

    conf = ConfigManager.load_config_file(args.config)
    proc_name = args.name

    if 'log_level' in conf:
        level = conf['log_level']
        level = getattr(logging, level.upper())
    else:
        level = logging.INFO

    if 'log_file' in conf:
        log_file = conf['log_file'] % dict(proc_name=proc_name)
    else:
        log_file = '%(proc_name)s.log' % dict(proc_name=proc_name)

    # This is a hack, but it's needed to pass the logfile name & default
    # loglevel into log handlers configured with a config file. (mdragon)
    logging.LOCAL_LOG_FILE = log_file
    logging.LOCAL_DEFAULT_LEVEL = level

    if 'logging_config' in conf:
        fileConfig(conf['logging_config'])
    else:
        logging.basicConfig()
        logging.getLogger('winchester').setLevel(level)
    timesync = time_sync.TimeSync(conf)
    pipe = PipelineManager(conf, time_sync=timesync, proc_name=proc_name)
    if args.daemon:
        print("Backgrounding for daemon mode.")
        with daemon.DaemonContext():
            pipe.run()
    else:
        pipe.run()
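The log_file value is an old-style format string with named placeholders, so a single config entry can embed each worker's name:

log_file = '%(proc_name)s.log' % dict(proc_name='pipeline_worker')
assert log_file == 'pipeline_worker.log'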
Example #11
def main():
    parser = argparse.ArgumentParser(description="Winchester pipeline worker")
    parser.add_argument('--config', '-c', default='winchester.yaml',
                        help='The name of the winchester config file')
    parser.add_argument('--name', '-n', default='pipeline_worker',
                        help='The name of this process for logging purposes')
    parser.add_argument('--daemon', '-d', action='store_true',
                        help='Run in daemon mode.')
    args = parser.parse_args()

    conf = ConfigManager.load_config_file(args.config)
    proc_name = args.name

    if 'log_level' in conf:
        level = conf['log_level']
        level = getattr(logging, level.upper())
    else:
        level = logging.INFO

    if 'log_file' in conf:
        log_file = conf['log_file'] % dict(proc_name=proc_name)
    else:
        log_file = '%(proc_name)s.log' % dict(proc_name=proc_name)

    # This is a hack, but it's needed to pass the logfile name & default
    # loglevel into log handlers configured with a config file. (mdragon)
    logging.LOCAL_LOG_FILE = log_file
    logging.LOCAL_DEFAULT_LEVEL = level

    if 'logging_config' in conf:
        fileConfig(conf['logging_config'])
    else:
        logging.basicConfig()
        logging.getLogger('winchester').setLevel(level)
    timesync = time_sync.TimeSync(conf)
    pipe = PipelineManager(conf, time_sync=timesync, proc_name=proc_name)
    if args.daemon:
        print("Backgrounding for daemon mode.")
        with daemon.DaemonContext():
            pipe.run()
    else:
        pipe.run()
Example #12
def main():
    parser = argparse.ArgumentParser(description="Winchester pipeline worker")
    parser.add_argument('--config', '-c', default='winchester.yaml',
                        help='The name of the winchester config file')
    parser.add_argument('--daemon', '-d', action='store_true',
                        help='Run in daemon mode.')
    args = parser.parse_args()
    conf = ConfigManager.load_config_file(args.config)

    if 'logging_config' in conf:
        fileConfig(conf['logging_config'])
    else:
        logging.basicConfig()
        if 'log_level' in conf:
            level = conf['log_level']
            level = getattr(logging, level.upper())
            logging.getLogger('winchester').setLevel(level)
    pipe = PipelineManager(conf)
    if args.daemon:
        print "Backgrounding for daemon mode."
        with daemon.DaemonContext():
            pipe.run()
    else:
        pipe.run()
Example #13
    def __init__(self,
                 config,
                 db=None,
                 stackdistiller=None,
                 trigger_defs=None):
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        self.debug_manager = debugging.DebugManager()
        config.check_config()
        config.add_config_path(*config['config_path'])

        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])
        if stackdistiller is not None:
            self.distiller = stackdistiller
        else:
            dist_config = config.load_file(config['distiller_config'])
            plugmap = self._load_plugins(config['distiller_trait_plugins'],
                                         distiller.DEFAULT_PLUGINMAP)
            self.distiller = distiller.Distiller(
                dist_config,
                trait_plugin_map=plugmap,
                catchall=config['catch_all_notifications'])
        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
            for t in self.trigger_definitions:
                t.set_debugger(self.debug_manager)
        else:
            defs = config.load_file(config['trigger_definitions'])
            self.trigger_definitions = [
                TriggerDefinition(conf, self.debug_manager) for conf in defs
            ]
        self.saved_events = 0
        self.received = 0
        self.last_status = self.current_time()
Example #14
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from winchester.config import ConfigManager
from winchester.models import Base

target_metadata = Base.metadata

winchester_config = ConfigManager.load_config_file(
    config.get_main_option("winchester_config"))
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
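The excerpt cuts off at the docstring; in the stock alembic env.py template the offline runner configures the context from a bare URL and runs the migrations in a transaction. A sketch of that body, assuming the winchester database section carries a 'url' key as in Examples #22 and #25:

def run_migrations_offline():
    # Offline mode: emit SQL from a URL alone, no Engine or DBAPI needed.
    url = winchester_config['database']['url']  # assumed config shape
    context.configure(url=url, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()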
Example #15
def __init__(self, app=None, queue_name=None):
    super(WinchesterHandler, self).__init__(app=app, queue_name=queue_name)
    conf_file = self.config_get("config_file")
    config = ConfigManager.load_config_file(conf_file)
    self.time_sync = time_sync.TimeSync(config, publishes=True)
    self.trigger_manager = TriggerManager(config, time_sync=self.time_sync)
Example #16
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from winchester.config import ConfigManager
from winchester.models import Base

target_metadata = Base.metadata

winchester_config = ConfigManager.load_config_file(
    config.get_main_option("winchester_config"))
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
Example #17
def __init__(self, app=None, queue_name=None):
    super(WinchesterHandler, self).__init__(app=app, queue_name=queue_name)
    conf_file = self.config_get("config_file")
    config = ConfigManager.load_config_file(conf_file)
    self.trigger_manager = TriggerManager(config)
Example #18
    def __init__(self, winchester_config):
        self.winchester_config = winchester_config

        self.config = ConfigManager.load_config_file(winchester_config)
        self.trigger_manager = TriggerManager(self.config)
        self.pipe = PipelineManager(self.config)
Example #19
def __init__(self, app=None, queue_name=None):
    super(WinchesterHandler, self).__init__(app=app, queue_name=queue_name)
    conf_file = self.config_get("config_file")
    config = ConfigManager.load_config_file(conf_file)
    self.time_sync = time_sync.TimeSync(config, publishes=True)
    self.trigger_manager = TriggerManager(config, time_sync=self.time_sync)
Example #20
    def __init__(self, config, db=None, pipeline_handlers=None,
                 pipeline_config=None, trigger_defs=None, time_sync=None,
                 proc_name='pipeline_worker'):
        # name used to distinguish worker processes in logs
        self.proc_name = proc_name

        logger.debug("PipelineManager(%s): Using config: %s"
                     % (self.proc_name, str(config)))
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        self.trigger_definitions = []
        config.check_config()
        config.add_config_path(*config['config_path'])
        if time_sync is None:
            time_sync = ts.TimeSync()
        self.time_sync = time_sync

        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])

        if pipeline_handlers is not None:
            self.pipeline_handlers = pipeline_handlers
        else:
            self.pipeline_handlers = self._load_plugins(
                config['pipeline_handlers'])
        logger.debug("Pipeline handlers: %s" % str(self.pipeline_handlers))

        if pipeline_config is not None:
            self.pipeline_config = pipeline_config
        else:
            self.pipeline_config = config.load_file(config['pipeline_config'])

        logger.debug("Pipeline config: %s" % str(self.pipeline_config))
        for pipeline, handler_configs in self.pipeline_config.items():
            self.pipeline_config[pipeline] = [
                Pipeline.check_handler_config(conf,
                                              self.pipeline_handlers)
                for conf in handler_configs]

        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
        else:
            # trigger_definition config file is optional
            if config.contains('trigger_definitions'):
                defs = config.load_file(config['trigger_definitions'])
                logger.debug("Loaded trigger definitions %s" % str(defs))
                self.trigger_definitions = [
                    TriggerDefinition(conf, None) for conf in defs]

        self.trigger_manager = TriggerManager(
            self.config, db=self.db,
            trigger_defs=self.trigger_definitions,
            time_sync=time_sync)

        self.pipeline_worker_batch_size = config['pipeline_worker_batch_size']
        self.pipeline_worker_delay = config['pipeline_worker_delay']
        self.statistics_period = config['statistics_period']
        self.purge_completed_streams = config['purge_completed_streams']
        self.trim_events = config['trim_events']
        self.trim_events_batch_size = config['trim_events_batch_size']
        try:
            self.trim_events_age = timex.parse(str(config['trim_events_age']))
        except timex.TimexError:
            logger.error("Invalid trim event expression: %s Event trimming "
                         "disabled." % config['trim_events_age'])
            self.trim_events_age = None
            self.trim_events = False
        self.streams_fired = 0
        self.streams_expired = 0
        self.streams_loaded = 0
        self.last_status = self.current_time()
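Example #20 treats trim_events_age as untrusted input: a bad expression logs an error and disables trimming instead of crashing the worker. The same parse-or-disable pattern in isolation, with int() standing in for timex.parse so the sketch is self-contained:

import logging

logger = logging.getLogger(__name__)

def parse_age(raw):
    # Return (age, enabled); a bad value logs an error and disables trimming.
    try:
        return int(raw), True
    except ValueError:
        logger.error("Invalid trim event expression: %s "
                     "Event trimming disabled." % raw)
        return None, False

assert parse_age("30") == (30, True)
assert parse_age("thirty") == (None, False)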
Example #21
    def __init__(self,
                 config,
                 db=None,
                 pipeline_handlers=None,
                 pipeline_config=None,
                 trigger_defs=None,
                 time_sync=None,
                 proc_name='pipeline_worker'):
        # name used to distinguish worker processes in logs
        self.proc_name = proc_name

        logger.debug("PipelineManager(%s): Using config: %s" %
                     (self.proc_name, str(config)))
        config = ConfigManager.wrap(config, self.config_description())
        self.config = config
        self.trigger_definitions = []
        config.check_config()
        config.add_config_path(*config['config_path'])
        if time_sync is None:
            time_sync = ts.TimeSync()
        self.time_sync = time_sync

        if db is not None:
            self.db = db
        else:
            self.db = DBInterface(config['database'])

        if pipeline_handlers is not None:
            self.pipeline_handlers = pipeline_handlers
        else:
            self.pipeline_handlers = self._load_plugins(
                config['pipeline_handlers'])
        logger.debug("Pipeline handlers: %s" % str(self.pipeline_handlers))

        if pipeline_config is not None:
            self.pipeline_config = pipeline_config
        else:
            self.pipeline_config = config.load_file(config['pipeline_config'])

        logger.debug("Pipeline config: %s" % str(self.pipeline_config))
        for pipeline, handler_configs in self.pipeline_config.items():
            self.pipeline_config[pipeline] = [
                Pipeline.check_handler_config(conf, self.pipeline_handlers)
                for conf in handler_configs
            ]

        if trigger_defs is not None:
            self.trigger_definitions = trigger_defs
        else:
            # trigger_definition config file is optional
            if config.contains('trigger_definitions'):
                defs = config.load_file(config['trigger_definitions'])
                logger.debug("Loaded trigger definitions %s" % str(defs))
                self.trigger_definitions = [
                    TriggerDefinition(conf, None) for conf in defs
                ]

        self.trigger_manager = TriggerManager(
            self.config,
            db=self.db,
            trigger_defs=self.trigger_definitions,
            time_sync=time_sync)

        self.pipeline_worker_batch_size = config['pipeline_worker_batch_size']
        self.pipeline_worker_delay = config['pipeline_worker_delay']
        self.statistics_period = config['statistics_period']
        self.purge_completed_streams = config['purge_completed_streams']
        self.trim_events = config['trim_events']
        self.trim_events_batch_size = config['trim_events_batch_size']
        try:
            self.trim_events_age = timex.parse(str(config['trim_events_age']))
        except timex.TimexError:
            logger.error("Invalid trim event expression: %s Event trimming "
                         "disabled." % config['trim_events_age'])
            self.trim_events_age = None
            self.trim_events = False
        self.streams_fired = 0
        self.streams_expired = 0
        self.streams_loaded = 0
        self.last_status = self.current_time()
Example #22
def __init__(self, config):
    self.config = ConfigManager.wrap(config, self.config_description())
    self.db_url = config['url']
    self.echo_sql = config.get('echo_sql', False)
Example #23
    def __init__(self, winchester_config):
        self.winchester_config = winchester_config

        self.config = ConfigManager.load_config_file(winchester_config)
        self.trigger_manager = TriggerManager(self.config)
        self.pipe = PipelineManager(self.config)
Example #24
def __init__(self, kafka_config, winchester_config):
    self.winchester_config = winchester_config
    self.kafka_config = kafka_config
    self.config_mgr = ConfigManager.load_config_file(
        self.winchester_config)
Example #25
def __init__(self, config):
    self.config = ConfigManager.wrap(config, self.config_description())
    self.db_url = config['url']
    self.echo_sql = config.get('echo_sql', False)
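Closing the page, Examples #22 and #25 show that this interface reads only a 'url' key plus an optional echo_sql flag. A hedged construction sketch (the SQLite URL is a placeholder):

# echo_sql falls back to False when the key is absent.
db = DBInterface({'url': 'sqlite:///winchester.db'})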