def donttest_for_configobj_basics_3(self):
    """Verify an unknown option in an ini file raises NotAnOptionError.

    Disabled test (the donttest_ prefix keeps the runner from collecting
    it).  Writes a temporary ini file containing a 'bad_option' that the
    option definitions do not declare, then expects ConfigurationManager
    to reject it.
    """
    n = Namespace()
    n.add_option("name", default='lars')
    n.add_option("awesome", default='lars')
    n.namespace('othersection')
    n.othersection.add_option('foo', default=23)
    tmp_filename = os.path.join(tempfile.gettempdir(), 'test.ini')
    # FIX: the original leaked the file handle via open(...).write(...);
    # the context manager guarantees the file is flushed and closed
    # before ConfigurationManager reads it back.
    with open(tmp_filename, 'w') as ini_file:
        ini_file.write("""
# comment
name=Peter
awesome=
# comment
[othersection]
bad_option=bar
# other comment
""")
    try:
        self.assertRaises(
            NotAnOptionError,
            config_manager.ConfigurationManager,
            [n],
            [tmp_filename],
        )
    finally:
        # always clean up the temp file, even if the assertion fails
        if os.path.isfile(tmp_filename):
            os.remove(tmp_filename)
def run(*crash_ids):
    """Submit each crash_id for reprocessing through RabbitMQ.

    Builds a minimal configman configuration for the queuing crash
    storage, submits every crash id, and returns 0 so the value can be
    used directly as a process exit code.
    """
    definition_source = Namespace()
    definition_source.namespace('queuing')
    definition_source.queuing.add_option(
        'rabbitmq_reprocessing_class',
        default=SingleCrashMQCrashStorage,
    )
    # connection details; secrets are placeholders, not real credentials
    config_dict = {
        'resource': {
            'rabbitmq': {
                'host': 'localhost',
                'port': '5672',
                'virtual_host': '/'
            }
        },
        'secrets': {
            'rabbitmq': {
                'rabbitmq_password': '******',
                'rabbitmq_user': '******'
            }
        }
    }
    config = configuration(
        definition_source=definition_source,
        values_source_list=[config_dict],
    )
    config.queuing.logger = logger
    config.logger = logger
    storage = SingleCrashMQCrashStorage(config=config['queuing'])
    for crash_id in crash_ids:
        # FIX: use the print() function (Python 3 compatible) instead of
        # the Python 2 print statement.
        print(storage.submit(crash_id))
    return 0
def test_as_overlay(self):
    """Overlay two value-source modules; the later module must win."""
    base = Namespace()
    base.add_option('a', default=23)
    base.add_option('b', default='this is b')
    base.namespace('n')
    nested_defaults = (
        ('x', datetime(1999, 12, 31, 11, 59)),
        ('y', timedelta(3)),
        ('z', date(1650, 10, 2)),
    )
    for option_name, option_default in nested_defaults:
        base.n.add_option(option_name, default=option_default)
    base.dynamic_load = None
    manager = ConfigurationManager(
        [base],
        values_source_list=[
            'configman.tests.values_for_module_tests_2',
            'configman.tests.values_for_module_tests_3',
        ])
    config = manager.get_config()
    self.assertEqual(config.a, 99)
    self.assertEqual(config.b, 'now is the time')
    self.assertEqual(config.n.x, datetime(1960, 5, 4, 15, 10))
    self.assertEqual(config.n.y, timedelta(1))
    self.assertEqual(config.n.z, date(1960, 5, 4))
    from configman.tests.values_for_module_tests_3 import Alpha
    self.assertEqual(config.dynamic_load, Alpha)
    self.assertEqual(config.host, 'localhost')
    self.assertEqual(config.port, 5432)
def get_config_manager_with_internal_pg(
    more_definitions=None,
    service_classes=None,
    overrides=None,
):
    """Return a standard config manager extended with internal PG options.

    Adds a 'database' namespace (crashstorage class plus superuser
    credentials) to whatever definitions the caller supplies, then
    delegates to get_standard_config_manager().
    """
    internal_namespace = Namespace()
    internal_namespace.namespace('database')
    internal_namespace.database.add_option(
        'crashstorage_class',
        default='socorro'
        '.external.postgresql.crashstorage.PostgreSQLCrashStorage',
        from_string_converter=class_converter,
        reference_value_from='resource.postgresql')
    internal_namespace.database.add_option(
        name='database_superusername',
        default='test',
        doc='Username to connect to database',
    )
    internal_namespace.database.add_option(
        name='database_superuserpassword',
        default='aPassword',
        doc='Password to connect to database',
    )
    # FIX: build a fresh list instead of appending to the caller's
    # sequence in place (the original mutated more_definitions).
    if isinstance(more_definitions, Sequence):
        more_definitions = list(more_definitions) + [internal_namespace]
    elif more_definitions is not None:
        more_definitions = [more_definitions, internal_namespace]
    else:
        more_definitions = [internal_namespace]
    return get_standard_config_manager(more_definitions=more_definitions,
                                       service_classes=service_classes,
                                       overrides=overrides)
def logging_required_config(app_name):
    """Return a Namespace of the standard 'logging' options for app_name."""
    lc = Namespace()
    lc.namespace("logging")
    # app_name is baked into the syslog line format once, up front
    syslog_format = (
        "%s (pid {process}): "
        "{asctime} {levelname} - {threadName} - "
        "{message}" % app_name
    )
    option_specs = (
        ("syslog_host", "syslog hostname", "localhost"),
        ("syslog_port", "syslog port", 514),
        ("syslog_facility_string",
         'syslog facility string ("user", "local0", etc)',
         "user"),
        ("syslog_line_format_string",
         "python logging system format for syslog entries",
         syslog_format),
        ("syslog_error_logging_level",
         "logging level for the log file (10 - DEBUG, 20 "
         "- INFO, 30 - WARNING, 40 - ERROR, 50 - CRITICAL)",
         40),
        ("stderr_line_format_string",
         "python logging system format for logging to stderr",
         "{asctime} {levelname} - {threadName} - "
         "{message}"),
        ("stderr_error_logging_level",
         "logging level for the logging to stderr (10 - "
         "DEBUG, 20 - INFO, 30 - WARNING, 40 - ERROR, "
         "50 - CRITICAL)",
         10),
    )
    for option_name, option_doc, option_default in option_specs:
        lc.logging.add_option(
            option_name, doc=option_doc, default=option_default
        )
    return lc
def logging_required_config(app_name):
    """Build and return the Namespace holding the logging options."""
    lc = Namespace()
    lc.namespace('logging')
    # local alias keeps the repeated calls short
    add = lc.logging.add_option
    add('syslog_host', doc='syslog hostname', default='localhost')
    add('syslog_port', doc='syslog port', default=514)
    add(
        'syslog_facility_string',
        doc='syslog facility string ("user", "local0", etc)',
        default='user'
    )
    add(
        'syslog_line_format_string',
        doc='python logging system format for syslog entries',
        default='%s (pid {process}): '
                '{asctime} {levelname} - {threadName} - '
                '{message}' % app_name
    )
    add(
        'syslog_error_logging_level',
        doc='logging level for the log file (10 - DEBUG, 20 '
            '- INFO, 30 - WARNING, 40 - ERROR, 50 - CRITICAL)',
        default=40
    )
    add(
        'stderr_line_format_string',
        doc='python logging system format for logging to stderr',
        default='{asctime} {levelname} - {threadName} - '
                '{message}'
    )
    add(
        'stderr_error_logging_level',
        doc='logging level for the logging to stderr (10 - '
            'DEBUG, 20 - INFO, 30 - WARNING, 40 - ERROR, '
            '50 - CRITICAL)',
        default=10
    )
    return lc
def logging_required_config(app_name):
    """Return the 'logging' option Namespace for app_name.

    Every option references its value from the shared 'resource.logging'
    section, so the common reference_value_from is applied in one place.
    """
    lc = Namespace()
    lc.namespace('logging')
    specs = (
        ('syslog_host', 'syslog hostname', 'localhost'),
        ('syslog_port', 'syslog port', 514),
        ('syslog_facility_string',
         'syslog facility string ("user", "local0", etc)',
         'user'),
        ('syslog_line_format_string',
         'python logging system format for syslog entries',
         '%s (pid {process}): '
         '{asctime} {levelname} - {threadName} - '
         '{message}' % app_name),
        ('syslog_error_logging_level',
         'logging level for the log file (10 - DEBUG, 20 '
         '- INFO, 30 - WARNING, 40 - ERROR, 50 - CRITICAL)',
         40),
        ('stderr_line_format_string',
         'python logging system format for logging to stderr',
         '{asctime} {levelname} - {threadName} - '
         '{message}'),
        ('stderr_error_logging_level',
         'logging level for the logging to stderr (10 - '
         'DEBUG, 20 - INFO, 30 - WARNING, 40 - ERROR, '
         '50 - CRITICAL)',
         10),
    )
    for option_name, option_doc, option_default in specs:
        lc.logging.add_option(
            option_name,
            doc=option_doc,
            default=option_default,
            reference_value_from='resource.logging',
        )
    return lc
def test_write_simple(self):
    """Round-trip a config through the 'py' writer and exec the result.

    Writes the configuration out as Python source, executes it, and
    checks the overlaid values survive the round trip.
    """
    rc = Namespace()
    rc.add_option(
        'a',
        default=23
    )
    rc.add_option(
        'b',
        default='this is b'
    )
    rc.namespace('n')
    rc.n.add_option(
        'x',
        default=datetime(1999, 12, 31, 11, 59)
    )
    rc.n.add_option(
        'y',
        default=timedelta(3)
    )
    rc.n.add_option(
        'z',
        default=date(1650, 10, 2)
    )
    cm = ConfigurationManager(
        [rc],
        values_source_list=[
            {
                'a': 68,
                'n.x': datetime(1960, 5, 4, 15, 10),
                'n.y': timedelta(3),
                'n.z': date(2001, 1, 1)
            }
        ]
    )
    s = StringIO()

    @contextlib.contextmanager
    def s_opener():
        yield s
    cm.write_conf('py', s_opener)
    r = s.getvalue()
    g = {}
    l = {}
    # FIX: the statement form "exec r in g, l" is Python-2-only syntax
    # (a SyntaxError under Python 3); the tuple/call form works in both.
    exec(r, g, l)
    self.assertEqual(l['a'], 68)
    self.assertEqual(l['b'], 'this is b')
    self.assertEqual(l['n'].x, datetime(1960, 5, 4, 15, 10))
    self.assertEqual(l['n'].y, timedelta(3))
    self.assertEqual(l['n'].z, date(2001, 1, 1))
def test_write_simple(self):
    """Write a config out as Python source and exec it back in."""
    defs = Namespace()
    defs.add_option('a', default=23)
    defs.add_option('b', default='this is b')
    defs.namespace('n')
    defs.n.add_option('x', default=datetime(1999, 12, 31, 11, 59))
    defs.n.add_option('y', default=timedelta(3))
    defs.n.add_option('z', default=date(1650, 10, 2))
    value_overlay = {
        'a': 68,
        'n.x': datetime(1960, 5, 4, 15, 10),
        'n.y': timedelta(3),
        'n.z': date(2001, 1, 1)
    }
    cm = ConfigurationManager([defs], values_source_list=[value_overlay])
    output = StringIO()

    @contextlib.contextmanager
    def output_opener():
        yield output

    cm.write_conf('py', output_opener)
    generated_source = output.getvalue()
    global_ns = {}
    local_ns = {}
    # execute the generated module text and inspect the resulting names
    six.exec_(generated_source, global_ns, local_ns)
    self.assertEqual(local_ns['a'], 68)
    self.assertEqual(local_ns['b'], 'this is b')
    self.assertEqual(local_ns['n'].x, datetime(1960, 5, 4, 15, 10))
    self.assertEqual(local_ns['n'].y, timedelta(3))
    self.assertEqual(local_ns['n'].z, date(2001, 1, 1))
def _common_config_setup(self):
    """Build a ConfigurationManager with a mocked logger for HBase tests."""
    mock_logging = Mock()
    required_config = Namespace()
    required_config.namespace("hbase")
    required_config.hbase.hbase_class = crashstorage.HBaseCrashStorage
    required_config.hbase.add_option("logger", default=mock_logging)
    value_overlay = {"hbase": {"logger": mock_logging}}
    return ConfigurationManager(
        [required_config],
        app_name="testapp",
        app_version="1.0",
        app_description="app description",
        values_source_list=[value_overlay],
    )
def _common_config_setup(self):
    """Build a ConfigurationManager for FSRadixTreeStorage tests."""
    logger_mock = Mock()
    definitions = Namespace()
    definitions.namespace("filesystem")
    definitions.filesystem.filesystem_class = crashstorage.FSRadixTreeStorage
    definitions.filesystem.add_option("logger", default=logger_mock)
    # point the storage at the test's filesystem root
    value_overlay = {
        "filesystem": {"logger": logger_mock, "fs_root": self.fs_root}
    }
    return ConfigurationManager(
        [definitions],
        app_name="testapp",
        app_version="1.0",
        app_description="app description",
        values_source_list=[value_overlay],
    )
def _common_config_setup(self):
    """Create the ConfigurationManager used by the HBase storage tests."""
    logger_mock = Mock()
    definitions = Namespace()
    definitions.namespace('hbase')
    definitions.hbase.hbase_class = crashstorage.HBaseCrashStorage
    definitions.hbase.add_option('logger', default=logger_mock)
    return ConfigurationManager(
        [definitions],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{'hbase': {'logger': logger_mock}}]
    )
def _common_config_setup(self):
    """Create the ConfigurationManager used by the radix-tree FS tests."""
    logger_mock = Mock()
    definitions = Namespace()
    definitions.namespace('filesystem')
    definitions.filesystem.filesystem_class = crashstorage.FSRadixTreeStorage
    definitions.filesystem.add_option('logger', default=logger_mock)
    filesystem_values = {
        'logger': logger_mock,
        'fs_root': self.fs_root,
    }
    return ConfigurationManager(
        [definitions],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{'filesystem': filesystem_values}]
    )
def donttest_write_flat_with_migration(self):
    """Check the flat 'for_conf' writer output, nesting included.

    Disabled test (donttest_ prefix).  Options still at their defaults
    are written commented out (e.g. "# x='13'"), while overridden or
    non-default values appear uncommented.
    """
    n = Namespace()
    n.add_option('x', default=13, doc='the x')
    n.add_option('y', default=-1, doc='the y')
    n.add_option('z', default='fred', doc='the z')
    n.namespace('o')
    n.o.add_option('x', default=13, doc='the x')
    c = ConfigurationManager(
        [n],
        use_admin_controls=True,
        use_auto_help=False,
        argv_source=[]
    )
    out = StringIO()
    c.write_conf(for_conf, opener=stringIO_context_wrapper(out))
    result = out.getvalue()
    expected = (
        "# name: x\n"
        "# doc: the x\n"
        "# converter: int\n"
        "# x='13'\n"
        "\n"
        "# name: y\n"
        "# doc: the y\n"
        "# converter: int\n"
        "y='-1'\n"
        "\n"
        "# name: z\n"
        "# doc: the z\n"
        "# converter: str\n"
        "z='fred'\n"
        "\n"
        "#-------------------------------------------------------------------------------\n"
        "# o - \n"
        "\n"
        "# name: o.x\n"
        "# doc: the x\n"
        "# converter: int\n"
        "# o.x='13'\n"
        "\n"
    )
    # FIX: corrected the typo "exepected" in the failure message
    self.assertEqual(expected, result,
                     "expected\n%s\nbut got\n%s" % (expected, result))
def test_as_overlay(self):
    """Later value-source modules must override earlier ones."""
    defs = Namespace()
    defs.add_option('a', default=23)
    defs.add_option('b', default='this is b')
    defs.namespace('n')
    defs.n.add_option('x', default=datetime(1999, 12, 31, 11, 59))
    defs.n.add_option('y', default=timedelta(3))
    defs.n.add_option('z', default=date(1650, 10, 2))
    defs.dynamic_load = None
    overlay_modules = [
        'configman.tests.values_for_module_tests_2',
        'configman.tests.values_for_module_tests_3',
    ]
    config = ConfigurationManager(
        [defs],
        values_source_list=overlay_modules
    ).get_config()
    self.assertEqual(config.a, 99)
    self.assertEqual(config.b, 'now is the time')
    self.assertEqual(config.n.x, datetime(1960, 5, 4, 15, 10))
    self.assertEqual(config.n.y, timedelta(1))
    self.assertEqual(config.n.z, date(1960, 5, 4))
    from configman.tests.values_for_module_tests_3 import Alpha
    self.assertEqual(config.dynamic_load, Alpha)
    self.assertEqual(config.host, 'localhost')
    self.assertEqual(config.port, 5432)
def test_as_overlay_bad_symbols_with_strict(self):
    """With --admin.strict, unknown symbols in a value source must raise."""
    defs = Namespace()
    defs.add_option('a', default=23)
    defs.add_option('b', default='this is b')
    defs.namespace('n')
    nested = (
        ('x', datetime(1999, 12, 31, 11, 59)),
        ('y', timedelta(3)),
        ('z', date(1650, 10, 2)),
    )
    for option_name, option_default in nested:
        defs.n.add_option(option_name, default=option_default)
    defs.dynamic_load = None
    sources = [
        'configman.tests.values_for_module_tests_2',
        'configman.tests.values_for_module_tests_4',
        command_line
    ]
    self.assertRaises(
        NotAnOptionError,
        ConfigurationManager,
        [defs],
        values_source_list=sources,
        argv_source=['--admin.strict'],
    )
def _common_config_setup(self):
    """Make a ConfigurationManager for FileSystemCrashStorage tests."""
    mock_logging = Mock()
    required_config = Namespace()
    required_config.namespace('filesystem')
    # replace the bare namespace with the storage class' own requirements
    required_config.filesystem = (
        crashstorage.FileSystemCrashStorage.get_required_config()
    )
    required_config.filesystem.add_option('logger', default=mock_logging)
    filesystem_values = {
        'logger': mock_logging,
        'std_fs_root': self.std_tmp_dir,
        'def_fs_root': self.def_tmp_dir,
        'pro_fs_root': self.pro_tmp_dir,
    }
    return ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{'filesystem': filesystem_values}]
    )
def _common_config_setup(self):
    """Make a ConfigurationManager wired for PostgreSQLCrashStorage."""
    mock_logging = Mock()
    definitions = Namespace()
    definitions.namespace('database')
    definitions.database.crashstorage_class = (
        crashstorage.PostgreSQLCrashStorage
    )
    definitions.database.add_option('logger', default=mock_logging)
    database_values = {
        'logger': mock_logging,
        'database_name': 'socorro_integration_test',
        'database_hostname': 'localhost',
        'database_username': '******',
        'database_password': '******',
    }
    return ConfigurationManager(
        [definitions],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{'database': database_values}]
    )
def _common_config_setup(self):
    """Assemble the ConfigurationManager for the filesystem storage tests."""
    logger_mock = Mock()
    config_definitions = Namespace()
    config_definitions.namespace('filesystem')
    # swap in the full option set declared by the storage class itself
    config_definitions.filesystem = (
        crashstorage.FileSystemCrashStorage.get_required_config()
    )
    config_definitions.filesystem.add_option('logger', default=logger_mock)
    overlay = [{
        'filesystem': {
            'logger': logger_mock,
            'std_fs_root': self.std_tmp_dir,
            'def_fs_root': self.def_tmp_dir,
            'pro_fs_root': self.pro_tmp_dir,
        }
    }]
    config_manager = ConfigurationManager(
        [config_definitions],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=overlay)
    return config_manager
def donttest_write_flat_with_migration(self):
    """Check the flat 'for_conf' writer output, including a sub-namespace.

    Disabled test (donttest_ prefix).  Default-valued options are emitted
    commented out; the overridden ones appear as live assignments.
    """
    n = Namespace()
    n.add_option('x', default=13, doc='the x')
    n.add_option('y', default=-1, doc='the y')
    n.add_option('z', default='fred', doc='the z')
    n.namespace('o')
    n.o.add_option('x', default=13, doc='the x')
    c = ConfigurationManager([n],
                             use_admin_controls=True,
                             use_auto_help=False,
                             argv_source=[])
    out = StringIO()
    c.write_conf(for_conf, opener=stringIO_context_wrapper(out))
    result = out.getvalue()
    expected = (
        "# name: x\n"
        "# doc: the x\n"
        "# converter: int\n"
        "# x='13'\n"
        "\n"
        "# name: y\n"
        "# doc: the y\n"
        "# converter: int\n"
        "y='-1'\n"
        "\n"
        "# name: z\n"
        "# doc: the z\n"
        "# converter: str\n"
        "z='fred'\n"
        "\n"
        "#-------------------------------------------------------------------------------\n"
        "# o - \n"
        "\n"
        "# name: o.x\n"
        "# doc: the x\n"
        "# converter: int\n"
        "# o.x='13'\n"
        "\n")
    # FIX: corrected the typo "exepected" in the failure message
    self.assertEqual(expected, result,
                     "expected\n%s\nbut got\n%s" % (expected, result))
def test_as_overlay_bad_symbols_with_strict(self):
    """Strict mode must reject value sources that define unknown symbols."""
    defs = Namespace()
    defs.add_option('a', default=23)
    defs.add_option('b', default='this is b')
    defs.namespace('n')
    defs.n.add_option('x', default=datetime(1999, 12, 31, 11, 59))
    defs.n.add_option('y', default=timedelta(3))
    defs.n.add_option('z', default=date(1650, 10, 2))
    defs.dynamic_load = None
    self.assertRaises(
        NotAnOptionError,
        ConfigurationManager,
        [defs],
        values_source_list=[
            'configman.tests.values_for_module_tests_2',
            'configman.tests.values_for_module_tests_4',
            command_line
        ],
        argv_source=['--admin.strict'],
    )
def _common_config_setup(self):
    """Assemble the ConfigurationManager for PostgreSQL storage tests."""
    logger_mock = Mock()
    config_definitions = Namespace()
    config_definitions.namespace("database")
    config_definitions.database.crashstorage_class = (
        crashstorage.PostgreSQLCrashStorage
    )
    config_definitions.database.add_option("logger", default=logger_mock)
    overlay = {
        "database": {
            "logger": logger_mock,
            "database_name": "socorro_integration_test",
            "database_hostname": "localhost",
            "database_username": "******",
            "database_password": "******",
        }
    }
    config_manager = ConfigurationManager(
        [config_definitions],
        app_name="testapp",
        app_version="1.0",
        app_description="app description",
        values_source_list=[overlay],
    )
    return config_manager
def get_config_manager_with_internal_pg(
    more_definitions=None,
    service_classes=None,
    overrides=None,
):
    """Return a standard config manager extended with internal PG options.

    Adds a 'database' namespace (crashstorage class plus superuser
    credentials) to whatever definitions the caller supplies, then
    delegates to get_standard_config_manager().
    """
    internal_namespace = Namespace()
    internal_namespace.namespace('database')
    internal_namespace.database.add_option(
        'crashstorage_class',
        default='socorro'
        '.external.postgresql.crashstorage.PostgreSQLCrashStorage',
        from_string_converter=class_converter,
        reference_value_from='resource.postgresql'
    )
    internal_namespace.database.add_option(
        name='database_superusername',
        default='test',
        doc='Username to connect to database',
    )
    internal_namespace.database.add_option(
        name='database_superuserpassword',
        default='aPassword',
        doc='Password to connect to database',
    )
    # FIX: build a fresh list rather than appending to the caller's
    # sequence in place (the original mutated more_definitions).
    if isinstance(more_definitions, Sequence):
        more_definitions = list(more_definitions) + [internal_namespace]
    elif more_definitions is not None:
        more_definitions = [more_definitions, internal_namespace]
    else:
        more_definitions = [internal_namespace]
    return get_standard_config_manager(
        more_definitions=more_definitions,
        service_classes=service_classes,
        overrides=overrides
    )
def config_from_configman():
    """Assemble the configman configuration used by the webapp."""
    definition_source = Namespace()
    definition_source.namespace('logging')
    definition_source.logging = socorro_app.App.required_config.logging
    definition_source.namespace('metricscfg')
    definition_source.metricscfg = socorro_app.App.required_config.metricscfg
    # each remaining section declares exactly one implementation-class option
    class_options = (
        ('elasticsearch', 'elasticsearch_class', ElasticsearchConfig),
        ('queuing', 'rabbitmq_reprocessing_class',
         ReprocessingOneRabbitMQCrashStore),
        ('priority', 'rabbitmq_priority_class',
         PriorityjobRabbitMQCrashStore),
        ('crashdata', 'crash_data_class',
         socorro.external.boto.crash_data.SimplifiedCrashData),
        ('telemetrydata', 'telemetry_data_class',
         socorro.external.boto.crash_data.TelemetryCrashData),
    )
    for section, option_name, option_default in class_options:
        definition_source.namespace(section)
        definition_source[section].add_option(
            option_name, default=option_default
        )
    config = configuration(
        definition_source=definition_source,
        values_source_list=[
            settings.SOCORRO_IMPLEMENTATIONS_CONFIG,
        ]
    )
    # The ReprocessingOneRabbitMQCrashStore crash storage needs to have
    # a "logger" in the config object.  To avoid having to use the
    # logger set up by configman as an aggregate, we just use the
    # same logger as we have here in the webapp.
    for section in ('queuing', 'priority', 'crashdata', 'telemetrydata'):
        config[section].logger = logger
    return config
def get_standard_config_manager(
    more_definitions=None,
    service_classes=None,
    overrides=None,
):
    """Create the ConfigurationManager used by the ES tests.

    more_definitions: extra Namespace(s) merged after the mocked base
        definitions.
    service_classes: a service class, or sequence of classes, registered
        under the 'services' namespace as if loaded by the dataservice
        classes_in_namespaces converter.
    overrides: extra value source(s); the process environment is always
        consulted last.
    """
    # MOCKED CONFIG DONE HERE
    required_config = Namespace()
    required_config.add_option(
        'logger',
        default=SilentFakeLogger(),
        doc='a logger',
    )
    required_config.add_option(
        'executor_identity',
        default=Mock()
    )
    if service_classes:
        required_config.namespace('services')
        if not isinstance(service_classes, Sequence):
            service_classes = (service_classes,)
        for service_class in service_classes:
            # config for the services being tested
            service_name = service_class.__name__.split('.')[-1]
            required_config.services.namespace(service_name)
            # adding the service as if it had been put in via the
            # classes_in_namespaces converter defined in the dataservice
            # package.  Configman will pull the services additional
            # requirements
            required_config.services[service_name].add_option(
                'cls',
                default=service_class,
                from_string_converter=class_converter
            )
    if isinstance(more_definitions, Sequence):
        definitions = [required_config] + list(more_definitions)
    elif more_definitions is not None:
        definitions = [required_config, more_definitions]
    else:
        definitions = [required_config]
    local_overrides = [
        environment,
    ]
    if isinstance(overrides, Sequence):
        # FIX: build a new list instead of extending the caller's list in
        # place (the original mutated the 'overrides' argument).
        overrides = list(overrides) + local_overrides
    elif overrides is not None:
        overrides = [overrides] + local_overrides
    else:
        overrides = local_overrides
    config_manager = ConfigurationManager(
        definitions,
        values_source_list=overrides,
        app_name='ES tests',
        app_description=__doc__,
        argv_source=[]
    )
    return config_manager
def config_from_configman():
    """Build the configman configuration for queue / crash-data components."""
    definition_source = Namespace()
    definition_source.namespace('logging')
    definition_source.logging = socorro_app.App.required_config.logging
    definition_source.namespace('metricscfg')
    definition_source.metricscfg = socorro_app.App.required_config.metricscfg
    definition_source.namespace('elasticsearch')
    definition_source.elasticsearch.add_option(
        'elasticsearch_class', default=ESConnectionContext
    )
    definition_source.namespace('queue')
    # NOTE(review): 'crashqueue_class' is added on the top-level namespace,
    # not under 'queue'; kept as-is to preserve behavior -- confirm intent.
    definition_source.add_option(
        'crashqueue_class', default=PubSubCrashQueue
    )
    data_sections = (
        ('crashdata', 'crash_data_class',
         socorro.external.boto.crash_data.SimplifiedCrashData),
        ('telemetrydata', 'telemetry_data_class',
         socorro.external.boto.crash_data.TelemetryCrashData),
    )
    for section, option_name, option_default in data_sections:
        definition_source.namespace(section)
        definition_source[section].add_option(
            option_name, default=option_default
        )
    return configuration(
        definition_source=definition_source,
        values_source_list=[
            settings.SOCORRO_IMPLEMENTATIONS_CONFIG,
        ]
    )
def config_from_configman():
    """Generate a configman DotDict to pass to configman components."""
    ds = Namespace()
    ds.namespace("logging")
    ds.logging = App.required_config.logging
    ds.namespace("metricscfg")
    ds.metricscfg = App.required_config.metricscfg
    ds.namespace("elasticsearch")
    ds.elasticsearch.add_option(
        "elasticsearch_class",
        default=ESConnectionContext,
    )
    ds.namespace("queue")
    # NOTE(review): this option lands on the root namespace, not "queue";
    # kept as-is to preserve behavior -- confirm intent.
    ds.add_option(
        "crashqueue_class",
        default=import_string(settings.CRASHQUEUE),
    )
    ds.namespace("crashdata")
    ds.crashdata.add_option(
        "crash_data_class",
        default=SimplifiedCrashData,
    )
    ds.namespace("telemetrydata")
    ds.telemetrydata.add_option(
        "telemetry_data_class",
        default=TelemetryCrashData,
    )
    return configuration(
        definition_source=ds,
        values_source_list=[settings.SOCORRO_CONFIG],
    )
import json from django.core.cache import cache from django.conf import settings from socorro.external.es import supersearch from socorro.middleware.middleware_app import MiddlewareApp from configman import Namespace, configuration, class_converter from crashstats import scrubber from crashstats.crashstats import models def_source = Namespace() def_source.namespace('elasticsearch') def_source.elasticsearch.add_option( 'elasticsearch_class', doc='a class that implements the ES connection object', default='socorro.external.es.connection_context.ConnectionContext', from_string_converter=class_converter ) def config_from_configman(): return configuration( definition_source=[ def_source, # This is a tie-over until we totally get rid of the MiddlwareApp. # At the moment it defines some useful options that are required # by SuperSearch implementation.
# ConfigFileFutureProxy, Namespace, environment ) from socorro.app.socorro_app import App from socorro.dataservice.util import ( classes_in_namespaces_converter, ) SERVICES_LIST = ('socorro.external.postgresql.bugs_service.Bugs',) # Allow configman to dynamically load the configuration and classes # for our API dataservice objects def_source = Namespace() def_source.namespace('services') def_source.services.add_option( 'service_list', default=','.join(SERVICES_LIST), from_string_converter=classes_in_namespaces_converter() ) settings.DATASERVICE_CONFIG = configuration( definition_source=[ def_source, App.get_required_config(), ], values_source_list=[ settings.DATASERVICE_CONFIG_BASE, # ConfigFileFutureProxy, environment
def config_from_configman():
    """Build the webapp's configman configuration object."""
    definition_source = Namespace()
    definition_source.namespace('logging')
    definition_source.logging = socorro_app.App.required_config.logging
    # every remaining section declares one implementation-class option
    section_options = (
        ('elasticsearch', 'elasticsearch_class', ElasticsearchConfig),
        ('database', 'database_storage_class', PostgreSQLCrashStorage),
        ('queuing', 'rabbitmq_reprocessing_class',
         ReprocessingOneRabbitMQCrashStore),
        ('priority', 'rabbitmq_priority_class',
         PriorityjobRabbitMQCrashStore),
        ('data', 'crash_data_class',
         socorro.external.boto.crash_data.SimplifiedCrashData),
    )
    for section, option_name, option_default in section_options:
        definition_source.namespace(section)
        definition_source[section].add_option(
            option_name, default=option_default
        )
    config = configuration(
        definition_source=definition_source,
        values_source_list=[
            settings.SOCORRO_IMPLEMENTATIONS_CONFIG,
        ]
    )
    # The ReprocessingOneRabbitMQCrashStore crash storage, needs to have
    # a "logger" in the config object.  To avoid having to use the
    # logger set up by configman as an aggregate, we just use the
    # same logger as we have here in the webapp.
    for section in ('queuing', 'priority', 'data'):
        config[section].logger = logger
    return config
def config_from_configman():
    """Generate a configman DotDict to pass to configman components."""
    defs = Namespace()
    defs.namespace('logging')
    defs.logging = App.required_config.logging
    defs.namespace('metricscfg')
    defs.metricscfg = App.required_config.metricscfg
    defs.namespace('elasticsearch')
    defs.elasticsearch.add_option(
        'elasticsearch_class', default=ESConnectionContext
    )
    defs.namespace('queue')
    # NOTE(review): this option lands on the root namespace, not 'queue';
    # kept as-is to preserve behavior -- confirm intent.
    defs.add_option('crashqueue_class', default=PubSubCrashQueue)
    defs.namespace('crashdata')
    defs.crashdata.add_option(
        'crash_data_class', default=SimplifiedCrashData
    )
    defs.namespace('telemetrydata')
    defs.telemetrydata.add_option(
        'telemetry_data_class', default=TelemetryCrashData
    )
    return configuration(
        definition_source=defs,
        values_source_list=[
            settings.SOCORRO_IMPLEMENTATIONS_CONFIG,
        ]
    )
def config_from_configman():
    """Generate a configman DotDict to pass to configman components."""
    definition_source = Namespace()
    definition_source.namespace('logging')
    definition_source.logging = App.required_config.logging
    definition_source.namespace('metricscfg')
    definition_source.metricscfg = App.required_config.metricscfg
    definition_source.namespace('elasticsearch')
    definition_source.elasticsearch.add_option(
        'elasticsearch_class', default=ESConnectionContext
    )
    definition_source.namespace('queue')
    # NOTE(review): added on the root namespace, not under 'queue';
    # preserved as-is -- confirm intent before changing.
    definition_source.add_option('crashqueue_class', default=PubSubCrashQueue)
    data_classes = (
        ('crashdata', 'crash_data_class', SimplifiedCrashData),
        ('telemetrydata', 'telemetry_data_class', TelemetryCrashData),
    )
    for section, option_name, option_default in data_classes:
        definition_source.namespace(section)
        definition_source[section].add_option(
            option_name, default=option_default
        )
    return configuration(
        definition_source=definition_source,
        values_source_list=[
            settings.SOCORRO_IMPLEMENTATIONS_CONFIG,
        ]
    )
def get_standard_config_manager(
    more_definitions=None,
    service_classes=None,
    overrides=None,
):
    """Create the ConfigurationManager used by the ES tests.

    more_definitions: extra Namespace(s) merged after the mocked base
        definitions.
    service_classes: a service class, or sequence of classes, registered
        under the 'services' namespace.
    overrides: extra value source(s); the process environment is always
        consulted last.
    """
    # MOCKED CONFIG DONE HERE
    required_config = Namespace()
    required_config.add_option(
        'logger',
        default=SilentFakeLogger(),
        doc='a logger',
    )
    required_config.add_option('executor_identity', default=Mock())
    if service_classes:
        required_config.namespace('services')
        if not isinstance(service_classes, Sequence):
            service_classes = (service_classes, )
        for service_class in service_classes:
            # config for the services being tested
            service_name = service_class.__name__.split('.')[-1]
            required_config.services.namespace(service_name)
            # adding the service as if it had been put in via the
            # classes_in_namespaces converter defined in the dataservice
            # package.  Configman will pull the services additional
            # requirements
            required_config.services[service_name].add_option(
                'cls',
                default=service_class,
                from_string_converter=class_converter)
    if isinstance(more_definitions, Sequence):
        definitions = [required_config] + list(more_definitions)
    elif more_definitions is not None:
        definitions = [required_config, more_definitions]
    else:
        definitions = [required_config]
    local_overrides = [
        environment,
    ]
    if isinstance(overrides, Sequence):
        # FIX: build a new list instead of extending the caller's list in
        # place (the original mutated the 'overrides' argument).
        overrides = list(overrides) + local_overrides
    elif overrides is not None:
        overrides = [overrides] + local_overrides
    else:
        overrides = local_overrides
    config_manager = ConfigurationManager(definitions,
                                          values_source_list=overrides,
                                          app_name='ES tests',
                                          app_description=__doc__,
                                          argv_source=[])
    return config_manager
#!/usr/bin/env python3 from configman import (configuration, Namespace) from configman.converters import (class_converter) from iostreams import (StdInStream, StdOutStream) from transforms import (PassThrough) required_config = Namespace() required_config.namespace("input") required_config.input.add_option(name="implementation", default=StdInStream, from_string_converter=class_converter) required_config.namespace("transform") required_config.transform.add_option(name="implementation", default=PassThrough, from_string_converter=class_converter) required_config.namespace("output") required_config.output.add_option(name="implementation", default=StdOutStream, from_string_converter=class_converter) config = configuration(required_config) in_stream = config.input.implementation(config.input) transform = config.transform.implementation(config.transform) out_stream = config.output.implementation(config.output) for message in in_stream: out_stream.send(transform(message))