def test_Processor2015_init(self):
    """Processor2015 builds its rule system from the 'rule_sets' config.

    Verifies two rule sets are created from rule_set_02_str, each with
    the expected action method and rule classes in order.
    """
    cm = ConfigurationManager(
        definition_source=Processor2015.get_required_config(),
        values_source_list=[{
            'rule_sets': rule_set_02_str
        }],
    )
    config = cm.get_config()
    config.logger = Mock()
    p = Processor2015(config)
    ok_(isinstance(p.rule_system, DotDict))
    eq_(len(p.rule_system), 2)

    # NOTE: removed a leftover debug `print` statement (Python-2-only
    # syntax) that dumped p.rule_system.ruleset01 to stdout.
    ok_('ruleset01' in p.rule_system)
    ok_(isinstance(p.rule_system.ruleset01, TransformRuleSystem))
    trs = p.rule_system.ruleset01
    eq_(trs.act, trs.apply_all_rules)
    eq_(len(trs.rules), 2)
    ok_(isinstance(trs.rules[0], BitguardClassifier))
    ok_(isinstance(trs.rules[1], OutOfDateClassifier))

    ok_('ruleset02' in p.rule_system)
    ok_(isinstance(p.rule_system.ruleset02, TransformRuleSystem))
    trs = p.rule_system.ruleset02
    eq_(trs.act, trs.apply_until_action_succeeds)
    eq_(len(trs.rules), 2)
    ok_(isinstance(trs.rules[0], SetWindowPos))
    ok_(isinstance(trs.rules[1], UpdateWindowAttributes))
def test_classes_in_namespaces_converter_5(self):
    """class_list exposes (class_name, class, namespace_name) triples
    whose namespace names follow the '%(name)s_%(index)02d' template."""
    namespace = Namespace()
    namespace.add_option(
        'kls_list',
        default=('socorro.unittest.lib.test_converters.Alpha, '
                 'socorro.unittest.lib.test_converters.Alpha, '
                 'socorro.unittest.lib.test_converters.Alpha'),
        from_string_converter=str_to_classes_in_namespaces_converter(
            '%(name)s_%(index)02d'))
    manager = ConfigurationManager(namespace, [{
        'kls_list': ('socorro.unittest.lib.test_converters.Alpha, '
                     'socorro.unittest.lib.test_converters.Beta, '
                     'socorro.unittest.lib.test_converters.Beta, '
                     'socorro.unittest.lib.test_converters.Alpha'),
        'Alpha_00.a': 21,
        'Beta_01.b': 38,
    }])
    config = manager.get_config()
    assert len(config.kls_list.subordinate_namespace_names) == 4
    for index, (class_name, klass, ns_name) in enumerate(
        config.kls_list.class_list
    ):
        assert isinstance(class_name, str)
        assert class_name == klass.__name__
        assert ns_name == "%s_%02d" % (class_name, index)
def test_basic_02_change_all(self):
    """Command-line arguments override every default in the namespace."""
    option_definitions = self.setup_configman_namespace()
    config = ConfigurationManager(
        definition_source=option_definitions,
        values_source_list=[command_line],
        argv_source=[
            "16",
            "-b=THE SECOND",
            '--gamma="88 99 111 333"',
            "--delta",
        ],
        use_auto_help=False,
    ).get_config()
    expected = {
        "alpha": 16,
        "beta": 'THE SECOND',
        "gamma": [88, 99, 111, 333],
        "delta": True,
        "admin.print_conf": None,
        "admin.dump_conf": '',
        "admin.strict": False,
        "admin.expose_secrets": False
    }
    for key in config.keys_breadth_first():
        self.assertEqual(config[key], expected[key])
def test_for_mapping_nested_namespaces(self):
    """write_conf in 'for_mapping' format flattens nested namespaces into
    double-underscore-joined keys and masks the password value.

    NOTE(review): the expected text below depends on the option docs and
    defaults built by self._some_namespaces() — confirm against that helper.
    """
    n = self._some_namespaces()
    cm = ConfigurationManager(
        n,
        values_source_list=[],
    )
    out = StringIO()
    # capture output in an in-memory buffer instead of a real file
    cm.write_conf(for_mapping, opener=stringIO_context_wrapper(out))
    received = out.getvalue()
    out.close()
    expected = """
# the a (default: '2011-05-04T15:10:00')
aaa='2011-05-04T15:10:00'

# your uncle (default: 98)
c__dwight='98'

# husband from Flintstones (default: 'stupid')
c__fred='stupid'

# wife from Flintstones (default: 'waspish')
c__wilma='waspish'

# my uncle (default: 97)
c__e__dwight='97'

# female neighbor from I Love Lucy (default: 'silly')
d__ethel='silly'

# male neighbor from I Love Lucy (default: 'crabby')
d__fred='crabby'

# the password (default: 'secret')
x__password='******'

# how big in tons (default: 100)
x__size='100'
""".strip()
    self.assertEqual(received.strip(), expected)
def test_basic_key_error_on_save_processed(self):
    """save_processed must raise KeyError when the processed crash is
    missing fields the storage expects."""
    fake_logger = mock.Mock()
    fake_postgres = mock.Mock()
    required_config = PostgreSQLCrashStorage.get_required_config()
    required_config.add_option('logger', default=fake_logger)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': fake_logger,
            'database_class': fake_postgres
        }],
        argv_source=[]
    )
    with config_manager.context() as config:
        crashstorage = PostgreSQLCrashStorage(config)
        # stub the database connection factory
        database = crashstorage.database.return_value = mock.MagicMock()
        ok_(isinstance(database, mock.Mock))

        bad_crash = {
            "product": "Peter",
            "version": "1.0B3",
            "ooid": "abc123",
            "submitted_timestamp": time.time(),
            "unknown_field": 'whatever'
        }
        assert_raises(
            KeyError,
            crashstorage.save_processed,
            bad_crash
        )
def test_classes_in_namespaces_converter_4(self):
    """Each loaded class gets a subordinate namespace whose options can
    be set individually from a values source."""
    definition = Namespace()
    definition.add_option(
        'kls_list',
        default=('collector.unittest.lib.test_converters.Alpha, '
                 'collector.unittest.lib.test_converters.Alpha, '
                 'collector.unittest.lib.test_converters.Alpha'),
        from_string_converter=str_to_classes_in_namespaces_converter(
            '%(name)s_%(index)02d'))
    manager = ConfigurationManager(definition, [{
        'kls_list': ('collector.unittest.lib.test_converters.Alpha, '
                     'collector.unittest.lib.test_converters.Beta, '
                     'collector.unittest.lib.test_converters.Beta, '
                     'collector.unittest.lib.test_converters.Alpha'),
        'Alpha_00.a': 21,
        'Beta_01.b': 38,
    }])
    config = manager.get_config()
    subordinate_names = config.kls_list.subordinate_namespace_names
    self.assertEqual(len(subordinate_names), 4)
    for namespace_name in subordinate_names:
        self.assertTrue(namespace_name in config)
    self.assertEqual(config.Alpha_00.a, 21)
    self.assertEqual(config.Beta_01.b, 38)
def test_requested_processor_id(self):
    """_requested_processor_id passes through ints and the special
    strings 'host'/'auto'; anything else raises ValueError."""
    mock_logging = mock.Mock()
    mock_postgres = mock.Mock()
    required_config = ProcessorAppRegistrationClient.required_config
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
            'database_class': mock_postgres
        }])
    m_registration = mock.Mock()

    class NoRegister(ProcessorAppRegistrationClient):
        # suppress the real registration side effect
        _registration = m_registration

    with config_manager.context() as config:
        registrar = NoRegister(config)
        for requested in (0, 1, 'host', 'auto'):
            self.assertEqual(
                registrar._requested_processor_id(requested),
                requested
            )
        self.assertRaises(ValueError,
                          registrar._requested_processor_id,
                          'dwight')
def get_tuned_config(self, sources, extra_values=None):
    """Build a test config from one or more configman sources.

    :arg sources: one source, or a list of sources, each exposing
        get_required_config()
    :arg extra_values: optional dict of value overrides
    :returns: a configman config with mocked logger and metrics
    """
    if not isinstance(sources, (list, tuple)):
        sources = [sources]
    mock_logging = mock.Mock()
    mock_metrics = mock.Mock()

    def _with_mocks(source):
        # attach the shared mock logger/metrics to each definition
        conf = source.get_required_config()
        conf.add_option('logger', default=mock_logging)
        conf.add_option('metrics', default=mock_metrics)
        return conf

    config_definitions = [_with_mocks(source) for source in sources]

    values_source = {'logger': mock_logging, 'metrics': mock_metrics}
    values_source.update(extra_values or {})

    config_manager = ConfigurationManager(
        config_definitions,
        app_name='testapp',
        app_version='1.0',
        app_description='Elasticsearch integration tests',
        values_source_list=[environment, values_source],
        argv_source=[],
    )
    return config_manager.get_config()
def get_es_conn():
    """Return an Elasticsearch ConnectionContext built from the environment."""
    manager = ConfigurationManager(
        ConnectionContext.get_required_config(),
        values_source_list=[environment]
    )
    return ConnectionContext(manager.get_config())
def test_basic_crashstorage(self):
    """CrashStorageBase contract: saves are no-ops, getters raise
    NotImplementedError, new_crashes() returns an empty list."""
    required_config = Namespace()
    mock_logging = Mock()
    required_config.add_option('logger', default=mock_logging)
    required_config.update(CrashStorageBase.required_config)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
        }],
        argv_source=[])
    with config_manager.context() as config:
        crashstorage = CrashStorageBase(
            config,
            quit_check_callback=fake_quit_check)
        crashstorage.save_raw_crash({}, 'payload', 'ooid')
        crashstorage.save_processed({})
        self.assertRaises(NotImplementedError,
                          crashstorage.get_raw_crash, 'ooid')
        self.assertRaises(NotImplementedError,
                          crashstorage.get_raw_dump, 'ooid')
        self.assertRaises(NotImplementedError,
                          crashstorage.get_unredacted_processed, 'ooid')
        self.assertRaises(NotImplementedError,
                          crashstorage.remove, 'ooid')
        # was the deprecated alias assertEquals; use assertEqual
        self.assertEqual(crashstorage.new_crashes(), [])
        crashstorage.close()
def get_config_context(self, es_index=None):
    """Build a combined crashstorage + middleware config for integration
    tests against the 'socorro_integration_test' resources.

    :arg es_index: optional override for the elasticsearch index name
    :returns: a configman config object layering os.environ over the
        hard-coded integration-test values
    """
    mock_logging = mock.Mock()
    storage_config = \
        crashstorage.ElasticSearchCrashStorage.get_required_config()
    middleware_config = MiddlewareApp.get_required_config()
    middleware_config.add_option('logger', default=mock_logging)

    values_source = {
        'logger': mock_logging,
        'resource.elasticsearch.elasticsearch_default_index':
            'socorro_integration_test',
        'resource.elasticsearch.elasticsearch_index':
            'socorro_integration_test',
        # short retry delay keeps the integration test fast
        'resource.elasticsearch.backoff_delays': [1],
        'resource.elasticsearch.elasticsearch_timeout': 5,
        'resource.postgresql.database_name': 'socorro_integration_test'
    }
    if es_index:
        values_source[
            'resource.elasticsearch.elasticsearch_index'] = es_index

    config_manager = ConfigurationManager(
        [storage_config, middleware_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[os.environ, values_source],
        argv_source=[],
    )
    return config_manager.get_config()
def test_wrapper(self, metricsmock):
    """MetricsBenchmarkingWrapper times only methods named in
    active_list while still delegating every call to the wrapped
    instance."""
    wrapped_class = mock.MagicMock()
    wrapped_class.__name__ = 'Phil'
    config_manager = ConfigurationManager(
        [MetricsBenchmarkingWrapper.get_required_config()],
        values_source_list=[{
            'wrapped_object_class': wrapped_class,
            'metrics_prefix': 'phil',
            'active_list': 'run',
        }],
        argv_source=[])
    with config_manager.context() as config:
        wrapper = MetricsBenchmarkingWrapper(config)
        with metricsmock as mm:
            wrapper.run()
            wrapper.walk()
            # only .run() is in active_list, so exactly one timing record
            assert len(mm.get_records()) == 1
            assert mm.has_record('timing', stat='phil.Phil.run')
            # both calls must be forwarded to the wrapped instance
            wrapped_class.return_value.run.assert_called_with()
            wrapped_class.return_value.walk.assert_called_with()
def get_config(cls, values_source=None):
    """Return config based on the required config of the cls.

    Uses environment configuration plus the class's defaults; entries in
    ``values_source`` override both.

    :arg cls: a configman-enhanced class
    :arg values_source: dict of configurable overrides
    :returns: a configman config object
    """
    overrides = {} if not values_source else values_source
    required = cls.get_required_config()
    required.add_option("logger", default=mock.Mock())
    required.add_option("metrics", default=mock.Mock())
    manager = ConfigurationManager(
        [required],
        app_name="testapp",
        app_version="1.0",
        app_description="",
        values_source_list=[environment, overrides],
        argv_source=[],
    )
    return manager.get_config()
def test_classes_in_namespaces_converter_4(self):
    """With instantiate_classes=True each subordinate namespace carries a
    ready-made 'kls_instance' of its class."""
    source = Namespace()
    source.add_option(
        "kls_list",
        default="configman.tests.test_converters.Alpha, "
        "configman.tests.test_converters.Alpha, "
        "configman.tests.test_converters.Alpha",
        from_string_converter=converters.classes_in_namespaces_converter(
            "kls%d", "kls", instantiate_classes=True
        ),
    )
    overrides = {
        "kls_list": "configman.tests.test_converters.Alpha, "
        "configman.tests.test_converters.Beta, "
        "configman.tests.test_converters.Beta, "
        "configman.tests.test_converters.Alpha"
    }
    config = ConfigurationManager(source, [overrides]).get_config()
    names = config.kls_list.subordinate_namespace_names
    self.assertEqual(len(names), 4)
    for ns in names:
        self.assertTrue(ns in config)
        self.assertTrue("kls_instance" in config[ns])
        self.assertTrue(isinstance(config[ns].kls_instance, config[ns].kls))
def test_with_transactional_resource(self):
    """The decorator adds a 'database' namespace and exposes connection
    and transaction-executor attributes on the decorated cron app."""
    @ctm.with_transactional_resource(
        'socorro.external.postgresql.connection_context.ConnectionContext',
        'database'
    )
    class Alpha(BaseCronApp):
        pass

    # NOTE: removed a stray `self.assertTrue` statement that referenced
    # the method without calling it — a silent no-op.
    self.assertTrue(hasattr(Alpha, "required_config"))
    alpha_required = Alpha.get_required_config()
    self.assertTrue(isinstance(alpha_required, Namespace))
    self.assertTrue('database' in alpha_required)
    self.assertTrue('database_class' in alpha_required.database)
    self.assertTrue(
        'database_transaction_executor_class' in alpha_required.database
    )

    cm = ConfigurationManager(
        definition_source=[Alpha.get_required_config(), ],
        values_source_list=[],
        argv_source=[],
    )
    config = cm.get_config()
    a = Alpha(config, mock.Mock())
    self.assertTrue(hasattr(a, 'database_connection'))
    self.assertTrue(isinstance(
        a.database_connection,
        ConnectionContext
    ))
    self.assertTrue(hasattr(a, 'database_transaction'))
    self.assertTrue(isinstance(
        a.database_transaction,
        TransactionExecutor
    ))
def get_s3_context():
    """Return an S3ConnectionContext built from environment config."""
    manager = ConfigurationManager(
        S3Connection.get_required_config(),
        values_source_list=[environment]
    )
    return S3Connection(manager.get_config())
def test_with_transactional_resource(self):
    """The decorator adds a 'database' namespace and exposes connection
    factory and transaction executor attributes on the cron app."""
    @ctm.with_transactional_resource(
        'crontabber.connection_factory.ConnectionFactory',
        'database')
    class Alpha(BaseCronApp):
        pass

    # NOTE: removed a stray `self.assertTrue` statement that referenced
    # the method without calling it — a silent no-op.
    ok_(hasattr(Alpha, "required_config"))
    alpha_required = Alpha.get_required_config()
    ok_(isinstance(alpha_required, Namespace))
    ok_('database' in alpha_required)
    ok_('database_class' in alpha_required.database)
    ok_('database_transaction_executor_class' in alpha_required.database)

    cm = ConfigurationManager(
        definition_source=[
            Alpha.get_required_config(),
        ],
        values_source_list=[],
        argv_source=[],
    )
    config = cm.get_config()
    a = Alpha(config, mock.Mock())
    ok_(hasattr(a, 'database_connection_factory'))
    ok_(isinstance(a.database_connection_factory, ConnectionFactory))
    ok_(hasattr(a, 'database_transaction_executor'))
    ok_(isinstance(a.database_transaction_executor, TransactionExecutor))
def test_poly_crash_storage_processed_crash_immutability(self):
    """PolyCrashStorage must hand a mutating sub-storage its own copy of
    the processed crash so the caller's dict is never corrupted."""
    n = Namespace()
    n.add_option(
        'storage',
        default=PolyCrashStorage,
    )
    n.add_option(
        'logger',
        default=mock.Mock(),
    )
    value = {
        'storage_classes': ('socorro.unittest.external.test_crashstorage_base'
                            '.MutatingProcessedCrashCrashStorage'),
    }
    cm = ConfigurationManager(n, values_source_list=[value])
    with cm.context() as config:
        raw_crash = {'ooid': '12345'}
        dump = '12345'
        processed_crash = {'foo': 'bar'}
        poly_store = config.storage(config)

        poly_store.save_raw_and_processed(
            raw_crash, dump, processed_crash, 'n'
        )
        # It's important to be aware that the only thing
        # MutatingProcessedCrashCrashStorage class does, in its
        # save_raw_and_processed() is that it deletes a key called
        # 'foo'.
        # This test makes sure that the dict processed_crash here
        # is NOT affected.
        eq_(processed_crash['foo'], 'bar')
def test_classes_in_namespaces_converter_5(self):
    """class_list yields (name, class, namespace) triples following the
    '%(name)s_%(index)02d' naming template."""
    defs = Namespace()
    defs.add_option(
        'kls_list',
        default=('collector.unittest.lib.test_converters.Alpha, '
                 'collector.unittest.lib.test_converters.Alpha, '
                 'collector.unittest.lib.test_converters.Alpha'),
        from_string_converter=str_to_classes_in_namespaces_converter(
            '%(name)s_%(index)02d'))
    manager = ConfigurationManager(defs, [{
        'kls_list': ('collector.unittest.lib.test_converters.Alpha, '
                     'collector.unittest.lib.test_converters.Beta, '
                     'collector.unittest.lib.test_converters.Beta, '
                     'collector.unittest.lib.test_converters.Alpha'),
        'Alpha_00.a': 21,
        'Beta_01.b': 38,
    }])
    config = manager.get_config()
    self.assertEqual(len(config.kls_list.subordinate_namespace_names), 4)
    for index, (name, klass, ns_name) in enumerate(
        config.kls_list.class_list
    ):
        self.assertTrue(isinstance(name, str))
        self.assertEqual(name, klass.__name__)
        self.assertEqual(ns_name, "%s_%02d" % (name, index))
def test_polycrashstorage_processed_immutability_with_nonmutating(self):
    """Verify that when a crash storage declares it does not mutate the
    processed crash, PolyCrashStorage skips the protective deepcopy and
    the sub-storage operates on the caller's dict directly.
    """
    n = Namespace()
    n.add_option(
        'storage',
        default=PolyCrashStorage,
    )
    n.add_option(
        'logger',
        default=mock.Mock(),
    )
    value = {
        'storage_classes': ('socorro.unittest.external.test_crashstorage_base'
                            '.NonMutatingProcessedCrashCrashStorage'),
    }
    cm = ConfigurationManager(n, values_source_list=[value])
    with cm.context() as config:
        raw_crash = {'ooid': '12345'}
        dump = '12345'
        processed_crash = {'foo': 'bar'}
        poly_store = config.storage(config)

        poly_store.save_raw_and_processed(
            raw_crash, dump, processed_crash, 'n'
        )
        # We have a crashstorage that says it's not mutating, but deletes a
        # key so that we can verify that the code went down the right path
        # in the processor.
        assert 'foo' not in processed_crash
def test_basic_setup(self):
    """A fresh registrar starts with the 1999-01-01 sentinel checkin
    time, no processor id, the name 'unknown', and registers once."""
    fake_logger = mock.Mock()
    fake_postgres = mock.Mock()
    required_config = ProcessorAppRegistrationClient.required_config
    required_config.add_option('logger', default=fake_logger)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': fake_logger,
            'database_class': fake_postgres
        }])
    m_registration = mock.Mock()

    class NoRegister(ProcessorAppRegistrationClient):
        # replace the real registration with a counting mock
        _registration = m_registration

    with config_manager.context() as config:
        registrar = NoRegister(config)
        self.assertEqual(
            registrar.last_checkin_ts,
            datetime(1999, 1, 1, tzinfo=UTC)
        )
        self.assertTrue(registrar.processor_id is None)
        self.assertEqual(registrar.processor_name, 'unknown')
        self.assertEqual(m_registration.call_count, 1)
def test_poly_crash_storage_immutability_deeper(self):
    """The copy protection must extend to nested mappings of both
    DotDict flavors (configman's and socorro's)."""
    n = Namespace()
    n.add_option(
        'storage',
        default=PolyCrashStorage,
    )
    n.add_option(
        'logger',
        default=mock.Mock(),
    )
    value = {
        'storage_classes': ('socorro.unittest.external.test_crashstorage_base'
                            '.MutatingProcessedCrashCrashStorage'),
    }
    cm = ConfigurationManager(n, values_source_list=[value])
    with cm.context() as config:
        raw_crash = {'ooid': '12345'}
        dump = '12345'
        # nested values using two different DotDict implementations
        processed_crash = {
            'foo': DotDict({'other': 'thing'}),
            'bar': SocorroDotDict({'something': 'else'}),
        }
        poly_store = config.storage(config)

        poly_store.save_raw_and_processed(
            raw_crash, dump, processed_crash, 'n'
        )
        # the mutating sub-storage must have operated on a copy only
        eq_(processed_crash['foo']['other'], 'thing')
        eq_(processed_crash['bar']['something'], 'else')
def test_for_mapping_long_doc_in_write_conf(self):
    """A long option doc is wrapped so every emitted line stays under
    80 characters."""
    # NOTE: removed a dead `n = self._some_namespaces()` assignment that
    # was immediately overwritten by the Namespace below.
    n = Namespace(doc='top')
    n.add_option(
        'aaa',
        'Default Value Goes In Here',
        'This time the documentation string is really long. So long '
        'that we have to write it on multiple lines.',
    )
    cm = ConfigurationManager(
        n,
        values_source_list=[],
    )
    out = StringIO()
    cm.write_conf(for_mapping, opener=stringIO_context_wrapper(out))
    received = out.getvalue()
    out.close()
    # every emitted line must fit in 80 columns
    for line in received.splitlines():
        self.assertTrue(len(line) < 80, line)
    expected = """
# This time the documentation string is really long. So long that we have to
# write it on multiple lines. (default: 'Default Value Goes In Here')
aaa='Default Value Goes In Here'
""".strip()
    self.assertEqual(received.strip(), expected)
def test_basic_usage_with_postgres_with_backoff(self):
    """A successful transaction through the infinite-backoff executor
    runs the callable once, commits once, and never rolls back."""
    required_config = Namespace()
    required_config.add_option(
        'transaction_executor_class',
        default=TransactionExecutorWithInfiniteBackoff,
        doc='a class that will execute transactions')
    required_config.add_option('database_class',
                               default=MockConnectionFactory,
                               from_string_converter=class_converter)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[environment],
        argv_source=[])
    with config_manager.context() as config:
        mocked_context = config.database_class(config)
        executor = config.transaction_executor_class(
            config,
            mocked_context)
        _function_calls = []  # some mutable

        def mock_function(connection):
            # the executor must hand us the mock connection
            assert isinstance(connection, MockConnection)
            _function_calls.append(connection)

        executor(mock_function)
        ok_(_function_calls)
        # commit_count / rollback_count are module-level counters
        # maintained by the mock connection machinery
        eq_(commit_count, 1)
        eq_(rollback_count, 0)
def test_classes_in_namespaces_converter_4(self):
    """Options inside generated subordinate namespaces accept values
    keyed by their '<Name>_<index>' namespace path."""
    definitions = Namespace()
    definitions.add_option(
        'kls_list',
        default=('socorro.unittest.lib.test_converters.Alpha, '
                 'socorro.unittest.lib.test_converters.Alpha, '
                 'socorro.unittest.lib.test_converters.Alpha'),
        from_string_converter=str_to_classes_in_namespaces_converter(
            '%(name)s_%(index)02d'))
    manager = ConfigurationManager(definitions, [{
        'kls_list': ('socorro.unittest.lib.test_converters.Alpha, '
                     'socorro.unittest.lib.test_converters.Beta, '
                     'socorro.unittest.lib.test_converters.Beta, '
                     'socorro.unittest.lib.test_converters.Alpha'),
        'Alpha_00.a': 21,
        'Beta_01.b': 38,
    }])
    config = manager.get_config()
    names = config.kls_list.subordinate_namespace_names
    assert len(names) == 4
    for ns_name in names:
        assert ns_name in config
    assert config.Alpha_00.a == 21
    assert config.Beta_01.b == 38
def test_operation_error_with_postgres_with_backoff_with_rollback(self):
    """An OperationalError raised while the connection reports an open
    transaction must trigger a rollback before each backoff retry; once
    the cumulative sleep reaches the schedule total the callable stops
    raising and the executor commits exactly once."""
    required_config = Namespace()
    required_config.add_option(
        'transaction_executor_class',
        default=TransactionExecutorWithInfiniteBackoff,
        doc='a class that will execute transactions')
    required_config.add_option('database_class',
                               default=MockConnectionContext,
                               from_string_converter=class_converter)
    mock_logging = MockLogging()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'backoff_delays': [2, 4, 6, 10, 15]
        }],
    )
    with config_manager.context() as config:
        mocked_context = config.database_class(config)
        executor = config.transaction_executor_class(
            config,
            mocked_context)
        _function_calls = []  # some mutable
        _sleep_count = []

        def mock_function(connection):
            assert isinstance(connection, MockConnection)
            # simulate a connection stuck inside a transaction so the
            # executor is forced to roll back before retrying
            connection.transaction_status = \
                psycopg2.extensions.TRANSACTION_STATUS_INTRANS
            _function_calls.append(connection)
            # the default sleep times are going to be,
            # 2, 4, 6, 10, 15
            # so after 2 + 4 + 6 + 10 + 15 seconds
            # all will be exhausted
            if sum(_sleep_count) < sum([2, 4, 6, 10, 15]):
                raise psycopg2.OperationalError('Arh!')

        def mock_sleep(n):
            # record requested delays instead of actually sleeping
            _sleep_count.append(n)

        # monkey patch the sleep function from inside transaction_executor
        _orig_sleep = socorro.database.transaction_executor.time.sleep
        socorro.database.transaction_executor.time.sleep = mock_sleep
        try:
            executor(mock_function)
            self.assertTrue(_function_calls)
            self.assertEqual(commit_count, 1)
            self.assertEqual(rollback_count, 5)
            self.assertTrue(mock_logging.criticals)
            self.assertEqual(len(mock_logging.criticals), 5)
            self.assertTrue(len(_sleep_count) > 10)
        finally:
            # always restore the real sleep, even on assertion failure
            socorro.database.transaction_executor.time.sleep = _orig_sleep
def test_write_with_imported_module_with_regex(self):
    """Writing a 'py' config renders a compiled-regex option back as its
    original pattern string.

    NOTE(review): the exact blank-line layout of the generated module
    header is assumed here — confirm against configman's 'py' writer.
    """
    required_config = Namespace()
    required_config.add_option(
        'identifier',
        doc='just an identifier re',
        default=r'[a-zA-Z][a-zA-Z0-9]*',
        from_string_converter=re.compile
    )
    cm = ConfigurationManager(
        required_config,
        values_source_list=[],
    )
    config = cm.get_config()
    s = StringIO()

    @contextlib.contextmanager
    def s_opener():
        # write_conf expects an opener yielding a file-like object
        yield s

    cm.write_conf('py', s_opener)
    generated_python_module_text = s.getvalue()
    expected = """# generated Python configman file

# just an identifier re
identifier = "[a-zA-Z][a-zA-Z0-9]*"
"""
    self.assertEqual(generated_python_module_text, expected)
def test_basic_usage_with_postgres(self):
    """A plain TransactionExecutor runs the callable once and commits,
    with no rollback."""
    required_config = Namespace()
    required_config.add_option(
        'transaction_executor_class',
        default=TransactionExecutor,
        doc='a class that will execute transactions')
    required_config.add_option('database_class',
                               default=MockConnectionContext,
                               from_string_converter=class_converter)
    mock_logging = MockLogging()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[],
    )
    with config_manager.context() as config:
        connection_context = config.database_class(config)
        executor = config.transaction_executor_class(
            config, connection_context)
        calls_seen = []  # records each connection handed to the callable

        def transaction_body(connection):
            assert isinstance(connection, MockConnection)
            calls_seen.append(connection)

        executor(transaction_body)
        self.assertTrue(calls_seen)
        self.assertEqual(commit_count, 1)
        self.assertEqual(rollback_count, 0)
def test_basic_03_with_some_admin(self):
    """Admin switches on the command line are honored alongside values."""
    option_definitions = self.setup_configman_namespace()
    config = ConfigurationManager(
        definition_source=option_definitions,
        values_source_list=[command_line],
        argv_source=[
            "0",
            "--admin.expose_secrets",
            '--gamma="-1 -2 -3 -4 -5 -6"',
            "--delta",
            "--admin.strict",
        ],
        use_auto_help=False,
    ).get_config()
    expected = {
        "alpha": 0,
        "beta": 'the second',
        "gamma": [-1, -2, -3, -4, -5, -6],
        "delta": True,
        "admin.print_conf": None,
        "admin.dump_conf": '',
        "admin.strict": True,
        "admin.expose_secrets": True
    }
    for key in config.keys_breadth_first():
        self.assertEqual(config[key], expected[key])
def test_wrapper(self, metricsmock):
    """Only methods in active_list produce timing records; all calls are
    forwarded to the wrapped storage instance."""
    stubbed_store_class = mock.MagicMock()
    stubbed_store_class.__name__ = 'Phil'
    config_manager = ConfigurationManager(
        [MetricsBenchmarkingWrapper.get_required_config()],
        values_source_list=[{
            'wrapped_object_class': stubbed_store_class,
            'metrics_prefix': 'phil',
            'active_list': 'run',
        }],
        argv_source=[]
    )
    with config_manager.context() as config:
        benchmarking_wrapper = MetricsBenchmarkingWrapper(config)
        with metricsmock as mm:
            benchmarking_wrapper.run()
            benchmarking_wrapper.walk()
            # only .run() is listed in active_list, so one timing record
            assert len(mm.get_records()) == 1
            assert mm.has_record('timing', stat='phil.Phil.run')
            # both calls reach the wrapped instance regardless of timing
            stubbed_store_class.return_value.run.assert_called_with()
            stubbed_store_class.return_value.walk.assert_called_with()
def test_no_rollback_exception_with_postgres(self):
    """A non-operational error propagates out of the executor without a
    commit or rollback, but does get logged."""
    required_config = Namespace()
    required_config.add_option(
        'transaction_executor_class',
        default=TransactionExecutor,
        doc='a class that will execute transactions'
    )
    mock_logging = MockLogging()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{'database_class': MockConnectionContext}],
    )
    with config_manager.context() as config:
        executor = config.transaction_executor_class(config)

        def failing_body(connection):
            assert isinstance(connection, MockConnection)
            raise NameError('crap!')

        self.assertRaises(NameError, executor, failing_body)
        self.assertEqual(commit_count, 0)
        self.assertEqual(rollback_count, 0)
        self.assertTrue(mock_logging.errors)
def test_basic_05_argparse_overrides_when_appropriate(self):
    """A value given on the command line beats the same key from an
    earlier values source."""
    option_definitions = self.setup_configman_namespace()
    other_value_source = {
        "gamma": [38, 28, 18, 8]
    }
    config = ConfigurationManager(
        definition_source=option_definitions,
        values_source_list=[other_value_source, command_line],
        argv_source=[
            "0",
            "--admin.expose_secrets",
            "--delta",
            "--admin.strict",
            '--gamma="8 18 28 38"',
        ],
        use_auto_help=False,
    ).get_config()
    expected = {
        "alpha": 0,
        "beta": 'the second',
        "gamma": [8, 18, 28, 38],
        "delta": True,
        "admin.print_conf": None,
        "admin.dump_conf": '',
        "admin.strict": True,
        "admin.expose_secrets": True
    }
    for key in config.keys_breadth_first():
        self.assertEqual(config[key], expected[key])
def test_operation_error_with_postgres_with_backoff(self):
    """An OperationalError makes the backoff executor sleep per the
    configured delays and retry; when the callable stops raising the
    transaction commits once, after five rollbacks and five criticals."""
    required_config = Namespace()
    required_config.add_option(
        'transaction_executor_class',
        default=TransactionExecutorWithInfiniteBackoff,
        doc='a class that will execute transactions'
    )
    required_config.add_option(
        'database_class',
        default=MockConnectionContext,
        from_string_converter=class_converter
    )
    mock_logging = MockLogging()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{'backoff_delays': [2, 4, 6, 10, 15]}],
        argv_source=[]
    )
    with config_manager.context() as config:
        mocked_context = config.database_class(config)
        executor = config.transaction_executor_class(config, mocked_context)
        _function_calls = []  # some mutable
        _sleep_count = []

        def mock_function(connection):
            assert isinstance(connection, MockConnection)
            _function_calls.append(connection)
            # the default sleep times are going to be,
            # 2, 4, 6, 10, 15
            # so after 2 + 4 + 6 + 10 + 15 seconds
            # all will be exhausted
            if sum(_sleep_count) < sum([2, 4, 6, 10, 15]):
                raise psycopg2.OperationalError('Arh!')

        def mock_sleep(n):
            # record requested delays instead of actually sleeping
            _sleep_count.append(n)

        # monkey patch the sleep function from inside transaction_executor
        _orig_sleep = socorro.database.transaction_executor.time.sleep
        socorro.database.transaction_executor.time.sleep = mock_sleep
        try:
            executor(mock_function)
            ok_(_function_calls)
            eq_(commit_count, 1)
            eq_(rollback_count, 5)
            ok_(mock_logging.criticals)
            eq_(len(mock_logging.criticals), 5)
            ok_(len(_sleep_count) > 10)
        finally:
            # always restore the real sleep, even on assertion failure
            socorro.database.transaction_executor.time.sleep = _orig_sleep
def test_convert_raw_crash_to_processed_crash_no_rules(self):
    """With an empty rule set the processor still stamps the success
    flag, start/completed timestamps (under both old and new key names)
    and the processor notes onto the processed crash."""
    cm = ConfigurationManager(
        definition_source=Processor2015.get_required_config(),
        values_source_list=[{'rule_sets': '[]'}],
    )
    config = cm.get_config()
    config.logger = Mock()
    config.processor_name = 'dwight'
    p = Processor2015(config)
    raw_crash = DotDict()
    raw_dumps = {}
    # freeze the clock so all timestamps are predictable
    with patch('socorro.processor.processor_2015.utc_now') as faked_utcnow:
        faked_utcnow.return_value = '2015-01-01T00:00:00'
        processed_crash = p.convert_raw_crash_to_processed_crash(
            raw_crash,
            raw_dumps
        )
        ok_(processed_crash.success)
        eq_(processed_crash.started_datetime, '2015-01-01T00:00:00')
        # legacy camelCase/lowercase aliases carry the same values
        eq_(processed_crash.startedDateTime, '2015-01-01T00:00:00')
        eq_(processed_crash.completed_datetime, '2015-01-01T00:00:00')
        eq_(processed_crash.completeddatetime, '2015-01-01T00:00:00')
        eq_(processed_crash.processor_notes, 'dwight; Processor2015')
def test_basic_crashstorage(self):
    """Base crash storage contract: saves are no-ops, getters raise
    NotImplementedError, new_crashes is an exhausted iterator."""
    required_config = Namespace()
    mock_logging = Mock()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
        }]
    )
    with config_manager.context() as config:
        storage = CrashStorageBase(
            config,
            quit_check_callback=fake_quit_check
        )
        storage.save_raw_crash({}, 'payload', 'ooid')
        storage.save_processed({})
        for unimplemented in (storage.get_raw_crash,
                              storage.get_raw_dump,
                              storage.get_processed,
                              storage.remove):
            self.assertRaises(NotImplementedError, unimplemented, 'ooid')
        self.assertRaises(StopIteration, storage.new_crashes)
        storage.close()
def test_Processor2015_init(self):
    """Processor2015 builds its rule system from the 'rule_sets' config.

    Verifies two rule sets are created from rule_set_02_str, each with
    the expected action method and rule classes in order.
    """
    cm = ConfigurationManager(
        definition_source=Processor2015.get_required_config(),
        values_source_list=[{'rule_sets': rule_set_02_str}],
    )
    config = cm.get_config()
    config.logger = Mock()
    p = Processor2015(config)
    ok_(isinstance(p.rule_system, DotDict))
    eq_(len(p.rule_system), 2)

    # NOTE: removed a leftover debug `print` statement (Python-2-only
    # syntax) that dumped p.rule_system.ruleset01 to stdout.
    ok_('ruleset01' in p.rule_system)
    ok_(isinstance(p.rule_system.ruleset01, TransformRuleSystem))
    trs = p.rule_system.ruleset01
    eq_(trs.act, trs.apply_all_rules)
    eq_(len(trs.rules), 2)
    ok_(isinstance(trs.rules[0], BitguardClassifier))
    ok_(isinstance(trs.rules[1], OutOfDateClassifier))

    ok_('ruleset02' in p.rule_system)
    ok_(isinstance(p.rule_system.ruleset02, TransformRuleSystem))
    trs = p.rule_system.ruleset02
    eq_(trs.act, trs.apply_until_action_succeeds)
    eq_(len(trs.rules), 2)
    ok_(isinstance(trs.rules[0], SetWindowPos))
    ok_(isinstance(trs.rules[1], UpdateWindowAttributes))
def test_basic_crashstorage(self):
    """Base crash storage contract: saves are silently accepted, getters
    raise NotImplementedError, and new_crashes() returns an empty list."""
    mock_logging = Mock()
    required_config = Namespace()
    required_config.add_option("logger", default=mock_logging)
    required_config.update(CrashStorageBase.required_config)
    config_manager = ConfigurationManager(
        [required_config],
        app_name="testapp",
        app_version="1.0",
        app_description="app description",
        values_source_list=[{"logger": mock_logging}],
        argv_source=[],
    )
    with config_manager.context() as config:
        storage = CrashStorageBase(config, quit_check_callback=fake_quit_check)
        storage.save_raw_crash({}, "payload", "ooid")
        storage.save_processed({})
        for getter in (storage.get_raw_crash,
                       storage.get_raw_dump,
                       storage.get_unredacted_processed,
                       storage.remove):
            assert_raises(NotImplementedError, getter, "ooid")
        eq_(storage.new_crashes(), [])
        storage.close()
def test_with_transactional_resource(self):
    """The decorator wires a 'database' namespace into required_config
    and exposes connection factory and transaction executor attributes
    on the cron app instance."""
    @ctm.with_transactional_resource(
        'crontabber.connection_factory.ConnectionFactory',
        'database'
    )
    class Alpha(BaseCronApp):
        pass

    # NOTE: removed a stray `self.assertTrue` statement that referenced
    # the method without calling it — a silent no-op.
    ok_(hasattr(Alpha, "required_config"))
    alpha_required = Alpha.get_required_config()
    ok_(isinstance(alpha_required, Namespace))
    ok_('database' in alpha_required)
    ok_('database_class' in alpha_required.database)
    ok_(
        'database_transaction_executor_class' in alpha_required.database
    )

    cm = ConfigurationManager(
        definition_source=[Alpha.get_required_config(), ],
        values_source_list=[environment],
        argv_source=[],
    )
    config = cm.get_config()
    a = Alpha(config, mock.Mock())
    ok_(hasattr(a, 'database_connection_factory'))
    ok_(isinstance(a.database_connection_factory, ConnectionFactory))
    ok_(hasattr(a, 'database_transaction_executor'))
    ok_(isinstance(a.database_transaction_executor, TransactionExecutor))
def test_requested_processor_id(self):
    """_requested_processor_id passes valid ids through and rejects junk."""
    logger = mock.Mock()
    db = mock.Mock()
    required_config = ProcessorAppRegistrationClient.required_config
    required_config.add_option('logger', default=logger)
    manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': logger,
            'database_class': db
        }]
    )
    registration_mock = mock.Mock()

    class NoRegister(ProcessorAppRegistrationClient):
        # suppress the real registration side effect at construction
        _registration = registration_mock

    with manager.context() as config:
        registrar = NoRegister(config)
        # ints and the special strings 'host'/'auto' come back unchanged
        for requested in (0, 1, 'host', 'auto'):
            self.assertEqual(
                registrar._requested_processor_id(requested),
                requested
            )
        # any other string is rejected
        self.assertRaises(
            ValueError, registrar._requested_processor_id, 'dwight'
        )
def test_failure_limited_retry(self, pyes_mock):
    """A persistent ES timeout exhausts limited backoff and propagates."""
    logger = mock.Mock()
    es_mock = mock.Mock()
    pyes_mock.ElasticSearch.return_value = es_mock
    required_config = ElasticSearchCrashStorage.get_required_config()
    required_config.add_option('logger', default=logger)
    manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': logger,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
            'timeout': 0,
            'backoff_delays': [0, 0, 0],
            'transaction_executor_class':
                TransactionExecutorWithLimitedBackoff
        }],
        argv_source=[]
    )
    with manager.context() as config:
        es_storage = ElasticSearchCrashStorage(config)
        # every index() call times out, so all retries are consumed
        es_mock.index.side_effect = pyelasticsearch.exceptions.Timeout
        crash_id = a_processed_crash['uuid']
        assert_raises(
            pyelasticsearch.exceptions.Timeout,
            es_storage.save_raw_and_processed,
            a_raw_crash,
            None,
            a_processed_crash.copy(),
            crash_id,
        )
        # the final attempt must have carried the complete document
        expected_document = {
            'crash_id': crash_id,
            'processed_crash': a_processed_crash.copy(),
            'raw_crash': a_raw_crash
        }
        es_mock.index.assert_called_with(
            'socorro201214',
            'crash_reports',
            expected_document,
            id=crash_id,
        )
def test_process_crash_existing_processed_crash(self):
    """Reprocessing keeps earlier processor notes and re-stamps timestamps."""
    cm = ConfigurationManager(
        definition_source=Processor2015.get_required_config(),
        values_source_list=[{'rule_sets': '[]'}],
    )
    config = cm.get_config()
    config.logger = Mock()
    config.processor_name = 'dwight'
    processor = Processor2015(config)
    raw_crash = DotDict()
    raw_dumps = {}
    previously_processed = DotDict()
    previously_processed.processor_notes = "we've been here before; yep"
    previously_processed.started_datetime = '2014-01-01T00:00:00'
    with patch('socorro.processor.processor_2015.utc_now') as faked_utcnow:
        faked_utcnow.return_value = '2015-01-01T00:00:00'
        result = processor.process_crash(
            raw_crash,
            raw_dumps,
            previously_processed
        )
    ok_(result.success)
    # all four timestamp spellings are refreshed to "now"
    eq_(result.started_datetime, '2015-01-01T00:00:00')
    eq_(result.startedDateTime, '2015-01-01T00:00:00')
    eq_(result.completed_datetime, '2015-01-01T00:00:00')
    eq_(result.completeddatetime, '2015-01-01T00:00:00')
    # fresh notes are prepended; the old notes survive with their timestamp
    eq_(
        result.processor_notes,
        "dwight; Processor2015; earlier processing: 2014-01-01T00:00:00; "
        "we've been here before; yep"
    )
def get_tuned_config(self, sources, extra_values=None):
    """Build a test config from one or more required-config sources.

    `sources` may be a single object or a list/tuple; each contributes
    its required config plus a mock logger.  DEFAULT_VALUES, overlaid
    with any `extra_values`, supplies the option values.
    """
    source_list = (
        sources if isinstance(sources, (list, tuple)) else [sources]
    )
    mock_logging = mock.Mock()
    definitions = []
    for src in source_list:
        required = src.get_required_config()
        required.add_option('logger', default=mock_logging)
        definitions.append(required)
    values = DEFAULT_VALUES.copy()
    values.update({'logger': mock_logging})
    if extra_values:
        values.update(extra_values)
    manager = ConfigurationManager(
        definitions,
        app_name='testapp',
        app_version='1.0',
        app_description='Elasticsearch integration tests',
        values_source_list=[environment, values],
        argv_source=[],
    )
    return manager.get_config()
def test_classes_in_namespaces_converter_4(self):
    """instantiate_classes=True puts a 'kls_instance' in each namespace."""
    n = Namespace()
    n.add_option(
        'kls_list',
        default='configman.tests.test_converters.Alpha, '
                'configman.tests.test_converters.Alpha, '
                'configman.tests.test_converters.Alpha',
        from_string_converter=converters.classes_in_namespaces_converter(
            'kls%d', 'kls', instantiate_classes=True
        )
    )
    cm = ConfigurationManager(
        n,
        [{'kls_list': 'configman.tests.test_converters.Alpha, '
                      'configman.tests.test_converters.Beta, '
                      'configman.tests.test_converters.Beta, '
                      'configman.tests.test_converters.Alpha'}]
    )
    config = cm.get_config()
    namespace_names = config.kls_list.subordinate_namespace_names
    self.assertEqual(len(namespace_names), 4)
    for ns_name in namespace_names:
        # each generated namespace holds an instance of its own class
        self.assertTrue(ns_name in config)
        self.assertTrue('kls_instance' in config[ns_name])
        self.assertTrue(
            isinstance(config[ns_name].kls_instance, config[ns_name].kls)
        )
def test_basic_usage_with_postgres(self):
    """TransactionExecutor commits exactly once for a successful function.

    Runs a trivial function through the plain (no-backoff) executor and
    verifies one commit and zero rollbacks on the happy path.
    """
    required_config = Namespace()
    required_config.add_option(
        'transaction_executor_class',
        # removed commented-out dead default (TransactionExecutorWithBackoff)
        default=TransactionExecutor,
        doc='a class that will execute transactions'
    )
    required_config.add_option(
        'database_class',
        default=MockConnectionContext,
        from_string_converter=class_converter
    )
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[],
    )
    with config_manager.context() as config:
        mocked_context = config.database_class(config)
        executor = config.transaction_executor_class(
            config, mocked_context
        )
        _function_calls = []  # mutable cell to observe the call

        def mock_function(connection):
            assert isinstance(connection, MockConnection)
            _function_calls.append(connection)

        executor(mock_function)
        self.assertTrue(_function_calls)
        # exactly one commit, no rollback on success
        self.assertEqual(commit_count, 1)
        self.assertEqual(rollback_count, 0)
def get_config_context(self, es_index=None):
    """Build a config for ES crashstorage + middleware integration tests.

    If `es_index` is given (non-empty) it overrides the default
    integration-test index name.
    """
    mock_logging = mock.Mock()
    storage_config = \
        crashstorage.ElasticSearchCrashStorage.get_required_config()
    middleware_config = MiddlewareApp.get_required_config()
    middleware_config.add_option('logger', default=mock_logging)
    values_source = {
        'logger': mock_logging,
        'elasticsearch_index': es_index or 'socorro_integration_test',
        'backoff_delays': [1],
        'elasticsearch_timeout': 5,
    }
    manager = ConfigurationManager(
        [storage_config, middleware_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[os.environ, values_source],
        argv_source=[],
    )
    with manager.context() as config:
        # This is an ugly hack to compensate for a bug in configman.
        # See https://github.com/mozilla/configman/issues/103
        config.backoff_delays = [1]
        return config
def get_config_context(self, es_index=None):
    """Build an ES integration-test config using resource.* dotted keys.

    `es_index`, when given, replaces the default reports index name.
    """
    mock_logging = mock.Mock()
    storage_config = \
        crashstorage.ElasticSearchCrashStorage.get_required_config()
    middleware_config = MiddlewareApp.get_required_config()
    middleware_config.add_option('logger', default=mock_logging)
    overrides = {
        'logger': mock_logging,
        'resource.elasticsearch.elasticsearch_default_index':
            'socorro_integration_test',
        'resource.elasticsearch.elasticsearch_index':
            'socorro_integration_test_reports',
        'resource.elasticsearch.backoff_delays': [1],
        'resource.elasticsearch.elasticsearch_timeout': 10,
    }
    if es_index:
        overrides['resource.elasticsearch.elasticsearch_index'] = es_index
    manager = ConfigurationManager(
        [storage_config, middleware_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[os.environ, overrides],
        argv_source=[],
    )
    return manager.get_config()
def test_basic_postgres_save_processed_success_3_truncations(self):
    """_save_processed_report issues the expected upsert SQL.

    Feeds a processed crash whose string fields are all over-long and
    asserts the UPDATE/INSERT CTE statement is executed with the
    truncated parameter list (doubled: once for UPDATE, once for INSERT).
    """
    mock_logging = mock.Mock()
    mock_postgres = mock.Mock()
    required_config = PostgreSQLCrashStorage.get_required_config()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
            'database_class': mock_postgres
        }],
        argv_source=[]
    )
    with config_manager.context() as config:
        crashstorage = PostgreSQLCrashStorage(config)
        with mock.patch(
            'socorro.external.postgresql.crashstorage.single_value_sql'
        ) as mocked_sql_execute:
            # NOTE(review): the trailing comma makes fake_connection a
            # 1-tuple containing a Mock -- confirm that is intentional
            # and not a typo for a bare mock.Mock()
            fake_connection = mock.Mock(),
            crashstorage._save_processed_report(
                fake_connection,
                a_processed_crash_with_everything_too_long
            )
            # the SQL text must match byte-for-byte, and the parameter
            # sequence is the truncated report repeated twice
            mocked_sql_execute.assert_called_with(
                fake_connection,
                "\n WITH\n update_report AS (\n UPDATE reports_20120402 SET\n addons_checked = %s, address = %s, app_notes = %s, build = %s, client_crash_date = %s, completed_datetime = %s, cpu_info = %s, cpu_name = %s, date_processed = %s, distributor = %s, distributor_version = %s, email = %s, exploitability = %s, flash_version = %s, hangid = %s, install_age = %s, last_crash = %s, os_name = %s, os_version = %s, processor_notes = %s, process_type = %s, product = %s, productid = %s, reason = %s, release_channel = %s, signature = %s, started_datetime = %s, success = %s, topmost_filenames = %s, truncated = %s, uptime = %s, user_comments = %s, user_id = %s, url = %s, uuid = %s, version = %s\n WHERE uuid = %s\n RETURNING id\n ),\n insert_report AS (\n INSERT INTO reports_20120402 (addons_checked, address, app_notes, build, client_crash_date, completed_datetime, cpu_info, cpu_name, date_processed, distributor, distributor_version, email, exploitability, flash_version, hangid, install_age, last_crash, os_name, os_version, processor_notes, process_type, product, productid, reason, release_channel, signature, started_datetime, success, topmost_filenames, truncated, uptime, user_comments, user_id, url, uuid, version)\n ( SELECT\n %s as addons_checked, %s as address, %s as app_notes, %s as build, %s as client_crash_date, %s as completed_datetime, %s as cpu_info, %s as cpu_name, %s as date_processed, %s as distributor, %s as distributor_version, %s as email, %s as exploitability, %s as flash_version, %s as hangid, %s as install_age, %s as last_crash, %s as os_name, %s as os_version, %s as processor_notes, %s as process_type, %s as product, %s as productid, %s as reason, %s as release_channel, %s as signature, %s as started_datetime, %s as success, %s as topmost_filenames, %s as truncated, %s as uptime, %s as user_comments, %s as user_id, %s as url, %s as uuid, %s as version\n WHERE NOT EXISTS (\n SELECT uuid from reports_20120402\n WHERE\n uuid = %s\n LIMIT 1\n )\n )\n RETURNING id\n )\n SELECT * from update_report\n UNION ALL\n SELECT * from insert_report\n ",
                a_processed_report_with_everything_truncated * 2
            )
def test_classes_in_namespaces_converter_4(self):
    """Namespaced class options accept per-namespace value overrides."""
    namespace = Namespace()
    namespace.add_option(
        'kls_list',
        default=(
            'socorro.unittest.lib.test_converters.Alpha, '
            'socorro.unittest.lib.test_converters.Alpha, '
            'socorro.unittest.lib.test_converters.Alpha'
        ),
        from_string_converter=str_to_classes_in_namespaces_converter(
            '%(name)s_%(index)02d'
        )
    )
    overrides = {
        'kls_list': (
            'socorro.unittest.lib.test_converters.Alpha, '
            'socorro.unittest.lib.test_converters.Beta, '
            'socorro.unittest.lib.test_converters.Beta, '
            'socorro.unittest.lib.test_converters.Alpha'
        ),
        'Alpha_00.a': 21,
        'Beta_01.b': 38,
    }
    cm = ConfigurationManager(namespace, [overrides])
    config = cm.get_config()
    self.assertEqual(len(config.kls_list.subordinate_namespace_names), 4)
    for ns_name in config.kls_list.subordinate_namespace_names:
        self.assertTrue(ns_name in config)
    # the per-namespace overrides landed on the right options
    self.assertEqual(config.Alpha_00.a, 21)
    self.assertEqual(config.Beta_01.b, 38)
def test_classes_in_namespaces_converter_5(self):
    """class_list triples carry (name, class, namespace-name) in order."""
    namespace = Namespace()
    namespace.add_option(
        'kls_list',
        default=(
            'socorro.unittest.lib.test_converters.Alpha, '
            'socorro.unittest.lib.test_converters.Alpha, '
            'socorro.unittest.lib.test_converters.Alpha'
        ),
        from_string_converter=str_to_classes_in_namespaces_converter(
            '%(name)s_%(index)02d'
        )
    )
    overrides = {
        'kls_list': (
            'socorro.unittest.lib.test_converters.Alpha, '
            'socorro.unittest.lib.test_converters.Beta, '
            'socorro.unittest.lib.test_converters.Beta, '
            'socorro.unittest.lib.test_converters.Alpha'
        ),
        'Alpha_00.a': 21,
        'Beta_01.b': 38,
    }
    cm = ConfigurationManager(namespace, [overrides])
    config = cm.get_config()
    self.assertEqual(len(config.kls_list.subordinate_namespace_names), 4)
    # each triple must be internally consistent and positionally numbered
    for index, (class_name, klass, ns_name) in enumerate(
        config.kls_list.class_list
    ):
        self.assertTrue(isinstance(class_name, str))
        self.assertEqual(class_name, klass.__name__)
        self.assertEqual(ns_name, "%s_%02d" % (class_name, index))
def test_basic_key_error_on_save_processed(self):
    """save_processed raises KeyError for an unrecognized crash field."""
    mock_logging = mock.Mock()
    mock_postgres = mock.Mock()
    required_config = PostgreSQLCrashStorage.required_config
    required_config.add_option('logger', default=mock_logging)
    manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
            'database_class': mock_postgres
        }]
    )
    with manager.context() as config:
        crashstorage = PostgreSQLCrashStorage(config)
        database = crashstorage.database.return_value = mock.MagicMock()
        self.assertTrue(isinstance(database, mock.Mock))
        # 'unknown_field' is not a reports column -> the save must fail
        broken_processed_crash = {
            "product": "Peter",
            "version": "1.0B3",
            "ooid": "abc123",
            "submitted_timestamp": time.time(),
            "unknown_field": 'whatever'
        }
        self.assertRaises(
            KeyError,
            crashstorage.save_processed,
            broken_processed_crash
        )
def test_poly_crash_storage_immutability_deeper(self):
    """A mutating child store must not alter nested crash structures."""
    definitions = Namespace()
    definitions.add_option(
        'storage',
        default=PolyCrashStorage,
    )
    definitions.add_option(
        'logger',
        default=mock.Mock(),
    )
    overrides = {
        'storage_classes': (
            'socorro.unittest.external.test_crashstorage_base'
            '.MutatingProcessedCrashCrashStorage'
        ),
    }
    cm = ConfigurationManager(definitions, values_source_list=[overrides])
    with cm.context() as config:
        raw_crash = {'ooid': '12345'}
        dump = '12345'
        processed_crash = {
            'foo': DotDict({'other': 'thing'}),
            'bar': SocorroDotDict({'something': 'else'}),
        }
        poly_store = config.storage(config)
        poly_store.save_raw_and_processed(
            raw_crash, dump, processed_crash, 'n'
        )
        # the caller's nested dicts must come back untouched
        eq_(processed_crash['foo']['other'], 'thing')
        eq_(processed_crash['bar']['something'], 'else')
def test_basic_setup(self):
    """A fresh registration client starts unregistered and registers once."""
    logger = mock.Mock()
    db = mock.Mock()
    required_config = ProcessorAppRegistrationClient.required_config
    required_config.add_option('logger', default=logger)
    manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': logger,
            'database_class': db
        }]
    )
    registration_mock = mock.Mock()

    class NoRegister(ProcessorAppRegistrationClient):
        # intercept the registration side effect for inspection
        _registration = registration_mock

    with manager.context() as config:
        registrar = NoRegister(config)
        # sentinel "never checked in" timestamp
        self.assertEqual(
            registrar.last_checkin_ts, datetime(1999, 1, 1, tzinfo=UTC)
        )
        self.assertTrue(registrar.processor_id is None)
        self.assertEqual(registrar.processor_name, 'unknown')
        # construction triggers exactly one registration attempt
        self.assertEqual(registration_mock.call_count, 1)
def test_convert_raw_crash_to_processed_crash_no_rules(self):
    """With an empty rule set the processor only stamps crash metadata."""
    cm = ConfigurationManager(
        definition_source=Processor2015.get_required_config(),
        values_source_list=[{'rule_sets': '[]'}],
    )
    config = cm.get_config()
    config.logger = Mock()
    config.processor_name = 'dwight'
    processor = Processor2015(config)
    raw_crash = DotDict()
    raw_dumps = {}
    with patch('socorro.processor.processor_2015.utc_now') as faked_utcnow:
        faked_utcnow.return_value = '2015-01-01T00:00:00'
        processed_crash = processor.convert_raw_crash_to_processed_crash(
            raw_crash,
            raw_dumps
        )
    ok_(processed_crash.success)
    # all four timestamp spellings are set to the mocked "now"
    eq_(processed_crash.started_datetime, '2015-01-01T00:00:00')
    eq_(processed_crash.startedDateTime, '2015-01-01T00:00:00')
    eq_(processed_crash.completed_datetime, '2015-01-01T00:00:00')
    eq_(processed_crash.completeddatetime, '2015-01-01T00:00:00')
    # notes contain only the processor identity -- no rules ran
    eq_(processed_crash.processor_notes, 'dwight; Processor2015')
def main(app_object=None):
    """Generic configman application launcher.

    Resolves `app_object` (class, module, function, or dotted-path
    string) into an application, builds its configuration from config
    file, environment, and command line, then invokes it.
    """
    if isinstance(app_object, basestring):
        # a dotted-path string is resolved to the actual class/module
        app_object = class_converter(app_object)
    # the only config parameter is a special one that refers to a class or
    # module that defines an application.  In order to qualify, a class must
    # have a constructor that accepts a DotDict derivative as the sole
    # input parameter.  It must also have a 'main' function that accepts no
    # parameters.  For a module to be acceptable, it must have a main
    # function that accepts a DotDict derivative as its input parameter.
    app_definition = Namespace()
    app_definition.admin = admin = Namespace()
    admin.add_option('application',
                     doc='the fully qualified module or class of the '
                         'application',
                     default=app_object,
                     from_string_converter=class_converter)
    app_name = getattr(app_object, 'app_name', 'unknown')
    app_version = getattr(app_object, 'app_version', '0.0')
    app_description = getattr(app_object, 'app_description', 'no idea')
    # create an iterable collection of value sources.
    # the order is important as these will supply values for the sources
    # defined in the_definition_source.  The values will be overlain in turn:
    # first the os.environ values, then any values from an ini file parsed
    # by getopt, and finally any values supplied on the command line.
    value_sources = (
        ConfigFileFutureProxy,  # alias for allowing the user
                                # to specify a config file on
                                # the command line
        environment,            # alias for os.environ
        command_line)           # alias for getopt
    # set up the manager with the definitions and values.
    # it isn't necessary to provide the app_name because the
    # app_object passed in or loaded by the ConfigurationManager will
    # already have that information.
    config_manager = ConfigurationManager(
        app_definition,
        value_sources,
        app_name=app_name,
        app_version=app_version,
        app_description=app_description,
    )
    config = config_manager.get_config()
    app_object = config.admin.application
    if isinstance(app_object, type):
        # invocation of the app if the app_object was a class
        instance = app_object(config)
        instance.main()
    elif inspect.ismodule(app_object):
        # invocation of the app if the app_object was a module
        app_object.main(config)
    elif inspect.isfunction(app_object):
        # invocation of the app if the app_object was a function
        app_object(config)
    # NOTE(review): any other app_object type falls through silently with
    # no error -- confirm that is the intended behavior
def get_standard_config(cls):
    """Return a config built solely from this class's required_config."""
    manager = ConfigurationManager(
        [cls.required_config],
        app_name='PostgreSQLTestCase',
        app_description=__doc__,
        argv_source=[]
    )
    with manager.context() as config:
        return config
def get_conn():
    """Create a ConnectionContext configured from the environment only.

    Command-line arguments are deliberately ignored: the configuration
    manager reads values solely from os.environ.
    """
    manager = ConfigurationManager(
        ConnectionContext.get_required_config(),
        values_source_list=[environment]
    )
    return ConnectionContext(manager.get_config())
def get_standard_config(self):
    """Return a config built solely from this instance's required_config."""
    manager = ConfigurationManager(
        [self.required_config],
        app_name='ElasticSearchTestCase',
        app_description=__doc__,
        argv_source=[]
    )
    with manager.context() as config:
        return config
def test_setup_definitions_2(self):
    """A bare class in a definition dict gets class_converter attached."""
    cm = ConfigurationManager({'cls': MooseBase}, values_source_list=[])
    config = cm.get_config()
    # the option must have picked up class_converter for string parsing
    self.assertTrue(
        cm.option_definitions.cls.from_string_converter is class_converter
    )
    # and the default value survives as the class object itself
    self.assertTrue(config.cls is MooseBase)
def test_success_after_limited_retry(self):
    """save_processed succeeds on the third attempt after two timeouts.

    The backoff executor allows three tries; the mocked urlopen raises
    socket.timeout twice and then returns, so the save must complete and
    urlopen must have been called exactly three times.
    """
    mock_logging = mock.Mock()
    required_config = ElasticSearchCrashStorage.required_config
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
            'submission_url': 'http://elasticsearch_host/%s',
            'timeout': 0,
            'backoff_delays': [0, 0, 0],  # three tries, no waiting
            'transaction_executor_class':
                TransactionExecutorWithLimitedBackoff
        }])
    with config_manager.context() as config:
        es_storage = ElasticSearchCrashStorage(config)
        urllib_str = 'socorro.external.elasticsearch.crashstorage.urllib2'
        m_request = mock.Mock()
        m_urlopen = mock.Mock()
        with mock.patch(urllib_str) as mocked_urllib:
            mocked_urllib.Request = m_request
            m_request.return_value = 17
            mocked_urllib.urlopen = m_urlopen
            # the first two urlopen calls raise; the third succeeds
            urlopen_results = [
                urllib2.socket.timeout,
                urllib2.socket.timeout
            ]

            def urlopen_fn(*args, **kwargs):
                # pop a queued exception; when the queue is empty the
                # IndexError path returns a successful result instead
                try:
                    r = urlopen_results.pop(0)
                    raise r
                except IndexError:
                    return m_urlopen

            m_urlopen.side_effect = urlopen_fn
            es_storage.save_processed(a_processed_crash)
            # the crash-id-substituted submission URL was requested
            expected_request_args = (
                'http://elasticsearch_host/9120408936ce666-ff3b-4c7a-9674-'
                '367fe2120408',
                {},
            )
            m_request.assert_called_with(*expected_request_args)
            # two failures + one success = three urlopen calls
            self.assertEqual(m_urlopen.call_count, 3)
            expected_urlopen_args = (17, )
            expected_urlopen_kwargs = {'timeout': 0}
            m_urlopen.assert_called_with(*expected_urlopen_args,
                                         **expected_urlopen_kwargs)
def test_failure_limited_retry(self, pyes_mock):
    """A persistent ES timeout exhausts limited backoff and propagates.

    Variant of the test above: uses self.assertRaises and additionally
    expects the async 'replication' keyword on the index() call.
    """
    mock_logging = mock.Mock()
    mock_es = mock.Mock()
    pyes_mock.ElasticSearch.return_value = mock_es
    required_config = ElasticSearchCrashStorage.get_required_config()
    required_config.add_option('logger', default=mock_logging)
    config_manager = ConfigurationManager(
        [required_config],
        app_name='testapp',
        app_version='1.0',
        app_description='app description',
        values_source_list=[{
            'logger': mock_logging,
            'elasticsearch_urls': 'http://elasticsearch_host:9200',
            'timeout': 0,
            'backoff_delays': [0, 0, 0],  # three tries, no waiting
            'transaction_executor_class':
                TransactionExecutorWithLimitedBackoff
        }],
        argv_source=[])
    with config_manager.context() as config:
        es_storage = ElasticSearchCrashStorage(config)
        failure_exception = pyelasticsearch.exceptions.Timeout
        # every index() call times out, so all retries are consumed
        mock_es.index.side_effect = failure_exception
        crash_id = a_processed_crash['uuid']
        self.assertRaises(
            pyelasticsearch.exceptions.Timeout,
            es_storage.save_raw_and_processed,
            a_raw_crash,
            None,
            a_processed_crash.copy(),
            crash_id,
        )
        # the final attempt must have carried the complete document
        expected_crash = {
            'crash_id': crash_id,
            'processed_crash': a_processed_crash.copy(),
            'raw_crash': a_raw_crash
        }
        expected_request_args = ('socorro201214',
                                 'crash_reports',
                                 expected_crash)
        expected_request_kwargs = {
            'replication': 'async',
            'id': crash_id,
        }
        mock_es.index.assert_called_with(*expected_request_args,
                                         **expected_request_kwargs)