def get_dummy_context(self): """Create a dummy config object to use when testing.""" context = util.DotDict() context.database = util.DotDict({ 'database_host': 'somewhere', 'database_port': '8888', 'database_name': 'somename', 'database_user': '******', 'database_password': '******', }) context.webapi = util.DotDict() context.webapi.platforms = ( { "id": "windows", "name": "Windows NT" }, { "id": "linux", "name": "Linux" }, { "id": "mac", "name": "Mac OS X" } ) return context
def testCrashStorageSystemForHBase___init__():
    d = util.DotDict()
    j = util.DotDict()
    d.hbaseHost = 'fred'
    d.hbasePort = 'ethel'
    d.hbaseTimeout = 9000
    j.root = d.hbaseFallbackFS = '.'
    d.throttleConditions = []
    j.maxDirectoryEntries = d.hbaseFallbackDumpDirCount = 1000000
    j.jsonSuffix = d.jsonFileSuffix = '.json'
    j.dumpSuffix = d.dumpFileSuffix = '.dump'
    j.dumpGID = d.hbaseFallbackdumpGID = 666
    j.dumpPermissions = d.hbaseFallbackDumpPermissions = 660
    j.dirPermissions = d.hbaseFallbackDirPermissions = 770
    j.logger = d.logger = util.SilentFakeLogger()
    fakeHbaseConnection = exp.DummyObjectWithExpectations(
        'fakeHbaseConnection')
    fakeHbaseModule = exp.DummyObjectWithExpectations('fakeHbaseModule')
    fakeHbaseModule.expect('HBaseConnectionForCrashReports',
                           (d.hbaseHost, d.hbasePort, d.hbaseTimeout),
                           {"logger": d.logger},
                           fakeHbaseConnection, None)
    fakeHbaseConnection.expect('hbaseThriftExceptions', None, None, (), None)
    fakeHbaseModule.expect('NoConnectionException', None, None,
                           hbc.NoConnectionException, None)
    fakeJsonDumpStore = exp.DummyObjectWithExpectations('fakeJsonDumpStore')
    fakeJsonDumpModule = exp.DummyObjectWithExpectations('fakeJsonDumpModule')
    fakeJsonDumpModule.expect('JsonDumpStorage', (), j, fakeJsonDumpStore,
                              None)
    css = cstore.CrashStorageSystemForHBase(d, configPrefix='',
                                            hbaseClient=fakeHbaseModule,
                                            jsonDumpStorage=fakeJsonDumpModule)
    assert css.hbaseConnection == fakeHbaseConnection

def testCrashStorageForDualHbaseCrashStorageSystem01():
    import socorro.storage.hbaseClient as hbc
    d = util.DotDict()
    j = util.DotDict()
    d.hbaseHost = 'fred'
    d.secondaryHbaseHost = 'barney'
    d.hbasePort = 'ethel'
    d.secondaryHbasePort = 'betty'
    d.hbaseTimeout = 3000
    d.secondaryHbaseTimeout = 10000
    j.root = d.hbaseFallbackFS = '.'
    d.throttleConditions = []
    j.maxDirectoryEntries = d.hbaseFallbackDumpDirCount = 1000000
    j.jsonSuffix = d.jsonFileSuffix = '.json'
    j.dumpSuffix = d.dumpFileSuffix = '.dump'
    j.dumpGID = d.hbaseFallbackdumpGID = 666
    j.dumpPermissions = d.hbaseFallbackDumpPermissions = 660
    j.dirPermissions = d.hbaseFallbackDirPermissions = 770
    j.logger = d.logger = util.SilentFakeLogger()
    fakeHbaseConnection1 = exp.DummyObjectWithExpectations(
        'fakeHbaseConnection1')
    fakeHbaseConnection2 = exp.DummyObjectWithExpectations(
        'fakeHbaseConnection2')
    fakeHbaseConnection1.expect('hbaseThriftExceptions', None, None, (), None)
    fakeHbaseConnection1.expect('get_json', ('fakeOoid1', ),
                                {'number_of_retries': 2}, 'fake_json1')
    fakeHbaseConnection1.expect('get_json', ('fakeOoid2', ),
                                {'number_of_retries': 2}, None,
                                hbc.OoidNotFoundException())
    fakeHbaseConnection2.expect('hbaseThriftExceptions', None, None, (), None)
    fakeHbaseConnection2.expect('get_json', ('fakeOoid2', ),
                                {'number_of_retries': 2}, 'fake_json2')
    fakeHbaseModule = exp.DummyObjectWithExpectations('fakeHbaseModule')
    fakeHbaseModule.expect('HBaseConnectionForCrashReports',
                           (d.hbaseHost, d.hbasePort, d.hbaseTimeout),
                           {"logger": d.logger},
                           fakeHbaseConnection1, None)
    fakeHbaseModule.expect('NoConnectionException', None, None,
                           hbc.NoConnectionException, None)
    fakeHbaseModule.expect(
        'HBaseConnectionForCrashReports',
        (d.secondaryHbaseHost, d.secondaryHbasePort, d.secondaryHbaseTimeout),
        {"logger": d.logger},
        fakeHbaseConnection2, None)
    fakeHbaseModule.expect('NoConnectionException', None, None,
                           hbc.NoConnectionException, None)
    fakeJsonDumpStore = exp.DummyObjectWithExpectations('fakeJsonDumpStore')
    fakeJsonDumpModule = exp.DummyObjectWithExpectations('fakeJsonDumpModule')
    fakeJsonDumpModule.expect('JsonDumpStorage', (), j, fakeJsonDumpStore,
                              None)
    fakeJsonDumpModule.expect('JsonDumpStorage', (), j, fakeJsonDumpStore,
                              None)
    css = cstore.DualHbaseCrashStorageSystem(
        d,
        hbaseClient=fakeHbaseModule,
        jsonDumpStorage=fakeJsonDumpModule)
    assert css.hbaseConnection == fakeHbaseConnection1
    assert css.fallbackHBase.hbaseConnection == fakeHbaseConnection2
    result = css.get_meta('fakeOoid1')
    assert result == 'fake_json1'
    result = css.get_meta('fakeOoid2')
    assert result == 'fake_json2'

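# The dual-storage test above demonstrates the fallback contract: the primary
# connection answers for 'fakeOoid1', raises OoidNotFoundException for
# 'fakeOoid2', and only then is the secondary connection consulted, so
# get_meta() transparently returns 'fake_json2' from the fallback store.
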
def get_dummy_context(self):
    """Create a dummy config object to use when testing."""
    context = util.DotDict()
    context.database = util.DotDict({
        'database_hostname': 'somewhere',
        'database_port': '8888',
        'database_name': 'somename',
        'database_username': '******',
        'database_password': '******',
    })
    return context

def testCrashStorageSystemForHBase_save_1():
    """straight save into hbase with no trouble"""
    currentTimestamp = 'now'
    expectedDumpResult = '1234567890/n'
    jdict = util.DotDict({
        'ProductName': 'FireFloozy',
        'Version': '3.6',
        'legacy_processing': 1
    })
    d = util.DotDict()
    j = util.DotDict()
    d.hbaseHost = 'fred'
    d.hbasePort = 'ethel'
    d.hbaseTimeout = 9000
    j.root = d.hbaseFallbackFS = '.'
    d.throttleConditions = []
    j.maxDirectoryEntries = d.hbaseFallbackDumpDirCount = 1000000
    j.jsonSuffix = d.jsonFileSuffix = '.json'
    j.dumpSuffix = d.dumpFileSuffix = '.dump'
    j.dumpGID = d.hbaseFallbackdumpGID = 666
    j.dumpPermissions = d.hbaseFallbackDumpPermissions = 660
    j.dirPermissions = d.hbaseFallbackDirPermissions = 770
    d.logger = util.SilentFakeLogger()
    fakeHbaseConnection = exp.DummyObjectWithExpectations(
        'fakeHbaseConnection')
    fakeHbaseConnection.expect('hbaseThriftExceptions', None, None, (), None)
    fakeHbaseConnection.expect('put_json_dump',
                               ('uuid', jdict, expectedDumpResult),
                               {"number_of_retries": 2}, None, None)
    fakeHbaseModule = exp.DummyObjectWithExpectations('fakeHbaseModule')
    fakeHbaseModule.expect('HBaseConnectionForCrashReports',
                           (d.hbaseHost, d.hbasePort, d.hbaseTimeout),
                           {"logger": d.logger},
                           fakeHbaseConnection, None)
    fakeHbaseModule.expect('NoConnectionException', None, None,
                           hbc.NoConnectionException, None)
    fakeJsonDumpStore = exp.DummyObjectWithExpectations('fakeJsonDumpStore')
    fakeJsonDumpModule = exp.DummyObjectWithExpectations('fakeJsonDumpModule')
    fakeJsonDumpModule.expect('JsonDumpStorage', (), j, fakeJsonDumpStore,
                              None)
    css = cstore.CrashStorageSystemForHBase(d, configPrefix='',
                                            hbaseClient=fakeHbaseModule,
                                            jsonDumpStorage=fakeJsonDumpModule)
    expectedResult = cstore.CrashStorageSystem.OK
    result = css.save_raw('uuid', jdict, expectedDumpResult,
                          currentTimestamp)
    assert result == expectedResult, 'expected %s but got %s' % (
        expectedResult, result)

def testDoingWorkWithTwoWorkers():
    logger = sutil.SilentFakeLogger()
    config = sutil.DotDict({'logger': logger, 'numberOfThreads': 2})
    myList = []

    def insertIntoList(anItem):
        myList.append(anItem[0])
        return siwf.OK

    iwf = siwf.IteratorWorkerFramework(config,
                                       name='Wilma',
                                       taskFunc=insertIntoList)
    try:
        iwf.start()
        time.sleep(2.0)
        assert len(iwf.workerPool.threadList) == 2, \
            "expected 2 threads, but found %d" % \
            len(iwf.workerPool.threadList)
        assert len(myList) == 10, \
            'expected to do 10 inserts, but %d were done instead' % \
            len(myList)
        assert sorted(myList) == range(10), \
            'expected %s, but got %s' % (range(10), sorted(myList))
        iwf.stop()
    except Exception:
        # we got threads to join
        iwf.workerPool.waitForCompletion()
        raise

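# In the test above, no jobSourceIterator is passed, so the framework's
# default job source is evidently an iterator of ten items whose first
# elements are 0 through 9; that is what the assertions on myList encode.
# The two-second sleep is a crude way to let both worker threads drain the
# queue before asserting.
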
def test_C_config_tool_init(self):
    """test_C_config_tool_init: constructor test"""
    expectedRegEx = sutil.DotDict()
    expectedRegEx.irrelevant_signature_re = re.compile('ignored1')
    expectedRegEx.prefix_signature_re = re.compile('pre1|pre2')
    expectedRegEx.signatures_with_line_numbers_re = re.compile(
        'fnNeedNumber')
    fixupSpace = re.compile(r' (?=[\*&,])')
    fixupComma = re.compile(r',(?! )')
    fixupInteger = re.compile(r'(<|, )(\d+)([uUlL]?)([^\w])')
    s, c = self.setup_config_C_sig_tool(
        expectedRegEx.irrelevant_signature_re,
        expectedRegEx.prefix_signature_re,
        expectedRegEx.signatures_with_line_numbers_re)
    self.assert_equal_with_nicer_output(c, s.config)
    self.assert_equal_with_nicer_output(
        expectedRegEx.irrelevant_signature_re, s.irrelevant_signature_re)
    self.assert_equal_with_nicer_output(
        expectedRegEx.prefix_signature_re, s.prefix_signature_re)
    self.assert_equal_with_nicer_output(
        expectedRegEx.signatures_with_line_numbers_re,
        s.signatures_with_line_numbers_re)
    self.assert_equal_with_nicer_output(fixupSpace, s.fixup_space)
    self.assert_equal_with_nicer_output(fixupComma, s.fixup_comma)
    self.assert_equal_with_nicer_output(fixupInteger, s.fixup_integer)

def _get_default_config(self):
    config = util.DotDict()
    config.elasticSearchHostname = 'somehost'
    config.elasticSearchPort = '9200'
    config.elasticsearch_index = 'socorro%Y%W'
    return config

def test_parse_arguments_with_class_validators(self):
    class NumberConverter(object):
        def clean(self, value):
            conv = {'one': 1, 'two': 2, 'three': 3}
            try:
                return conv[value]
            except KeyError:
                raise ValueError('No idea?!')

    # Define a set of filters where the "type" is not a builtin type
    # but a custom validator object.
    filters = [
        ("param1", 0, NumberConverter()),
    ]
    arguments = {
        "param1": "one",
    }
    params_exp = util.DotDict()
    params_exp.param1 = 1

    params = external_common.parse_arguments(filters, arguments, modern=True)
    assert params == params_exp

    # note that a ValueError becomes a BadArgumentError
    arguments = {
        "param1": "will cause a ValueError in NumberConverter.clean",
    }
    with pytest.raises(BadArgumentError):
        external_common.parse_arguments(filters, arguments, modern=True)

def setup_query_parameters(config):
    now = config.day + dt.timedelta(1)
    now_str = now.strftime('%Y-%m-%d')
    yesterday = config.day
    yesterday_str = yesterday.strftime('%Y-%m-%d')
    logger.debug("config.day = %s; now = %s; yesterday = %s",
                 config.day, now, yesterday)
    prod_phrase = ''
    try:
        if config.product != '':
            if ',' in config.product:
                prod_list = [x.strip() for x in config.product.split(',')]
                prod_phrase = ("and r.product in ('%s')" %
                               "','".join(prod_list))
            else:
                prod_phrase = "and r.product = '%s'" % config.product
    except Exception:
        util.reportExceptionAndContinue(logger)
    ver_phrase = ''
    try:
        if config.version != '':
            if ',' in config.version:
                ver_list = [x.strip() for x in config.version.split(',')]
                ver_phrase = ("and r.version in ('%s')" %
                              "','".join(ver_list))
            else:
                ver_phrase = "and r.version = '%s'" % config.version
    except Exception:
        util.reportExceptionAndContinue(logger)
    return util.DotDict({
        'now_str': now_str,
        'yesterday_str': yesterday_str,
        'prod_phrase': prod_phrase,
        'ver_phrase': ver_phrase
    })

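# For illustration, given config.product = 'Firefox, Thunderbird' the helper
# above produces prod_phrase = "and r.product in ('Firefox','Thunderbird')",
# while a single value like config.product = 'Firefox' produces
# prod_phrase = "and r.product = 'Firefox'"; the version phrase is built the
# same way. Note that the values are interpolated directly into the SQL text
# rather than passed as bind parameters.
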
def setup_db_C_sig_tool(ig='ignored1',
                        pr='pre1|pre2',
                        si='fnNeedNumber',
                        ss=('sentinel',
                            "('sentinel2', lambda x: 'ff' in x)")):
    config = sutil.DotDict()
    config.logger = sutil.FakeLogger()
    config.database_class = mock.MagicMock()
    config.transaction_executor_class = TransactionExecutor
    patch_target = 'socorro.processor.signature_utilities.' \
                   'execute_query_fetchall'
    with mock.patch(patch_target) as mocked_query:
        # these become the results of four successive calls to
        # execute_query_fetchall
        mocked_query.side_effect = [
            [(pr, ), ],
            [(ig, ), ],
            [(si, ), ],
            [(x, ) for x in ss],
        ]
        s = sig.CSignatureToolDB(config)
    return s, config

def test_assume_any_identity_1():
    conf = sutil.DotDict()
    fake_logger = exp.DummyObjectWithExpectations()
    conf.logger = fake_logger
    conf.processorCheckInTime = dt.timedelta(0, 300)
    threshold = now_func() + conf.processorCheckInTime
    os_module = exp.DummyObjectWithExpectations()
    sdb_module = exp.DummyObjectWithExpectations()
    db_conn = exp.DummyObjectWithExpectations()
    db_cur = exp.DummyObjectWithExpectations()
    db_pool = exp.DummyObjectWithExpectations()
    hostname = 'fred'

    class MyRegister(reg.ProcessorRegistrationAgent):
        def __init__(self, config, db_conn_source, now_func, os, sdb):
            super(MyRegister, self).__init__(config, db_conn_source,
                                             now_func, os, sdb)

        def take_over_dead_processor(self, cursor, proc_id):
            expected_assert(db_cur, cursor)
            expected_assert(proc_id, 17)

        def registration(self):
            pass

    fake_logger.expect('debug', ('looking for any dead processor', ), {})
    sql = ("select id from processors"
           " where lastseendatetime < %s limit 1")
    sdb_module.expect('singleValueSql', (db_cur, sql, (threshold, )), {}, 17)
    fake_logger.expect('info', ('will step in for processor %d', 17), {})
    r = MyRegister(conf, db_pool, now_func, os_module, sdb_module)
    id = r.assume_any_identity(db_cur, threshold, hostname, 17)
    expected_assert(17, id)

def setup_mocked_register(register_class):
    conf = sutil.DotDict()
    conf.processorCheckInTime = dt.timedelta(0, 300)
    conf.processorCheckInFrequency = dt.timedelta(0, 300)
    conf.processorId = 17
    fake_logger = exp.DummyObjectWithExpectations()
    conf.logger = fake_logger
    threshold = now_func() + conf.processorCheckInTime
    os_module = exp.DummyObjectWithExpectations()
    sdb_module = exp.DummyObjectWithExpectations()
    db_conn = exp.DummyObjectWithExpectations()
    db_cur = exp.DummyObjectWithExpectations()
    db_pool = exp.DummyObjectWithExpectations()
    fake_logger.expect('info', ('connecting to database', ), {})
    db_pool.expect('connectionCursorPair', (), {}, (db_conn, db_cur))
    os_module.expect('uname', (), {}, ['a', 'b', 'c'])
    os_module.expect('getpid', (), {}, 1111)
    sdb_module.expect('singleValueSql',
                      (db_cur, register_class.NOW_SQL,
                       (conf.processorCheckInTime, )),
                      {}, threshold)
    fake_logger.expect('info', ("registering with 'processors' table", ), {})
    db_conn.expect('commit', (), {})
    return register_class(conf, db_pool, now_func, os_module, sdb_module)

def get(self, *args):
    convertedArgs = webapi.typeConversion([dataTypeOptions, str], args)
    parameters = util.DotDict(zip(['datatype', 'uuid'], convertedArgs))
    logger.debug("GetCrash get %s", parameters)
    self.crashStorage = self.crashStoragePool.crashStorage()
    function_name = datatype_function_associations[parameters.datatype]
    function = self.__getattribute__(function_name)
    return function(parameters.uuid)

def submitter(config):
    logger = config.logger
    signal.signal(signal.SIGTERM, iwf.respondToSIGTERM)
    signal.signal(signal.SIGHUP, iwf.respondToSIGTERM)
    statsPool = sutil.DotDict({
        'submittedCount': stats.CounterPool(config),
        'failureCount': stats.CounterPool(config),
        'processTime': stats.DurationAccumulatorPool(config),
    })
    config.statsPool = statsPool

    def statsReportingWaitingFunc():
        if not statsReportingWaitingFunc.reportingCounter % 60:
            submittedCountPool = statsPool.submittedCount
            numberOfMinutes = submittedCountPool.numberOfMinutes()
            if numberOfMinutes:
                logger.info('running for %d minutes', numberOfMinutes)
                numberSubmitted = submittedCountPool.read()
                logger.info('average submitted per minute: %s',
                            (float(numberSubmitted) / numberOfMinutes))
                numberOfFailures = statsPool.failureCount.read()
                logger.info('failures in the last five minutes: %d',
                            numberOfFailures)
                processTime = statsPool.processTime.read()
                logger.info('average time in last five minutes: %s',
                            processTime)
        statsReportingWaitingFunc.reportingCounter += 1
    statsReportingWaitingFunc.reportingCounter = 0

    theIterator = config.iteratorFunc(config)
    theWorkerFunction = createSubmitterFunction(config)
    submissionMill = iwf.IteratorWorkerFramework(
        config,
        jobSourceIterator=theIterator,
        taskFunc=theWorkerFunction,
        name='submissionMill')
    try:
        submissionMill.start()
        submissionMill.waitForCompletion(statsReportingWaitingFunc)
        # though, it only ends if someone hits ^C or sends SIGHUP or
        # SIGTERM - any of which will get translated into a
        # KeyboardInterrupt exception
    except KeyboardInterrupt:
        while True:
            try:
                submissionMill.stop()
                break
            except KeyboardInterrupt:
                logger.warning('We heard you the first time. There is no need '
                               'for further keyboard or signal interrupts. We '
                               'are waiting for the worker threads to stop. '
                               'If this app does not halt soon, you may have '
                               'to send SIGKILL (kill -9)')

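# A note on the waiting function above: waitForCompletion() calls it
# repeatedly while the mill runs, and the modulo-60 guard means statistics
# are logged only on every 60th invocation. Assuming the framework polls
# roughly once per second (not confirmed here), that amounts to about one
# report per minute against the five-minute accumulator pools.
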
def makeJsonDictFromForm(self, form, tm=tm):
    jsonDict = sutil.DotDict()
    for name in form.keys():
        if type(form[name]) in (str, unicode):
            jsonDict[name] = form[name]
        else:
            jsonDict[name] = form[name].value
    jsonDict.timestamp = tm.time()
    return jsonDict

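# In the method above, values that are already strings are copied through
# as-is; anything else is assumed to be a form-field object (in the style of
# cgi.FieldStorage) whose payload lives in its .value attribute. Passing the
# time module in as the tm parameter keeps the timestamp mockable, which
# testCrashStorageSystem_makeJsonDictFromForm below exploits with its
# fakeTimeModule.
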
def test_checkin():
    def now_func():
        return dt.datetime(2011, 1, 1, 0, 6, 0, tzinfo=UTC)

    conf = sutil.DotDict()
    conf.processorCheckInTime = dt.timedelta(0, 300)
    conf.processorCheckInFrequency = dt.timedelta(0, 300)
    conf.processorId = 17
    fake_logger = exp.DummyObjectWithExpectations()
    conf.logger = fake_logger
    threshold = now_func() + conf.processorCheckInTime
    os_module = exp.DummyObjectWithExpectations()
    sdb_module = exp.DummyObjectWithExpectations()
    db_conn = exp.DummyObjectWithExpectations()
    db_cur = exp.DummyObjectWithExpectations()
    db_pool = exp.DummyObjectWithExpectations()
    fake_logger.expect('info', ('connecting to database', ), {})
    db_pool.expect('connectionCursorPair', (), {}, (db_conn, db_cur))
    os_module.expect('uname', (), {}, ['a', 'b', 'c'])
    os_module.expect('getpid', (), {}, 1111)
    sdb_module.expect('singleValueSql',
                      (db_cur, reg.ProcessorRegistrationAgent.NOW_SQL,
                       (conf.processorCheckInTime, )),
                      {}, threshold)
    fake_logger.expect('info', ("registering with 'processors' table", ), {})
    db_conn.expect('commit', (), {})
    fake_logger.expect('debug',
                       ("updating 'processor' table registration", ), {})
    db_pool.expect('connectionCursorPair', (), {}, (db_conn, db_cur))
    db_cur.expect('execute',
                  ("update processors set lastseendatetime = %s "
                   "where id = %s", (now_func(), 17)), {})
    db_conn.expect('commit', (), {})
    r = MockedRegister(conf, db_pool, now_func, os_module, sdb_module)
    r.checkin()
    expected_assert(now_func(), r.last_checkin_ts)
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 6, 0, tzinfo=UTC)
    r.checkin()
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 5, 0, tzinfo=UTC)
    r.checkin()
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 4, 0, tzinfo=UTC)
    r.checkin()
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 3, 0, tzinfo=UTC)
    r.checkin()
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 2, 0, tzinfo=UTC)
    r.checkin()
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 1, 0, tzinfo=UTC)
    r.checkin()
    fake_logger.expect('debug',
                       ("updating 'processor' table registration", ), {})
    db_pool.expect('connectionCursorPair', (), {}, (db_conn, db_cur))
    db_cur.expect('execute',
                  ("update processors set lastseendatetime = %s "
                   "where id = %s", (now_func(), 17)), {})
    db_conn.expect('commit', (), {})
    r.last_checkin_ts = dt.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)
    r.checkin()
    expected_assert(now_func(), r.last_checkin_ts)

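# Reading of the test above: with "now" fixed at 00:06:00 and
# processorCheckInFrequency set to 300 seconds, only a last_checkin_ts of
# 00:00:00 (six minutes old) is stale enough to trigger a second database
# update; the intermediate timestamps from 00:06:00 down to 00:01:00 all
# return without touching the database, which is why no expectations are
# queued for them.
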
def get_dummy_context(self):
    """Create a dummy config object to use when testing."""
    context = util.DotDict()
    context.database = util.DotDict({
        'database_host': 'fred',
        'database_port': '127',
        'database_name': 'wilma',
        'database_user': '******',
        'database_password': '******',
    })
    context.webapi = util.DotDict({
        'elasticSearchHostname': "localhost",
        'elasticSearchPort': "9200"
    })
    context.searchImplementationModule = "socorro.external.postgresql"
    context.serviceImplementationModule = "socorro.external.elasticsearch"
    return context

def testMovement():
    """testMovement (this will take 15-20 seconds)"""
    config = sutil.DotDict({
        'logger': sutil.SilentFakeLogger(),
        'numberOfThreads': 1,
    })
    fakeHbaseStorage = getHbaseStorage1(config)
    smover.move(config,
                sourceCrashStorageClass=fakeSource,
                destCrashStorageClass=fakeHbaseStorage)

def get_dummy_context(self):
    """Create a dummy config object to use when testing."""
    context = util.DotDict()
    context.database = util.DotDict({
        'database_hostname': 'somewhere',
        'database_port': '8888',
        'database_name': 'somename',
        'database_username': '******',
        'database_password': '******',
    })
    context.platforms = (
        {"id": "windows", "name": "Windows NT"},
        {"id": "linux", "name": "Linux"}
    )
    context.non_release_channels = ['beta', 'aurora', 'nightly']
    context.restricted_channels = ['beta']
    return context

def testCrashStorageSystem_makeJsonDictFromForm():
    d = util.DotDict()
    d.dumpField = 'd'
    fakeValue = util.DotDict()
    fakeValue.value = 2
    f = util.DotDict()
    f.a = '1'
    f.b = fakeValue
    f.c = '3'
    f.d = '4'
    f.e = '5'
    expectedTime = '12:00:01'
    fakeTimeModule = exp.DummyObjectWithExpectations('fakeTimeModule')
    fakeTimeModule.expect('time', (), {}, expectedTime, None)
    css = cstore.CrashStorageSystem(d)
    resultJson = css.makeJsonDictFromForm(f, fakeTimeModule)
    assert resultJson.a == '1'
    assert resultJson.b == 2
    assert resultJson.c == '3'
    assert resultJson.e == '5'

def setupSigUtil(ig='ignored1', pr='pre1|pre2', si='fnNeedNumber'):
    config = sutil.DotDict()
    config.logger = sutil.FakeLogger()
    config.irrelevantSignatureRegEx = ig
    config.prefixSignatureRegEx = pr
    config.signaturesWithLineNumbersRegEx = si
    config.signatureSentinels = (
        'sentinel',
        ('sentinel2', lambda x: 'ff' in x),
    )
    s = sig.CSignatureTool(config)
    return s, config

def test_build_reports_sql_from(self):
    """Test PostgreSQLBase.build_reports_sql_from()."""
    pgbase = self.get_instance()

    params = util.DotDict()
    params.report_process = ""
    params.branches = []

    # ........................................................................
    # Test 1: no specific parameter
    sql_exp = "FROM reports r"
    sql = pgbase.build_reports_sql_from(params)
    self.assertEqual(sql, sql_exp)

    # ........................................................................
    # Test 2: with a plugin
    params.report_process = "plugin"
    sql_exp = "FROM reports r JOIN plugins_reports ON " \
              "plugins_reports.report_id = r.id JOIN plugins ON " \
              "plugins_reports.plugin_id = plugins.id"
    sql = pgbase.build_reports_sql_from(params)
    sql = " ".join(sql.split())  # squeeze all \s, \r, \t...
    self.assertEqual(sql, sql_exp)

    # ........................................................................
    # Test 3: with a branch
    params.report_process = ""
    params.branches = ["2.0"]
    sql_exp = "FROM reports r JOIN branches ON " \
              "(branches.product = r.product AND branches.version = " \
              "r.version)"
    sql = pgbase.build_reports_sql_from(params)
    sql = " ".join(sql.split())  # squeeze all \s, \r, \t...
    self.assertEqual(sql, sql_exp)

    # ........................................................................
    # Test 4: with a plugin and a branch
    params.report_process = "plugin"
    params.branches = ["2.0"]
    sql_exp = "FROM reports r JOIN plugins_reports ON " \
              "plugins_reports.report_id = r.id JOIN plugins ON " \
              "plugins_reports.plugin_id = plugins.id JOIN branches ON " \
              "(branches.product = r.product AND branches.version = " \
              "r.version)"
    sql = pgbase.build_reports_sql_from(params)
    sql = " ".join(sql.split())  # squeeze all \s, \r, \t...
    self.assertEqual(sql, sql_exp)

def setup_config_C_sig_tool(
    ig='ignored1',
    pr='pre1|pre2',
    si='fnNeedNumber',
    ss=('sentinel', ('sentinel2', lambda x: 'ff' in x)),
):
    config = sutil.DotDict()
    config.logger = sutil.FakeLogger()
    config.irrelevant_signature_re = ig
    config.prefix_signature_re = pr
    config.signatures_with_line_numbers_re = si
    config.signature_sentinels = ss
    s = sig.CSignatureTool(config)
    return s, config

def getDummyContext():
    context = util.DotDict()
    context.databaseHost = 'fred'
    context.databaseName = 'wilma'
    context.databaseUserName = '******'
    context.databasePassword = '******'
    context.databasePort = 127
    context.smtpHostname = 'localhost'
    context.smtpPort = 25
    context.smtpUsername = None
    context.smtpPassword = None
    context.unsubscribeBaseUrl = 'http://example.com/unsubscribe/%s'
    context.fromEmailAddress = '*****@*****.**'
    return context

def test_parse_arguments(self):
    """Test external_common.parse_arguments()."""
    filters = [
        ("param1", "default", ["list", "str"]),
        ("param2", None, "int"),
        ("param3", ["list", "of", 4, "values"], ["list", "str"])
    ]
    arguments = {
        "param1": "value1",
        "unknown": 12345
    }
    params_exp = util.DotDict()
    params_exp.param1 = ["value1"]
    params_exp.param2 = None
    params_exp.param3 = ["list", "of", "4", "values"]

    params = external_common.parse_arguments(filters, arguments)
    self.assertEqual(params, params_exp)

def test_assume_identity_by_host_3():
    conf = sutil.DotDict()
    fake_logger = exp.DummyObjectWithExpectations()
    conf.logger = fake_logger
    conf.processorCheckInTime = dt.timedelta(0, 300)
    threshold = now_func() + conf.processorCheckInTime
    os_module = exp.DummyObjectWithExpectations()
    sdb_module = exp.DummyObjectWithExpectations()
    db_conn = exp.DummyObjectWithExpectations()
    db_cur = exp.DummyObjectWithExpectations()
    db_pool = exp.DummyObjectWithExpectations()
    hostname = 'fred'

    class MyRegister(reg.ProcessorRegistrationAgent):
        def __init__(self, config, db_conn_source, now_func, os, sdb):
            super(MyRegister, self).__init__(config, db_conn_source,
                                             now_func, os, sdb)

        def take_over_dead_processor(self, cursor, proc_id):
            expected_assert(db_cur, cursor)
            expected_assert(proc_id, 17)

        def assume_new_identity(self, cursor, thresh, host, proc_id):
            expected_assert(db_cur, cursor)
            expected_assert(threshold, thresh)
            expected_assert(hostname, host)
            expected_assert(proc_id, 17)
            return proc_id

        def registration(self):
            pass

    fake_logger.expect('debug',
                       ('looking for a dead processor for host %s', 'fred'),
                       {})
    sql = ("select id from processors"
           " where lastseendatetime < %s"
           " and name like %s limit 1")
    sdb_module.expect('singleValueSql',
                      (db_cur, sql, (threshold, hostname + '%')), {},
                      None, sdb.SQLDidNotReturnSingleValue)
    fake_logger.expect('debug',
                       ("no dead processor found for host, %s", hostname),
                       {})
    sql2 = "select id from processors where name like 'fred%'"
    sdb_module.expect('singleValueSql', (db_cur, sql2), {},
                      None, sdb.SQLDidNotReturnSingleValue)
    r = MyRegister(conf, db_pool, now_func, os_module, sdb_module)
    id = r.assume_identity_by_host(db_cur, threshold, hostname, 17)
    expected_assert(17, id)

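# Reading of the test above: assume_identity_by_host first looks for a dead
# processor registered under the same hostname, then for any processor with
# that hostname at all. Both lookups are mocked to raise
# SQLDidNotReturnSingleValue, so the agent falls through to
# assume_new_identity, which here simply hands back the requested id, 17.
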
def get_dummy_context(self):
    """Create a dummy config object to use when testing."""
    context = util.DotDict()
    context.platforms = (
        {"id": "windows", "name": "Windows NT"},
        {"id": "linux", "name": "Linux"}
    )
    context.channels = [
        'Beta', 'Aurora', 'Nightly', 'beta', 'aurora', 'nightly'
    ]
    context.restricted_channels = ['Beta', 'beta']
    return context

def get_dummy_context(self):
    """Create a dummy config object to use when testing."""
    context = util.DotDict()
    context.elasticSearchHostname = ""
    context.elasticSearchPort = 9200
    context.platforms = (
        {"id": "windows", "name": "Windows NT"},
        {"id": "linux", "name": "Linux"}
    )
    return context

def get_dummy_context(self):
    """Create a dummy config object to use when testing."""
    context = util.DotDict()
    context.databaseHost = 'fred'
    context.databaseName = 'wilma'
    context.databaseUserName = '******'
    context.databasePassword = '******'
    context.databasePort = 127
    context.searchImplementationModule = "socorro.external.postgresql"
    context.serviceImplementationModule = "socorro.external.elasticsearch"
    context.elasticSearchHostname = "localhost"
    context.elasticSearchPort = "9200"
    return context