def testDatabaseInstantiation(self):
    """Database must build its libpq dsn from the config mapping and
    resolve its logger with the precedence: explicit constructor
    argument, then the 'logger' config entry, then a fresh FakeLogger.
    """
    # BUG FIX: username/password had been redacted to '******', which
    # makes every dsn assertion below fail; restored to the 'D'/'E'
    # values the 'host=A port=B dbname=C user=D password=E' asserts need.
    sample1 = {
        'database_hostname': 'A',
        'database_port': 'B',
        'database_name': 'C',
        'database_username': 'D',
        'database_password': 'E',
    }
    d = db.Database(sample1)
    assert d.dsn == 'host=A port=B dbname=C user=D password=E', \
        'dsn not created correctly'
    # no logger supplied anywhere: a FakeLogger must be manufactured
    assert type(d.logger) == type(util.FakeLogger()), \
        'should have a %s but got %s instead' % (
            type(util.FakeLogger()), type(d.logger))
    # logger passed as the positional parameter wins
    d = db.Database(sample1, 1)
    assert d.logger == 1, \
        'logger pass as a parameter was not saved, got %s instead' % d.logger
    sample1 = {
        'database_hostname': 'A',
        'database_port': 'B',
        'database_name': 'C',
        'database_username': 'D',
        'database_password': 'E',
        'logger': 2,
    }
    # logger supplied via the config dictionary
    d = db.Database(sample1)
    assert d.dsn == 'host=A port=B dbname=C user=D password=E', \
        'dsn not created correctly'
    assert d.logger == 2, \
        'logger passed with dictionary was not saved, got %s instead' % d.logger
    # parameter logger overrides the dictionary logger
    d = db.Database(sample1, 1)
    assert d.dsn == 'host=A port=B dbname=C user=D password=E', \
        'dsn not created correctly'
    assert d.logger == 1, \
        'logger passed with dictionary was not overridden by logger passed as a parameter, got %s instead' % d.logger
def setup_db_C_sig_tool(ig='ignored1', pr='pre1|pre2', si='fnNeedNumber',
                        ss=('sentinel',
                            "('sentinel2', lambda x: 'ff' in x)")):
    """Build a CSignatureToolDB whose four database queries are mocked.

    Returns the (tool, config) pair.  The sentinel entries in *ss* are
    strings here (unlike the non-DB setup helpers) — presumably because
    the DB-backed tool receives sentinel expressions as text rows; TODO
    confirm against CSignatureToolDB.
    """
    cfg = sutil.DotDict()
    cfg.logger = sutil.FakeLogger()
    cfg.database_class = mock.MagicMock()
    cfg.transaction_executor_class = TransactionExecutor
    patch_target = ('socorro.processor.signature_utilities.'
                    'execute_query_fetchall')
    with mock.patch(patch_target) as mocked_query:
        # the constructor issues four successive execute_query_fetchall
        # calls; feed them the prefix, irrelevant, line-number regexes
        # and then one row per sentinel, in that order
        mocked_query.side_effect = [
            [(pr,)],
            [(ig,)],
            [(si,)],
            [(sentinel,) for sentinel in ss],
        ]
        tool = sig.CSignatureToolDB(cfg)
    return tool, cfg
def func(paramsTuple):
    """Worker callback: load one crash's JSON form data, optionally
    rewrite its HangId for uniqueness, then submit it (with its binary
    dump) via config.submissionFunc, keeping time/success/failure stats.

    Always returns iwf.OK — presumably the signal for the iterating
    worker framework to continue; TODO confirm.
    """
    jsonFilePathName, binaryFilePathName = paramsTuple[0]
    with open(jsonFilePathName) as jsonFile:
        formData = json.load(jsonFile)
    if config.uniqueHang:
        # BUG FIX: the original branches were swapped (the in-cache case
        # assigned the entire cache dict to formData['HangId']), and the
        # chained assignment rebound formData['HangId'] before using it
        # as the cache key.  Mint one replacement id per original HangId
        # and reuse it on later sightings.
        try:
            originalHangId = formData['HangId']
            if originalHangId in existingHangIdCache:
                formData['HangId'] = existingHangIdCache[originalHangId]
            else:
                replacement = uuid.uuid4()
                existingHangIdCache[originalHangId] = replacement
                formData['HangId'] = replacement
        except Exception:
            # best effort: crashes without a HangId are submitted as-is
            pass
    processTimeStatistic = statsPools.processTime.getStat()
    submittedCountStatistic = statsPools.submittedCount.getStat()
    try:
        processTimeStatistic.start()
        config.submissionFunc(formData, binaryFilePathName, config.url,
                              config.logger)
        submittedCountStatistic.increment()
    except Exception:
        # log and keep going: one failed submission must not stop the run
        sutil.reportExceptionAndContinue(sutil.FakeLogger())
        failureCountStatistic = statsPools.failureCount.getStat()
        failureCountStatistic.increment()
        return iwf.OK
    finally:
        processTimeStatistic.end()
    return iwf.OK
def __init__(self, config, logger=None):
    """Store a libpq dsn built from *config* and pick a logger.

    Logger precedence: the *logger* argument, then config['logger'],
    then a util.FakeLogger fallback.  Side effect: *config* may gain
    'database_port' and 'logger' keys.
    """
    super(Database, self).__init__()
    # a missing or blank port falls back to PostgreSQL's default
    if config.get('database_port', '') == '':
        config['database_port'] = 5432
    self.dsn = "host=%(database_hostname)s port=%(database_port)s dbname=%(database_name)s user=%(database_username)s password=%(database_password)s" % config
    # setdefault always runs so config ends up with a 'logger' key
    stored = config.setdefault('logger', None)
    chosen = logger if logger else stored
    self.logger = chosen if chosen else util.FakeLogger()
def doSubmission (formData, binaryFilePathName, url, logger=sutil.FakeLogger(), posterModule=poster): fields = dict([(t[0],t[1]) for t in formData.items()]) fields['upload_file_minidump'] = open(binaryFilePathName, 'rb') datagen, headers = posterModule.encode.multipart_encode(fields); request = urllib2.Request(url, datagen, headers) print urllib2.urlopen(request).read(), try: logger.debug('submitted %s', formData['uuid']) except KeyError: logger.debug('submitted unknown')
def __init__(self, config, logger=None):
    """Store a libpq dsn built from the camelCase *config* keys and pick
    a logger (argument, then config['logger'], then a FakeLogger).

    Side effect: *config* may gain 'databasePort' and 'logger' keys.
    """
    super(Database, self).__init__()
    # CONSISTENCY FIX: also treat a blank databasePort as absent, as the
    # snake_case Database.__init__ variant does — otherwise an empty
    # string yields a malformed 'port= ' clause in the dsn.
    if 'databasePort' not in config or config.get('databasePort') == '':
        config['databasePort'] = 5432
    self.dsn = "host=%(databaseHost)s port=%(databasePort)s dbname=%(databaseName)s user=%(databaseUserName)s password=%(databasePassword)s" % config
    # setdefault always runs so config ends up with a 'logger' key
    self.logger = config.setdefault('logger', None)
    if logger:
        self.logger = logger
    if not self.logger:
        self.logger = util.FakeLogger()
def setupSigUtil(ig='ignored1', pr='pre1|pre2', si='fnNeedNumber'):
    """Build a CSignatureTool from the supplied regex strings, wired
    with a fake logger and a fixed pair of signature sentinels; return
    the (tool, config) pair."""
    cfg = sutil.DotDict()
    cfg.logger = sutil.FakeLogger()
    cfg.irrelevantSignatureRegEx = ig
    cfg.prefixSignatureRegEx = pr
    cfg.signaturesWithLineNumbersRegEx = si
    cfg.signatureSentinels = (
        'sentinel',
        ('sentinel2', lambda x: 'ff' in x),
    )
    return sig.CSignatureTool(cfg), cfg
def setup_config_C_sig_tool(
    ig='ignored1',
    pr='pre1|pre2',
    si='fnNeedNumber',
    ss=('sentinel', ('sentinel2', lambda x: 'ff' in x)),
):
    """Build a CSignatureTool from the supplied regex strings and
    sentinel tuple, wired with a fake logger; return (tool, config)."""
    cfg = sutil.DotDict()
    cfg.logger = sutil.FakeLogger()
    cfg.irrelevant_signature_re = ig
    cfg.prefix_signature_re = pr
    cfg.signatures_with_line_numbers_re = si
    cfg.signature_sentinels = ss
    return sig.CSignatureTool(cfg), cfg
def __init__(self, databaseHostName, databaseName, databaseUserName,
             databasePassword, logger=None):
    """Remember a libpq dsn for later connection creation.

    An empty *databaseHostName* yields a dsn without a host= clause
    (i.e. libpq's local-socket default).
    """
    super(DatabaseConnectionPool, self).__init__()
    # BUG FIX: the default used to be logger=util.FakeLogger(), which is
    # evaluated once at class-definition time and shared by every
    # instance; use a None sentinel and create a fresh fallback instead.
    if logger is None:
        logger = util.FakeLogger()
    if databaseHostName != '':
        self.dsn = "host=%s dbname=%s user=%s password=%s" % (
            databaseHostName, databaseName, databaseUserName,
            databasePassword)
    else:
        self.dsn = "dbname=%s user=%s password=%s" % (
            databaseName, databaseUserName, databasePassword)
    self.logger = logger
def __init__(self, root=".", osModule=os, **kwargs):
    """
    Record the storage root and derive configuration from kwargs.

    Calling super(...).__init__() only after some local setup is
    deliberate (and legal): the setdefault calls must populate kwargs
    before the parent consumes them.
    """
    kwargs.setdefault('minutesPerSlot', 1)
    # a subSlotCount of 1 means: use xxx_0 every time by default
    kwargs.setdefault('subSlotCount', 1)
    super(JsonDumpStorage, self).__init__(root=root, osModule=osModule,
                                          **kwargs)
    # option arrives as a string; only the literal 'true' enables it
    clean = kwargs.get('cleanIndexDirectories', 'false')
    self.cleanIndexDirectories = clean.lower() == 'true'
    # both suffixes are normalized to carry a leading dot
    jsonSuffix = kwargs.get('jsonSuffix', '.json')
    if not jsonSuffix.startswith('.'):
        jsonSuffix = ".%s" % jsonSuffix
    self.jsonSuffix = jsonSuffix
    dumpSuffix = kwargs.get('dumpSuffix', '.dump')
    if not dumpSuffix.startswith('.'):
        dumpSuffix = ".%s" % dumpSuffix
    self.dumpSuffix = dumpSuffix
    self.logger = kwargs.get('logger', socorro_util.FakeLogger())
def setup_config():
    """Return a DotDict config carrying a fake logger plus mocked
    database and transaction-executor classes."""
    cfg = sutil.DotDict()
    cfg.logger = sutil.FakeLogger()
    cfg.database_class = Mock()
    cfg.transaction_executor_class = Mock()
    return cfg
# --- command-line dispatch for an HBase crash-report utility ---
# NOTE(review): this chunk is truncated — the 'get_dump' branch body
# (and any further commands) lies beyond the visible source.
argi = 1
# optional "-h host[:port]" flag selects the HBase server; host/port
# presumably have defaults assigned earlier in the file — TODO confirm
if sys.argv[argi] == '-h':
    parts = sys.argv[argi + 1].split(':')
    host = parts[0]
    if len(parts) == 2:
        port = int(parts[1])
    argi += 2
cmd = sys.argv[argi]
args = sys.argv[argi + 1:]
# 5000 is presumably a connection timeout — confirm against the
# HBaseConnectionForCrashReports constructor signature
connection = HBaseConnectionForCrashReports(host, port, 5000,
                                            logger=utl.FakeLogger())
if cmd == 'get_report':
    # exactly one argument: the crash ooid
    if len(args) != 1:
        usage()
        sys.exit(1)
    pp.pprint(connection.get_report(*args))
elif cmd == 'get_json':
    # a second argument (any value) selects the legacy/"old" format
    if len(args) < 1:
        usage()
        sys.exit(1)
    old = len(args) == 2
    ppjson(connection.get_json(args[0], old))
elif cmd == 'get_dump':
def testCreateEmailCampaign():
    """Drive EmailCampaignCreate.create_email_campaign against a
    scripted dummy cursor and check the returned (campaign id,
    contact list) pair.

    The cursor expectations are order-sensitive: mogrify/execute for
    the contact-selection SQL, fetchall of one contact row, then
    mogrify/execute of the campaigns insert and fetchone of its id.
    """
    context = getDummyContext()
    product = 'Foobar'
    versions = '5'
    signature = 'JohnHancock'
    start_date = utc_now()
    end_date = start_date + timedelta(hours=1)
    # FIXME where should this go?
    # clamp end_date to the final second of its day
    end_date = datetime(end_date.year, end_date.month, end_date.day,
                        23, 59, 59, tzinfo=UTC)
    subject = 'test subject'
    body = 'test body'
    author = 'John Doe'
    email_count = 0
    parameters = {
        'product': product,
        'versions': versions,
        'signature': signature,
    }
    version_clause = ''
    # versions is the non-empty string '5' here, so the clause is added
    if len(versions) > 0:
        version_clause = " version IN %(versions)s AND "
    # the %% placeholders survive this first interpolation as literal
    # %(...)s markers for the cursor's later mogrify/execute.
    # NOTE(review): internal whitespace of this literal is unrecoverable
    # from the collapsed source; it is only ever compared against the
    # same `sql` variable below, so the layout is behavior-neutral here.
    sql = """
        SELECT DISTINCT contacts.id, reports.email, reports.client_crash_date AS crash_date, reports.uuid AS ooid, contacts.subscribe_token
        FROM reports
        LEFT JOIN email_contacts AS contacts ON reports.email = contacts.email
        WHERE TIMESTAMP WITH TIME ZONE '%s' <= reports.date_processed
        AND TIMESTAMP WITH TIME ZONE '%s' > reports.date_processed
        AND reports.product = %%(product)s
        AND %s reports.signature = %%(signature)s
        AND LENGTH(reports.email) > 4
        AND contacts.subscribe_status IS NOT FALSE
        AND contacts.email NOT IN (
            SELECT contacted.email
            FROM email_campaigns AS prev_campaigns
            JOIN email_campaigns_contacts ON email_campaigns_contacts.email_campaigns_id = prev_campaigns.id
            JOIN email_contacts AS contacted ON email_campaigns_contacts.email_contacts_id = contacted.id
            WHERE prev_campaigns.product = %%(product)s
            AND prev_campaigns.signature = %%(signature)s
        )
    """ % (start_date, end_date, version_clause)
    dummyCursor = expect.DummyObjectWithExpectations()
    dummyCursor.expect('mogrify', (sql, parameters), {}, None)
    dummyCursor.expect('execute', (sql, parameters), {}, None)
    # one matching contact row comes back (email redacted in fixture;
    # the final assert uses the same redacted value, so they agree)
    dummyCursor.expect('fetchall', (), {},
                       [('0', '*****@*****.**', 'abc', 'def', None)])
    parameters = [product, versions, signature, subject, body,
                  start_date, end_date, email_count, author]
    logger = util.FakeLogger()
    table = EmailCampaignsTable(logger)
    sql = table.insertSql
    dummyCursor.expect('mogrify', (sql, parameters), {}, None)
    dummyCursor.expect('execute', (sql, parameters), {}, None)
    dummyCursor.expect('fetchone', (), {}, ['1234'])
    campaign = ecc.EmailCampaignCreate(context)
    campaignId = campaign.create_email_campaign(
        dummyCursor, product, versions, signature, subject, body,
        start_date, end_date, author)
    assert campaignId == ('1234', [{'token': None, 'crash_date': 'abc',
                                    'id': '0', 'ooid': 'def',
                                    'email': '*****@*****.**'}])