def reportDataGenerator(self, sizePerDay, numDays):
    """Insert sizePerDay fake crash-report rows per day for numDays days.

    Rows cycle through a fixed set of build strings and OS name/version
    pairs.  Within a day, client_crash_date advances by 10ms after every
    third row; date_processed always trails client_crash_date by 5ms.
    The accumulated rows are handed to addReportData() via a cursor from
    self.connection.

    :param sizePerDay: number of report rows to generate per simulated day
    :param numDays: number of consecutive days to generate
    """
    idGen = dbtestutil.moreUuid()
    initialDate = dt.datetime(2008, 1, 1, 1, 1, 1, 1, tzinfo=UTC)
    currentDate = dt.datetime(2008, 1, 1, 1, 1, 1, 1, tzinfo=UTC)
    milli5 = dt.timedelta(milliseconds=5)
    milli10 = dt.timedelta(milliseconds=10)
    buildStrings = ['200712312355', '200712302355', '200712292355']
    # buildDates is only consumed by the commented-out 'build_date' field
    # below; kept so that field can be re-enabled without re-deriving it.
    buildDates = [
        dt.datetime(2007, 12, 31, 23, 55, tzinfo=UTC),
        dt.datetime(2007, 12, 30, 23, 55, tzinfo=UTC),
        dt.datetime(2007, 12, 29, 23, 55, tzinfo=UTC),
    ]
    osNameVersions = [
        ('Windows NT', '6.6.6'),
        ('Windows NT', '6.6.6'),
        ('Windows', 'v.v.v'),
        ('Windows', 'v.v.v'),
        ('Windows', 'v.v.v'),
        ('Windows', 'v.v.v'),
        ('Mac OS X', '10.5.5'),
        ('Mac OS X', '10.5.6'),
        ('Mac OS X', '10.5.6'),
        ('Linux', '10.10.10'),
        ('Linux', '10.10.11'),
    ]
    insData = []
    for dummyDays in range(numDays):
        count = 0
        while count < sizePerDay:
            os_name, os_version = osNameVersions[count % len(osNameVersions)]
            data = {
                'uuid': next(idGen),  # next() works on Python 2.6+ and 3.x
                'client_crash_date': currentDate,
                'date_processed': currentDate + milli5,
                'product': 'foxy',
                'version': '3.6.9b2',
                'build': buildStrings[count % len(buildStrings)],
                'url': 'http://www.woo.wow/weee',
                'install_age': 89000,
                'last_crash': 0,
                'uptime': 88000,
                'email': None,
                'os_name': os_name,
                'os_version': os_version,
                #'build_date': buildDates[count%len(buildDates)],
                'user_comments': 'oh help',
                'app_notes': "",
                'distributor': "",
                'distributor_version': "",
            }
            insData.append(data)
            if not count % 3:
                currentDate += milli10
            count += 1
        # BUGFIX: the original reset to initialDate + 1 day every pass, so
        # days 2..N all started on the identical date.  Advance by the day
        # index instead (matches the accumulating baseDate in the sibling
        # makeReportData generator).
        currentDate = initialDate + dt.timedelta(days=dummyDays + 1)
    cursor = self.connection.cursor()
    addReportData(cursor, insData)
def testMoreUuid():
    """moreUuid() must yield well-formed, 36-character, unique uuids.

    Draws 150,000 values and checks each one's length, uniqueness, and
    canonical 8-4-4-4-12 hex layout.
    """
    m = {'hexD': '[0-9a-fA-F]'}
    p = '^%(hexD)s{8}-%(hexD)s{4}-%(hexD)s{4}-%(hexD)s{4}-%(hexD)s{12}$' % m
    rep = re.compile(p)
    gen = dbtu.moreUuid()
    seen = set()
    # surely no test set has more than 150K uuids... and we want to run in < 1 second
    for i in range(150000):
        d = next(gen)  # next() instead of Py2-only gen.next()
        assert 36 == len(d)
        assert d not in seen
        assert rep.match(d)
        seen.add(d)
def testMoreUuid():
    """moreUuid() must yield well-formed, 36-character, unique uuids.

    Draws 150,000 values and checks each one's length, uniqueness, and
    canonical 8-4-4-4-12 hex layout.
    """
    m = {'hexD': '[0-9a-fA-F]'}
    p = '^%(hexD)s{8}-%(hexD)s{4}-%(hexD)s{4}-%(hexD)s{4}-%(hexD)s{12}$' % m
    rep = re.compile(p)
    gen = dbtu.moreUuid()
    seen = set()
    # surely no test set has more than 150K uuids... and we want to run in < 1 second
    for i in range(150000):
        d = next(gen)  # next() instead of Py2-only gen.next()
        assert 36 == len(d)
        assert d not in seen
        assert rep.match(d)
        seen.add(d)
def reportDataGenerator(self, sizePerDay, numDays):
    """Insert sizePerDay fake crash-report rows per day for numDays days.

    Rows cycle through a fixed set of build strings and OS name/version
    pairs.  Within a day, client_crash_date advances by 10ms after every
    third row; date_processed always trails client_crash_date by 5ms.
    The accumulated rows are handed to addReportData() via a cursor from
    self.connection.

    :param sizePerDay: number of report rows to generate per simulated day
    :param numDays: number of consecutive days to generate
    """
    idGen = dbtestutil.moreUuid()
    initialDate = dt.datetime(2008, 1, 1, 1, 1, 1, 1, tzinfo=UTC)
    currentDate = dt.datetime(2008, 1, 1, 1, 1, 1, 1, tzinfo=UTC)
    milli5 = dt.timedelta(milliseconds=5)
    milli10 = dt.timedelta(milliseconds=10)
    buildStrings = ['200712312355', '200712302355', '200712292355']
    # buildDates is only consumed by the commented-out 'build_date' field
    # below; kept so that field can be re-enabled without re-deriving it.
    buildDates = [
        dt.datetime(2007, 12, 31, 23, 55, tzinfo=UTC),
        dt.datetime(2007, 12, 30, 23, 55, tzinfo=UTC),
        dt.datetime(2007, 12, 29, 23, 55, tzinfo=UTC),
    ]
    osNameVersions = [
        ('Windows NT', '6.6.6'),
        ('Windows NT', '6.6.6'),
        ('Windows', 'v.v.v'),
        ('Windows', 'v.v.v'),
        ('Windows', 'v.v.v'),
        ('Windows', 'v.v.v'),
        ('Mac OS X', '10.5.5'),
        ('Mac OS X', '10.5.6'),
        ('Mac OS X', '10.5.6'),
        ('Linux', '10.10.10'),
        ('Linux', '10.10.11'),
    ]
    insData = []
    for dummyDays in range(numDays):
        count = 0
        while count < sizePerDay:
            os_name, os_version = osNameVersions[count % len(osNameVersions)]
            data = {
                'uuid': next(idGen),  # next() works on Python 2.6+ and 3.x
                'client_crash_date': currentDate,
                'date_processed': currentDate + milli5,
                'product': 'foxy',
                'version': '3.6.9b2',
                'build': buildStrings[count % len(buildStrings)],
                'url': 'http://www.woo.wow/weee',
                'install_age': 89000,
                'last_crash': 0,
                'uptime': 88000,
                'email': None,
                'os_name': os_name,
                'os_version': os_version,
                #'build_date': buildDates[count%len(buildDates)],
                'user_comments': 'oh help',
                'app_notes': "",
                'distributor': "",
                'distributor_version': "",
            }
            insData.append(data)
            if not count % 3:
                currentDate += milli10
            count += 1
        # BUGFIX: the original reset to initialDate + 1 day every pass, so
        # days 2..N all started on the identical date.  Advance by the day
        # index instead (matches the accumulating baseDate in the sibling
        # makeReportData generator).
        currentDate = initialDate + dt.timedelta(days=dummyDays + 1)
    cursor = self.connection.cursor()
    addReportData(cursor, insData)
def makeReportData(sizePerDay, numDays):
    """Build and return a list of fake crash-report row dicts.

    Generates sizePerDay rows for each of numDays days starting at the
    module-level testBaseDate.  Field values are drawn from the helper
    generators (genOs, genProd, genSig, genUrl, genBuild); timestamps
    advance within a day by genBump(sizePerDay) increments, and
    date_processed leads client_crash_date by one day plus 1903ms.

    :param sizePerDay: number of rows per simulated day
    :param numDays: number of consecutive days
    :return: list of dicts suitable for inserting into the reports table
    """
    idGen = dbtestutil.moreUuid()
    osGen = genOs()
    prodGen = genProd()
    sigGen = genSig()
    urlGen = genUrl()
    bumpGen = genBump(sizePerDay)
    buildGen = genBuild()
    baseDate = testBaseDate
    procOffset = dt.timedelta(days=1, milliseconds=1903)
    insData = []
    for dummyDays in range(numDays):
        currentDate = baseDate
        count = 0
        while count < sizePerDay:
            # next() instead of the Py2-only .next() method throughout
            product, version = next(prodGen)
            os_name, os_version = next(osGen)
            data = {
                'uuid': next(idGen),
                'client_crash_date': currentDate,
                'install_age': 1100,
                'last_crash': 0,
                'uptime': 1000,
                'date_processed': currentDate + procOffset,
                'user_comments': 'oh help',
                'app_notes': "",
                'signature': next(sigGen),
                'product': product,
                'version': version,
                'os_name': os_name,
                'os_version': os_version,
                'url': next(urlGen)[1],
                'hangid': 'test123',
                'process_type': 'plugin',
            }
            insData.append(data)
            currentDate += next(bumpGen)
            count += 1
        baseDate = baseDate + dt.timedelta(days=1)
    #print "client_crash_date | date_processed | uptm | uuid | inst | u, o, p, s"
    #for j in insData:
    #  print "%(client_crash_date)s | %(date_processed)s | %(uptime)s | %(uuid)s | %(install_age)s | %(urldims_id)2s,%(osdims_id)2s,%(productdims_id)2s,%(signature)s"%j
    return insData
def fillReports(self, cursor):
    """fill enough data to test mtbf behavior:
       - AVG(uptime); COUNT(date_processed); COUNT(DISTINCT(user_id))

    For every product in self.productDimData, inserts one reports row per
    (processing day x process time) combination, all sharing that
    product's uptime value.  Uptimes cycle through multiples of 5.

    :param cursor: open DB cursor; the insert is committed before return
    """
    self.fillMtbfTables(cursor)  # prime the pump
    sql = 'insert into reports (uuid, uptime, date_processed,product,version,os_name) values(%s,%s,%s,%s,%s,%s)'
    processTimes = ['00:00:00', '05:00:00', '10:00:00', '15:00:00', '20:00:00', '23:59:59']
    uptimes = [5 * x for x in range(1, 15)]
    data = []
    uuidGen = dbtestutil.moreUuid()
    uptimeIndex = 0
    for product in self.productDimData:
        uptime = uptimes[uptimeIndex % len(uptimes)]
        uptimeIndex += 1
        # self.processingDays entries unpack as (date, offset, ignored);
        # only the date component is used here.
        for d, off, ig in self.processingDays:
            for pt in processTimes:
                dp = "%s %s" % (d.isoformat(), pt)
                # next() instead of Py2-only uuidGen.next()
                tup = (next(uuidGen), uptime, dp, product[0], product[1], product[2])
                data.append(tup)
    cursor.executemany(sql, data)
    cursor.connection.commit()
def testNewEntryMaxDirectoryEntries0(self):
    """
    testNewEntryMaxDirectoryEntries0(self):
    - test that we do NOT overflow if maxDirectoryEntries is 0

    Creates 100 entries for the same timestamp and asserts every one
    lands (as a symlink) in the single expected date directory — i.e. no
    new "_1", "_2", ... overflow directories are created.
    """
    import socorro.unittest.testlib.dbtestutil as dbtestutil
    gen = dbtestutil.moreUuid()
    kwargs = self.initKwargs[0]
    kwargs['maxDirectoryEntries'] = 0
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **kwargs)
    datedata = [2008, 4, 5, 6, 7, 8]
    # minute 7 is expected to be bucketed into the 5-minute window '05'
    expectdata = [2008, 4, 5, 6, 5]
    testDate = dt.datetime(*datedata)
    # suffix 0 = the first (and only, since entries never overflow) bucket
    datepart = "%s_%d" % (os.sep.join(["%02d" % (x) for x in expectdata]), 0)
    expectedDateDir = os.path.join(storage.dateBranch, datepart)
    for i in range(100):
        ooid = next(gen)  # next() instead of Py2-only gen.next()
        fh = storage.newEntry(ooid, testDate)
        try:
            assert os.path.exists(expectedDateDir)
            assert os.path.islink(os.path.join(expectedDateDir, ooid))
        finally:
            fh.close()