def testTransferOne(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[0],
                            rootDir=self.testMoveFrom)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[0])
    oldStorage = JDS.JsonDumpStorage(self.testMoveFrom, **self.initKwargs[0])
    itemNumber = 0
    xmas = datetime.datetime(2001, 12, 25, 12, 25, tzinfo=UTC)
    for id in createJDS.jsonFileData.keys():
        createLinks = 0 == itemNumber % 2
        removeOld = 0 == itemNumber % 3
        itemNumber += 1
        storage.transferOne(id, oldStorage, createLinks=createLinks,
                            removeOld=removeOld, aDate=xmas)
        try:
            storage.getJson(id)
        except Exception, x:
            print '(%s): %s' % (type(x), x)
            assert False, 'Expected to find a transferred json file for %s' % id
        if createLinks:
            self.__hasLinkOrFail(storage, id)
            self.__hasDatePathOrFail(storage, id, xmas)
        if removeOld:
            assert_raises(OSError, oldStorage.getJson, id)
def testNewEntryPermissions(self):
    dirPermissions = 0707
    dumpPermissions = 0500
    sfl = SilentFakeLogger()
    j = JDS.JsonDumpStorage(root=self.testDir,
                            dirPermissions=dirPermissions,
                            dumpPermissions=dumpPermissions,
                            logger=sfl)
    u = str(socorro_uuid.uuid1())
    f1, f2 = j.newEntry(u)
    f1.close()
    f2.close()
    jpath = j.getJson(u)
    gotPermissions = stat.S_IMODE(os.stat(jpath)[0])
    assert gotPermissions == dumpPermissions, \
        "%s: Expected %o, got %o" % (jpath, dumpPermissions, gotPermissions)
    dpath = j.getDump(u)
    gotPermissions = stat.S_IMODE(os.stat(dpath)[0])
    assert gotPermissions == dumpPermissions, \
        "%s: Expected %o, got %o" % (dpath, dumpPermissions, gotPermissions)
    udir = os.path.split(dpath)[0]
    datePath = os.path.abspath(
        os.path.join(udir, os.readlink(os.path.splitext(dpath)[0])))
    namePath = os.path.abspath(os.path.splitext(dpath)[0])
    topPath = os.path.abspath(self.testDir)
    dailies = os.listdir(topPath)

    def assertPermVisitor(p):
        gotPerm = stat.S_IMODE(os.stat(p)[0])
        assert dirPermissions == gotPerm, \
            "%s: Expected %0o, got %0o" % (p, dirPermissions, gotPerm)
    for d in dailies:
        # visitPath quietly ignores a file as the leaf
        socorro_fs.visitPath(os.path.join(topPath, d), datePath,
                             assertPermVisitor)
        socorro_fs.visitPath(os.path.join(topPath, d), namePath,
                             assertPermVisitor)
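# The permission checks above lean on socorro_fs.visitPath, which applies a
# visitor function to each directory on the chain from a root down to a leaf.
# A minimal sketch of that contract for reference; this is an illustration,
# not the real socorro_fs implementation:
def _visitPathSketch(rootDir, fullPath, visit):
    """Call visit(p) for rootDir and each directory between it and fullPath."""
    root = os.path.abspath(rootDir)
    leaf = os.path.abspath(fullPath)
    if not leaf.startswith(root):
        raise OSError('%s not under %s' % (leaf, root))
    visit(root)
    current = root
    for part in [p for p in leaf[len(root):].split(os.sep) if p]:
        current = os.path.join(current, part)
        if os.path.isdir(current):  # quietly ignore a file as the leaf
            visit(current)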
def testRemove(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[2],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
    counter = 0
    for uuid in createJDS.jsonFileData.keys():
        if 0 == counter % 3:
            # test that we don't throw for missing links
            storage.markAsSeen(uuid)
        if 1 == counter % 3:
            # test that we don't throw for one missing file
            if 0 == counter % 2:
                os.unlink(storage.getDump(uuid))
            else:
                os.unlink(storage.getJson(uuid))
        if 2 == counter % 3:
            # test that we don't throw for both missing files, but with links
            os.unlink(storage.getJson(uuid))
            os.unlink(storage.getDump(uuid))
        storage.remove(uuid)
        counter += 1
    allfiles = []
    alllinks = []
    for dir, dirs, files in os.walk(self.testDir):
        for file in files:
            allfiles.append(file)
            if os.path.islink(os.path.join(dir, file)):
                alllinks.append(file)
        for d in dirs:
            if os.path.islink(os.path.join(dir, d)):
                alllinks.append(d)
    assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
    assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
    assert_raises(JDS.NoSuchUuidFound, storage.remove, "bogusdatax3yymmdd")
def markAsSeen(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[3],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[3])
    for uuid, data in createJDS.jsonFileData.items():
        assert os.path.islink(
            os.sep.join((storage.dateBranch, data[3], uuid))), \
            'Expect a link from date to name for %s' % uuid
        assert os.path.islink(
            os.sep.join((storage.nameBranch, data[2], uuid))), \
            'Expect link from name to timed for %s' % uuid
        # marking as seen must remove both halves of the link pair
        storage.markAsSeen(uuid)
        assert not os.path.islink(
            os.sep.join((storage.dateBranch, data[3], uuid))), \
            'Expect no link from date to name for %s' % uuid
        assert not os.path.islink(
            os.sep.join((storage.nameBranch, data[2], uuid))), \
            'Expect no link from name to date for %s' % uuid
    try:
        storage.markAsSeen(createJDS.jsonBadUuid)
        assert False, 'Expect to throw IOError from attempt to openAndMarkAsSeen(non-existent-uuid)'
    except IOError:
        assert True, 'Got expected error from attempt to openAndMarkAsSeen(non-existent-uuid)'
    except Exception, e:
        assert False, 'Got unexpected error %s from attempt to openAndMarkAsSeen(non-existent-uuid)' % e
def testDestructiveDateWalkNotNow(self):
    createJDS.createTestSet(self.currenttimes, self.initKwargs[1],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[1])
    seenids = []
    for id in storage.destructiveDateWalk():
        seenids.append(id)
    assert [] == seenids, 'Expect current-time entries to be skipped, but saw %s' % seenids
def testRemoveAlsoNames(self):
    """testJsonDumpStorage:TestJsonDumpStorage.testRemoveAlsoNames(self)
    Try to remove them all, and check that they are indeed all gone.
    """
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[2],
                            self.testDir)
    kwargs = self.initKwargs[2]
    kwargs['cleanIndexDirectories'] = 'True'
    storage = JDS.JsonDumpStorage(self.testDir, **kwargs)
    for uuid, data in createJDS.jsonFileData.items():
        storage.remove(uuid)
    assert not os.listdir(storage.root), \
        'Expected them all to go, but %s' % (os.listdir(storage.root))
def constructorAlt(self, *args, **kwargs):
    storage = JDS.JsonDumpStorage(self.testDir, **kwargs)
    assert storage.dateName == kwargs.get('dateName', 'date'), \
        'From kwargs=%s' % kwargs
    assert storage.indexName == kwargs.get('indexName', 'name'), \
        'From kwargs=%s' % kwargs
    assert storage.jsonSuffix == '.' + kwargs.get('jsonSuffix', 'json'), \
        'We will always pass non-dot json suffix. From kwargs=%s' % kwargs
    assert storage.dumpSuffix == kwargs.get('dumpSuffix', '.dump'), \
        'We will always pass dot dump suffix. From kwargs=%s' % kwargs
    assert self.testDir.rstrip(os.sep) == storage.root, \
        'From kwargs=%s' % kwargs
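# constructorAlt is a parameterized check rather than a test itself; a caller
# might exercise it along these lines. The kwarg values are illustrative,
# chosen to match the suffix conventions the asserts above document:
#
# self.constructorAlt(**{
#     'dateName': 'by_date',
#     'indexName': 'by_name',
#     'jsonSuffix': 'json',    # always passed without the leading dot
#     'dumpSuffix': '.dump',   # always passed with the leading dot
# })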
def testRemoveWithBadlyFormattedDateLink(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[2],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
    uuid = createJDS.jsonFileData.keys()[0]
    head, json = os.path.split(storage.getJson(uuid))
    target = os.readlink(os.path.join(head, uuid))
    idx = target.index('/date/')
    # drop the first character after '/date/' to malform the link target
    target = "%s%s%s" % (target[:idx + 6], target[idx + 7:idx + 10],
                         target[idx + 10:])
    os.unlink(os.path.join(head, uuid))
    os.symlink(target, os.path.join(head, uuid))
    #print "LINK:%s" % (os.readlink(os.path.join(head, uuid)))
    # assure that we don't throw for a badly formatted path
    storage.remove(uuid)
def deferredJobStorageCleanup(config, logger):
    """Remove deferred job storage entries older than the configured maximum age."""
    try:
        logger.info("beginning deferredJobCleanup")
        j = jds.JsonDumpStorage(root=config.deferredStorageRoot)
        numberOfDaysAsTimeDelta = dt.timedelta(
            days=int(config.maximumDeferredJobAge))
        threshold = utc_now() - numberOfDaysAsTimeDelta
        logger.info("  removing older than: %s", threshold)
        j.removeOlderThan(threshold)
    except (KeyboardInterrupt, SystemExit):
        logger.debug("got quit message")
    except:
        socorro.lib.util.reportExceptionAndContinue(logger)
    logger.info("deferredJobCleanupLoop done.")
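# A minimal usage sketch for deferredJobStorageCleanup, assuming only the two
# attributes the function actually reads. The _Config class, the path, and the
# age below are hypothetical stand-ins for the real socorro configuration
# machinery:
#
# import logging
#
# class _Config(object):
#     deferredStorageRoot = '/tmp/socorro/deferred'  # hypothetical root
#     maximumDeferredJobAge = 2                      # days of jobs to keep
#
# deferredJobStorageCleanup(_Config(), logging.getLogger('cleanup'))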
def testGetDump(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[1],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[1])
    for uuid, data in createJDS.jsonFileData.items():
        dateparts = data[0].split('-')
        daily = "%4d%02d%02d" % tuple([int(x) for x in dateparts[:3]])
        expected = os.sep.join((storage.root, daily, storage.indexName,
                                data[2], uuid + storage.dumpSuffix))
        got = storage.getDump(uuid)
        assert expected == got, 'Expected dump file %s, got %s' % (expected, got)
    try:
        storage.getDump(createJDS.jsonBadUuid)
        assert False, 'Should throw OSError from attempt to getDump(non-existent-uuid)'
    except OSError, e:
        assert True
def testRemoveRemovesOnlyDate(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[2],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
    dailies = set([])
    expectedSubs = []
    alldirs = []
    allfiles = []
    alllinks = []
    for uuid, data in createJDS.jsonFileData.items():
        dailies.add(''.join(data[0].split('-')[:3]))
        storage.remove(uuid)
    for day in dailies:
        for dir, dirs, files in os.walk(
                os.sep.join((storage.root, day, storage.dateName))):
            for file in files:
                allfiles.append(file)
                if os.path.islink(os.path.join(dir, file)):
                    alllinks.append(file)
            for d in dirs:
                if os.path.islink(os.path.join(dir, d)):
                    alllinks.append(d)
                alldirs.append(os.path.join(dir, d))
    assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
    assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
    assert [] == alldirs, 'Expect that all date dirs are gone, but found %s' % alldirs
    for day in dailies:
        for dir, dirs, files in os.walk(
                os.sep.join((storage.root, day, storage.indexName))):
            for file in files:
                allfiles.append(file)
                if os.path.islink(os.path.join(dir, file)):
                    alllinks.append(file)
            for d in dirs:
                if os.path.islink(os.path.join(dir, d)):
                    alllinks.append(d)
                alldirs.append(os.path.join(dir, d))
    assert [] == allfiles, 'Expect that all removed files are gone, but found %s' % allfiles
    assert [] == alllinks, 'Expect that all links are gone, but found %s' % alllinks
    for sub in expectedSubs:
        assert sub in alldirs, "Expect each subdirectory is still there, but didn't find %s" % sub
def testDestructiveDateWalk(self):
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[0],
                            self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[0])
    uuids = createJDS.jsonFileData.keys()
    seenids = set()
    for id in storage.destructiveDateWalk():
        assert id in uuids, 'Expect that %s is among the uuids we stored\n%s' % (id, uuids)
        seenids.add(id)
    for id in uuids:
        assert id in seenids, 'Expect that we found every uuid we stored (%s) from %s' % (id, seenids)
    daily = os.listdir(storage.root)
    for d in daily:
        assert not storage.dateName in os.listdir(os.path.join(storage.root, d)), \
            'Expected all date subdirs to be gone, but %s' % d
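# Note, grounded in the assertions above: destructiveDateWalk yields every
# ooid reachable through the date branch and consumes that branch as it goes,
# so afterwards no daily directory still contains a dateName subdirectory.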
def testMarkAsSeen(self):
    """testNewJsonDumpStorage:TestJsonDumpStorage.testMarkAsSeen()
    somewhat bogus test: Doesn't look for failure modes
    """
    createJDS.createTestSet(createJDS.jsonFileData, self.initKwargs[0],
                            rootDir=self.testDir)
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
    for ooid in createJDS.jsonFileData.keys():
        namePath, parts = storage.namePath(ooid)
        linkInName = os.path.join(namePath, ooid)
        assert os.path.islink(linkInName), 'expected %s as link' % linkInName
        dpath = os.path.join(namePath, os.readlink(linkInName))
        linkInDate = os.path.join(dpath, ooid)
        assert os.path.islink(linkInDate), 'expected %s as link' % linkInDate
        storage.markAsSeen(ooid)
        assert not os.path.exists(linkInName), 'expected %s gone' % linkInName
        assert not os.path.exists(linkInDate), 'expected %s gone' % linkInDate
def testCopyFrom(self):
    os.makedirs(self.testMoveFrom)
    fromdata = [
        ('aabbccdd-something20071020', '2007-10-20-12-15', 'webalos', True, False),
        ('aabbccee-something20071020', '2007-10-20-12-15', 'webalos', True, False),
        ('aabbccff-something20071020', '2007-10-20-10-15', 'webalos', False, True),
    ]
    df = jf = None
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[1])
    for (uuid, stampS, head, doLink, doRm) in fromdata:
        jpath = "%s%s%s%s" % (self.testMoveFrom, os.sep, uuid, storage.jsonSuffix)
        dpath = "%s%s%s%s" % (self.testMoveFrom, os.sep, uuid, storage.dumpSuffix)
        jf = open(jpath, 'w')
        df = open(dpath, 'w')
        jf.write('json file: %s\n' % uuid)
        df.write('dump file: %s\n' % uuid)
        jf.close()
        df.close()
        stamp = datetime.datetime(*[int(x) for x in stampS.split('-')],
                                  tzinfo=UTC)
        newjpath = None
        try:
            ok = storage.copyFrom(uuid, jpath, dpath, head, stamp, doLink, doRm)
            assert ok, "Expect to succeed with %s" % (uuid)
        except Exception, e:
            assert False, 'Expected to not raise "%s" from id %s' % (e, uuid)
        try:
            newjpath = storage.getJson(uuid)
        except Exception, e:
            assert False, 'getJson(%s) should not raise %s' % (uuid, e)
def testNewEntry(self):
    storage = JDS.JsonDumpStorage(self.testDir, **self.initKwargs[2])
    for uuid, data in createJDS.jsonFileData.items():
        datetimedata = [int(x) for x in data[0].split('-')]
        uuid = ''.join((uuid[:-7], '2', uuid[-6:]))
        stamp = datetime.datetime(*datetimedata, tzinfo=UTC)
        try:
            fj, fd = storage.newEntry(uuid, webheadHostName=data[1],
                                      timestamp=stamp)
        except IOError:
            assert False, 'Expect to succeed with newEntry(%s,...)' % uuid
        assert fj, 'Expect a non-null json file handle from newEntry(%s,...)' % uuid
        loc2 = data[2][0:5]  # We are not honoring ooid depth
        expectFileBase = os.sep.join((storage.root,
                                      storage.dailyPart('', stamp),
                                      storage.indexName, loc2))
        expectJson = os.path.join(expectFileBase, uuid + storage.jsonSuffix)
        assert expectJson == fj.name, 'For %s, expect %s, got %s' % (uuid, expectJson, fj.name)
        assert fd, 'Expect a non-null dump file handle from newEntry(%s,...)' % uuid
        expectDump = os.path.join(expectFileBase, uuid + storage.dumpSuffix)
        assert expectDump == fd.name, 'For %s, expect %s, got %s' % (uuid, expectDump, fd.name)
        loc3parts = self.__relativeDateParts(data[0], storage.minutesPerSlot)
        loc3parts.append(data[3][-len('webhead0x_x'):])
        loc3 = os.sep.join(loc3parts)
        lbase = os.sep.join((storage.root, storage.dailyPart('', stamp),
                             storage.dateName, loc3))
        lpath = os.path.join(lbase, uuid)
        assert os.path.islink(lpath), 'Expect a link from timed to storage for %s' % uuid
        relNamePath = os.path.join(lbase, os.readlink(lpath))
        assert os.path.isdir(relNamePath), 'Expected %s to be a Name directory' % (relNamePath)
        lpath = os.path.join(expectFileBase, uuid)
        assert os.path.islink(lpath), 'Expect link from name storage to timed for %s' % uuid
        relDatePath = os.path.join(expectFileBase, os.readlink(lpath))
        assert os.path.isdir(relDatePath), 'Expected %s to be a Date directory' % (relDatePath)
        try:
            try:
                fj.write("testing\n")
                assert True, 'must be able to write to the json file for uuid %s' % uuid
            except:
                assert False, 'must not fail to write to the json file for uuid %s' % uuid
        finally:
            if fj:
                fj.close()
        try:
            try:
                fd.write("testing\n")
                assert True, 'must be able to write to the dump file for uuid %s' % uuid
            except:
                assert False, 'must not fail to write to the dump file for uuid %s' % uuid
        finally:
            if fd:
                fd.close()
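# For orientation, the on-disk layout testNewEntry asserts is sketched below.
# The daily directory, radix path, and slot path shown are illustrative
# values, not literal test output:
#
#   root/
#     20081225/                    # storage.dailyPart(...) for the timestamp
#       name/                      # storage.indexName branch
#         ab/cd/                   # loc2: leading radix of the uuid
#           <uuid>.json            # handle fj
#           <uuid>.dump            # handle fd
#           <uuid> -> <date dir>   # symlink into the date branch
#       date/                      # storage.dateName branch
#         05/00/webhead02_0/       # loc3: time slot plus webhead
#           <uuid> -> <name dir>   # symlink back to the name branch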
        'BuildID': 'bogusBuildID-%02d',
    }
    cookie = 0
    while True:
        retMap = {}
        for k, v in dataMap.items():
            retMap[k] = v % cookie
        yield json.dumps(retMap)
        cookie += 1


def createTestSet(testData, jsonKwargs, rootDir):
    try:
        os.makedirs(rootDir)
    except OSError, x:
        # an already existing root directory is fine; re-raise anything else
        if errno.EEXIST != x.errno:
            raise
    storage = JDS.JsonDumpStorage(rootDir, **jsonKwargs)
    jsonIsEmpty = jsonKwargs.get('jsonIsEmpty', False)
    jsonIsBogus = jsonKwargs.get('jsonIsBogus', True)
    jsonFileGenerator = jsonKwargs.get('jsonFileGenerator', None)
    if 'default' == jsonFileGenerator:
        jsonFileGenerator = minimalJsonFileContents()
    thedt = utc_now()
    for uuid, data in testData.items():
        if data[0].startswith('+'):
            # entries stamped relative to "now": wait out a minute rollover
            # so the whole set lands in consistent time slots
            if thedt.second >= 58:
                print "\nSleeping for %d seconds" % (61 - thedt.second)
                time.sleep(61 - thedt.second)
                thedt = utc_now()
            slot = {
                '+0': getSlot(storage.minutesPerSlot, thedt.minute),
                '+5': getSlot(storage.minutesPerSlot, thedt.minute + 5),