def testNewEntry(self):
    """newEntry() must create a gzip file under the name-index tree and a
    *relative* symlink to it under the date tree, for every fixture ooid."""
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[0])
    for ooid, (tdate, wh, pathprefix, longDatePath) in createJDS.jsonFileData.items():
        dailyPart = ''.join(tdate.split('-')[:3])
        expectedDir = os.sep.join((storage.root, dailyPart, storage.indexName, pathprefix))
        expectedPath = os.path.join(expectedDir, "%s%s" % (ooid, storage.fileSuffix))
        hourPart, slot = self.relativeDateParts(tdate, storage.minutesPerSlot)
        datepart = "%s_0" % (os.path.join(hourPart, slot))
        expectedDateDir = os.sep.join((storage.root, dailyPart, storage.dateName, datepart))
        testStamp = datetime.datetime(*[int(x) for x in tdate.split('-')], tzinfo=UTC)
        fh = None
        try:
            fh = storage.newEntry(ooid, testStamp)
            # write the file's own expected path so we can verify content below
            fh.write(expectedPath)
        finally:
            # BUG FIX: if newEntry() raised, fh is still None and an
            # unconditional fh.close() would mask the original error with
            # an AttributeError (same guard style as testSecondNewEntryAfterRemove)
            if fh:
                fh.close()
        assert os.path.exists(expectedPath), 'Expected: gzip file %s but none there' % (expectedPath)
        fh = None
        try:
            fh = gzip.open(expectedPath)
            firstline = fh.readline()
            assert expectedPath == firstline, 'Expected this file to contain its own path, but %s' % firstline
            nextline = fh.readline()
            assert '' == nextline, 'Expected this file to contain ONLY its own path, but %s' % nextline
        finally:
            if fh:
                fh.close()
        dToN = os.path.join(expectedDateDir, ooid)
        assert os.path.islink(dToN), 'Expected %s to be link exists:%s' % (dToN, os.path.exists(dToN))
        datapath = os.readlink(os.path.join(expectedDateDir, ooid))
        # The next lines prove we have a relative-path link
        zigpath = os.path.join(expectedDateDir, datapath)
        assert os.path.isfile(os.path.join(zigpath, "%s%s" % (ooid, storage.fileSuffix)))
        assert os.path.pardir in zigpath, 'But zigpath has no "parent directory" parts?: %s' % (zigpath)
def testRemoveDumpFile(self):
    """removeDumpFile() silently ignores an unknown ooid and deletes exactly
    one stored file per known ooid."""
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[0])
    self.createDumpSet(storage)
    remaining = len(createJDS.jsonFileData)

    def _allFileNames():
        # gather every file name currently present under the storage root
        found = set()
        for _base, _subdirs, names in os.walk(storage.root):
            found.update(names)
        return found

    # an unknown ooid must fail quietly, leaving the store untouched
    storage.removeDumpFile(createJDS.jsonBadUuid)
    assert remaining == len(_allFileNames())
    # each known ooid removes exactly one file
    for ooid in createJDS.jsonFileData.keys():
        storage.removeDumpFile(ooid)
        remaining -= 1
        current = _allFileNames()
        assert remaining == len(current), '\n %s: expected %d, but %d\n - %s' % (
            ooid, remaining, len(current), '\n - '.join(current))
def testGetDumpPath(self):
    """getDumpPath() must return the stored file's path for each known ooid
    and raise OSError for an unknown one."""
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[1])
    # store one gzipped entry per fixture ooid, each holding a unique sequence number.
    # (FIX: the original also computed expectedDir/expectedPath here, which
    # were never used — dead locals removed.)
    seq = 0
    seqs = {}
    for ooid, (tdate, wh, pathprefix, longdatepath) in createJDS.jsonFileData.items():
        seqs[ooid] = seq
        stamp = datetime.datetime(*[int(x) for x in tdate.split('-')], tzinfo=UTC)
        fh = storage.newEntry(ooid, stamp)
        fh.write("Sequence Number %d\n" % seq)
        fh.close()
        seq += 1
    # each returned path must open to exactly the single line written above
    for ooid in createJDS.jsonFileData.keys():
        path = storage.getDumpPath(ooid)
        f = gzip.open(path, 'r')
        lines = f.readlines()
        f.close()
        assert 1 == len(lines)
        assert 'Sequence Number %d\n' % (seqs[ooid]) == lines[0], 'But expected "Sequence Number %d\n", got "%s"' % (seqs[ooid], lines[0])
    assert_raises(OSError, storage.getDumpPath, createJDS.jsonBadUuid)
def testGetDumpFromFile(self):
    """getDumpFromFile() must round-trip each stored dump as a dict and
    raise OSError for an unknown ooid."""
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[0])
    self.createDumpSet(storage)
    for ooid in createJDS.jsonFileData.keys():
        retrieved = storage.getDumpFromFile(ooid)
        # NOTE(review): mutates the shared module-level bogusData dict in
        # place so it matches the expected per-ooid content
        bogusData['uuid'] = ooid
        assert bogusData == retrieved
    assert_raises(OSError, storage.getDumpFromFile, createJDS.jsonBadUuid)
def constructorAlt(self, *args, **kwargs):
    """Helper: construct a ProcessedDumpStorage from kwargs and verify that
    every constructor option landed on the matching attribute."""
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **kwargs)
    assert self.testDir.rstrip(os.sep) == storage.root, 'From kwargs=%s' % kwargs
    assert storage.indexName == kwargs.get('indexName', 'name'), 'From kwargs=%s' % kwargs
    # the suffix is normalized to carry a leading dot
    expectedSuffix = kwargs.get('fileSuffix', '.jsonz')
    if not expectedSuffix.startswith('.'):
        expectedSuffix = '.%s' % expectedSuffix
    assert expectedSuffix == storage.fileSuffix, 'expected "%s", got "%s" From kwargs=%s' % (expectedSuffix, storage.fileSuffix, kwargs)
    # numeric options may arrive as strings; they are coerced to int
    expectedCompression = int(kwargs.get('gzipCompression', '9'))
    assert expectedCompression == storage.gzipCompression
    expectedDepth = int(kwargs.get('storageDepth', 2))
    assert expectedDepth == storage.storageDepth, 'Expected %s, got %s' % (expectedDepth, storage.storageDepth)
    expectedMinutes = int(kwargs.get('minutesPerSlot', 1))
    assert expectedMinutes == storage.minutesPerSlot, 'Expected %s, got %s' % (expectedMinutes, storage.minutesPerSlot)
def testSecondNewEntryAfterRemove(self):
    """A second newEntry() for the same ooid must succeed cleanly after
    removeDumpFile() has deleted the first entry."""
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[0])
    ooid, (tdate, ig1, pathprefix, longDatePath) = list(createJDS.jsonFileData.items())[1]
    stamp = datetime.datetime(*[int(piece) for piece in tdate.split('-')], tzinfo=UTC)
    firstHandle = storage.newEntry(ooid, stamp)
    firstHandle.close()
    storage.removeDumpFile(ooid)
    # Next line fails ugly and useless unless we have fixed the problem
    secondHandle = None
    try:
        secondHandle = storage.newEntry(ooid, stamp)
    finally:
        if secondHandle:
            secondHandle.close()
def testPutDumpToFile(self):
    """ testPutDumpToFile(self):(slow=2) """
    # putDumpToFile() must write the JSON payload as a gzip file in the
    # name-index tree and create the ooid link dir in the date tree.
    # FIX: removed dead locals from the original (`stamp` was computed from
    # the fixture date but never passed anywhere, and `datePath = None` was
    # never read); split the confusing rebinding of `data` into `fixture`
    # (the createJDS tuple) and `payload` (the dict actually stored).
    storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[2])
    ooid = '0bae7049-bbff-49f2-dead-7e9fe2081125'  # is coded for depth 2, so no special thought needed
    fixture = createJDS.jsonFileData[ooid]
    expectedPath = os.sep.join(
        (storage.root, self.dailyFromNow(), storage.indexName, fixture[2]))
    expectedFile = os.path.join(expectedPath, ooid + storage.fileSuffix)
    assert not os.path.exists(expectedPath), 'Better not exist at start of test'
    payload = {"header": "header", "data": ['line ONE', 'lineTWO', 'last line']}
    # guard against the minute rolling over between computing `now` and the
    # write (expectedPath embeds the current day/slot)
    now = utc_now()
    if now.second > 57:
        time.sleep(60 - now.second)
        now = utc_now()
    storage.putDumpToFile(ooid, payload, now)
    seenDirs = set()
    seenFiles = set()
    for dirpath, dirnames, filenames in os.walk(storage.root):
        for f in filenames:
            if f.startswith(ooid):
                seenFiles.add(os.path.join(dirpath, f))
        for d in dirnames:
            if d.startswith(ooid):
                seenDirs.add(os.path.join(dirpath, d))
    # every file for this ooid lives in the name-index tree...
    for p in seenFiles:
        assert storage.fileSuffix in p
        assert storage.indexName in p
    # ...and every ooid-named directory lives in the date tree
    for p in seenDirs:
        assert ooid == os.path.split(p)[1]
        assert storage.dateName in p
    assert os.path.exists(expectedFile), 'Just a nicer way to say your test is FUBAR'
    f = gzip.open(expectedFile)
    lines = " ".join(f.readlines())
    f.close()
    assert """{"header": "header", "data": ["line ONE", "lineTWO", "last line"]}""" == lines