def processFile(f, tests, keepGoing, resultMap):
    retVal = 0
    try:
        logging.info('File: {:s} size: {:d}'.format(f, os.path.getsize(f)))
        myFi = File.FileRead(f, theFileId=f, keepGoing=keepGoing)
        #a = r'W:\LISTestData\logPassStd256MB.lis'
        #myFi = File.FileRead(a, theFileId=a)
        clkStart = time.clock()
        myIdx = FileIndexer.FileIndex(myFi)
        #print(myIdx.longDesc())
        print('Index time: {:.3f}'.format(time.clock() - clkStart))
    except Exception as err:
        logging.error(str(err))
        traceback.print_exc()
    else:
        #print('resultMap', resultMap)
        #print('tests', tests)
        for t in tests:
            #print('t', t)
            resultMap[t][f] = []
        for aLpi in myIdx.genLogPasses():
            for t in tests:
                try:
                    myR = TEST_TYPE[t][0](myFi, aLpi.logPass)
                except Exception as err:
                    logging.error(str(err))
                    traceback.print_exc()
                else:
                    retVal = 1
                    if myR is not None:
                        resultMap[t][f].append(myR)
    return retVal

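# Usage sketch (not part of the original module): processFile() above expects a
# 'tests' iterable of keys into TEST_TYPE and a 'resultMap' dict with one sub-dict
# per test name. This driver and its file paths are illustrative assumptions only;
# it assumes TEST_TYPE is a dict mapping a test name to a (callable, ...) tuple as
# the call TEST_TYPE[t][0](myFi, aLpi.logPass) implies.
def _exampleProcessFiles(paths, keepGoing=True):
    tests = list(TEST_TYPE.keys())
    resultMap = {t: {} for t in tests}
    for aPath in paths:
        processFile(aPath, tests, keepGoing, resultMap)
    return resultMap
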
def test_02(self):
    """TestIndexEflrHeadTail.test_02(): Reel head and tail."""
    myFile = self._retFileFromBytes(self._retReelHead() + self._retReelTail())
    myIdx = FileIndexer.FileIndex(myFile)
    #print()
    #print(myIdx._idx)
    self.assertEqual(2, len(myIdx))
    self.assertEqual([132, 133], myIdx.lrTypeS)

def test_01(self):
    """TestIndexEflrHeadTail.test_01(): Tape head and tail."""
    myFile = self._retFileFromBytes(self._retTapeHead() + self._retTapeTail())
    myIdx = FileIndexer.FileIndex(myFile)
    #print()
    #print(myIdx._idx)
    self.assertEqual(2, len(myIdx))
    self.assertEqual([130, 131], myIdx.lrTypeS)

def test_10(self):
    """TestPlotRecordSet.test_10(): value()."""
    myPrs = FileIndexer.PlotRecordSet()
    self.assertFalse(myPrs.canPlotFromInternalRecords())
    self.assertFalse(myPrs.canPlotFromExternalRecords())
    myPrs.tellFilm = 1
    myPrs.tellPres = 1
    myPrs.logPass = '******'
    self.assertTrue(myPrs)

def _retLisFileAndIndex(self, fpIn):
    """Returns a File.FileRead() and a FileIndexer.FileIndex() from fpIn.
    May raise an ExceptionTotalDepthLIS."""
    assert os.path.isfile(fpIn)
    logging.info('ProcLISPathBase._retLisFileAndIndex(): Reading LIS file {:s}'.format(fpIn))
    myFi = File.FileRead(fpIn, theFileId=fpIn, keepGoing=self._keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    return myFi, myIdx

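# Illustrative only: a minimal sketch of how a ProcLISPathBase subclass might call
# _retLisFileAndIndex() above. The method name and body here are assumptions for
# illustration, not library code; only genLogPasses() is taken from the index API
# used elsewhere in this module.
def _processFileExample(self, fpIn, fpOut):
    myFi, myIdx = self._retLisFileAndIndex(fpIn)
    for aLp in myIdx.genLogPasses():
        logging.info('Log pass: {:s}'.format(str(aLp)))
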
def test_06(self):
    """TestPlotRecordSet.test_06(): logPass setter and getter."""
    myPrs = FileIndexer.PlotRecordSet()
    self.assertFalse(myPrs.canPlotFromInternalRecords())
    self.assertFalse(myPrs.canPlotFromExternalRecords())
    self.assertTrue(myPrs.logPass is None)
    myPrs.logPass = '******'
    self.assertEqual('1', myPrs.logPass)
    self.assertFalse(myPrs.canPlotFromInternalRecords())
    self.assertTrue(myPrs.canPlotFromExternalRecords())

def test_05(self):
    """TestPlotRecordSet.test_05(): tellPip setter and getter."""
    myPrs = FileIndexer.PlotRecordSet()
    self.assertFalse(myPrs.canPlotFromInternalRecords())
    self.assertFalse(myPrs.canPlotFromExternalRecords())
    self.assertTrue(myPrs.tellPip is None)
    myPrs.tellPip = 1
    self.assertEqual(1, myPrs.tellPip)
    self.assertFalse(myPrs.canPlotFromInternalRecords())
    self.assertFalse(myPrs.canPlotFromExternalRecords())

def test_00(self):
    """TestIndexMarker.test_00(): All marker records."""
    myFile = self._retFileFromBytes(
        self._retSinglePr(b'\x89\x00')
        + self._retSinglePr(b'\x8A\x00')
        + self._retSinglePr(b'\x8B\x00')
        + self._retSinglePr(b'\x8D\x00')
    )
    myIdx = FileIndexer.FileIndex(myFile)
    #print()
    #print(myIdx._idx)
    self.assertEqual(4, len(myIdx))
    #print(myIdx.lrTypeS)
    self.assertEqual([137, 138, 139, 141], myIdx.lrTypeS)

def dumpFrameSets(fp, keepGoing, summaryOnly):
    logging.info('Index.dumpFrameSets(): {:s}'.format(fp))
    assert os.path.isfile(fp)
    myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    for aLp in myIdx.genLogPasses():
        print(aLp)
        # Load the FrameSet
        if aLp.logPass.totalFrames == 0:
            print('No frames to load.')
        else:
            aLp.logPass.setFrameSet(myFi, None, None)
            myFrSet = aLp.logPass.frameSet
            if not summaryOnly:
                # Print the channels and units
                hdrS = []
                if myFrSet.isIndirectX:
                    hdrS.append('XAXIS [{:s}]'.format(myFrSet.xAxisDecl.depthUnits))
                hdrS.extend(['{:s} [{:s}]'.format(m, u) for m, u in aLp.logPass.genFrameSetHeadings()])
                #print('TRACE: len(hdrS)', len(hdrS))
                print('\t'.join(hdrS))
                for frIdx in range(myFrSet.numFrames):
                    #print('TRACE: len(frame)', len(myFrSet.frame(frIdx)))
                    if myFrSet.isIndirectX:
                        print(myFrSet.xAxisValue(frIdx), '\t', end='')
                    print('\t'.join(['%g' % v for v in myFrSet.frame(frIdx)]))
            # Accumulate min/mean/max
            myAccClasses = [
                FrameSet.AccCount,
                FrameSet.AccMin,
                FrameSet.AccMean,
                FrameSet.AccMax,
                FrameSet.AccStDev,
                FrameSet.AccDec,
                FrameSet.AccEq,
                FrameSet.AccInc,
                FrameSet.AccBias,
                FrameSet.AccDrift,
                FrameSet.AccActivity,
            ]
            myAcc = myFrSet.accumulate(myAccClasses)
            print()
            fmtStr = '{:12s} ' + (' {:>12s}' * len(myAccClasses))
            print(fmtStr.format(
                'Sc Name', 'Count', 'Min', 'Mean', 'Max', 'Std Dev.',
                '--', '==', '++', 'Bias', 'Drift', 'Activity',
            ))
            schNameS = list(aLp.logPass.genFrameSetScNameUnit())
            # print(schNameS)
            for scIdx, aRow in enumerate(myAcc):
                print('{:4s} [{:4s}]'.format(*schNameS[scIdx]), ' ',
                      ' '.join(['{:12.5g}'.format(v) for v in aRow]))

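# Example invocation of dumpFrameSets() above, a sketch only; the wrapper name and
# the file path are placeholders and not part of the library.
def _exampleDumpSummary():
    logging.basicConfig(level=logging.INFO)
    dumpFrameSets('example.lis', keepGoing=True, summaryOnly=True)
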
def _retFileIndexSingleChannel(self):
    myBa = bytearray(self._retFileHead())
    # Add a log pass
    myLp = self._retLogPassGen()
    # Stick the DFSR on the array
    myBa += self.retPrS(myLp.lrBytesDFSR())
    # Add some logical records
    for i in range(4):
        myBa += self.retPrS(myLp.lrBytes(i * 100, 100))
    myBa += self._retFileTail()
    myFile = self._retFileFromBytes(myBa)
    myIdx = FileIndexer.FileIndex(myFile)
    return myFile, myIdx

def test_00(self):
    """TestIndexUnknownIntFormat.test_00(): Logical Records of unknown internal format, random lengths."""
    #print()
    #print(LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT)
    myB = b''
    for aType in LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT:
        myB += self._retSinglePr(self._retLrRandom(aType))
    myFile = self._retFileFromBytes(myB)
    myIdx = FileIndexer.FileIndex(myFile)
    #print(myIdx._idx)
    self.assertEqual(len(LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT), len(myIdx))
    #print(myIdx.lrTypeS)
    self.assertEqual(list(LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT), myIdx.lrTypeS)

def _processFile(fp, keepGoing, tabMtch, theCntr):
    assert os.path.isfile(fp)
    logging.info('PlotLogPasses._processFile(): {:s}'.format(fp))
    try:
        myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
        myIdx = FileIndexer.FileIndex(myFi)
    except ExceptionTotalDepthLIS as err:
        logging.error('Can not read LIS file {:s} with error: {:s}'.format(fp, str(err)))
    else:
        # print(' Index longDesc() '.center(75, '='))
        # print(myIdx.longDesc())
        # print(' Index longDesc() DONE '.center(75, '='))
        # Iterate through the FileIndexer object
        retVal = False
        for anIo in myIdx.genAll():
            # print('anIdxObj:', anIo)
            if anIo.lrType in LogiRec.LR_TYPE_TABLE_DATA \
                    and tabMtch.lrTypeMatch(anIo.lrType) \
                    and tabMtch.nameTableMatch(anIo.name):
                # Read the whole table logical record
                myFi.seekLr(anIo.tell)
                try:
                    myLrTable = LogiRec.LrTableRead(myFi)
                except Exception as err:
                    logging.error('Can not create Logical Record, error: {:s}'.format(str(err)))
                else:
                    # print('myLrTable', myLrTable)
                    for aRow in myLrTable.genRows():
                        theCntr.incRow(anIo.lrType, anIo.name, aRow.value)
                        if tabMtch.nameRowMatch(aRow.value):
                            for aCell in aRow.genCells():
                                theCntr.incCol(anIo.lrType, anIo.name, aCell.mnem)
                                if tabMtch.nameColMatch(aCell.mnem):
                                    theCntr.incAll(
                                        tabMtch,
                                        anIo.lrType,
                                        anIo.name,
                                        aRow.value,
                                        aCell.mnem,
                                        aCell.engVal.value,
                                    )
                                # if aCell.mnem == b'TYPE' and aCell.engVal.value == b'CONS':
                                #     retVal = True
        return retVal

def test_03(self):
    """TestIndexEflrHeadTail.test_03(): Reel/Tape/File head and tail."""
    myFile = self._retFileFromBytes(
        self._retReelHead()
        + self._retTapeHead()
        + self._retFileHead()
        + self._retFileTail()
        + self._retTapeTail()
        + self._retReelTail()
    )
    myIdx = FileIndexer.FileIndex(myFile)
    #print()
    #print(myIdx._idx)
    self.assertEqual(6, len(myIdx))
    #print(myIdx.lrTypeS)
    self.assertEqual([132, 130, 128, 129, 131, 133], myIdx.lrTypeS)

def dumpFrameSets(fp, keepGoing, summaryOnly, channels):
    """Dump the frame values to stdout.
    keepGoing is a bool.
    summaryOnly is a bool; if True only a summary is emitted, if False all the data and the summary are written out.
    channels is a set of Mnems; if non-empty then only these channels, if present, are written out."""
    logging.info('Index.dumpFrameSets(): {:s}'.format(fp))
    assert os.path.isfile(fp)
    myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    for aLp in myIdx.genLogPasses():
        print(aLp)
        # Load the FrameSet
        if aLp.logPass.totalFrames == 0:
            print('No frames to load.')
        else:
            aLp.logPass.setFrameSet(myFi, None, None)
            myFrSet = aLp.logPass.frameSet
            if not summaryOnly:
                # Print the channels and units
                hdrS = []
                if myFrSet.isIndirectX:
                    hdrS.append('XAXIS [{!r:s}]'.format(myFrSet.xAxisDecl.depthUnits))
                indexes = []
                if len(channels):
                    for i, (m, u) in enumerate(aLp.logPass.genFrameSetHeadings()):
                        if m in channels:
                            hdrS.append('{!r:s} [{!r:s}]'.format(m, u))
                            indexes.append(i)
                else:
                    hdrS.extend([
                        '{!r:s} [{!r:s}]'.format(m, u)
                        for m, u in aLp.logPass.genFrameSetHeadings()
                    ])
                if len(channels) and len(indexes) != len(channels):
                    logging.warning(
                        'Some channels you specified can not be found: indexes={!r:s} channels={!r:s}'.format(
                            indexes, channels))
                #print('TRACE: len(hdrS)', len(hdrS))
                print('\t'.join(hdrS))
                for frIdx in range(myFrSet.numFrames):
                    #print('TRACE: len(frame)', len(myFrSet.frame(frIdx)))
                    if myFrSet.isIndirectX:
                        print(myFrSet.xAxisValue(frIdx), '\t', end='')
                    if len(indexes):
                        values = [myFrSet.frame(frIdx)[i] for i in indexes]
                        print('\t'.join(['%g' % v for v in values]))
                    else:
                        print('\t'.join(['%g' % v for v in myFrSet.frame(frIdx)]))
            # Accumulate min/mean/max
            myAccClasses = [
                FrameSet.AccCount,
                FrameSet.AccMin,
                FrameSet.AccMean,
                FrameSet.AccMax,
                FrameSet.AccStDev,
                FrameSet.AccDec,
                FrameSet.AccEq,
                FrameSet.AccInc,
                FrameSet.AccBias,
                FrameSet.AccDrift,
                FrameSet.AccActivity,
            ]
            myAcc = myFrSet.accumulate(myAccClasses)
            print()
            fmtStr = '{:12s} ' + (' {:>12s}' * len(myAccClasses))
            print(fmtStr.format(
                'Sc Name', 'Count', 'Min', 'Mean', 'Max', 'Std Dev.',
                '--', '==', '++', 'Bias', 'Drift', 'Activity',
            ))
            schNameS = list(aLp.logPass.genFrameSetScNameUnit())
            # print(schNameS)
            for scIdx, aRow in enumerate(myAcc):
                print('{:4s} [{:4s}]'.format(*schNameS[scIdx]), ' ',
                      ' '.join(['{:12.5g}'.format(v) for v in aRow]))

def _processFile(self, fpIn, fpOut):
    assert os.path.isfile(fpIn)
    assert os.path.exists(os.path.dirname(fpOut))
    logging.info('PlotLogPasses._processFile(): Starting on {:s}'.format(fpIn))
    # Read LIS file and create index
    myFi = File.FileRead(fpIn, theFileId=fpIn, keepGoing=self._keepGoing)
    try:
        myIdx = FileIndexer.FileIndex(myFi)
    except ExceptionTotalDepthLIS as err:
        logging.error('Can not create index for "{:s}", error: {:s}'.format(fpIn, str(err)))
        return
    # Iterate through the PlotRecordSet objects
    for lpIdx, aPrs in enumerate(myIdx.genPlotRecords()):
        if len(self._lgFormatS) == 0:
            # Use internal FILM/PRES plotting specification
            self._plotUsingLISLogicalRecords(myFi, lpIdx, aPrs, fpOut)
        else:
            self._plotUsingLgFormats(myFi, lpIdx, aPrs, fpOut)
        # myPlot, myLogPass, myCONSRecS = self._retPlotFromPlotRecordSet(myFi, aPrs)
        # for aFilmId in myPlot.filmIdS():
        #     logging.info('PlotLogPasses._processFile(): FILM ID={:s}.'.format(aFilmId.pStr(strip=True)))
        #     if myPlot.hasDataToPlotLIS(myLogPass, aFilmId):
        #         myOutFilePath = '{:s}_{:04d}_{:s}.svg'.format(fpOut, lpIdx, aFilmId.pStr(strip=True))
        #         myFout = open(myOutFilePath, 'w')
        #         myCurvIDs, numPoints = myPlot.plotLogPassLIS(myFi,
        #             myLogPass,
        #             myLogPass.xAxisFirstEngVal,
        #             myLogPass.xAxisLastEngVal,
        #             aFilmId,
        #             myFout,
        #             frameStep=1,
        #             title="Plot: {:s} LogPass: {:d} FILM ID={:s}".format(
        #                 os.path.abspath(myOutFilePath),
        #                 lpIdx,
        #                 aFilmId.pStr(strip=True),
        #             ),
        #             lrCONS=myCONSRecS,
        #         )
        #         assert(myCurvIDs is not None and numPoints is not None)
        #         # So here the essential data that we have to put in the index.html is:
        #         # Key: myOutFilePath or input file fp, lpIdx, aFilmId,
        #         # Value: (myPlot.xScale(aFilmId), myLogPass.xAxisFirstEngVal, myLogPass.xAxisLastEngVal, myCurvIDs)
        #         self.plotLogInfo.addPlotResult(
        #             fpIn,
        #             myOutFilePath,
        #             lpIdx,
        #             aFilmId.pStr(),
        #             myPlot.xScale(aFilmId),
        #             myLogPass.xAxisFirstEngVal,
        #             myLogPass.xAxisLastEngVal,
        #             theCurveS=myCurvIDs,
        #             ptsPlotted=numPoints)
        #     else:
        #         logging.info('PlotLogPasses._processFile(): No data to plot for FILM ID {:s}'.format(aFilmId))
    # Count the number of LogPasses, files etc.
    self.plotLogInfo.logPassCntr += myIdx.numLogPasses()
    self.plotLogInfo.lisFileCntr += 1
    logging.info('PlotLogPasses._processFile(): Done with {:s}'.format(fpIn))

def indexFile(fp, numTimes, verbose, keepGoing, convertJson):
    logging.info('Index.indexFile(): {:s}'.format(fp))
    assert os.path.isfile(fp)
    retIt = IndexTimer()
    try:
        myLenPickle = -1
        myLenJson = -1
        timeS = []
        for t in range(numTimes):
            clkStart = time.clock()
            myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
            try:
                myIdx = FileIndexer.FileIndex(myFi)
            except ExceptionTotalDepthLIS as err:
                logging.error('{:s}'.format(str(err)))
                continue
            timeS.append(time.clock() - clkStart)
            if verbose:
                print(myIdx.longDesc())
                print(' All records '.center(75, '='))
                for aLr in myIdx.genAll():
                    print(str(aLr))
                print(' All records DONE '.center(75, '='))
                print(' Log Passes '.center(75, '='))
                for aLp in myIdx.genLogPasses():
                    print('LogPass', aLp.logPass.longStr())
                    print()
                print(' Log Passes DONE '.center(75, '='))
                print(' Plot Records '.center(75, '='))
                for aPlotRec in myIdx.genPlotRecords():
                    print('Plot Record:', aPlotRec)
                    print()
                print(' Plot Records DONE '.center(75, '='))
            #print('CPU time = %8.3f (S)' % timeS[-1])
            if t == 0:
                pikBy = pickle.dumps(myIdx)
                #print('Pickled: file={:10d} size={:10d} {:8.3f}%'.format(
                #    os.path.getsize(fp),
                #    len(pikBy),
                #    len(pikBy)*100/os.path.getsize(fp)
                #    )
                #)
                myLenPickle = len(pikBy)
                #print('{:d}\t{:d}\t{:.3f} #Pickled'.format(os.path.getsize(fp), len(pikBy), len(pikBy)*100/os.path.getsize(fp)))
                if convertJson:
                    jsonObj = myIdx.jsonObject()
                    # pprint.pprint(jsonObj)
                    jsonBytes = json.dumps(jsonObj, sort_keys=True, indent=4)
                    myLenJson = len(jsonBytes)
                    if verbose:
                        print(' JSON [{:d}] '.format(myLenJson).center(75, '='))
                        print(jsonBytes)
                        print(' JSON DONE '.center(75, '='))
        if len(timeS) > 0:
            refTime = sum(timeS) / len(timeS)
            if verbose:
                print('   Min: {:.3f} (s)'.format(min(timeS)))
                print('   Max: {:.3f} (s)'.format(max(timeS)))
                print('  Mean: {:.3f} (s)'.format(refTime))
            if len(timeS) > 2:
                timeS = sorted(timeS)
                #print(timeS)
                refTime = timeS[((len(timeS) + 1) // 2) - 1]
                if verbose:
                    print('Median: {:.3f} (s)'.format(refTime))
            #print(os.path.getsize(fp), refTime)
            mySiz = os.path.getsize(fp)
            sizemb = mySiz / 2**20
            rate = refTime * 1000 / sizemb
            print(
                'File size: {:d} ({:.3f} MB) Reference Time: {:.6f} (s), rate {:.3f} ms/MB file: {:s} pickleLen={:d} jsonLen={:d}'.format(
                    mySiz, sizemb, refTime, rate, fp, myLenPickle, myLenJson,
                )
            )
            retIt.addSizeTime(mySiz, refTime)
    except ExceptionTotalDepthLIS as err:
        retIt.addErr()
        traceback.print_exc()
    return retIt

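# Illustrative driver, a sketch only: indexes every file in a directory with
# indexFile() above and collects the returned IndexTimer objects. The function
# name and directory path are placeholders; only the indexFile() signature and
# return value shown above are assumed.
def _exampleIndexDirectory(theDir='lis_data'):
    results = []
    for aName in sorted(os.listdir(theDir)):
        fp = os.path.join(theDir, aName)
        if os.path.isfile(fp):
            results.append(indexFile(fp, numTimes=3, verbose=False, keepGoing=True, convertJson=False))
    return results
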
def test_00(self):
    """TestFileIndexer.test_00(): Empty file."""
    myF = self._retFileFromBytes(b'')
    myIdx = FileIndexer.FileIndex(myF)

def test_01(self):
    """TestPlotRecordSet.test_01(): Construction."""
    myPrs = FileIndexer.PlotRecordSet()
    self.assertFalse(myPrs.canPlotFromInternalRecords())
    self.assertFalse(myPrs.canPlotFromExternalRecords())