def test_time_20(self):
    """TestRepCodeFrom68Time.test_time_20(): 1e5 word conversion from FileRead."""
    numWords = 1e5
    myWord = b'\x44\x4c\x80\x00'
    wordsInPr = int((PhysRec.PR_MAX_LENGTH - PhysRec.PR_PRH_LENGTH) / len(myWord))
    # Physical Record attribute bits:
    # Successor, no predecessor: 1
    # Predecessor, no successor: 2
    # Successor and predecessor: 3
    prContStart = (
        PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + len(myWord) * wordsInPr)
        + PhysRec.PR_PRH_ATTR_FORMAT.pack(1)
        + myWord * wordsInPr
    )  # Absent Physical Record trailer
    prContBody = (
        PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + len(myWord) * wordsInPr)
        + PhysRec.PR_PRH_ATTR_FORMAT.pack(3)
        + myWord * wordsInPr
    )  # Absent Physical Record trailer
    prContEnd = (
        PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + len(myWord) * wordsInPr)
        + PhysRec.PR_PRH_ATTR_FORMAT.pack(2)
        + myWord * wordsInPr
    )  # Absent Physical Record trailer
    # How many Physical Records are needed
    numPr = int(numWords / wordsInPr)
    numPrBody = numPr - 2
    assert numPrBody >= 0
    # Python implementation first
    myBy = io.BytesIO(prContStart + prContBody * numPrBody + prContEnd)
    myFile = File.FileRead(theFile=myBy, theFileId='MyFile', keepGoing=False)
    i = 0
    # time.clock() was removed in Python 3.8; time.perf_counter() is the portable replacement.
    tS = time.perf_counter()
    while i < wordsInPr * numPr:
        pRepCode.read68(myFile)
        i += 1
    tE_P = time.perf_counter() - tS
    self.assertFalse(myFile.hasLd())
    sys.stderr.write('Python: %.3f %8.0f words/S ' % (tE_P, numWords / tE_P))
    # Now the Cython implementation
    myBy = io.BytesIO(prContStart + prContBody * numPrBody + prContEnd)
    myFile = File.FileRead(theFile=myBy, theFileId='MyFile', keepGoing=False)
    i = 0
    tS = time.perf_counter()
    while i < wordsInPr * numPr:
        RepCode.read68(myFile)
        i += 1
    tE_C = time.perf_counter() - tS
    self.assertFalse(myFile.hasLd())
    sys.stderr.write('Cython: %.3f %8.0f words/S ' % (tE_C, numWords / tE_C))
    sys.stderr.write('%.1f%% (x%.1f) ' % (100.0 * (tE_C / tE_P), tE_P / tE_C))
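# A minimal sketch of the Physical Record chaining that test_time_20() builds by
# hand: the attribute word marks whether a record has a successor (1), a
# predecessor (2), or both (3), so one logical record can span several Physical
# Records without a trailer. The helper name and the usage lines are
# illustrative assumptions; only the PhysRec constants already used in the test
# above are relied on.
def sketch_chained_physical_records(word, words_per_pr, num_pr):
    """Return a bytes stream of num_pr Physical Records, each carrying word repeatedly."""
    def one_pr(attributes):
        # The length field covers the header plus the payload; no trailer is written.
        return (PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + len(word) * words_per_pr)
                + PhysRec.PR_PRH_ATTR_FORMAT.pack(attributes)
                + word * words_per_pr)
    assert num_pr >= 2
    # First record: successor only. Middle records: both. Last record: predecessor only.
    return one_pr(1) + one_pr(3) * (num_pr - 2) + one_pr(2)


# Hypothetical usage, mirroring the test above:
# stream = io.BytesIO(sketch_chained_physical_records(b'\x44\x4c\x80\x00', 16, 4))
# lis_file = File.FileRead(theFile=stream, theFileId='Sketch', keepGoing=False)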
def setup(self):
    channels = []
    for i in range(63):
        ch_name = '{:02d}'.format(i).encode('ascii')
        channels.append(
            b'CH' + ch_name + b'ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
            + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        )
    b = (
        # Logical record header for DFSR
        bytes([64, 0])
        # Entry block 4, value 0
        + bytes([4, 1, 66, 0])
        # Entry block 12, value -153.0
        + bytes([12, 4, 68]) + b'\xbb\xb3\x80\x00'
        # Entry block 0, value None terminates read
        + bytes([0, 1, 66, 0])
        # Sensor 0
        # Mnemonic
        + b'DEPTServIDServOrdNFEET' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b''.join(channels)
    )
    file_obj = write_logical_data_to_physical_records([b])
    self.file_read = File.FileRead(theFile=file_obj, theFileId='MyFile', keepGoing=True)
def setup(self):
    logical_bytes = (
        # Type 129
        b'\x81\x00'
        # File name, 6.3 format
        + b'RUNOne.lis'
        # Two blanks
        + b'\x00\x00'
        # Service sub-level name
        + b'SubLev'
        # Version number
        + b'Vers num'
        # Date
        + b'78/03/15'
        # One blank
        + b'\x00'
        # Max Physical Record length
        + b' 1024'
        # Two blanks
        + b'\x00\x00'
        # File Type
        + b'\x41\x42'
        # Two blanks
        + b'\x00\x00'
        # Previous file name
        + b'Prev name.'
    )
    file_bytes = write_logical_data_to_physical_records([
        logical_bytes,
    ])
    self.file_read = File.FileRead(theFile=file_bytes, theFileId='MyFile', keepGoing=True)
def setup(self):
    b = (
        # Logical record header for DFSR
        bytes([64, 0])
        # Entry block 4, value 0
        + bytes([4, 1, 66, 0])
        # Entry block 12, value -153.0
        + bytes([12, 4, 68]) + b'\xbb\xb3\x80\x00'
        # Entry block 0, value None terminates read
        + bytes([0, 1, 66, 0])
        # Sensor 0
        # Mnemonic
        + b'DEPTServIDServOrdNFEET' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH01ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH02ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH03ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH04ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH05ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH06ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
        + b'CH07ServIDServOrdNNDIM' + b'\x02\xb3\x60\x3b'
        + bytes([1, 0, 0, 4, 0, 0, 0, 1, 68, 0, 1, 2, 3, 4])
    )
    file_obj = write_logical_data_to_physical_records([b])
    self.file_read = File.FileRead(theFile=file_obj, theFileId='MyFile', keepGoing=True)
def processFile(f, tests, keepGoing, resultMap):
    retVal = 0
    try:
        logging.info('File: {:s} size: {:d}'.format(f, os.path.getsize(f)))
        myFi = File.FileRead(f, theFileId=f, keepGoing=keepGoing)
        #a = r'W:\LISTestData\logPassStd256MB.lis'
        #myFi = File.FileRead(a, theFileId=a)
        # time.clock() was removed in Python 3.8; use time.perf_counter().
        clkStart = time.perf_counter()
        myIdx = FileIndexer.FileIndex(myFi)
        #print(myIdx.longDesc())
        print('Index time: {:.3f}'.format(time.perf_counter() - clkStart))
    except Exception as err:
        logging.error(str(err))
        traceback.print_exc()
    else:
        #print('resultMap', resultMap)
        #print('tests', tests)
        for t in tests:
            #print('t', t)
            resultMap[t][f] = []
        for aLpi in myIdx.genLogPasses():
            for t in tests:
                try:
                    myR = TEST_TYPE[t][0](myFi, aLpi.logPass)
                except Exception as err:
                    logging.error(str(err))
                    traceback.print_exc()
                else:
                    retVal = 1
                    if myR is not None:
                        resultMap[t][f].append(myR)
    return retVal
def _retStdFile(self, f, prLen=PhysRec.PR_MAX_LENGTH):
    return File.FileWrite(
        theFile=self._retFilePath(f),
        theFileId=self._retFilePath(f),
        keepGoing=False,
        hasTif=True,
        thePrLen=prLen,
        thePrt=PhysRec.PhysRecTail(hasRecNum=True, fileNum=255, hasCheckSum=True),
    )
def setup(self, arg):
    b = bytearray()
    b.append(arg)
    b.append(0)
    file_obj = write_logical_data_to_physical_records([bytes(b)])
    self.file_read = File.FileRead(theFile=file_obj, theFileId='MyFile', keepGoing=True)
    self.read_class = self.class_map[arg]
def _retLisFileAndIndex(self, fpIn):
    """Returns a LisFile.LisFile() and a FileIndexer.FileIndex() from fpIn.
    May raise an ExceptionTotalDepthLIS."""
    assert os.path.isfile(fpIn)
    logging.info('ProcLISPathBase._retLisFileAndIndex(): Reading LIS file {:s}'.format(fpIn))
    myFi = File.FileRead(fpIn, theFileId=fpIn, keepGoing=self._keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    return myFi, myIdx
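# A minimal standalone sketch of the read-then-index pattern that
# _retLisFileAndIndex() wraps, using only calls that appear in this section
# (File.FileRead, FileIndexer.FileIndex, genLogPasses). The import path and the
# example file name are assumptions based on the TotalDepth package layout, not
# taken from the original source.
from TotalDepth.LIS.core import File, FileIndexer

def sketch_read_and_index(path, keep_going=True):
    """Open a LIS file, index it and report the frame count of each Log Pass."""
    lis_file = File.FileRead(path, theFileId=path, keepGoing=keep_going)
    lis_index = FileIndexer.FileIndex(lis_file)
    for log_pass_entry in lis_index.genLogPasses():
        # totalFrames is used the same way in dumpFrameSets() below.
        print('Log Pass with {:d} frames'.format(log_pass_entry.logPass.totalFrames))
    return lis_file, lis_index


# Hypothetical usage:
# lis_file, lis_index = sketch_read_and_index('example.lis')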
def _retFileFromListOfLogicalRecords(self, theLrS, theId='MyFile', flagKg=False):
    """Given a list of bytes objects that represent Logical Records, this
    returns a LIS File object."""
    b = bytearray()
    for lr in theLrS:
        b.extend(self.retPrS(lr))
    return File.FileRead(theFile=io.BytesIO(b), theFileId=theId, keepGoing=flagKg)
def dumpFrameSets(fp, keepGoing, summaryOnly):
    logging.info('dumpFrameSets(): {:s}'.format(fp))
    assert os.path.isfile(fp)
    myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    for aLp in myIdx.genLogPasses():
        print(aLp)
        # Load the FrameSet
        if aLp.logPass.totalFrames == 0:
            print('No frames to load.')
        else:
            aLp.logPass.setFrameSet(myFi, None, None)
            myFrSet = aLp.logPass.frameSet
            if not summaryOnly:
                # Print the channels and units
                hdrS = []
                if myFrSet.isIndirectX:
                    hdrS.append('XAXIS [{:s}]'.format(myFrSet.xAxisDecl.depthUnits))
                hdrS.extend(['{:s} [{:s}]'.format(m, u) for m, u in aLp.logPass.genFrameSetHeadings()])
                #print('TRACE: len(hdrS)', len(hdrS))
                print('\t'.join(hdrS))
                for frIdx in range(myFrSet.numFrames):
                    #print('TRACE: len(frame)', len(myFrSet.frame(frIdx)))
                    if myFrSet.isIndirectX:
                        print(myFrSet.xAxisValue(frIdx), '\t', end='')
                    print('\t'.join(['%g' % v for v in myFrSet.frame(frIdx)]))
            # Accumulate min/mean/max
            myAccClasses = [
                FrameSet.AccCount,
                FrameSet.AccMin,
                FrameSet.AccMean,
                FrameSet.AccMax,
                FrameSet.AccStDev,
                FrameSet.AccDec,
                FrameSet.AccEq,
                FrameSet.AccInc,
                FrameSet.AccBias,
                FrameSet.AccDrift,
                FrameSet.AccActivity,
            ]
            myAcc = myFrSet.accumulate(myAccClasses)
            print()
            fmtStr = '{:12s} ' + (' {:>12s}' * len(myAccClasses))
            print(fmtStr.format(
                'Sc Name',
                'Count', 'Min', 'Mean', 'Max', 'Std Dev.',
                '--', '==', '++', 'Bias', 'Drift', 'Activity',
            ))
            schNameS = list(aLp.logPass.genFrameSetScNameUnit())
            # print(schNameS)
            for scIdx, aRow in enumerate(myAcc):
                print('{:4s} [{:4s}]'.format(*schNameS[scIdx]), ' ',
                      ' '.join(['{:12.5g}'.format(v) for v in aRow]))
def _processFile(fp, keepGoing, tabMtch, theCntr):
    assert os.path.isfile(fp)
    logging.info('PlotLogPasses._processFile(): {:s}'.format(fp))
    retVal = False
    try:
        myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
        myIdx = FileIndexer.FileIndex(myFi)
    except ExceptionTotalDepthLIS as err:
        logging.error('Can not read LIS file {:s} with error: {:s}'.format(fp, str(err)))
    else:
        # print(' Index longDesc() '.center(75, '='))
        # print(myIdx.longDesc())
        # print(' Index longDesc() DONE '.center(75, '='))
        # Iterate through the FileIndexer object
        for anIo in myIdx.genAll():
            # print('anIdxObj:', anIo)
            if (anIo.lrType in LogiRec.LR_TYPE_TABLE_DATA
                    and tabMtch.lrTypeMatch(anIo.lrType)
                    and tabMtch.nameTableMatch(anIo.name)):
                # Read the whole table Logical Record
                myFi.seekLr(anIo.tell)
                try:
                    myLrTable = LogiRec.LrTableRead(myFi)
                except Exception as err:
                    logging.error('Can not create Logical Record, error: {:s}'.format(str(err)))
                else:
                    # print('myLrTable', myLrTable)
                    for aRow in myLrTable.genRows():
                        theCntr.incRow(anIo.lrType, anIo.name, aRow.value)
                        if tabMtch.nameRowMatch(aRow.value):
                            for aCell in aRow.genCells():
                                theCntr.incCol(anIo.lrType, anIo.name, aCell.mnem)
                                if tabMtch.nameColMatch(aCell.mnem):
                                    theCntr.incAll(
                                        tabMtch,
                                        anIo.lrType,
                                        anIo.name,
                                        aRow.value,
                                        aCell.mnem,
                                        aCell.engVal.value,
                                    )
                                    # if aCell.mnem == b'TYPE' and aCell.engVal.value == b'CONS':
                                    #     retVal = True
    return retVal
def test_12__(self):
    """TestRepCodeFrom68.test_12__(): read68(0xBBB38000) -> -153.0."""
    myBy = io.BytesIO(
        PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + 4)
        + PhysRec.PR_PRH_ATTR_FORMAT.pack(0)
        + b'\xbb\xb3\x80\x00'
        # Absent Physical Record trailer
    )
    myFile = File.FileRead(theFile=myBy, theFileId='MyFile', keepGoing=True)
    self.assertEqual(RepCode.read68(myFile), -153.0)
    self.assertFalse(myFile.hasLd())
def test_11_p(self):
    """TestRepCodeFrom68Python.test_11_p(): read68(0x444C8000) -> 153.0 Python."""
    myBy = io.BytesIO(
        PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + 4)
        + PhysRec.PR_PRH_ATTR_FORMAT.pack(0)
        + b'\x44\x4c\x80\x00'
        # Absent Physical Record trailer
    )
    myFile = File.FileRead(theFile=myBy, theFileId='MyFile', keepGoing=True)
    self.assertEqual(pRepCode.read68(myFile), 153.0)
    self.assertFalse(myFile.hasLd())
def test_12_c(self):
    """TestRepCodeFrom68Cython.test_12_c(): read68(0xBBB38000) -> -153.0 Cython."""
    myBy = io.BytesIO(
        PhysRec.PR_PRH_LEN_FORMAT.pack(PhysRec.PR_PRH_LENGTH + 4)
        + PhysRec.PR_PRH_ATTR_FORMAT.pack(0)
        + b'\xbb\xb3\x80\x00'
        # Absent Physical Record trailer
    )
    myFile = File.FileRead(theFile=myBy, theFileId='MyFile', keepGoing=True)
    # self.assertEqual(cRepCode.read68(myFile), -153.0)
    try:
        assert cRepCode.read68(myFile) == -153.0
        self.fail('AttributeError not raised.')
    except AttributeError:
        pass
    self.assertFalse(myFile.hasLd())
def scanFile(fp, isVerbose, keepGoing, theS=sys.stdout):
    try:
        myFile = File.FileRead(fp, fp, keepGoing)
    except File.ExceptionFile as err:
        print('Can not open file, error: %s' % str(err))
        return
    myFactory = LogiRec.LrFactoryRead()
    while not myFile.isEOF:
        myTellLr = myFile.tellLr()
        try:
            myLr = myFactory.retLrFromFile(myFile)
            if myLr is not None:
                print('0x{:08x}'.format(myTellLr), str(myLr))
                if isVerbose:
                    dumpLr(myLr)
        except LogiRec.ExceptionLr as err:
            pass
            #logging.error('LR at 0x{:08x}: {:s}'.format(myTellLr, err))
        myFile.skipToNextLr()
def setup(self):
    logical_bytes = (
        # Type 133
        b'\x85\x00'
        # Service name
        + b'SERVCE'
        # Six blanks
        + b'\x00\x00\x00\x00\x00\x00'
        # Date
        + b'79/06/15'
        # Two blanks
        + b'\x00\x00'
        # Origin
        + b'ORGN'
        # Two blanks
        + b'\x00\x00'
        # Reel name
        + b'REELNAME'
        # Two blanks
        + b'\x00\x00'
        # Reel continuation number
        + b'01'
        # Two blanks
        + b'\x00\x00'
        # Previous reel name
        + b'NextName'
        # Two blanks
        + b'\x00\x00'
        # Comments, 74 characters
        + b'_123456789_123456789_123456789_123456789_123456789_123456789_123456789_123'
    )
    file_bytes = write_logical_data_to_physical_records([
        logical_bytes,
    ])
    self.file_read = File.FileRead(theFile=file_bytes, theFileId='MyFile', keepGoing=True)
def setup(self):
    myT = LogiRec.LrTableWrite(
        34,
        b'PRES',
        (b'MNEM', b'OUTP', b'STAT', b'TRAC', b'CODI', b'DEST', b'MODE', b'FILT', b'LEDG', b'REDG'),
        (
            (b'40  ', b'TEST', b'ALLO', b'T1  ', b'LLIN', b'2   ', b'SHIF', 0.5, (-40.0, b'MV  '), (40.0, b'MV  ')),
            (b'20  ', b'TEST', b'ALLO', b'T2  ', b'HDAS', b'2   ', b'SHIF', 0.5, (-20.0, b'MV  '), (20.0, b'MV  ')),
            (b'10  ', b'TEST', b'ALLO', b'T3  ', b'LGAP', b'2   ', b'WRAP', 0.5, (-10.0, b'MV  '), (10.0, b'MV  ')),
            (b'5   ', b'TEST', b'ALLO', b'T2  ', b'HSPO', b'2   ', b'WRAP', 0.5, (-5.0, b'MV  '), (5.0, b'MV  ')),
            (b'2.5 ', b'TEST', b'ALLO', b'T3  ', b'LSPO', b'2   ', b'WRAP', 0.5, (-2.5, b'MV  '), (2.5, b'MV  ')),
        ),
    )
    ba = bytearray([34, 0])
    for b in myT.genLisBytes():
        ba += b
    file_obj = write_logical_data_to_physical_records([bytes(ba)])
    self.file_read = File.FileRead(theFile=file_obj, theFileId='MyFile', keepGoing=True)
def dumpFrameSets(fp, keepGoing, summaryOnly, channels):
    """Dump the frame values to stdout.
    keepGoing is a bool.
    summaryOnly is a bool; if True only a summary is emitted, otherwise all the
    data and the summary are written out.
    channels is a set of Mnems; if non-empty then only these channels, where
    present, are written out."""
    logging.info('dumpFrameSets(): {:s}'.format(fp))
    assert os.path.isfile(fp)
    myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    for aLp in myIdx.genLogPasses():
        print(aLp)
        # Load the FrameSet
        if aLp.logPass.totalFrames == 0:
            print('No frames to load.')
        else:
            aLp.logPass.setFrameSet(myFi, None, None)
            myFrSet = aLp.logPass.frameSet
            if not summaryOnly:
                # Print the channels and units
                hdrS = []
                if myFrSet.isIndirectX:
                    hdrS.append('XAXIS [{!r:s}]'.format(myFrSet.xAxisDecl.depthUnits))
                indexes = []
                if len(channels):
                    for i, (m, u) in enumerate(aLp.logPass.genFrameSetHeadings()):
                        if m in channels:
                            hdrS.append('{!r:s} [{!r:s}]'.format(m, u))
                            indexes.append(i)
                else:
                    hdrS.extend([
                        '{!r:s} [{!r:s}]'.format(m, u)
                        for m, u in aLp.logPass.genFrameSetHeadings()
                    ])
                if len(indexes) != len(channels):
                    logging.warning(
                        'Some channels you specified can not be found: indexes={!r:s} channels={!r:s}'.format(
                            indexes, channels))
                #print('TRACE: len(hdrS)', len(hdrS))
                print('\t'.join(hdrS))
                for frIdx in range(myFrSet.numFrames):
                    #print('TRACE: len(frame)', len(myFrSet.frame(frIdx)))
                    if myFrSet.isIndirectX:
                        print(myFrSet.xAxisValue(frIdx), '\t', end='')
                    if len(indexes):
                        values = [myFrSet.frame(frIdx)[i] for i in indexes]
                        print('\t'.join(['%g' % v for v in values]))
                    else:
                        print('\t'.join(['%g' % v for v in myFrSet.frame(frIdx)]))
            # Accumulate min/mean/max
            myAccClasses = [
                FrameSet.AccCount,
                FrameSet.AccMin,
                FrameSet.AccMean,
                FrameSet.AccMax,
                FrameSet.AccStDev,
                FrameSet.AccDec,
                FrameSet.AccEq,
                FrameSet.AccInc,
                FrameSet.AccBias,
                FrameSet.AccDrift,
                FrameSet.AccActivity,
            ]
            myAcc = myFrSet.accumulate(myAccClasses)
            print()
            fmtStr = '{:12s} ' + (' {:>12s}' * len(myAccClasses))
            print(fmtStr.format(
                'Sc Name',
                'Count', 'Min', 'Mean', 'Max', 'Std Dev.',
                '--', '==', '++', 'Bias', 'Drift', 'Activity',
            ))
            schNameS = list(aLp.logPass.genFrameSetScNameUnit())
            # print(schNameS)
            for scIdx, aRow in enumerate(myAcc):
                print('{:4s} [{:4s}]'.format(*schNameS[scIdx]), ' ',
                      ' '.join(['{:12.5g}'.format(v) for v in aRow]))
def indexFile(fp, numTimes, verbose, keepGoing, convertJson):
    logging.info('Index.indexFile(): {:s}'.format(fp))
    assert os.path.isfile(fp)
    retIt = IndexTimer()
    try:
        myLenPickle = -1
        myLenJson = -1
        timeS = []
        for t in range(numTimes):
            # time.clock() was removed in Python 3.8; use time.perf_counter().
            clkStart = time.perf_counter()
            myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
            try:
                myIdx = FileIndexer.FileIndex(myFi)
            except ExceptionTotalDepthLIS as err:
                logging.error('{:s}'.format(str(err)))
                continue
            timeS.append(time.perf_counter() - clkStart)
            if verbose:
                print(myIdx.longDesc())
                print(' All records '.center(75, '='))
                for aLr in myIdx.genAll():
                    print(str(aLr))
                print(' All records DONE '.center(75, '='))
                print(' Log Passes '.center(75, '='))
                for aLp in myIdx.genLogPasses():
                    print('LogPass', aLp.logPass.longStr())
                    print()
                print(' Log Passes DONE '.center(75, '='))
                print(' Plot Records '.center(75, '='))
                for aPlotRec in myIdx.genPlotRecords():
                    print('Plot Record:', aPlotRec)
                    print()
                print(' Plot Records DONE '.center(75, '='))
            #print('CPU time = %8.3f (S)' % timeS[-1])
            if t == 0:
                pikBy = pickle.dumps(myIdx)
                #print('Pickled: file={:10d} size={:10d} {:8.3f}%'.format(
                #    os.path.getsize(fp),
                #    len(pikBy),
                #    len(pikBy)*100/os.path.getsize(fp)
                #))
                myLenPickle = len(pikBy)
                #print('{:d}\t{:d}\t{:.3f} #Pickled'.format(os.path.getsize(fp), len(pikBy), len(pikBy)*100/os.path.getsize(fp)))
                if convertJson:
                    jsonObj = myIdx.jsonObject()
                    # pprint.pprint(jsonObj)
                    jsonBytes = json.dumps(jsonObj, sort_keys=True, indent=4)
                    myLenJson = len(jsonBytes)
                    if verbose:
                        print(' JSON [{:d}] '.format(myLenJson).center(75, '='))
                        print(jsonBytes)
                        print(' JSON DONE '.center(75, '='))
        if len(timeS) > 0:
            refTime = sum(timeS) / len(timeS)
            if verbose:
                print('   Min: {:.3f} (s)'.format(min(timeS)))
                print('   Max: {:.3f} (s)'.format(max(timeS)))
                print('  Mean: {:.3f} (s)'.format(refTime))
            if len(timeS) > 2:
                timeS = sorted(timeS)
                #print(timeS)
                refTime = timeS[((len(timeS) + 1) // 2) - 1]
                if verbose:
                    print('Median: {:.3f} (s)'.format(refTime))
            #print(os.path.getsize(fp), refTime)
            mySiz = os.path.getsize(fp)
            sizemb = mySiz / 2**20
            rate = refTime * 1000 / sizemb
            print(
                'File size: {:d} ({:.3f} MB) Reference Time: {:.6f} (s), rate {:.3f} ms/MB file: {:s} pickleLen={:d} jsonLen={:d}'.format(
                    mySiz, sizemb, refTime, rate, fp, myLenPickle, myLenJson,
                )
            )
            retIt.addSizeTime(mySiz, refTime)
    except ExceptionTotalDepthLIS as err:
        retIt.addErr()
        traceback.print_exc()
    return retIt
def _processFile(self, fpIn, fpOut):
    assert os.path.isfile(fpIn)
    assert os.path.exists(os.path.dirname(fpOut))
    logging.info('PlotLogPasses._processFile(): Starting on {:s}'.format(fpIn))
    # Read the LIS file and create an index
    myFi = File.FileRead(fpIn, theFileId=fpIn, keepGoing=self._keepGoing)
    try:
        myIdx = FileIndexer.FileIndex(myFi)
    except ExceptionTotalDepthLIS as err:
        logging.error('Can not create index for "{:s}", error: {:s}'.format(fpIn, str(err)))
        return
    # Iterate through the PlotRecordSet objects
    for lpIdx, aPrs in enumerate(myIdx.genPlotRecords()):
        if len(self._lgFormatS) == 0:
            # Use internal FILM/PRES plotting specification
            self._plotUsingLISLogicalRecords(myFi, lpIdx, aPrs, fpOut)
        else:
            self._plotUsingLgFormats(myFi, lpIdx, aPrs, fpOut)
        # myPlot, myLogPass, myCONSRecS = self._retPlotFromPlotRecordSet(myFi, aPrs)
        # for aFilmId in myPlot.filmIdS():
        #     logging.info('PlotLogPasses._processFile(): FILM ID={:s}.'.format(aFilmId.pStr(strip=True)))
        #     if myPlot.hasDataToPlotLIS(myLogPass, aFilmId):
        #         myOutFilePath = '{:s}_{:04d}_{:s}.svg'.format(fpOut, lpIdx, aFilmId.pStr(strip=True))
        #         myFout = open(myOutFilePath, 'w')
        #         myCurvIDs, numPoints = myPlot.plotLogPassLIS(
        #             myFi,
        #             myLogPass,
        #             myLogPass.xAxisFirstEngVal,
        #             myLogPass.xAxisLastEngVal,
        #             aFilmId,
        #             myFout,
        #             frameStep=1,
        #             title="Plot: {:s} LogPass: {:d} FILM ID={:s}".format(
        #                 os.path.abspath(myOutFilePath),
        #                 lpIdx,
        #                 aFilmId.pStr(strip=True),
        #             ),
        #             lrCONS=myCONSRecS,
        #         )
        #         assert(myCurvIDs is not None and numPoints is not None)
        #         # So here the essential data that we have to put in the index.html is:
        #         # Key: myOutFilePath or input file fp, lpIdx, aFilmId,
        #         # Value: (myPlot.xScale(aFilmId), myLogPass.xAxisFirstEngVal, myLogPass.xAxisLastEngVal, myCurvIDs)
        #         self.plotLogInfo.addPlotResult(
        #             fpIn,
        #             myOutFilePath,
        #             lpIdx,
        #             aFilmId.pStr(),
        #             myPlot.xScale(aFilmId),
        #             myLogPass.xAxisFirstEngVal,
        #             myLogPass.xAxisLastEngVal,
        #             theCurveS=myCurvIDs,
        #             ptsPlotted=numPoints)
        #     else:
        #         logging.info('PlotLogPasses._processFile(): No data to plot for FILM ID {:s}'.format(aFilmId))
    # Count the number of LogPasses, files etc.
    self.plotLogInfo.logPassCntr += myIdx.numLogPasses()
    self.plotLogInfo.lisFileCntr += 1
    logging.info('PlotLogPasses._processFile(): Done with {:s}'.format(fpIn))
def _retFileFromBytes(self, theB, theId='MyFile', flagKg=False):
    """Returns the bytes object wrapped as a LIS File object."""
    return File.FileRead(theFile=io.BytesIO(theB), theFileId=theId, keepGoing=flagKg)
def _retFilePrS(self, theB, prLen=1024):
    """Given a bytes() object, this returns a file with it encapsulated in a
    single Physical Record."""
    myBy = io.BytesIO(self.retPrS(theB))
    return File.FileRead(theFile=myBy, theFileId='MyFile', keepGoing=True)
def setup(self, arg):
    b = bytes([LogiRec.LR_TYPE_BLANK_RECORD, 0]) + b' ' * arg
    file_obj = write_logical_data_to_physical_records([b])
    self.file_read = File.FileRead(theFile=file_obj, theFileId='MyFile', keepGoing=True)
def setup(self):
    b = (
        # Logical record header for DFSR
        bytes([64, 0])
        # Entry block 4, value 0
        + bytes([4, 1, 66, 0])
        # Entry block 12, value -153.0
        + bytes([12, 4, 68]) + b'\xbb\xb3\x80\x00'
        # Entry block 0, value None terminates read
        + bytes([0, 1, 66, 0])
        # Sensor 0
        # Mnemonic
        + b'DEPT'
        # Service ID
        + b'ServID'
        # Service order number
        + b'ServOrdN'
        # Units
        + b'FEET'
        # API codes 45, 310, 01, 1
        # Decimal 45310011 is 0x02b3603b
        + b'\x02\xb3\x60\x3b'
        # File number: 256
        + bytes([1, 0])
        # LIS size in bytes: 4 bytes
        + bytes([0, 4])
        # Padding '0'
        + b'000'
        # Samples: 1 super sample
        + b'\x01'
        # Representation code
        + bytes([68])
        # Process indicators
        + bytes([0, 1, 2, 3, 4])
        # Sensor 1
        # Mnemonic
        + b'GR  '
        # Service ID
        + b'ServID'
        # Service order number
        + b'ServOrdN'
        # Units
        + b'GAPI'
        # API codes 45, 310, 01, 1
        # Decimal 45310011 is 0x02b3603b
        + b'\x02\xb3\x60\x3b'
        # File number: 256
        + bytes([1, 0])
        # LIS size in bytes: 4 samples * 6 burst samples * 4 bytes = 96 bytes
        + bytes([0, 96])
        # Padding '0'
        + b'000'
        # Samples: 4 super samples
        + bytes([4])
        # Representation code
        + bytes([68])
        # Process indicators
        + bytes([0, 1, 2, 3, 4])
    )
    file_obj = write_logical_data_to_physical_records([b])
    self.file_read = File.FileRead(theFile=file_obj, theFileId='MyFile', keepGoing=True)
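# A minimal sketch that assembles one DFSR sensor (datum spec) block from the
# fields commented in the setup() above. The field order and widths follow that
# byte layout exactly; the helper itself, its defaults and the usage lines are
# illustrative assumptions, not part of the original test module.
def sketch_dfsr_sensor_block(mnemonic, units, lis_size, samples, rep_code=68):
    """Return the bytes for a single DFSR sensor entry as laid out above."""
    assert len(mnemonic) == 4 and len(units) == 4
    return (
        mnemonic                       # Mnemonic, 4 bytes
        + b'ServID'                    # Service ID, 6 bytes
        + b'ServOrdN'                  # Service order number, 8 bytes
        + units                        # Units, 4 bytes
        + b'\x02\xb3\x60\x3b'          # API codes 45, 310, 01, 1
        + bytes([1, 0])                # File number: 256
        + lis_size.to_bytes(2, 'big')  # LIS size in bytes
        + b'000'                       # Padding
        + bytes([samples])             # Super samples per frame
        + bytes([rep_code])            # Representation code
        + bytes([0, 1, 2, 3, 4])       # Process indicators
    )


# Hypothetical usage, reproducing the two sensor blocks built above:
# depth_block = sketch_dfsr_sensor_block(b'DEPT', b'FEET', 4, 1)
# gamma_block = sketch_dfsr_sensor_block(b'GR  ', b'GAPI', 96, 4)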