Example #1
def processFile(f, tests, keepGoing, resultMap):
    retVal = 0
    try:
        logging.info('File: {:s} size: {:d}'.format(f, os.path.getsize(f)))
        myFi = File.FileRead(f, theFileId=f, keepGoing=keepGoing)
        #a = r'W:\LISTestData\logPassStd256MB.lis'
        #myFi = File.FileRead(a, theFileId=a)
        clkStart = time.perf_counter()
        myIdx = FileIndexer.FileIndex(myFi)
        #print(myIdx.longDesc())
        print('Index time: {:.3f}'.format(time.perf_counter() - clkStart))
    except Exception as err:
        logging.error(str(err))
        traceback.print_exc()
    else:
        #print('resultMap', resultMap)
        #print('tests', tests)
        for t in tests:
            #print('t', t)
            resultMap[t][f] = []
        for aLpi in myIdx.genLogPasses():
            for t in tests:
                try:
                    myR = TEST_TYPE[t][0](myFi, aLpi.logPass)
                except Exception as err:
                    logging.error(str(err))
                    traceback.print_exc()
                else:
                    retVal = 1
                    if myR is not None:
                        resultMap[t][f].append(myR)
    return retVal
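The try/except/else structure above keeps file-read and index failures separate from per-test failures: the tests only run once the index has been built, and the function returns 1 only if at least one test callable completed on an indexed LogPass. A minimal, hypothetical driver for processFile(), assuming only what the code implies (TEST_TYPE maps a test name to a tuple whose first element is a callable taking the file and a LogPass):

def runTests(filePaths, tests, keepGoing=True):
    # One sub-dict per test, keyed by file path; processFile() fills in the result lists.
    resultMap = {t: {} for t in tests}
    numOk = 0
    for fp in filePaths:
        # processFile() returns 1 if at least one test completed on an indexed LogPass.
        numOk += processFile(fp, tests, keepGoing, resultMap)
    return numOk, resultMap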
Example #2
 def test_02(self):
     """TestIndexEflrHeadTail.test_02(): Reel head and tail."""
     myFile = self._retFileFromBytes(self._retReelHead()+self._retReelTail())
     myIdx = FileIndexer.FileIndex(myFile)
     #print()
     #print(myIdx._idx)
     self.assertEqual(2, len(myIdx))
     self.assertEqual([132, 133], myIdx.lrTypeS)
Example #3
 def test_01(self):
     """TestIndexEflrHeadTail.test_01(): Tape head and tail."""
     myFile = self._retFileFromBytes(self._retTapeHead()+self._retTapeTail())
     myIdx = FileIndexer.FileIndex(myFile)
     #print()
     #print(myIdx._idx)
     self.assertEqual(2, len(myIdx))
     self.assertEqual([130, 131], myIdx.lrTypeS)
Example #4
 def _retLisFileAndIndex(self, fpIn):
     """Returns a LisFile.LisFile() and a FileIndexer.FileIndex() from fpIn.
     May raise an ExceptionTotalDepthLIS."""
     assert (os.path.isfile(fpIn))
     logging.info(
         'ProcLISPathBase._retLisFileAndIndex(): Reading LIS file {:s}'.
         format(fpIn))
     myFi = File.FileRead(fpIn, theFileId=fpIn, keepGoing=self._keepGoing)
     myIdx = FileIndexer.FileIndex(myFi)
     return myFi, myIdx
 def test_00(self):
     """TestIndexMarker.test_00(): All marker records."""
     myFile = self._retFileFromBytes(
         self._retSinglePr(b'\x89\x00') + self._retSinglePr(b'\x8A\x00') +
         self._retSinglePr(b'\x8B\x00') + self._retSinglePr(b'\x8D\x00'))
     myIdx = FileIndexer.FileIndex(myFile)
     #print()
     #print(myIdx._idx)
     self.assertEqual(4, len(myIdx))
     #print(myIdx.lrTypeS)
     self.assertEqual([137, 138, 139, 141], myIdx.lrTypeS)
def dumpFrameSets(fp, keepGoing, summaryOnly):
    logging.info('dumpFrameSets(): {:s}'.format(fp))
    assert(os.path.isfile(fp))
    myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    for aLp in myIdx.genLogPasses():
        print(aLp)
        # Load the FrameSet
        if aLp.logPass.totalFrames == 0:
            print('No frames to load.')
        else:
            aLp.logPass.setFrameSet(myFi, None, None)
            myFrSet = aLp.logPass.frameSet
            if not summaryOnly:
                # Print the channels and units
                hdrS = []
                if myFrSet.isIndirectX:
                    hdrS.append('XAXIS [{!r:s}]'.format(myFrSet.xAxisDecl.depthUnits))
                hdrS.extend(['{!r:s} [{!r:s}]'.format(m, u) for m,u in aLp.logPass.genFrameSetHeadings()])
                #print('TRACE: len(hdrS)', len(hdrS))
                print('\t'.join(hdrS))
                for frIdx in range(myFrSet.numFrames):
                    #print('TRACE: len(frame)', len(myFrSet.frame(frIdx)))
                    if myFrSet.isIndirectX:
                        print(myFrSet.xAxisValue(frIdx), '\t', end='')
                    print('\t'.join(['%g' % v for v in myFrSet.frame(frIdx)]))
            # Accumulate min/mean/max
            myAccClasses = [
                    FrameSet.AccCount,
                    FrameSet.AccMin,
                    FrameSet.AccMean,
                    FrameSet.AccMax,
                    FrameSet.AccStDev,
                    FrameSet.AccDec,
                    FrameSet.AccEq,
                    FrameSet.AccInc,
                    FrameSet.AccBias,
                    FrameSet.AccDrift,
                    FrameSet.AccActivity,
            ]
            myAcc = myFrSet.accumulate(myAccClasses)
            print()
            fmtStr = '{:12s} ' + (' {:>12s}'*len(myAccClasses)) 
            print(fmtStr.format(
                    'Sc Name', 'Count', 'Min', 'Mean', 'Max', 'Std Dev.', '--', '==', '++', 'Bias', 'Drift', 'Activity',
                )
            )
            schNameS = list(aLp.logPass.genFrameSetScNameUnit())
#            print(schNameS)
            for scIdx, aRow in enumerate(myAcc):
                print('{:4s} [{:4s}]'.format(*schNameS[scIdx]),
                      ' ',
                      ' '.join(['{:12.5g}'.format(v) for v in aRow]))
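The accumulate() call above returns one row of statistics per sub-channel, with the columns in the same order as the accumulator classes and the rows in the same order as genFrameSetScNameUnit(), which is what the final printing loop relies on. A short sketch of that pairing in isolation, assuming only the behaviour implied above:

def printMinMeanMax(theFrameSet, theLogPass):
    # One statistics row per sub-channel; columns follow the accumulator class order.
    myAcc = theFrameSet.accumulate([FrameSet.AccMin, FrameSet.AccMean, FrameSet.AccMax])
    for (name, units), row in zip(theLogPass.genFrameSetScNameUnit(), myAcc):
        print('{!s:8s} [{!s:8s}] min={:12.5g} mean={:12.5g} max={:12.5g}'.format(name, units, *row))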
 def _retFileIndexSingleChannel(self):
     myBa = bytearray(self._retFileHead())
     # Add a log pass
     myLp = self._retLogPassGen()
     # Stick the DFSR on the array
     myBa += self.retPrS(myLp.lrBytesDFSR())
     # Add some logical records
     for i in range(4):
         myBa += self.retPrS(myLp.lrBytes(i * 100, 100))
     myBa += self._retFileTail()
     myFile = self._retFileFromBytes(myBa)
     myIdx = FileIndexer.FileIndex(myFile)
     return myFile, myIdx
Example #8
 def test_00(self):
     """TestIndexUnknownIntFormat.test_00(): Logical Records of unknown internal format, random lengths."""
     #print()
     #print(LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT)
     myB = b''
     for aType in LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT:
         myB += self._retSinglePr(self._retLrRandom(aType))
     myFile = self._retFileFromBytes(myB)
     myIdx = FileIndexer.FileIndex(myFile)
     #print(myIdx._idx)
     self.assertEqual(len(LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT), len(myIdx))
     #print(myIdx.lrTypeS)
     self.assertEqual(list(LogiRec.LR_TYPE_UNKNOWN_INTERNAL_FORMAT), myIdx.lrTypeS)
def _processFile(fp, keepGoing, tabMtch, theCntr):
    assert (os.path.isfile(fp))
    logging.info('PlotLogPasses._processFile(): {:s}'.format(fp))
    try:
        myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
        myIdx = FileIndexer.FileIndex(myFi)
    except ExceptionTotalDepthLIS as err:
        logging.error('Cannot read LIS file {:s} with error: {:s}'.format(
            fp, str(err)))
    else:
        #        print(' Index longDesc() '.center(75, '='))
        #        print(myIdx.longDesc())
        #        print(' Index longDesc() DONE '.center(75, '='))
        # Iterate through the FileIndexer object
        retVal = False
        for anIo in myIdx.genAll():
            #            print('anIdxObj:', anIo)
            if anIo.lrType in LogiRec.LR_TYPE_TABLE_DATA \
            and tabMtch.lrTypeMatch(anIo.lrType) \
            and tabMtch.nameTableMatch(anIo.name):
                # Read the whole table logical record
                myFi.seekLr(anIo.tell)
                try:
                    myLrTable = LogiRec.LrTableRead(myFi)
                except Exception as err:
                    logging.error(
                        'Cannot create Logical Record, error: {:s}'.format(
                            str(err)))
                else:
                    #                    print('myLrTable', myLrTable)
                    for aRow in myLrTable.genRows():
                        theCntr.incRow(anIo.lrType, anIo.name, aRow.value)
                        if tabMtch.nameRowMatch(aRow.value):
                            for aCell in aRow.genCells():
                                theCntr.incCol(anIo.lrType, anIo.name,
                                               aCell.mnem)
                                if tabMtch.nameColMatch(aCell.mnem):
                                    theCntr.incAll(
                                        tabMtch,
                                        anIo.lrType,
                                        anIo.name,
                                        aRow.value,
                                        aCell.mnem,
                                        aCell.engVal.value,
                                    )


#                                    if aCell.mnem == b'TYPE' and aCell.engVal.value == b'CONS':
#                                        retVal = True
        return retVal
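theCntr is not defined in this example; the code above only requires it to provide incRow(), incCol() and incAll(). A minimal, hypothetical counter satisfying that implied interface:

import collections

class TableCounter:
    """A minimal, hypothetical counter providing the incRow()/incCol()/incAll()
    interface that _processFile() expects of theCntr."""
    def __init__(self):
        self._rows = collections.Counter()
        self._cols = collections.Counter()
        self._cells = collections.Counter()
    def incRow(self, lrType, tableName, rowName):
        self._rows[(lrType, tableName, rowName)] += 1
    def incCol(self, lrType, tableName, colMnem):
        self._cols[(lrType, tableName, colMnem)] += 1
    def incAll(self, tabMtch, lrType, tableName, rowName, colMnem, value):
        self._cells[(lrType, tableName, rowName, colMnem, value)] += 1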
 def test_03(self):
     """TestIndexEflrHeadTail.test_03(): Reel/Tape/File head and tail."""
     myFile = self._retFileFromBytes(self._retReelHead() +
                                     self._retTapeHead() +
                                     self._retFileHead() +
                                     self._retFileTail() +
                                     self._retTapeTail() +
                                     self._retReelTail())
     myIdx = FileIndexer.FileIndex(myFile)
     #print()
     #print(myIdx._idx)
     self.assertEqual(6, len(myIdx))
     #print(myIdx.lrTypeS)
     self.assertEqual([132, 130, 128, 129, 131, 133], myIdx.lrTypeS)
Example #11
def dumpFrameSets(fp, keepGoing, summaryOnly, channels):
    """Dump the frame values to stdout.

    keepGoing is a bool.
    summaryOnly is a bool; if True emit a summary only, if False write out all the data and the summary.
    channels is a set of Mnems; if non-empty then only these channels, if present, are written out."""
    logging.info('dumpFrameSets(): {:s}'.format(fp))
    assert (os.path.isfile(fp))
    myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
    myIdx = FileIndexer.FileIndex(myFi)
    for aLp in myIdx.genLogPasses():
        print(aLp)
        # Load the FrameSet
        if aLp.logPass.totalFrames == 0:
            print('No frames to load.')
        else:
            aLp.logPass.setFrameSet(myFi, None, None)
            myFrSet = aLp.logPass.frameSet
            if not summaryOnly:
                # Print the channels and units
                hdrS = []
                if myFrSet.isIndirectX:
                    hdrS.append('XAXIS [{!r:s}]'.format(
                        myFrSet.xAxisDecl.depthUnits))
                indexes = []
                if len(channels):
                    for i, (m,
                            u) in enumerate(aLp.logPass.genFrameSetHeadings()):
                        if m in channels:
                            hdrS.append('{!r:s} [{!r:s}]'.format(m, u))
                            indexes.append(i)
                else:
                    hdrS.extend([
                        '{!r:s} [{!r:s}]'.format(m, u)
                        for m, u in aLp.logPass.genFrameSetHeadings()
                    ])
                if len(channels) and len(indexes) != len(channels):
                    logging.warning(
                        'Some channels you specified cannot be found: indexes={!r:s} channels={!r:s}'
                        .format(indexes, channels))
                #print('TRACE: len(hdrS)', len(hdrS))
                print('\t'.join(hdrS))
                for frIdx in range(myFrSet.numFrames):
                    #print('TRACE: len(frame)', len(myFrSet.frame(frIdx)))
                    if myFrSet.isIndirectX:
                        print(myFrSet.xAxisValue(frIdx), '\t', end='')
                    if len(indexes):
                        values = [myFrSet.frame(frIdx)[i] for i in indexes]
                        print('\t'.join(['%g' % v for v in values]))
                    else:
                        print('\t'.join(
                            ['%g' % v for v in myFrSet.frame(frIdx)]))
            # Accumulate min/mean/max
            myAccClasses = [
                FrameSet.AccCount,
                FrameSet.AccMin,
                FrameSet.AccMean,
                FrameSet.AccMax,
                FrameSet.AccStDev,
                FrameSet.AccDec,
                FrameSet.AccEq,
                FrameSet.AccInc,
                FrameSet.AccBias,
                FrameSet.AccDrift,
                FrameSet.AccActivity,
            ]
            myAcc = myFrSet.accumulate(myAccClasses)
            print()
            fmtStr = '{:12s} ' + (' {:>12s}' * len(myAccClasses))
            print(
                fmtStr.format(
                    'Sc Name',
                    'Count',
                    'Min',
                    'Mean',
                    'Max',
                    'Std Dev.',
                    '--',
                    '==',
                    '++',
                    'Bias',
                    'Drift',
                    'Activity',
                ))
            schNameS = list(aLp.logPass.genFrameSetScNameUnit())
            #            print(schNameS)
            for scIdx, aRow in enumerate(myAcc):
                print('{:4s} [{:4s}]'.format(*schNameS[scIdx]), ' ',
                      ' '.join(['{:12.5g}'.format(v) for v in aRow]))
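A hypothetical call restricting the dump to named channels. The elements of channels must compare equal to the channel mnemonics yielded by genFrameSetHeadings() (a set of Mnems per the docstring); the literal file path and channel set below are for illustration only:

# Dump data and summary for one file, keeping going over recoverable errors,
# restricted to whichever of the requested channels are actually present.
someChannels = set()   # e.g. the Mnem objects for b'GR  ' and b'CALI', if using TotalDepth's Mnem type
dumpFrameSets('example.lis', keepGoing=True, summaryOnly=False, channels=someChannels)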
Example #12
    def _processFile(self, fpIn, fpOut):
        assert (os.path.isfile(fpIn))
        assert (os.path.exists(os.path.dirname(fpOut)))
        logging.info(
            'PlotLogPasses._processFile(): Starting on {:s}'.format(fpIn))
        # Read LIS file and create index
        myFi = File.FileRead(fpIn, theFileId=fpIn, keepGoing=self._keepGoing)
        try:
            myIdx = FileIndexer.FileIndex(myFi)
        except ExceptionTotalDepthLIS as err:
            logging.error(
                'Cannot create index for "{:s}", error: {:s}'.format(
                    fpIn, str(err)))
            return
        # Iterate through the PlotRecordSet objects
        for lpIdx, aPrs in enumerate(myIdx.genPlotRecords()):
            if len(self._lgFormatS) == 0:
                # Use internal FILM/PRES plotting specification
                self._plotUsingLISLogicalRecords(myFi, lpIdx, aPrs, fpOut)
            else:
                self._plotUsingLgFormats(myFi, lpIdx, aPrs, fpOut)

#            myPlot, myLogPass, myCONSRecS = self._retPlotFromPlotRecordSet(myFi, aPrs)
#            for aFilmId in myPlot.filmIdS():
#                logging.info('PlotLogPasses._processFile(): FILM ID={:s}.'.format(aFilmId.pStr(strip=True)))
#                if myPlot.hasDataToPlotLIS(myLogPass, aFilmId):
#                    myOutFilePath = '{:s}_{:04d}_{:s}.svg'.format(fpOut, lpIdx, aFilmId.pStr(strip=True))
#                    myFout = open(myOutFilePath, 'w')
#                    myCurvIDs, numPoints = myPlot.plotLogPassLIS(myFi,
#                            myLogPass,
#                            myLogPass.xAxisFirstEngVal,
#                            myLogPass.xAxisLastEngVal,
#                            aFilmId,
#                            myFout,
#                            frameStep=1,
#                            title="Plot: {:s} LogPass: {:d} FILM ID={:s}".format(
#                                os.path.abspath(myOutFilePath),
#                                lpIdx,
#                                aFilmId.pStr(strip=True),
#                            ),
#                            lrCONS=myCONSRecS,
#                        )
#                    assert(myCurvIDs is not None and numPoints is not None)
#                    # So here the essential data that we have to put in the index.html is:
#                    # Key: myOutFilePath or input file fp, lpIdx, aFilmId,
#                    # Value: (myPlot.xScale(aFilmId), myLogPass.xAxisFirstEngVal, myLogPass.xAxisLastEngVal, myCurvIDs)
#                    self.plotLogInfo.addPlotResult(
#                        fpIn,
#                        myOutFilePath,
#                        lpIdx,
#                        aFilmId.pStr(),
#                        myPlot.xScale(aFilmId),
#                        myLogPass.xAxisFirstEngVal,
#                        myLogPass.xAxisLastEngVal,
#                        theCurveS=myCurvIDs,
#                        ptsPlotted=numPoints)
#                else:
#                    logging.info('PlotLogPasses._processFile(): No data to plot for FILM ID {:s}'.format(aFilmId))

# Count the number of LogPasses, files etc.
        self.plotLogInfo.logPassCntr += myIdx.numLogPasses()
        self.plotLogInfo.lisFileCntr += 1
        logging.info(
            'PlotLogPasses._processFile(): Done with {:s}'.format(fpIn))
 def test_00(self):
     """TestFileIndexer.test_00(): Empty file."""
     myF = self._retFileFromBytes(b'')
     myIdx = FileIndexer.FileIndex(myF)
Example #14
def indexFile(fp, numTimes, verbose, keepGoing, convertJson):
    logging.info('Index.indexFile(): {:s}'.format(fp))
    assert(os.path.isfile(fp))
    retIt = IndexTimer()
    try:
        myLenPickle = -1
        myLenJson = -1
        timeS = []
        for t in range(numTimes):
            clkStart = time.perf_counter()
            myFi = File.FileRead(fp, theFileId=fp, keepGoing=keepGoing)
            try:
                myIdx = FileIndexer.FileIndex(myFi)
            except ExceptionTotalDepthLIS as err:
                logging.error('{:s}'.format(str(err)))
                continue
            timeS.append(time.perf_counter() - clkStart)
            if verbose:
                print(myIdx.longDesc())
                print(' All records '.center(75, '='))
                for aLr in myIdx.genAll():
                    print(str(aLr))
                print(' All records DONE '.center(75, '='))
                print(' Log Passes '.center(75, '='))
                for aLp in myIdx.genLogPasses():
                    print('LogPass', aLp.logPass.longStr())
                    print()
                print(' Log Passes DONE '.center(75, '='))
                print(' Plot Records '.center(75, '='))
                for aPlotRec in myIdx.genPlotRecords():
                    print('Plot Record:', aPlotRec)
                    print()
                print(' Plot Records DONE '.center(75, '='))
            #print('CPU time = %8.3f (S)' % timeS[-1])
            if t == 0:
                pikBy = pickle.dumps(myIdx)
                #print('Pickled: file={:10d} size={:10d} {:8.3f}%'.format(
                #    os.path.getsize(fp),
                #    len(pikBy),
                #    len(pikBy)*100/os.path.getsize(fp)
                #    )
                #)
                myLenPickle = len(pikBy)
                #print('{:d}\t{:d}\t{:.3f} #Pickled'.format(os.path.getsize(fp), len(pikBy), len(pikBy)*100/os.path.getsize(fp)))
                if convertJson:
                    jsonObj = myIdx.jsonObject()
                    # pprint.pprint(jsonObj)
                    jsonBytes = json.dumps(jsonObj, sort_keys=True, indent=4)
                    myLenJson = len(jsonBytes)
                    if verbose:
                        print(' JSON [{:d}] '.format(myLenJson).center(75, '='))
                        print(jsonBytes)
                        print(' JSON DONE '.center(75, '='))
        if len(timeS) > 0:
            refTime = sum(timeS)/len(timeS)
            if verbose:
                print('   Min: {:.3f} (s)'.format(min(timeS)))
                print('   Max: {:.3f} (s)'.format(max(timeS)))
                print('  Mean: {:.3f} (s)'.format(refTime))
            if len(timeS) > 2:
                timeS = sorted(timeS)
                #print(timeS)
                refTime = timeS[((len(timeS)+1)//2)-1]
                if verbose:
                    print('Median: {:.3f} (s)'.format(refTime))
            #print(os.path.getsize(fp), refTime)
            mySiz = os.path.getsize(fp)
            sizemb = mySiz / 2**20
            rate = refTime * 1000 / sizemb
            print('File size: {:d} ({:.3f} MB) Reference Time: {:.6f} (s), rate {:.3f} ms/MB file: {:s} pickleLen={:d} jsonLen={:d}'.format(
                    mySiz,
                    sizemb,
                    refTime,
                    rate,
                    fp,
                    myLenPickle,
                    myLenJson,
                )
            )
            retIt.addSizeTime(mySiz, refTime)
    except ExceptionTotalDepthLIS as err:
        retIt.addErr()
        traceback.print_exc()
    return retIt
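IndexTimer is not shown in this example; indexFile() only calls addSizeTime() and addErr() on it. A minimal, hypothetical stand-in consistent with those calls, plus a helper that reproduces the ms/MB rate computed above:

class IndexTimer:
    """A minimal, hypothetical stand-in for the IndexTimer used by indexFile():
    it only needs addSizeTime() and addErr(), as the calls above imply."""
    def __init__(self):
        self._sizeTime = []   # (bytes, seconds) pairs recorded by addSizeTime()
        self.errCount = 0
    def addSizeTime(self, size, seconds):
        self._sizeTime.append((size, seconds))
    def addErr(self):
        self.errCount += 1
    def msPerMb(self):
        # Overall rate in milliseconds per megabyte across all timed files,
        # mirroring rate = refTime * 1000 / (size / 2**20) in indexFile().
        totalSize = sum(s for s, _ in self._sizeTime)
        totalTime = sum(t for _, t in self._sizeTime)
        return totalTime * 1000.0 / (totalSize / 2**20) if totalSize else 0.0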