Example #1
def CheckSingleConversions(sepObj, forceObj, zsnsrObj, deflObj):
    """
    Tests that certain common single conversions work

    Args:
        sepObj : Wave Data Object associated with separation
        forceObj : Wave Data Object associated with force
        zsnsrObj : Wave Data Object associated with zsnsr
        deflObj : Wave Data Object associated with deflection

    Returns:
        None
    """
    deflVObj = GetDeflVObj(deflObj)
    ZsnsrConv, DeflVConv = CypherUtil.ConvertSepForceToZsnsrDeflV(
        sepObj, forceObj)
    # check the converted ones match the cypher-converted ones
    assert ArrClose(ZsnsrConv, zsnsrObj.DataY)
    assert ArrClose(DeflVConv, deflVObj.DataY)
    # check "reverse": {ZSnsr,DeflV} to {Sep,Force}
    # note we have converted deflObj to deflV, above
    SepConv, ForceConv = CypherUtil.ConvertZsnsrDeflVToSepForce(
        zsnsrObj, deflVObj)
    # check the converted ones match the cypher-converted ones
    sepDat = sepObj.DataY
    forceDat = forceObj.DataY
    assert ArrClose(SepConv, sepDat)
    assert ArrClose(ForceConv, forceDat)
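
# ArrClose is used throughout these examples but never shown; the following is
# only a plausible sketch (an assumption, not the repository's implementation),
# treating it as a thin wrapper around numpy.allclose with illustrative
# tolerances.
import numpy as np

def ArrClose(a, b, rtol=1e-6, atol=0):
    """Return True if the two array-likes agree element-wise within tolerance."""
    return np.allclose(a, b, rtol=rtol, atol=atol)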
Example #2
def ChekcWaveGroupMatches(mGroup, GroupNote, NoteExpected, Time, Sep, Force):
    """
    Tests that the given wave group has the given note and data

    Args:
        mGroup:  dictionary of <name>:<WaveObj>, with Sep and Force
        GroupNote: note of the group
        NoteExpected: Expected note
        Time: Time we should have (array)
        Sep: Separation we should have (array)
        Force: Force we should have (array)

    Returns:
        None
    """
    # make sure the force and sep are as we expect
    forceRead = mGroup['force']
    sepRead = mGroup['sep']
    assert ArrClose(Sep, sepRead.DataY), "Separation Incorrect"
    assert ArrClose(Force, forceRead.DataY), "Force Incorrect"
    # construct the time
    timeRead = forceRead.GetXArray()
    assert ArrClose(Time, timeRead), "Time Incorrect"
    # POST: data saved OK. how about the notes?
    assert (ProcessSingleWave.NotesEqual(GroupNote,NoteExpected)) ,\
        "Notes not saved"
Example #3
def AssertMetaCorrect(ids, AssociatedWaveData, mSqlObj):
    """
    Tests that the TraceMeta table is consistent with what we wanted.

    Args:
        ids: Namespace (i.e. argparse.Namespace(**idDict)) with the desired
            ids for the table
      
        AssociatedWaveData: The associated waves we want to push

        mSqlObj: the sql object, having the session etc.

    Returns:
        None
    """
    mCls, sess = SqlDataModel.GetClassesAndSess(mSqlObj)
    # get the trace meta table
    metaRow = sess.query(mCls.TraceMeta).filter(
        mCls.TraceMeta.idTraceMeta == ids.idTraceMeta).all()
    assert (len(metaRow) == 1), "TraceMeta not pushed"
    # POST: exactly one row
    thisRow = metaRow[0]
    # Pick the first note for our meta
    # XXX change to more general?
    MetaNote = list(AssociatedWaveData.values())[0].Note
    mCols = [str(c).split(".")[1] for c in mCls.TraceMeta.__table__.columns]
    expected = TraceMetaConverter.ConvertToTableObj(mCls, MetaNote,
                                                    ids.__dict__)
    expectObj = mCls.TraceMeta(**expected)
    for c in mCols:
        SqlVal = getattr(thisRow, c)
        Expect = getattr(expectObj, c)
        if (Expect is None):
            Expect = getattr(ids, c)
        # POST: have the property. try comparing as a numeric first
        try:
            Sql = float(SqlVal)
            Expect = float(Expect)
            # XXX TODO: lower (relative) tolerance, since Invols truncates?
            assert ArrClose(Sql,Expect) , \
                "Property {:s} doesn't match, {:.7g} vs {:.7g}".\
                format(c,Expect,Sql)
        except (TypeError, ValueError) as e:
            # string or datetime
            assert SqlVal == Expect ,\
                "Property {:s} doesn't match, {:s} vs {:s}".format(c,
                                                                   str(SqlVal),
                                                                   str(Expect))
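
# The try/except above implements a "numeric first, then exact" comparison:
# anything float() accepts is compared with a tolerance, anything it rejects
# (strings, datetimes, None) falls back to plain equality. A generic
# restatement of that pattern (a sketch, not the repository's code):
def _values_match(sql_val, expected, rtol=1e-6, atol=1e-12):
    try:
        a, b = float(sql_val), float(expected)
        return abs(a - b) <= atol + rtol * abs(b)
    except (TypeError, ValueError):
        return sql_val == expected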
Example #4
def AssertParamsCorrect(ids, ParamVals, ParamMeta, mSqlObj):
    """
    Tests that the parameter values used are consistent. Specifically:

    (0) Are the parameter values and indices correct, in terms of ids and values?
    (1) Are the parameter linkers correct, in terms of ids and values?
    (2) Is the correct number of parameters present?

    Args:
        ids: Namespace (i.e. argparse.Namespace(**idDict)) with the desired
            ids for the table
      
        ParamVals: the actual ParamData object, with the x,y, and index 

        ParamMeta: The (ParamVals-Sorted) meta information for each parameter

        mSqlObj: the sql object, having the session etc.

    Returns:
        None
    """
    # determine the ids for the meta parameters, order by parameter number
    mCls, sess = SqlDataModel.GetClassesAndSess(mSqlObj)
    mParams = sess.query(mCls.LinkModelParams).\
              filter(mCls.LinkModelParams.idModel ==ids.idModel).all()
    nParams = len(mParams)
    assert nParams == len(ParamMeta) ,\
        "Wrong number of parameters found [{:d} vs {:d}]".format(nParams,
                                                                 len(ParamMeta))
    # POST: correct number of parameters. Are they the correct IDs?
    idSql = [p.idParamMeta for p in mParams]
    idMeta = ParamMeta.GetSqlParamIdsModelId(ids.idModel, mSqlObj)
    assert set(idSql) == set(idMeta) ,\
        "Inserted parameters don't match expected"
    # POST: we are all referring to the same parameters.
    # See if we actually inserted anything for this model
    linkParams = sess.query(mCls.LinkTraceParam).\
            filter(mCls.LinkTraceParam.idTraceModel ==ids.idTraceModel).all()
    # exactly the number of parameters we added.
    nParamVals = len(ParamVals)
    nLinks = len(linkParams)
    assert(nLinks == nParamVals) ,\
        "Wrong number of parameters for trace model [{:d} vs {:d}].".\
        format(nParamVals,nLinks)
    # POST: some number of parameters. Get their ids
    idParamVals = [v.idParameterValue for v in linkParams]
    sqlOrderedByNumber = SqlDataModel.\
            GetSqlParamsOrderedByNumber(mSqlObj,ids.idTraceModel,idMeta)
    for pSql, pExpected in zip(sqlOrderedByNumber, ParamVals):
        # check the indices and data value (assumed to be x value!) match
        sqlIdx = float(pSql.DataIndex)
        expIdx = pExpected.index
        # XXX TODO: assert indices are integral?
        assert ArrClose(sqlIdx, int(sqlIdx)), "Index should be integral"
        assert ArrClose(sqlIdx,expIdx), \
            "Sql/Expect indices {:.1f}/{:.1f} don't match".format(sqlIdx,expIdx)
        # POST: indices match. how about x data?
        sqlDataVal = float(pSql.DataValues)
        expDataVal = float(pExpected.x)
        assert ArrClose(sqlDataVal, expDataVal) , \
            "Data values don't match ({:.7g},{:.7g})".format(expDataVal,
                                                             sqlDataVal)
        # POST: they match. check we have the appropriate linker to the model
        linkTab = mCls.LinkTraceParam
        linkResult = sess.query(linkTab).\
            filter(linkTab.idParameterValue ==pSql.idParameterValue).all()
        assert (len(linkResult) == 1) ,\
            "Expect exactly one link between a parameter and a model"
        # POST: exactly and only one link
        mLink = linkResult[0]
        assert (mLink.idTraceModel == ids.idTraceModel) ,\
            "Wrong trace model associated with linker vable"
Example #5
def run():
    """
    Tests that saving and loading of a pxp file is idempotent and preserves the
    'normal' data and meta data

    Args:
        None
    Returns:
        None
    """
    # don't raise anything on an error; we will handle not being able to
    # connect to jila
    demoName = "IgorPythonReader"
    localPath = "./LocalData/"
    inDir, outDir = IgorUtil.DemoJilaOrLocal(demoName, localPath)
    inFileMultiple = inDir + "ParseWaveTest.pxp"
    inFileSingle = inDir + "SingleWaveTest.pxp"
    # read in the 'multiple wave' file:
    mWaves = PxpLoader.LoadAllWavesFromPxp(inFileMultiple)
    n = len(mWaves)
    # Save them out as hdf5 files
    BinaryHDF5Io.MultiThreadedSave(mWaves, outDir)
    # Read them back; make sure we have the same thing. Note we assume the
    # initial read went OK (see the igor.py project)
    FileNames = [outDir + BinaryHDF5Io.GetFileSaveName(w) for w in mWaves]
    reRead = [BinaryHDF5Io.LoadHdfFileIntoWaveObj(f) for f in FileNames]
    # make sure we get the same thing reading back as what we loaded.
    # note that this tests saving *and* loading.
    numMatches = sum(reRead[i] == mWaves[i] for i in range(n))
    assert numMatches == n, "File IO broken; waves saved or loaded improperly"
    # POST: saving and loading doesn't alter the original data
    # group them all
    mGrouping = PxpLoader.GroupWavesByEnding(mWaves)
    # loop through all the groupings and save them off
    ConcatData = []
    match = True
    for traceId, assocWaves in mGrouping.items():
        mArrs = assocWaves.values()
        tmpConcat = BinaryHDF5Io.ConcatenateWaves(mArrs)
        nEle = len(mArrs)
        for i in range(nEle):
            # skip the first element (time) in the concatenated data
            concatDat = tmpConcat.DataY[:, i + 1]
            originalDat = mArrs[i].DataY
            assert ArrClose(concatDat,originalDat) , \
                "Concatenated Data doesn't match."
        # check that the time is correct
        reference = mArrs[0]
        nY = reference.DataY.size
        time = np.linspace(0, nY, nY, endpoint=False) * reference.DeltaX()
        # first is time
        assert (nY == time.size) and ArrClose(tmpConcat.DataY[:,0],time) , \
            "Time doesn't match."
        # check all the times are consistent (transitive between the first column
        # of dataY, time, and tmpConcat.GetXArray())
        timeConcat = tmpConcat.GetXArray()
        assert (nY == timeConcat.size) and (ArrClose(timeConcat,time)) ,\
            "Time doesn't match"
        ConcatData.append(tmpConcat)
    # save the concatenated files. Since saving works (first test)
    # and concatenation works (second test), woo!
    BinaryHDF5Io.MultiThreadedSave(ConcatData, outDir)
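
# The time axis built above via np.linspace(0, nY, nY, endpoint=False) is just
# the sample index times DeltaX; np.arange(nY) yields the same values. A quick
# check with illustrative numbers:
import numpy as np

nY, dt = 5, 2e-3
t_linspace = np.linspace(0, nY, nY, endpoint=False) * dt
t_arange = np.arange(nY) * dt
assert np.allclose(t_linspace, t_arange)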
Example #6
def CheckSepForceConversions(sepObj, forceObj, zsnsrObj, deflObj):
    """
    Tests conversions to separation and force

    Args:
        See CheckSingleConversions

    Returns:
        None
    """
    deflVObj = GetDeflVObj(deflObj)
    # test a variety of dictionaries, ensure we can convert properly
    # for many different types
    mDicts = [
        # Separation and force (ie: idempotent)
        {
            'sep': sepObj,
            'force': forceObj
        },
        # A variety of 'normal' combinations
        {
            'sep': sepObj,
            'defl': deflObj
        },
        {
            'zsnsr': zsnsrObj,
            'deflV': deflVObj
        },
        {
            'sep': sepObj,
            'deflV': deflVObj
        },
        {
            'zsnsr': zsnsrObj,
            'defl': deflObj
        },
        # the entire kitchen sink
        {
            'zsnsr': zsnsrObj,
            'defl': deflObj,
            'sep': sepObj,
            'force': forceObj,
            'deflV': deflVObj
        },
        # capitalize the keys weirdly
        {
            'zSNsr': zsnsrObj,
            'deFL': deflObj,
            'sEP': sepObj,
            'foRCe': forceObj,
            'dEFlV': deflVObj
        }
    ]
    sepDat = sepObj.DataY
    forceDat = forceObj.DataY
    for d in mDicts:
        Sep, Force = CypherUtil.GetSepForce(d)
        ErrorMsg = "Combination of {:s} didn't work!".format(d.keys())
        assert ArrClose(Sep, sepDat), ErrorMsg
        assert ArrClose(Force, forceDat), ErrorMsg
    # POST: all combinations work. Check conversion of just force
    forceDicts = [{"deflv": deflVObj}, {"defl": deflObj}, {"force": forceObj}]
    for d in forceDicts:
        Force = CypherUtil.GetForce(d)
        ErrorMsg = "Combination of {} didn't work!".format(list(d.keys()))
        assert ArrClose(Force, forceDat), ErrorMsg
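
# The weirdly-capitalized dictionary above only passes if GetSepForce
# normalizes its keys; a minimal sketch of that kind of case-insensitive
# lookup (an assumption about the behavior, not CypherUtil's actual code):
def _lookup_channel(waves, name):
    lowered = {k.lower(): v for k, v in waves.items()}
    return lowered[name.lower()]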
Example #7
def CheckWaveGroupSave(sepObj, forceObj, zsnsrObj, deflObj, outDir):
    """
    Tests conversions and operations related to saving a WaveGroup

    Args:
        See CheckSingleConversions

    Returns:
        None
    """
    deflVObj = GetDeflVObj(deflObj)
    assocWaves = {
        'zSNsr': zsnsrObj,
        'deFL': deflObj,
        'sEP': sepObj,
        'foRCe': forceObj,
        'dEFlV': deflVObj
    }
    # note we initialize with a bunk source file. we just want to check
    # concatenation works
    WaveGroup = Model.WaveDataGroup(AssociatedWaves=assocWaves)
    Group = WaveGroup.CreateTimeSepForceWaveObject()
    Time = forceObj.GetXArray()
    Sep = sepObj.DataY
    Force = forceObj.DataY
    assert ArrClose(Time,Group.DataY[:,0]),\
        "Concatenated Time Incorrect"
    assert ArrClose(Sep,Group.DataY[:,1]) , \
        "Concatenated Separation Incorrect"
    assert ArrClose(Force,Group.DataY[:,2]) ,\
        "Concatenated Force Incorrect"
    # POST: creating the time-sep object works. now we need to test
    # saving the entire group as a group.
    mFile = BinaryHDF5Io.SaveWaveGroupAsTimeSepForceHDF5(outDir, WaveGroup)
    # read back in the file
    mGroup = BinaryHDF5Io.ReadWaveIntoWaveGroup(mFile)
    ExpectNote = Group.Note
    ActualNote = mGroup.Note()
    ChekcWaveGroupMatches(mGroup, ActualNote, ExpectNote, Time, Sep, Force)
    # POST: note is OK too.
    # add in 'high bandwidth' (interpolated) data
    # and make sure the reading goes fine for that as well.
    # nInterp is factor to interpolate by
    nInterp = 2
    deltaLow = Time[1] - Time[0]
    timeHighBW = np.arange(0, nInterp * Time.size, 1) * deltaLow / nInterp
    sepHighBW = np.interp(timeHighBW, Time, Sep)
    forceHighBW = np.interp(timeHighBW, Time, Force)
    # set the high bandwidth associated waves
    Note = mGroup.Note()
    AssocHighBW = {
        'sep': ProcessSingleWave.WaveObj(DataY=sepHighBW, Note=Note),
        'force': ProcessSingleWave.WaveObj(DataY=forceHighBW, Note=Note)
    }
    # make the wave think it is high res so the times work out
    # (note a real high-res wave will already know its deltaX)
    AssocHighBW['force'].Note["NumPtsPerSec"] *= nInterp
    AssocHighBW['sep'].Note["NumPtsPerSec"] *= nInterp
    ExpectedHiResNote = AssocHighBW['force'].Note
    mGroup.HighBandwidthSetAssociatedWaves(AssocHighBW)
    assert mGroup.HasHighBandwidth(), "Missing High Bandwidth"
    # get the force...
    forceHighBwTest = mGroup.HighBandwidthGetForce()
    assert ArrClose(forceHighBwTest, forceHighBW), "High force wrong"
    # POST: force looks OK. Time to save the thing and check it works!
    mFileHighBw = BinaryHDF5Io.SaveWaveGroupAsTimeSepForceHDF5(outDir, mGroup)
    # read the file back in ...
    mGroupHighBW = BinaryHDF5Io.ReadWaveIntoWaveGroup(mFileHighBw)
    # OK, check that the low-res stuff is the same
    ChekcWaveGroupMatches(mGroupHighBW, mGroupHighBW.Note(), ExpectNote, Time,
                          Sep, Force)
    # now check the high-res stuff
    GroupNote = mGroupHighBW.HighBandwidthWaves['force'].Note
    ChekcWaveGroupMatches(mGroupHighBW.HighBandwidthWaves, GroupNote,
                          ExpectedHiResNote, timeHighBW, sepHighBW,
                          forceHighBW)
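
# Quick sanity check of the high-bandwidth bookkeeping used above: the
# interpolated time base has nInterp times as many points, spaced
# deltaLow / nInterp apart (illustrative values only):
import numpy as np

Time = np.arange(4) * 1e-3
nInterp = 2
deltaLow = Time[1] - Time[0]
timeHighBW = np.arange(0, nInterp * Time.size, 1) * deltaLow / nInterp
assert timeHighBW.size == nInterp * Time.size
assert np.isclose(timeHighBW[1] - timeHighBW[0], deltaLow / nInterp)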