def HandleLoadDirectory(self):
    """
    Function to interactively (via a GUI window) handle a file load button
    click. Loads pxp files from a directory.

    Args:
        None
    Returns:
        None
    """
    fn = str(
        pg.QtGui.QFileDialog.getExistingDirectory(
            caption="Load from a directory", directory=""))
    if fn == '':
        return
    # load in all the files
    toLoad = PxpLoader.LoadPxpFilesFromDirectory(fn)
    # group them by ending
    byDir = PxpLoader.GroupWavesByEnding(toLoad)
    self.Model.AddNewWaves(byDir, fn)

def LoadPxpAndAddToModel(self, FileName):
    """
    Function to load a pxp file and add the associated waves to the model.

    Args:
        FileName: Full path to the file to load
    Returns:
        None
    """
    mWaves = PxpLoader.LoadPxp(FileName)
    # XXX call the model, which should update the view
    self.Model.AddNewWaves(mWaves, FileName)
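PxpLoader.GroupWavesByEnding is used above but not shown; a minimal sketch of the grouping idea, assuming wave names carry a suffix like "Image0010_force" (the naming rule and helper name are assumptions for illustration, not the library's actual contract):

from collections import defaultdict

def group_waves_by_ending(wave_dict):
    # Map {full_name: wave} to {base_id: {ending: wave}}, splitting on the
    # last underscore. PxpLoader.GroupWavesByEnding may use another rule.
    grouped = defaultdict(dict)
    for name, wave in wave_dict.items():
        base, _, ending = name.rpartition("_")
        grouped[base][ending.lower()] = wave
    return dict(grouped)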
Example #3
def run():
    """
    Tests that converting between data types
    (e.g. force <--> deflV or sep <--> zsnsr) and concatenating them works properly

    Dependent on PythonReader.py test to make sure the files can
    be loaded and saved properly

    Args:
        None

    Returns:
        None
    """

    inDir, outDir = IgorUtil.DemoJilaOrLocal("IgorPythonConvert",
                                             localPath="./LocalData")
    inFileMultiple = inDir + "ParseWaveTest.pxp"
    # read in the 'multiple wave' file:
    mWaves = PxpLoader.LoadAllWavesFromPxp(inFileMultiple)
    # group them all
    mGrouping = PxpLoader.GroupWavesByEnding(mWaves)
    requiredExt = ["sep", "zsnsr", "force", "defl"]
    for waveid, associatedWaves in mGrouping.items():
        missing = [ext for ext in requiredExt if ext not in associatedWaves]
        assert not missing, "Demo data flawed; not all needed extensions found"
        # POST: found all extensions
        # get all the 'ground truth' waves. Convert back and forth,
        # make sure everything matches..
        sepObj = associatedWaves["sep"]
        forceObj = associatedWaves["force"]
        zsnsrObj = associatedWaves["zsnsr"]
        deflObj = associatedWaves["defl"]
        # convert deflObj to deflV by multiplying by 1/invols..
        CheckSingleConversions(sepObj, forceObj, zsnsrObj, deflObj)
        CheckSepForceConversions(sepObj, forceObj, zsnsrObj, deflObj)
        CheckWaveGroupSave(sepObj, forceObj, zsnsrObj, deflObj, outDir)
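The Check* helpers above aren't shown; the round-trip idea behind CheckSingleConversions can be sketched with numpy alone. Assumptions: defl converts to deflV by dividing by invols (per the comment above), and ArrClose is essentially np.allclose; the helper name is hypothetical.

import numpy as np

def check_defl_roundtrip(defl, invols, tol=1e-9):
    # convert defl -> deflV -> defl and require the round trip to be lossless
    defl = np.asarray(defl, dtype=float)
    deflV = defl / invols
    recovered = deflV * invols
    assert np.allclose(defl, recovered, atol=tol), "Round trip changed the data"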
Example #4
def InteractivePxpLoad():
    """
    Function to handle a file load button click. Loads the waves in a pxp file.

    Args:
        None
    Returns:
        None
    """
    fn = str(
        pg.QtGui.QFileDialog.getOpenFileName(
            caption="Load an Igor File",
            directory="",
            filter="Packed Experiment Files (*.pxp)"))
    if fn == '':
        return
    mWaves = PxpLoader.LoadPxp(fn)
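One caveat worth noting: the str(...) cast above assumes a Qt4-style API in which getOpenFileName returns a single value. Under PyQt5/PySide2 bindings it returns a (fileName, selectedFilter) tuple, so a binding-agnostic helper might look like this sketch (a defensive pattern, not pyqtgraph's documented behavior):

import pyqtgraph as pg

def interactive_pxp_path():
    # returns the chosen .pxp path, or '' if the user cancelled
    ret = pg.QtGui.QFileDialog.getOpenFileName(
        caption="Load an Igor File",
        directory="",
        filter="Packed Experiment Files (*.pxp)")
    # PyQt5/PySide2 return (fileName, selectedFilter); PyQt4 returns a string
    return ret[0] if isinstance(ret, tuple) else str(ret)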
Example #5
def run():
    """
    Tests that the larger data manager is doing what it should 
    """
    # first, just check that the data is doing what it should in isolation,
    # regardless of the model
    FileToLoad = "./LocalData/ParseWaveTest.pxp"
    WaveDict = PxpLoader.LoadPxp(FileToLoad)
    mgr = LargeDataManager.LargeDataManager()
    f = foo()
    method = lambda x: f.method(x)
    mgr.AddData(WaveDict, method)
    for idV, WaveGroup in WaveDict.items():
        assert idV in mgr, "Id not added; keys: {}".format(list(mgr.keys()))
        # get the data as a wavedatagroup (time,sep,force)
        data = mgr[idV]
        # convert to time sep and force.
        expectedData = WaveDataGroup(WaveGroup)
        # check that the data and expected data have the same time,sep,force
        expectedData.EqualityTimeSepForce(data)
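LargeDataManager's interface is only implied by this test: AddData, membership, indexing, and keys(). A minimal in-memory sketch consistent with that usage (the real class presumably handles large data lazily; this illustrates the interface only):

class MinimalDataManager(object):
    def __init__(self):
        self._data = {}
        self._load = None

    def AddData(self, wave_dict, load_func):
        # store the raw waves and remember how to convert them on access
        self._load = load_func
        self._data.update(wave_dict)

    def __contains__(self, key):
        return key in self._data

    def __getitem__(self, key):
        # defer any heavy conversion to the supplied load function
        return self._load(self._data[key])

    def keys(self):
        return self._data.keys()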
Example #6
def run():
    """
    Tests that saving and loading a pxp file is idempotent and preserves the
    'normal' data and meta data

    Args:
        None
    Returns:
        None
    """
    # don't raise anything on an error; we will handle not being able to
    # connect to JILA
    demoName = "IgorPythonReader"
    localPath = "./LocalData/"
    inDir, outDir = IgorUtil.DemoJilaOrLocal(demoName, localPath)
    inFileMultiple = inDir + "ParseWaveTest.pxp"
    inFileSingle = inDir + "SingleWaveTest.pxp"
    # read in the 'multiple wave' file:
    mWaves = PxpLoader.LoadAllWavesFromPxp(inFileMultiple)
    n = len(mWaves)
    # Save them out as hdf5 files
    BinaryHDF5Io.MultiThreadedSave(mWaves, outDir)
    # Read them back; make sure we have the same thing. Note we assume the
    # initial read went OK (see the igor.py project)
    FileNames = [outDir + BinaryHDF5Io.GetFileSaveName(w) for w in mWaves]
    reRead = [BinaryHDF5Io.LoadHdfFileIntoWaveObj(f) for f in FileNames]
    # make sure we get the same thing reading back as what we loaded.
    # note that this tests saving *and* loading.
    numMatches = sum(reRead[i] == mWaves[i] for i in range(n))
    assert numMatches == n, "File IO broken; waves saved or loaded improperly"
    # POST: saving and loading doesn't alter the original data
    # group them all
    mGrouping = PxpLoader.GroupWavesByEnding(mWaves)
    # loop through all the groupings and save them off
    ConcatData = []
    match = True
    for traceId, assocWaves in mGrouping.items():
        mArrs = list(assocWaves.values())  # list() so we can index below
        tmpConcat = BinaryHDF5Io.ConcatenateWaves(mArrs)
        nEle = len(mArrs)
        for i in range(nEle):
            # skip the first column (time) in the concatenated data
            concatDat = tmpConcat.DataY[:, i + 1]
            originalDat = mArrs[i].DataY
            assert ArrClose(concatDat, originalDat), \
                "Concatenated data doesn't match."
        # check that the time is correct
        reference = mArrs[0]
        nY = reference.DataY.size
        time = np.linspace(0, nY, nY, endpoint=False) * reference.DeltaX()
        # first column is time
        assert (nY == time.size) and ArrClose(tmpConcat.DataY[:, 0], time), \
            "Time doesn't match."
        # check all the times are consistent (transitive between the first
        # column of DataY, time, and tmpConcat.GetXArray())
        timeConcat = tmpConcat.GetXArray()
        assert (nY == timeConcat.size) and ArrClose(timeConcat, time), \
            "Time doesn't match."
        ConcatData.append(tmpConcat)
    # save the concatenated files. Since saving works (first test)
    # and concatenation works (second test), woo!
    BinaryHDF5Io.MultiThreadedSave(ConcatData, outDir)
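The layout this test asserts can be summarized in a few lines: column 0 of the concatenated wave is a shared time base built from the first wave's DeltaX, and columns 1..n are the individual waves in order. A sketch of that layout, assuming wave objects with .DataY and .DeltaX() and equal lengths (mirroring the assertions above, not BinaryHDF5Io's actual implementation):

import numpy as np

def concatenate_waves_sketch(waves):
    # build the time column from the first wave's sampling interval
    nY = waves[0].DataY.size
    time = np.linspace(0, nY, nY, endpoint=False) * waves[0].DeltaX()
    # column 0 is time; columns 1..n are the waves, in order
    columns = [time] + [np.asarray(w.DataY) for w in waves]
    return np.column_stack(columns)  # shape (nY, len(waves) + 1)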
Example #7
def run():
    """
    Function that tests that the hookup to the database is fully working.
    Nukes the test (i.e. debug) schema and tries everything from scratch.
    Note that this depends on the Cypher converter and reader/writer being
    tested (see Demos:PythonReader)

    Tests:
    
    (1) Is binary data saved properly?
    (2) Is the 'user' meta data (e.g. tip type) saved properly?
    (3a) Are the parameters saved correctly?
    (3b) Are the parameters updated properly, after saving?
    (4a) Is the meta information saved properly?
    (4b) Is the meta information updated properly, after saving?

    Args:
        None

    Returns:
        None
    """
    # # First, we refresh / reload the database, model, and Sql Session
    # nuke and rebuild the debugging database
    debugDataBase = "DebugCypher"
    PathToBootstrap = SqlUtil.SQL_BASE_LOCAL
    databaseStr = PathToBootstrap + debugDataBase
    SqlBootstrap.run()
    # read in data to ship off
    FileToLoad = "./LocalData/ParseWaveTest.pxp"
    data = PxpLoader.LoadPxp(FileToLoad)
    # Create a high bandwidth model
    TestModel = HighBandwidthModel.HighBandwidthModel()
    # Create all the Sql Meta Parameters
    ParamMeta, _ = TestModel.GetParameterMetaInfo()
    # Get the Valid ids for each table
    mSqlObj = SqlUtil.InitSqlGetSessionAndClasses(databaseStr=databaseStr)
    mCls, sess = SqlDataModel.GetClassesAndSess(mSqlObj)
    # get all of the meta IDs, using sqlalchemy.inspection.inspect
    # in our database, the primary key is just one field.
    getId = lambda x: inspect(x).identity[0]
    # make a dictionary for each table, for the valid ids...
    metaDict = GetMetaTableOptions(mCls, sess)
    # # next, we create functions to randomize the input.
    # function to pick a random element from a list of ids
    shuffleIds = lambda listOfIds: np.random.choice(listOfIds)
    # loop through data, adding parameters and checking they match.
    nToCheck = len(data.keys())
    constr = Parameter.ParameterData
    # function to make a list of n randomized (index, x, y) parameters
    getRandomParam = lambda n: [
        constr(rand.randint(low=0, high=n), rand.randn(), rand.rand())
        for _ in range(n)
    ]
    getRandomIds = lambda: dict([(key, shuffleIds(val))
                                 for key, val in metaDict.items()])
    maxParams = len(ParamMeta)
    # get a random number of parameters (for testing updating)
    nRandParams = lambda: rand.randint(low=1, high=maxParams)
    # number of updates to try for each datapoint
    nUpdates = 10
    for i, (waveId, assocWaves) in enumerate(data.items()):
        print("Sql Testing {:d}/{:d}".format(i + 1, nToCheck))
        mRows = GetAllTableRows(mSqlObj)
        # get the parameter values; the maximum index equals maxParams
        ParamVals = getRandomParam(maxParams)
        # get a random assortment of valid ids to use in pushing
        MetaIds = getRandomIds()
        AssociatedWaveData = Model.WaveDataGroup(assocWaves)
        # push everything
        ModelName = TestModel.ModelName()
        idNameSpaceFirst = PushAndCheck(ModelName, AssociatedWaveData, MetaIds,
                                        ParamMeta, ParamVals, mSqlObj)
        # POST: initial push worked. try re-pushing. First, count how many
        # things are in each table (note: none of these numbers should change)
        # get another random assortment of parameters and meta information to
        # push (but with the same source and model; an *update*)
        for _ in range(nUpdates):
            nParams = nRandParams()
            ParamVals = getRandomParam(nParams)
            # get a random assortment of valid ids to use in pushing
            MetaIds = getRandomIds()
            idNameSpaceSecond = PushAndCheck(ModelName, AssociatedWaveData,
                                             MetaIds, ParamMeta, ParamVals,
                                             mSqlObj)
            idsToCheck = [
                lambda x: x.idTraceMeta, lambda x: x.idTraceData,
                lambda x: x.idTraceModel
            ]
            for idFunc in idsToCheck:
                orig = idFunc(idNameSpaceFirst)
                new = idFunc(idNameSpaceSecond)
                assert orig == new, "Update incorrectly changed a field."
        ParamVals = getRandomParam(maxParams)
        PushAndCheck(ModelName, AssociatedWaveData, MetaIds, ParamMeta,
                     ParamVals, mSqlObj)
    print("Passed (1) Data Save")
    print("Passed (2) Sql Linker Tables")
    print("Passed (3) Parameter Passing")
    print("Passed (4) Trace Meta ")