Example #1
def GetListOfFiles(mModel="HighBandwidthNUG2",mSqlObj=None):
    """
    Get the list of files associated with this model

    Args:
       mModel: the name of the model to get
       mSqlObj: output of InitSqlGetSessionAndClasses
    Returns:
       the list of data files associated with the model
    """
    if (mSqlObj is None):
        mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
    mDataFiles = SqlUtil.getModelDataFilesInfo(mModel,serialize=True,
                                               mSqlObj=mSqlObj)
    return mDataFiles
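
A minimal usage sketch for the function above (SqlUtil lives in PyUtil per the docstrings elsewhere in this collection; importing GetListOfFiles from its own module is an assumption):

# hypothetical usage; the module that defines GetListOfFiles is not shown here
from PyUtil import SqlUtil

# reuse one connection object across calls instead of re-initializing each time
mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
mDataFiles = GetListOfFiles(mModel="HighBandwidthNUG2", mSqlObj=mSqlObj)
print(len(mDataFiles))
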
Example #2
def run(Database="DebugCypher", PathToBootstrap=SqlUtil.SQL_BASE_LOCAL):
    """
    Nuke the database of choice (debug by default) and rebuild

    Args:
        Database: name of the database to nuke and rebuild
        PathToBootstrap: base connection string to which the database name
            is appended (defaults to the local SQL server)
    """
    ToBootstrap = PathToBootstrap + Database
    mSqlObj = SqlUtil.InitSqlGetSessionAndClasses(databaseStr=ToBootstrap)
    mCls, sess = SqlDataModel.GetClassesAndSess(mSqlObj)
    tables = SqlDataModel.GetAllTables(mSqlObj)
    engine = mSqlObj._engine
    for tbl in reversed(tables):
        # delete all rows, then reset the AUTO_INCREMENT counter
        engine.execute(tbl.delete())
        engine.execute("ALTER TABLE {:s} AUTO_INCREMENT = 1".format(tbl.name))
    #POST: every table is cleared
    args = (sess, mCls)
    AddDefaultTipPack(*args)
    AddDefaultTipTypes(*args)
    AddDefaultMolecules(*args)
    AddDefaultRatings(*args)
    AddDefaultUsers(*args)
    AddDefaultTipPreps(*args)
    AddDefaultSamplePreps(*args)
    AddDefaultTipManifests(*args)
    AddDefaultSamples(*args)
    AddDefaultModels(*args)
    AddDefaultExpertiments(*args)
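
The loop in run() walks the tables in reverse, presumably so that referencing (child) rows are deleted before the rows they point to. A self-contained sketch of that pattern in plain SQLAlchemy (illustrative Parent/Child tables, an in-memory SQLite engine, SQLAlchemy 1.4+ style; the MySQL-specific AUTO_INCREMENT reset is omitted):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Parent(Base):
    __tablename__ = "parent"
    id = Column(Integer, primary_key=True)

class Child(Base):
    __tablename__ = "child"
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey("parent.id"))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
# sorted_tables is ordered by dependency, so reversing it deletes
# child rows before the parent rows they point to
with engine.begin() as conn:
    for tbl in reversed(Base.metadata.sorted_tables):
        conn.execute(tbl.delete())
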
Example #3
def GetMetaTableOptions():
    """
    Get all of the meta table information, *and* their defaults.

    Args:
         None
    
    Returns:
         A list of TableOpts objects, one for each of the meta tables
    """
    mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
    # get the session and classes
    session = mSqlObj._sess
    mCls = mSqlObj._mCls
    # get the Sql Object for each meta table
    metaTablesFuncs = GetMetaTableFuncs()
    mDefaults = []
    for mFunc in metaTablesFuncs:
        mTab = mFunc(mCls)
        # XXX: assume there is exactly one primary key, the first column
        primaryCol = list(mTab.__table__.columns)[0]
        data = session.query(mTab).order_by(GetColName(primaryCol)).all()
        newObj = TableOpts(mTab, data)
        mDefaults.append(newObj)
    return mDefaults
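
The XXX comment above hard-codes the assumption that the table's first column is its primary key. As a sketch, the loop body could instead read the key from SQLAlchemy's table metadata (standard __table__.primary_key attributes; this variant orders by the column object directly, so GetColName is not needed):

        # sketch: look up the primary-key column explicitly rather than
        # assuming it is the first column of the table
        primaryCols = list(mTab.__table__.primary_key.columns)
        assert len(primaryCols) == 1, "expected exactly one primary-key column"
        data = session.query(mTab).order_by(primaryCols[0]).all()
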
Example #4
def PushToDatabase(self):
    """
    Pushes all of the current data to the database. If necessary, saves
    out the binary files as well.

    Args:
        None
    Returns:
        the id namespace returned by SqlDataModel.PushToDatabase
    """
    # get all of the meta information we need to uniquely identify
    # this data
    ModelName = self.ModelName()
    AssociatedWaveData = self.CurrentWave()
    MetaViewParams = self.View.GetSqlMetaParams()
    ParameterDescriptions = self.ParamMeta
    CurrentParams = self.CurrentParams
    mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
    namespace = SqlDataModel.PushToDatabase(ModelName,
                                            AssociatedWaveData,
                                            MetaViewParams,
                                            ParameterDescriptions,
                                            CurrentParams,
                                            SqlObj=mSqlObj)
    AssociatedWaveData.SetSqlIds(namespace)
    return namespace
Example #5
def GetClassesAndSess(mSqlObj):
    """
    Given a (possibly None) sql object from PyUtil.SqlUtil, returns the class
    and session object
    
    Args:
        mSqlObj: a (possibly None) sql object from PyUtil.SqlUtil; if None,
            a new one is initialized
    Returns:
        tuple of <table classes, session object> for the connection
    """
    if (mSqlObj is None):
        mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
    _, mCls, sess = mSqlObj.connClassSess()
    # POST: have a valid session. Get the first valid object (or none, if none)
    return mCls, sess
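
A short usage sketch; passing None simply defers connection setup to InitSqlGetSessionAndClasses, exactly as the body above does:

# either hand in an existing connection object...
mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
mCls, sess = GetClassesAndSess(mSqlObj)
# ...or pass None and let the helper build one
mCls, sess = GetClassesAndSess(None)
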
Example #6
def GetAllSourceFilesAndLabels(mModel="HighBandwidthNUG2"):
    """
    Gets all the source files and labels associated with the given model

    Args:
        mModel: the model name to get
    Returns: 
        tuple of <list of files, list of labels for each file>
    """
    mSqlObj = SqlUtil.InitSqlGetSessionAndClasses()
    mFiles = GetListOfFiles(mModel,mSqlObj=mSqlObj)
    vals = GetAllParamValues(mFiles,mSqlObj)
    labels = GetLabelsFromParamVals(vals)
    fileNames = [f.FileTimSepFor for f in mFiles]
    return fileNames,labels
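
A hypothetical usage sketch pairing each returned file name with its label (only the function above is assumed; the two lists are parallel):

fileNames, labels = GetAllSourceFilesAndLabels(mModel="HighBandwidthNUG2")
for name, label in zip(fileNames, labels):
    # labels[i] describes fileNames[i]
    print("{:s}\t{:s}".format(name, str(label)))
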
Example #7
def GetAllParamValues(mDataFiles,mSqlObj):
    """
    Given a list of data files, returns the parameter values associated with
    them

    Args:
        mDataFiles: output of GetListOfFiles
        mSqlObj: connection object to use, output of InitSqlGetSessionAndClasses
    Returns:
        list; element i is the in-order list of ParameterValue objects
        corresponding to file i
    """
    toRet = []
    for f in mDataFiles:
        vals,_ = SqlUtil.GetTraceParams(mSqlObj,f)
        serialVal = sqlSerialize(vals)
        toRet.append([val for val in serialVal])
    return toRet
Example #8
def run():
    """
    Function that tests that the hookup to the database is working end to end.
    Nukes the test (i.e. debug) schema and tries everything from scratch. Note
    that this depends on the Cypher converter and reader/writer being tested
    (see Demos:PythonReader)

    Tests:
    
    (1) Is binary data saved properly?
    (2) Is the 'user' meta data (e.g. tip type) saved properly?
    (3a) Are the parameters saved correctly?
    (3b) Are the parameters updated properly, after saving?
    (4a) Is the meta information saved properly?
    (4b) Is the meta information updated properly, after saving?

    Args:
        None

    Returns:
        None
    """
    # # First, we refresh / reload the database, model, and Sql Session
    # nuke and rebuild the debugging database
    debugDataBase = "DebugCypher"
    PathToBootstrap = SqlUtil.SQL_BASE_LOCAL
    databaseStr = PathToBootstrap + debugDataBase
    SqlBootstrap.run()
    # read in data to ship off
    FileToLoad = "./LocalData/ParseWaveTest.pxp"
    data = PxpLoader.LoadPxp(FileToLoad)
    # Create a high bandwidth model
    TestModel = HighBandwidthModel.HighBandwidthModel()
    # Create all the Sql Meta Parameters
    ParamMeta, _ = TestModel.GetParameterMetaInfo()
    # Get the Valid ids for each table
    mSqlObj = SqlUtil.InitSqlGetSessionAndClasses(databaseStr=databaseStr)
    mCls, sess = SqlDataModel.GetClassesAndSess(mSqlObj)
    # get all of the meta IDs, using  sqlalchemy.inspection.inspect
    # in our database, the primary key is just one field.
    getId = lambda x: inspect(x).identity[0]
    # make a dictionary for each tables, for the valid ids...
    metaDict = GetMetaTableOptions(mCls, sess)
    # # next, we create functions to randomize the input.
    # function to shuffle a list and take the first element (effectively
    # random pick).
    shuffleIds = lambda listOfIds: np.random.choice(listOfIds)
    # loop through data, adding parameters and checking they match.
    nToCheck = len(data.keys())
    constr = Parameter.ParameterData
    # function to make an (index,x,y) pair for each parameter (randomized)
    getRandomParam = lambda n: [
        constr(rand.randint(low=0, high=n), rand.randn(), rand.rand())
        for i in range(n)
    ]
    getRandomIds = lambda: dict([(key, shuffleIds(val))
                                 for key, val in metaDict.items()])
    maxParams = len(ParamMeta)
    # get a random number of parameters (for testing updating)
    nRandParams = lambda: rand.randint(low=1, high=maxParams)
    # number of updates to try for each datapoint
    nUpdates = 10
    for i, (waveId, assocWaves) in enumerate(data.items()):
        print("Sql Testing {:d}/{:d}".format(i + 1, nToCheck))
        mRows = GetAllTableRows(mSqlObj)
        # get the parameter values, maximum index given here...
        n = 100
        ParamVals = getRandomParam(maxParams)
        # get a random assortment of valid ids to use in pushing
        MetaIds = getRandomIds()
        AssociatedWaveData = Model.WaveDataGroup(assocWaves)
        # push everything
        ModelName = TestModel.ModelName()
        idNameSpaceFirst = PushAndCheck(ModelName, AssociatedWaveData, MetaIds,
                                        ParamMeta, ParamVals, mSqlObj)
        # POST: initial push worked. try re-pushing. First, count how many
        # things are in each table (note: none of these numbers should change)
        # get another random assortment of parameters and meta information to
        # push (but with the same source and model; an *update*)
        for i in range(nUpdates):
            nParams = nRandParams()
            ParamVals = getRandomParam(nParams)
            # get a random assortment of valid ids to use in pushing
            MetaIds = getRandomIds()
            idNameSpaceSecond = PushAndCheck(ModelName, AssociatedWaveData,
                                             MetaIds, ParamMeta, ParamVals,
                                             mSqlObj)
            idsToCheck = [
                lambda x: x.idTraceMeta, lambda x: x.idTraceData,
                lambda x: x.idTraceModel
            ]
            for idFunc in idsToCheck:
                orig = idFunc(idNameSpaceFirst)
                new = idFunc(idNameSpaceSecond)
                assert orig == new, "Update incorrectly changed a field."
        ParamVals = getRandomParam(maxParams)
        PushAndCheck(ModelName, AssociatedWaveData, MetaIds, ParamMeta,
                     ParamVals, mSqlObj)
    print("Passed (1) Data Save")
    print("Passed (2) Sql Linker Tables")
    print("Passed (3) Parameter Passing")
    print("Passed (4) Trace Meta ")