Code Example #1
def PEATSAJobs(prjs, resubmit=False):
    """Submit PEATSA runs for all projects or merge results if done"""
    for name in prjs:
        print name
        DB = PDatabase(local=os.path.join(savepath,name))
        pdb = DB['wt'].Structure
        PS = PEATSAPlugin()
        PS.main(DB=DB)
        if hasattr(DB.meta,'peatsa_jobs') and resubmit == False:
            if 'mycalc' in DB.meta.peatsa_jobs:
                print 'job is present'
                #try to merge results
                S = PEATTableModel(DB)
                job,n = PS.getJob('mycalc')
                PS.mergeResults(job, 'prediction', S)
                DB.commit()
                print 'merged results'
        else:
            mutlist = []
            for p in DB.getRecs():
                mutlist.append(DB.get(p).Mutations)
            #print mutlist
            pdbfile = PS.writetempPDB()
            #we add source project data so exp data can be read from summary
            prjdata = {'server':'enzyme.ucd.ie','username':'******',
                       'project':name,'password':'******','port':'8080'}
            PS.submitJob(name='mycalc', pdbname=DB.meta.refprotein, pdbfile=pdbfile, 
                         mutations=mutlist, calcs=['stability'],
                         meta={'protein':name,'expcol':'Exp','project':prjdata})
        #required to end process
        PS.jobManager.stopLogging()
        DB.close()
    return
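The snippets on this page assume module-level imports and settings (os, PDatabase, PEATSAPlugin, savepath, and so on) from the surrounding file. A minimal driver for PEATSAJobs, with placeholder paths and project names, might look like this sketch:

import os
savepath = '/path/to/projects'   #assumed module-level setting
projects = ['1bvc', '1pga']      #placeholder project names
#submit stability calculations, or merge results for finished 'mycalc' jobs
PEATSAJobs(projects)
#force fresh submissions even where a job is already recorded
#PEATSAJobs(projects, resubmit=True)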
Code Example #2
File: Utils.py Project: shambo001/peat
def saveDBCopy(DB, filename, callback=None):
    """Save local copy of a remote or another local DB"""
    if filename == '' or filename is None:
        return False
    if os.path.exists(filename):
        #remove any existing ZODB files before writing the new copy
        for ext in ['.lock', '.index', '']:
            if os.path.exists(filename + ext):
                os.remove(filename + ext)
    newDB = PDatabase(local=filename)
    newDB = copyDB(DB, newDB)
    newDB.commit()
    newDB.close()
    return newDB
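A usage sketch for saveDBCopy, assuming a reachable remote project; the server and credential values are placeholders in the masked style used elsewhere on this page:

#pull a remote project down into a local ZODB file
DB = PDatabase(server='enzyme.ucd.ie', username='******',
               password='******', project='myproject', port=8080)
result = saveDBCopy(DB, 'myproject_copy.fs')
if result != False:
    print 'saved local copy'
DB.close()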
Code Example #3
def createProjects(files):
    """Create multiple projects at once from csv files"""

    for filename in files:
        print filename
        name = os.path.splitext(filename)[0]
        #create/open db
        DB = PDatabase(local=os.path.join(savepath,name))
        DB.add('wt')
        #add wt pdb
        stream = DBActions.fetchPDB(name)
        DBActions.addPDBFile(DB, 'wt', pdbdata=stream, pdbname=name, gui=False)
        DB.meta.refprotein = 'wt'
        DB.meta.info['protein'] = name
        #import data from csv
        DB.importCSV(os.path.join(cpath,filename), namefield='Mutations')
        print 'imported ok'
        DB.deleteField('PDB')
        DB.commit()
        DB.close()
        print 'done'
    return
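createProjects reads each CSV from the module-level cpath folder and writes one database per protein under savepath. A sketch of a call, with placeholder paths:

import os
cpath = '/path/to/csvfiles'      #assumed input folder of CSV files
savepath = '/path/to/projects'   #assumed output folder for the databases
files = [f for f in os.listdir(cpath) if f.endswith('.csv')]
createProjects(files)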
Code Example #4
File: zodbtest.py Project: shambo001/peat
def importOldProj(datadir, local=None, server=None,
                  project=None, username=None):
    """Import old peat projects"""
    import PEAT_DB.Database as peatDB
    from PEAT_DB.PEAT_dict import PEAT_dict, sub_dict
    import copy
    if local != None:
        newDB = PDatabase(local=local)
    elif server != None:
        newDB = PDatabase(server=server, username=username, port=8080,
                          password='******', project=project)
    else:
        #neither a local file nor a server target was given
        return

    print newDB
    PT = peatDB.Database(datadir, Tk=False)
    oldDB = PT.DB
    print 'got old peat_db with %s proteins' %len(PT.proteins)

    print PT.DB.keys()
    #import meta stuff like userfields, table
    for p in newDB.meta.special:
        if not p in PT.DB.keys():
            continue
        print 'adding',p
        for k in PT.DB[p]:
            newDB.meta[p][k] = copy.deepcopy(PT.DB[p][k])
    newDB.meta._p_changed = 1

    for p in PT.proteins:
        if p in newDB.meta.special:
            continue

        name = oldDB[p]['Name']         
        rec = PEATRecord(name=name)
        for col in oldDB[p].keys():
            cdata = oldDB[p][col]
            recdata = {}
            if col == 'name':
                cdata = oldDB[p]['Name']
  
            if oldDB['userfields'].has_key(col) and oldDB['userfields'][col]['field_type'] in ekintypes:
                E=EkinProject(data=cdata)
                E.length = len(E.datasets)
                if len(E.datasets)==0:
                    continue
                cdata = E

            if type(cdata) == sub_dict:
                for k in cdata.keys():
                    recdata[k] = copy.deepcopy(cdata[k])
            else:
                recdata = cdata
            if cdata != '' and cdata != None:
                rec.addField(col, data=recdata)
        newDB.add(p,rec)
    print newDB.meta.userfields
    #remove any file cols, too hard to import
    for m in newDB.meta.userfields.keys()[:]:
        if newDB.meta.userfields[m]['field_type'] == 'File':
            newDB.deleteField(m)
    newDB.commit(user='******', note='import')
    newDB.close()
    print 'import done'

    return
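The function can target either a local file or a server, per the branches at the top; a sketch with placeholder paths and names:

#convert an old-format PEAT project into a new local database
importOldProj('/path/to/old_project', local='converted.fs')
#or import straight into a server-hosted project instead
#importOldProj('/path/to/old_project', server='enzyme.ucd.ie',
#              project='converted', username='******')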
Code Example #5
def summarise(projects):
    """Collect prediction vs experimental statistics for each project into summary.fs"""
    summDB = PDatabase(local='summary.fs')
    C = CorrelationAnalyser()
    figs = []
    for f in range(4):
        figs.append(plt.figure())
    
    gs = gridspec.GridSpec(5, 5, wspace=0.3, hspace=0.5)    
    i=0
    data=[]    
    print 'processing %s projects' %len(projects)
    for p in projects:
        print 'structure:',p
        DB = PDatabase(local=os.path.join(savepath,p))
        S = PEATTableModel(DB)           
        
        try:
            exp,pre = S.getColumns(['Exp','prediction'],allowempty=False)
            errs = [j[0]-j[1] for j in zip(exp,pre)]
        except:
            print 'no results'
            continue
            
        #DB.close()
        #add link to proj
        summDB.add(p)
        summDB.addField('project',fieldtype='Project')
        summDB[p]['project'] = {'server':'enzyme.ucd.ie','username':'******',
                                'project':p,'password':'******','port':'8080'}
        print summDB.isChanged()
        #stats
        cc,rmse,meanerr = C.getStats(pre,exp)
        #ttest for mean errs 0        
        ttp = round(stats.ttest_1samp(errs, 0)[1],2)
        #normality of errs
        w,swp = C.ShapiroWilk(errs)
        x={'name':p,'mutants':len(pre),'rmse':rmse,'corrcoef':cc,'meanerr':meanerr,
           'ttest':ttp,'shapirowilk':swp}
           
        '''ax = figs[0].add_subplot(gs[0, i])
        C.plotCorrelation(pre,exp,title=p,ms=2,axeslabels=False,ax=ax)
        ax = figs[1].add_subplot(gs[0, i])
        C.showHistogram([pre,exp],title=p,labels=['pre','exp'],ax=ax)                
        ax = figs[2].add_subplot(gs[0, i])
        C.plotNorm(errs,title=p,lw=1,ax=ax)
        #qqplot
        ax = figs[3].add_subplot(gs[0, i])
        C.QQplot(errs,title=p,ax=ax)'''
        
        #get PDB info
        parser = PDBParser()
        descr = parser.getDescription(p)
        x.update(descr)
        data.append(x)       
        i+=1              
        
    summDB.importDict(data)
    print summDB.isChanged()
    summDB.commit()    
    
    #add all peatsa jobs to summary proj also
    '''print 'adding peatsa job info'
    PS = PEATSAPlugin()
    PS.main(DB=summDB)
    #summDB.meta.peatsa_jobs = None
    #from ZODB.PersistentMapping import PersistentMapping
    #summDB.meta.peatsa_jobs = PersistentMapping()    
    PS.checkJobsDict()
    PS.jobManager.stopLogging()
    for p in projects:
        #print summDB.meta
        DB = PDatabase(local=os.path.join(savepath,p))
        job = DB.meta.peatsa_jobs['mycalc']
        summDB.meta.peatsa_jobs[p] = job
        print job
        #DB.close()
    print summDB.isChanged()
    print summDB.meta.peatsa_jobs
    summDB.commit()'''

    #for i in range(len(figs)):
    #    figs[i].savefig('fig%s.png' %i)
    #plt.show()
        
    return
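summarise expects the same project names used when the per-structure databases were created under savepath, and collects its statistics into summary.fs in the working directory. A closing sketch with placeholder names:

projects = ['1bvc', '1pga']      #placeholder project names
summarise(projects)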