def getStackIdFromSubStackName(substackname, sessionname, msg=True): """ For a given run name and session name find stack id """ sessiondata = apDatabase.getSessionDataFromSessionName(sessionname) stackrunq = appiondata.ApStackRunData() stackrunq['session'] = sessiondata stackq = appiondata.ApStackData() stackq['substackname'] = substackname runsinstackq = appiondata.ApRunsInStackData() runsinstackq['stackRun'] = stackrunq runsinstackq['stack'] = stackq runsindatas = runsinstackq.query() if not runsindatas: return None if len(runsindatas) == 1: ### simpe case stackid = runsindatas[0]['stack'].dbid else: for runsindata in runsindatas: print runsindata apDisplay.printError("Found too many sub-stacks for specified criteria") apDisplay.printMsg("Found stack id %d with substackname %s from session %s"%(stackid, substackname, sessionname)) return stackid
def commitScaledStack(stackdata, params): #make new params query newstackparamsq = appiondata.ApStackParamsData() for key in newstackparamsq.keys(): if key != 'bin': newstackparamsq[key] = stackdata[0]['stackRun']['stackParams'][key] newstackparamsq['bin'] = params['bin'] #make new stack query newstackq = appiondata.ApStackData() newstackq['path'] = appiondata.ApPathData( path=os.path.abspath(params['newstackpath'])) newstackq['name'] = params['newstackname'] newstackq['description'] = params['description'] newstackdata = newstackq.query() if newstackdata: print "A stack with these parameters already exists" return #make new run query #first check that run name doesn't already exist newstackrunq = appiondata.ApStackRunData() newstackrunq['stackRunName'] = os.path.basename( os.getcwd()) #use cwd for run name newstackrundata = newstackrunq.query() if newstackrundata: print "A stack run with this name (the current directory name) already exists. Exiting" sys.exit() newstackrunq = appiondata.ApStackRunData() newstackrunq['stackRunName'] = os.path.basename( os.getcwd()) #use cwd for run name newstackrunq['stackParams'] = newstackparamsq newstackrunq['session'] = stackdata[0]['stackRun']['session'] if "selectionrun" in stackdata[0]['stackRun'].keys: newstackrunq['selectionrun'] = stackdata[0]['stackRun']['selectionrun'] newstackrunq['syntheticStackParams'] = stackdata[0]['stackRun'][ 'syntheticStackParams'] #make new runs in stack query and insert also inserts stack and stack run newrisq = appiondata.ApRunsInStackData() newrisq['stack'] = newstackq newrisq['stackRun'] = newstackrunq newrisq.insert() #loop in reverse order so that order of ptcls in db is like that of orig for particle in range(len(stackdata) - 1, -1, -1): stackparticleq = appiondata.ApStackParticleData() stackparticleq['particleNumber'] = stackdata[particle][ 'particleNumber'] stackparticleq['stack'] = newstackq stackparticleq['stackRun'] = newstackrunq stackparticleq['particle'] = 
stackdata[particle]['particle'] #print stackparticleq stackparticleq.insert() return
def checkForPreviousStack(stackname, stackpath=None):
	"""
	Abort (via apDisplay.printError) if a stack with this name and path
	is already in the database.

	stackname: stack file name, possibly with a directory component
	stackpath: explicit directory; when None the directory part of
		stackname is used
	"""
	spath = os.path.dirname(stackname) if stackpath is None else stackpath
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(spath))
	stackq['name'] = os.path.basename(stackname)
	existing = stackq.query(results=1)
	if existing:
		apDisplay.printError("A stack with name "+stackname+" and path "+spath+" already exists!")
	return
def getStackIdFromPath(stackpath):
	"""
	For a given stack file path find the stack id.

	stackpath: full path to the stack file (directory + file name)

	Returns the stack dbid, or None when no stack matches; errors out
	(apDisplay.printError) when more than one stack matches.
	"""
	path = os.path.dirname(stackpath)
	name = os.path.basename(stackpath)
	pathq = appiondata.ApPathData()
	pathq['path'] = path
	stackq = appiondata.ApStackData()
	stackq['name'] = name
	stackq['path'] = pathq
	stackdatas = stackq.query()
	# bugfix: an empty result previously raised IndexError below;
	# return None instead (consistent with getStackIdFromSubStackName)
	if not stackdatas:
		return None
	if len(stackdatas) > 1:
		apDisplay.printError("More than one stack has path: "+stackpath)
	return stackdatas[0].dbid
def commitStack(self, stackid):
	"""
	Commit the combined stack to the database.

	Creates a new ApStackData record (name/path/description taken from
	self.params, pixelsize/boxsize from self.newpixelsize/self.newboxsize),
	re-links every stack run of the source stack to it, then copies every
	particle of the source stack, numbering from self.partnum onward.
	self.partnum is advanced across calls so successive source stacks get
	consecutive particle numbers.
	"""
	startpart = self.partnum
	stackq = appiondata.ApStackData()
	# NOTE(review): oldstackdata is queried but never used below
	oldstackdata = apStack.getOnlyStackData(stackid)
	stackq['name'] = self.params['stackfilename']
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
	stackq['description'] = self.params['description'] + " ... combined stack ids " + str(self.params['stacks'])
	stackq['substackname'] = self.params['runname']
	stackq['hidden'] = False
	# pixelsize stored in meters (Angstrom * 1e-10)
	stackq['pixelsize'] = self.newpixelsize * 1e-10
	stackq['boxsize'] = self.newboxsize
	# link every run of the source stack to the new combined stack;
	# inserting ApRunsInStackData also inserts the referenced stack
	rinstackdata = apStack.getRunsInStack(stackid)
	for run in rinstackdata:
		rinstackq = appiondata.ApRunsInStackData()
		rinstackq['stack'] = stackq
		rinstackq['stackRun'] = run['stackRun']
		rinstackq.insert()
	stpartsdata = apStack.getStackParticlesFromId(stackid)
	apDisplay.printMsg("inserting " + str(len(stpartsdata)) + " particles into DB")
	# copy particles one-by-one, renumbering with the running counter
	for particle in stpartsdata:
		stpartq = appiondata.ApStackParticleData()
		stpartq['particleNumber'] = self.partnum
		stpartq['stack'] = stackq
		stpartq['stackRun'] = particle['stackRun']
		stpartq['particle'] = particle['particle']
		stpartq.insert()
		self.partnum += 1
		# progress dot every 1000 particles
		if self.partnum % 1000 == 0:
			sys.stderr.write(".")
	sys.stderr.write("\n")
	apDisplay.printMsg("commited particles " + str(startpart) + "-" + str(self.partnum))
	return
def insertSubTomoRun(sessiondata, selectionrunid, stackid, name, invert=False, subbin=1):
	"""
	Publish an ApSubTomogramRunData record for this session.

	selectionrunid / stackid: optional dbids; when falsy the corresponding
	reference is stored as None.
	"""
	pickdata = None
	if selectionrunid:
		pickdata = appiondata.ApSelectionRunData().direct_query(selectionrunid)
	stackdata = None
	if stackid:
		stackdata = appiondata.ApStackData().direct_query(stackid)
	runq = appiondata.ApSubTomogramRunData(
		session=sessiondata,
		pick=pickdata,
		stack=stackdata,
		runname=name,
		invert=invert,
		subbin=subbin)
	return publish(runq)
def uploadResults(self):
	"""
	Upload the junk-sorted stack to the database.

	Reads the new particle ordering from 'sort_junk.sel', creates a new
	ApStackData record named 'sorted.hed' derived from the original stack,
	then inserts the particles in the sorted order and re-links the
	original stack runs. Does nothing unless self.params['commit'] is True.
	"""
	if self.params['commit'] is False:
		return
	# Get the new file order
	fh = open("sort_junk.sel", 'r')
	lines = fh.readlines()
	i = 0
	fileorder = {}
	for line in lines:
		args = line.split()
		if (len(args) > 1):
			# file names look like <letters><number>.<ext>; the number is
			# the original particle index
			match = re.match('[A-Za-z]+([0-9]+)\.[A-Za-z]+', (args[0].split('/'))[-1])
			if (match):
				filenumber = int(match.groups()[0])
				# fileorder maps new position -> original file number
				fileorder[i] = filenumber
				i += 1
	fh.close()
	# Produce a new stack
	oldstack = apStack.getOnlyStackData(self.params['stackid'], msg=False)
	newstack = appiondata.ApStackData()
	newstack['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
	newstack['name'] = "sorted.hed"
	if newstack.query(results=1):
		apDisplay.printError("A stack with these parameters already exists")
	# Fill in data and submit
	newstack['oldstack'] = oldstack
	newstack['hidden'] = False
	newstack['substackname'] = self.params['runname']
	newstack['description'] = self.params['description']
	newstack['pixelsize'] = oldstack['pixelsize']
	newstack['boxsize'] = oldstack['boxsize']
	newstack['junksorted'] = True
	newstack.insert()
	# Insert stack images
	apDisplay.printMsg("Inserting stack particles")
	count = 0
	total = len(fileorder.keys())
	if total == 0:
		apDisplay.printError("No particles can be inserted in the sorted stack")
	for i in fileorder.keys():
		count += 1
		# backspace-overwrite progress line every 100 particles
		if count % 100 == 0:
			sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
			sys.stderr.write(str(count) + " of " + (str(total)) + " complete")
		# Get particle from the old stack
		# (particleNumber is 1-based, fileorder values are 0-based)
		oldparticle = apStack.getStackParticle(self.params['stackid'], fileorder[i] + 1)
		# Insert particle
		newparticle = appiondata.ApStackParticleData()
		newparticle['particleNumber'] = i + 1
		newparticle['stack'] = newstack
		newparticle['stackRun'] = oldparticle['stackRun']
		newparticle['particle'] = oldparticle['particle']
		newparticle['mean'] = oldparticle['mean']
		newparticle['stdev'] = oldparticle['stdev']
		newparticle.insert()
	apDisplay.printMsg("\n" + str(total) + " particles have been inserted into the sorted stack")
	# Insert runs in stack
	apDisplay.printMsg("Inserting Runs in Stack")
	runsinstack = apStack.getRunsInStack(self.params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = newstack
		newrunsq['stackRun'] = run['stackRun']
		newrunsq.insert()
	apDisplay.printMsg("finished")
	return
def createStackData(self):
	"""
	Upload a synthetic/fake stack and its particles to the database.

	Builds a fake selection run (no source images), a stack record
	('start.hed'), stack params and a stack run, links them with
	ApRunsInStackData, then creates one ApParticleData/ApStackParticleData
	pair per particle. Inserts happen only when self.params['commit'] is
	True.
	"""
	apDisplay.printColor("Starting upload of stack", "blue")
	pathq = appiondata.ApPathData()
	pathq['path'] = self.params['rundir']
	manq = appiondata.ApManualParamsData()
	manq['diam'] = self.params['diameter']
	manq['trace'] = False
	# fake selection run: particles have no real picks
	selectq = appiondata.ApSelectionRunData()
	selectq['name'] = 'fakestack_' + self.params['runname']
	selectq['hidden'] = True
	selectq['path'] = pathq
	selectq['session'] = self.sessiondata
	selectq['manparams'] = manq
	stackq = appiondata.ApStackData()
	stackq['name'] = "start.hed"
	stackq['path'] = pathq
	stackq['description'] = self.params['description']
	stackq['hidden'] = False
	# pixelsize stored in meters (Angstrom * 1e-10)
	stackq['pixelsize'] = self.params['apix'] * 1e-10
	stackq['boxsize'] = self.boxsize
	stackq['centered'] = False
	stackparamq = appiondata.ApStackParamsData()
	stackparamq['boxSize'] = self.boxsize
	stackparamq['bin'] = 1
	stackparamq['phaseFlipped'] = self.params['ctfcorrect']
	if self.params['ctfcorrect'] is True:
		stackparamq['fileType'] = "manual"
	# NOTE(review): this unconditionally overwrites the "manual" value set
	# just above; the ctfcorrect branch looks like it was meant to set a
	# different field (e.g. fliptype) -- confirm against schema
	stackparamq['fileType'] = "imagic"
	stackparamq['normalized'] = self.params['normalize']
	stackparamq['lowpass'] = 0
	stackparamq['highpass'] = 0
	stackrunq = appiondata.ApStackRunData()
	stackrunq['stackRunName'] = self.params['runname']
	stackrunq['stackParams'] = stackparamq
	stackrunq['selectionrun'] = selectq
	stackrunq['session'] = self.sessiondata
	runsinstackq = appiondata.ApRunsInStackData()
	runsinstackq['stack'] = stackq
	runsinstackq['stackRun'] = stackrunq
	if self.params['commit'] is True:
		# inserting the link record also inserts the stack and stack run
		runsinstackq.insert()
		if 'syncstackid' in self.params.keys() and self.params['syncstackid']:
			stackdata = runsinstackq['stack']
			stack2data = apStack.getOnlyStackData(self.params['syncstackid'])
			syncq = appiondata.ApSyncStackData(stack1=stackdata, stack2=stack2data, synctype=self.params['synctype'])
			syncq.insert()
	### for each particle
	sys.stderr.write("Starting particle upload")
	for i in range(self.numpart):
		# progress dot every 100 particles
		if i % 100 == 0:
			sys.stderr.write(".")
		partq = appiondata.ApParticleData()
		partq['image'] = None #We have no image, see if this works???
		partq['selectionrun'] = selectq
		# synthetic coordinates laid out on a 1000-wide grid
		partq['xcoord'] = int(i % 1000)
		partq['ycoord'] = int(i / 1000)
		partq['diameter'] = self.params['diameter']
		stackpartq = appiondata.ApStackParticleData()
		stackpartq['particleNumber'] = i + 1
		stackpartq['stack'] = stackq
		stackpartq['stackRun'] = stackrunq
		stackpartq['particle'] = partq
		# synthetic particles are assumed pre-normalized
		stackpartq['mean'] = 0.0
		stackpartq['stdev'] = 1.0
		if self.params['commit'] is True:
			stackpartq.insert()
	sys.stderr.write("\n")
	return
def commitMaskedStack(params, oldstackparts, newname=False):
	"""
	commit a masked substack to database

	required params:
		stackid
		description
		commit
		rundir
		mask

	oldstackparts: particle records of the source stack, copied in order
	newname: optional new stack file name

	Returns None; returns early (warning) if an identical stack exists.
	NOTE(review): stackq.insert() runs even when params['commit'] is False;
	only the particle/run inserts honor the commit flag -- confirm intent.
	"""
	oldstackdata = getOnlyStackData(params['stackid'], msg=False)
	#create new stack data
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(params['rundir']))
	stackq['name'] = oldstackdata['name']
	# use new stack name if provided
	if newname:
		stackq['name'] = newname
	stackdata = stackq.query(results=1)
	if stackdata:
		apDisplay.printWarning("A stack with these parameters already exists")
		return
	stackq['oldstack'] = oldstackdata
	stackq['hidden'] = False
	stackq['substackname'] = params['runname']
	stackq['description'] = params['description']
	stackq['pixelsize'] = oldstackdata['pixelsize']
	stackq['boxsize'] = oldstackdata['boxsize']
	stackq['mask'] = params['mask']
	if 'correctbeamtilt' in params.keys():
		stackq['beamtilt_corrected'] = params['correctbeamtilt']
	## insert now before datamanager cleans up referenced data
	stackq.insert()
	#Insert particles
	apDisplay.printMsg("Inserting stack particles")
	count = 0
	newpartnum = 1
	total = len(oldstackparts)
	for part in oldstackparts:
		count += 1
		# backspace-overwrite progress line every 100 particles
		if count % 100 == 0:
			sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
			sys.stderr.write(str(count)+" of "+(str(total))+" complete")
		# Insert particle
		newstackq = appiondata.ApStackParticleData()
		newstackq.update(part)
		newstackq['particleNumber'] = newpartnum
		newstackq['stack'] = stackq
		if params['commit'] is True:
			newstackq.insert()
		newpartnum += 1
	sys.stderr.write("\n")
	# bugfix: newpartnum starts at 1 and only increments, so the old
	# `== 0` test was dead code; `== 1` means no particles were inserted
	if newpartnum == 1:
		apDisplay.printError("No particles were inserted for the stack")
	apDisplay.printMsg("Inserted "+str(newpartnum-1)+" stack particles into the database")
	apDisplay.printMsg("Inserting Runs in Stack")
	runsinstack = getRunsInStack(params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = stackq
		newrunsq['stackRun'] = run['stackRun']
		if params['commit'] is True:
			newrunsq.insert()
		else:
			apDisplay.printWarning("Not commiting to the database")
	apDisplay.printMsg("finished")
	return
def commitSubStack(params, newname=False, centered=False, oldstackparts=None, sorted=False):
	"""
	commit a substack to database

	required params:
		stackid
		description
		commit
		rundir
		keepfile

	keepfile lists 0-based particle indices to keep, one per line.
	oldstackparts: optional pre-queried particle records, indexed to avoid
	one DB query per particle.
	"""
	oldstackdata = getOnlyStackData(params['stackid'], msg=False)
	#create new stack data
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(params['rundir']))
	stackq['name'] = oldstackdata['name']
	# use new stack name if provided
	if newname:
		stackq['name'] = newname
	stackdata = stackq.query(results=1)
	if stackdata:
		apDisplay.printWarning("A stack with these parameters already exists")
		return
	stackq['oldstack'] = oldstackdata
	stackq['hidden'] = False
	stackq['substackname'] = params['runname']
	stackq['description'] = params['description']
	stackq['pixelsize'] = oldstackdata['pixelsize']
	stackq['boxsize'] = oldstackdata['boxsize']
	if 'correctbeamtilt' in params.keys():
		stackq['beamtilt_corrected'] = params['correctbeamtilt']
	if sorted is True:
		stackq['junksorted'] = True
	if centered is True:
		stackq['centered'] = True
	if 'mask' in params:
		stackq['mask'] = params['mask']
	if 'maxshift' in params:
		stackq['maxshift'] = params['maxshift']
	## insert now before datamanager cleans up referenced data
	stackq.insert()
	#Insert particles
	listfile = params['keepfile']
	### read list and sort; stored values are 1-based particle numbers
	f = open(listfile, 'r')
	listfilelines = []
	for line in f:
		sline = line.strip()
		if re.match("[0-9]+", sline):
			listfilelines.append(int(sline.split()[0])+1)
		else:
			apDisplay.printWarning("Line in listfile is not int: "+str(line))
	listfilelines.sort()
	total = len(listfilelines)
	f.close()
	## index old stack particles by number
	part_by_number = {}
	if oldstackparts is not None:
		for part in oldstackparts:
			part_by_number[part['particleNumber']] = part
	apDisplay.printMsg("Inserting stack particles")
	count = 0
	newpartnum = 1
	for origpartnum in listfilelines:
		count += 1
		# backspace-overwrite progress line every 100 particles
		if count % 100 == 0:
			sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
			sys.stderr.write(str(count)+" of "+(str(total))+" complete")
		# Find corresponding particle in old stack
		# Use previously queried particles if possible, otherwise
		# do new query here (very slow if millions of prtls in DB)
		try:
			oldstackpartdata = part_by_number[origpartnum]
		except KeyError:
			oldstackpartdata = getStackParticle(params['stackid'], origpartnum)
		# Insert particle
		newstackq = appiondata.ApStackParticleData()
		newstackq.update(oldstackpartdata)
		newstackq['particleNumber'] = newpartnum
		newstackq['stack'] = stackq
		if params['commit'] is True:
			newstackq.insert()
		newpartnum += 1
	sys.stderr.write("\n")
	# bugfix: newpartnum starts at 1 and only increments, so the old
	# `== 0` test was dead code; `== 1` means no particles were inserted
	if newpartnum == 1:
		apDisplay.printError("No particles were inserted for the stack")
	apDisplay.printMsg("Inserted "+str(newpartnum-1)+" stack particles into the database")
	apDisplay.printMsg("Inserting Runs in Stack")
	runsinstack = getRunsInStack(params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = stackq
		newrunsq['stackRun'] = run['stackRun']
		if params['commit'] is True:
			newrunsq.insert()
		else:
			apDisplay.printWarning("Not commiting to the database")
	apDisplay.printMsg("finished")
	return
def start(self):
	"""
	Average aligned subtomogram particles around their z-centers.

	For each stack particle of an align/cluster substack: build a z
	profile of the subvolume, keep particles whose gaussian center falls
	within ztolerance of the volume center, transform and sum them, then
	write the average volume plus per-particle profile text files.
	"""
	subtomorunq = appiondata.ApSubTomogramRunData()
	subtomorundata = subtomorunq.direct_query(self.params['subtomoId'])
	volshape, totalbin, pixelsize = apTomo.getSubvolumeInfo(subtomorundata)
	if volshape is None:
		apDisplay.printError('No subvolume exists for the subtomoId')
	sessionname = subtomorundata['session']['name']
	stackq = appiondata.ApStackData()
	stackdata = stackq.direct_query(self.params['stackId'])
	diameter = apStack.getStackParticleDiameter(stackdata)
	# convert Angstrom diameter to pixels at the tomogram pixelsize
	diameterpixel = diameter * 1e-10 / pixelsize
	halfwidth = diameterpixel / 4
	ztolerance = halfwidth
	zbackgroundrange = max(((volshape[0] - diameterpixel * 3) / 2, 10))
	if self.params['commit']:
		avgrundata = apTomo.insertTomoAverageRun(
			self.params['runname'],
			self.params['rundir'],
			subtomorundata,
			stackdata,
			halfwidth,
			self.params['description'],
		)
	profiles = {}
	sumvol = numpy.zeros(volshape)
	substacktype, conditionstackdata = apStack.findSubStackConditionData(stackdata)
	if substacktype in ['clustersub', 'alignsub']:
		alignstack = apStack.getAlignStack(substacktype, conditionstackdata)
		alignpackage = apAlignment.getAlignPackage(alignstack['alignrun'])
		stackprtls = apStack.getStackParticlesFromId(stackdata.dbid)
		i = 0
		for stackp in stackprtls:
			alignp = apAlignment.getAlignParticle(stackp, alignstack)
			shift = apAlignment.getAlignShift(alignp, alignpackage)
			subtomodata = apTomo.getSubTomogramData(subtomorundata, stackp)
			subtomofile = os.path.join(subtomodata['path']['path'], subtomodata['name'] + '.rec')
			subvolume = apTomo.getTomoVolume(subtomodata)
			if subvolume is not None:
				zcenter = volshape[0] / 2
				profile = apTomo.getParticleCenterZProfile(subvolume, shift, halfwidth, zbackgroundrange)
				subtomoid = subtomodata.dbid
				profiles[subtomoid] = profile
				center = apTomo.gaussianCenter(profile)
				# only average particles centered within tolerance
				if center > zcenter - ztolerance and center < zcenter + ztolerance:
					i += 1
					shiftz = zcenter - center
					transformedvolume = apTomo.transformTomo(subvolume, subtomofile, alignpackage, alignp, shiftz, totalbin)
					## write transformed mrc file to check the result
					filename = os.path.join(self.params['rundir'], './transformed%05d.mrc' % subtomoid)
					mrc.write(transformedvolume, filename)
					sumvol += transformedvolume
					t = numpy.sum(transformedvolume, axis=0)
					filename = os.path.join(self.params['rundir'], './p%05d.mrc' % subtomoid)
					# bugfix: write the z projection `t` (previously the
					# full volume was written twice and `t` was unused)
					mrc.write(t, filename)
					if self.params['commit']:
						apTomo.insertTomoAvgParticle(avgrundata, subtomodata, alignp, shiftz)
		if i < 1:
			apDisplay.printError('no subtomogram qualifies for averaging')
		else:
			avgvol = sumvol / i
			avgvolfilename = sessionname + "_" + self.params['runname'] + ".mrc"
			avgvolpath = os.path.join(self.params['rundir'], avgvolfilename)
			mrc.write(avgvol, avgvolpath)
			if not os.path.isfile(avgvolpath):
				apDisplay.printError("tomogram not exist")
			apTomo.makeMovie(avgvolpath, self.params['maxsize'])
			apTomo.makeProjection(avgvolpath, self.params['maxsize'])
		# dump one z-profile text file per subtomogram
		proshape = profile.shape
		# renamed loop/line variables: previous code shadowed the builtins
		# `id` and `str`
		for profileid in profiles.keys():
			out = open('profile_%05d.txt' % profileid, 'w')
			for z in range(0, proshape[0]):
				textline = "%5d\t" % z
				textline += "%6.3f\t" % profiles[profileid][z]
				textline += "\n"
				out.write(textline)
			out.close()
def commitSubStack(params, newname=False, centered=False, oldstackparts=None, sorted=False, included=None):
	"""
	commit a substack to database (bulk-SQL variant)

	required params:
		stackid
		description
		commit
		rundir
		keepfile

	'included' param is a list of included particles, starting at 0;
	when omitted the 0-based indices are read from params['keepfile'].
	Particles are inserted with one bulk INSERT (chunked at 100K rows)
	instead of per-row sinedon inserts.
	"""
	t0 = time.time()
	oldstackdata = getOnlyStackData(params['stackid'], msg=False)
	apDisplay.printColor("got old stackdata in " + apDisplay.timeString(time.time() - t0), "cyan")
	t0 = time.time()
	#create new stack data
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(params['rundir']))
	stackq['name'] = oldstackdata['name']
	# use new stack name if provided
	if newname:
		stackq['name'] = newname
	stackdata = stackq.query(results=1)
	if stackdata:
		apDisplay.printWarning("A stack with these parameters already exists")
		return
	stackq['oldstack'] = oldstackdata
	stackq['hidden'] = False
	stackq['substackname'] = params['runname']
	stackq['description'] = params['description']
	stackq['pixelsize'] = oldstackdata['pixelsize']
	stackq['boxsize'] = oldstackdata['boxsize']
	if 'correctbeamtilt' in params.keys():
		stackq['beamtilt_corrected'] = params['correctbeamtilt']
	if sorted is True:
		stackq['junksorted'] = True
	if centered is True:
		stackq['centered'] = True
	if 'mask' in params:
		stackq['mask'] = params['mask']
	if 'maxshift' in params:
		stackq['maxshift'] = params['maxshift']
	## insert now before datamanager cleans up referenced data
	stackq.insert()
	apDisplay.printMsg("created new stackdata in %s\n" % (apDisplay.timeString(time.time() - t0)))
	newstackid = stackq.dbid
	t0 = time.time()
	# get list of included particles (stored as 1-based particle numbers)
	apDisplay.printMsg("Getting list of particles to include")
	if included:
		listfilelines = [p + 1 for p in included]
	else:
		### read list
		listfilelines = []
		listfile = params['keepfile']
		f = open(listfile, 'r')
		for line in f:
			sline = line.strip()
			if re.match("[0-9]+", sline):
				listfilelines.append(int(sline.split()[0]) + 1)
			else:
				apDisplay.printWarning("Line in listfile is not int: " + str(line))
		f.close()
	listfilelines.sort()
	total = len(listfilelines)
	apDisplay.printMsg("Completed in " + apDisplay.timeString(time.time() - t0) + "\n")
	## index old stack particles by number
	apDisplay.printMsg("Retrieving original stack information")
	t0 = time.time()
	part_by_number = {}
	# get stack data from original particles
	if not oldstackparts:
		sqlcmd = "SELECT * FROM ApStackParticleData " + \
			"WHERE `REF|ApStackData|stack` = %i"%(params['stackid'])
		# This result gives dictionary, not data object
		oldstackparts = sinedon.directq.complexMysqlQuery('appiondata', sqlcmd)
	for part in oldstackparts:
		part_by_number[part['particleNumber']] = part
	apDisplay.printMsg("Completed in " + apDisplay.timeString(time.time() - t0) + "\n")
	apDisplay.printMsg("Assembling database insertion command")
	t0 = time.time()
	count = 0
	newpartnum = 1
	partlistvals = []
	# bugfix: bind sqlParams before the loop so an empty particle list no
	# longer raises NameError when building sqlstart below
	sqlParams = ['particleNumber', 'REF|ApStackData|stack']
	for origpartnum in listfilelines:
		count += 1
		oldstackpartdata = part_by_number[origpartnum]
		sqlParams = ['particleNumber', 'REF|ApStackData|stack']
		vals = [newpartnum, newstackid]
		for k, v in oldstackpartdata.iteritems():
			# First need to convert the keys to column names
			k = sinedon.directq.datakeyToSqlColumnName(oldstackpartdata, k)
			# skip auto columns and the two values we override
			if k in ['DEF_id', 'DEF_timestamp', 'particleNumber', 'REF|ApStackData|stack']:
				continue
			sqlParams.append(k)
			# oldstackpartdata can either be sinedon data object
			# as passed through the function call
			# or a pure dictionary from directq.complexMysqlQuery
			# In the latter case v is just a long integer, not
			# data reference.
			if 'REF|' in k and hasattr(v, 'dbid'):
				# if it is a sinedon data object
				v = v.dbid
			vals.append(v)
		partlistvals.append("('" + "','".join(str(x) for x in vals) + "')")
		newpartnum += 1
	apDisplay.printMsg("Inserting particle information into database")
	sqlstart = "INSERT INTO `ApStackParticleData` (`" + \
		"`,`".join(sqlParams)+ "`) VALUES "
	# break up command into groups of 100K inserts
	# this is a workaround for the max_allowed_packet at 16MB
	n = 100000
	sqlinserts = [partlistvals[i:i+n] \
		for i in range(0, len(partlistvals), n)]
	if params['commit'] is True:
		for sqlinsert in sqlinserts:
			sqlcmd = sqlstart + ",".join(sqlinsert)
			sinedon.directq.complexMysqlQuery('appiondata', sqlcmd)
	sys.stderr.write("\n")
	# bugfix: newpartnum starts at 1 and only increments, so the old
	# `== 0` test was dead code; `== 1` means no particles were inserted
	if newpartnum == 1:
		apDisplay.printError("No particles were inserted for the stack")
	apDisplay.printColor("Inserted "+str(newpartnum-1)+ \
		" stack particles into the database in "+ \
		apDisplay.timeString(time.time()-t0),"cyan")
	apDisplay.printMsg("\nInserting Runs in Stack")
	runsinstack = getRunsInStack(params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = stackq
		newrunsq['stackRun'] = run['stackRun']
		if params['commit'] is True:
			newrunsq.insert()
	else:
		apDisplay.printWarning("Not committing to the database")
	apDisplay.printMsg("finished")
	return
def uploadData(self, ctfpartlist):
	"""
	Upload a synthetic (CTF-applied) stack and its particles.

	ctfpartlist: list of particle file names in final-stack order.
	Builds ApSyntheticStackParamsData / ApStackParamsData / ApStackData /
	ApStackRunData records from self.params, checks whether identical
	stack/run records already exist (and which field differs when they
	partially match), then inserts one stack particle per entry of
	ctfpartlist. Inserts happen only when self.params['commit'] is True.
	"""
	### read mean /stdev for uploading
	self.getPartMeanTree(os.path.join(self.params['rundir'], self.params['finalstack']), ctfpartlist)
	sessiondata = apDatabase.getSessionDataFromSessionName(self.params['sessionname'])
	# NOTE(review): projectnum is computed but never used below
	if self.params['projectid'] is not None:
		projectnum = self.params['projectid']
	else:
		projectnum = apProject.getProjectIdFromSessionName(self.params['sessionname'])
	### create synthetic stack object ... not saving global params like runname, session, project, description, etc. here; that's in ApStackData
	syntheticq = appiondata.ApSyntheticStackParamsData()
	### get number of fakestack runs
	# NOTE(review): numentries is never used
	numentries = len(syntheticq)
	syntheticq['modelid'] = appiondata.ApInitialModelData.direct_query(self.params['modelid'])
	syntheticq['boxsize'] = self.params['box']
	syntheticq['apix'] = self.params['apix']
	syntheticq['projcount'] = self.params['projcount']
	syntheticq['projstdev'] = self.params['projstdev']
	syntheticq['shiftrad'] = self.params['shiftrad']
	syntheticq['rotang'] = self.params['rotang']
	syntheticq['flip'] = self.params['flip']
	syntheticq['kilovolts'] = self.params['kv']
	syntheticq['spher_aber'] = self.params['cs']
	syntheticq['defocus_x'] = self.params['df1']
	syntheticq['defocus_y'] = self.params['df2']
	syntheticq['randomdef'] = self.params['randomdef']
	if self.params['randomdef'] is True:
		syntheticq['randomdef_std'] = self.params['randomdef_std']
	syntheticq['astigmatism'] = self.params['astigmatism']
	syntheticq['snr1'] = self.params['snr1']
	syntheticq['snrtot'] = self.params['snrtot']
	syntheticq['envelope'] = os.path.basename(self.params['envelopefile'])
	syntheticq['ace2correct'] = self.params['ace2correct']
	syntheticq['ace2correct_rand'] = self.params['ace2correct_rand']
	if self.params['ace2correct_rand'] is True:
		syntheticq['ace2correct_std'] = self.params['ace2correct_std']
	syntheticq['ace2estimate'] = self.params['ace2estimate']
	syntheticq['lowpass'] = self.params['lpfilt']
	syntheticq['highpass'] = self.params['hpfilt']
	syntheticq['norm'] = self.params['norm']
	### fill stack parameters
	stparamq = appiondata.ApStackParamsData()
	stparamq['boxSize'] = self.params['box']
	stparamq['bin'] = 1
	stparamq['fileType'] = "imagic"
	stparamq['defocpair'] = 0
	stparamq['lowpass'] = self.params['lpfilt']
	stparamq['highpass'] = self.params['hpfilt']
	stparamq['norejects'] = 1
	if self.params['invert'] is True:
		stparamq['inverted'] = 1
	else:
		stparamq['inverted'] = 0
	if self.params['ace2correct'] is True or self.params['ace2correct_rand'] is True:
		stparamq['phaseFlipped'] = 1
		stparamq['fliptype'] = "ace2part"
	else:
		stparamq['phaseFlipped'] = 0
	stparamq['normalized'] = self.params['norm']
	# NOTE(review): paramslist is queried but never used
	paramslist = stparamq.query()
	### create a stack object
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
	### see if stack already exists in the database (just checking path & name)
	uniqstackdatas = stackq.query(results=1)
	### create a stackRun object
	runq = appiondata.ApStackRunData()
	runq['stackRunName'] = self.params['runname']
	runq['session'] = sessiondata
	### see if stack run already exists in the database (just checking runname & session)
	uniqrundatas = runq.query(results=1)
	### finish stack object
	stackq['name'] = self.params['finalstack']
	stackq['description'] = self.params['description']
	stackq['hidden'] = 0
	# pixelsize stored in meters (Angstrom * 1e-10)
	stackq['pixelsize'] = self.params['apix'] * 1e-10
	stackq['boxsize'] = self.params['box']
	self.stackdata = stackq
	### finish stackRun object
	runq['stackParams'] = stparamq
	runq['syntheticStackParams'] = syntheticq
	self.stackrundata = runq
	### create runinstack object
	rinstackq = appiondata.ApRunsInStackData()
	rinstackq['stackRun'] = runq
	### if not in the database, make sure run doesn't already exist
	if not uniqstackdatas and not uniqrundatas:
		if self.params['commit'] is True:
			apDisplay.printColor("Inserting stack parameters into database", "cyan")
			rinstackq['stack'] = stackq
			rinstackq.insert()
		else:
			apDisplay.printWarning("NOT INSERTING stack parameters into database")
	elif uniqrundatas and not uniqstackdatas:
		apDisplay.printError("Weird, run data without stack already in the database")
	else:
		# both stack and run already exist: verify they match our params
		rinstack = rinstackq.query(results=1)
		prevrinstackq = appiondata.ApRunsInStackData()
		prevrinstackq['stackRun'] = uniqrundatas[0]
		prevrinstackq['stack'] = uniqstackdatas[0]
		prevrinstack = prevrinstackq.query(results=1)
		## if no runinstack found, find out which parameters are wrong:
		if not rinstack:
			for i in uniqrundatas[0]:
				print "r =======",i,"========"
				if uniqrundatas[0][i] != runq[i]:
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i,uniqrundatas[0][i],runq[i]
			for i in uniqrundatas[0]['stackParams']:
				print "p =======",i,"========"
				if uniqrundatas[0]['stackParams'][i] != stparamq[i]:
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i, uniqrundatas[0]['stackParams'][i], stparamq[i]
			for i in uniqstackdatas[0]:
				print "s =======",i,"========"
				if uniqstackdatas[0][i] != stackq[i]:
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i,uniqstackdatas[0][i],stackq[i]
			for i in prevrinstack[0]:
				print "rin =======",i,"========"
				if prevrinstack[0][i] != rinstackq[i]:
					print i,prevrinstack[0][i],rinstackq[i]
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i,prevrinstack[0][i],rinstackq[i]
			apDisplay.printError("All parameters for a particular stack must be identical! \n"+\
				"please check your parameter settings.")
		apDisplay.printWarning("Stack already exists in database! Will try and appending new particles to stack")
	### create a fake selection run
	# selectq = appiondata.ApSelectionRunData()
	# selectq['session'] = sessiondata
	# selectq['name'] = "fakerun"
	# self.selectq = selectq
	if self.params['commit'] is True:
		apDisplay.printColor("Inserting fake selection parameters into the database", "cyan")
		# selectq.insert()
	else:
		apDisplay.printWarning("NOT INSERTING fake selection parameters into the database")
	partNumber = 0
	### loop over the particles and insert
	if self.params['commit'] is True:
		apDisplay.printColor("inserting particle parameters into database", "cyan")
	else:
		apDisplay.printWarning("NOT INSERTING particle parameters into database")
	for i in range(len(ctfpartlist)):
		partNumber += 1
		# NOTE(review): partfile is never used in this loop
		partfile = ctfpartlist[i]
		partmeandict = self.partmeantree[i]
		partq = appiondata.ApParticleData()
		# partq['selectionrun'] = selectq
		# synthetic particle: xcoord only carries the particle number
		partq['xcoord'] = partNumber
		stpartq = appiondata.ApStackParticleData()
		### check unique params
		stpartq['stack'] = self.stackdata
		stpartq['stackRun'] = self.stackrundata
		stpartq['particleNumber'] = partNumber
		stpartdata = stpartq.query(results=1)
		if stpartdata:
			apDisplay.printError("trying to insert a duplicate particle")
		stpartq['particle'] = partq
		stpartq['mean'] = partmeandict['mean']
		stpartq['stdev'] = partmeandict['stdev']
		if self.params['commit'] is True:
			stpartq.insert()
	return