def getStackParticleDiameter(stackdata):
	"""Return the particle diameter for one particle of the given stack.

	Queries a single stack particle belonging to `stackdata` and delegates
	to apParticle for the diameter; returns None if the stack is empty.
	"""
	partq = appiondata.ApStackParticleData(stack=stackdata)
	matches = partq.query(results=1)
	if not matches:
		return None
	return apParticle.getParticleDiameter(matches[0]['particle'])
def commitScaledStack(stackdata, params): #make new params query newstackparamsq = appiondata.ApStackParamsData() for key in newstackparamsq.keys(): if key != 'bin': newstackparamsq[key] = stackdata[0]['stackRun']['stackParams'][key] newstackparamsq['bin'] = params['bin'] #make new stack query newstackq = appiondata.ApStackData() newstackq['path'] = appiondata.ApPathData( path=os.path.abspath(params['newstackpath'])) newstackq['name'] = params['newstackname'] newstackq['description'] = params['description'] newstackdata = newstackq.query() if newstackdata: print "A stack with these parameters already exists" return #make new run query #first check that run name doesn't already exist newstackrunq = appiondata.ApStackRunData() newstackrunq['stackRunName'] = os.path.basename( os.getcwd()) #use cwd for run name newstackrundata = newstackrunq.query() if newstackrundata: print "A stack run with this name (the current directory name) already exists. Exiting" sys.exit() newstackrunq = appiondata.ApStackRunData() newstackrunq['stackRunName'] = os.path.basename( os.getcwd()) #use cwd for run name newstackrunq['stackParams'] = newstackparamsq newstackrunq['session'] = stackdata[0]['stackRun']['session'] if "selectionrun" in stackdata[0]['stackRun'].keys: newstackrunq['selectionrun'] = stackdata[0]['stackRun']['selectionrun'] newstackrunq['syntheticStackParams'] = stackdata[0]['stackRun'][ 'syntheticStackParams'] #make new runs in stack query and insert also inserts stack and stack run newrisq = appiondata.ApRunsInStackData() newrisq['stack'] = newstackq newrisq['stackRun'] = newstackrunq newrisq.insert() #loop in reverse order so that order of ptcls in db is like that of orig for particle in range(len(stackdata) - 1, -1, -1): stackparticleq = appiondata.ApStackParticleData() stackparticleq['particleNumber'] = stackdata[particle][ 'particleNumber'] stackparticleq['stack'] = newstackq stackparticleq['stackRun'] = newstackrunq stackparticleq['particle'] = 
stackdata[particle]['particle'] #print stackparticleq stackparticleq.insert() return
def getAlignParticle(stackpdata, alignstackdata):
	"""Find the alignment-particle record for a stack particle.

	Resolves the particle back through the stack's 'oldstack' parent, then
	queries ApAlignParticleData for the given align stack. Returns the
	first match, or None when no alignment particle exists.
	"""
	parentstack = stackpdata['stack']['oldstack']
	partdata = stackpdata['particle']
	parentpartq = appiondata.ApStackParticleData()
	parentpartq['stack'] = parentstack
	parentpartq['particle'] = partdata
	alignpartq = appiondata.ApAlignParticleData()
	alignpartq['alignstack'] = alignstackdata
	alignpartq['stackpart'] = parentpartq
	matches = alignpartq.query(readimages=False)
	if matches:
		return matches[0]
	return None
def getOneParticleFromStackId(stackid, particlenumber=1, msg=True):
	"""Fetch one stack particle by number from the stack with id `stackid`.

	Returns the matching ApStackParticleData, or None when the stack has
	no particle with that number.
	"""
	if msg is True:
		apDisplay.printMsg("querying one stack particle from stackid="+str(stackid)+" on "+time.asctime())
	partq = appiondata.ApStackParticleData()
	partq['stack'] = appiondata.ApStackData.direct_query(stackid)
	partq['particleNumber'] = particlenumber
	matches = partq.query(results=1)
	if not matches:
		return None
	return matches[0]
def getStackParticleFromData(stackid, partdata, nodie=False):
	"""Return the stack particle that links `partdata` to stack `stackid`.

	When nothing matches: returns None if nodie is True, otherwise aborts
	with an error. Also aborts if the match is not unique.
	"""
	partq = appiondata.ApStackParticleData()
	partq['stack'] = appiondata.ApStackData.direct_query(stackid)
	partq['particle'] = partdata
	matches = partq.query()
	if not matches:
		if nodie is True:
			return None
		apDisplay.printError("partid="+str(partdata.dbid)+" was not found in stackid="+str(stackid))
	if len(matches) > 1:
		apDisplay.printError("There's a problem with this stack. More than one particle with the same particledata.")
	return matches[0]
def getStackPartID(self, stackpartnum, reconrunid, stackid=None):
	"""Return the database id of stack particle number `stackpartnum`.

	If `stackid` is not supplied it is derived from the reconstruction
	run id. Aborts with an error when the particle cannot be found.
	"""
	if stackid is None:
		stackid = apStack.getStackIdFromRecon(reconrunid, msg=False)
	stackpartq = appiondata.ApStackParticleData()
	stackpartq['stack'] = appiondata.ApStackData.direct_query(stackid)
	stackpartq['particleNumber'] = stackpartnum
	stackpartdata = stackpartq.query(results=1)
	if not stackpartdata:
		# BUG FIX: error message referenced undefined name 'partnum',
		# which raised NameError instead of printing the intended error
		apDisplay.printError("Failed to get Stack Particle ID for Number " + str(stackpartnum))
	return stackpartdata[0].dbid
def getStackParticleTiltPair(stackid, partnum, tiltstackid=None): """ takes a stack id and particle number (1+) spider-style returns the stack particle number for the tilt pair """ #print stackid, partnum if tiltstackid is None: tiltstackid = stackid t0 = time.time() stackpartdata1 = apStack.getStackParticle(stackid, partnum) partdata = stackpartdata1['particle'] ### figure out if its particle 1 or 2 tiltpartq1 = appiondata.ApTiltParticlePairData() tiltpartq1['particle1'] = partdata tiltpartdatas1 = tiltpartq1.query(results=1, readimages=False) tiltpartq2 = appiondata.ApTiltParticlePairData() tiltpartq2['particle2'] = partdata tiltpartdatas2 = tiltpartq2.query(results=1, readimages=False) if not tiltpartdatas1 and tiltpartdatas2: #print "image1" otherpart = tiltpartdatas2[0]['particle1'] elif tiltpartdatas1 and not tiltpartdatas2: #print "image2" otherpart = tiltpartdatas1[0]['particle2'] else: print partdata print tiltpartdatas1 print tiltpartdatas2 apDisplay.printError("failed to get tilt pair data") ### get tilt stack particle tiltstackdata = apStack.getOnlyStackData(tiltstackid, msg=False) stackpartq = appiondata.ApStackParticleData() stackpartq['stack'] = tiltstackdata stackpartq['particle'] = otherpart stackpartdatas2 = stackpartq.query(results=1, readimages=False) if not stackpartdatas2: #print otherpart.dbid #apDisplay.printError("particle "+str(partnum)+" has no tilt pair in stackid="+str(tiltstackid)) return None stackpartdata = stackpartdatas2[0] #print partnum,"-->",stackpartnum if time.time() - t0 > 1.0: apDisplay.printMsg("long getStackPartTiltPair " + apDisplay.timeString(time.time() - t0)) return stackpartdata
def getStackParticleFromParticleId(particleid, stackid, nodie=False):
	"""
	Provided a Stack Id & an ApParticle Id, find the stackparticle Id
	"""
	query = appiondata.ApStackParticleData()
	query['particle'] = appiondata.ApParticleData.direct_query(particleid)
	query['stack'] = appiondata.ApStackData.direct_query(stackid)
	matches = query.query()
	if not matches:
		# optionally return None instead of aborting
		if nodie is True:
			return None
		apDisplay.printError("partnum="+str(particleid)+" was not found in stackid="+str(stackid))
	if len(matches) > 1:
		apDisplay.printError("There's a problem with this stack. More than one particle with the same number.")
	return matches[0]
def getStackParticle(stackid, partnum, nodie=False):
	"""Fetch stack particle number `partnum` (1-based) from stack `stackid`.

	Aborts on a missing or non-unique match unless nodie is True, in which
	case a missing particle yields None.
	"""
	if partnum <= 0:
		# particle numbers are 1-based; warn but continue as before
		apDisplay.printMsg("cannot get particle %d from stack %d"%(partnum,stackid))
	#apDisplay.printMsg("getting particle %d from stack %d"%(partnum,stackid))
	query = appiondata.ApStackParticleData()
	query['stack'] = appiondata.ApStackData.direct_query(stackid)
	query['particleNumber'] = partnum
	matches = query.query()
	if not matches:
		if nodie is True:
			return None
		apDisplay.printError("partnum="+str(partnum)+" was not found in stackid="+str(stackid))
	if len(matches) > 1:
		apDisplay.printError("There's a problem with this stack. More than one particle with the same number.")
	return matches[0]
def scanAppionDB(self):
	"""Report recent activity in this Appion database.

	Checks the newest ApCtfData row and the newest phase-flipped stack
	particle against self.checktime and prints a magenta ANSI-colored
	notice for anything newer.
	"""
	if self.appion_dbtools.tableExists('ApCtfData'):
		# newest CTF record (query returns most recent first)
		results = appiondata.ApCtfData().query(results=1)
		if results:
			ctfdata = results[0]
			if ctfdata.timestamp > self.checktime:
				print "\033[35m%s has new ApCtfData in %d days %d hours\033[0m" % (self.appion_dbtools.getDatabaseName(),-self.deltadays,-self.deltahours)
	if self.appion_dbtools.tableExists('ApStackRunData'):
		results = appiondata.ApStackRunData().query(results=1)
		if results:
			stackrundata = results[0]
			# only phase-flipped stack runs are of interest here
			if stackrundata['stackParams']['phaseFlipped']:
				stackpartr = appiondata.ApStackParticleData(stackRun=stackrundata).query(results=1)
				if stackpartr:
					stackpartdata = stackpartr[0]
					if stackpartdata.timestamp > self.checktime:
						print "\033[35m%s has new particle inserted to Stack with phase flip in %d days %d hours\033[0m" % (self.appion_dbtools.getDatabaseName(),-self.deltadays,-self.deltahours)
def getStackParticlesFromId(stackid, msg=True):
	"""Return all stack particles for stack `stackid`, sorted.

	Sorting uses sortStackParts, a Python 2 cmp-style comparison function
	defined elsewhere in this module.
	"""
	t0 = time.time()
	if msg is True:
		apDisplay.printMsg("querying stack particles from stackid="+str(stackid)+" at "+time.asctime())
	stackdata = appiondata.ApStackData.direct_query(stackid)
	stackq = appiondata.ApStackParticleData()
	stackq['stack'] = stackdata
	# readimages=False: skip loading image data, we only need the records
	stackpartdata = stackq.query(readimages=False)
	if not stackpartdata:
		apDisplay.printWarning("failed to get particles of stackid="+str(stackid))
	if msg is True:
		apDisplay.printMsg("sorting particles")
	# Py2 cmp-function sort; order defined by sortStackParts
	stackpartdata.sort(sortStackParts)
	if msg is True:
		apDisplay.printMsg("received "+str(len(stackpartdata))+" stack particles in "+apDisplay.timeString(time.time()-t0))
	return stackpartdata
def getImageParticles(imagedata, stackid, nodie=True):
	"""
	Provided a Stack Id & imagedata, to find particles

	Returns (particles, stackparticles); when nothing is found returns
	([], None) if nodie is True, otherwise aborts with an error.
	"""
	particleq = appiondata.ApParticleData(image=imagedata)
	stackpdata = appiondata.ApStackParticleData()
	stackpdata['particle'] = particleq
	stackpdata['stack'] = appiondata.ApStackData.direct_query(stackid)
	stackps = stackpdata.query()
	particles = []
	if not stackps:
		if nodie is True:
			return particles, None
		# BUG FIX: message referenced undefined name 'particleid' (NameError);
		# report the image id that was actually searched
		apDisplay.printError("no particles for imageid="+str(imagedata.dbid)+" were found in stackid="+str(stackid))
	for stackp in stackps:
		particles.append(stackp['particle'])
	return particles, stackps
def getImageIdsFromStack(stackid, msg=True):
	"""Return the distinct image db ids for all particles in a stack.

	Order follows first appearance in the particle query; an invalid
	stackid (< 1) yields an empty list.
	"""
	if stackid < 1:
		return []
	t0 = time.time()
	partq = appiondata.ApStackParticleData()
	partq['stack'] = appiondata.ApStackData.direct_query(stackid)
	partdatas = partq.query()
	imageids = []
	if msg is True:
		apDisplay.printMsg("querying particle images from stackid="+str(stackid)+" on "+time.asctime())
	for partdata in partdatas:
		imageid = partdata['particle']['image'].dbid
		# keep only the first occurrence of each image
		if imageid not in imageids:
			imageids.append(imageid)
	if msg is True:
		apDisplay.printMsg("Found %d images from stackid=%d" % (len(imageids), stackid))
	return imageids
def commitStack(self, stackid):
	"""Append all particles of stack `stackid` to the combined stack.

	Creates the combined-stack record (reusing the source stack's runs),
	then inserts every particle with a running particle number starting
	at self.partnum.
	"""
	startpart = self.partnum
	stackq = appiondata.ApStackData()
	# NOTE(review): oldstackdata is queried but never used below — confirm
	oldstackdata = apStack.getOnlyStackData(stackid)
	stackq['name'] = self.params['stackfilename']
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
	stackq['description'] = self.params['description'] + " ... combined stack ids " + str(self.params['stacks'])
	stackq['substackname'] = self.params['runname']
	stackq['hidden'] = False
	# pixelsize is stored in meters (Angstrom * 1e-10)
	stackq['pixelsize'] = self.newpixelsize * 1e-10
	stackq['boxsize'] = self.newboxsize
	# link every stack run of the source stack to the new combined stack
	rinstackdata = apStack.getRunsInStack(stackid)
	for run in rinstackdata:
		rinstackq = appiondata.ApRunsInStackData()
		rinstackq['stack'] = stackq
		rinstackq['stackRun'] = run['stackRun']
		rinstackq.insert()
	stpartsdata = apStack.getStackParticlesFromId(stackid)
	apDisplay.printMsg("inserting " + str(len(stpartsdata)) + " particles into DB")
	for particle in stpartsdata:
		stpartq = appiondata.ApStackParticleData()
		# renumber particles continuously across the combined stacks
		stpartq['particleNumber'] = self.partnum
		stpartq['stack'] = stackq
		stpartq['stackRun'] = particle['stackRun']
		stpartq['particle'] = particle['particle']
		stpartq.insert()
		self.partnum += 1
		# progress dot every 1000 inserts
		if self.partnum % 1000 == 0:
			sys.stderr.write(".")
	sys.stderr.write("\n")
	apDisplay.printMsg("commited particles " + str(startpart) + "-" + str(self.partnum))
	return
def getParticlesFromStack(self, stackdata, imgdata, is_defocpair=False):
	"""
	For image (or defocal pair), imgdata get particles in corresponding stack

	Returns (partdatas, shiftdata): the particles in reverse particle-number
	order plus the shift between the image and its defocal sibling (zero
	shift when not a defocal pair).
	"""
	if is_defocpair is True:
		# search the sibling image and remember the shift back to imgdata
		sibling, shiftpeak = apDefocalPairs.getShiftFromImage(imgdata, self.params['sessionname'])
		if shiftpeak is None:
			return [], {'shiftx': 0, 'shifty': 0, 'scale': 1}
		shiftdata = {'shiftx': shiftpeak['shift'][0], 'shifty': shiftpeak['shift'][1], 'scale': shiftpeak['scalefactor']}
		searchimgdata = sibling
	else:
		searchimgdata = imgdata
		shiftdata = {'shiftx': 0, 'shifty': 0, 'scale': 1}
	partq = appiondata.ApParticleData()
	partq['image'] = searchimgdata
	stackpartq = appiondata.ApStackParticleData()
	stackpartq['stack'] = stackdata
	stackpartq['particle'] = partq
	stackpartdatas = stackpartq.query()
	partdatas = []
	partorder = []
	for stackpartdata in stackpartdatas:
		# skip particles beyond the optional particle-number limit
		if self.params['partlimit'] and self.params['partlimit'] < stackpartdata['particleNumber']:
			continue
		partdata = stackpartdata['particle']
		partdatas.append(partdata)
		partorder.append(stackpartdata['particleNumber'])
	# query returned newest-first; reverse to ascending particle order
	partdatas.reverse()
	partorder.reverse()
	self.writeStackParticleOrderFile(partorder)
	return partdatas, shiftdata
def uploadData(self, ctfpartlist):
	"""Upload a synthetic stack and its particles to the database.

	Builds the synthetic-stack parameter record, the stack-params, stack,
	and stack-run records; verifies that any pre-existing stack/run with
	the same identity has identical parameters; then inserts one stack
	particle per entry of ctfpartlist (mean/stdev come from
	self.partmeantree filled by getPartMeanTree).
	"""
	### read mean /stdev for uploading
	self.getPartMeanTree(os.path.join(self.params['rundir'], self.params['finalstack']), ctfpartlist)
	sessiondata = apDatabase.getSessionDataFromSessionName(self.params['sessionname'])
	if self.params['projectid'] is not None:
		projectnum = self.params['projectid']
	else:
		projectnum = apProject.getProjectIdFromSessionName(self.params['sessionname'])
	### create synthetic stack object ... not saving global params like runname, session, project, description, etc. here; that's in ApStackData
	syntheticq = appiondata.ApSyntheticStackParamsData()
	### get number of fakestack runs
	numentries = len(syntheticq)
	syntheticq['modelid'] = appiondata.ApInitialModelData.direct_query(self.params['modelid'])
	syntheticq['boxsize'] = self.params['box']
	syntheticq['apix'] = self.params['apix']
	syntheticq['projcount'] = self.params['projcount']
	syntheticq['projstdev'] = self.params['projstdev']
	syntheticq['shiftrad'] = self.params['shiftrad']
	syntheticq['rotang'] = self.params['rotang']
	syntheticq['flip'] = self.params['flip']
	syntheticq['kilovolts'] = self.params['kv']
	syntheticq['spher_aber'] = self.params['cs']
	syntheticq['defocus_x'] = self.params['df1']
	syntheticq['defocus_y'] = self.params['df2']
	syntheticq['randomdef'] = self.params['randomdef']
	# std-dev fields only stored when the corresponding option is on
	if self.params['randomdef'] is True:
		syntheticq['randomdef_std'] = self.params['randomdef_std']
	syntheticq['astigmatism'] = self.params['astigmatism']
	syntheticq['snr1'] = self.params['snr1']
	syntheticq['snrtot'] = self.params['snrtot']
	syntheticq['envelope'] = os.path.basename(self.params['envelopefile'])
	syntheticq['ace2correct'] = self.params['ace2correct']
	syntheticq['ace2correct_rand'] = self.params['ace2correct_rand']
	if self.params['ace2correct_rand'] is True:
		syntheticq['ace2correct_std'] = self.params['ace2correct_std']
	syntheticq['ace2estimate'] = self.params['ace2estimate']
	syntheticq['lowpass'] = self.params['lpfilt']
	syntheticq['highpass'] = self.params['hpfilt']
	syntheticq['norm'] = self.params['norm']
	### fill stack parameters
	stparamq = appiondata.ApStackParamsData()
	stparamq['boxSize'] = self.params['box']
	stparamq['bin'] = 1
	stparamq['fileType'] = "imagic"
	stparamq['defocpair'] = 0
	stparamq['lowpass'] = self.params['lpfilt']
	stparamq['highpass'] = self.params['hpfilt']
	stparamq['norejects'] = 1
	if self.params['invert'] is True:
		stparamq['inverted'] = 1
	else:
		stparamq['inverted'] = 0
	# either ace2 correction mode counts as phase flipping
	if self.params['ace2correct'] is True or self.params['ace2correct_rand'] is True:
		stparamq['phaseFlipped'] = 1
		stparamq['fliptype'] = "ace2part"
	else:
		stparamq['phaseFlipped'] = 0
	stparamq['normalized'] = self.params['norm']
	# NOTE(review): paramslist is queried but not used afterwards — confirm
	paramslist = stparamq.query()
	### create a stack object
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
	### see if stack already exists in the database (just checking path & name)
	uniqstackdatas = stackq.query(results=1)
	### create a stackRun object
	runq = appiondata.ApStackRunData()
	runq['stackRunName'] = self.params['runname']
	runq['session'] = sessiondata
	### see if stack run already exists in the database (just checking runname & session)
	uniqrundatas = runq.query(results=1)
	### finish stack object
	stackq['name'] = self.params['finalstack']
	stackq['description'] = self.params['description']
	stackq['hidden'] = 0
	# pixelsize is stored in meters (Angstrom * 1e-10)
	stackq['pixelsize'] = self.params['apix'] * 1e-10
	stackq['boxsize'] = self.params['box']
	self.stackdata = stackq
	### finish stackRun object
	runq['stackParams'] = stparamq
	runq['syntheticStackParams'] = syntheticq
	self.stackrundata = runq
	### create runinstack object
	rinstackq = appiondata.ApRunsInStackData()
	rinstackq['stackRun'] = runq
	### if not in the database, make sure run doesn't already exist
	if not uniqstackdatas and not uniqrundatas:
		if self.params['commit'] is True:
			apDisplay.printColor("Inserting stack parameters into database", "cyan")
			rinstackq['stack'] = stackq
			rinstackq.insert()
		else:
			apDisplay.printWarning("NOT INSERTING stack parameters into database")
	elif uniqrundatas and not uniqstackdatas:
		apDisplay.printError("Weird, run data without stack already in the database")
	else:
		rinstack = rinstackq.query(results=1)
		prevrinstackq = appiondata.ApRunsInStackData()
		prevrinstackq['stackRun'] = uniqrundatas[0]
		prevrinstackq['stack'] = uniqstackdatas[0]
		prevrinstack = prevrinstackq.query(results=1)
		## if no runinstack found, find out which parameters are wrong:
		if not rinstack:
			# diff each parameter group and abort on the first mismatch
			for i in uniqrundatas[0]:
				print "r =======",i,"========"
				if uniqrundatas[0][i] != runq[i]:
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i,uniqrundatas[0][i],runq[i]
			for i in uniqrundatas[0]['stackParams']:
				print "p =======",i,"========"
				if uniqrundatas[0]['stackParams'][i] != stparamq[i]:
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i, uniqrundatas[0]['stackParams'][i], stparamq[i]
			for i in uniqstackdatas[0]:
				print "s =======",i,"========"
				if uniqstackdatas[0][i] != stackq[i]:
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i,uniqstackdatas[0][i],stackq[i]
			for i in prevrinstack[0]:
				print "rin =======",i,"========"
				if prevrinstack[0][i] != rinstackq[i]:
					print i,prevrinstack[0][i],rinstackq[i]
					apDisplay.printError("the value for parameter '"+str(i)+"' is different from before")
				else:
					print i,prevrinstack[0][i],rinstackq[i]
			apDisplay.printError("All parameters for a particular stack must be identical! \n"+\
				"please check your parameter settings.")
		apDisplay.printWarning("Stack already exists in database! Will try and appending new particles to stack")
	### create a fake selection run
	# selectq = appiondata.ApSelectionRunData()
	# selectq['session'] = sessiondata
	# selectq['name'] = "fakerun"
	# self.selectq = selectq
	if self.params['commit'] is True:
		apDisplay.printColor("Inserting fake selection parameters into the database", "cyan")
		# selectq.insert()
	else:
		apDisplay.printWarning("NOT INSERTING fake selection parameters into the database")
	partNumber = 0
	### loop over the particles and insert
	if self.params['commit'] is True:
		apDisplay.printColor("inserting particle parameters into database", "cyan")
	else:
		apDisplay.printWarning("NOT INSERTING particle parameters into database")
	for i in range(len(ctfpartlist)):
		partNumber += 1
		partfile = ctfpartlist[i]
		partmeandict = self.partmeantree[i]
		partq = appiondata.ApParticleData()
		# partq['selectionrun'] = selectq
		# synthetic particles have no picked coordinate; reuse the number
		partq['xcoord'] = partNumber
		stpartq = appiondata.ApStackParticleData()
		### check unique params
		stpartq['stack'] = self.stackdata
		stpartq['stackRun'] = self.stackrundata
		stpartq['particleNumber'] = partNumber
		stpartdata = stpartq.query(results=1)
		if stpartdata:
			apDisplay.printError("trying to insert a duplicate particle")
		stpartq['particle'] = partq
		stpartq['mean'] = partmeandict['mean']
		stpartq['stdev'] = partmeandict['stdev']
		if self.params['commit'] is True:
			stpartq.insert()
	return
def commitSubStack(params, newname=False, centered=False, oldstackparts=None, sorted=False):
	"""
	commit a substack to database

	required params: stackid description commit rundir keepfile

	Reads particle numbers from the keepfile (0-based on disk, converted
	to 1-based), copies the corresponding particles of the parent stack,
	and renumbers them consecutively in the new substack.
	NOTE(review): the parameter name 'sorted' shadows the builtin; kept
	for interface compatibility.
	"""
	oldstackdata = getOnlyStackData(params['stackid'], msg=False)
	#create new stack data
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(params['rundir']))
	stackq['name'] = oldstackdata['name']
	# use new stack name if provided
	if newname:
		stackq['name'] = newname
	stackdata = stackq.query(results=1)
	if stackdata:
		apDisplay.printWarning("A stack with these parameters already exists")
		return
	stackq['oldstack'] = oldstackdata
	stackq['hidden'] = False
	stackq['substackname'] = params['runname']
	stackq['description'] = params['description']
	stackq['pixelsize'] = oldstackdata['pixelsize']
	stackq['boxsize'] = oldstackdata['boxsize']
	if 'correctbeamtilt' in params.keys():
		stackq['beamtilt_corrected'] = params['correctbeamtilt']
	if sorted is True:
		stackq['junksorted'] = True
	if centered is True:
		stackq['centered'] = True
	if 'mask' in params:
		stackq['mask'] = params['mask']
	if 'maxshift' in params:
		stackq['maxshift'] = params['maxshift']
	## insert now before datamanager cleans up referenced data
	stackq.insert()
	#Insert particles
	listfile = params['keepfile']
	### read list and sort
	f = open(listfile, 'r')
	listfilelines = []
	for line in f:
		sline = line.strip()
		if re.match("[0-9]+", sline):
			# keepfile numbers are 0-based; stored particle numbers are 1-based
			listfilelines.append(int(sline.split()[0])+1)
		else:
			apDisplay.printWarning("Line in listfile is not int: "+str(line))
	listfilelines.sort()
	total = len(listfilelines)
	f.close()
	## index old stack particles by number
	part_by_number = {}
	if oldstackparts is not None:
		for part in oldstackparts:
			part_by_number[part['particleNumber']] = part
	apDisplay.printMsg("Inserting stack particles")
	count = 0
	newpartnum = 1
	for origpartnum in listfilelines:
		count += 1
		# in-place progress counter on stderr every 100 particles
		if count % 100 == 0:
			sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
			sys.stderr.write(str(count)+" of "+(str(total))+" complete")
		# Find corresponding particle in old stack
		# Use previously queried particles if possible, otherwise
		# do new query here (very slow if millions of prtls in DB)
		try:
			oldstackpartdata = part_by_number[origpartnum]
		except KeyError:
			oldstackpartdata = getStackParticle(params['stackid'], origpartnum)
		# Insert particle
		newstackq = appiondata.ApStackParticleData()
		newstackq.update(oldstackpartdata)
		newstackq['particleNumber'] = newpartnum
		newstackq['stack'] = stackq
		if params['commit'] is True:
			newstackq.insert()
		newpartnum += 1
	sys.stderr.write("\n")
	# NOTE(review): newpartnum starts at 1, so this branch never fires — confirm
	if newpartnum == 0:
		apDisplay.printError("No particles were inserted for the stack")
	apDisplay.printMsg("Inserted "+str(newpartnum-1)+" stack particles into the database")
	apDisplay.printMsg("Inserting Runs in Stack")
	runsinstack = getRunsInStack(params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = stackq
		newrunsq['stackRun'] = run['stackRun']
		if params['commit'] is True:
			newrunsq.insert()
		else:
			apDisplay.printWarning("Not commiting to the database")
	apDisplay.printMsg("finished")
	return
def uploadResults(self):
	"""Commit the junk-sorted stack to the database.

	Parses sort_junk.sel to recover the sorted file order, creates the
	'sorted.hed' stack record, and inserts one stack particle per sorted
	entry (copying stackRun/particle/mean/stdev from the old stack).
	No-op unless params['commit'] is True.
	"""
	if self.params['commit'] is False:
		return
	# Get the new file order
	fh = open("sort_junk.sel", 'r')
	lines = fh.readlines()
	i = 0
	fileorder = {}
	for line in lines:
		args = line.split()
		if (len(args) > 1):
			# extract the numeric index from filenames like 'img00042.xmp'
			match = re.match('[A-Za-z]+([0-9]+)\.[A-Za-z]+', (args[0].split('/'))[-1])
			if (match):
				filenumber = int(match.groups()[0])
				fileorder[i] = filenumber
				i += 1
	fh.close()
	# Produce a new stack
	oldstack = apStack.getOnlyStackData(self.params['stackid'], msg=False)
	newstack = appiondata.ApStackData()
	newstack['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
	newstack['name'] = "sorted.hed"
	if newstack.query(results=1):
		apDisplay.printError("A stack with these parameters already exists")
	# Fill in data and submit
	newstack['oldstack'] = oldstack
	newstack['hidden'] = False
	newstack['substackname'] = self.params['runname']
	newstack['description'] = self.params['description']
	newstack['pixelsize'] = oldstack['pixelsize']
	newstack['boxsize'] = oldstack['boxsize']
	newstack['junksorted'] = True
	newstack.insert()
	# Insert stack images
	apDisplay.printMsg("Inserting stack particles")
	count = 0
	total = len(fileorder.keys())
	if total == 0:
		apDisplay.printError("No particles can be inserted in the sorted stack")
	for i in fileorder.keys():
		count += 1
		# in-place progress counter on stderr every 100 particles
		if count % 100 == 0:
			sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
			sys.stderr.write(str(count) + " of " + (str(total)) + " complete")
		# Get particle from the old stack (file numbers are 0-based,
		# stack particle numbers are 1-based)
		oldparticle = apStack.getStackParticle(self.params['stackid'], fileorder[i] + 1)
		# Insert particle
		newparticle = appiondata.ApStackParticleData()
		newparticle['particleNumber'] = i + 1
		newparticle['stack'] = newstack
		newparticle['stackRun'] = oldparticle['stackRun']
		newparticle['particle'] = oldparticle['particle']
		newparticle['mean'] = oldparticle['mean']
		newparticle['stdev'] = oldparticle['stdev']
		newparticle.insert()
	apDisplay.printMsg("\n" + str(total) + " particles have been inserted into the sorted stack")
	# Insert runs in stack
	apDisplay.printMsg("Inserting Runs in Stack")
	runsinstack = apStack.getRunsInStack(self.params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = newstack
		newrunsq['stackRun'] = run['stackRun']
		newrunsq.insert()
	apDisplay.printMsg("finished")
	return
def createStackData(self):
	"""Create database records for an uploaded stack.

	Builds a fake selection run, the stack/stack-params/stack-run records,
	optionally links to a sync stack, then inserts one synthetic
	ApParticleData + ApStackParticleData per particle (no source image).
	Inserts only happen when params['commit'] is True.
	"""
	apDisplay.printColor("Starting upload of stack", "blue")
	pathq = appiondata.ApPathData()
	pathq['path'] = self.params['rundir']
	manq = appiondata.ApManualParamsData()
	manq['diam'] = self.params['diameter']
	manq['trace'] = False
	# fake selection run so uploaded particles have a selection reference
	selectq = appiondata.ApSelectionRunData()
	selectq['name'] = 'fakestack_' + self.params['runname']
	selectq['hidden'] = True
	selectq['path'] = pathq
	selectq['session'] = self.sessiondata
	selectq['manparams'] = manq
	stackq = appiondata.ApStackData()
	stackq['name'] = "start.hed"
	stackq['path'] = pathq
	stackq['description'] = self.params['description']
	stackq['hidden'] = False
	# pixelsize is stored in meters (Angstrom * 1e-10)
	stackq['pixelsize'] = self.params['apix'] * 1e-10
	stackq['boxsize'] = self.boxsize
	stackq['centered'] = False
	stackparamq = appiondata.ApStackParamsData()
	stackparamq['boxSize'] = self.boxsize
	stackparamq['bin'] = 1
	stackparamq['phaseFlipped'] = self.params['ctfcorrect']
	if self.params['ctfcorrect'] is True:
		stackparamq['fileType'] = "manual"
	# NOTE(review): this unconditionally overwrites the "manual" value set
	# just above, making that branch dead code — possibly the branch was
	# meant to set 'fliptype' instead; confirm before changing
	stackparamq['fileType'] = "imagic"
	stackparamq['normalized'] = self.params['normalize']
	stackparamq['lowpass'] = 0
	stackparamq['highpass'] = 0
	stackrunq = appiondata.ApStackRunData()
	stackrunq['stackRunName'] = self.params['runname']
	stackrunq['stackParams'] = stackparamq
	stackrunq['selectionrun'] = selectq
	stackrunq['session'] = self.sessiondata
	runsinstackq = appiondata.ApRunsInStackData()
	runsinstackq['stack'] = stackq
	runsinstackq['stackRun'] = stackrunq
	if self.params['commit'] is True:
		runsinstackq.insert()
		# optionally link this stack to another stack it is synced with
		if 'syncstackid' in self.params.keys() and self.params['syncstackid']:
			stackdata = runsinstackq['stack']
			stack2data = apStack.getOnlyStackData(self.params['syncstackid'])
			syncq = appiondata.ApSyncStackData(stack1=stackdata, stack2=stack2data, synctype=self.params['synctype'])
			syncq.insert()
	### for each particle
	sys.stderr.write("Starting particle upload")
	for i in range(self.numpart):
		# progress dot every 100 particles
		if i % 100 == 0:
			sys.stderr.write(".")
		partq = appiondata.ApParticleData()
		partq['image'] = None  # We have no image, see if this works???
		partq['selectionrun'] = selectq
		# synthetic grid coordinates: 1000 particles per row
		partq['xcoord'] = int(i % 1000)
		partq['ycoord'] = int(i / 1000)
		partq['diameter'] = self.params['diameter']
		stackpartq = appiondata.ApStackParticleData()
		stackpartq['particleNumber'] = i + 1
		stackpartq['stack'] = stackq
		stackpartq['stackRun'] = stackrunq
		stackpartq['particle'] = partq
		stackpartq['mean'] = 0.0
		stackpartq['stdev'] = 1.0
		if self.params['commit'] is True:
			stackpartq.insert()
	sys.stderr.write("\n")
	return
def commitMaskedStack(params, oldstackparts, newname=False):
	"""
	commit a substack to database

	required params: stackid description commit rundir mask

	Copies every particle of the parent stack into a new masked stack,
	renumbering them consecutively from 1.
	"""
	oldstackdata = getOnlyStackData(params['stackid'], msg=False)
	#create new stack data
	stackq = appiondata.ApStackData()
	stackq['path'] = appiondata.ApPathData(path=os.path.abspath(params['rundir']))
	stackq['name'] = oldstackdata['name']
	# use new stack name if provided
	if newname:
		stackq['name'] = newname
	stackdata = stackq.query(results=1)
	if stackdata:
		apDisplay.printWarning("A stack with these parameters already exists")
		return
	stackq['oldstack'] = oldstackdata
	stackq['hidden'] = False
	stackq['substackname'] = params['runname']
	stackq['description'] = params['description']
	stackq['pixelsize'] = oldstackdata['pixelsize']
	stackq['boxsize'] = oldstackdata['boxsize']
	stackq['mask'] = params['mask']
	if 'correctbeamtilt' in params.keys():
		stackq['beamtilt_corrected'] = params['correctbeamtilt']
	## insert now before datamanager cleans up referenced data
	stackq.insert()
	#Insert particles
	apDisplay.printMsg("Inserting stack particles")
	count = 0
	newpartnum = 1
	total = len(oldstackparts)
	for part in oldstackparts:
		count += 1
		# in-place progress counter on stderr every 100 particles
		if count % 100 == 0:
			sys.stderr.write("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
			sys.stderr.write(str(count)+" of "+(str(total))+" complete")
		# Insert particle
		newstackq = appiondata.ApStackParticleData()
		newstackq.update(part)
		newstackq['particleNumber'] = newpartnum
		newstackq['stack'] = stackq
		if params['commit'] is True:
			newstackq.insert()
		newpartnum += 1
	sys.stderr.write("\n")
	# NOTE(review): newpartnum starts at 1, so this branch never fires — confirm
	if newpartnum == 0:
		apDisplay.printError("No particles were inserted for the stack")
	apDisplay.printMsg("Inserted "+str(newpartnum-1)+" stack particles into the database")
	apDisplay.printMsg("Inserting Runs in Stack")
	runsinstack = getRunsInStack(params['stackid'])
	for run in runsinstack:
		newrunsq = appiondata.ApRunsInStackData()
		newrunsq['stack'] = stackq
		newrunsq['stackRun'] = run['stackRun']
		if params['commit'] is True:
			newrunsq.insert()
		else:
			apDisplay.printWarning("Not commiting to the database")
	apDisplay.printMsg("finished")
	return
def start(self):
	"""Build a substack excluding particles with large Euler jumps.

	Queries Euler-jump data for every particle of self.stackdata, keeps
	particles whose median jump is <= params['maxjump'], writes a keepfile
	of EMAN (0-based) particle numbers, creates the filtered stack, and
	optionally commits it.
	"""
	### new stack path
	oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
	newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
	apStack.checkForPreviousStack(newstack)
	### get particles from stack
	apDisplay.printMsg("Querying stack particles")
	t0 = time.time()
	stackpartq = appiondata.ApStackParticleData()
	stackpartq['stack'] = self.stackdata
	particles = stackpartq.query()
	apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
	### write included particles to text file
	includeParticle = []
	excludeParticle = 0
	f = open("test.log", "w")
	count = 0
	apDisplay.printMsg("Processing stack particles")
	t0 = time.time()
	for part in particles:
		count += 1
		if count % 500 == 0:
			sys.stderr.write(".")
		# EMAN particle numbers are 0-based
		emanstackpartnum = part['particleNumber'] - 1
		### get euler jump data
		jumpq = appiondata.ApEulerJumpData()
		jumpq['particle'] = part
		jumpq['refineRun'] = self.recondata
		jumpdatas = jumpq.query(results=1)
		if not jumpdatas or len(jumpdatas) < 1:
			### no data
			continue
		jumpdata = jumpdatas[0]
		if jumpdata['median'] is None or jumpdata['median'] > self.params['maxjump']:
			### bad particle
			excludeParticle += 1
			# BUG FIX: median can be None here; "%.1f" % None raises
			# TypeError, so substitute a sentinel value in the log line
			median = jumpdata['median'] if jumpdata['median'] is not None else -1.0
			f.write("%d\t%d\t%.1f\texclude\n" % (count, emanstackpartnum, median))
		else:
			### good particle
			includeParticle.append(emanstackpartnum)
			f.write("%d\t%d\t%.1f\tinclude\n" % (count, emanstackpartnum, jumpdata['median']))
	sys.stderr.write("\n")
	apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
	f.close()
	includeParticle.sort()
	apDisplay.printMsg("Keeping " + str(len(includeParticle)) + " and excluding " + str(excludeParticle) + " particles")
	#print includeParticle
	### write kept particles to file
	self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-" + self.timestamp + ".list")
	apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
	kf = open(self.params['keepfile'], "w")
	for partnum in includeParticle:
		kf.write(str(partnum) + "\n")
	kf.close()
	### get number of particles
	numparticles = len(includeParticle)
	self.params['description'] += (" ... %d no jumpers substack" % (numparticles, ))
	### create the new sub stack
	apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
	if not os.path.isfile(newstack):
		apDisplay.printError("No stack was created")
	apStack.averageStack(stack=newstack)
	if self.params['commit'] is True:
		apStack.commitSubStack(self.params)
		newstackid = apStack.getStackIdFromPath(newstack)
		apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)