def getClusterStack(self):
	"""Build a stack of the selected class averages from the cluster stack.

	If neither an include nor an exclude list was supplied, all classes
	are kept and no keepfile is written.  Otherwise the surviving class
	numbers are written to a keepfile and used to make a substack.
	Returns a tuple (newstack, numclusters).
	"""
	numclusters = self.clusterstackdata['num_classes']
	if self.params['excludelist'] is None and self.params['includelist'] is None:
		### Case 1: Keep all classes
		self.params['keepfile'] = None
		apDisplay.printMsg("Keeping all %d clusters" % (numclusters))
	else:
		### Case 2: Keep subset of classes
		# parse comma-separated class numbers to drop
		excludelist = []
		if self.params['excludelist'] is not None:
			excludelist = [int(item.strip()) for item in self.params['excludelist'].split(",")]
			apDisplay.printMsg("Exclude list: " + str(excludelist))
		# parse comma-separated class numbers to keep
		includelist = []
		if self.params['includelist'] is not None:
			includelist = [int(item.strip()) for item in self.params['includelist'].split(",")]
			apDisplay.printMsg("Include list: " + str(includelist))
		### write surviving cluster numbers to the keepfile, one per line
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-" + self.timestamp + ".list")
		apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		count = 0
		for clusternum in range(numclusters):
			keep = (len(includelist) > 0 and clusternum in includelist) \
				or (len(excludelist) > 0 and clusternum not in excludelist)
			if keep:
				count += 1
				kf.write(str(clusternum) + "\n")
		kf.close()
		apDisplay.printMsg("Keeping %d of %d clusters" % (count, numclusters))
		### the substack will contain only the kept classes
		numclusters = count
	### create the new sub stack of class averages
	newstack = os.path.join(self.params['rundir'], "rawclusters.hed")
	oldstack = os.path.join(self.clusterstackdata['path']['path'], self.clusterstackdata['avg_imagicfile'])
	apFile.removeStack(newstack)
	apStack.makeNewStack(oldstack, newstack, self.params['keepfile'])
	if not os.path.isfile(newstack):
		apDisplay.printError("No cluster stack was created")
	return newstack, numclusters
def start(self):
	"""Create and commit a substack of the full stack that contains
	only the good tilt-pair particles."""
	# both queries also register the stacks with the pipeline db
	knownstackdata = apStack.getOnlyStackData(self.params['knownstackid'])
	fullstackdata = apStack.getOnlyStackData(self.params['fullstackid'])
	### determine which particle numbers to keep
	includeParticle, tiltParticlesData = self.getGoodParticles()
	self.numpart = len(includeParticle)
	### dump kept particle numbers to the keepfile, one per line
	keeppath = os.path.join(self.params['rundir'], "keepfile"+self.timestamp+".lst")
	self.params['keepfile'] = keeppath
	apDisplay.printMsg("writing to keepfile "+keeppath)
	kf = open(keeppath, "w")
	kf.writelines([str(partnum)+"\n" for partnum in includeParticle])
	kf.close()
	### build the new stack of tilted particles from that run
	fullstackfile = os.path.join(fullstackdata['path']['path'], fullstackdata['name'])
	# keep the extension of the source stack file
	extension = os.path.splitext(fullstackdata['name'])[-1]
	newname = ("tiltpairsub%d" % self.params['knownstackid']) + extension
	newstackfile = os.path.join(self.params['rundir'], newname)
	apFile.removeStack(newstackfile, warn=False)
	apStack.makeNewStack(fullstackfile, newstackfile, keeppath)
	if not os.path.isfile(newstackfile):
		apDisplay.printError("No stack was created")
	### commit the substack against the full stack id
	self.params['stackid'] = self.params['fullstackid']
	apStack.commitSubStack(self.params, newname, sorted=False)
	apStack.averageStack(stack=newstackfile)
	newstackid = apStack.getStackIdFromPath(newstackfile)
	if self.params['meanplot'] is True:
		apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
		apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
	"""Rebuild a substack keepfile from a downloaded stack file.

	Reads the particle db ids stored in each image header of the new
	stack, maps them back to zero-based stack positions, writes a
	proc2d-style keepfile, then creates and commits the substack.
	"""
	self.stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)
	# creating a keepfile, fixed filename
	self.params['keepfile'] = os.path.join(self.params['newstackpath'], "keepfile.lst")
	#path to the old stack
	oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
	#path to the new stack. the stack path will be provided by the db in the future
	newstack = os.path.join(self.params['newstackpath'], self.params['newstack'])
	# count particles: each image record is 1024 bytes.
	# FIX: open in binary mode and close the handle (original used text
	# mode and leaked the file descriptor)
	h = open(newstack, 'rb')
	try:
		numimg = 0
		while h.read(1024):
			numimg += 1
	finally:
		h.close()
	#have to use this function to make sure i get the same particle number like in the download
	stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])
	# map the unique particle db id onto the zero-based stack position;
	# decrement by 1 so positions count from 0 like proc2d expects
	partdict = {}
	for part in stackpartdata:
		partdict[int(part.dbid)] = int(part['particleNumber']) - 1
	# write the keepfile; float word 19 of each 1024-byte header record
	# holds the particle db id -- presumably written by the downloader,
	# TODO confirm against the stack writer
	f = open(self.params['keepfile'], 'w')
	try:
		for i in range(numimg):
			# FIX: mode="r" -- read-only mapping; the numpy default 'r+'
			# opens the stack writable for a pure read
			dbid = int(numpy.memmap(newstack, dtype="float32", mode="r", offset=i * 1024 + 19 * 4)[0])
			f.write('%d\n' % partdict[dbid])
	finally:
		f.close()
	newcreatestack = os.path.join(self.params['rundir'], self.params['newstack'])
	apStack.makeNewStack(oldstack, newcreatestack, self.params['keepfile'], bad=True)
	apStack.commitSubStack(self.params, self.params['newstack'], sorted=False)
	apStack.averageStack(stack=newcreatestack)
	newstackid = apStack.getStackIdFromPath(newcreatestack)
def getClusterStack(self):
	"""Fetch the selected class averages out of the cluster stack.

	With no include/exclude list every class is kept and no keepfile is
	written; otherwise a keepfile of surviving class numbers drives the
	substack creation.  Returns (newstack, numclusters).
	"""
	numclusters = self.clusterstackdata['num_classes']
	if self.params['excludelist'] is None and self.params['includelist'] is None:
		### Case 1: keep every class average
		self.params['keepfile'] = None
		apDisplay.printMsg("Keeping all %d clusters"%(numclusters))
	else:
		### Case 2: keep only a subset of the classes
		# classes to be dropped
		excludelist = []
		if self.params['excludelist'] is not None:
			for token in self.params['excludelist'].split(","):
				excludelist.append(int(token.strip()))
			apDisplay.printMsg("Exclude list: "+str(excludelist))
		# classes to be retained
		includelist = []
		if self.params['includelist'] is not None:
			for token in self.params['includelist'].split(","):
				includelist.append(int(token.strip()))
			apDisplay.printMsg("Include list: "+str(includelist))
		### record the surviving cluster numbers in the keepfile
		keeppath = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
		self.params['keepfile'] = keeppath
		apDisplay.printMsg("writing to keepfile "+keeppath)
		kept = open(keeppath, "w")
		count = 0
		for clusternum in range(numclusters):
			explicitly_kept = len(includelist) > 0 and clusternum in includelist
			survived_filter = len(excludelist) > 0 and clusternum not in excludelist
			if not (explicitly_kept or survived_filter):
				continue
			count += 1
			kept.write(str(clusternum)+"\n")
		kept.close()
		apDisplay.printMsg("Keeping %d of %d clusters"%(count,numclusters))
		### override number of clusters with the kept count
		numclusters = count
	### create the new sub stack
	newstack = os.path.join(self.params['rundir'], "rawclusters.hed")
	oldstack = os.path.join(self.clusterstackdata['path']['path'], self.clusterstackdata['avg_imagicfile'])
	apFile.removeStack(newstack)
	apStack.makeNewStack(oldstack, newstack, self.params['keepfile'])
	if not os.path.isfile(newstack):
		apDisplay.printError("No cluster stack was created")
	return newstack, numclusters
def start(self):
	"""Keep the particles whose (mean, stdev) falls on the requested side
	of the line through (minx, miny)-(maxx, maxy), then build, commit and
	average the substack and make its mean plot."""
	stackparts = apStack.getStackParticlesFromId(self.params['stackid'])
	stackdata = apStack.getOnlyStackData(self.params['stackid'])
	newname = stackdata['name']
	oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
	newstack = os.path.join(self.params['rundir'], newname)
	# calculate slope and intercept of the cutoff line through the two
	# given points.  FIX: float() avoids Python 2 integer truncation when
	# the cutoff coordinates are given as ints, and a zero run is now a
	# clean error instead of a ZeroDivisionError traceback.
	run = self.params['maxx'] - self.params['minx']
	if run == 0:
		apDisplay.printError("minx and maxx must differ to define the cutoff line")
	slope = float(self.params['maxy'] - self.params['miny']) / run
	intercept = self.params['miny'] - (slope * self.params['minx'])
	numparticles = 0
	self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-" + self.timestamp + ".list")
	f = open(self.params['keepfile'], 'w')
	for stackpart in stackparts:
		# only particles inside the mean window are considered
		if not (self.params['minx'] < stackpart['mean'] < self.params['maxx']):
			continue
		calcY = slope * stackpart['mean'] + intercept
		# keep below the line by default, above it with --keepabove
		if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
		   (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
			# EMAN counts particles from zero
			emanpartnum = stackpart['particleNumber'] - 1
			f.write('%i\n' % emanpartnum)
			numparticles += 1
	f.close()
	self.params['description'] += ((" ... %d particle substack of stackid %d" % (numparticles, self.params['stackid'])))
	#create the new sub stack
	apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
	if not os.path.isfile(newstack):
		apDisplay.printError("No stack was created")
	apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
	apStack.averageStack(stack=newstack)
	# stack mean plot
	newstackid = apStack.getStackIdFromPath(newstack)
	apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
	apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
	"""Split the source stack into one or more random substacks
	and commit each of them."""
	#find stack particles
	stackparticles = apStack.getStackParticlesFromId(self.params['stackid'])
	# decide how many particles each substack gets
	if self.params['logsplit']:
		#stacklist = oldLogSplit(self.params['logstart'], len(stackparticles), self.params['logdivisions'])
		stacklist = evenLogSplit(self.params['logstart'], len(stackparticles))
	elif self.params['nptcls']:
		stacklist = [self.params['nptcls']]
	else:
		apDisplay.printError("Please specify nptlcs or logsplit")
	oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
	oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
	# default run directory: parent of the old stack path
	if self.params['rundir'] is None:
		self.params['rundir'] = os.path.split(os.path.abspath(oldstackdata['path']['path']))[0]
	apDisplay.printMsg("Out directory: " + self.params['rundir'])
	origdescription = self.params['description']
	for nkeep in stacklist:
		self.params['description'] = origdescription + (" ... split %d particles from original stackid=%d" % (nkeep, self.params['stackid']))
		workingdir = os.path.join(self.params['rundir'], str(nkeep))
		# bail out early if this substack was committed before
		newstack = os.path.join(workingdir, self.params['stackname'])
		apStack.checkForPreviousStack(newstack)
		# make the working directory current
		apDisplay.printMsg("Run directory: " + workingdir)
		apParam.createDirectory(workingdir)
		os.chdir(workingdir)
		# random selection list for this substack size
		lstfile = makeRandomLst(nkeep, stackparticles, self.params)
		#shutil.copy(lstfile, workingdir)
		apStack.makeNewStack(oldstack, newstack, lstfile)
		# record bookkeeping and commit the new stack
		self.params['keepfile'] = os.path.abspath(lstfile)
		self.params['rundir'] = os.path.abspath(workingdir)
		apStack.commitSubStack(self.params)
def start(self):
	"""Generate random substacks of the source stack, one per requested
	size, committing each to the database."""
	# particles of the source stack
	stackparticles = apStack.getStackParticlesFromId(self.params['stackid'])
	# substack sizes: logarithmic series or a single explicit count
	if self.params['logsplit']:
		#stacklist = oldLogSplit(self.params['logstart'], len(stackparticles), self.params['logdivisions'])
		stacklist = evenLogSplit(self.params['logstart'], len(stackparticles))
	elif self.params['nptcls']:
		stacklist = [self.params['nptcls']]
	else:
		apDisplay.printError("Please specify nptlcs or logsplit")
	oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
	oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
	# when no run directory is given, use the parent of the stack path
	if self.params['rundir'] is None:
		parent = os.path.split(os.path.abspath(oldstackdata['path']['path']))[0]
		self.params['rundir'] = parent
	apDisplay.printMsg("Out directory: "+self.params['rundir'])
	origdescription = self.params['description']
	for num_particles in stacklist:
		suffix = " ... split %d particles from original stackid=%d" % (num_particles, self.params['stackid'])
		self.params['description'] = origdescription + suffix
		dest_dir = os.path.join(self.params['rundir'], str(num_particles))
		# refuse to overwrite a previously committed stack
		newstack = os.path.join(dest_dir, self.params['stackname'])
		apStack.checkForPreviousStack(newstack)
		apDisplay.printMsg("Run directory: "+dest_dir)
		apParam.createDirectory(dest_dir)
		os.chdir(dest_dir)
		# build the random particle list and the substack from it
		lstfile = makeRandomLst(num_particles, stackparticles, self.params)
		apStack.makeNewStack(oldstack, newstack, lstfile)
		# commit with paths recorded in params
		self.params['keepfile'] = os.path.abspath(lstfile)
		self.params['rundir'] = os.path.abspath(dest_dir)
		apStack.commitSubStack(self.params)
def start(self):
	"""Filter stack particles by a line in (mean, stdev) space.

	Particles inside the mean window whose stdev lies on the requested
	side of the line through (minx, miny)-(maxx, maxy) are written to a
	keepfile; the substack is then built, committed, averaged and a mean
	plot is generated."""
	stackparts = apStack.getStackParticlesFromId(self.params['stackid'])
	stackdata = apStack.getOnlyStackData(self.params['stackid'])
	newname = stackdata['name']
	oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
	newstack = os.path.join(self.params['rundir'], newname)
	# calculate slope and intercept from the two points given.
	# FIX: float() prevents Python 2 integer truncation when the cutoff
	# coordinates are integers; a zero x-range is reported cleanly
	# instead of raising ZeroDivisionError.
	xrange_ = self.params['maxx'] - self.params['minx']
	if xrange_ == 0:
		apDisplay.printError("minx and maxx must differ to define the cutoff line")
	slope = float(self.params['maxy'] - self.params['miny']) / xrange_
	intercept = self.params['miny'] - (slope*self.params['minx'])
	numparticles = 0
	self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
	f = open(self.params['keepfile'], 'w')
	for stackpart in stackparts:
		# skip particles outside the mean window
		if not (self.params['minx'] < stackpart['mean'] < self.params['maxx']):
			continue
		calcY = slope*stackpart['mean']+intercept
		# default keeps below the line; --keepabove inverts the test
		if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
		   (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
			# EMAN particle numbering starts at zero
			emanpartnum = stackpart['particleNumber']-1
			f.write('%i\n' % emanpartnum)
			numparticles += 1
	f.close()
	self.params['description'] += ((" ... %d particle substack of stackid %d" % (numparticles, self.params['stackid'])))
	#create the new sub stack
	apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
	if not os.path.isfile(newstack):
		apDisplay.printError("No stack was created")
	apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
	apStack.averageStack(stack=newstack)
	# stack mean plot
	newstackid = apStack.getStackIdFromPath(newstack)
	apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
	apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
	"""Reconstruct a keepfile from the particle ids embedded in a
	downloaded stack's image headers, then create and commit the substack.
	"""
	self.stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)
	# creating a keepfile, fixed filename
	self.params['keepfile'] = os.path.join(self.params['newstackpath'],"keepfile.lst")
	#path to the old stack
	oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
	#path to the new stack. the stack path will be provided by the db in the future
	newstack = os.path.join(self.params['newstackpath'], self.params['newstack'])
	# count the particles: one 1024-byte header record per image.
	# FIX: binary mode + guaranteed close (the original opened in text
	# mode and never closed the handle)
	h = open(newstack, 'rb')
	try:
		numimg = 0
		while h.read(1024):
			numimg += 1
	finally:
		h.close()
	#have to use this function to make sure i get the same particle number like in the download
	stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])
	# dictionary translating the unique particle db id into the stack
	# position; decrement 1 so positions count from 0 to numimg-1 as a
	# proc2d-style keepfile requires
	partdict = {}
	for part in stackpartdata:
		partdict[int(part.dbid)] = int(part['particleNumber'] - 1)
	# writing the keepfile: float word 19 of each header record carries
	# the particle db id -- TODO confirm against the stack writer.
	# FIX: mode="r" maps the file read-only (numpy default 'r+' would
	# open it writable for a pure read)
	f = open(self.params['keepfile'], 'w')
	try:
		for i in range(0, numimg):
			header_id = int(numpy.memmap(newstack, dtype="float32", mode="r", offset=i*1024+19*4)[0])
			f.write('%d\n' % partdict[header_id])
	finally:
		f.close()
	newcreatestack = os.path.join(self.params['rundir'],self.params['newstack'])
	apStack.makeNewStack(oldstack, newcreatestack, self.params['keepfile'], bad=True)
	apStack.commitSubStack(self.params, self.params['newstack'], sorted=False)
	apStack.averageStack(stack=newcreatestack)
	newstackid = apStack.getStackIdFromPath(newcreatestack)
def start(self):
	"""Run the RCT (random conical tilt) reconstruction pipeline.

	Builds a substack of the good tilted particles, back-projects them
	into an initial volume, then iteratively re-aligns the particles to
	volume projections and re-back-projects, finally committing the
	result to the database.
	"""
	### get stack data
	notstackdata = apStack.getOnlyStackData(self.params['notstackid'])
	tiltstackdata = apStack.getOnlyStackData(self.params['tiltstackid'])
	### get good particle numbers
	includeParticle, tiltParticlesData = self.getGoodAlignParticles()
	self.numpart = len(includeParticle)
	### make doc file of Euler angles
	eulerfile = self.makeEulerDoc(tiltParticlesData)
	### write kept particles to file, one zero-based number per line
	self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile"+self.timestamp+".lst")
	apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
	kf = open(self.params['keepfile'], "w")
	for partnum in includeParticle:
		kf.write(str(partnum)+"\n")
	kf.close()
	### make new stack of tilted particle from that run
	tiltstackfile = os.path.join(tiltstackdata['path']['path'], tiltstackdata['name'])
	rctstackfile = os.path.join(self.params['rundir'], "rctstack"+self.timestamp+".hed")
	apFile.removeStack(rctstackfile, warn=False)
	apStack.makeNewStack(tiltstackfile, rctstackfile, self.params['keepfile'])
	spiderstack = self.convertStackToSpider(rctstackfile)
	#self.mirrorParticles(tiltParticlesData, spiderstack)
	### iterations over volume creation
	### back project particles into filter volume
	volfile = os.path.join(self.params['rundir'], "volume%s-%03d.spi"%(self.timestamp, 0))
	backproject.backprojectCG(spiderstack, eulerfile, volfile, numpart=self.numpart, pixrad=self.params['radius'])
	alignstack = spiderstack
	### center/convert the volume file
	mrcvolfile = self.processVolume(volfile, 0)
	for i in range(self.params['numiters']):
		looptime = time.time()
		iternum = i+1
		apDisplay.printMsg("running backprojection iteration "+str(iternum))
		### xy-shift particles to volume projections
		alignstack = backproject.rctParticleShift(volfile, alignstack, eulerfile, iternum, numpart=self.numpart, pixrad=self.params['radius'], timestamp=self.timestamp)
		# the previous iteration's volume is no longer needed
		apFile.removeFile(volfile)
		### back project particles into better volume
		volfile = os.path.join(self.params['rundir'], "volume%s-%03d.spi"%(self.timestamp, iternum))
		backproject.backproject3F(alignstack, eulerfile, volfile, numpart=self.numpart)
		### center/convert the volume file
		mrcvolfile = self.processVolume(volfile, iternum)
		apDisplay.printColor("finished volume refinement loop in " +apDisplay.timeString(time.time()-looptime), "cyan")
	### optimize Euler angles
	#NOT IMPLEMENTED YET
	### perform eotest (even/odd resolution test) if requested
	if self.params['eotest'] is True:
		self.runEoTest(alignstack, eulerfile)
	self.runRmeasure()
	### insert volumes into DB (last iteration's centered volume)
	self.insertRctRun(mrcvolfile)
def start(self):
	"""Run the OTR (orthogonal tilt reconstruction) pipeline per class.

	For every class in self.classlist: build a substack of the good
	tilted particles, back-project an initial volume (BPRP), refine it
	by iterative particle shifting and back-projection, then run the
	projection-matching (APSH/XMIPP) refinement rounds.  The final
	volume is committed to the database.
	"""
	### get stack data
	notstackdata = apStack.getOnlyStackData(self.params['notstackid'])
	tiltstackdata = apStack.getOnlyStackData(self.params['tiltstackid'])
	for cnum in self.classlist:
		print "\n"
		apDisplay.printMsg("###########################")
		apDisplay.printMsg("Processing stack of class "+str(cnum)+"")
		apDisplay.printMsg("###########################")
		print "\n"
		### get good particle numbers
		includeParticle, tiltParticlesData = self.getGoodAlignParticles(cnum)
		self.numpart = len(includeParticle)
		### write kept particles to file (one subdirectory per class)
		apParam.createDirectory(os.path.join(self.params['rundir'], str(cnum)))
		self.params['keepfile'] = os.path.join(self.params['rundir'], str(cnum), "keepfile"+self.timestamp+".lst")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()
		### make new stack of tilted particle from that run
		tiltstackfile = os.path.join(tiltstackdata['path']['path'], tiltstackdata['name'])
		otrstackfile = os.path.join(self.params['rundir'], str(cnum), "otrstack"+self.timestamp+".hed")
		apFile.removeStack(otrstackfile)
		apStack.makeNewStack(tiltstackfile, otrstackfile, self.params['keepfile'])
		spiderstack = self.convertStackToSpider(otrstackfile, cnum)
		### make doc file of Euler angles
		eulerfile = self.makeEulerDoc(tiltParticlesData, cnum)
		### iterations over volume creation
		looptime = time.time()
		### back project particles into volume
		volfile = os.path.join(self.params['rundir'], str(cnum), "volume%s-%03d.spi"%(self.timestamp, 0))
		self.initialBPRP(cnum, volfile, spiderstack, eulerfile, len(includeParticle), self.params['radius'])
		### RCT backproject method
		#backproject.backprojectCG(spiderstack, eulerfile, volfile, numpart=len(includeParticle), pixrad=self.params['radius'])
		### filter the volume (low-pass Butterworth)
		apix = apStack.getStackPixelSizeFromStackId(self.params['tiltstackid'])
		backproject.butterworthLP(volfile, apix)
		### need work... filtered volume overwrites on the existing volume
		backproject.normalizeVol(volfile)
		alignstack = spiderstack
		### center/convert the volume file
		mrcvolfile = self.processVolume(volfile, cnum, 0)
		for i in range(self.params['numiters']):
			iternum = i+1
			apDisplay.printMsg("running backprojection iteration "+str(iternum))
			### xy-shift particles to volume projections
			alignstack = backproject.otrParticleShift(volfile, alignstack, eulerfile, iternum, numpart=len(includeParticle), pixrad=self.params['radius'], timestamp=self.timestamp, classnum=cnum)
			apDisplay.printColor("finished volume refinement in " +apDisplay.timeString(time.time()-looptime), "cyan")
			### back project particles into better volume
			volfile = os.path.join(self.params['rundir'], str(cnum), "volume%s-%03d.spi"%(self.timestamp, iternum))
			backproject.backproject3F(alignstack, eulerfile, volfile, numpart=len(includeParticle))
			### filter the volume (low-pass Butterworth)
			backproject.butterworthLP(volfile, apix)
			### need work... filtered volume has a different name
			backproject.normalizeVol(volfile)
			### center/convert the volume file
			mrcvolfile = self.processVolume(volfile, cnum, iternum)
		###############################
		#                             #
		#  Andres's refinement steps  #
		#                             #
		###############################
		print "\n"
		apDisplay.printMsg("##################################")
		apDisplay.printMsg("Starting Andres' refinement steps")
		apDisplay.printMsg("##################################")
		print "\n"
		for j in range(self.params['refineiters']):
			iternum = j+1
			# NOTE(review): keeps the db connection alive? purpose unclear -- confirm
			appiondata.ApPathData.direct_query(1)
			apDisplay.printMsg("Starting projection-matching refinement/XMIPP iteration "+str(iternum))
			boxsize = self.getBoxSize()
			### projection-matching refinement/XMIPP
			apshout, apshstack, apsheuler = self.projMatchRefine(cnum, volfile, alignstack, eulerfile, boxsize, len(includeParticle), self.params['radius'], iternum)
			apDisplay.printMsg("Calculating weighted cross-correlation coefficients")
			### calculation of weighted cross-correlation coefficients
			apshout_weighted = self.cccAPSH(apshout, cnum, iternum)
			apDisplay.printMsg("Creating select files based on weighted cross-correlation coefficients")
			### create select files based on calculated weighted-cross-correlation
			corrSelect = self.makecccAPSHselectFile(apshout_weighted, cnum, iternum, factor=0.1)
			### create volume file names
			apshVolfile = os.path.join(self.params['rundir'], str(cnum), "apshVolume-%03d.spi"%(iternum))
			### run BPRP on selected particles
			self.APSHbackProject(apshstack, apsheuler, apshVolfile, cnum, corrSelect)
			### center volume
			filename = os.path.splitext(apshVolfile)[0]
			apshVolFileCentered = filename+"_centered.spi"
			backproject.centerVolume(apshVolfile, apshVolFileCentered)
			### calculate FSC
			### generate odd and even select files for FSC calculation
			corrSelectOdd, corrSelectEven = self.splitOddEven(cnum, corrSelect, iternum)
			fscout = self.runEoTest(corrSelectOdd, corrSelectEven, cnum, apshstack, apsheuler, iternum)
			self.runRmeasure(apshVolFileCentered)
			### filter volume
			backproject.butterworthFscLP(apshVolFileCentered, fscout)
			### reset file names for next round
			volfile = apshVolFileCentered
			eulerfile = apsheuler
			mrcvolfile = self.processVolume(volfile, cnum, iternum)
			print "\n"
			apDisplay.printMsg("###########################")
			apDisplay.printMsg("Done with iteration "+str(j+1)+"")
			apDisplay.printMsg("###########################")
			print "\n"
	#if len(self.classlist) > 1:
		#get a list of all unique combinations of volumes
		# pairlist = self.computeClassVolPair()
	### insert volumes into DB (volume of the last processed class)
	self.insertOtrRun(mrcvolfile)
def start(self):
	"""Create a substack by keeping/excluding alignment or clustering
	classes, with optional per-particle shift/score/spread filters.

	The kept particle numbers are written to a keepfile which drives the
	substack creation; the result is averaged, optionally committed and
	plotted.
	"""
	### new stack path
	stackdata = apStack.getOnlyStackData(self.params['stackid'])
	oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
	newstack = os.path.join(self.params['rundir'], stackdata['name'])
	apStack.checkForPreviousStack(newstack)
	includelist = []
	excludelist = []
	### list of classes to be excluded
	if self.params['dropclasslist'] is not None:
		excludestrlist = self.params['dropclasslist'].split(",")
		for excludeitem in excludestrlist:
			excludelist.append(int(excludeitem.strip()))
	apDisplay.printMsg("Exclude list: " + str(excludelist))
	### list of classes to be included
	if self.params['keepclasslist'] is not None:
		includestrlist = self.params['keepclasslist'].split(",")
		for includeitem in includestrlist:
			includelist.append(int(includeitem.strip()))
	### or read from keepfile (one class number per line; --excludefrom
	### flips the file's meaning to an exclusion list)
	elif self.params['keepfile'] is not None:
		keeplistfile = open(self.params['keepfile'])
		for line in keeplistfile:
			if self.params['excludefrom'] is True:
				excludelist.append(int(line.strip()))
			else:
				includelist.append(int(line.strip()))
		keeplistfile.close()
	apDisplay.printMsg("Include list: " + str(includelist))
	### get particles from align or cluster stack
	apDisplay.printMsg("Querying database for particles")
	q0 = time.time()
	if self.params['alignid'] is not None:
		alignpartq = appiondata.ApAlignParticleData()
		alignpartq['alignstack'] = self.alignstackdata
		particles = alignpartq.query()
	elif self.params['clusterid'] is not None:
		clusterpartq = appiondata.ApClusteringParticleData()
		clusterpartq['clusterstack'] = self.clusterstackdata
		particles = clusterpartq.query()
	apDisplay.printMsg("Complete in " + apDisplay.timeString(time.time() - q0))
	### write included particles to text file
	includeParticle = []
	excludeParticle = 0
	badscore = 0
	badshift = 0
	badspread = 0
	# per-particle decision log
	f = open("test.log", "w")
	count = 0
	for part in particles:
		count += 1
		#partnum = part['partnum']-1
		# clustering particles reference the align particle indirectly
		if 'alignparticle' in part:
			alignpart = part['alignparticle']
			classnum = int(part['refnum']) - 1
		else:
			alignpart = part
			classnum = int(part['ref']['refnum']) - 1
		emanstackpartnum = alignpart['stackpart']['particleNumber'] - 1
		### check shift: drop particles that moved too far in alignment
		if self.params['maxshift'] is not None:
			shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
			if shift > self.params['maxshift']:
				excludeParticle += 1
				f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
				badshift += 1
				continue
		if self.params['minscore'] is not None:
			### check score
			if (alignpart['score'] is not None and alignpart['score'] < self.params['minscore']):
				excludeParticle += 1
				f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
				badscore += 1
				continue
			### check spread
			# NOTE(review): spread is compared against minscore, not a
			# separate minspread parameter -- confirm this is intended
			if (alignpart['spread'] is not None and alignpart['spread'] < self.params['minscore']):
				excludeParticle += 1
				f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
				badspread += 1
				continue
		# keep when the class is explicitly included, or when an exclude
		# list is in force and the class is not on it
		if includelist and classnum in includelist:
			includeParticle.append(emanstackpartnum)
			f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
		elif excludelist and not classnum in excludelist:
			includeParticle.append(emanstackpartnum)
			f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
		else:
			excludeParticle += 1
			f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
	f.close()
	includeParticle.sort()
	if badshift > 0:
		apDisplay.printMsg("%d paricles had a large shift" % (badshift))
	if badscore > 0:
		apDisplay.printMsg("%d paricles had a low score" % (badscore))
	if badspread > 0:
		apDisplay.printMsg("%d paricles had a low spread" % (badspread))
	apDisplay.printMsg("Keeping " + str(len(includeParticle)) + " and excluding " + str(excludeParticle) + " particles")
	#print includeParticle
	### write kept particles to file
	self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-" + self.timestamp + ".list")
	apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
	kf = open(self.params['keepfile'], "w")
	for partnum in includeParticle:
		kf.write(str(partnum) + "\n")
	kf.close()
	### get number of particles
	numparticles = len(includeParticle)
	if excludelist:
		self.params['description'] += (" ... %d particle substack with %s classes excluded" % (numparticles, self.params['dropclasslist']))
	elif includelist:
		self.params['description'] += (" ... %d particle substack with %s classes included" % (numparticles, self.params['keepclasslist']))
	### create the new sub stack
	apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
	if not os.path.isfile(newstack):
		apDisplay.printError("No stack was created")
	apStack.averageStack(stack=newstack)
	if self.params['commit'] is True:
		apStack.commitSubStack(self.params)
		newstackid = apStack.getStackIdFromPath(newstack)
		apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def start(self):
    """
    Create one or more substacks of an existing stack.

    Selection modes (mutually exclusive, driven by self.params):
      * first/last   - keep a contiguous 1-based particle range
      * random       - keep a random sample of the given size
      * split        - partition the stack round-robin into N substacks
      * excludefile  - invert an existing keep file into its complement
      * keepfile     - use a caller-supplied keep list as-is
      * include/exclude - comma-separated explicit particle lists
    Each resulting substack is written, committed to the database, and
    averaged.  Side effects: writes .lst keep files and stack files in
    rundir, mutates self.params ('keepfile', 'description').
    """
    # old stack size (particle count of the source stack)
    stacksize = apStack.getNumberStackParticlesFromId(self.params['stackid'])
    # if exclude or include lists are not defined...
    if self.params['exclude'] is None and self.params['include'] is None:
        # if first and last are specified, create a file
        if self.params['first'] is not None and self.params['last'] is not None:
            stp = str(self.params['first'])
            enp = str(self.params['last'])
            fname = 'sub'+str(self.params['stackid'])+'_'+stp+'-'+enp+'.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
            f = open(self.params['keepfile'], 'w')
            for i in range(self.params['first'], self.params['last']+1):
                # keep files are 0-based; first/last are 1-based
                f.write('%d\n' % (int(i)-1))
            f.close()
        # generate the random list by giving number and create the file
        elif self.params['random'] is not None:
            fname = "random%d_%d.lst" % (self.params['stackid'], self.params['random'])
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
            f = open(self.params['keepfile'], 'w')
            # sample without replacement from [0, last)
            # NOTE(review): assumes --last was supplied alongside --random;
            # sampling range is 'last', not 'stacksize' — confirm intended
            randomList = random.sample(xrange(self.params['last']), self.params['random'])
            randomList.sort()
            for partnum in randomList:
                f.write('%d\n' % partnum)
            f.close()
        # if splitting, create files containing the split values
        elif self.params['split'] > 1:
            for i in range(self.params['split']):
                fname = 'sub'+str(self.params['stackid'])+'.'+str(i+1)+'.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
                apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
                f = open(self.params['keepfile'], 'w')
                for p in range(stacksize):
                    # round-robin assignment: particle p goes to file (p mod split)
                    if (p % self.params['split'])-i == 0:
                        f.write('%i\n' % p)
                f.close()
        # if exclude-from option is specified, convert particles to exclude
        elif self.params['excludefile'] is True:
            # read the existing keep file; non-integer lines are skipped
            oldkf = open(self.params['keepfile'])
            partlist = []
            for line in oldkf:
                particle = line.strip()
                try:
                    particle = int(particle)
                except:
                    continue
                partlist.append(particle)
            oldkf.close()
            # create new list excluding the particles
            apDisplay.printMsg("Converting keep file to exclude file")
            newkeepfile = "tmpnewkeepfile.txt"
            newkf = open(newkeepfile, 'w')
            for p in range(stacksize):
                if p not in partlist:
                    newkf.write("%i\n" % p)
            newkf.close()
            self.params['keepfile'] = os.path.abspath(newkeepfile)
        # otherwise, just copy the supplied keep file into the working directory
        elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
            shutil.copy(self.params['keepfile'], os.path.basename(self.params['keepfile']))
    # if either exclude or include lists is defined
    elif self.params['exclude'] or self.params['include']:
        ### list of particles to be excluded
        excludelist = []
        if self.params['exclude'] is not None:
            excludestrlist = self.params['exclude'].split(",")
            for excld in excludestrlist:
                excludelist.append(int(excld.strip()))
        apDisplay.printMsg("Exclude list: "+str(excludelist))
        ### list of particles to be included
        includelist = []
        if self.params['include'] is not None:
            includestrlist = self.params['include'].split(",")
            for incld in includestrlist:
                includelist.append(int(incld.strip()))
        apDisplay.printMsg("Include list: "+str(includelist))
    # new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    newname = stackdata['name']
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    # if include or exclude list is given...
    # NOTE(review): if exclude/include is an empty string (not None) the
    # elif above is skipped but this branch still runs, so includelist /
    # excludelist would be unbound (NameError) — confirm callers never
    # pass an empty string
    if self.params['include'] is not None or self.params['exclude'] is not None:
        includeParticle = []
        excludeParticle = 0
        for partnum in range(stacksize):
            if includelist and partnum in includelist:
                includeParticle.append(partnum)
            elif excludelist and not partnum in excludelist:
                includeParticle.append(partnum)
            else:
                excludeParticle += 1
        includeParticle.sort()
        ### write kept particles to file
        self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
        apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum)+"\n")
        kf.close()
        # get number of particles
        numparticles = len(includeParticle)
        # both branches append the same text; kept for symmetry with other scripts
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d"
                % (numparticles, self.params['stackid']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d"
                % (numparticles, self.params['stackid']))
    # remember the base description so each split pass can re-derive its own
    ogdescr = self.params['description']
    for i in range(self.params['split']):
        ### always do this, if not splitting split=1
        sb = os.path.splitext(stackdata['name'])
        if self.params['first'] is not None and self.params['last'] is not None:
            newname = sb[0]+'.'+str(self.params['first'])+'-'+str(self.params['last'])+sb[-1]
        elif self.params['random'] is not None:
            newname = "%s-random%d%s" % (sb[0], self.params['random'], sb[-1])
        elif self.params['split'] > 1:
            fname = 'sub'+str(self.params['stackid'])+'.'+str(i+1)+'.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            newname = sb[0]+'.'+str(i+1)+'of'+str(self.params['split'])+sb[-1]
        newstack = os.path.join(self.params['rundir'], newname)
        apStack.checkForPreviousStack(newstack)
        # get number of particles by counting keep-file lines
        f = open(self.params['keepfile'], "r")
        numparticles = len(f.readlines())
        f.close()
        self.params['description'] = ogdescr
        self.params['description'] += (
            (" ... %d particle substack of stackid %d"
            % (numparticles, self.params['stackid'])))
        # if splitting, add to description
        if self.params['split'] > 1:
            self.params['description'] += (" (%i of %i)" % (i+1, self.params['split']))
        # create the new sub stack
        if not self.params['correctbeamtilt']:
            apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
        else:
            apBeamTilt.makeCorrectionStack(self.params['stackid'], oldstack, newstack)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, sorted=False)
        apStack.averageStack(stack=newstack)
        newstackid = apStack.getStackIdFromPath(newstack)
        if self.params['meanplot'] is True:
            apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
            apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
    """
    Build substack(s) from an existing particle stack.

    One of several selection strategies is applied depending on which
    self.params entries are set: a contiguous 1-based range (first/last),
    a random sample (random), a round-robin split into N parts (split),
    inversion of an existing keep file (excludefile), a pre-made keep
    file, or explicit include/exclude particle lists.  Writes keep lists
    and stack files into rundir, commits each substack to the database,
    and produces a stack average.  Mutates self.params ('keepfile',
    'description').
    """
    # particle count of the source stack
    stacksize = apStack.getNumberStackParticlesFromId(
        self.params['stackid'])
    # if exclude or include lists are not defined...
    if self.params['exclude'] is None and self.params['include'] is None:
        # if first and last are specified, create a file
        if self.params['first'] is not None and self.params[
                'last'] is not None:
            stp = str(self.params['first'])
            enp = str(self.params['last'])
            fname = 'sub' + str(
                self.params['stackid']) + '_' + stp + '-' + enp + '.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                   fname)
            apDisplay.printMsg("Creating keep list: " +
                               self.params['keepfile'])
            f = open(self.params['keepfile'], 'w')
            # convert the 1-based range to the 0-based keep-file convention
            for i in range(self.params['first'], self.params['last'] + 1):
                f.write('%d\n' % (int(i) - 1))
            f.close()
        # generate the random list by giving number and create the file
        elif self.params['random'] is not None:
            fname = "random%d_%d.lst" % (self.params['stackid'],
                                         self.params['random'])
            self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                   fname)
            apDisplay.printMsg("Creating keep list: " +
                               self.params['keepfile'])
            # create a file
            f = open(self.params['keepfile'], 'w')
            # sample without replacement from [0, last)
            # NOTE(review): range comes from 'last', not 'stacksize' —
            # presumably --last is required with --random; confirm
            randomList = random.sample(xrange(self.params['last']),
                                       self.params['random'])
            randomList.sort()
            for partnum in randomList:
                f.write('%d\n' % partnum)
            f.close()
        # if splitting, create files containing the split values
        elif self.params['split'] > 1:
            for i in range(self.params['split']):
                fname = 'sub' + str(
                    self.params['stackid']) + '.' + str(i + 1) + '.lst'
                self.params['keepfile'] = os.path.join(
                    self.params['rundir'], fname)
                apDisplay.printMsg("Creating keep list: " +
                                   self.params['keepfile'])
                f = open(self.params['keepfile'], 'w')
                for p in range(stacksize):
                    # particle p belongs to split file (p mod split)
                    if (p % self.params['split']) - i == 0:
                        f.write('%i\n' % p)
                f.close()
        # if exclude-from option is specified, convert particles to exclude
        elif self.params['excludefile'] is True:
            oldkf = open(self.params['keepfile'])
            partlist = []
            # non-integer lines in the keep file are silently skipped
            for line in oldkf:
                particle = line.strip()
                try:
                    particle = int(particle)
                except:
                    continue
                partlist.append(particle)
            oldkf.close()
            # create new list excluding the particles
            apDisplay.printMsg("Converting keep file to exclude file")
            newkeepfile = "tmpnewkeepfile.txt"
            newkf = open(newkeepfile, 'w')
            for p in range(stacksize):
                if p not in partlist:
                    newkf.write("%i\n" % p)
            newkf.close()
            self.params['keepfile'] = os.path.abspath(newkeepfile)
        # otherwise, just copy the file into the current directory
        elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
            shutil.copy(self.params['keepfile'],
                        os.path.basename(self.params['keepfile']))
    # if either exclude or include lists is defined
    elif self.params['exclude'] or self.params['include']:
        ### list of particles to be excluded
        excludelist = []
        if self.params['exclude'] is not None:
            excludestrlist = self.params['exclude'].split(",")
            for excld in excludestrlist:
                excludelist.append(int(excld.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))
        ### list of particles to be included
        includelist = []
        if self.params['include'] is not None:
            includestrlist = self.params['include'].split(",")
            for incld in includestrlist:
                includelist.append(int(incld.strip()))
        apDisplay.printMsg("Include list: " + str(includelist))
    # new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    newname = stackdata['name']
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    # if include or exclude list is given...
    # NOTE(review): an empty-string exclude/include skips the elif above
    # yet enters this branch, leaving includelist/excludelist unbound —
    # verify callers never pass ""
    if self.params['include'] is not None or self.params[
            'exclude'] is not None:
        includeParticle = []
        excludeParticle = 0
        for partnum in range(stacksize):
            if includelist and partnum in includelist:
                includeParticle.append(partnum)
            elif excludelist and not partnum in excludelist:
                includeParticle.append(partnum)
            else:
                excludeParticle += 1
        includeParticle.sort()
        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()
        # get number of particles
        numparticles = len(includeParticle)
        # both branches append identical text (kept as-is)
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d" %
                (numparticles, self.params['stackid']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d" %
                (numparticles, self.params['stackid']))
    # keep the base description; each split iteration rebuilds its own
    ogdescr = self.params['description']
    for i in range(self.params['split']):
        ### always do this, if not splitting split=1
        sb = os.path.splitext(stackdata['name'])
        if self.params['first'] is not None and self.params[
                'last'] is not None:
            newname = sb[0] + '.' + str(self.params['first']) + '-' + str(
                self.params['last']) + sb[-1]
        elif self.params['random'] is not None:
            newname = "%s-random%d%s" % (sb[0], self.params['random'],
                                         sb[-1])
        elif self.params['split'] > 1:
            fname = 'sub' + str(
                self.params['stackid']) + '.' + str(i + 1) + '.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                   fname)
            newname = sb[0] + '.' + str(i + 1) + 'of' + str(
                self.params['split']) + sb[-1]
        newstack = os.path.join(self.params['rundir'], newname)
        apStack.checkForPreviousStack(newstack)
        # particle count = number of keep-file lines
        f = open(self.params['keepfile'], "r")
        numparticles = len(f.readlines())
        f.close()
        self.params['description'] = ogdescr
        self.params['description'] += (
            (" ... %d particle substack of stackid %d" %
             (numparticles, self.params['stackid'])))
        # if splitting, add to description
        if self.params['split'] > 1:
            self.params['description'] += (" (%i of %i)" %
                                           (i + 1, self.params['split']))
        # create the new sub stack
        if not self.params['correctbeamtilt']:
            apStack.makeNewStack(oldstack,
                                 newstack,
                                 self.params['keepfile'],
                                 bad=True)
        else:
            apBeamTilt.makeCorrectionStack(self.params['stackid'], oldstack,
                                           newstack)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, sorted=False)
        apStack.averageStack(stack=newstack)
        newstackid = apStack.getStackIdFromPath(newstack)
        if self.params['meanplot'] is True:
            apDisplay.printMsg(
                "creating Stack Mean Plot montage for stackid")
            apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
    """
    Create a substack from an alignment or clustering run, filtered by
    class membership (keep/drop class lists or a keep file) and by
    per-particle quality (max shift, min score/spread).

    Particles come either from a direct SQL query against
    ApAlignParticleData (alignid) or from an ApClusteringParticleData
    query (clusterid).  Writes a keep file and optionally a new stack
    file, averages the kept particles, and commits to the database.

    Fix over previous revision: in the ClusteringParticle branch the
    except clause now sets classnum = None (mirroring the direct-query
    branch); previously an unclassified particle raised NameError on
    the first iteration or silently reused the prior particle's class.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    newstack = os.path.join(self.params['rundir'], stackdata['name'])
    apStack.checkForPreviousStack(newstack)
    includelist = []
    excludelist = []
    ### list of classes to be excluded
    if self.params['dropclasslist'] is not None:
        excludestrlist = self.params['dropclasslist'].split(",")
        for excludeitem in excludestrlist:
            excludelist.append(int(excludeitem.strip()))
    apDisplay.printMsg("Exclude list: "+str(excludelist))
    ### list of classes to be included
    if self.params['keepclasslist'] is not None:
        includestrlist = self.params['keepclasslist'].split(",")
        for includeitem in includestrlist:
            includelist.append(int(includeitem.strip()))
    ### or read from keepfile; with excludefrom the file lists classes to drop
    elif self.params['keepfile'] is not None:
        keeplistfile = open(self.params['keepfile'])
        for line in keeplistfile:
            if self.params['excludefrom'] is True:
                excludelist.append(int(line.strip()))
            else:
                includelist.append(int(line.strip()))
        keeplistfile.close()
    apDisplay.printMsg("Include list: "+str(includelist))
    ### get particles from align or cluster stack
    apDisplay.printMsg("Querying database for particles")
    q0 = time.time()
    if self.params['alignid'] is not None:
        # DIRECT SQL STUFF
        sqlcmd = "SELECT " + \
            "apd.partnum, " + \
            "apd.xshift, apd.yshift, " + \
            "apd.rotation, apd.mirror, " + \
            "apd.spread, apd.correlation, " + \
            "apd.score, apd.bad, " + \
            "spd.particleNumber, " + \
            "ard.refnum "+ \
            "FROM ApAlignParticleData apd " + \
            "LEFT JOIN ApStackParticleData spd ON " + \
            "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) " + \
            "LEFT JOIN ApAlignReferenceData ard ON" + \
            "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) " + \
            "WHERE `REF|ApAlignStackData|alignstack` = %i"%(self.params['alignid'])
        # These are AlignParticles
        particles = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)
    elif self.params['clusterid'] is not None:
        clusterpartq = appiondata.ApClusteringParticleData()
        clusterpartq['clusterstack'] = self.clusterstackdata
        # These are ClusteringParticles
        particles = clusterpartq.query()
    apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-q0)))
    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    f = open("test.log", "w")
    count = 0
    t0 = time.time()
    apDisplay.printMsg("Parsing particle information")
    # find out if there is alignparticle info:
    # 'alignparticle' is a key of any particle in particles if the latter
    # is a ClusteringParticle
    is_cluster_p = False
    if 'alignparticle' in particles[0]:
        is_cluster_p = True
    for part in particles:
        count += 1
        if is_cluster_p:
            # alignpart is an item of ClusteringParticle
            alignpart = part['alignparticle']
            try:
                classnum = int(part['refnum'])-1
            except:
                apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
                # FIX: mirror the direct-query branch so an unclassified
                # particle is tagged None instead of raising NameError or
                # inheriting the previous particle's class
                classnum = None
            emanstackpartnum = alignpart['stackpart']['particleNumber']-1
        else:
            # particle has info from AlignedParticle as results of direct query
            alignpart = part
            try:
                classnum = int(alignpart['refnum'])-1
            except:
                apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
                classnum = None
            emanstackpartnum = int(alignpart['particleNumber'])-1
        ### check shift
        if self.params['maxshift'] is not None:
            shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
            if shift > self.params['maxshift']:
                excludeParticle += 1
                if classnum is not None:
                    f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                else:
                    f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
                badshift += 1
                continue
        if self.params['minscore'] is not None:
            ### check score
            if ( alignpart['score'] is not None
                    and alignpart['score'] < self.params['minscore'] ):
                excludeParticle += 1
                if classnum is not None:
                    f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                else:
                    f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
                badscore += 1
                continue
            ### check spread
            # NOTE(review): spread is compared against minscore (there is
            # no separate minspread param) — looks intentional but confirm
            if ( alignpart['spread'] is not None
                    and alignpart['spread'] < self.params['minscore'] ):
                excludeParticle += 1
                if classnum is not None:
                    f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                else:
                    f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
                badspread += 1
                continue
        if classnum is not None:
            if includelist and (classnum in includelist):
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
            elif excludelist and not (classnum in excludelist):
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
        else:
            # unclassified particles are always excluded
            excludeParticle += 1
            f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
    f.close()
    includeParticle.sort()
    if badshift > 0:
        apDisplay.printMsg("%d paricles had a large shift"%(badshift))
    if badscore > 0:
        apDisplay.printMsg("%d paricles had a low score"%(badscore))
    if badspread > 0:
        apDisplay.printMsg("%d paricles had a low spread"%(badspread))
    apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-t0)))
    apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")
    ### write kept particles to file
    self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
    apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum)+"\n")
    kf.close()
    ### get number of particles
    numparticles = len(includeParticle)
    if excludelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes excluded"
            % (numparticles, self.params['dropclasslist']))
    elif includelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes included"
            % (numparticles, self.params['keepclasslist']))
    outavg = os.path.join(self.params['rundir'],"average.mrc")
    ### create the new sub stack
    # first check if virtual stack
    if not os.path.isfile(oldstack):
        vstackdata = apStack.getVirtualStackParticlesFromId(self.params['stackid'])
        vparts = vstackdata['particles']
        oldstack = vstackdata['filename']
        # get subset of virtualstack: map kept indices back to real
        # particle numbers in the backing stack file
        vpartlist = [int(vparts[p]['particleNumber'])-1 for p in includeParticle]
        if self.params['writefile'] is True:
            apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params['savebad'])
        apStack.averageStack(stack=oldstack,outfile=outavg,partlist=vpartlist)
    else:
        if self.params['writefile'] is True:
            apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
        apStack.averageStack(stack=oldstack,outfile=outavg,partlist=includeParticle)
    if self.params['writefile'] is True and not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params,included=includeParticle)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def start(self):
    """
    Build a substack with an equalized distribution of Euler views.

    For each particle, collect its (euler1, euler2) assignment across
    the requested refinement iterations, keep the most frequent pair,
    reject particles whose angles never repeat, bucket the survivors by
    angle pair, then cap every bucket at self.params['mincount'] so no
    view dominates.  Writes 'equalviews.lst' (0-based particle numbers),
    builds start.hed from it, and commits the substack.
    """
    partdict = {}   # (partnum, iternum) -> refine-particle record
    partlist = []   # unique particle numbers seen, in discovery order
    ### get Euler angles for each particle
    for iternum in self.iternums:
        ### get recon iter data
        reconiterq = appiondata.ApRefineIterData()
        reconiterq['refineRun'] = self.reconrundata
        reconiterq['iteration'] = iternum
        reconiterdata = reconiterq.query(results=1)[0]  # this should be unique
        ### get particle data
        reconpartq = appiondata.ApRefineParticleData()
        reconpartq['refineIter'] = reconiterdata
        apDisplay.printMsg("Querying for particles at "+time.asctime())
        reconpartdatas = reconpartq.query()
        ### group particle data
        for partdata in reconpartdatas:
            partnum = partdata['particle']['particleNumber']
            # NOTE(review): O(n) list membership per particle; a set
            # would be faster for large stacks
            if not partnum in partlist:
                partlist.append(partnum)
            partdict[(partnum, iternum)] = partdata
    ### run through particles and check Euler angles
    partlist.sort()
    eulerdict = {}   # (euler1, euler2) -> list of particle numbers
    eulercount = {}  # (euler1, euler2) -> particle count
    reject = 0
    for partnum in partlist:
        # per-angle frequency tables, keyed by angle rounded to 2 decimals
        e1d = {}
        e2d = {}
        for iternum in self.iternums:
            if not (partnum, iternum) in partdict:
                continue
            partdata = partdict[(partnum, iternum)]
            euler1 = "%.2f"%(partdata['euler1'])
            if not euler1 in e1d:
                e1d[euler1] = 1
            else:
                e1d[euler1] += 1
            euler2 = "%.2f"%(partdata['euler2'])
            if not euler2 in e2d:
                e2d[euler2] = 1
            else:
                e2d[euler2] += 1
        # most frequent euler1/euler2 across iterations (max on (count, angle))
        counts = [(val,key) for key,val in e1d.items()]
        e1count, euler1 = max(counts)
        counts = [(val,key) for key,val in e2d.items()]
        e2count, euler2 = max(counts)
        # reject indeterminant particles: angle must repeat at least twice
        if e2count < 2 or e1count < 2:
            reject += 1
            continue
        ### group particles by their Euler angles
        if not (euler1,euler2) in eulerdict:
            eulerdict[(euler1,euler2)] = []
            eulercount[(euler1,euler2)] = 0
        eulerdict[(euler1,euler2)].append(partnum)
        eulercount[(euler1,euler2)] += 1
    print "Rejected %d particles"%(reject)
    values = eulercount.values()
    values.sort()
    print values
    ### run through Euler angles and count particles
    # smallest bucket size, floor-limited by the user-supplied mincount
    counts = [(val,key) for key,val in eulercount.items()]
    mincount, val = min(counts)
    self.params['mincount'] = max(self.params['mincount'], mincount)
    print "Keeping %d of %d particles"%(self.params['mincount']*len(eulercount.keys()), len(partlist))
    keeplist = []
    for key in eulerdict.keys():
        eulerpartlist = eulerdict[key]
        # NOTE(review): this compares the TOTAL particle count against
        # mincount; len(eulerpartlist) would seem the intended guard —
        # confirm before changing
        if len(partlist) < self.params['mincount']:
            keeplist.extend(eulerpartlist)
        else:
            keeplist.extend(eulerpartlist[:self.params['mincount']])
    keeplist.sort()
    print "Keeping %d of %d particles"%(len(keeplist), len(partlist))
    # need to set keepfile for commitSubStack
    self.params['keepfile'] = os.path.join(self.params['rundir'], "equalviews.lst")
    f = open(self.params['keepfile'], "w")
    for partnum in keeplist:
        # keep file is 0-based; particleNumber is 1-based
        f.write("%d\n"%(partnum-1))
    f.close()
    ### make a new stack using the keep particles
    oldstackdata = self.reconrundata['stack']
    oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
    newstack = os.path.join(self.params['rundir'], "start.hed")
    apStack.makeNewStack(oldstack, newstack, listfile=self.params['keepfile'], remove=True, bad=True)
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    # need to set stackid for commitSubStack
    self.params['stackid'] = oldstackdata.dbid
    apStack.commitSubStack(self.params, "start.hed")
    apStack.averageStack(stack=newstack)
def start(self):
    """
    Create a substack from an alignment or clustering run using sinedon
    queries, filtered by class membership (keep/drop class lists or a
    keep file) and particle quality (max shift, min score/spread).

    Writes a keep file of 0-based stack particle numbers, builds the
    new stack, averages it, and (if commit) records it in the database.
    Also writes a per-particle decision log to 'test.log' in the cwd.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    newstack = os.path.join(self.params['rundir'], stackdata['name'])
    apStack.checkForPreviousStack(newstack)
    includelist = []
    excludelist = []
    ### list of classes to be excluded
    if self.params['dropclasslist'] is not None:
        excludestrlist = self.params['dropclasslist'].split(",")
        for excludeitem in excludestrlist:
            excludelist.append(int(excludeitem.strip()))
    apDisplay.printMsg("Exclude list: "+str(excludelist))
    ### list of classes to be included
    if self.params['keepclasslist'] is not None:
        includestrlist = self.params['keepclasslist'].split(",")
        for includeitem in includestrlist:
            includelist.append(int(includeitem.strip()))
    ### or read from keepfile; excludefrom flips the file's meaning
    elif self.params['keepfile'] is not None:
        keeplistfile = open(self.params['keepfile'])
        for line in keeplistfile:
            if self.params['excludefrom'] is True:
                excludelist.append(int(line.strip()))
            else:
                includelist.append(int(line.strip()))
        keeplistfile.close()
    apDisplay.printMsg("Include list: "+str(includelist))
    ### get particles from align or cluster stack
    apDisplay.printMsg("Querying database for particles")
    q0 = time.time()
    if self.params['alignid'] is not None:
        alignpartq = appiondata.ApAlignParticleData()
        alignpartq['alignstack'] = self.alignstackdata
        particles = alignpartq.query()
    elif self.params['clusterid'] is not None:
        clusterpartq = appiondata.ApClusteringParticleData()
        clusterpartq['clusterstack'] = self.clusterstackdata
        particles = clusterpartq.query()
    apDisplay.printMsg("Complete in "+apDisplay.timeString(time.time()-q0))
    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    f = open("test.log", "w")
    count = 0
    for part in particles:
        count += 1
        # ClusteringParticles carry the aligned particle under
        # 'alignparticle'; AlignParticles carry refnum under 'ref'
        if 'alignparticle' in part:
            alignpart = part['alignparticle']
            classnum = int(part['refnum'])-1
        else:
            alignpart = part
            classnum = int(part['ref']['refnum'])-1
        emanstackpartnum = alignpart['stackpart']['particleNumber']-1
        ### check shift
        if self.params['maxshift'] is not None:
            shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
            if shift > self.params['maxshift']:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                badshift += 1
                continue
        if self.params['minscore'] is not None:
            ### check score
            if ( alignpart['score'] is not None
                    and alignpart['score'] < self.params['minscore'] ):
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                badscore += 1
                continue
            ### check spread
            # NOTE(review): spread is compared to minscore (no separate
            # minspread parameter) — confirm this is intended
            if ( alignpart['spread'] is not None
                    and alignpart['spread'] < self.params['minscore'] ):
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                badspread += 1
                continue
        if includelist and classnum in includelist:
            includeParticle.append(emanstackpartnum)
            f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
        elif excludelist and not classnum in excludelist:
            includeParticle.append(emanstackpartnum)
            f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
        else:
            excludeParticle += 1
            f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
    f.close()
    includeParticle.sort()
    # "paricles" typos below are preserved: runtime strings are behavior
    if badshift > 0:
        apDisplay.printMsg("%d paricles had a large shift"%(badshift))
    if badscore > 0:
        apDisplay.printMsg("%d paricles had a low score"%(badscore))
    if badspread > 0:
        apDisplay.printMsg("%d paricles had a low spread"%(badspread))
    apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")
    ### write kept particles to file
    self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
    apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum)+"\n")
    kf.close()
    ### get number of particles
    numparticles = len(includeParticle)
    if excludelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes excluded"
            % (numparticles, self.params['dropclasslist']))
    elif includelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes included"
            % (numparticles, self.params['keepclasslist']))
    ### create the new sub stack
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=newstack)
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def getClusterParticles(self):
    """
    Select particles from the cluster stack by class membership and
    build a raw-particle stack from them.

    Returns (newstack, numparticles): the path of the freshly written
    'rawparticles.hed' stack and the number of particles kept.  Also
    writes the keep list (0-based stack particle numbers) to
    self.params['keepfile'].
    """
    # parse the comma-separated class-number lists, if supplied
    excludelist = []
    if self.params['excludelist'] is not None:
        excludelist = [int(tok.strip())
                       for tok in self.params['excludelist'].split(",")]
        apDisplay.printMsg("Exclude list: "+str(excludelist))
    includelist = []
    if self.params['includelist'] is not None:
        includelist = [int(tok.strip())
                       for tok in self.params['includelist'].split(",")]
        apDisplay.printMsg("Include list: "+str(includelist))

    apDisplay.printMsg("Querying for clustered particles")
    clusterpartq = appiondata.ApClusteringParticleData()
    clusterpartq['clusterstack'] = self.clusterstackdata
    particles = clusterpartq.query()
    apDisplay.printMsg("Sorting "+str(len(particles))+" clustered particles")

    # classify every particle as kept or dropped
    includeParticle = []
    excludeParticle = 0
    for count, part in enumerate(particles, 1):
        # progress dot every 250 particles
        if count % 250 == 0:
            sys.stderr.write(".")
        classnum = int(part['refnum']) - 1
        stackpartnum = part['alignparticle']['stackpart']['particleNumber'] - 1
        # drop when an include list exists and the class is absent from it,
        # or an exclude list exists and the class appears in it
        dropped = ((includelist and classnum not in includelist)
                   or (excludelist and classnum in excludelist))
        if dropped:
            excludeParticle += 1
        else:
            includeParticle.append(stackpartnum)
    sys.stderr.write("\n")
    includeParticle.sort()
    apDisplay.printMsg("Keeping "+str(len(includeParticle))
        +" and excluding "+str(excludeParticle)+" particles")

    ### write kept particles to file
    self.params['keepfile'] = os.path.join(self.params['rundir'],
        "keepfile-"+self.timestamp+".list")
    apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
    keepf = open(self.params['keepfile'], "w")
    for stackpartnum in includeParticle:
        keepf.write(str(stackpartnum)+"\n")
    keepf.close()

    numparticles = len(includeParticle)

    ### create the new sub stack from the alignment stack images
    oldstack = os.path.join(self.alignstackdata['path']['path'],
        self.alignstackdata['imagicfile'])
    newstack = os.path.join(self.params['rundir'], "rawparticles.hed")
    apFile.removeStack(newstack)
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'])
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    return newstack, numparticles
def start(self):
    """
    Build a substack keeping only particles whose median Euler jump is
    within self.params['maxjump'], then average the new stack and
    optionally commit it to the database.
    """
    ### new stack path
    oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
    newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
    apStack.checkForPreviousStack(newstack)

    ### get particles from stack
    apDisplay.printMsg("Querying stack particles")
    t0 = time.time()
    stackpartq = appiondata.ApStackParticleData()
    stackpartq['stack'] = self.stackdata
    particles = stackpartq.query()
    apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    # per-particle decision log written to the current working directory
    f = open("test.log", "w")
    count = 0
    apDisplay.printMsg("Processing stack particles")
    t0 = time.time()
    for part in particles:
        count += 1
        if count%500 == 0:
            # progress indicator
            sys.stderr.write(".")
        # database particleNumber is 1-based; EMAN numbering is 0-based
        emanstackpartnum = part['particleNumber']-1
        ### get euler jump data
        jumpq = appiondata.ApEulerJumpData()
        jumpq['particle'] = part
        jumpq['refineRun'] = self.recondata
        jumpdatas = jumpq.query(results=1)
        if not jumpdatas or len(jumpdatas) < 1:
            ### no data -- particle has no jump record, skip it entirely
            continue
        jumpdata = jumpdatas[0]
        if jumpdata['median'] is None or jumpdata['median'] > self.params['maxjump']:
            ### bad particle: jump too large (or undefined)
            excludeParticle += 1
            f.write("%d\t%d\t%.1f\texclude\n"%(count, emanstackpartnum, jumpdata['median']))
        else:
            ### good particle
            includeParticle.append(emanstackpartnum)
            f.write("%d\t%d\t%.1f\tinclude\n"%(count, emanstackpartnum, jumpdata['median']))
    sys.stderr.write("\n")
    apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))
    f.close()
    includeParticle.sort()
    apDisplay.printMsg("Keeping "+str(len(includeParticle))
        +" and excluding "+str(excludeParticle)+" particles")
    #print includeParticle

    ### write kept particles to file
    self.params['keepfile'] = os.path.join(self.params['rundir'],
        "keepfile-"+self.timestamp+".list")
    apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum)+"\n")
    kf.close()

    ### get number of particles
    numparticles = len(includeParticle)
    self.params['description'] += ( " ... %d no jumpers substack" % (numparticles,))

    ### create the new sub stack
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=newstack)
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
def start(self):
    """
    Make a substack of an aligned or clustered stack.

    Particles are filtered by class membership (keep/drop class lists, or
    class numbers read from a keepfile), by maximum alignment shift, and
    by minimum score. Kept particle numbers are written to a keepfile;
    the substack is optionally written to disk, averaged, and committed.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params["stackid"])
    oldstack = os.path.join(stackdata["path"]["path"], stackdata["name"])
    newstack = os.path.join(self.params["rundir"], stackdata["name"])
    apStack.checkForPreviousStack(newstack)

    includelist = []
    excludelist = []
    ### list of classes to be excluded
    if self.params["dropclasslist"] is not None:
        excludelist = [int(item.strip())
            for item in self.params["dropclasslist"].split(",")]
        apDisplay.printMsg("Exclude list: " + str(excludelist))
    ### list of classes to be included
    if self.params["keepclasslist"] is not None:
        includelist = [int(item.strip())
            for item in self.params["keepclasslist"].split(",")]
    ### or read class numbers from keepfile
    elif self.params["keepfile"] is not None:
        keeplistfile = open(self.params["keepfile"])
        try:
            for line in keeplistfile:
                if self.params["excludefrom"] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
        finally:
            keeplistfile.close()
    apDisplay.printMsg("Include list: " + str(includelist))

    ### get particles from align or cluster stack
    apDisplay.printMsg("Querying database for particles")
    q0 = time.time()
    if self.params["alignid"] is not None:
        # DIRECT SQL STUFF: single flat query instead of per-particle joins
        sqlcmd = (
            "SELECT "
            + "apd.partnum, "
            + "apd.xshift, apd.yshift, "
            + "apd.rotation, apd.mirror, "
            + "apd.spread, apd.correlation, "
            + "apd.score, apd.bad, "
            + "spd.particleNumber, "
            + "ard.refnum "
            + "FROM ApAlignParticleData apd "
            + "LEFT JOIN ApStackParticleData spd ON "
            + "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) "
            + "LEFT JOIN ApAlignReferenceData ard ON"
            + "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) "
            + "WHERE `REF|ApAlignStackData|alignstack` = %i"
            % (self.params["alignid"])
        )
        # These are AlignParticles
        particles = sinedon.directq.complexMysqlQuery("appiondata", sqlcmd)
    elif self.params["clusterid"] is not None:
        clusterpartq = appiondata.ApClusteringParticleData()
        clusterpartq["clusterstack"] = self.clusterstackdata
        # These are ClusteringParticles
        particles = clusterpartq.query()
    apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - q0)))

    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    # per-particle decision log written to the current working directory
    f = open("test.log", "w")
    count = 0
    t0 = time.time()
    apDisplay.printMsg("Parsing particle information")
    # find out if there is alignparticle info: 'alignparticle' is a key of
    # any particle in particles if the latter is a ClusteringParticle
    is_cluster_p = False
    if "alignparticle" in particles[0]:
        is_cluster_p = True
    for part in particles:
        count += 1
        # BUG FIX: reset classnum for every particle; previously the
        # cluster branch left it holding the prior particle's value when
        # the refnum lookup failed (and was unbound on the first particle)
        classnum = None
        if is_cluster_p:
            # alignpart is an item of ClusteringParticle
            alignpart = part["alignparticle"]
            try:
                classnum = int(part["refnum"]) - 1
            except (KeyError, TypeError, ValueError):
                apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
            emanstackpartnum = alignpart["stackpart"]["particleNumber"] - 1
        else:
            # particle has info from AlignedParticle as results of direct query
            alignpart = part
            try:
                classnum = int(alignpart["refnum"]) - 1
            except (KeyError, TypeError, ValueError):
                apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
            emanstackpartnum = int(alignpart["particleNumber"]) - 1
        ### check shift
        if self.params["maxshift"] is not None:
            shift = math.hypot(alignpart["xshift"], alignpart["yshift"])
            if shift > self.params["maxshift"]:
                excludeParticle += 1
                if classnum is not None:
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                else:
                    f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                badshift += 1
                continue
        if self.params["minscore"] is not None:
            ### check score
            if alignpart["score"] is not None and alignpart["score"] < self.params["minscore"]:
                excludeParticle += 1
                if classnum is not None:
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                else:
                    f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                badscore += 1
                continue
            ### check spread
            # NOTE(review): spread is compared against 'minscore', not a
            # dedicated spread threshold -- looks suspicious, confirm intent
            if alignpart["spread"] is not None and alignpart["spread"] < self.params["minscore"]:
                excludeParticle += 1
                if classnum is not None:
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                else:
                    f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                badspread += 1
                continue
        ### keep/reject by class membership
        if classnum is not None:
            if includelist and (classnum in includelist):
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
            elif excludelist and not (classnum in excludelist):
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
        else:
            # unclassified particles are always excluded
            excludeParticle += 1
            f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
    f.close()
    includeParticle.sort()
    if badshift > 0:
        apDisplay.printMsg("%d paricles had a large shift" % (badshift))
    if badscore > 0:
        apDisplay.printMsg("%d paricles had a low score" % (badscore))
    if badspread > 0:
        apDisplay.printMsg("%d paricles had a low spread" % (badspread))
    apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - t0)))
    apDisplay.printMsg(
        "Keeping " + str(len(includeParticle))
        + " and excluding " + str(excludeParticle) + " particles"
    )

    ### write kept particles to file
    self.params["keepfile"] = os.path.join(self.params["rundir"], "keepfile-" + self.timestamp + ".list")
    apDisplay.printMsg("writing to keepfile " + self.params["keepfile"])
    kf = open(self.params["keepfile"], "w")
    try:
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
    finally:
        kf.close()

    ### get number of particles
    numparticles = len(includeParticle)
    if excludelist:
        self.params["description"] += " ... %d particle substack with %s classes excluded" % (
            numparticles,
            self.params["dropclasslist"],
        )
    elif includelist:
        self.params["description"] += " ... %d particle substack with %s classes included" % (
            numparticles,
            self.params["keepclasslist"],
        )
    outavg = os.path.join(self.params["rundir"], "average.mrc")

    ### create the new sub stack
    # first check if virtual stack
    if not os.path.isfile(oldstack):
        vstackdata = apStack.getVirtualStackParticlesFromId(self.params["stackid"])
        vparts = vstackdata["particles"]
        oldstack = vstackdata["filename"]
        # get subset of virtualstack
        vpartlist = [int(vparts[p]["particleNumber"]) - 1 for p in includeParticle]
        if self.params["writefile"] is True:
            apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params["savebad"])
        apStack.averageStack(stack=oldstack, outfile=outavg, partlist=vpartlist)
    else:
        if self.params["writefile"] is True:
            apStack.makeNewStack(oldstack, newstack, self.params["keepfile"], bad=self.params["savebad"])
        apStack.averageStack(stack=oldstack, outfile=outavg, partlist=includeParticle)
    if self.params["writefile"] is True and not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    if self.params["commit"] is True:
        apStack.commitSubStack(self.params, included=includeParticle)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def start(self): partdict = {} partlist = [] ### get Euler angles for each particle for iternum in self.iternums: ### get recon iter data reconiterq = appiondata.ApRefineIterData() reconiterq['refineRun'] = self.reconrundata reconiterq['iteration'] = iternum reconiterdata = reconiterq.query( results=1)[0] #this should be unique ### get particle data reconpartq = appiondata.ApRefineParticleData() reconpartq['refineIter'] = reconiterdata apDisplay.printMsg("Querying for particles at " + time.asctime()) reconpartdatas = reconpartq.query() ### group particle data for partdata in reconpartdatas: partnum = partdata['particle']['particleNumber'] if not partnum in partlist: partlist.append(partnum) partdict[(partnum, iternum)] = partdata ### run through particles and check Euler angles partlist.sort() eulerdict = {} eulercount = {} reject = 0 for partnum in partlist: e1d = {} e2d = {} for iternum in self.iternums: if not (partnum, iternum) in partdict: continue partdata = partdict[(partnum, iternum)] euler1 = "%.2f" % (partdata['euler1']) if not euler1 in e1d: e1d[euler1] = 1 else: e1d[euler1] += 1 euler2 = "%.2f" % (partdata['euler2']) if not euler2 in e2d: e2d[euler2] = 1 else: e2d[euler2] += 1 #print partnum, euler1, euler2 counts = [(val, key) for key, val in e1d.items()] e1count, euler1 = max(counts) counts = [(val, key) for key, val in e2d.items()] e2count, euler2 = max(counts) # reject indeterminant particles if e2count < 2 or e1count < 2: reject += 1 continue ### group particles by their Euler angles if not (euler1, euler2) in eulerdict: eulerdict[(euler1, euler2)] = [] eulercount[(euler1, euler2)] = 0 eulerdict[(euler1, euler2)].append(partnum) eulercount[(euler1, euler2)] += 1 print "Rejected %d particles" % (reject) values = eulercount.values() values.sort() print values ### run through Euler angles and count particles counts = [(val, key) for key, val in eulercount.items()] mincount, val = min(counts) self.params['mincount'] = max(self.params['mincount'], 
mincount) #print "Keeping %d of %d particles"%(mincount*len(eulercount.keys()), len(partlist)) print "Keeping %d of %d particles" % ( self.params['mincount'] * len(eulercount.keys()), len(partlist)) keeplist = [] for key in eulerdict.keys(): eulerpartlist = eulerdict[key] if len(partlist) < self.params['mincount']: keeplist.extend(eulerpartlist) else: keeplist.extend(eulerpartlist[:self.params['mincount']]) keeplist.sort() print "Keeping %d of %d particles" % (len(keeplist), len(partlist)) #need to set keepfile for commitSubStack self.params['keepfile'] = os.path.join(self.params['rundir'], "equalviews.lst") f = open(self.params['keepfile'], "w") for partnum in keeplist: f.write("%d\n" % (partnum - 1)) f.close() ### make a new stack using the keep particles oldstackdata = self.reconrundata['stack'] oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name']) newstack = os.path.join(self.params['rundir'], "start.hed") apStack.makeNewStack(oldstack, newstack, listfile=self.params['keepfile'], remove=True, bad=True) if not os.path.isfile(newstack): apDisplay.printError("No stack was created") self.params[ 'stackid'] = oldstackdata.dbid #need to set stackid for commitSubStack apStack.commitSubStack(self.params, "start.hed") apStack.averageStack(stack=newstack)
def start(self):
    """
    RCT (random conical tilt) volume reconstruction.

    Builds a stack of the tilted mates of the good aligned particles,
    back-projects them into an initial volume, then iteratively re-aligns
    the particles to volume projections and re-back-projects for
    self.params['numiters'] rounds, finally running an optional eotest,
    Rmeasure, and database insertion.
    """
    ### get stack data
    notstackdata = apStack.getOnlyStackData(self.params['notstackid'])
    tiltstackdata = apStack.getOnlyStackData(self.params['tiltstackid'])

    ### get good particle numbers
    includeParticle, tiltParticlesData = self.getGoodAlignParticles()
    self.numpart = len(includeParticle)

    ### make doc file of Euler angles
    eulerfile = self.makeEulerDoc(tiltParticlesData)

    ### write kept particles to file
    self.params['keepfile'] = os.path.join(
        self.params['rundir'], "keepfile" + self.timestamp + ".lst")
    apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum) + "\n")
    kf.close()

    ### make new stack of tilted particle from that run
    tiltstackfile = os.path.join(tiltstackdata['path']['path'], tiltstackdata['name'])
    rctstackfile = os.path.join(self.params['rundir'],
        "rctstack" + self.timestamp + ".hed")
    apFile.removeStack(rctstackfile, warn=False)
    apStack.makeNewStack(tiltstackfile, rctstackfile, self.params['keepfile'])
    # SPIDER tools below require a SPIDER-format stack
    spiderstack = self.convertStackToSpider(rctstackfile)
    #self.mirrorParticles(tiltParticlesData, spiderstack)

    ### iterations over volume creation
    ### back project particles into filter volume
    volfile = os.path.join(self.params['rundir'],
        "volume%s-%03d.spi" % (self.timestamp, 0))
    backproject.backprojectCG(spiderstack, eulerfile, volfile,
        numpart=self.numpart, pixrad=self.params['radius'])
    alignstack = spiderstack
    ### center/convert the volume file
    mrcvolfile = self.processVolume(volfile, 0)
    for i in range(self.params['numiters']):
        looptime = time.time()
        iternum = i + 1
        apDisplay.printMsg("running backprojection iteration " + str(iternum))
        ### xy-shift particles to volume projections
        alignstack = backproject.rctParticleShift(
            volfile, alignstack, eulerfile, iternum,
            numpart=self.numpart, pixrad=self.params['radius'],
            timestamp=self.timestamp)
        # previous iteration's volume is no longer needed
        apFile.removeFile(volfile)
        ### back project particles into better volume
        volfile = os.path.join(
            self.params['rundir'],
            "volume%s-%03d.spi" % (self.timestamp, iternum))
        backproject.backproject3F(alignstack, eulerfile, volfile,
            numpart=self.numpart)
        ### center/convert the volume file
        mrcvolfile = self.processVolume(volfile, iternum)
        apDisplay.printColor(
            "finished volume refinement loop in "
            + apDisplay.timeString(time.time() - looptime), "cyan")

    ### optimize Euler angles
    #NOT IMPLEMENTED YET

    ### perform eotest
    if self.params['eotest'] is True:
        self.runEoTest(alignstack, eulerfile)
    # NOTE(review): Rmeasure is run unconditionally, even without eotest
    self.runRmeasure()

    ### insert volumes into DB
    self.insertRctRun(mrcvolfile)
def start(self):
    """
    Build a substack keeping only the particles the refinement iteration
    flagged as good (postRefine_keep == 1), then average the new stack
    and optionally commit it to the database.
    """
    ### new stack path
    oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
    newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
    apStack.checkForPreviousStack(newstack)

    ### get particles from stack
    apDisplay.printMsg("Querying stack particles")
    t0 = time.time()
    stackpartq = appiondata.ApRefineParticleData()
    stackpartq['refineIter'] = self.iterdata
    particles = stackpartq.query()
    apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))

    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    # per-particle decision log written to the current working directory
    f = open("test.log", "w")
    count = 0
    apDisplay.printMsg("Processing stack particles")
    t0 = time.time()
    for part in particles:
        count += 1
        if count % 500 == 0:
            # progress indicator
            sys.stderr.write(".")
        # database particleNumber is 1-based; EMAN numbering is 0-based
        emanstackpartnum = part['particle']['particleNumber'] - 1
        if part['postRefine_keep'] == 1:
            ### good particle
            includeParticle.append(emanstackpartnum)
            f.write("%d\t%d\tinclude\n" % (count, emanstackpartnum))
        else:
            ### bad particle
            excludeParticle += 1
            f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
    sys.stderr.write("\n")
    apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
    f.close()
    includeParticle.sort()
    apDisplay.printMsg("Keeping " + str(len(includeParticle))
        + " and excluding " + str(excludeParticle) + " particles")

    ### write kept particles to file
    self.params['keepfile'] = os.path.join(
        self.params['rundir'], "keepfile-" + self.timestamp + ".list")
    apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum) + "\n")
    kf.close()

    ### get number of particles
    numparticles = len(includeParticle)
    # NOTE(review): description text says "no jumpers" but this filter is
    # postRefine_keep, not Euler jumps -- likely copy-pasted; confirm
    self.params['description'] += (" ... %d no jumpers substack" % (numparticles, ))

    ### create the new sub stack
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=newstack)
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)