def start(self):
    """Center an existing stack with EMAN and commit the result as a substack.

    Reads stack location from the database, runs apStack.centerParticles,
    writes the kept-particle list, then commits and averages the new stack.
    """
    # new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    # make sure that old stack is numbered
    apEMAN.checkStackNumbering(oldstack)
    alignedstack = os.path.join(self.params['rundir'], 'ali.img')
    badstack = os.path.join(self.params['rundir'], 'bad.img')
    apStack.checkForPreviousStack(alignedstack)
    # run centering algorithm
    apStack.centerParticles(oldstack, self.params['mask'], self.params['maxshift'])
    # BUG FIX: fail early if centering produced no stack; previously this was
    # checked only after writeStackParticlesToFile had already been handed the
    # (possibly missing) stack, and the isfile() call had a stray trailing comma
    if not os.path.isfile(alignedstack):
        apDisplay.printError("No stack was created")
    self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')
    apEMAN.writeStackParticlesToFile(alignedstack, self.params['keepfile'])
    # get number of particles; context manager guarantees the handle is closed
    with open(self.params['keepfile'], "r") as f:
        numparticles = len(f.readlines())
    self.params['description'] += (
        " ... %d eman centered substack id %d"
        % (numparticles, self.params['stackid']))
    apStack.commitSubStack(self.params, newname='ali.hed', centered=True)
    apStack.averageStack(stack=alignedstack)
    if os.path.exists(badstack):
        apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
def start(self):
    """Center particles into a new stack and commit it as a substack.

    Delegates the actual centering to self.centerParticles, which returns the
    list of kept particle numbers.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    ### checks
    centerstack = os.path.join(self.params['rundir'], 'align.img')
    badstack = os.path.join(self.params['rundir'], 'bad.img')
    apStack.checkForPreviousStack(centerstack)
    ### run centering algorithm
    keeplist = self.centerParticles(oldstack, centerstack, badstack)
    if not os.path.isfile(centerstack):
        apDisplay.printError("No stack was created")
    self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')
    # BUG FIX: the keep file path was set but never written, and
    # numparticles was referenced without ever being defined (NameError).
    # Persist the kept-particle list and count it.
    # NOTE(review): assumes keeplist holds one particle number per entry --
    # confirm against self.centerParticles
    with open(self.params['keepfile'], 'w') as kf:
        for partnum in keeplist:
            kf.write('%d\n' % (partnum,))
    ### get number of particles
    numparticles = len(keeplist)
    self.params['description'] += (
        " ... %d eman centered substack id %d"
        % (numparticles, self.params['stackid']))
    apStack.commitSubStack(self.params, newname=os.path.basename(centerstack),
        centered=True)
    apStack.averageStack(stack=centerstack)
    if os.path.isfile(badstack):
        apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
def start(self):
    """Run maximum-likelihood alignment and commit the aligned stack as a substack."""
    ### locate the original stack on disk
    stackinfo = apStack.getOnlyStackData(self.params['stackid'])
    origstackpath = os.path.join(stackinfo['path']['path'], stackinfo['name'])
    ### refuse to clobber a previous result
    outstack = os.path.join(self.params['rundir'], 'alignstack.hed')
    apStack.checkForPreviousStack(outstack)
    ### centering via maximum likelihood
    self.runMaxlike()
    ### build the aligned stack from the particle doc file
    doclist = self.readPartDocFile()
    stackfile = self.createAlignedStacks(doclist)
    if not os.path.isfile(outstack):
        apDisplay.printError("No stack was created")
    ### write a keep file covering the full particle count of the source stack
    numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
    self.writeFakeKeepFile(numpart)
    self.params['description'] += (
        " ... %d maxlike centered substack id %d"
        % (numpart, self.params['stackid']))
    apStack.commitSubStack(self.params, newname='alignstack.hed', centered=True)
    apStack.averageStack(stack=outstack)
def start(self):
    """Center particles into a new stack and commit it as a substack.

    self.centerParticles performs the centering and returns the list of
    particle numbers that were kept.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    ### checks
    centerstack = os.path.join(self.params['rundir'], 'align.img')
    badstack = os.path.join(self.params['rundir'], 'bad.img')
    apStack.checkForPreviousStack(centerstack)
    ### run centering algorithm
    keeplist = self.centerParticles(oldstack, centerstack, badstack)
    if not os.path.isfile(centerstack):
        apDisplay.printError("No stack was created")
    self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')
    # BUG FIX: numparticles was used below without ever being assigned
    # (NameError at runtime), and the keep file named here was never created.
    # NOTE(review): assumes keeplist entries are plain particle numbers --
    # confirm against self.centerParticles
    with open(self.params['keepfile'], 'w') as kf:
        for partnum in keeplist:
            kf.write('%d\n' % (partnum,))
    ### get number of particles
    numparticles = len(keeplist)
    self.params['description'] += (
        " ... %d eman centered substack id %d"
        % (numparticles, self.params['stackid']))
    apStack.commitSubStack(self.params, newname=os.path.basename(centerstack),
        centered=True)
    apStack.averageStack(stack=centerstack)
    if os.path.isfile(badstack):
        apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
def start(self):
    """Split a stack into one or more random substacks and commit each.

    Sizes come either from a logarithmic series (--logsplit) or a single
    particle count (--nptcls).
    """
    # find stack
    stackparticles = apStack.getStackParticlesFromId(self.params['stackid'])
    if self.params['logsplit']:
        stacklist = evenLogSplit(self.params['logstart'], len(stackparticles))
    elif self.params['nptcls']:
        stacklist = [self.params['nptcls']]
    else:
        # fixed typo in user-facing message: "nptlcs" -> "nptcls"
        apDisplay.printError("Please specify nptcls or logsplit")
    oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
    # create run directory
    if self.params['rundir'] is None:
        path = oldstackdata['path']['path']
        path = os.path.split(os.path.abspath(path))[0]
        self.params['rundir'] = path
    apDisplay.printMsg("Out directory: " + self.params['rundir'])
    # BUG FIX: self.params['rundir'] is reassigned inside the loop below
    # (commitSubStack reads it), so later iterations would nest their working
    # directories inside the previous one; remember the base directory first
    basedir = self.params['rundir']
    origdescription = self.params['description']
    for stack in stacklist:
        self.params['description'] = (
            origdescription
            + (" ... split %d particles from original stackid=%d"
            % (stack, self.params['stackid'])))
        workingdir = os.path.join(basedir, str(stack))
        # check for previously commited stacks
        newstack = os.path.join(workingdir, self.params['stackname'])
        apStack.checkForPreviousStack(newstack)
        # create rundir and change to that directory
        apDisplay.printMsg("Run directory: " + workingdir)
        apParam.createDirectory(workingdir)
        os.chdir(workingdir)
        # create random list
        lstfile = makeRandomLst(stack, stackparticles, self.params)
        # make new stack
        apStack.makeNewStack(oldstack, newstack, lstfile)
        # commit new stack
        self.params['keepfile'] = os.path.abspath(lstfile)
        self.params['rundir'] = os.path.abspath(workingdir)
        apStack.commitSubStack(self.params)
def start(self):
    """Split a stack into one or more random substacks and commit each one.

    Substack sizes come from a logarithmic series (--logsplit) or a single
    explicit count (--nptcls).
    """
    # find stack
    stackparticles = apStack.getStackParticlesFromId(self.params['stackid'])
    if self.params['logsplit']:
        stacklist = evenLogSplit(self.params['logstart'], len(stackparticles))
    elif self.params['nptcls']:
        stacklist = [self.params['nptcls']]
    else:
        # fixed typo in user-facing message: "nptlcs" -> "nptcls"
        apDisplay.printError("Please specify nptcls or logsplit")
    oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
    # create run directory
    if self.params['rundir'] is None:
        path = oldstackdata['path']['path']
        path = os.path.split(os.path.abspath(path))[0]
        self.params['rundir'] = path
    apDisplay.printMsg("Out directory: " + self.params['rundir'])
    # BUG FIX: the loop overwrites self.params['rundir'] each pass (needed by
    # commitSubStack), which nested every later working directory inside the
    # previous one; capture the true base directory before looping
    basedir = self.params['rundir']
    origdescription = self.params['description']
    for stack in stacklist:
        self.params['description'] = (
            origdescription
            + (" ... split %d particles from original stackid=%d"
            % (stack, self.params['stackid'])))
        workingdir = os.path.join(basedir, str(stack))
        # check for previously commited stacks
        newstack = os.path.join(workingdir, self.params['stackname'])
        apStack.checkForPreviousStack(newstack)
        # create rundir and change to that directory
        apDisplay.printMsg("Run directory: " + workingdir)
        apParam.createDirectory(workingdir)
        os.chdir(workingdir)
        # create random list
        lstfile = makeRandomLst(stack, stackparticles, self.params)
        # make new stack
        apStack.makeNewStack(oldstack, newstack, lstfile)
        # commit new stack
        self.params['keepfile'] = os.path.abspath(lstfile)
        self.params['rundir'] = os.path.abspath(workingdir)
        apStack.commitSubStack(self.params)
def start(self):
    """Make a substack by keeping/dropping alignment or clustering classes,
    with optional per-particle shift and score cutoffs.

    Writes a decision log (test.log) and a keep file, builds the new stack,
    and optionally commits it with a mean plot.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    newstack = os.path.join(self.params['rundir'], stackdata['name'])
    apStack.checkForPreviousStack(newstack)

    includelist = []
    excludelist = []
    ### list of classes to be excluded
    if self.params['dropclasslist'] is not None:
        for excludeitem in self.params['dropclasslist'].split(","):
            excludelist.append(int(excludeitem.strip()))
    apDisplay.printMsg("Exclude list: " + str(excludelist))
    ### list of classes to be included
    if self.params['keepclasslist'] is not None:
        for includeitem in self.params['keepclasslist'].split(","):
            includelist.append(int(includeitem.strip()))
    ### or read from keepfile
    elif self.params['keepfile'] is not None:
        with open(self.params['keepfile']) as keeplistfile:
            for line in keeplistfile:
                if self.params['excludefrom'] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
    apDisplay.printMsg("Include list: " + str(includelist))

    ### get particles from align or cluster stack
    apDisplay.printMsg("Querying database for particles")
    q0 = time.time()
    # NOTE(review): if neither alignid nor clusterid is set, 'particles' is
    # never bound; presumably validated upstream -- confirm
    if self.params['alignid'] is not None:
        alignpartq = appiondata.ApAlignParticleData()
        alignpartq['alignstack'] = self.alignstackdata
        particles = alignpartq.query()
    elif self.params['clusterid'] is not None:
        clusterpartq = appiondata.ApClusteringParticleData()
        clusterpartq['clusterstack'] = self.clusterstackdata
        particles = clusterpartq.query()
    apDisplay.printMsg("Complete in " + apDisplay.timeString(time.time() - q0))

    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    count = 0
    with open("test.log", "w") as f:
        for part in particles:
            count += 1
            # clustering particles carry their align particle; align particles
            # reach their class through 'ref'
            if 'alignparticle' in part:
                alignpart = part['alignparticle']
                classnum = int(part['refnum']) - 1
            else:
                alignpart = part
                classnum = int(part['ref']['refnum']) - 1
            emanstackpartnum = alignpart['stackpart']['particleNumber'] - 1
            ### check shift
            if self.params['maxshift'] is not None:
                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                if shift > self.params['maxshift']:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    badshift += 1
                    continue
            if self.params['minscore'] is not None:
                ### check score
                if (alignpart['score'] is not None
                        and alignpart['score'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    badscore += 1
                    continue
                ### check spread
                # NOTE(review): spread is compared against 'minscore'; if a
                # separate spread cutoff was intended it needs its own param
                if (alignpart['spread'] is not None
                        and alignpart['spread'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    badspread += 1
                    continue
            if includelist and classnum in includelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
            elif excludelist and classnum not in excludelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
    includeParticle.sort()
    # fixed typo in user-facing messages: "paricles" -> "particles"
    if badshift > 0:
        apDisplay.printMsg("%d particles had a large shift" % (badshift))
    if badscore > 0:
        apDisplay.printMsg("%d particles had a low score" % (badscore))
    if badspread > 0:
        apDisplay.printMsg("%d particles had a low spread" % (badspread))
    apDisplay.printMsg("Keeping " + str(len(includeParticle))
        + " and excluding " + str(excludeParticle) + " particles")

    ### write kept particles to file
    self.params['keepfile'] = os.path.join(
        self.params['rundir'], "keepfile-" + self.timestamp + ".list")
    apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
    with open(self.params['keepfile'], "w") as kf:
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")

    ### get number of particles
    numparticles = len(includeParticle)
    if excludelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes excluded"
            % (numparticles, self.params['dropclasslist']))
    elif includelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes included"
            % (numparticles, self.params['keepclasslist']))

    ### create the new sub stack
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'],
        bad=self.params['savebad'])
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=newstack)
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def start(self):
    """Create substack(s) from an existing stack.

    Selection modes (mutually exclusive): first/last range, random draw,
    round-robin split into N files, inverted exclude-file, explicit
    include/exclude particle lists, or a pre-existing keep file.
    Each resulting stack is built, committed, averaged, and optionally plotted.
    """
    # old stack size
    stacksize = apStack.getNumberStackParticlesFromId(self.params['stackid'])
    # defined up front so the include/exclude branch below can never hit a
    # NameError when e.g. an empty-string (falsy but non-None) value is passed
    excludelist = []
    includelist = []
    # if exclude or include lists are not defined...
    if self.params['exclude'] is None and self.params['include'] is None:
        # if first and last are specified, create a file
        if self.params['first'] is not None and self.params['last'] is not None:
            stp = str(self.params['first'])
            enp = str(self.params['last'])
            fname = 'sub' + str(self.params['stackid']) + '_' + stp + '-' + enp + '.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            apDisplay.printMsg("Creating keep list: " + self.params['keepfile'])
            f = open(self.params['keepfile'], 'w')
            # user-facing numbering is 1-based; keep lists are 0-based
            for i in range(self.params['first'], self.params['last'] + 1):
                f.write('%d\n' % (int(i) - 1))
            f.close()
        # generate the random list by giving number and create the file
        elif self.params['random'] is not None:
            fname = "random%d_%d.lst" % (self.params['stackid'], self.params['random'])
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            apDisplay.printMsg("Creating keep list: " + self.params['keepfile'])
            # create a file
            f = open(self.params['keepfile'], 'w')
            # generate a random sequence by giving size
            # NOTE(review): the sample pool is range(params['last']), which is
            # None unless --last was also supplied; stacksize looks like the
            # intended pool -- confirm before changing
            randomList = random.sample(xrange(self.params['last']), self.params['random'])
            randomList.sort()
            for partnum in randomList:
                f.write('%d\n' % partnum)
            f.close()
        # if splitting, create files containing the split values
        elif self.params['split'] > 1:
            for i in range(self.params['split']):
                fname = 'sub' + str(self.params['stackid']) + '.' + str(i + 1) + '.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
                apDisplay.printMsg("Creating keep list: " + self.params['keepfile'])
                f = open(self.params['keepfile'], 'w')
                # round-robin: particle p goes to split (p mod split)
                for p in range(stacksize):
                    if (p % self.params['split']) - i == 0:
                        f.write('%i\n' % p)
                f.close()
        # if exclude-from option is specified, convert particles to exclude
        elif self.params['excludefile'] is True:
            oldkf = open(self.params['keepfile'])
            partlist = []
            for line in oldkf:
                try:
                    partlist.append(int(line.strip()))
                except ValueError:
                    # was a bare except; only skip unparseable lines
                    continue
            oldkf.close()
            # create new list excluding the particles
            apDisplay.printMsg("Converting keep file to exclude file")
            newkeepfile = "tmpnewkeepfile.txt"
            newkf = open(newkeepfile, 'w')
            # set membership is O(1); scanning the list per particle was O(n^2)
            partset = set(partlist)
            for p in range(stacksize):
                if p not in partset:
                    newkf.write("%i\n" % p)
            newkf.close()
            self.params['keepfile'] = os.path.abspath(newkeepfile)
        # otherwise, just copy the file
        elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
            shutil.copy(self.params['keepfile'],
                os.path.basename(self.params['keepfile']))
    # if either exclude or include lists is defined
    elif self.params['exclude'] or self.params['include']:
        ### list of particles to be excluded
        if self.params['exclude'] is not None:
            excludestrlist = self.params['exclude'].split(",")
            for excld in excludestrlist:
                excludelist.append(int(excld.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))
        ### list of particles to be included
        if self.params['include'] is not None:
            includestrlist = self.params['include'].split(",")
            for incld in includestrlist:
                includelist.append(int(incld.strip()))
        apDisplay.printMsg("Include list: " + str(includelist))
    # new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    newname = stackdata['name']
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    # if include or exclude list is given...
    if self.params['include'] is not None or self.params['exclude'] is not None:
        includeParticle = []
        excludeParticle = 0
        # sets for O(1) membership; lists are kept for the truthiness
        # checks below
        includeset = set(includelist)
        excludeset = set(excludelist)
        for partnum in range(stacksize):
            if includelist and partnum in includeset:
                includeParticle.append(partnum)
            elif excludelist and partnum not in excludeset:
                includeParticle.append(partnum)
            else:
                excludeParticle += 1
        includeParticle.sort()
        ### write kept particles to file
        self.params['keepfile'] = os.path.join(self.params['rundir'],
            "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()
        # get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d"
                % (numparticles, self.params['stackid']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d"
                % (numparticles, self.params['stackid']))
    ogdescr = self.params['description']
    for i in range(self.params['split']):
        ### always do this, if not splitting split=1
        sb = os.path.splitext(stackdata['name'])
        if self.params['first'] is not None and self.params['last'] is not None:
            newname = (sb[0] + '.' + str(self.params['first']) + '-'
                + str(self.params['last']) + sb[-1])
        elif self.params['random'] is not None:
            newname = "%s-random%d%s" % (sb[0], self.params['random'], sb[-1])
        elif self.params['split'] > 1:
            fname = 'sub' + str(self.params['stackid']) + '.' + str(i + 1) + '.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            newname = sb[0] + '.' + str(i + 1) + 'of' + str(self.params['split']) + sb[-1]
        newstack = os.path.join(self.params['rundir'], newname)
        apStack.checkForPreviousStack(newstack)
        # get number of particles
        f = open(self.params['keepfile'], "r")
        numparticles = len(f.readlines())
        f.close()
        self.params['description'] = ogdescr
        self.params['description'] += (
            " ... %d particle substack of stackid %d"
            % (numparticles, self.params['stackid']))
        # if splitting, add to description
        if self.params['split'] > 1:
            self.params['description'] += (" (%i of %i)"
                % (i + 1, self.params['split']))
        # create the new sub stack
        if not self.params['correctbeamtilt']:
            apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
        else:
            apBeamTilt.makeCorrectionStack(self.params['stackid'], oldstack, newstack)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, sorted=False)
        apStack.averageStack(stack=newstack)
        newstackid = apStack.getStackIdFromPath(newstack)
        if self.params['meanplot'] is True:
            apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
            apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
    """Build a substack containing only particles flagged postRefine_keep."""
    ### source and destination stack paths
    srcstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
    dststack = os.path.join(self.params['rundir'], self.stackdata['name'])
    apStack.checkForPreviousStack(dststack)
    ### fetch the refined particles for this iteration
    apDisplay.printMsg("Querying stack particles")
    t0 = time.time()
    partquery = appiondata.ApRefineParticleData()
    partquery['refineIter'] = self.iterdata
    particles = partquery.query()
    apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
    ### log every decision and collect the kept particle numbers
    keepers = []
    numexcluded = 0
    apDisplay.printMsg("Processing stack particles")
    t0 = time.time()
    logfile = open("test.log", "w")
    for count, part in enumerate(particles, 1):
        if count % 500 == 0:
            sys.stderr.write(".")
        emanpartnum = part['particle']['particleNumber'] - 1
        if part['postRefine_keep'] == 1:
            ### good particle
            keepers.append(emanpartnum)
            logfile.write("%d\t%d\tinclude\n" % (count, emanpartnum))
        else:
            ### bad particle
            numexcluded += 1
            logfile.write("%d\t%d\texclude\n" % (count, emanpartnum))
    sys.stderr.write("\n")
    apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
    logfile.close()
    keepers.sort()
    apDisplay.printMsg("Keeping " + str(len(keepers))
        + " and excluding " + str(numexcluded) + " particles")
    ### write kept particles to file
    self.params['keepfile'] = os.path.join(
        self.params['rundir'], "keepfile-" + self.timestamp + ".list")
    apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
    keepfh = open(self.params['keepfile'], "w")
    for partnum in keepers:
        keepfh.write(str(partnum) + "\n")
    keepfh.close()
    ### get number of particles
    numparticles = len(keepers)
    self.params['description'] += (" ... %d no jumpers substack" % (numparticles,))
    ### create the new sub stack
    apStack.makeNewStack(srcstack, dststack, self.params['keepfile'], bad=True)
    if not os.path.isfile(dststack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=dststack)
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(dststack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
def start(self):
    """Build a substack excluding particles whose median Euler jump exceeds
    maxjump (or that have no recorded median)."""
    ### new stack path
    oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
    newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
    apStack.checkForPreviousStack(newstack)
    ### get particles from stack
    apDisplay.printMsg("Querying stack particles")
    t0 = time.time()
    stackpartq = appiondata.ApStackParticleData()
    stackpartq['stack'] = self.stackdata
    particles = stackpartq.query()
    apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    f = open("test.log", "w")
    count = 0
    apDisplay.printMsg("Processing stack particles")
    t0 = time.time()
    for part in particles:
        count += 1
        if count % 500 == 0:
            sys.stderr.write(".")
        emanstackpartnum = part['particleNumber'] - 1
        ### get euler jump data
        jumpq = appiondata.ApEulerJumpData()
        jumpq['particle'] = part
        jumpq['refineRun'] = self.recondata
        jumpdatas = jumpq.query(results=1)
        if not jumpdatas or len(jumpdatas) < 1:
            ### no data
            continue
        jumpdata = jumpdatas[0]
        median = jumpdata['median']
        if median is None or median > self.params['maxjump']:
            ### bad particle
            excludeParticle += 1
            # BUG FIX: the exclude branch is entered when median is None, but
            # "%.1f" % None raises TypeError; log -1.0 for a missing median
            f.write("%d\t%d\t%.1f\texclude\n" % (count, emanstackpartnum,
                -1.0 if median is None else median))
        else:
            ### good particle
            includeParticle.append(emanstackpartnum)
            f.write("%d\t%d\t%.1f\tinclude\n" % (count, emanstackpartnum, median))
    sys.stderr.write("\n")
    apDisplay.printMsg("Finished in " + apDisplay.timeString(time.time() - t0))
    f.close()
    includeParticle.sort()
    apDisplay.printMsg("Keeping " + str(len(includeParticle))
        + " and excluding " + str(excludeParticle) + " particles")
    ### write kept particles to file
    self.params['keepfile'] = os.path.join(self.params['rundir'],
        "keepfile-" + self.timestamp + ".list")
    apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum) + "\n")
    kf.close()
    ### get number of particles
    numparticles = len(includeParticle)
    self.params['description'] += (
        " ... %d no jumpers substack" % (numparticles,))
    ### create the new sub stack
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=newstack)
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
def start(self):
    """Create substack(s) from an existing stack.

    Selection modes (mutually exclusive): first/last range, random draw,
    round-robin split into N files, inverted exclude-file, explicit
    include/exclude particle lists, or a pre-existing keep file. Each
    resulting stack is built, committed, averaged, and optionally plotted.
    """
    # old stack size
    stacksize = apStack.getNumberStackParticlesFromId(self.params['stackid'])
    # defined up front so the include/exclude branch below can never hit a
    # NameError when e.g. an empty-string (falsy but non-None) value is passed
    excludelist = []
    includelist = []
    # if exclude or include lists are not defined...
    if self.params['exclude'] is None and self.params['include'] is None:
        # if first and last are specified, create a file
        if self.params['first'] is not None and self.params['last'] is not None:
            stp = str(self.params['first'])
            enp = str(self.params['last'])
            fname = 'sub' + str(self.params['stackid']) + '_' + stp + '-' + enp + '.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            apDisplay.printMsg("Creating keep list: " + self.params['keepfile'])
            f = open(self.params['keepfile'], 'w')
            # user-facing numbering is 1-based; keep lists are 0-based
            for i in range(self.params['first'], self.params['last'] + 1):
                f.write('%d\n' % (int(i) - 1))
            f.close()
        # generate the random list by giving number and create the file
        elif self.params['random'] is not None:
            fname = "random%d_%d.lst" % (self.params['stackid'], self.params['random'])
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            apDisplay.printMsg("Creating keep list: " + self.params['keepfile'])
            # create a file
            f = open(self.params['keepfile'], 'w')
            # generate a random sequence by giving size
            # NOTE(review): the sample pool is range(params['last']), which is
            # None unless --last was also supplied; stacksize looks like the
            # intended pool -- confirm before changing
            randomList = random.sample(xrange(self.params['last']), self.params['random'])
            randomList.sort()
            for partnum in randomList:
                f.write('%d\n' % partnum)
            f.close()
        # if splitting, create files containing the split values
        elif self.params['split'] > 1:
            for i in range(self.params['split']):
                fname = 'sub' + str(self.params['stackid']) + '.' + str(i + 1) + '.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
                apDisplay.printMsg("Creating keep list: " + self.params['keepfile'])
                f = open(self.params['keepfile'], 'w')
                # round-robin: particle p goes to split (p mod split)
                for p in range(stacksize):
                    if (p % self.params['split']) - i == 0:
                        f.write('%i\n' % p)
                f.close()
        # if exclude-from option is specified, convert particles to exclude
        elif self.params['excludefile'] is True:
            oldkf = open(self.params['keepfile'])
            partlist = []
            for line in oldkf:
                try:
                    partlist.append(int(line.strip()))
                except ValueError:
                    # was a bare except; only skip unparseable lines
                    continue
            oldkf.close()
            # create new list excluding the particles
            apDisplay.printMsg("Converting keep file to exclude file")
            newkeepfile = "tmpnewkeepfile.txt"
            newkf = open(newkeepfile, 'w')
            # set membership is O(1); scanning the list per particle was O(n^2)
            partset = set(partlist)
            for p in range(stacksize):
                if p not in partset:
                    newkf.write("%i\n" % p)
            newkf.close()
            self.params['keepfile'] = os.path.abspath(newkeepfile)
        # otherwise, just copy the file
        elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
            shutil.copy(self.params['keepfile'],
                os.path.basename(self.params['keepfile']))
    # if either exclude or include lists is defined
    elif self.params['exclude'] or self.params['include']:
        ### list of particles to be excluded
        if self.params['exclude'] is not None:
            excludestrlist = self.params['exclude'].split(",")
            for excld in excludestrlist:
                excludelist.append(int(excld.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))
        ### list of particles to be included
        if self.params['include'] is not None:
            includestrlist = self.params['include'].split(",")
            for incld in includestrlist:
                includelist.append(int(incld.strip()))
        apDisplay.printMsg("Include list: " + str(includelist))
    # new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    newname = stackdata['name']
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    # if include or exclude list is given...
    if self.params['include'] is not None or self.params['exclude'] is not None:
        includeParticle = []
        excludeParticle = 0
        # sets for O(1) membership; lists kept for the truthiness checks
        includeset = set(includelist)
        excludeset = set(excludelist)
        for partnum in range(stacksize):
            if includelist and partnum in includeset:
                includeParticle.append(partnum)
            elif excludelist and partnum not in excludeset:
                includeParticle.append(partnum)
            else:
                excludeParticle += 1
        includeParticle.sort()
        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()
        # get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d"
                % (numparticles, self.params['stackid']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack of stackid %d"
                % (numparticles, self.params['stackid']))
    ogdescr = self.params['description']
    for i in range(self.params['split']):
        ### always do this, if not splitting split=1
        sb = os.path.splitext(stackdata['name'])
        if self.params['first'] is not None and self.params['last'] is not None:
            newname = (sb[0] + '.' + str(self.params['first']) + '-'
                + str(self.params['last']) + sb[-1])
        elif self.params['random'] is not None:
            newname = "%s-random%d%s" % (sb[0], self.params['random'], sb[-1])
        elif self.params['split'] > 1:
            fname = 'sub' + str(self.params['stackid']) + '.' + str(i + 1) + '.lst'
            self.params['keepfile'] = os.path.join(self.params['rundir'], fname)
            newname = sb[0] + '.' + str(i + 1) + 'of' + str(self.params['split']) + sb[-1]
        newstack = os.path.join(self.params['rundir'], newname)
        apStack.checkForPreviousStack(newstack)
        # get number of particles
        f = open(self.params['keepfile'], "r")
        numparticles = len(f.readlines())
        f.close()
        self.params['description'] = ogdescr
        self.params['description'] += (
            " ... %d particle substack of stackid %d"
            % (numparticles, self.params['stackid']))
        # if splitting, add to description
        if self.params['split'] > 1:
            self.params['description'] += (" (%i of %i)"
                % (i + 1, self.params['split']))
        # create the new sub stack
        if not self.params['correctbeamtilt']:
            apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
        else:
            apBeamTilt.makeCorrectionStack(self.params['stackid'], oldstack, newstack)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, sorted=False)
        apStack.averageStack(stack=newstack)
        newstackid = apStack.getStackIdFromPath(newstack)
        if self.params['meanplot'] is True:
            apDisplay.printMsg(
                "creating Stack Mean Plot montage for stackid")
            apStackMeanPlot.makeStackMeanPlot(newstackid)
def start(self):
    """Create a substack from an alignment or clustering run.

    Builds include/exclude class lists from run parameters (or a
    keepfile), queries the particles for the given align/cluster stack,
    drops particles with a large shift, low score or low spread, writes
    the kept EMAN particle numbers to a keepfile, then optionally
    writes, averages and commits the new stack.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params["stackid"])
    oldstack = os.path.join(stackdata["path"]["path"], stackdata["name"])
    newstack = os.path.join(self.params["rundir"], stackdata["name"])
    apStack.checkForPreviousStack(newstack)

    includelist = []
    excludelist = []
    ### list of classes to be excluded
    if self.params["dropclasslist"] is not None:
        for excludeitem in self.params["dropclasslist"].split(","):
            excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))
    ### list of classes to be included
    if self.params["keepclasslist"] is not None:
        for includeitem in self.params["keepclasslist"].split(","):
            includelist.append(int(includeitem.strip()))
    ### or read from keepfile
    elif self.params["keepfile"] is not None:
        # 'with' guarantees the keepfile is closed even if a line fails to parse
        with open(self.params["keepfile"]) as keeplistfile:
            for line in keeplistfile:
                # 'excludefrom' flips the keepfile meaning to an exclude list
                if self.params["excludefrom"] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
    apDisplay.printMsg("Include list: " + str(includelist))

    ### get particles from align or cluster stack
    apDisplay.printMsg("Querying database for particles")
    q0 = time.time()
    if self.params["alignid"] is not None:
        # DIRECT SQL STUFF
        sqlcmd = (
            "SELECT "
            + "apd.partnum, "
            + "apd.xshift, apd.yshift, "
            + "apd.rotation, apd.mirror, "
            + "apd.spread, apd.correlation, "
            + "apd.score, apd.bad, "
            + "spd.particleNumber, "
            + "ard.refnum "
            + "FROM ApAlignParticleData apd "
            + "LEFT JOIN ApStackParticleData spd ON "
            + "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) "
            + "LEFT JOIN ApAlignReferenceData ard ON"
            + "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) "
            + "WHERE `REF|ApAlignStackData|alignstack` = %i" % (self.params["alignid"])
        )
        # These are AlignParticles
        particles = sinedon.directq.complexMysqlQuery("appiondata", sqlcmd)
    elif self.params["clusterid"] is not None:
        clusterpartq = appiondata.ApClusteringParticleData()
        clusterpartq["clusterstack"] = self.clusterstackdata
        # These are ClusteringParticles
        particles = clusterpartq.query()
    apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - q0)))

    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    count = 0
    t0 = time.time()
    apDisplay.printMsg("Parsing particle information")
    # find out if there is alignparticle info:
    # alignparticle is a key of any particle in particles if the latter is
    # a ClusteringParticle
    # guard against an empty query result ('particles[0]' would raise)
    is_cluster_p = bool(particles) and "alignparticle" in particles[0]
    with open("test.log", "w") as f:
        for part in particles:
            count += 1
            if is_cluster_p:
                # alignpart is an item of ClusteringParticle
                alignpart = part["alignparticle"]
                try:
                    classnum = int(part["refnum"]) - 1
                except (KeyError, TypeError, ValueError):
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    # BUG FIX: classnum was previously left unset on this path,
                    # giving a NameError (first particle) or a stale value from
                    # the previous iteration; treat as unclassed like the align
                    # branch does
                    classnum = None
                emanstackpartnum = alignpart["stackpart"]["particleNumber"] - 1
            else:
                # particle has info from AlignedParticle as results of direct query
                alignpart = part
                try:
                    classnum = int(alignpart["refnum"]) - 1
                except (KeyError, TypeError, ValueError):
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    classnum = None
                emanstackpartnum = int(alignpart["particleNumber"]) - 1
            ### check shift
            if self.params["maxshift"] is not None:
                shift = math.hypot(alignpart["xshift"], alignpart["yshift"])
                if shift > self.params["maxshift"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badshift += 1
                    continue
            if self.params["minscore"] is not None:
                ### check score
                if alignpart["score"] is not None and alignpart["score"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badscore += 1
                    continue
                ### check spread
                # NOTE(review): spread is compared against 'minscore' (there is
                # no separate 'minspread' parameter) -- preserved as-is
                if alignpart["spread"] is not None and alignpart["spread"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badspread += 1
                    continue
            if classnum is not None:
                if includelist and (classnum in includelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                elif excludelist and classnum not in excludelist:
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                else:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
            else:
                # unclassed particles are always excluded
                excludeParticle += 1
                f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
    includeParticle.sort()
    if badshift > 0:
        apDisplay.printMsg("%d particles had a large shift" % (badshift))
    if badscore > 0:
        apDisplay.printMsg("%d particles had a low score" % (badscore))
    if badspread > 0:
        apDisplay.printMsg("%d particles had a low spread" % (badspread))
    apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - t0)))
    apDisplay.printMsg(
        "Keeping " + str(len(includeParticle)) + " and excluding " + str(excludeParticle) + " particles"
    )

    ### write kept particles to file
    self.params["keepfile"] = os.path.join(self.params["rundir"], "keepfile-" + self.timestamp + ".list")
    apDisplay.printMsg("writing to keepfile " + self.params["keepfile"])
    with open(self.params["keepfile"], "w") as kf:
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")

    ### get number of particles
    numparticles = len(includeParticle)
    if excludelist:
        self.params["description"] += " ... %d particle substack with %s classes excluded" % (
            numparticles,
            self.params["dropclasslist"],
        )
    elif includelist:
        self.params["description"] += " ... %d particle substack with %s classes included" % (
            numparticles,
            self.params["keepclasslist"],
        )
    outavg = os.path.join(self.params["rundir"], "average.mrc")

    ### create the new sub stack
    # first check if virtual stack
    if not os.path.isfile(oldstack):
        vstackdata = apStack.getVirtualStackParticlesFromId(self.params["stackid"])
        vparts = vstackdata["particles"]
        oldstack = vstackdata["filename"]
        # get subset of virtualstack: map kept indices to real particle numbers
        vpartlist = [int(vparts[p]["particleNumber"]) - 1 for p in includeParticle]
        if self.params["writefile"] is True:
            apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params["savebad"])
        apStack.averageStack(stack=oldstack, outfile=outavg, partlist=vpartlist)
    else:
        if self.params["writefile"] is True:
            apStack.makeNewStack(oldstack, newstack, self.params["keepfile"], bad=self.params["savebad"])
        apStack.averageStack(stack=oldstack, outfile=outavg, partlist=includeParticle)
    if self.params["writefile"] is True and not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    if self.params["commit"] is True:
        apStack.commitSubStack(self.params, included=includeParticle)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def start(self):
    """Create a substack from an alignment or clustering run.

    Builds include/exclude class lists from run parameters (or a
    keepfile), queries the particles, drops particles with a large
    shift, low score or low spread, writes the kept EMAN particle
    numbers to a keepfile, then optionally writes, averages and
    commits the new stack.
    """
    ### new stack path
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    newstack = os.path.join(self.params['rundir'], stackdata['name'])
    apStack.checkForPreviousStack(newstack)

    includelist = []
    excludelist = []
    ### list of classes to be excluded
    if self.params['dropclasslist'] is not None:
        for excludeitem in self.params['dropclasslist'].split(","):
            excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: "+str(excludelist))
    ### list of classes to be included
    if self.params['keepclasslist'] is not None:
        for includeitem in self.params['keepclasslist'].split(","):
            includelist.append(int(includeitem.strip()))
    ### or read from keepfile
    elif self.params['keepfile'] is not None:
        # 'with' guarantees the keepfile is closed even if a line fails to parse
        with open(self.params['keepfile']) as keeplistfile:
            for line in keeplistfile:
                # 'excludefrom' flips the keepfile meaning to an exclude list
                if self.params['excludefrom'] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
    apDisplay.printMsg("Include list: "+str(includelist))

    ### get particles from align or cluster stack
    apDisplay.printMsg("Querying database for particles")
    q0 = time.time()
    if self.params['alignid'] is not None:
        # DIRECT SQL STUFF
        sqlcmd = "SELECT " + \
            "apd.partnum, " + \
            "apd.xshift, apd.yshift, " + \
            "apd.rotation, apd.mirror, " + \
            "apd.spread, apd.correlation, " + \
            "apd.score, apd.bad, " + \
            "spd.particleNumber, " + \
            "ard.refnum "+ \
            "FROM ApAlignParticleData apd " + \
            "LEFT JOIN ApStackParticleData spd ON " + \
            "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) " + \
            "LEFT JOIN ApAlignReferenceData ard ON" + \
            "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) " + \
            "WHERE `REF|ApAlignStackData|alignstack` = %i"%(self.params['alignid'])
        # These are AlignParticles
        particles = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)
    elif self.params['clusterid'] is not None:
        clusterpartq = appiondata.ApClusteringParticleData()
        clusterpartq['clusterstack'] = self.clusterstackdata
        # These are ClusteringParticles
        particles = clusterpartq.query()
    apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-q0)))

    ### write included particles to text file
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    count = 0
    t0 = time.time()
    apDisplay.printMsg("Parsing particle information")
    # find out if there is alignparticle info:
    # alignparticle is a key of any particle in particles if the latter is
    # a ClusteringParticle
    # guard against an empty query result ('particles[0]' would raise)
    is_cluster_p = bool(particles) and 'alignparticle' in particles[0]
    with open("test.log", "w") as f:
        for part in particles:
            count += 1
            if is_cluster_p:
                # alignpart is an item of ClusteringParticle
                alignpart = part['alignparticle']
                try:
                    classnum = int(part['refnum'])-1
                except (KeyError, TypeError, ValueError):
                    apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
                    # BUG FIX: classnum was previously left unset on this path,
                    # giving a NameError (first particle) or a stale value from
                    # the previous iteration; treat as unclassed like the align
                    # branch does
                    classnum = None
                emanstackpartnum = alignpart['stackpart']['particleNumber']-1
            else:
                # particle has info from AlignedParticle as results of direct query
                alignpart = part
                try:
                    classnum = int(alignpart['refnum'])-1
                except (KeyError, TypeError, ValueError):
                    apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
                    classnum = None
                emanstackpartnum = int(alignpart['particleNumber'])-1
            ### check shift
            if self.params['maxshift'] is not None:
                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                if shift > self.params['maxshift']:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
                    badshift += 1
                    continue
            if self.params['minscore'] is not None:
                ### check score
                if ( alignpart['score'] is not None
                        and alignpart['score'] < self.params['minscore'] ):
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
                    badscore += 1
                    continue
                ### check spread
                # NOTE(review): spread is compared against 'minscore' (there is
                # no separate 'minspread' parameter) -- preserved as-is
                if ( alignpart['spread'] is not None
                        and alignpart['spread'] < self.params['minscore'] ):
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
                    badspread += 1
                    continue
            if classnum is not None:
                if includelist and (classnum in includelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
                elif excludelist and classnum not in excludelist:
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
                else:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
            else:
                # unclassed particles are always excluded
                excludeParticle += 1
                f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
    includeParticle.sort()
    if badshift > 0:
        apDisplay.printMsg("%d particles had a large shift"%(badshift))
    if badscore > 0:
        apDisplay.printMsg("%d particles had a low score"%(badscore))
    if badspread > 0:
        apDisplay.printMsg("%d particles had a low spread"%(badspread))
    apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-t0)))
    apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

    ### write kept particles to file
    self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
    apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
    with open(self.params['keepfile'], "w") as kf:
        for partnum in includeParticle:
            kf.write(str(partnum)+"\n")

    ### get number of particles
    numparticles = len(includeParticle)
    if excludelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes excluded"
            % (numparticles, self.params['dropclasslist']))
    elif includelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes included"
            % (numparticles, self.params['keepclasslist']))
    outavg = os.path.join(self.params['rundir'],"average.mrc")

    ### create the new sub stack
    # first check if virtual stack
    if not os.path.isfile(oldstack):
        vstackdata = apStack.getVirtualStackParticlesFromId(self.params['stackid'])
        vparts = vstackdata['particles']
        oldstack = vstackdata['filename']
        # get subset of virtualstack: map kept indices to real particle numbers
        vpartlist = [int(vparts[p]['particleNumber'])-1 for p in includeParticle]
        if self.params['writefile'] is True:
            apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params['savebad'])
        apStack.averageStack(stack=oldstack,outfile=outavg,partlist=vpartlist)
    else:
        if self.params['writefile'] is True:
            apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
        apStack.averageStack(stack=oldstack,outfile=outavg,partlist=includeParticle)
    if self.params['writefile'] is True and not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params,included=includeParticle)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
def start(self):
    """Make a substack by keeping or dropping alignment/clustering classes.

    Filtered EMAN particle numbers are written to a keepfile, the new
    stack is created and averaged, and the result is optionally
    committed to the database.
    """
    ### locate the source stack and the destination stack
    stackdata = apStack.getOnlyStackData(self.params['stackid'])
    oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
    newstack = os.path.join(self.params['rundir'], stackdata['name'])
    apStack.checkForPreviousStack(newstack)

    includelist = []
    excludelist = []
    ### classes to drop
    if self.params['dropclasslist'] is not None:
        excludelist = [int(token.strip())
                       for token in self.params['dropclasslist'].split(",")]
        apDisplay.printMsg("Exclude list: "+str(excludelist))
    ### classes to keep, either given directly or read from a keepfile
    if self.params['keepclasslist'] is not None:
        includelist = [int(token.strip())
                       for token in self.params['keepclasslist'].split(",")]
    elif self.params['keepfile'] is not None:
        keeplistfile = open(self.params['keepfile'])
        for line in keeplistfile:
            value = int(line.strip())
            # 'excludefrom' flips the keepfile meaning to an exclude list
            if self.params['excludefrom'] is True:
                excludelist.append(value)
            else:
                includelist.append(value)
        keeplistfile.close()
    apDisplay.printMsg("Include list: "+str(includelist))

    ### pull particle rows for the alignment or the clustering run
    apDisplay.printMsg("Querying database for particles")
    querystart = time.time()
    if self.params['alignid'] is not None:
        partquery = appiondata.ApAlignParticleData()
        partquery['alignstack'] = self.alignstackdata
        particles = partquery.query()
    elif self.params['clusterid'] is not None:
        partquery = appiondata.ApClusteringParticleData()
        partquery['clusterstack'] = self.clusterstackdata
        particles = partquery.query()
    apDisplay.printMsg("Complete in "+apDisplay.timeString(time.time()-querystart))

    ### classify every particle, logging each decision to test.log
    includeParticle = []
    excludeParticle = 0
    badscore = 0
    badshift = 0
    badspread = 0
    logfile = open("test.log", "w")
    rownum = 0
    for particle in particles:
        rownum += 1
        if 'alignparticle' in particle:
            # clustering particle: alignment info lives one level down
            aligned = particle['alignparticle']
            classnum = int(particle['refnum'])-1
        else:
            aligned = particle
            classnum = int(particle['ref']['refnum'])-1
        emanstackpartnum = aligned['stackpart']['particleNumber']-1
        ### drop particles that moved too far during alignment
        if self.params['maxshift'] is not None:
            shift = math.hypot(aligned['xshift'], aligned['yshift'])
            if shift > self.params['maxshift']:
                excludeParticle += 1
                logfile.write("%d\t%d\t%d\texclude\n"%(rownum, emanstackpartnum, classnum))
                badshift += 1
                continue
        if self.params['minscore'] is not None:
            ### drop low-scoring particles
            if aligned['score'] is not None and aligned['score'] < self.params['minscore']:
                excludeParticle += 1
                logfile.write("%d\t%d\t%d\texclude\n"%(rownum, emanstackpartnum, classnum))
                badscore += 1
                continue
            ### drop low-spread particles (threshold shared with minscore)
            if aligned['spread'] is not None and aligned['spread'] < self.params['minscore']:
                excludeParticle += 1
                logfile.write("%d\t%d\t%d\texclude\n"%(rownum, emanstackpartnum, classnum))
                badspread += 1
                continue
        ### keep or drop based on class membership
        if includelist and classnum in includelist:
            includeParticle.append(emanstackpartnum)
            logfile.write("%d\t%d\t%d\tinclude\n"%(rownum, emanstackpartnum, classnum))
        elif excludelist and classnum not in excludelist:
            includeParticle.append(emanstackpartnum)
            logfile.write("%d\t%d\t%d\tinclude\n"%(rownum, emanstackpartnum, classnum))
        else:
            excludeParticle += 1
            logfile.write("%d\t%d\t%d\texclude\n"%(rownum, emanstackpartnum, classnum))
    logfile.close()
    includeParticle.sort()
    if badshift > 0:
        apDisplay.printMsg("%d paricles had a large shift"%(badshift))
    if badscore > 0:
        apDisplay.printMsg("%d paricles had a low score"%(badscore))
    if badspread > 0:
        apDisplay.printMsg("%d paricles had a low spread"%(badspread))
    apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

    ### write kept particle numbers, one per line
    self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
    apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
    kf = open(self.params['keepfile'], "w")
    for partnum in includeParticle:
        kf.write(str(partnum)+"\n")
    kf.close()

    ### describe the substack
    numparticles = len(includeParticle)
    if excludelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes excluded"
            % (numparticles, self.params['dropclasslist']))
    elif includelist:
        self.params['description'] += (
            " ... %d particle substack with %s classes included"
            % (numparticles, self.params['keepclasslist']))

    ### create, verify and average the new sub stack
    apStack.makeNewStack(oldstack, newstack, self.params['keepfile'],
                         bad=self.params['savebad'])
    if not os.path.isfile(newstack):
        apDisplay.printError("No stack was created")
    apStack.averageStack(stack=newstack)

    ### record the substack in the database
    if self.params['commit'] is True:
        apStack.commitSubStack(self.params)
        newstackid = apStack.getStackIdFromPath(newstack)
        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)