def start(self):
		knownstackdata = apStack.getOnlyStackData(self.params['knownstackid'])
		fullstackdata = apStack.getOnlyStackData(self.params['fullstackid'])

		### get good particle numbers
		includeParticle, tiltParticlesData = self.getGoodParticles()
		self.numpart = len(includeParticle)

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile"+self.timestamp+".lst")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### make a new stack of the tilted particles from that run
		fullstackfile = os.path.join(fullstackdata['path']['path'], fullstackdata['name'])
		sb = os.path.splitext(fullstackdata['name'])
		newname = "tiltpairsub%d" % self.params['knownstackid']+sb[-1]
		newstackfile = os.path.join(self.params['rundir'], newname)
		apFile.removeStack(newstackfile, warn=False)
		apStack.makeNewStack(fullstackfile, newstackfile, self.params['keepfile'])
		if not os.path.isfile(newstackfile):
			apDisplay.printError("No stack was created")
		self.params['stackid'] = self.params['fullstackid']
		apStack.commitSubStack(self.params, newname, sorted=False)
		apStack.averageStack(stack=newstackfile)
		newstackid = apStack.getStackIdFromPath(newstackfile)
		if self.params['meanplot'] is True:
			apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
			apStackMeanPlot.makeStackMeanPlot(newstackid)
	def start(self):
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		### make sure that old stack is numbered
		alignedstack = os.path.join(self.params['rundir'], 'alignstack.hed')
		apStack.checkForPreviousStack(alignedstack)

		### run centering algorithm
		self.runMaxlike()

		### create aligned stacks
		partlist = self.readPartDocFile()
		stackfile = self.createAlignedStacks(partlist)
		if not os.path.isfile(alignedstack):
			apDisplay.printError("No stack was created")

		### get number of particles
		numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
		self.writeFakeKeepFile(numpart)
		self.params['description'] += (
			" ... %d maxlike centered substack id %d" 
			% (numpart, self.params['stackid']))
		
		apStack.commitSubStack(self.params, newname='alignstack.hed', centered=True)
		apStack.averageStack(stack=alignedstack)
        def start(self):
                #new stack path
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

                #make sure that old stack is numbered
                apEMAN.checkStackNumbering(oldstack)

                alignedstack = os.path.join(self.params['rundir'], 'ali.img')
                badstack = os.path.join(self.params['rundir'], 'bad.img')
                apStack.checkForPreviousStack(alignedstack)

                #run centering algorithm
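                # presumably shifts each particle within the mask by up to maxshift pixels
                # and writes rejected particles to bad.img, which is averaged below if present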
                apStack.centerParticles(oldstack, self.params['mask'], self.params['maxshift'])
                self.params['keepfile'] = os.path.join(self.params['rundir'],'keepfile.txt')
                apEMAN.writeStackParticlesToFile(alignedstack, self.params['keepfile'])
                if not os.path.isfile(alignedstack):
                        apDisplay.printError("No stack was created")

                #get number of particles
                f = open(self.params['keepfile'], "r")
                numparticles = len(f.readlines())
                f.close()
                self.params['description'] += (
                        (" ... %d eman centered substack id %d" 
                        % (numparticles, self.params['stackid']))
                )
                
                apStack.commitSubStack(self.params, newname='ali.hed', centered=True)
                apStack.averageStack(stack=alignedstack)
                if (os.path.exists(badstack)):
                        apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
    def start(self):
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

        ### checks
        centerstack = os.path.join(self.params['rundir'], 'align.img')
        badstack = os.path.join(self.params['rundir'], 'bad.img')
        apStack.checkForPreviousStack(centerstack)

        ### run centering algorithm
        keeplist = self.centerParticles(oldstack, centerstack, badstack)
        if not os.path.isfile(centerstack):
            apDisplay.printError("No stack was created")

        ### write kept particle numbers to file
        ### (assumes centerParticles returns the list of kept particle numbers)
        self.params['keepfile'] = os.path.join(self.params['rundir'],
                                               'keepfile.txt')
        kf = open(self.params['keepfile'], 'w')
        for partnum in keeplist:
            kf.write('%d\n' % partnum)
        kf.close()

        ### get number of particles
        numparticles = len(keeplist)
        self.params['description'] += (
            (" ... %d eman centered substack id %d" %
             (numparticles, self.params['stackid'])))

        apStack.commitSubStack(self.params,
                               newname=os.path.basename(centerstack),
                               centered=True)
        apStack.averageStack(stack=centerstack)
        if os.path.isfile(badstack):
            apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
    def start(self):
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

        ### make sure that old stack is numbered
        alignedstack = os.path.join(self.params['rundir'], 'alignstack.hed')
        apStack.checkForPreviousStack(alignedstack)

        ### run centering algorithm
        self.runMaxlike()

        ### create aligned stacks
        partlist = self.readPartDocFile()
        stackfile = self.createAlignedStacks(partlist)
        if not os.path.isfile(alignedstack):
            apDisplay.printError("No stack was created")

        ### get number of particles
        numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
        self.writeFakeKeepFile(numpart)
        self.params['description'] += (
            " ... %d maxlike centered substack id %d" %
            (numpart, self.params['stackid']))

        apStack.commitSubStack(self.params,
                               newname='alignstack.hed',
                               centered=True)
        apStack.averageStack(stack=alignedstack)
	def start(self):
		#new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		#make sure that old stack is numbered
		apEMAN.checkStackNumbering(oldstack)

		alignedstack = os.path.join(self.params['rundir'], 'ali.img')
		badstack = os.path.join(self.params['rundir'], 'bad.img')
		apStack.checkForPreviousStack(alignedstack)

		#run centering algorithm
		apStack.centerParticles(oldstack, self.params['mask'], self.params['maxshift'])
		self.params['keepfile'] = os.path.join(self.params['rundir'],'keepfile.txt')
		apEMAN.writeStackParticlesToFile(alignedstack, self.params['keepfile'])
		if not os.path.isfile(alignedstack):
			apDisplay.printError("No stack was created")

		#get number of particles
		f = open(self.params['keepfile'], "r")
		numparticles = len(f.readlines())
		f.close()
		self.params['description'] += (
			(" ... %d eman centered substack id %d" 
			% (numparticles, self.params['stackid']))
		)
		
		apStack.commitSubStack(self.params, newname='ali.hed', centered=True)
		apStack.averageStack(stack=alignedstack)
		if (os.path.exists(badstack)):
			apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
	def start(self):
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		### checks
		centerstack = os.path.join(self.params['rundir'], 'align.img')
		badstack = os.path.join(self.params['rundir'], 'bad.img')
		apStack.checkForPreviousStack(centerstack)

		### run centering algorithm
		keeplist = self.centerParticles(oldstack, centerstack, badstack)
		if not os.path.isfile(centerstack):
			apDisplay.printError("No stack was created")

		### write kept particle numbers to file
		### (assumes centerParticles returns the list of kept particle numbers)
		self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')
		kf = open(self.params['keepfile'], 'w')
		for partnum in keeplist:
			kf.write('%d\n' % partnum)
		kf.close()

		### get number of particles
		numparticles = len(keeplist)
		self.params['description'] += (
			(" ... %d eman centered substack id %d" 
			% (numparticles, self.params['stackid']))
		)
		
		apStack.commitSubStack(self.params, newname=os.path.basename(centerstack), centered=True)
		apStack.averageStack(stack=centerstack)
		if os.path.isfile(badstack):
			apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
    def start(self):

        self.stackdata = apStack.getOnlyStackData(self.params['stackid'],
                                                  msg=False)

        # creating a keepfile, fixed filename
        self.params['keepfile'] = os.path.join(self.params['newstackpath'],
                                               "keepfile.lst")

        #path to the old stack
        oldstack = os.path.join(self.stackdata['path']['path'],
                                self.stackdata['name'])

        #path to the new stack. the stack path will be provided by the db in the future
        newstack = os.path.join(self.params['newstackpath'],
                                self.params['newstack'])

        # count the particles in the new stack by reading its 1024-byte
        # per-particle header records one at a time
        h = open(newstack, 'rb')
        numimg = 0
        while h.read(1024):
            numimg += 1
        h.close()

        # use this function so the particle numbers match those used when the stack was downloaded
        stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])

        # the keepfile has to be a proc2d-style list, so build a dictionary that maps the
        # unique particle id to its stack position, decremented by 1 so it counts from 0
        # to numimg-1
        partdict = {}
        dbids = [(part.dbid, part['particleNumber']) for part in stackpartdata]
        for part in dbids:
            partdict[int(part[0])] = int(part[1] - 1)

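        # float word 19 of each 1024-byte header record is read with numpy.memmap below;
        # it presumably holds the particle's unique database id written at download time,
        # which partdict maps back to a zero-based stack position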
        #writing the keepfile
        f = open(self.params['keepfile'], 'w')
        for i in range(0, numimg):
            partnumber = partdict[int(
                numpy.memmap(newstack,
                             dtype="float32",
                             offset=i * 1024 + 19 * 4)[0])]
            f.write('%d\n' % partnumber)
        f.close()

        newcreatestack = os.path.join(self.params['rundir'],
                                      self.params['newstack'])
        apStack.makeNewStack(oldstack,
                             newcreatestack,
                             self.params['keepfile'],
                             bad=True)
        apStack.commitSubStack(self.params,
                               self.params['newstack'],
                               sorted=False)
        apStack.averageStack(stack=newcreatestack)
        newstackid = apStack.getStackIdFromPath(newcreatestack)
    def start(self):
        stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        newname = stackdata['name']

        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
        newstack = os.path.join(self.params['rundir'], newname)

        # calculate slope and intercept of the cutoff line from the two (x, y) points given
        slope = (self.params['maxy'] - self.params['miny']) / (
            self.params['maxx'] - self.params['minx'])
        intercept = self.params['miny'] - (slope * self.params['minx'])

        #               print slope
        #               print intercept

        numparticles = 0

        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        f = open(self.params['keepfile'], 'w')

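        # keep a particle if its mean falls inside the (minx, maxx) window and its
        # stdev lies on the requested side of the line y = slope*mean + intercept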
        for stackpart in stackparts:
            #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
            if stackpart['mean'] > self.params['minx'] and stackpart[
                    'mean'] < self.params['maxx']:
                #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                calcY = slope * stackpart['mean'] + intercept
                if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
                        (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
                    emanpartnum = stackpart['particleNumber'] - 1
                    f.write('%i\n' % emanpartnum)
                    numparticles += 1

        f.close()
        self.params['description'] += (
            (" ... %d particle substack of stackid %d" %
             (numparticles, self.params['stackid'])))

        #create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=True)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
        apStack.averageStack(stack=newstack)

        # stack mean plot
        newstackid = apStack.getStackIdFromPath(newstack)
        apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
        apStackMeanPlot.makeStackMeanPlot(newstackid)
    def start(self):
        #find stack
        stackparticles = apStack.getStackParticlesFromId(
            self.params['stackid'])

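        # stacklist holds the sub-stack sizes to create; evenLogSplit presumably
        # returns logarithmically spaced sizes from logstart up to the full stack size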
        if self.params['logsplit']:
            #stacklist = oldLogSplit(self.params['logstart'], len(stackparticles), self.params['logdivisions'])
            stacklist = evenLogSplit(self.params['logstart'],
                                     len(stackparticles))
        elif self.params['nptcls']:
            stacklist = [self.params['nptcls']]
        else:
            apDisplay.printError("Please specify nptlcs or logsplit")

        oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(oldstackdata['path']['path'],
                                oldstackdata['name'])
        #create run directory
        if self.params['rundir'] is None:
            path = oldstackdata['path']['path']
            path = os.path.split(os.path.abspath(path))[0]
            self.params['rundir'] = path
        apDisplay.printMsg("Out directory: " + self.params['rundir'])

        origdescription = self.params['description']
        for stack in stacklist:
            self.params['description'] = (
                origdescription +
                (" ... split %d particles from original stackid=%d" %
                 (stack, self.params['stackid'])))
            workingdir = os.path.join(self.params['rundir'], str(stack))

            #check for previously committed stacks
            newstack = os.path.join(workingdir, self.params['stackname'])
            apStack.checkForPreviousStack(newstack)

            #create rundir and change to that directory
            apDisplay.printMsg("Run directory: " + workingdir)
            apParam.createDirectory(workingdir)
            os.chdir(workingdir)

            #create random list
            lstfile = makeRandomLst(stack, stackparticles, self.params)
            #shutil.copy(lstfile, workingdir)

            #make new stack
            apStack.makeNewStack(oldstack, newstack, lstfile)
            #apStack.makeNewStack(lstfile, self.params['stackname'])

            #commit new stack
            self.params['keepfile'] = os.path.abspath(lstfile)
            self.params['rundir'] = os.path.abspath(workingdir)
            apStack.commitSubStack(self.params)
	def start(self):
		#find stack
		stackparticles = apStack.getStackParticlesFromId(self.params['stackid'])

		if self.params['logsplit']:
			#stacklist = oldLogSplit(self.params['logstart'], len(stackparticles), self.params['logdivisions'])
			stacklist = evenLogSplit(self.params['logstart'], len(stackparticles))
		elif self.params['nptcls']:
			stacklist = [self.params['nptcls']]
		else:
			apDisplay.printError("Please specify nptlcs or logsplit")

		oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
		#create run directory
		if self.params['rundir'] is None:
			path = oldstackdata['path']['path']
			path = os.path.split(os.path.abspath(path))[0]
			self.params['rundir'] = path
		apDisplay.printMsg("Out directory: "+self.params['rundir'])

		origdescription=self.params['description']
		for stack in stacklist:
			self.params['description'] = (
				origdescription+
				(" ... split %d particles from original stackid=%d"
				% (stack, self.params['stackid']))
			)
			workingdir = os.path.join(self.params['rundir'], str(stack))

			#check for previously committed stacks
			newstack = os.path.join(workingdir ,self.params['stackname'])
			apStack.checkForPreviousStack(newstack)

			#create rundir and change to that directory
			apDisplay.printMsg("Run directory: "+workingdir)
			apParam.createDirectory(workingdir)
			os.chdir(workingdir)

			#create random list
			lstfile = makeRandomLst(stack, stackparticles, self.params)
			#shutil.copy(lstfile, workingdir)

			#make new stack
			apStack.makeNewStack(oldstack, newstack, lstfile)
			#apStack.makeNewStack(lstfile, self.params['stackname'])

			#commit new stack
			self.params['keepfile'] = os.path.abspath(lstfile)
			self.params['rundir'] = os.path.abspath(workingdir)
			apStack.commitSubStack(self.params)
        def start(self):
                stackparts = apStack.getStackParticlesFromId(self.params['stackid'])
                
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                newname = stackdata['name']
                
                oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
                newstack = os.path.join(self.params['rundir'], newname)

                # calculate slope and intercept of the cutoff line from the two (x, y) points given
                slope = (self.params['maxy'] - self.params['miny']) / (self.params['maxx'] - self.params['minx'])
                intercept = self.params['miny'] - (slope*self.params['minx'])
                
#               print slope
#               print intercept
                
                numparticles = 0
                
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                f=open(self.params['keepfile'],'w')
                
                for stackpart in stackparts:
                        #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                        if stackpart['mean'] > self.params['minx'] and stackpart['mean'] < self.params['maxx']:
                                #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                                calcY = slope*stackpart['mean']+intercept 
                                if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
                                        (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
                                        emanpartnum = stackpart['particleNumber']-1
                                        f.write('%i\n' % emanpartnum)
                                        numparticles+=1
                                        
                f.close()
                self.params['description'] +=(
                                (" ... %d particle substack of stackid %d" 
                                 % (numparticles, self.params['stackid']))
                        )

                #create the new sub stack
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")
                apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
                apStack.averageStack(stack=newstack)

                # stack mean plot
                newstackid = apStack.getStackIdFromPath(newstack)
                apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
                apStackMeanPlot.makeStackMeanPlot(newstackid)
	def start(self):
		
		self.stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)
		
		# creating a keepfile, fixed filename
		self.params['keepfile'] = os.path.join(self.params['newstackpath'],"keepfile.lst")

		#path to the old stack
		oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		#path to the new stack. the stack path will be provided by the db in the future
		newstack = os.path.join(self.params['newstackpath'], self.params['newstack'])

		# count the particles in the new stack by reading its 1024-byte
		# per-particle header records one at a time
		h = open(newstack, 'rb')
		numimg = 0
		while h.read(1024):
			numimg += 1
		h.close()

		# use this function so the particle numbers match those used when the stack was downloaded
		stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])
		
		# the keepfile has to be a proc2d-style list, so build a dictionary that maps the
		# unique particle id to its stack position, decremented by 1 so it counts from 0
		# to numimg-1
		partdict = {}
		dbids = [(part.dbid,part['particleNumber']) for part in stackpartdata]
		for part in dbids:
			partdict[int(part[0])] = int(part[1]-1)

		#writing the keepfile
		f = open(self.params['keepfile'], 'w')
		for i in range(0,numimg):
			partnumber = partdict[int(numpy.memmap(newstack, dtype="float32", offset=i*1024+19*4)[0])]
			f.write('%d\n' % partnumber)
		f.close()

		newcreatestack = os.path.join(self.params['rundir'],self.params['newstack'])
		apStack.makeNewStack(oldstack, newcreatestack, self.params['keepfile'], bad=True)
		apStack.commitSubStack(self.params, self.params['newstack'], sorted=False)
		apStack.averageStack(stack=newcreatestack)
		newstackid = apStack.getStackIdFromPath(newcreatestack)
	def start(self):
		self.params['output_fileformat'] = 'mrc'
		newstackname='framealigned.hed'
		stackdata=apStack.getStackParticlesFromId(self.params['stackid'])
		stackrundata=apStack.getOnlyStackData(self.params['stackid'])
		apix=stackrundata['pixelsize']*1e10
		kev=stackdata[0]['particle']['image']['scope']['high tension']/1000
		origstackpath=os.path.join(stackrundata['path']['path'],stackrundata['name'])
		boxsize=stackdata[0]['stackRun']['stackParams']['boxSize']
		binning=stackdata[0]['stackRun']['stackParams']['bin']	
		
		#determine camera type
		cameratype=stackdata[0]['particle']['image']['camera']['ccdcamera']['name']
		if self.params['override_camera'] is not None:
			cameratype=self.params['override_camera']
		
		#group particles by their parent image (box files are written per image later)
		imagedict={}
		masterlist=[]
		for particle in stackdata:
			parentimage=particle['particle']['image']['filename']
			if parentimage in imagedict.keys():
				imagedict[parentimage].append(particle['particle'])
			else:
				imagedict[parentimage]=[]
				imagedict[parentimage].append(particle['particle'])
			index=len(imagedict[parentimage])-1
			masterlist.append({'particle':particle,'key':parentimage,'index':index})
		#print masterlist
		
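		# imagedict maps each parent image filename to its particles; masterlist keeps
		# every particle with its image key and per-image index so the original stack
		# order can be rebuilt below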
		for key in imagedict:
			particlelst=imagedict[key]
			parentimage=key
			framespath=particlelst[0]['image']['session']['frame path']
			
			print cameratype
			if 'Gatan' in cameratype:
				#prepare frames
				print framespath
				
				#prepare frame directory
				framespathname=os.path.join(self.params['rundir'],parentimage+'.frames')
				if os.path.exists(framespathname):
					pass
				else:
					os.mkdir(framespathname)
				print framespathname
				
				mrcframestackname=parentimage+'.frames.mrc'
				
				print mrcframestackname
				
				nframes=particlelst[0]['image']['camera']['nframes']
				
				print "Extracting frames for", mrcframestackname
				for n in range(nframes):
					a=mrc.read(os.path.join(framespath,mrcframestackname),n)
					numpil.write(a,imfile=os.path.join(framespathname,'RawImage_%d.tif' % (n)), format='tiff')
				
			elif 'DE' in cameratype:
				framespathname=os.path.join(framespath,parentimage+'.frames')
			
			print os.getcwd()
			print framespathname
			#generate DE script call
			if os.path.exists(framespathname):
				print "found frames for", parentimage

				nframes=particlelst[0]['image']['camera']['nframes']
				boxname=parentimage + '.box'
				boxpath=os.path.join(framespathname,boxname)
				shiftdata={'scale':1,'shiftx':0,'shifty':0}

				#flatfield references
				brightrefpath=particlelst[0]['image']['bright']['session']['image path']
				brightrefname=particlelst[0]['image']['bright']['filename']
				brightnframes=particlelst[0]['image']['bright']['camera']['nframes']
				darkrefpath=particlelst[0]['image']['dark']['session']['image path']
				darkrefname=particlelst[0]['image']['dark']['filename']
				darknframes=particlelst[0]['image']['dark']['camera']['nframes']
				brightref=os.path.join(brightrefpath,brightrefname+'.mrc')
				darkref=os.path.join(darkrefpath,darkrefname+'.mrc')
				print brightref
				print darkref			
				apBoxer.processParticleData(particlelst[0]['image'],boxsize,particlelst,shiftdata,boxpath)
				print framespathname			

				#set appion specific options
				self.params['gainreference_filename']=brightref
				self.params['gainreference_framecount']=brightnframes
				self.params['darkreference_filename']=darkref
				self.params['darkreference_framecount']=darknframes
				self.params['input_framecount']=nframes
				self.params['boxes_fromfiles']=1
				#self.params['run_verbosity']=3
				self.params['output_invert']=0
				#self.params['radiationdamage_apix=']=apix
				self.params['radiationdamage_voltage']=kev
				#self.params['boxes_boxsize']=boxsize

				outpath=os.path.join(self.params['rundir'],key)
				if os.path.exists(outpath):
					shutil.rmtree(outpath)
				os.mkdir(outpath)
				
				command=['deProcessFrames.py']
				keys=self.params.keys()
				keys.sort()
				for paramkey in keys:
					param = self.params[paramkey]
					#print paramkey, param, type(param)
					if param is not None and param != '':
						option = '--%s=%s' % (paramkey, param)
						command.append(option)
				command.append(outpath)
				command.append(framespathname)
				print command
				if self.params['dryrun'] is False:
					subprocess.call(command)
					
		
		#recreate particle stack
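		# append each frame-corrected particle to the new stack with proc2d; if no
		# corrected frames were found for an image, copy the original particle from
		# the input stack so the stack order and length are preserved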
		for n,particledict in enumerate(masterlist):
			parentimage=particledict['key']
			correctedpath=os.path.join(self.params['rundir'],parentimage)
			print correctedpath
			if os.path.exists(correctedpath):
			
				correctedparticle=glob.glob(os.path.join(correctedpath,('%s.*.region_%03d.*' % (parentimage,particledict['index']))))
				print os.path.join(correctedpath,('%s.*.region_%03d.*' % (parentimage,particledict['index'])))
				print correctedparticle
				#sys.exit()
				command=['proc2d',correctedparticle[0], newstackname]
				if self.params['output_rotation'] !=0:
					command.append('rot=%d' % self.params['output_rotation'])
				
				if self.params['show_DE_command'] is True:
					print command
				subprocess.call(command)
			else:
				print "did not find frames for ", parentimage
				command=['proc2d', origstackpath, newstackname,('first=%d' % n), ('last=%d' % n)]
				print command
				if self.params['dryrun'] is False:
					subprocess.call(command)
				
		#upload stack
		
		#make keep file
		self.params['keepfile']='keepfile.txt'
		f=open(self.params['keepfile'],'w')
		for n in range(len(masterlist)):
			f.write('%d\n' % (n))
		f.close()
		
		apStack.commitSubStack(self.params, newname=newstackname)
		apStack.averageStack(stack=newstackname)
		
		print "Done!!!!"
	def start(self):
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
		newstack = os.path.join(self.params['rundir'], stackdata['name'])
		apStack.checkForPreviousStack(newstack)

		includelist = []
		excludelist = []
		### list of classes to be excluded
		if self.params['dropclasslist'] is not None:
			excludestrlist = self.params['dropclasslist'].split(",")
			for excludeitem in excludestrlist:
				excludelist.append(int(excludeitem.strip()))
		apDisplay.printMsg("Exclude list: "+str(excludelist))

		### list of classes to be included
		if self.params['keepclasslist'] is not None:
			includestrlist = self.params['keepclasslist'].split(",")
			for includeitem in includestrlist:
				includelist.append(int(includeitem.strip()))

		### or read from keepfile
		elif self.params['keepfile'] is not None:
			keeplistfile = open(self.params['keepfile'])
			for line in keeplistfile:
				if self.params['excludefrom'] is True:
					excludelist.append(int(line.strip()))
				else:
					includelist.append(int(line.strip()))
			keeplistfile.close()
		apDisplay.printMsg("Include list: "+str(includelist))

		### get particles from align or cluster stack
		apDisplay.printMsg("Querying database for particles")
		q0 = time.time()

		if self.params['alignid'] is not None:
			# DIRECT SQL STUFF
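			# a raw SQL join is used here, presumably because it is much faster than a
			# sinedon object query for large alignment runs; it returns the stack
			# particle number and reference class number for every aligned particle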
			sqlcmd = "SELECT " + \
				"apd.partnum, " + \
				"apd.xshift, apd.yshift, " + \
				"apd.rotation, apd.mirror, " + \
				"apd.spread, apd.correlation, " + \
				"apd.score, apd.bad, " + \
				"spd.particleNumber, " + \
				"ard.refnum "+ \
				"FROM ApAlignParticleData apd " + \
				"LEFT JOIN ApStackParticleData spd ON " + \
				"(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) " + \
				"LEFT JOIN ApAlignReferenceData ard ON" + \
				"(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) " + \
				"WHERE `REF|ApAlignStackData|alignstack` = %i"%(self.params['alignid'])
			# These are AlignParticles
			particles = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)

		elif self.params['clusterid'] is not None:
			clusterpartq = appiondata.ApClusteringParticleData()
			clusterpartq['clusterstack'] = self.clusterstackdata
			# These are ClusteringParticles
			particles = clusterpartq.query()
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-q0)))

		### write included particles to text file
		includeParticle = []
		excludeParticle = 0
		badscore = 0
		badshift = 0
		badspread = 0

		f = open("test.log", "w")
		count = 0
		t0 = time.time()
		apDisplay.printMsg("Parsing particle information")

		# find out if there is alignparticle info:
		is_cluster_p = False
		# 'alignparticle' is a key of every particle in particles when they
		# are ClusteringParticles
		if 'alignparticle' in particles[0]:
			is_cluster_p = True

		for part in particles:
			count += 1
			if is_cluster_p:
				# alignpart is an item of ClusteringParticle
				alignpart = part['alignparticle']
				try:
					classnum = int(part['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
				emanstackpartnum = alignpart['stackpart']['particleNumber']-1
			else:
				# the particle row comes directly from the SQL query of aligned particles
				alignpart = part
				try:
					classnum = int(alignpart['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
					classnum = None
				emanstackpartnum = int(alignpart['particleNumber'])-1

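			### optional filters: drop particles whose alignment shift exceeds maxshift,
			### or whose score or spread falls below minscore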
			### check shift
			if self.params['maxshift'] is not None:
				shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
				if shift > self.params['maxshift']:
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badshift += 1
					continue

			if self.params['minscore'] is not None:
				### check score
				if ( alignpart['score'] is not None
				 and alignpart['score'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badscore += 1
					continue

				### check spread
				if ( alignpart['spread'] is not None
				 and alignpart['spread'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badspread += 1
					continue

			if classnum is not None:
				if includelist and (classnum in includelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				elif excludelist and not (classnum in excludelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				else:
					excludeParticle += 1
					f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
			else:
				excludeParticle += 1
				f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
			
		f.close()

		includeParticle.sort()
		if badshift > 0:
			apDisplay.printMsg("%d particles had a large shift"%(badshift))
		if badscore > 0:
			apDisplay.printMsg("%d particles had a low score"%(badscore))
		if badspread > 0:
			apDisplay.printMsg("%d particles had a low spread"%(badspread))
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-t0)))
		apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### get number of particles
		numparticles = len(includeParticle)
		if excludelist:
			self.params['description'] += ( " ... %d particle substack with %s classes excluded"
				% (numparticles, self.params['dropclasslist']))
		elif includelist:
			self.params['description'] += ( " ... %d particle substack with %s classes included"
				% (numparticles, self.params['keepclasslist']))

		outavg = os.path.join(self.params['rundir'],"average.mrc")

		### create the new sub stack
		# first check if virtual stack
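		# a virtual stack has no file of its own; its particles point into an ancestor
		# stack file, so the kept indices are mapped back to particle numbers in that file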
		if not os.path.isfile(oldstack):
			vstackdata=apStack.getVirtualStackParticlesFromId(self.params['stackid'])
			vparts = vstackdata['particles']
			oldstack = vstackdata['filename']
			# get subset of virtualstack
			vpartlist = [int(vparts[p]['particleNumber'])-1 for p in includeParticle]
	
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params['savebad'])

			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=vpartlist)
		else:
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=includeParticle)

		if self.params['writefile'] is True and not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")

		if self.params['commit'] is True:
			apStack.commitSubStack(self.params,included=includeParticle)
			newstackid = apStack.getStackIdFromPath(newstack)
			apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
    def start(self):
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
        newstack = os.path.join(self.params['rundir'], stackdata['name'])
        apStack.checkForPreviousStack(newstack)

        includelist = []
        excludelist = []
        ### list of classes to be excluded
        if self.params['dropclasslist'] is not None:
            excludestrlist = self.params['dropclasslist'].split(",")
            for excludeitem in excludestrlist:
                excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))

        ### list of classes to be included
        if self.params['keepclasslist'] is not None:
            includestrlist = self.params['keepclasslist'].split(",")
            for includeitem in includestrlist:
                includelist.append(int(includeitem.strip()))

        ### or read from keepfile
        elif self.params['keepfile'] is not None:
            keeplistfile = open(self.params['keepfile'])
            for line in keeplistfile:
                if self.params['excludefrom'] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
            keeplistfile.close()
        apDisplay.printMsg("Include list: " + str(includelist))

        ### get particles from align or cluster stack
        apDisplay.printMsg("Querying database for particles")
        q0 = time.time()
        if self.params['alignid'] is not None:
            alignpartq = appiondata.ApAlignParticleData()
            alignpartq['alignstack'] = self.alignstackdata
            particles = alignpartq.query()
        elif self.params['clusterid'] is not None:
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq['clusterstack'] = self.clusterstackdata
            particles = clusterpartq.query()
        apDisplay.printMsg("Complete in " +
                           apDisplay.timeString(time.time() - q0))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        badscore = 0
        badshift = 0
        badspread = 0
        f = open("test.log", "w")
        count = 0
        for part in particles:
            count += 1
            #partnum = part['partnum']-1
            if 'alignparticle' in part:
                alignpart = part['alignparticle']
                classnum = int(part['refnum']) - 1
            else:
                alignpart = part
                classnum = int(part['ref']['refnum']) - 1
            emanstackpartnum = alignpart['stackpart']['particleNumber'] - 1

            ### check shift
            if self.params['maxshift'] is not None:
                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                if shift > self.params['maxshift']:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badshift += 1
                    continue

            if self.params['minscore'] is not None:
                ### check score
                if (alignpart['score'] is not None
                        and alignpart['score'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badscore += 1
                    continue

                ### check spread
                if (alignpart['spread'] is not None
                        and alignpart['spread'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badspread += 1
                    continue

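            # keep the particle if its class is in the include list, or if an exclude
            # list was given and its class is not in it; otherwise drop it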
            if includelist and classnum in includelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" %
                        (count, emanstackpartnum, classnum))
            elif excludelist and not classnum in excludelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" %
                        (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n" %
                        (count, emanstackpartnum, classnum))

        f.close()
        includeParticle.sort()
        if badshift > 0:
            apDisplay.printMsg("%d particles had a large shift" % (badshift))
        if badscore > 0:
            apDisplay.printMsg("%d particles had a low score" % (badscore))
        if badspread > 0:
            apDisplay.printMsg("%d particles had a low spread" % (badspread))
        apDisplay.printMsg("Keeping " + str(len(includeParticle)) +
                           " and excluding " + str(excludeParticle) +
                           " particles")

        #print includeParticle

        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack with %s classes excluded" %
                (numparticles, self.params['dropclasslist']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack with %s classes included" %
                (numparticles, self.params['keepclasslist']))

        ### create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=self.params['savebad'])

        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        apStack.averageStack(stack=newstack)
        if self.params['commit'] is True:
            apStack.commitSubStack(self.params)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
	def start(self):
		#old stack size
		stacksize = apStack.getNumberStackParticlesFromId(self.params['stackid'])

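		# several sub-stack modes are supported: a first/last range, a random sample,
		# an interleaved split into N parts, an exclude file, or a user-supplied keepfile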
		# if exclude or include lists are not defined...
		if self.params['exclude'] is None and self.params['include'] is None:
			# if first and last are specified, create a file
			if self.params['first'] is not None and self.params['last'] is not None:
				stp = str(self.params['first'])
				enp = str(self.params['last'])
				fname = 'sub'+str(self.params['stackid'])+'_'+stp+'-'+enp+'.lst'
				self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
				apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
				f=open(self.params['keepfile'],'w')
				for i in range(self.params['first'],self.params['last']+1):
					f.write('%d\n' % (int(i)-1))
				f.close()
			# if a random sample size is given, generate the random list and create the file
			elif self.params['random'] is not None:
				#numOfRandomParticles = str(self.params['random'])
				#fname = 'random'+str(self.params['stackid'])+'_'+numOfRandomParticles+'.lst'
				fname = "random%d_%d.lst"%(self.params['stackid'], self.params['random'])
				self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
				apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
				# create a file
				f=open(self.params['keepfile'],'w')
				# sample the requested number of random particle numbers from the full stack
				randomList = random.sample(xrange(stacksize), self.params['random'])
				randomList.sort()
				for partnum in randomList:
					f.write('%d\n' % partnum)
				f.close()				
				
			# if splitting, create files containing the split values
			elif self.params['split'] > 1:
				for i in range(self.params['split']):
					fname = 'sub'+str(self.params['stackid'])+'.'+str(i+1)+'.lst'
					self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
					apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
					f = open(self.params['keepfile'],'w')
					for p in range(stacksize):
						if (p % self.params['split'])-i==0:
							f.write('%i\n' % p)
					f.close()

			# if the exclude-file option is specified, the given keepfile lists particles to exclude
			elif self.params['excludefile'] is True:
				oldkf = open(self.params['keepfile'])
				partlist = []
				for line in oldkf:
					particle=line.strip()
					try:
						particle = int(particle)
					except:
						continue
					partlist.append(particle)
				oldkf.close()
				# create a new keep list containing every particle except those listed
				apDisplay.printMsg("Converting exclude file to keep file")
				newkeepfile = "tmpnewkeepfile.txt"
				newkf = open(newkeepfile,'w')
				for p in range(stacksize):
					if p not in partlist:
						newkf.write("%i\n"%p)
				newkf.close()
				self.params['keepfile'] = os.path.abspath(newkeepfile)

			# otherwise, just copy the file
			elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
				shutil.copy(self.params['keepfile'], os.path.basename(self.params['keepfile']))

		# if either the exclude or the include list is defined
		elif self.params['exclude'] or self.params['include']:
			
			### list of particles to be excluded
			excludelist = []
			if self.params['exclude'] is not None:
				excludestrlist = self.params['exclude'].split(",")
				for excld in excludestrlist:
					excludelist.append(int(excld.strip()))
			apDisplay.printMsg("Exclude list: "+str(excludelist))

			### list of particles to be included
			includelist = []
			if self.params['include'] is not None:
				includestrlist = self.params['include'].split(",")
				for incld in includestrlist:
					includelist.append(int(incld.strip()))		
			apDisplay.printMsg("Include list: "+str(includelist))


		#new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		newname = stackdata['name']

		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		#if include or exclude list is given...			
		if self.params['include'] is not None or self.params['exclude'] is not None:
		
			includeParticle = []
			excludeParticle = 0

			for partnum in range(stacksize):
				if includelist and partnum in includelist:
					includeParticle.append(partnum)
				elif excludelist and not partnum in excludelist:
					includeParticle.append(partnum)
				else:
					excludeParticle += 1
			includeParticle.sort()
		
			### write kept particles to file
			self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
			apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
			kf = open(self.params['keepfile'], "w")
			for partnum in includeParticle:
				kf.write(str(partnum)+"\n")
			kf.close()		

			#get number of particles
			numparticles = len(includeParticle)
			if excludelist:
				self.params['description'] += ( " ... %d particle substack of stackid %d" 
				% (numparticles, self.params['stackid']))
			elif includelist:
				self.params['description'] += ( " ... %d particle substack of stackid %d" 
				% (numparticles, self.params['stackid']))	
		
		ogdescr = self.params['description']
		for i in range(self.params['split']):
			### always loop at least once; when not splitting, split is 1
			sb = os.path.splitext(stackdata['name'])
			if self.params['first'] is not None and self.params['last'] is not None:
				newname = sb[0]+'.'+str(self.params['first'])+'-'+str(self.params['last'])+sb[-1]
			elif self.params['random'] is not None:
				newname = "%s-random%d%s"%(sb[0], self.params['random'], sb[-1])
			elif self.params['split'] > 1:
				fname = 'sub'+str(self.params['stackid'])+'.'+str(i+1)+'.lst'
				self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
				newname = sb[0]+'.'+str(i+1)+'of'+str(self.params['split'])+sb[-1]
			newstack = os.path.join(self.params['rundir'], newname)
			apStack.checkForPreviousStack(newstack)

			#get number of particles
			f = open(self.params['keepfile'], "r")
			numparticles = len(f.readlines())
			f.close()
			self.params['description'] = ogdescr
			self.params['description'] += (
				(" ... %d particle substack of stackid %d" 
				 % (numparticles, self.params['stackid']))
			)
			#if splitting, add to description
			if self.params['split'] > 1:
				self.params['description'] += (" (%i of %i)" % (i+1, self.params['split']))

			#create the new sub stack
			if not self.params['correctbeamtilt']:
				apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
			else:
				apBeamTilt.makeCorrectionStack(self.params['stackid'], oldstack, newstack)
			if not os.path.isfile(newstack):
				apDisplay.printError("No stack was created")
			apStack.commitSubStack(self.params, newname, sorted=False)
			apStack.averageStack(stack=newstack)
			newstackid = apStack.getStackIdFromPath(newstack)
			if self.params['meanplot'] is True:
				apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
				apStackMeanPlot.makeStackMeanPlot(newstackid)
	def start(self):
		partdict = {}
		partlist = []
		### get Euler angles for each particle
		for iternum in self.iternums:
			### get recon iter data
			reconiterq = appiondata.ApRefineIterData()
			reconiterq['refineRun'] = self.reconrundata
			reconiterq['iteration'] = iternum
			reconiterdata = reconiterq.query(results=1)[0] #this should be unique

			### get particle data
			reconpartq = appiondata.ApRefineParticleData()
			reconpartq['refineIter'] = reconiterdata
			apDisplay.printMsg("Querying for particles at "+time.asctime())
			reconpartdatas = reconpartq.query()

			### group particle data
			for partdata in reconpartdatas:
				partnum = partdata['particle']['particleNumber']
				if not partnum in partlist:
					partlist.append(partnum)
				partdict[(partnum, iternum)] = partdata

		### run through particles and check Euler angles
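		# for each particle, take the euler1/euler2 values that occur most often across
		# the selected iterations; particles are then grouped by that (euler1, euler2) view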
		partlist.sort()
		eulerdict = {}
		eulercount = {}
		reject = 0
		for partnum in partlist:
			e1d = {}
			e2d = {}
			for iternum in self.iternums:
				if not (partnum, iternum) in partdict:
					continue
				partdata = partdict[(partnum, iternum)]
				euler1 = "%.2f"%(partdata['euler1'])
				if not euler1 in e1d:
					e1d[euler1] = 1
				else:
					e1d[euler1] += 1
				euler2 = "%.2f"%(partdata['euler2'])
				if not euler2 in e2d:
					e2d[euler2] = 1
				else:
					e2d[euler2] += 1
				#print partnum, euler1, euler2
			counts = [(val,key) for key,val in e1d.items()]
			e1count, euler1 = max(counts)
			counts = [(val,key) for key,val in e2d.items()]
			e2count, euler2 = max(counts)

			# reject indeterminate particles (angle not reproduced in at least two iterations)
			if e2count < 2 or e1count < 2:
				reject += 1
				continue

			### group particles by their Euler angles
			if not (euler1,euler2) in eulerdict:
				eulerdict[(euler1,euler2)] = []
				eulercount[(euler1,euler2)] = 0
			eulerdict[(euler1,euler2)].append(partnum)
			eulercount[(euler1,euler2)] += 1

		print "Rejected %d particles"%(reject)

		values = eulercount.values()
		values.sort()
		print values

		### run through Euler angles and count particles
		counts = [(val,key) for key,val in eulercount.items()]
		mincount, val = min(counts)
		self.params['mincount'] = max(self.params['mincount'], mincount)
		#print "Keeping %d of %d particles"%(mincount*len(eulercount.keys()), len(partlist))
		print "Keeping %d of %d particles"%(self.params['mincount']*len(eulercount.keys()), len(partlist))

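		# cap each view at mincount particles so all views contribute equally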
		keeplist = []
		for key in eulerdict.keys():
			eulerpartlist = eulerdict[key]
			if len(eulerpartlist) < self.params['mincount']:
				keeplist.extend(eulerpartlist)
			else:
				keeplist.extend(eulerpartlist[:self.params['mincount']])
		keeplist.sort()
		print "Keeping %d of %d particles"%(len(keeplist), len(partlist))

		#need to set keepfile for commitSubStack
		self.params['keepfile'] = os.path.join(self.params['rundir'], "equalviews.lst")
		f = open(self.params['keepfile'], "w")
		for partnum in keeplist:
			f.write("%d\n"%(partnum-1))
		f.close()

		### make a new stack using the keep particles
		oldstackdata = self.reconrundata['stack']

		oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
		newstack = os.path.join(self.params['rundir'], "start.hed")
		apStack.makeNewStack(oldstack, newstack, listfile=self.params['keepfile'], remove=True, bad=True)
		if not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")
		self.params['stackid'] = oldstackdata.dbid #need to set stackid for commitSubStack
		apStack.commitSubStack(self.params, "start.hed")
		apStack.averageStack(stack=newstack)
    def start(self):
        ### new stack path
        oldstack = os.path.join(self.stackdata['path']['path'],
                                self.stackdata['name'])
        newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
        apStack.checkForPreviousStack(newstack)

        ### get particles from stack
        apDisplay.printMsg("Querying stack particles")
        t0 = time.time()
        stackpartq = appiondata.ApRefineParticleData()
        stackpartq['refineIter'] = self.iterdata
        particles = stackpartq.query()
        apDisplay.printMsg("Finished in " +
                           apDisplay.timeString(time.time() - t0))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        f = open("test.log", "w")
        count = 0
        apDisplay.printMsg("Processing stack particles")
        t0 = time.time()
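        # postRefine_keep marks particles whose orientation was stable across iterations,
        # presumably set by an earlier Euler-jump analysis; only those are kept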
        for part in particles:
            count += 1
            if count % 500 == 0:
                sys.stderr.write(".")
            emanstackpartnum = part['particle']['particleNumber'] - 1

            if part['postRefine_keep'] == 1:
                ### good particle
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\tinclude\n" % (count, emanstackpartnum))
            else:
                ### bad particle
                excludeParticle += 1
                f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
        sys.stderr.write("\n")
        apDisplay.printMsg("Finished in " +
                           apDisplay.timeString(time.time() - t0))

        f.close()
        includeParticle.sort()
        apDisplay.printMsg("Keeping " + str(len(includeParticle)) +
                           " and excluding " + str(excludeParticle) +
                           " particles")

        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        self.params['description'] += (" ... %d no jumpers substack" %
                                       (numparticles, ))

        ### create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=True)

        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        apStack.averageStack(stack=newstack)
        if self.params['commit'] is True:
            apStack.commitSubStack(self.params)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
    def start(self):
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params["stackid"])
        oldstack = os.path.join(stackdata["path"]["path"], stackdata["name"])
        newstack = os.path.join(self.params["rundir"], stackdata["name"])
        apStack.checkForPreviousStack(newstack)

        includelist = []
        excludelist = []
        ### list of classes to be excluded
        if self.params["dropclasslist"] is not None:
            excludestrlist = self.params["dropclasslist"].split(",")
            for excludeitem in excludestrlist:
                excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))

        ### list of classes to be included
        if self.params["keepclasslist"] is not None:
            includestrlist = self.params["keepclasslist"].split(",")
            for includeitem in includestrlist:
                includelist.append(int(includeitem.strip()))

        ### or read from keepfile
        elif self.params["keepfile"] is not None:
            keeplistfile = open(self.params["keepfile"])
            for line in keeplistfile:
                if self.params["excludefrom"] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
            keeplistfile.close()
        apDisplay.printMsg("Include list: " + str(includelist))

        ### get particles from align or cluster stack
        apDisplay.printMsg("Querying database for particles")
        q0 = time.time()
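        # two query paths: a direct SQL query against the align run (returns
        # dict-like rows) or a sinedon query against the cluster run (returns
        # ClusteringParticle objects); the loop below handles both forms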

        if self.params["alignid"] is not None:
            # query align particles directly with SQL, joining in each
            # particle's stack particle number and reference class number
            sqlcmd = (
                "SELECT "
                + "apd.partnum, "
                + "apd.xshift, apd.yshift, "
                + "apd.rotation, apd.mirror, "
                + "apd.spread, apd.correlation, "
                + "apd.score, apd.bad, "
                + "spd.particleNumber, "
                + "ard.refnum "
                + "FROM ApAlignParticleData apd "
                + "LEFT JOIN ApStackParticleData spd ON "
                + "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) "
                + "LEFT JOIN ApAlignReferenceData ard ON "
                + "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) "
                + "WHERE `REF|ApAlignStackData|alignstack` = %i" % (self.params["alignid"])
            )
            # These are AlignParticles
            particles = sinedon.directq.complexMysqlQuery("appiondata", sqlcmd)

        elif self.params["clusterid"] is not None:
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq["clusterstack"] = self.clusterstackdata
            # These are ClusteringParticles
            particles = clusterpartq.query()
        apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - q0)))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        badscore = 0
        badshift = 0
        badspread = 0

        f = open("test.log", "w")
        count = 0
        t0 = time.time()
        apDisplay.printMsg("Parsing particle information")

        # find out if there is alignparticle info:
        is_cluster_p = False
        # 'alignparticle' is a key of any particle in particles if the latter
        # is a ClusteringParticle
        if "alignparticle" in particles[0]:
            is_cluster_p = True

        for part in particles:
            count += 1
            if is_cluster_p:
                # alignpart is an item of ClusteringParticle
                alignpart = part["alignparticle"]
                try:
                    classnum = int(part["refnum"]) - 1
                except:
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    classnum = None
                emanstackpartnum = alignpart["stackpart"]["particleNumber"] - 1
            else:
                # particle has info from AlignedParticle as results of direct query
                alignpart = part
                try:
                    classnum = int(alignpart["refnum"]) - 1
                except:
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    classnum = None
                emanstackpartnum = int(alignpart["particleNumber"]) - 1

            ### check shift
            if self.params["maxshift"] is not None:
                shift = math.hypot(alignpart["xshift"], alignpart["yshift"])
                if shift > self.params["maxshift"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badshift += 1
                    continue

            if self.params["minscore"] is not None:
                ### check score
                if alignpart["score"] is not None and alignpart["score"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badscore += 1
                    continue

                ### check spread
                if alignpart["spread"] is not None and alignpart["spread"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badspread += 1
                    continue

            # keep the particle if its class is on the include list, or if an
            # exclude list was given and its class is not on it
            if classnum is not None:
                if includelist and (classnum in includelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                elif excludelist and classnum not in excludelist:
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                else:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))

        f.close()

        includeParticle.sort()
        if badshift > 0:
            apDisplay.printMsg("%d particles had a large shift" % (badshift))
        if badscore > 0:
            apDisplay.printMsg("%d particles had a low score" % (badscore))
        if badspread > 0:
            apDisplay.printMsg("%d particles had a low spread" % (badspread))
        apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - t0)))
        apDisplay.printMsg(
            "Keeping " + str(len(includeParticle)) + " and excluding " + str(excludeParticle) + " particles"
        )

        ### write kept particles to file
        self.params["keepfile"] = os.path.join(self.params["rundir"], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params["keepfile"])
        kf = open(self.params["keepfile"], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params["description"] += " ... %d particle substack with %s classes excluded" % (
                numparticles,
                self.params["dropclasslist"],
            )
        elif includelist:
            self.params["description"] += " ... %d particle substack with %s classes included" % (
                numparticles,
                self.params["keepclasslist"],
            )

        outavg = os.path.join(self.params["rundir"], "average.mrc")

        ### create the new sub stack
        # first check if virtual stack
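        # a virtual stack has no particle file of its own; its particles point
        # back into another stack's file, so map the kept indices to particle
        # numbers in that original file before writing or averaging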
        if not os.path.isfile(oldstack):
            vstackdata = apStack.getVirtualStackParticlesFromId(self.params["stackid"])
            vparts = vstackdata["particles"]
            oldstack = vstackdata["filename"]
            # get subset of virtualstack
            vpartlist = [int(vparts[p]["particleNumber"]) - 1 for p in includeParticle]

            if self.params["writefile"] is True:
                apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params["savebad"])

            apStack.averageStack(stack=oldstack, outfile=outavg, partlist=vpartlist)
        else:
            if self.params["writefile"] is True:
                apStack.makeNewStack(oldstack, newstack, self.params["keepfile"], bad=self.params["savebad"])
            apStack.averageStack(stack=oldstack, outfile=outavg, partlist=includeParticle)

        if self.params["writefile"] is True and not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        if self.params["commit"] is True:
            apStack.commitSubStack(self.params, included=includeParticle)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
        def start(self):
                ### new stack path
                oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
                newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
                apStack.checkForPreviousStack(newstack)

                ### get particles from stack
                apDisplay.printMsg("Querying stack particles")
                t0 = time.time()
                stackpartq =  appiondata.ApStackParticleData()
                stackpartq['stack'] = self.stackdata
                particles = stackpartq.query()
                apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

                ### write included particles to text file
                includeParticle = []
                excludeParticle = 0
                f = open("test.log", "w")
                count = 0
                apDisplay.printMsg("Processing stack particles")
                t0 = time.time()
                for part in particles:
                        count += 1
                        if count%500 == 0:
                                sys.stderr.write(".")
                        emanstackpartnum = part['particleNumber']-1

                        ### get euler jump data
                        jumpq = appiondata.ApEulerJumpData()
                        jumpq['particle'] = part
                        jumpq['refineRun'] = self.recondata
                        jumpdatas = jumpq.query(results=1)
                        if not jumpdatas or len(jumpdatas) < 1:
                                ### no data
                                continue
                        jumpdata = jumpdatas[0]

                        # exclude if the median euler jump (orientation change
                        # between refinement iterations) is missing or larger
                        # than maxjump
                        if jumpdata['median'] is None or jumpdata['median'] > self.params['maxjump']:
                                ### bad particle
                                excludeParticle += 1
                                f.write("%d\t%d\t%s\texclude\n"%(count, emanstackpartnum, jumpdata['median']))
                        else:
                                ### good particle
                                includeParticle.append(emanstackpartnum)
                                f.write("%d\t%d\t%.1f\tinclude\n"%(count, emanstackpartnum, jumpdata['median']))
                sys.stderr.write("\n")
                apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

                f.close()
                includeParticle.sort()
                apDisplay.printMsg("Keeping "+str(len(includeParticle))
                        +" and excluding "+str(excludeParticle)+" particles")

                #print includeParticle

                ### write kept particles to file
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
                kf = open(self.params['keepfile'], "w")
                for partnum in includeParticle:
                        kf.write(str(partnum)+"\n")
                kf.close()

                ### get number of particles
                numparticles = len(includeParticle)
                self.params['description'] += ( " ... %d no jumpers substack" % (numparticles,))

                ### create the new sub stack
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)

                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")

                apStack.averageStack(stack=newstack)
                if self.params['commit'] is True:
                        apStack.commitSubStack(self.params)
                        newstackid = apStack.getStackIdFromPath(newstack)
                        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
    def start(self):
        self.params['output_fileformat'] = 'mrc'
        newstackname = 'framealigned.hed'
        stackdata = apStack.getStackParticlesFromId(self.params['stackid'])
        stackrundata = apStack.getOnlyStackData(self.params['stackid'])
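        # pixelsize is stored in meters and high tension in volts; convert to
        # Angstroms per pixel and kV for the frame-alignment options below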
        apix = stackrundata['pixelsize'] * 1e10
        kev = stackdata[0]['particle']['image']['scope']['high tension'] / 1000
        origstackpath = os.path.join(stackrundata['path']['path'],
                                     stackrundata['name'])
        boxsize = stackdata[0]['stackRun']['stackParams']['boxSize']
        binning = stackdata[0]['stackRun']['stackParams']['bin']
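        # workflow: group stack particles by parent image, locate or extract
        # the raw frames for each image, run the DE deProcessFrames.py aligner
        # on the boxed regions, then rebuild the particle stack with proc2d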

        #determine camera type
        cameratype = stackdata[0]['particle']['image']['camera']['ccdcamera'][
            'name']
        if self.params['override_camera'] is not None:
            cameratype = self.params['override_camera']

        #create sorted boxfiles
        imagedict = {}
        masterlist = []
        for particle in stackdata:
            parentimage = particle['particle']['image']['filename']
            if parentimage not in imagedict:
                imagedict[parentimage] = []
            imagedict[parentimage].append(particle['particle'])
            index = len(imagedict[parentimage]) - 1
            masterlist.append({
                'particle': particle,
                'key': parentimage,
                'index': index
            })
        #print masterlist

        for key in imagedict:
            particlelst = imagedict[key]
            parentimage = key
            framespath = particlelst[0]['image']['session']['frame path']

            print cameratype
            if 'Gatan' in cameratype:
                #prepare frames
                print framespath

                #prepare frame directory
                framespathname = os.path.join(self.params['rundir'],
                                              parentimage + '.frames')
                if not os.path.exists(framespathname):
                    os.mkdir(framespathname)
                print framespathname

                mrcframestackname = parentimage + '.frames.mrc'

                print mrcframestackname

                nframes = particlelst[0]['image']['camera']['nframes']

                print "Extracting frames for", mrcframestackname
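                # read each frame out of the MRC frame stack and write it as a
                # single TIFF (RawImage_<n>.tif), presumably the per-frame
                # layout the DE script expects for Gatan data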
                for n in range(nframes):
                    a = mrc.read(os.path.join(framespath, mrcframestackname),
                                 n)
                    numpil.write(a,
                                 imfile=os.path.join(framespathname,
                                                     'RawImage_%d.tif' % (n)),
                                 format='tiff')

            elif 'DE' in cameratype:
                framespathname = os.path.join(framespath,
                                              parentimage + '.frames')

            print os.getcwd()
            print framespathname
            #generate DE script call
            if os.path.exists(framespathname):
                print "found frames for", parentimage

                nframes = particlelst[0]['image']['camera']['nframes']
                boxname = parentimage + '.box'
                boxpath = os.path.join(framespathname, boxname)
                shiftdata = {'scale': 1, 'shiftx': 0, 'shifty': 0}

                #flatfield references
                brightrefpath = particlelst[0]['image']['bright']['session'][
                    'image path']
                brightrefname = particlelst[0]['image']['bright']['filename']
                brightnframes = particlelst[0]['image']['bright']['camera'][
                    'nframes']
                darkrefpath = particlelst[0]['image']['dark']['session'][
                    'image path']
                darkrefname = particlelst[0]['image']['dark']['filename']
                darknframes = particlelst[0]['image']['dark']['camera'][
                    'nframes']
                brightref = os.path.join(brightrefpath, brightrefname + '.mrc')
                darkref = os.path.join(darkrefpath, darkrefname + '.mrc')
                print brightref
                print darkref
                apBoxer.processParticleData(particlelst[0]['image'], boxsize,
                                            particlelst, shiftdata, boxpath)
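                # processParticleData writes this image's particle coordinates
                # to an EMAN-style .box file; boxes_fromfiles=1 below
                # presumably makes the DE script process only those regions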
                print framespathname

                #set appion specific options
                self.params['gainreference_filename'] = brightref
                self.params['gainreference_framecount'] = brightnframes
                self.params['darkreference_filename'] = darkref
                self.params['darkreference_framecount'] = darknframes
                self.params['input_framecount'] = nframes
                self.params['boxes_fromfiles'] = 1
                #self.params['run_verbosity']=3
                self.params['output_invert'] = 0
                #self.params['radiationdamage_apix=']=apix
                self.params['radiationdamage_voltage'] = kev
                #self.params['boxes_boxsize']=boxsize

                outpath = os.path.join(self.params['rundir'], key)
                if os.path.exists(outpath):
                    shutil.rmtree(outpath)
                os.mkdir(outpath)

                # build the deProcessFrames.py command line: every non-empty
                # entry in self.params becomes a --key=value option
                # ('paramkey' avoids clobbering the outer 'key' loop variable)
                command = ['deProcessFrames.py']
                for paramkey in sorted(self.params.keys()):
                    param = self.params[paramkey]
                    #print paramkey, param, type(param)
                    if param is None or param == '':
                        continue
                    command.append('--%s=%s' % (paramkey, param))
                command.append(outpath)
                command.append(framespathname)
                print command
                if self.params['dryrun'] is False:
                    subprocess.call(command)

        #recreate particle stack
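        # use the DE-corrected region image for each particle when frame
        # alignment produced one; otherwise fall back to copying the original
        # (unaligned) particle out of the source stack with proc2d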
        for n, particledict in enumerate(masterlist):
            parentimage = particledict['key']
            correctedpath = os.path.join(self.params['rundir'], parentimage)
            print correctedpath
            if os.path.exists(correctedpath):

                correctedparticle = glob.glob(
                    os.path.join(correctedpath,
                                 ('%s.*.region_%03d.*' %
                                  (parentimage, particledict['index']))))
                print os.path.join(correctedpath,
                                   ('%s.*.region_%03d.*' %
                                    (parentimage, particledict['index'])))
                print correctedparticle
                #sys.exit()
                command = ['proc2d', correctedparticle[0], newstackname]
                if self.params['output_rotation'] != 0:
                    command.append('rot=%d' % self.params['output_rotation'])

                if self.params['show_DE_command'] is True:
                    print command
                subprocess.call(command)
            else:
                print "did not find frames for ", parentimage
                command = [
                    'proc2d', origstackpath, newstackname, ('first=%d' % n),
                    ('last=%d' % n)
                ]
                print command
                if self.params['dryrun'] is False:
                    subprocess.call(command)

        #upload stack

        #make keep file
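        # no particles are discarded by frame alignment, so the keep file
        # simply lists every index from 0 to N-1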
        self.params['keepfile'] = 'keepfile.txt'
        f = open(self.params['keepfile'], 'w')
        for n in range(len(masterlist)):
            f.write('%d\n' % (n))
        f.close()

        apStack.commitSubStack(self.params, newname=newstackname)
        apStack.averageStack(stack=newstackname)

        print "Done!!!!"
    def start(self):
        partdict = {}
        partlist = []
        ### get Euler angles for each particle
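        # for each refinement iteration, collect every particle's Euler angles,
        # then keep only particles whose orientation is consistent across
        # iterations and balance the number of particles per view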
        for iternum in self.iternums:
            ### get recon iter data
            reconiterq = appiondata.ApRefineIterData()
            reconiterq['refineRun'] = self.reconrundata
            reconiterq['iteration'] = iternum
            reconiterdata = reconiterq.query(
                results=1)[0]  #this should be unique

            ### get particle data
            reconpartq = appiondata.ApRefineParticleData()
            reconpartq['refineIter'] = reconiterdata
            apDisplay.printMsg("Querying for particles at " + time.asctime())
            reconpartdatas = reconpartq.query()

            ### group particle data
            for partdata in reconpartdatas:
                partnum = partdata['particle']['particleNumber']
                if not partnum in partlist:
                    partlist.append(partnum)
                partdict[(partnum, iternum)] = partdata

        ### run through particles and check Euler angles
        partlist.sort()
        eulerdict = {}
        eulercount = {}
        reject = 0
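        # for each particle, find its most frequent euler1/euler2 pair across
        # iterations; if the most common angle appears fewer than two times the
        # particle is considered indeterminate and rejected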
        for partnum in partlist:
            e1d = {}
            e2d = {}
            for iternum in self.iternums:
                if not (partnum, iternum) in partdict:
                    continue
                partdata = partdict[(partnum, iternum)]
                euler1 = "%.2f" % (partdata['euler1'])
                if not euler1 in e1d:
                    e1d[euler1] = 1
                else:
                    e1d[euler1] += 1
                euler2 = "%.2f" % (partdata['euler2'])
                if not euler2 in e2d:
                    e2d[euler2] = 1
                else:
                    e2d[euler2] += 1
                #print partnum, euler1, euler2
            counts = [(val, key) for key, val in e1d.items()]
            e1count, euler1 = max(counts)
            counts = [(val, key) for key, val in e2d.items()]
            e2count, euler2 = max(counts)

            # reject indeterminant particles
            if e2count < 2 or e1count < 2:
                reject += 1
                continue

            ### group particles by their Euler angles
            if not (euler1, euler2) in eulerdict:
                eulerdict[(euler1, euler2)] = []
                eulercount[(euler1, euler2)] = 0
            eulerdict[(euler1, euler2)].append(partnum)
            eulercount[(euler1, euler2)] += 1

        print "Rejected %d particles" % (reject)

        values = eulercount.values()
        values.sort()
        print values

        ### run through Euler angles and count particles
        counts = [(val, key) for key, val in eulercount.items()]
        mincount, val = min(counts)
        self.params['mincount'] = max(self.params['mincount'], mincount)
        #print "Keeping %d of %d particles"%(mincount*len(eulercount.keys()), len(partlist))
        print "Keeping %d of %d particles" % (
            self.params['mincount'] * len(eulercount.keys()), len(partlist))

        keeplist = []
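        # keep at most 'mincount' particles from each orientation so that all
        # views are represented roughly equally in the new stack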
        for key in eulerdict.keys():
            eulerpartlist = eulerdict[key]
            if len(eulerpartlist) < self.params['mincount']:
                keeplist.extend(eulerpartlist)
            else:
                keeplist.extend(eulerpartlist[:self.params['mincount']])
        keeplist.sort()
        print "Keeping %d of %d particles" % (len(keeplist), len(partlist))

        #need to set keepfile for commitSubStack
        self.params['keepfile'] = os.path.join(self.params['rundir'],
                                               "equalviews.lst")
        f = open(self.params['keepfile'], "w")
        for partnum in keeplist:
            f.write("%d\n" % (partnum - 1))
        f.close()

        ### make a new stack using the keep particles
        oldstackdata = self.reconrundata['stack']

        oldstack = os.path.join(oldstackdata['path']['path'],
                                oldstackdata['name'])
        newstack = os.path.join(self.params['rundir'], "start.hed")
        apStack.makeNewStack(oldstack,
                             newstack,
                             listfile=self.params['keepfile'],
                             remove=True,
                             bad=True)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        # need to set stackid for commitSubStack
        self.params['stackid'] = oldstackdata.dbid
        apStack.commitSubStack(self.params, "start.hed")
        apStack.averageStack(stack=newstack)
        def start(self):
                ### new stack path
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
                newstack = os.path.join(self.params['rundir'], stackdata['name'])
                apStack.checkForPreviousStack(newstack)

                includelist = []
                excludelist = []
                ### list of classes to be excluded
                if self.params['dropclasslist'] is not None:
                        excludestrlist = self.params['dropclasslist'].split(",")
                        for excludeitem in excludestrlist:
                                excludelist.append(int(excludeitem.strip()))
                apDisplay.printMsg("Exclude list: "+str(excludelist))

                ### list of classes to be included
                if self.params['keepclasslist'] is not None:
                        includestrlist = self.params['keepclasslist'].split(",")
                        for includeitem in includestrlist:
                                includelist.append(int(includeitem.strip()))

                ### or read from keepfile
                elif self.params['keepfile'] is not None:
                        keeplistfile = open(self.params['keepfile'])
                        for line in keeplistfile:
                                if self.params['excludefrom'] is True:
                                        excludelist.append(int(line.strip()))
                                else:
                                        includelist.append(int(line.strip()))
                        keeplistfile.close()
                apDisplay.printMsg("Include list: "+str(includelist))

                ### get particles from align or cluster stack
                apDisplay.printMsg("Querying database for particles")
                q0 = time.time()
                if self.params['alignid'] is not None:
                        alignpartq =  appiondata.ApAlignParticleData()
                        alignpartq['alignstack'] = self.alignstackdata
                        particles = alignpartq.query()
                elif self.params['clusterid'] is not None:
                        clusterpartq = appiondata.ApClusteringParticleData()
                        clusterpartq['clusterstack'] = self.clusterstackdata
                        particles = clusterpartq.query()
                apDisplay.printMsg("Complete in "+apDisplay.timeString(time.time()-q0))

                ### write included particles to text file
                includeParticle = []
                excludeParticle = 0
                badscore = 0
                badshift = 0
                badspread = 0
                f = open("test.log", "w")
                count = 0
                for part in particles:
                        count += 1
                        #partnum = part['partnum']-1
                        if 'alignparticle' in part:
                                alignpart = part['alignparticle']
                                classnum = int(part['refnum'])-1
                        else:
                                alignpart = part
                                classnum = int(part['ref']['refnum'])-1
                        emanstackpartnum = alignpart['stackpart']['particleNumber']-1

                        ### check shift
                        if self.params['maxshift'] is not None:
                                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                                if shift > self.params['maxshift']:
                                        excludeParticle += 1
                                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                                        badshift += 1
                                        continue


                        if self.params['minscore'] is not None:
                                ### check score
                                if ( alignpart['score'] is not None
                                 and alignpart['score'] < self.params['minscore'] ):
                                        excludeParticle += 1
                                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                                        badscore += 1
                                        continue

                                ### check spread
                                if ( alignpart['spread'] is not None
                                 and alignpart['spread'] < self.params['minscore'] ):
                                        excludeParticle += 1
                                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                                        badspread += 1
                                        continue

                        if includelist and classnum in includelist:
                                includeParticle.append(emanstackpartnum)
                                f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
                        elif excludelist and not classnum in excludelist:
                                includeParticle.append(emanstackpartnum)
                                f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
                        else:
                                excludeParticle += 1
                                f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))

                f.close()
                includeParticle.sort()
                if badshift > 0:
                        apDisplay.printMsg("%d particles had a large shift"%(badshift))
                if badscore > 0:
                        apDisplay.printMsg("%d particles had a low score"%(badscore))
                if badspread > 0:
                        apDisplay.printMsg("%d particles had a low spread"%(badspread))
                apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

                #print includeParticle

                ### write kept particles to file
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
                kf = open(self.params['keepfile'], "w")
                for partnum in includeParticle:
                        kf.write(str(partnum)+"\n")
                kf.close()

                ### get number of particles
                numparticles = len(includeParticle)
                if excludelist:
                        self.params['description'] += ( " ... %d particle substack with %s classes excluded"
                                % (numparticles, self.params['dropclasslist']))
                elif includelist:
                        self.params['description'] += ( " ... %d particle substack with %s classes included"
                                % (numparticles, self.params['keepclasslist']))

                ### create the new sub stack
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])

                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")

                apStack.averageStack(stack=newstack)
                if self.params['commit'] is True:
                        apStack.commitSubStack(self.params)
                        newstackid = apStack.getStackIdFromPath(newstack)
                        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
    def start(self):
        #old stack size
        stacksize = apStack.getNumberStackParticlesFromId(
            self.params['stackid'])

        # if exclude or include lists are not defined...
        if self.params['exclude'] is None and self.params['include'] is None:
            # if first and last are specified, create a file
            if self.params['first'] is not None and self.params['last'] is not None:
                stp = str(self.params['first'])
                enp = str(self.params['last'])
                fname = 'sub' + str(
                    self.params['stackid']) + '_' + stp + '-' + enp + '.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                       fname)
                apDisplay.printMsg("Creating keep list: " +
                                   self.params['keepfile'])
                f = open(self.params['keepfile'], 'w')
                for i in range(self.params['first'], self.params['last'] + 1):
                    f.write('%d\n' % (int(i) - 1))
                f.close()
                # generate the random list by giving number and create the file
            elif self.params['random'] is not None:
                #numOfRandomParticles = str(self.params['random'])
                #fname = 'random'+str(self.params['stackid'])+'_'+numOfRandomParticles+'.lst'
                fname = "random%d_%d.lst" % (self.params['stackid'],
                                             self.params['random'])
                self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                       fname)
                apDisplay.printMsg("Creating keep list: " +
                                   self.params['keepfile'])
                # create a file
                f = open(self.params['keepfile'], 'w')
                # generate a random sequence by giving size
                randomList = random.sample(xrange(self.params['last']),
                                           self.params['random'])
                randomList.sort()
                for partnum in randomList:
                    f.write('%d\n' % partnum)
                f.close()

            # if splitting, create files containing the split values
            elif self.params['split'] > 1:
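                # particle p goes into split file i when p % split == i,
                # dealing the stack out round-robin into equal-sized lists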
                for i in range(self.params['split']):
                    fname = 'sub' + str(
                        self.params['stackid']) + '.' + str(i + 1) + '.lst'
                    self.params['keepfile'] = os.path.join(
                        self.params['rundir'], fname)
                    apDisplay.printMsg("Creating keep list: " +
                                       self.params['keepfile'])
                    f = open(self.params['keepfile'], 'w')
                    for p in range(stacksize):
                        if (p % self.params['split']) - i == 0:
                            f.write('%i\n' % p)
                    f.close()

            # if the exclude-from-file option is set, the given keepfile lists
            # particles to exclude, so invert it into a keep list
            elif self.params['excludefile'] is True:
                oldkf = open(self.params['keepfile'])
                partlist = []
                for line in oldkf:
                    particle = line.strip()
                    try:
                        particle = int(particle)
                    except:
                        continue
                    partlist.append(particle)
                oldkf.close()
                # create new list excluding the particles
                apDisplay.printMsg("Converting exclude list into a keep file")
                newkeepfile = "tmpnewkeepfile.txt"
                newkf = open(newkeepfile, 'w')
                for p in range(stacksize):
                    if p not in partlist:
                        newkf.write("%i\n" % p)
                newkf.close()
                self.params['keepfile'] = os.path.abspath(newkeepfile)

            # otherwise, just copy the file
            elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
                shutil.copy(self.params['keepfile'],
                            os.path.basename(self.params['keepfile']))

        # if either exclude or include lists is defined
        elif self.params['exclude'] or self.params['include']:

            ### list of particles to be excluded
            excludelist = []
            if self.params['exclude'] is not None:
                excludestrlist = self.params['exclude'].split(",")
                for excld in excludestrlist:
                    excludelist.append(int(excld.strip()))
            apDisplay.printMsg("Exclude list: " + str(excludelist))

            ### list of particles to be included
            includelist = []
            if self.params['include'] is not None:
                includestrlist = self.params['include'].split(",")
                for incld in includestrlist:
                    includelist.append(int(incld.strip()))
            apDisplay.printMsg("Include list: " + str(includelist))

        #new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        newname = stackdata['name']

        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

        #if include or exclude list is given...
        if self.params['include'] is not None or self.params['exclude'] is not None:

            includeParticle = []
            excludeParticle = 0

            for partnum in range(stacksize):
                if includelist and partnum in includelist:
                    includeParticle.append(partnum)
                elif excludelist and not partnum in excludelist:
                    includeParticle.append(partnum)
                else:
                    excludeParticle += 1
            includeParticle.sort()

            ### write kept particles to file
            self.params['keepfile'] = os.path.join(
                self.params['rundir'], "keepfile-" + self.timestamp + ".list")
            apDisplay.printMsg("writing to keepfile " +
                               self.params['keepfile'])
            kf = open(self.params['keepfile'], "w")
            for partnum in includeParticle:
                kf.write(str(partnum) + "\n")
            kf.close()

            #get number of particles
            numparticles = len(includeParticle)
            if excludelist:
                self.params['description'] += (
                    " ... %d particle substack of stackid %d" %
                    (numparticles, self.params['stackid']))
            elif includelist:
                self.params['description'] += (
                    " ... %d particle substack of stackid %d" %
                    (numparticles, self.params['stackid']))

        ogdescr = self.params['description']
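        # loop once per split file (split defaults to 1, so this also covers
        # the non-split cases) and build a substack from each keep file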
        for i in range(self.params['split']):
            ### always do this, if not splitting split=1
            sb = os.path.splitext(stackdata['name'])
            if self.params['first'] is not None and self.params['last'] is not None:
                newname = sb[0] + '.' + str(self.params['first']) + '-' + str(
                    self.params['last']) + sb[-1]
            elif self.params['random'] is not None:
                newname = "%s-random%d%s" % (sb[0], self.params['random'],
                                             sb[-1])
            elif self.params['split'] > 1:
                fname = 'sub' + str(
                    self.params['stackid']) + '.' + str(i + 1) + '.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                       fname)
                newname = sb[0] + '.' + str(i + 1) + 'of' + str(
                    self.params['split']) + sb[-1]
            newstack = os.path.join(self.params['rundir'], newname)
            apStack.checkForPreviousStack(newstack)

            #get number of particles
            f = open(self.params['keepfile'], "r")
            numparticles = len(f.readlines())
            f.close()
            self.params['description'] = ogdescr
            self.params['description'] += (
                (" ... %d particle substack of stackid %d" %
                 (numparticles, self.params['stackid'])))
            #if splitting, add to description
            if self.params['split'] > 1:
                self.params['description'] += (" (%i of %i)" %
                                               (i + 1, self.params['split']))

            #create the new sub stack
            if not self.params['correctbeamtilt']:
                apStack.makeNewStack(oldstack,
                                     newstack,
                                     self.params['keepfile'],
                                     bad=True)
            else:
                apBeamTilt.makeCorrectionStack(self.params['stackid'],
                                               oldstack, newstack)
            if not os.path.isfile(newstack):
                apDisplay.printError("No stack was created")
            apStack.commitSubStack(self.params, newname, sorted=False)
            apStack.averageStack(stack=newstack)
            newstackid = apStack.getStackIdFromPath(newstack)
            if self.params['meanplot'] is True:
                apDisplay.printMsg(
                    "creating Stack Mean Plot montage for stackid")
                apStackMeanPlot.makeStackMeanPlot(newstackid)