def projectVolume(volfile, eulerdocfile, projstackfile, numpart, pixrad, dataext=".spi"):
	"""
	project 3D volumes using given Euler angles
	"""
	starttime = time.time()

	volfile = spyder.fileFilter(volfile)
	eulerdocfile = spyder.fileFilter(eulerdocfile)
	projstackfile = spyder.fileFilter(projstackfile)
	if not os.path.isfile(volfile+dataext):
		apDisplay.printError("volume file not found: "+volfile+dataext)
	if not os.path.isfile(eulerdocfile+dataext):
		apDisplay.printError("euler doc file not found: "+eulerdocfile+dataext)

	apFile.removeFile(projstackfile)
	nproc = apParam.getNumProcessors()
	mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)
	mySpider.toSpider("PJ 3Q", 
		volfile, #input vol file
		str(pixrad), #pixel radius
		"1-%d"%(numpart), #number of particles		
		eulerdocfile, #Euler DOC file
		projstackfile+"@*****", #output projections
	)
	mySpider.close()
	apDisplay.printColor("finished projections in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return
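# Usage sketch (hypothetical filenames, for illustration only): project 100
# particles at a 30-pixel radius from a volume, using the Euler angles stored
# in a SPIDER document file. The ".spi" extension is handled inside the
# function, so the base names can be passed without it.
#
#	projectVolume("volume001", "eulerdoc001", "projstack001",
#		numpart=100, pixrad=30)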
	def createSpiderFile(self):
		"""
		takes the stack file and creates a spider file ready for processing
		"""
		emancmd  = "proc2d "
		if not os.path.isfile(self.stack['file']):
			apDisplay.printError("stackfile does not exist: "+self.stack['file'])
		emancmd += self.stack['file']+" "

		spiderstack = os.path.join(self.params['rundir'], "start.spi")
		apFile.removeFile(spiderstack, warn=True)
		emancmd += spiderstack+" "

		emancmd += "apix="+str(self.stack['apix'])+" "
		if self.params['lowpass'] > 0:
			emancmd += "lp="+str(self.params['lowpass'])+" "
		if self.params['highpass'] > 0:
			emancmd += "hp="+str(self.params['highpass'])+" "
		if self.params['bin'] > 1:
			clipboxsize = self.boxsize*self.params['bin']
			emancmd += "shrink="+str(self.params['bin'])+" "
			emancmd += "clip="+str(clipboxsize)+","+str(clipboxsize)+" "
		emancmd += "last="+str(self.params['numpart']-1)+" "
		emancmd += "spider edgenorm"
		starttime = time.time()
		apDisplay.printColor("Running spider stack conversion this can take a while", "cyan")
		apEMAN.executeEmanCmd(emancmd, verbose=True)
		apDisplay.printColor("finished eman in "+apDisplay.timeString(time.time()-starttime), "cyan")
		return spiderstack
	def convertSQLtoEulerTree(self, results):
		t0 = time.time()
		eulertree = []
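		# expected row layout, derived from the assignments below (14 columns):
		#   0: partid1, 1: dbid1, 2-4: eulers1, 5: mirror1, 6: keep1,
		#   7: partid2, 8: dbid2, 9-11: eulers2, 12: mirror2, 13: keep2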
		for row in results:
			if len(row) < 14:
				apDisplay.printError("delete MySQL cache file and run again")
			try:
				eulerpair = { 'part1': {}, 'part2': {} }
				eulerpair['part1']['partid'] = int(row[0])
				eulerpair['part1']['dbid']   = int(row[1])
				eulerpair['part1']['euler1'] = float(row[2])
				eulerpair['part1']['euler2'] = float(row[3])
				eulerpair['part1']['euler3'] = float(row[4])
				eulerpair['part1']['mirror'] = self.nullOrValue(row[5])
				eulerpair['part1']['reject'] = not self.nullOrValue(row[6])
				eulerpair['part1']['tilt']   = apStack.getStackParticleTilt(eulerpair['part1']['dbid'])

				eulerpair['part2']['partid'] = int(row[7])
				eulerpair['part2']['dbid']   = int(row[8])
				eulerpair['part2']['euler1'] = float(row[9])
				eulerpair['part2']['euler2'] = float(row[10])
				eulerpair['part2']['euler3'] = float(row[11])
				eulerpair['part2']['mirror'] = self.nullOrValue(row[12])
				eulerpair['part2']['reject'] = not self.nullOrValue(row[13])
				eulerpair['part2']['tilt']   = apStack.getStackParticleTilt(eulerpair['part2']['dbid'])
				eulertree.append(eulerpair)
			except:
				print row
				apDisplay.printError("bad row entry")

		apDisplay.printMsg("Converted "+str(len(eulertree))+" eulers in "+apDisplay.timeString(time.time()-t0))
		return eulertree
def rctParticleShift(volfile, origstackfile, eulerdocfile, iternum, numpart, pixrad, timestamp, dataext=".spi"):
	"""
	inputs:
		stack, in spider format
		eulerdocfile
	outputs:
		volume
	"""
	starttime = time.time()
	### create corresponding projections
	projstackfile = "projstack%s-%03d.spi"%(timestamp, iternum)
	projectVolume(volfile, eulerdocfile, projstackfile, numpart, pixrad, dataext)

	### clean up files
	ccdocfile = "ccdocfile%s-%03d.spi"%(timestamp, iternum)
	apFile.removeFile(ccdocfile)
	alignstackfile = "alignstack%s-%03d.spi"%(timestamp, iternum)
	apFile.removeFile(alignstackfile)

	### align particles to projection
	apDisplay.printMsg("Shifting particles")
	crossCorrelateAndShift(origstackfile, projstackfile, alignstackfile, ccdocfile, numpart)

	if not os.path.isfile(alignstackfile):
		apDisplay.printError("aligned stack file not found: "+alignstackfile)
	apDisplay.printColor("finished correlations in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return alignstackfile
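# Usage sketch (hypothetical filenames): one centering pass of an RCT/OTR-style
# refinement, re-projecting the current volume and shifting the original stack
# against those projections.
#
#	alignstackfile = rctParticleShift("volume001", "origstack001",
#		"eulerdoc001", iternum=1, numpart=100, pixrad=30, timestamp="10jan01a")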
        def fillSimilarityMatrix(self, alignedstack):
                ### Get initial correlation values
                ### this is really, really slow

                similarfile = "similarities.dat"
                simstack = similarityStack()
                simstack.similarfile = similarfile
                simstack.start(alignedstack)

                if not os.path.isfile(similarfile):
                        apDisplay.printError("Failed to create similarity file")

                simf = open(similarfile, 'r')
                simlist = []
                count = 0
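                ### assumed similarity file layout, inferred from the parsing
                ### below: one whitespace-separated line per ordered particle
                ### pair, with the cross-correlation value in the third column,
                ### e.g. "1 2 0.7543"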
                for line in simf:
                        count += 1
                        sline = line.strip()
                        slist = sline.split()
                        ccval = float(slist[2])
                        simlist.append(ccval)
                simf.close()
                apDisplay.printMsg("There are %d lines in the sim file: %s"%(count, similarfile))

                numpart = apFile.numImagesInStack(alignedstack)
                if count != numpart*(numpart-1):
                        ### the similarity file is incomplete or invalid
                        apDisplay.printError("Found %d lines in the sim file, but expected %d"%(count, numpart*(numpart-1)))

                return similarfile, simlist
	def start(self):
		aligndata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
		boxsize = aligndata['boxsize']
		apix = aligndata['pixelsize']
		maskpixrad = self.params['maskrad']/apix
		if maskpixrad*2 > boxsize-2:
			apDisplay.printError("Mask radius is too big for boxsize: %d > %d"%(maskpixrad*2,boxsize-2))
		apDisplay.printMsg("Mask radius and boxsize: %.1f < %d"%(maskpixrad*2,boxsize-2))
		self.instack = os.path.join(aligndata['path']['path'], aligndata['imagicfile'])
		outdata = "stack.data"

		apXmipp.convertStackToXmippData(self.instack, outdata, maskpixrad,
			boxsize, numpart=self.params['numpart']-1)

		self.runKerdenSOM(outdata)
		if apFile.stackSize(self.instack) > 3.0*(1024**3):
			# Big stacks use eman
			self.createMontageByEMAN()
			binned = None
		else:
			binned = self.createMontageInMemory(apix)
		self.insertKerDenSOM(binned=binned)

		apFile.removeFile(outdata)
		apFile.removeFilePattern("*.cod")
	def getImagesInDirectory(self, directory):
		searchstring = os.path.join(directory, "*.mrc")
		mrclist = glob.glob(searchstring)
		if len(mrclist) == 0:
			apDisplay.printError("Did not find any images to upload")
		mrclist.sort()
		return mrclist
	def runEoTest(self, corrSelectOdd, corrSelectEven, cnum, apshstack, apsheuler, iternum):


		apshOddVolfile = os.path.join(self.params['rundir'], str(cnum), "apshVolume_Odd-%03d.spi"%(iternum))
		apshEvenVolfile = os.path.join(self.params['rundir'], str(cnum), "apshVolume_Even-%03d.spi"%(iternum))

		self.APSHbackProject(apshstack, apsheuler, apshOddVolfile, cnum, corrSelectOdd)
		self.APSHbackProject(apshstack, apsheuler, apshEvenVolfile, cnum, corrSelectEven)

		fscout = os.path.join(self.params['rundir'], str(cnum), "FSCout-%03d.spi"%(iternum))
		backproject.calcFSC(apshOddVolfile, apshEvenVolfile, fscout)

		### Calculate FSC - taken from Neil's RCT script
		apix = apStack.getStackPixelSizeFromStackId(self.params['tiltstackid'])*self.params['tiltbin']
		emancmd = "proc3d %s %s"%(apshEvenVolfile, apshEvenVolfile+".mrc")
		apEMAN.executeEmanCmd(emancmd, verbose=True, showcmd=True)
		emancmd = "proc3d %s %s"%(apshOddVolfile, apshOddVolfile+".mrc")
		apEMAN.executeEmanCmd(emancmd, verbose=True, showcmd=True)
		fscfile = os.path.join(self.params['rundir'], "fscdata%s.fsc"%(self.timestamp))
		emancmd = "proc3d %s %s fsc=%s"%(apshEvenVolfile+".mrc", apshOddVolfile+".mrc", fscfile)
		apEMAN.executeEmanCmd(emancmd, verbose=True, showcmd=True)

		if not os.path.isfile(fscfile):
			apDisplay.printError("Even-Odd fsc calculation failed")
		boxsize = self.getBoxSize()
		self.fscresolution = apRecon.getResolutionFromFSCFile(fscfile, boxsize, apix, msg=True)
		apDisplay.printColor( ("Final FSC resolution: %.5f" % (self.fscresolution)), "cyan")

		return fscout
	def boxFileToPeakTree(self, imgdata):
		boxfile = imgdata['filename']+".box"
		if self.params['coordtype'] == "xmipp":
			boxfile = imgdata['filename']+".pos"
		if not os.path.isfile(boxfile):
			apDisplay.printError("Could not find box file "+boxfile)
		f = open(boxfile, "r")
		peaktree = []
		for line in f:
			sline = line.strip()
			cols = sline.split()
			if self.params['coordtype'] == "xmipp":
				if len(cols)>2 or cols[0][0]=="#":
					continue
				xcoord = float(cols[0]) * self.params['bin']
				ycoord = float(cols[1]) * self.params['bin']
			else:
				xcoord = (float(cols[0]) + float(cols[2])/2.)* self.params['bin']
				ycoord = (float(cols[1]) + float(cols[3])/2.)* self.params['bin']
			peakdict = {
				'diameter': self.params['diam'],
				'xcoord': xcoord,
				'ycoord': ycoord,
				'peakarea': 10,
			}
			peaktree.append(peakdict)
		return peaktree
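	# Coordinate file formats handled above (illustrative lines only):
	#   EMAN .box  : "512 768 64 64"  -> corner x, corner y, box width, box height
	#   Xmipp .pos : "512.0 768.0"    -> particle x, y; lines starting with '#'
	#                                    (or with extra columns) are skipped
	# Both are multiplied by params['bin'] to recover unbinned coordinates.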
	def convertStackToSpider(self, emanstackfile, classnum):
		"""
		takes the stack file and creates a spider file ready for processing
		"""
		if not os.path.isfile(emanstackfile):
			apDisplay.printError("stackfile does not exist: "+emanstackfile)

		### first high pass filter particles
		apDisplay.printMsg("pre-filtering particles")
		apix = apStack.getStackPixelSizeFromStackId(self.params['tiltstackid'])
		emancmd = ("proc2d "+emanstackfile+" "+emanstackfile
			+" apix="+str(apix)+" hp="+str(self.params['highpasspart'])
			+" inplace")
		apEMAN.executeEmanCmd(emancmd, verbose=True)

		### convert imagic stack to spider
		emancmd  = "proc2d "
		emancmd += emanstackfile+" "
		spiderstack = os.path.join(self.params['rundir'], str(classnum), "otrstack"+self.timestamp+".spi")
		apFile.removeFile(spiderstack, warn=True)
		emancmd += spiderstack+" "

		emancmd += "spiderswap edgenorm"
		starttime = time.time()
		apDisplay.printColor("Running spider stack conversion this can take a while", "cyan")
		apEMAN.executeEmanCmd(emancmd, verbose=True)
		apDisplay.printColor("finished eman in "+apDisplay.timeString(time.time()-starttime), "cyan")
		return spiderstack
	def getGoodParticles(self, classpartdatas, norefclassnum):
		includeParticle = []
		tiltParticlesData = []
		nopairParticle = 0
		excludeParticle = 0
		apDisplay.printMsg("sorting particles")
		for classpart in classpartdatas:
			#write to text file
			classnum = classpart['classNumber']-1
			if classnum == norefclassnum:
				notstackpartnum = classpart['noref_particle']['particle']['particleNumber']
				tiltstackpartdata = apTiltPair.getStackParticleTiltPair(self.params['notstackid'],
					notstackpartnum, self.params['tiltstackid'])
				if tiltstackpartdata is None:
					nopairParticle += 1
				else:
					emantiltstackpartnum = tiltstackpartdata['particleNumber']-1
					includeParticle.append(emantiltstackpartnum)
					tiltParticlesData.append(tiltstackpartdata)
			else:
				excludeParticle += 1
		includeParticle.sort()
		apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding \n\t"
			+str(excludeParticle)+" particles with "+str(nopairParticle)+" unpaired particles")
		if len(includeParticle) < 1:
			apDisplay.printError("No particles were kept")
		return includeParticle, tiltParticlesData
	def findLastCompletedIteration(self):
		#recondir = os.path.join(self.params['rundir'], "recon")
		recondir = self.params['rundir']
		iternum = 0
		stop = False
		while stop is False:
			## check if iteration is complete
			iternum += 1

			class1Volume = "recon_it%03d_half1_model.star"%(iternum)
			class2Volume = "recon_it%03d_half2_model.star"%(iternum)
			class1Volumepath = os.path.join(recondir,class1Volume)
			class2Volumepath = os.path.join(recondir,class2Volume)
			if not os.path.isfile(class1Volumepath) or not os.path.isfile(class2Volumepath):
				apDisplay.printWarning("Model.star file %s or %s is missing"%(class1Volumepath, class2Volumepath))
				stop = True
				break

		### set last working iteration
		numiter = iternum-1
		if numiter < 1:
			apDisplay.printError("No iterations were found")
		apDisplay.printColor("Found %d complete iterations"%(numiter), "green")

		return numiter	
	def parseParticleDataIterationFile(self,paramfile,test=False):
		'''
		Parse the data.star file from Relion 1.2
		'''
		if not os.path.isfile(paramfile):
			apDisplay.printError("Relion data.star file does not exist: %s" % (paramfile))
		apDisplay.printMsg("Parsing parameter file: %s" % (paramfile))
			
		# Use the star file class to read the parameter file
		f = starFile.StarFile( paramfile )
		f.read()
		dataBlock = f.getDataBlock("data_images")
		loopDict  = dataBlock.getLoopDict() # there is only one loop in the data_images block
		partnum   = 1 # partnum starts with 1, not 0
		partdict  = {}
		
		for valueSet in loopDict:
			paramdict = {
					'partnum'      : partnum,   
					'angleRot'     : float(valueSet["_rlnAngleRot"]),
					'angleTilt'    : float(valueSet["_rlnAngleTilt"]),
					'anglePsi'     : float(valueSet["_rlnAnglePsi"]),
					'originX'      : float(valueSet["_rlnOriginX"]),
					'originY'      : float(valueSet["_rlnOriginY"]),
			}
			partdict[paramdict['partnum']] = paramdict
			partnum = partnum + 1
				
		if len(partdict) < 2:
			apDisplay.printError("No particles found in particle data file %s" % (paramfile))
	
		apDisplay.printMsg("Processed %d particles" % (len(partdict)))
		return partdict	
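	# Minimal sketch of the Relion data.star block this parser expects
	# (hypothetical values; real files carry many more _rln columns):
	#
	#   data_images
	#   loop_
	#   _rlnAngleRot #1
	#   _rlnAngleTilt #2
	#   _rlnAnglePsi #3
	#   _rlnOriginX #4
	#   _rlnOriginY #5
	#   12.5 87.3 -44.1 1.2 -0.8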
 def processImage(self, imgdata):
         """
         this is the main component of the script
         where all the processing is done
         """
         apDisplay.printError("you did not create a 'processImage' function in your script")
         raise NotImplementedError()
def getTiltTransformFromParticle(partdata):
        t0 = time.time()

        ### figure out if its particle 1 or 2
        tiltpartq1 = appiondata.ApTiltParticlePairData()
        tiltpartq1['particle1'] = partdata
        tiltpartdatas1 = tiltpartq1.query(results=1, readimages=False)

        tiltpartq2 = appiondata.ApTiltParticlePairData()
        tiltpartq2['particle2'] = partdata
        tiltpartdatas2 = tiltpartq2.query(results=1, readimages=False)

        if tiltpartdatas1 and not tiltpartdatas2:
                imgnum = 1
                transformdata = tiltpartdatas1[0]['transform']
                otherpartdata = tiltpartdatas1[0]['particle2']
        elif not tiltpartdatas1 and tiltpartdatas2:
                imgnum = 2
                transformdata = tiltpartdatas2[0]['transform']
                otherpartdata = tiltpartdatas2[0]['particle1']
        else:
                print partdata
                print tiltpartdatas1
                print tiltpartdatas2
                apDisplay.printError("failed to get tilt pair data")

        if time.time()-t0 > 1.0:
                apDisplay.printMsg("long getTiltTransFromPart1 "+apDisplay.timeString(time.time()-t0))
        return imgnum, transformdata, otherpartdata
def getParticleTiltRotationAnglesOTR(stackpartdata):
        partdata = stackpartdata['particle']
        imgnum, transformdata, otherpartdata = getTiltTransformFromParticle(partdata)

        t0 = time.time()
        tiltangle1, tiltangle2 = apDatabase.getTiltAnglesDegFromTransform(transformdata)
        if time.time()-t0 > 1.0:
                apDisplay.printMsg("long angle query "+apDisplay.timeString(time.time()-t0))

        if imgnum == 1:
                ### negative case, tilt picker theta < 0
                tiltrot = transformdata['image1_rotation']
                theta = transformdata['tilt_angle']
                notrot   = transformdata['image2_rotation']
                tiltangle = tiltangle1 - tiltangle2
        elif imgnum == 2:
                ### positive case, tilt picker theta > 0
                tiltrot = transformdata['image2_rotation']
                theta = transformdata['tilt_angle']
                notrot   = transformdata['image1_rotation']
                tiltangle = tiltangle2 - tiltangle1
        else:
                #no particle pair info was found or some other problem
                print partdata
                apDisplay.printError("failed to get tilt pair data or some other problem")

        if transformdata.timestamp < datetime.datetime(2009, 2, 19, 0, 0, 0):
                ### bugfix for switched tilt axis angles, before Feb 19, 2009
                #apDisplay.printWarning("Switching angles")
                temprot = notrot
                notrot = tiltrot
                tiltrot = temprot

        #print "tr=%.2f, th=%.2f, nr=%.2f, tilt=%.2f"%(tiltrot, theta, notrot, tiltangle)
        return tiltrot, theta, notrot, tiltangle
def normalizeVol(volfile, dataext=".spi"):
	"""
	inputs:
		volume
	outputs:
		volume
	"""
	### setup
	starttime = time.time()
	volfile = spyder.fileFilter(volfile)
	if not os.path.isfile(volfile+dataext):
		apDisplay.printError("volume file not found: "+volfile+dataext)
		
	mySpider = spyder.SpiderSession(dataext=dataext, logo=True)
	### read out the statistics of the volume
	mySpider.toSpider("FS x11,x12", 
		volfile, #filename for volume
	)
	mySpider.toSpider("IF(x12.LT.0.0)x12=-x12")
	### set all values to positive
	mySpider.toSpider("AR",
		volfile, #filename for volume
		"_1",
		"(P1+x12)",
	)
	### save file
	mySpider.toSpider("CP",
		"_1",
		volfile, #filename for volume
	)
	
	mySpider.close()
	apDisplay.printColor("finished normalizing the volume to set all values to be positive"+apDisplay.timeString(time.time()-starttime), "cyan")
	return	
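# Usage sketch (hypothetical filename): shift a SPIDER volume so that all
# voxel values become positive; ".spi" is assumed by the default dataext.
#
#	normalizeVol("volume001")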
 def readBatchUploadInfo(self):
         # in this example, the batch script file should be separated by tab
         # see example in function readUploadInfo for format
         batchfilename = self.params['batchscript']
         if not os.path.exists(batchfilename):
                 apDisplay.printError('Batch file %s does not exist' % batchfilename)
                 return []
         batchfile = open(batchfilename,'r')
         lines = batchfile.readlines()
         batchfile.close()
         batchinfo = []
         count = 0
         for line in lines:
                 count += 1
                 #remove white space at ends
                 sline = line.strip()
                 if ' ' in sline:
                         apDisplay.printWarning("There is a space in the batch file on line %d"%(count))
                 #split the line on tabs
                 cols = sline.split('\t')
                 if len(cols) > 1:
                         batchinfo.append(cols)
                 else:
                         apDisplay.printWarning("Skipping line %d"%(count))
         return batchinfo
	def setupJob(self, scratchdir, imgdata, command):
		jobname=imgdata['filename']+'.csh'
		jobpath=os.path.join(scratchdir,jobname)
		f=open(jobpath,'w')
		
		f.write('#!/bin/csh\n')
		if self.params['queue_style']=='MOAB' or self.params['queue_style']=='PBS':
			f.write('#%s -l nodes=1:ppn=%d\n' % (self.params['queue_style'], self.params['queue_ppn']))
			f.write('#%s -l walltime=%d:00:00\n' % (self.params['queue_style'], self.params['walltime']))
			f.write('#%s -l pmem=%dgb\n\n' % (self.params['queue_style'], self.params['queue_memory']))
		elif self.params['queue_style']=='SLURM':
			f.write('#SBATCH -N 1\n')
			f.write('#SBATCH -n %d\n' % (self.params['queue_ppn']))
			f.write('#SBATCH -t %d:00:00\n' % (self.params['walltime']))
			f.write('#SBATCH --mem-per-cpu=%dG\n\n' % (self.params['queue_memory'])) # in gigabytes
		else:
			apDisplay.printError('Queue style %s not supported' % (self.params['queue_style']))
			
		f.write('cd %s\n\n' % scratchdir )
		s=''
		for arg in command:
			s+=arg
			s+=' '
			if len(s) > 75:
				f.write('%s \\\n' % s )
				s=' '
		f.write('%s \n' % s )
		donefile=imgdata['filename']+'.done'
		f.write('touch %s\n' % (donefile) )

		f.close()
		print jobpath
		return(jobname,donefile)
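	# Example of the job file this writes for queue_style='SLURM'
	# (hypothetical values; the command is wrapped roughly every 75 characters):
	#
	#   #!/bin/csh
	#   #SBATCH -N 1
	#   #SBATCH -n 8
	#   #SBATCH -t 24:00:00
	#   #SBATCH --mem-per-cpu=4G
	#
	#   cd /scratch/session01
	#
	#   some_command --input img001.mrc \
	#    --extra-options
	#   touch img001.done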
def backprojectCG(stackfile, eulerdocfile, volfile, numpart, pixrad, dataext=".spi"):
	"""
	inputs:
		stack, in spider format
		eulerdocfile
	outputs:
		volume
	"""
	### setup
	starttime = time.time()
	stackfile = spyder.fileFilter(stackfile)
	eulerdocfile = spyder.fileFilter(eulerdocfile)
	volfile = spyder.fileFilter(volfile)
	if not os.path.isfile(stackfile+dataext):
		apDisplay.printError("stack file not found: "+stackfile+dataext)
	if not os.path.isfile(eulerdocfile+dataext):
		apDisplay.printError("euler doc file not found: "+eulerdocfile+dataext)
	apFile.removeFile(volfile+dataext)
	nproc = apParam.getNumProcessors()
	mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)
	mySpider.toSpider("BP CG", 
		stackfile+"@*****", #stack file
		"1-%d"%(numpart), #number of particles
		str(pixrad), #particle radius
		eulerdocfile, #angle doc file
		"N", #has symmetry?, does not work
		volfile, #filename for volume
 		"%.1e,%.1f" % (1.0e-5, 0.0), #error, chi^2 limits
 		"%d,%d" % (25,1), #iterations, 1st derivative mode
 		"2000", #lambda - higher=less sensitive to noise
	)
	mySpider.close()
	apDisplay.printColor("finished backprojection in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return
def backproject3F(stackfile, eulerdocfile, volfile, numpart, dataext=".spi"):
	"""
	inputs:
		stack, in spider format
		eulerdocfile
	outputs:
		volume
	"""
	### setup
	starttime = time.time()
	stackfile = spyder.fileFilter(stackfile)
	eulerdocfile = spyder.fileFilter(eulerdocfile)
	volfile = spyder.fileFilter(volfile)
	if not os.path.isfile(stackfile+dataext):
		apDisplay.printError("stack file not found: "+stackfile+dataext)
	if not os.path.isfile(eulerdocfile+dataext):
		apDisplay.printError("euler doc file not found: "+eulerdocfile+dataext)
	apFile.removeFile(volfile+dataext)
	nproc = apParam.getNumProcessors()
	mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)
	mySpider.toSpider("BP 3F", 
		stackfile+"@*****", #stack file
		"1-%d"%(numpart), #number of particles
		eulerdocfile, #angle doc file
		"*", #input symmetry file, '*' for skip
		volfile, #filename for volume
	)
	mySpider.close()
	apDisplay.printColor("finished backprojection in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return
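# Usage sketch (hypothetical filenames): the two routines above are
# alternatives -- SPIDER's "BP CG" solves the reconstruction iteratively by
# conjugate gradients and needs a particle radius, while "BP 3F" does a
# single-pass Fourier backprojection. A typical call, followed by the
# normalization helper defined earlier, might be:
#
#	backproject3F("alignstack001", "eulerdoc001", "volume001", numpart=100)
#	normalizeVol("volume001")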
	def createParamsFromGoodAligner(self,alignerid):
		q = appiondata.ApTomoAlignerParamsData()
		alignerdata = q.direct_query(alignerid)
		self.params['goodcycle'] = alignerdata['refine_cycle']['cycle']
		alignrundata = alignerdata['alignrun']
		if self.params['runname'] is None:
			self.params['runname'] = alignrundata['name']
		else:
			if self.params['runname'] != alignrundata['name']:
				apDisplay.printError("Alignment run name can not be changed between cycles")
		if self.params['description'] is None:
			self.params['description'] = alignrundata['description']
		q = appiondata.ApTiltsInAlignRunData(alignrun=alignrundata)
		results = q.query()
		if results:
			if len(results) == 1:
				tiltdata = results[0]['tiltseries']
				self.params['tiltseries'] = tiltdata
				self.params['tiltseriesnumber'] = tiltdata['number']
				self.params['othertiltseries'] = None
			else:
				runpathdata = alignrundata['path']
				for tiltdata in results:
					pathdata = tiltdata['path']
					if pathdata.dbid == runpathdata.dbid:
						self.params['tiltseries'] = tiltdata
						self.params['tiltseriesnumber'] = tiltdata['number']
					else:
						self.params['othertiltseries'] = tiltdata
						self.params['othertilt'] = tiltdata['number']
def readRunParameters(paramfile):
        if not os.path.isfile(paramfile):
                apDisplay.printError("Could not find run parameters file: "+paramfile)
        pf = open(paramfile, "r")
        runparams = cPickle.load(pf)
        pf.close()
        return runparams
def getAppionDirectory():
        """
        Used by appionLoop and ScriptProgramRun logging in all appionScript
        """
        appiondir = None
        this_file = inspect.currentframe().f_code.co_filename
        libdir = os.path.dirname(this_file)
        libdir = os.path.abspath(libdir)
        trypath = os.path.dirname(libdir)
        if os.path.isdir(trypath):
                appiondir = trypath
                return appiondir

        trypath = os.environ.get('APPIONDIR')
        if trypath and os.path.isdir(trypath):
                appiondir = trypath
                return appiondir

        user = os.getlogin() #os.environ.get('USER')
        trypath = "/home/"+user+"/pyappion"
        if os.path.isdir(trypath):
                appiondir = trypath
                return appiondir

        apDisplay.printError("environmental variable, APPIONDIR, is not defined.\n"+
                "Did you source useappion.sh?")
	def start(self):
		### universal particle counter
		self.partnum = 1

		### final stack file
		self.combinefile = os.path.join( self.params['rundir'], self.params['stackfilename'] )
		if os.path.isfile(self.combinefile):
			apDisplay.printError("A stack with name "+self.params['stackfilename']+" and path "
				+self.params['rundir']+" already exists.")

		### loop through stacks
		for stackstr in self.params['stackids']:
			stackid = int(stackstr)

			### get stack data
			stackdata = apStack.getOnlyStackData(stackid)

			### append particle to stack file
			self.appendToStack(stackdata)

			if self.params['commit'] is True:
				### insert stack data
				apDisplay.printColor("inserting new stack particles from stackid="+str(stackid), "cyan")
				self.commitStack(stackid)
			else:
				apDisplay.printWarning("not committing data to database")

		apStack.averageStack(stack=self.combinefile)
    def createSpiderFile(self):
        """
                takes the stack file and creates a spider file ready for processing
                """
        emancmd = "proc2d "
        if not os.path.isfile(self.stack["file"]):
            apDisplay.printError("stackfile does not exist: " + self.stack["file"])
        emancmd += self.stack["file"] + " "

        spiderstack = os.path.join(self.params["rundir"], "start.spi")
        apFile.removeFile(spiderstack, warn=True)
        emancmd += spiderstack + " "

        emancmd += "apix=" + str(self.stack["apix"]) + " "
        if self.params["lowpass"] > 0:
            emancmd += "lp=" + str(self.params["lowpass"]) + " "
        emancmd += "last=" + str(self.params["numpart"] - 1) + " "
        emancmd += "shrink=" + str(self.params["bin"]) + " "
        clipsize = int(math.floor(self.stack["boxsize"] / self.params["bin"] / 2.0) * self.params["bin"] * 2)
        emancmd += "clip=" + str(clipsize) + "," + str(clipsize) + " "
        emancmd += "spiderswap edgenorm"
        starttime = time.time()
        apDisplay.printColor("Running spider stack conversion this can take a while", "cyan")
        apEMAN.executeEmanCmd(emancmd, verbose=True)
        apDisplay.printColor("finished eman in " + apDisplay.timeString(time.time() - starttime), "cyan")
        return spiderstack
	def start(self):
		self.setFileName()

		scale =  float(self.params['oldapix'])/self.params['newapix']

		mrcname = os.path.join(self.params['rundir'], self.params['name']+".mrc")
		origmodel = self.params['file']
		if os.path.isfile(mrcname):
			apDisplay.printError("File exists")

		if (abs(self.params['oldapix'] - self.params['newapix']) > 1.0e-2 or
			abs(self.params['oldbox'] - self.params['newbox']) > 1.0e-1):
			### rescale old model to a new size
			apDisplay.printWarning("rescaling original model to a new size")
			scale = float(self.params['oldapix'])/self.params['newapix']
			apDisplay.printMsg("rescaling model by scale factor of %.4f"%(scale))
			apVolume.rescaleVolume(origmodel, mrcname,
				self.params['oldapix'], self.params['newapix'], self.params['newbox'])
		else:
			### simple upload, just copy file to models folder
			apDisplay.printMsg("copying original model to a new location: "+mrcname)
			shutil.copyfile(origmodel, mrcname)

		if self.params['viper2eman'] is True:
			apVolume.viper2eman(mrcname, mrcname, apix=self.params['newapix'])

		### render chimera images of model
		contour = self.params['contour']
		if self.params['mass'] is not None:
			apChimera.setVolumeMass(mrcname, self.params['newapix'], self.params['mass'])
			contour = 1.0
		apChimera.renderSnapshots(mrcname, contour=contour,
			zoom=self.params['zoom'], sym=self.params['symdata']['eman_name'])

		self.insertModel(mrcname)
	def readRunParameters(self):
		paramfile = "simple-"+self.params['timestamp']+"-params.pickle"
		if not os.path.isfile(paramfile):
			apDisplay.printError("Could not find run parameters file: "+paramfile)
		f = open(paramfile, "r")
		runparams = cPickle.load(f)
		f.close()
		return runparams
        def _initializeDoneDict(self):
                """
                reads or creates a done dictionary
                """
                self.donedictfile = os.path.join(self.params['rundir'] , self.functionname+".donedict")
                if os.path.isfile(self.donedictfile) and self.params['continue'] == True:
                        ### unpickle previously done dictionary
                        apDisplay.printMsg("Reading old done dictionary: "+os.path.basename(self.donedictfile))
                        f = open(self.donedictfile,'r')
                        self.donedict = cPickle.load(f)
                        f.close()
                        if not 'commit' in self.donedict or self.donedict['commit'] == self.params['commit']:
                                ### all is well
                                apDisplay.printMsg("Found "+str(len(self.donedict))+" done dictionary entries")
                                return
                        elif self.donedict['commit'] is True and self.params['commit'] is not True:
                                ### die
                                apDisplay.printError("Commit flag was enabled and is now disabled, create a new runname")
                        else:
                                ### set up fresh dictionary
                                apDisplay.printWarning("'--commit' flag was changed, creating new done dictionary")

                ### set up fresh dictionary
                self.donedict = {}
                self.donedict['commit'] = self.params['commit']
                apDisplay.printMsg("Creating new done dictionary: "+os.path.basename(self.donedictfile))

                ### write donedict to file
                f = open(self.donedictfile, 'w')
                cPickle.dump(self.donedict, f)
                f.close()
                return
	def getNumberOfIterations(self):
		iternum = 0
		stop = False
		while stop is False:
			## check if iteration is complete
			iternum += 1

			paramfile = "params.iter%03d.par"%(iternum)
			if not os.path.isfile(paramfile):
				apDisplay.printWarning("Parameter file %s is missing"%(paramfile))
				stop = True
				break

			imagicvolume = "threed.%03da.hed"%(iternum)
			if not os.path.isfile(imagicvolume):
				apDisplay.printWarning("Volume file %s is missing"%(imagicvolume))
				stop = True
				break

			combineshell = "iter%03d/frealign.iter%03d.combine.sh"%(iternum, iternum)
			if not os.path.isfile(combineshell):
				apDisplay.printWarning("Shell file %s is missing"%(combineshell))
				stop = True
				break

		### set last working iteration
		numiter = iternum-1
		if numiter < 1:
			apDisplay.printError("No iterations were found")
		apDisplay.printColor("Found %d complete iterations"%(numiter), "green")

		return numiter
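	# Files checked for each iteration above; all three must exist for the
	# iteration to count as complete (names shown for iteration 1):
	#   params.iter001.par                    -- parameter file
	#   threed.001a.hed                       -- IMAGIC volume
	#   iter001/frealign.iter001.combine.sh   -- combine shell script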
Example #31
	return params

if __name__=="__main__":
	params = parseOptions()
	apProject.setDBfromProjectId(params['projectid'])

	stackdata = apStack.getOnlyStackData(params['stackid'])
	stackpath = stackdata['path']['path']
	# generate stack if it doesn't exist.
	if not os.path.isdir(stackpath):
		os.makedirs(stackpath)
	fname = os.path.join(stackpath, stackdata['name'])

	# check if stack file already exists
	if os.path.isfile(fname):
		apDisplay.printError("file: '%s' already exists"%fname)
 
	vstackdata = apStack.getVirtualStackParticlesFromId(params['stackid'])
	plist = [int(p['particleNumber'])-1 for p in vstackdata['particles']]

	a = proc2dLib.RunProc2d()
	a.setValue('infile',vstackdata['filename'])
	a.setValue('outfile',fname)
	a.setValue('list',plist)
	a.setValue('apix',apStack.getStackPixelSizeFromStackId(params['stackid']))

	apDisplay.printMsg("generating stack: '%s' with %i particles"%(fname,len(plist)))
	a.run()

	outavg = os.path.join(stackpath, "average.mrc")
	if not os.path.isfile(outavg):
Example #32
    def insertRefineParticleData(self,
                                 cls,
                                 iteration,
                                 eulers,
                                 badprtls,
                                 refineq,
                                 numcls,
                                 euler_convention='zxz'):
        # get the corresponding proj number & eulers from filename
        replace = re.compile('\D')
        projnum = int(replace.sub('', cls))

        clsfilename = os.path.join(self.params['tmpdir'], cls)
        sys.stderr.write(".")
        #f=open(clsfilename)
        #apDisplay.printMsg("Class "+str(projnum+1)+" of "+str(numcls)+": inserting "
        #	+str(len(f.readlines())-2)+" particles")
        #f.close()

        # for each cls file get alignments for particles
        f = open(clsfilename)
        coranfail = False
        for line in f:
            # skip line if not a particle
            if re.search("start", line):
                prtlaliq = appiondata.ApRefineParticleData()

                # gather alignment data from line
                ali = line.split()
                prtlnum = int(ali[0])

                # check if bad particle
                if prtlnum in badprtls:
                    prtlaliq['refine_keep'] = False
                else:
                    prtlaliq['refine_keep'] = True

                prtlnum += 1  # offset for EMAN
                qualf = float(ali[2].strip(','))
                other = ali[3].split(',')
                rot = float(other[0]) * 180. / math.pi
                shx = float(other[1])
                shy = float(other[2])
                if (other[3] == '1'):
                    prtlaliq['mirror'] = True

                # SPIDER coran kept particle
                corank = None
                if self.params['package'] == 'EMAN/SpiCoran':
                    if len(other) > 4:
                        corank = bool(int(other[4]))
                    else:
                        if coranfail is False:
                            apDisplay.printWarning(
                                "Coran failed on this iteration")
                            coranfail = True

                # message passing kept particle
                if self.params['package'] == 'EMAN/MsgP' and len(ali) > 4:
                    msgk = bool(int(ali[4]))
                else:
                    msgk = None
                # find particle in stack database
                defid = self.stackmapping[prtlnum]
                stackp = appiondata.ApStackParticleData.direct_query(defid)

                if not stackp:
                    apDisplay.printError("particle " + str(prtlnum) +
                                         " not in stack id=" +
                                         str(self.params['stack'].dbid))

                # insert classification info
                prtlaliq['refineIter'] = refineq
                prtlaliq['particle'] = stackp
                prtlaliq['shiftx'] = shx
                prtlaliq['shifty'] = shy
                prtlaliq['euler1'] = eulers[projnum][0]
                prtlaliq['euler2'] = eulers[projnum][1]
                prtlaliq['euler3'] = rot
                prtlaliq['quality_factor'] = qualf
                if self.params['package'] == 'EMAN/MsgP':
                    prtlaliq['postRefine_keep'] = msgk
                else:
                    prtlaliq['postRefine_keep'] = corank
                prtlaliq['euler_convention'] = euler_convention

                #apDisplay.printMsg("inserting Particle Classification Data into database")
                if self.params['commit'] is True:
                    prtlaliq.insert()

        f.close()
        return
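    # Hypothetical cls-file "start" line, reconstructed from the parsing above
    # (exact token layout may differ between EMAN versions):
    #   227 start 0.8342, 0.1257,4.5,-2.0,1
    # i.e. particle number, "start" tag, quality factor, then
    # "rotation(radians),xshift,yshift,mirror[,coran-keep]"; EMAN/MsgP runs may
    # append one more field holding the message-passing keep flag.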
	def checkConflicts(self):
		if self.params['stackid'] is None:
			apDisplay.printError("stack id was not defined")
		if self.params['modelid'] is None:
			apDisplay.printError("model id was not defined")
		if self.params['runname'] is None:
			apDisplay.printError("missing a reconstruction name")
		if self.params['last'] is None:
			self.params['last'] = apStack.getNumberStackParticlesFromId(self.params['stackid'])
		self.boxsize = apStack.getStackBoxsize(self.params['stackid'], msg=False)
		self.apix = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		if self.params['reconstackid'] is not None:	
			reconboxsize = apStack.getStackBoxsize(self.params['reconstackid'], msg=False)
			reconapix = apStack.getStackPixelSizeFromStackId(self.params['reconstackid'])
			refinenumpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
			reconnumpart = apStack.getNumberStackParticlesFromId(self.params['reconstackid'])
			if reconboxsize != self.boxsize:
				apDisplay.printError("Boxsizes do not match between stacks")
			if reconapix != self.apix:
				apDisplay.printError("Pixel sizes do not match between stacks")
			if refinenumpart != reconnumpart:
				apDisplay.printError("Particle counts do not match between stacks")
		maxmask = math.floor(self.apix*(self.boxsize-10)/2.0)
		if self.params['mask'] is None:
			apDisplay.printWarning("mask was not defined, setting to boxsize: %d"%(maxmask))
			self.params['mask'] = maxmask
		if self.params['mask'] > maxmask:
			apDisplay.printWarning("mask was too big, setting to boxsize: %d"%(maxmask))
			self.params['mask'] = maxmask
		if self.params['noctf'] is True:
			apDisplay.printWarning("Using no CTF method")
			self.params['wgh'] = -1.0
		if self.params['nodes'] > 1 and self.params['cluster'] is False:
			apDisplay.printError("cluster mode must be enabled to run on more than 1 node")
		if self.params['ppn'] is None:
			if self.params['cluster'] is True:
				apDisplay.printError("you must define ppn for cluster mode")
			self.params['ppn'] = apParam.getNumProcessors()
			apDisplay.printMsg("Setting number of processors to %d"%(self.params['ppn']))
		if self.params['rpn'] is None:
			self.params['rpn'] = self.params['ppn']
		self.params['nproc'] = self.params['nodes']*self.params['rpn']
		### get the symmetry data
		if self.params['sym'] is None:
			apDisplay.printError("Symmetry was not defined")
		else:
			self.symmdata = apSymmetry.findSymmetry(self.params['sym'])
			self.params['symm_id'] = self.symmdata.dbid
			self.params['symm_name'] = self.symmdata['eman_name']
			apDisplay.printMsg("Selected symmetry %s with id %s"%(self.symmdata['eman_name'], self.symmdata.dbid))
		### set cs value
		self.params['cs'] = apInstrument.getCsValueFromSession(self.getSessionData())
Example #34
    def insertRefinementRun(self):
        runq = appiondata.ApRefineRunData()
        #first two must be unique
        runq['runname'] = self.params['runname']
        runq['stack'] = self.params['stack']

        #Recon upload can be continued
        earlyresult = runq.query(results=1)
        if earlyresult:
            apDisplay.printWarning(
                "Run already exists in the database.\nIdentical data will not be reinserted"
            )
        # an empty string is not the same as None for a Sinedon query
        paramdescription = self.params['description']
        if not paramdescription:
            paramdescription = None

        runq['job'] = self.params['jobinfo']
        runq['initialModel'] = self.params['model']
        runq['package'] = self.params['package']
        runq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        runq['description'] = paramdescription
        runq['initialModel'] = self.params['model']
        runq['num_iter'] = len(self.iterationdatas)

        result = runq.query(results=1)

        if earlyresult and not result:
            if self.params['commit'] is True:
                apDisplay.printError("Refinement Run parameters have changed")
            else:
                apDisplay.printWarning(
                    "Refinement Run parameters have changed")

        # get stack apix
        self.params['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stack'].dbid)

        apDisplay.printMsg("inserting Refinement Run into database")
        if self.params['commit'] is True:
            runq.insert()
        else:
            apDisplay.printWarning("not committing results to database")

        #if we insert runq then this returns no results !!!
        # this is a workaround (annoying & bad)
        runq = appiondata.ApRefineRunData()
        runq['runname'] = self.params['runname']
        runq['stack'] = self.params['stack']
        runq['job'] = self.params['jobinfo']
        runq['initialModel'] = self.params['model']
        runq['package'] = self.params['package']
        runq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        runq['description'] = paramdescription
        runq['package'] = self.params['package']
        runq['initialModel'] = self.params['model']

        result = runq.query(results=1)

        # save run entry in the parameters
        if result:
            self.params['refineRun'] = result[0]
        elif self.params['commit'] is True:
            apDisplay.printWarning(
                "Refinement Run was not found, setting to inserted values")
            self.params['refineRun'] = runq
        else:
            apDisplay.printWarning(
                "Refinement Run was not found, setting to 'None'")
            self.params['refineRun'] = None
        return True
Example #35
    def start(self):
        if self.params['rundir'] is None or not os.path.isdir(
                self.params['rundir']):
            apDisplay.printError("upload directory does not exist: " +
                                 str(self.params['rundir']))

        ### create temp directory for extracting data
        self.params['tmpdir'] = os.path.join(self.params['rundir'], "temp")
        apParam.createDirectory(self.params['tmpdir'], warning=True)

        ### make sure that the stack & model IDs exist in database
        emanJobFile = self.findEmanJobFile()
        self.params['stack'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stackmapping = apRecon.partnum2defid(self.params['stackid'])
        self.params['model'] = appiondata.ApInitialModelData.direct_query(
            self.params['modelid'])
        self.params['boxsize'] = apStack.getStackBoxsize(
            self.params['stackid'])

        ### parse out the refinement parameters from the log file
        self.parseLogFile()

        ### parse out the message passing subclassification parameters from the job/log file
        if self.params['package'] == 'EMAN/MsgP':
            self.parseMsgPassingParams()

        ### convert class average files from old to new format
        self.convertClassAvgFiles()

        ### get a list of the files in the directory
        self.listFiles()

        ### create a refinementRun entry in the database
        self.insertRefinementRun()

        if self.params['euleronly'] is False:
            ### insert the Iteration info
            for iteration in self.iterationdatas:
                ### if only uploading one iteration, skip to that one
                if self.params['oneiter'] and int(
                        iteration['num']) != self.params['oneiter']:
                    continue
                ### if beginning at later iteration, skip to that one
                if self.params['startiter'] and int(
                        iteration['num']) < self.params['startiter']:
                    continue
                ### if beginning at later iteration, skip to that one
                if self.params['enditer'] and int(
                        iteration['num']) > self.params['enditer']:
                    continue
                apDisplay.printColor(
                    "\nUploading iteration " + str(iteration['num']) + " of " +
                    str(len(self.iterationdatas)) + "\n", "green")
                for i in range(75):
                    sys.stderr.write("#")
                sys.stderr.write("\n")
                self.insertIteration(iteration)

        ### calculate euler jumps
        if self.params['commit'] is True:
            reconrunid = self.params['refineRun'].dbid
            stackid = self.params['stack'].dbid
            if self.params['oneiter'] is None and len(self.iterationdatas) > 1:
                apDisplay.printMsg("calculating euler jumpers for recon=" +
                                   str(reconrunid))
                eulerjump = apEulerJump.ApEulerJump()
                eulerjump.calculateEulerJumpsForEntireRecon(
                    reconrunid, stackid)
            ### coran keep plot
            if self.params['package'] == 'EMAN/SpiCoran':
                apCoranPlot.makeCoranKeepPlot(reconrunid)
            apRecon.setGoodBadParticlesFromReconId(reconrunid)
Example #36
	def insertAffinityPropagationRun(self, classes):
		### Preliminary data
		numclass = len(classes.keys())
		projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid'])
		alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
		pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))

		### Affinity Propagation Params object
		affpropq = appiondata.ApAffinityPropagationClusterParamsData()
		affpropq['mask_diam'] = 2.0*self.params['maskrad']
		affpropq['run_seconds'] = time.time()-self.t0
		affpropq['preference_type'] = self.params['preftype']

		### Align Analysis Run object
		analysisq = appiondata.ApAlignAnalysisRunData()
		analysisq['runname'] = self.params['runname']
		analysisq['path'] = pathdata
		analysisq['description'] = self.params['description']
		analysisq['alignstack'] = alignstackdata
		analysisq['hidden'] = False
		### linked through cluster not analysis

		### Clustering Run object
		clusterrunq = appiondata.ApClusteringRunData()
		clusterrunq['runname'] = self.params['runname']
		clusterrunq['description'] = self.params['description']
		clusterrunq['boxsize'] = alignstackdata['boxsize']
		clusterrunq['pixelsize'] = alignstackdata['pixelsize']
		clusterrunq['num_particles'] = self.params['numpart']
		clusterrunq['alignstack'] = alignstackdata
		clusterrunq['analysisrun'] = analysisq
		clusterrunq['affpropparams'] = affpropq

		### Clustering Stack object
		clusterstackq = appiondata.ApClusteringStackData()
		clusterstackq['avg_imagicfile'] = "classaverage-"+self.timestamp+".hed"
		clusterstackq['num_classes'] = numclass
		clusterstackq['clusterrun'] = clusterrunq
		clusterstackq['path'] = pathdata
		clusterstackq['hidden'] = False
		imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile'])
		if not os.path.isfile(imagicfile):
			apDisplay.printError("could not find average stack file: "+imagicfile)

		### looping over clusters
		apDisplay.printColor("Inserting particle classification data, please wait", "cyan")
		for i,classkey in enumerate(classes.keys()):
			classnum = i+1
			partlist = classes[classkey]
			#print "MINIMUM: ", min(partlist)
			classroot = "%s.%d"% (self.timestamp, classnum-1)
			classdocfile = os.path.join(self.params['rundir'], classroot)

			### Clustering Particle object
			clusterrefq = appiondata.ApClusteringReferenceData()
			clusterrefq['refnum'] = classnum
			clusterrefq['clusterrun'] = clusterrunq
			clusterrefq['path'] = pathdata
			clusterrefq['num_particles'] = len(partlist)
			#clusterrefq['ssnr_resolution'] = self.cluster_resolution[i]

			### looping over particles
			sys.stderr.write(".")
			for partnum in partlist:
				alignpartdata = self.getAlignParticleData(partnum, alignstackdata)

				### Clustering Particle objects
				clusterpartq = appiondata.ApClusteringParticleData()
				clusterpartq['clusterstack'] = clusterstackq
				clusterpartq['alignparticle'] = alignpartdata
				clusterpartq['partnum'] = partnum
				clusterpartq['refnum'] = classnum
				clusterpartq['clusterreference'] = clusterrefq

				### finally we can insert parameters
				if self.params['commit'] is True:
					clusterpartq.insert()
		return
Example #37
    def insertIteration(self, iteration):
        refineparamsq = appiondata.ApEmanRefineIterData()
        refineparamsq['ang'] = iteration['ang']
        refineparamsq['lpfilter'] = iteration['lpfilter']
        refineparamsq['hpfilter'] = iteration['hpfilter']
        refineparamsq['pad'] = iteration['pad']
        refineparamsq['EMAN_maxshift'] = iteration['maxshift']
        refineparamsq['EMAN_hard'] = iteration['hard']
        refineparamsq['EMAN_classkeep'] = iteration['classkeep']
        refineparamsq['EMAN_classiter'] = iteration['classiter']
        refineparamsq['EMAN_filt3d'] = iteration['filt3d']
        refineparamsq['EMAN_shrink'] = iteration['shrink']
        refineparamsq['EMAN_euler2'] = iteration['euler2']
        refineparamsq['EMAN_xfiles'] = iteration['xfiles']
        refineparamsq['EMAN_median'] = iteration['median']
        refineparamsq['EMAN_phasecls'] = iteration['phasecls']
        refineparamsq['EMAN_fscls'] = iteration['fscls']
        refineparamsq['EMAN_refine'] = iteration['refine']
        refineparamsq['EMAN_goodbad'] = iteration['goodbad']
        refineparamsq['EMAN_perturb'] = iteration['perturb']
        refineparamsq['MsgP_cckeep'] = iteration['msgpasskeep']
        refineparamsq['MsgP_minptls'] = iteration['msgpassminp']

        #create Chimera snapshots
        fscfile = os.path.join(self.params['rundir'],
                               "fsc.eotest." + iteration['num'])
        halfres = apRecon.calcRes(fscfile, self.params['boxsize'],
                                  self.params['apix'])
        if self.params['snapfilter']:
            halfres = self.params['snapfilter']
        volumeDensity = 'threed.' + iteration['num'] + 'a.mrc'
        volDensPath = os.path.join(self.params['rundir'], volumeDensity)

        apChimera.filterAndChimera(volDensPath,
                                   halfres,
                                   self.params['apix'],
                                   self.params['boxsize'],
                                   'snapshot',
                                   self.params['contour'],
                                   self.params['zoom'],
                                   sym=iteration['sym']['eman_name'],
                                   mass=self.params['mass'])

        ## uncomment this for chimera image only runs...
        if self.params['chimeraonly'] is True:
            return

        # insert resolution data
        if halfres != True:
            resData = self.getResolutionData(iteration)
        else:
            apDisplay.printWarning(
                "resolution reported as nan, not committing results to database"
            )
            return

        if self.params['package'] == 'EMAN':
            refineclassavg = 'classes_eman.' + iteration['num'] + '.img'
            postrefineclassavg = None
        elif self.params['package'] == 'EMAN/SpiCoran':
            refineclassavg = 'classes_eman.' + iteration['num'] + '.img'
            postrefineclassavg = 'classes_coran.' + iteration['num'] + '.img'
        elif self.params['package'] == 'EMAN/MsgP':
            refineclassavg = 'classes_eman.' + iteration['num'] + '.img'
            postrefineclassavg = 'classes_msgp.' + iteration['num'] + '.img'
        else:
            apDisplay.printError("Refinement Package Not Valid")

        # insert refinement results
        refineq = appiondata.ApRefineIterData()
        refineq['refineRun'] = self.params['refineRun']
        refineq['emanParams'] = refineparamsq
        refineq['iteration'] = iteration['num']
        refineq['resolution'] = resData
        refineq['rMeasure'] = self.getRMeasureData(iteration)
        refineq['mask'] = iteration['mask']
        refineq['imask'] = iteration['imask']
        refineq['symmetry'] = iteration['sym']
        refineq['exemplar'] = False
        classvar = 'classes.' + iteration['num'] + '.var.img'
        refineq['refineClassAverages'] = refineclassavg
        refineq['postRefineClassAverages'] = postrefineclassavg
        if classvar in self.params['classvars']:
            refineq['classVariance'] = classvar
        if volumeDensity in self.params['volumes']:
            refineq['volumeDensity'] = volumeDensity

        apDisplay.printMsg("inserting Refinement Data into database")
        if self.params['commit'] is True:
            refineq.insert()
        else:
            apDisplay.printWarning("not committing results to database")

        #insert FSC data
        fscfile = os.path.join(self.params['rundir'],
                               "fsc.eotest." + iteration['num'])
        self.insertFSC(fscfile, refineq, self.params['commit'])
        halfres = apRecon.calcRes(fscfile, self.params['boxsize'],
                                  self.params['apix'])
        apDisplay.printColor("FSC 0.5 Resolution: " + str(halfres), "cyan")

        # get projections eulers for iteration:
        eulers = self.getEulersFromProj(iteration['num'])

        # get list of bad particles for this iteration
        badprtls = self.readParticleLog(self.params['rundir'],
                                        iteration['num'])

        # expand cls.*.tar into temp file
        clsf = os.path.join(self.params['rundir'],
                            "cls." + iteration['num'] + ".tar")
        #print "reading",clsf
        clstar = tarfile.open(clsf)
        clslist = clstar.getmembers()
        clsnames = clstar.getnames()
        #print "extracting",clsf,"into temp directory"
        for clsfile in clslist:
            clstar.extract(clsfile, self.params['tmpdir'])
        clstar.close()

        # for each class, insert particle alignment info into database
        apDisplay.printColor(
            "Inserting Particle Classification Data for " +
            str(len(clsnames)) + " classes", "magenta")
        t0 = time.time()
        for cls in clsnames:
            self.insertRefineParticleData(cls, iteration, eulers, badprtls,
                                          refineq, len(clsnames))
        apDisplay.printColor(
            "\nFinished in " + apDisplay.timeString(time.time() - t0),
            "magenta")

        # remove temp directory
        for file in os.listdir(self.params['tmpdir']):
            os.remove(os.path.join(self.params['tmpdir'], file))
        os.rmdir(self.params['tmpdir'])

        #create euler freq map
        apDisplay.printMsg("creating euler frequency map")
        refrunid = int(self.params['refineRun'].dbid)
        iternum = int(iteration['num'])
        if self.params['package'] != 'EMAN':
            postrefine = True
        else:
            postrefine = False

        apEulerDraw.createEulerImages(refrunid,
                                      iternum,
                                      path=self.params['rundir'],
                                      postrefine=postrefine)
        return
        def getEulersForIteration2(self, reconid, tiltrunid, stackid, iteration=1):
                """
                returns all classdata for a particular refinement iteration
                """
                #get mirror and good/bad
                t0 = time.time()

                cachefile = "mysql_cache-recon"+str(reconid)+"-iter"+str(iteration)+".pickle"
                if os.path.isfile(cachefile):
                        apDisplay.printColor("Using cached MySQL query data at "+time.asctime(), "cyan")
                        cachef = open(cachefile, 'r')
                        eulertree = cPickle.load(cachef)
                        cachef.close()
                        apDisplay.printMsg("\nFetched "+str(len(eulertree))+" rows in "+apDisplay.timeString(time.time()-t0))
                        return eulertree

                query = (
                        "SELECT \n"
                                +"  tiltd.`REF|ApParticleData|particle1` AS partnum1, \n"
                                +"  tiltd.`REF|ApParticleData|particle2` AS partnum2 \n"
                                +"FROM `ApTiltParticlePairData` AS tiltd \n"
                                +"LEFT JOIN `ApImageTiltTransformData` as transform \n"
                                +"  ON tiltd.`REF|ApImageTiltTransformData|transform` = transform.`DEF_id` \n"
                                +"LEFT JOIN `ApStackParticleData` AS stpart1 \n"
                                +"  ON stpart1.`REF|ApParticleData|particle` = tiltd.`REF|ApParticleData|particle1` \n"
                                +"LEFT JOIN `ApStackParticleData` AS stpart2 \n"
                                +"  ON stpart2.`REF|ApParticleData|particle` = tiltd.`REF|ApParticleData|particle2` \n"
                                +"WHERE \n"
                                #+"  transform.`REF|ApSelectionRunData|tiltrun` = "+str(tiltrunid)+" \n"
                                #+"AND \n"
                                +"  stpart1.`REF|ApStackData|stack` = "+str(stackid)+" \n"
                                +"AND \n"
                                +"  stpart2.`REF|ApStackData|stack` = "+str(stackid)+" \n"
                                #+"LIMIT 50 \n"
                        )
                #print query
                apDisplay.printColor("Getting all particles via MySQL query at "+time.asctime(), "yellow")
                self.cursor.execute(query)
                numrows = int(self.cursor.rowcount)
                results = self.cursor.fetchall()
                apDisplay.printMsg("Fetched "+str(len(results))+" rows in "+apDisplay.timeString(time.time()-t0))

                if len(results) < 3:
                        print query
                        apDisplay.printError("No tilt pairs found in this stackid="+str(stackid))

                t0 = time.time()
                eulertree = []
                apDisplay.printColor("Getting individual particle info at "+time.asctime(), "yellow")
                count = 0
                for row in results:
                        count += 1
                        if count % 500 == 0:
                                sys.stderr.write(".")
                        eulerpair = { 'part1': {}, 'part2': {} }
                        partid1 = int(row[0])
                        partid2 = int(row[1])
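			# first lookup: refinement eulers, mirror flag, and keep/reject
			# status for particle 1 of the tilt pair at this iteration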
                        query = (
                                "SELECT \n"
                                        +"  stpart.`particleNumber` AS partnum, \n"
                                        +"  stpart.`DEF_id` AS dbid, \n"
                                        +"  partclass.`euler1` AS alt, partclass.`euler2` AS az, partclass.`euler3` AS phi, \n"
                                        +"  partclass.`mirror` AS mirror, partclass.`refine_keep` AS reject \n"
                                        +"FROM `ApStackParticleData` AS stpart \n"
                                        +"LEFT JOIN `ApRefineParticleData` AS partclass \n"
                                        +"  ON partclass.`REF|ApStackParticleData|particle` = stpart.`DEF_id` \n"
                                        +"LEFT JOIN `ApRefineIterData` AS refd \n"
                                        +"  ON partclass.`REF|ApRefineIterData|refineIter` = refd.`DEF_id` \n"
                                        +"WHERE "
                                        +"  stpart.`REF|ApParticleData|particle` = "+str(partid1)+" \n"
                                        +"AND \n"
                                        +"  refd.`REF|ApRefineRunData|refineRun` = "+str(reconid)+" \n"
                                        +"AND \n"
                                        +"  refd.`iteration` = "+str(iteration)+" \n"
                                        +"LIMIT 1 \n"
                        )
                        #print query
                        self.cursor.execute(query)
                        row = self.cursor.fetchone()
                        if not row:
                                continue
                        eulerpair['part1']['partid'] = int(row[0])
                        eulerpair['part1']['dbid']   = int(row[1])
                        eulerpair['part1']['euler1'] = float(row[2])
                        eulerpair['part1']['euler2'] = float(row[3])
                        eulerpair['part1']['euler3'] = float(row[4])
                        eulerpair['part1']['mirror'] = self.nullOrValue(row[5])
                        eulerpair['part1']['reject'] = not self.nullOrValue(row[6])
                        eulerpair['part1']['tilt']   = apStack.getStackParticleTilt(eulerpair['part1']['dbid'])
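			# second lookup: the same refinement data for particle 2 of the pair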
                        query = (
                                "SELECT \n"
                                        +"  stpart.`particleNumber` AS partnum, \n"
                                        +"  stpart.`DEF_id` AS dbid, \n"
                                        +"  partclass.`euler1` AS alt, partclass.`euler2` AS az, partclass.`euler3` AS phi, \n"
                                        +"  partclass.`mirror` AS mirror, partclass.`refine_keep` AS reject \n"
                                        +"FROM `ApStackParticleData` AS stpart \n"
                                        +"LEFT JOIN `ApRefineParticleData` AS partclass \n"
                                        +"  ON partclass.`REF|ApStackParticleData|particle` = stpart.`DEF_id` \n"
                                        +"LEFT JOIN `ApRefineIterData` AS refd \n"
                                        +"  ON partclass.`REF|ApRefineIterData|refineIter` = refd.`DEF_id` \n"
                                        +"WHERE "
                                        +"  stpart.`REF|ApParticleData|particle` = "+str(partid2)+" \n"
                                        +"AND \n"
                                        +"  refd.`REF|ApRefineRunData|refineRun` = "+str(reconid)+" \n"
                                        +"AND \n"
                                        +"  refd.`iteration` = "+str(iteration)+" \n"
                                        +"LIMIT 1 \n"
                        )
                        #print query
                        self.cursor.execute(query)
                        row = self.cursor.fetchone()
                        if not row:
                                continue
                        eulerpair['part2']['partid'] = int(row[0])
                        eulerpair['part2']['dbid']   = int(row[1])
                        eulerpair['part2']['euler1'] = float(row[2])
                        eulerpair['part2']['euler2'] = float(row[3])
                        eulerpair['part2']['euler3'] = float(row[4])
                        eulerpair['part2']['mirror'] = self.nullOrValue(row[5])
                        eulerpair['part2']['reject'] = not self.nullOrValue(row[6])
                        eulerpair['part2']['tilt']   = apStack.getStackParticleTilt(eulerpair['part2']['dbid'])
                        eulertree.append(eulerpair)
                        #end loop
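		# save the assembled euler tree so the next run hits the cache branch above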
		cachef = open(cachefile, 'w')  # note: open() takes a buffer size, not a permission mode, as its third argument
                cPickle.dump(eulertree, cachef)
                cachef.close()
                apDisplay.printMsg("\nFetched "+str(len(eulertree))+" rows in "+apDisplay.timeString(time.time()-t0))
                return eulertree
 def checkConflicts(self):
     if self.params['package'] not in self.packages:
         apDisplay.printError(
             "No valid reconstruction package method specified")
     # msgPassing requires a jobId in order to get the job file & the parameters
     if ((self.params['package'] == 'EMAN/MsgP'
          or self.params['package'] == 'EMAN/SpiCoran')
             and self.params['jobid'] is None):
         err = self.tryToGetJobID()
         if err:
             apDisplay.printError(
                 self.params['package'] +
                 " refinement requires a jobid. Please enter a jobId," +
                 " e.g. --jobid=734" + '\n' + err)
     if self.params['package'] != "EMAN/SpiCoran":
         ### check if we have coran files
         corans = glob.glob("classes_coran.*.hed")
         if corans and len(corans) > 0:
             apDisplay.printError(
                 "You used coran in the recon, but it was not selected\n" +
                 "set package to coran, e.g. --package='EMAN/SpiCoran'")
     if self.params['stackid'] is None:
         apDisplay.printError("please enter a stack id, e.g. --stackid=734")
     if self.params['modelid'] is None:
         apDisplay.printError(
             "please enter a starting model id, e.g. --modelid=34")
     if self.params['description'] is None:
         apDisplay.printError(
             "please enter a recon description, e.g. --description='my fav recon'"
         )
     if self.params['runname'] is None:
         apDisplay.printError(
             "please enter a recon run name, e.g. --runname=recon11")
     if self.params['jobid']:
         # if jobid is supplied, get the job info from the database
         self.params['jobinfo'] = self.getClusterJobDataFromID(
             self.params['jobid'])
         if self.params['jobinfo'] is None:
             apDisplay.printError("jobid supplied does not exist: " +
                                  str(self.params['jobid']))
     else:
         self.params['jobinfo'] = None
     if self.params['chimeraonly'] is True:
         self.params['commit'] = False
        def checkConflicts(self):
                if self.params['stackid'] is None:
                        apDisplay.printError("stack id was not defined")
                if self.params['description'] is None:
                        apDisplay.printError("run description was not defined")
                if self.params['templatelist'] is None:
                        apDisplay.printError("template list was not provided")

                if self.params['lastring'] is None:
                        apDisplay.printError("a last ring radius was not provided")
                if self.params['runname'] is None:
                        apDisplay.printError("run name was not defined")
                stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)
                stackfile = os.path.join(stackdata['path']['path'], stackdata['name'])
                if self.params['numpart'] > apFile.numImagesInStack(stackfile):
                        apDisplay.printError("trying to use more particles "+str(self.params['numpart'])
                                +" than available "+str(apFile.numImagesInStack(stackfile)))

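		### alignment rings must fit inside half the binned box size, minus a 2 pixel margin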
                boxsize = apStack.getStackBoxsize(self.params['stackid'])/self.params['bin']
                if self.params['lastring'] > boxsize/2-2:
                        apDisplay.printError("last ring radius is too big for boxsize "
                                +str(self.params['lastring'])+" > "+str(boxsize/2-2))
                if self.params['lastring']+self.params['xysearch'] > boxsize/2-2:
                        apDisplay.printError("last ring plus xysearch radius is too big for boxsize "
                                +str(self.params['lastring']+self.params['xysearch'])+" > "+str(boxsize/2-2))

                ### convert / check template data
                self.templatelist = self.params['templatelist'].strip().split(",")
                if not self.templatelist or type(self.templatelist) != type([]):
                        apDisplay.printError("could not parse template list="+self.params['templatelist'])
                self.params['numtemplate'] = len(self.templatelist)
                apDisplay.printMsg("Found "+str(self.params['numtemplate'])+" templates")
        def getEulersForIteration(self, reconid, tiltrunid, iteration=1):
                """
                returns all classdata for a particular refinement iteration
                """
                #get mirror and good/bad
                t0 = time.time()
                query = (
                        "SELECT \n"
                                +"  stpart1.particleNumber AS partnum1, \n"
                                +"  stpart1.`DEF_id` AS dbid1, \n"
                                +"  partclass1.`euler1` AS alt1, partclass1.`euler2` AS az1, partclass1.`euler3` AS phi1, \n"
                                +"  partclass1.`mirror` AS mirror1, partclass1.`refine_keep` AS reject1, \n"
                                +"  stpart2.particleNumber AS partnum2, \n"
                                +"  stpart2.`DEF_id` AS dbid2, \n"
                                +"  partclass2.`euler1` AS alt2, partclass2.`euler2` AS az2, partclass2.`euler3` AS phi2, \n"
                                +"  partclass2.`mirror` AS mirror2, partclass2.`refine_keep` AS reject2 \n"
                                +"FROM `ApTiltParticlePairData` AS tiltd \n"
                                +"LEFT JOIN `ApImageTiltTransformData` as transform \n"
                                +"  ON tiltd.`REF|ApImageTiltTransformData|transform`=transform.`DEF_id` \n"
                                +"LEFT JOIN `ApStackParticleData` AS stpart1 \n"
                                +"  ON stpart1.`REF|ApParticleData|particle` = tiltd.`REF|ApParticleData|particle1` \n"
                                +"LEFT JOIN `ApStackParticleData` AS stpart2 \n"
                                +"  ON stpart2.`REF|ApParticleData|particle` = tiltd.`REF|ApParticleData|particle2` \n"
                                +"LEFT JOIN `ApRefineParticleData` AS partclass1 \n"
                                +"  ON partclass1.`REF|ApStackParticleData|particle` = stpart1.`DEF_id` \n"
                                +"LEFT JOIN `ApRefineParticleData` AS partclass2 \n"
                                +"  ON partclass2.`REF|ApStackParticleData|particle` = stpart2.`DEF_id` \n"
                                +"LEFT JOIN `ApRefineIterData` AS refd1 \n"
                                +"  ON partclass1.`REF|ApRefineIterData|refineIter` = refd1.`DEF_id` \n"
                                +"LEFT JOIN `ApRefineIterData` AS refd2 \n"
                                +"  ON partclass2.`REF|ApRefineIterData|refineIter` = refd2.`DEF_id` \n"
                                +"WHERE transform.`REF|ApSelectionRunData|tiltrun` = "+str(tiltrunid)+" \n"
                                +"  AND refd1.`REF|ApRefineRunData|refineRun` = "+str(reconid)+" \n"
                                +"  AND refd1.`iteration` = "+str(iteration)+" \n"
                                +"  AND refd2.`REF|ApRefineRunData|refineRun` = "+str(reconid)+" \n"
                                +"  AND refd2.`iteration` = "+str(iteration)+" \n"
                                +"ORDER BY stpart1.particleNumber ASC \n"
                                #+"LIMIT 10 \n"
                        )
                #print query

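		# cache the raw SQL rows per recon/iteration; convertSQLtoEulerTree reshapes them below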
                cachefile = "mysql_cache-recon"+str(reconid)+"-iter"+str(iteration)+".pickle"
                if not os.path.isfile(cachefile):
                        apDisplay.printColor("Running MySQL query at "+time.asctime(), "yellow")
                        self.cursor.execute(query)
                        numrows = int(self.cursor.rowcount)
                        apDisplay.printMsg("Found "+str(numrows)+" rows in "+apDisplay.timeString(time.time()-t0))
                        apDisplay.printMsg("Fetching data at "+time.asctime())
                        results = self.cursor.fetchall()
			cachef = open(cachefile, 'w')  # note: open() takes a buffer size, not a permission mode, as its third argument
                        cPickle.dump(results, cachef)
                else:
                        apDisplay.printColor("Using cached MySQL query data at "+time.asctime(), "cyan")
                        cachef = open(cachefile, 'r')
                        results = cPickle.load(cachef)
                cachef.close()
                apDisplay.printMsg("Fetched "+str(len(results))+" rows in "+apDisplay.timeString(time.time()-t0))

                #convert to tree form
                eulertree = self.convertSQLtoEulerTree(results)

                if len(eulertree) < 10:
                        print query
                        apDisplay.printError("Failed to get euler angles")

                return eulertree
    def checkConflicts(self):
        if self.params['runname'] is None:
            apDisplay.printError("enter a stack run name, e.g. combinestack1")
        if self.params['description'] is None:
            apDisplay.printError("enter a stack description")

        if self.params['stacks'] and ',' in self.params['stacks']:
            #remember stackids are a list of strings
            stackids = self.params['stacks'].split(',')
            self.params['stackids'] = stackids
        else:
            apDisplay.printError(
                "enter a list of stack ids to combine, e.g. --stackids=11,14,7"
            )

        ### check to make sure all pixel and box size are the same
        self.newboxsize = None
        self.newpixelsize = None
        for stackidstr in self.params['stackids']:
            if not re.match("^[0-9]+$", stackidstr):
                apDisplay.printError("Stack id '%s' is not an integer" %
                                     (stackidstr))
            stackid = int(stackidstr)
            boxsize = apStack.getStackBoxsize(stackid, msg=False)
            pixelsize = apStack.getStackPixelSizeFromStackId(stackid,
                                                             msg=False)
            apDisplay.printMsg("Stack id: %d\tBoxsize: %d\tPixelsize: %.3f" %
                               (stackid, boxsize, pixelsize))
            if self.newboxsize is None:
                self.newboxsize = boxsize
            if self.newpixelsize is None:
                self.newpixelsize = pixelsize
            if boxsize != self.newboxsize:
                apDisplay.printError(
                    "Trying to combine stacks with different box sizes")
            if abs(pixelsize - self.newpixelsize) > 0.01:
                apDisplay.printError(
                    "Trying to combine stacks with different pixel sizes")
 def checkConflicts(self):
     if self.params['stackid'] is None:
         apDisplay.printError("stackid was not defined")
     if self.params['runname'] is None:
         apDisplay.printError("new runname was not defined")
    def processImage(self, imgdata):
        self.ctfvalues = {}
        bestdef = ctfdb.getBestCtfByResolution(imgdata, msg=True)
        apix = apDatabase.getPixelSize(imgdata)
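        # run ACE2 on the raw session image unless both zero-pass and one-pass
        # resolutions were given, in which case high-pass filter a working copy first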
        if (not (self.params['onepass'] and self.params['zeropass'])):
            maskhighpass = False
            ace2inputpath = os.path.join(imgdata['session']['image path'],
                                         imgdata['filename'] + ".mrc")
        else:
            maskhighpass = True
            filterimg = apImage.maskHighPassFilter(imgdata['image'], apix, 1,
                                                   self.params['zeropass'],
                                                   self.params['onepass'])
            ace2inputpath = os.path.join(self.params['rundir'],
                                         imgdata['filename'] + ".mrc")
            mrc.write(filterimg, ace2inputpath)

        # make sure that the image is a square
        dimx = imgdata['camera']['dimension']['x']
        dimy = imgdata['camera']['dimension']['y']
        if dimx != dimy:
            dims = [dimx, dimy]
            dims.sort()
            apDisplay.printMsg("resizing image: %ix%i to %ix%i" %
                               (dimx, dimy, dims[0], dims[0]))
            mrcarray = apImage.mrcToArray(ace2inputpath, msg=False)
            clippedmrc = apImage.frame_cut(mrcarray, [dims[0], dims[0]])
            ace2inputpath = os.path.join(self.params['rundir'],
                                         imgdata['filename'] + ".mrc")
            apImage.arrayToMrc(clippedmrc, ace2inputpath, msg=False)

        ### pad out image to speed up FFT calculations for non-standard image sizes
        print "checking prime factor"
        if primefactor.isGoodStack(dimx) is False:
            goodsize = primefactor.getNextEvenPrime(dimx)
            factor = float(goodsize) / float(dimx)
            apDisplay.printMsg("padding image:  %ix%i to %ix%i" %
                               (dimx, dimy, dimx * factor, dimy * factor))
            mrcarray = apImage.mrcToArray(ace2inputpath, msg=False)
            #			paddedmrc = imagefun.pad(mrcarray, None, factor)
            paddedmrc = apImage.frame_constant(mrcarray,
                                               (dimx * factor, dimy * factor),
                                               cval=mrcarray.mean())
            ace2inputpath = os.path.join(self.params['rundir'],
                                         imgdata['filename'] + ".mrc")
            apImage.arrayToMrc(paddedmrc, ace2inputpath, msg=False)

        inputparams = {
            'input': ace2inputpath,
            'cs': self.params['cs'],
            'kv': imgdata['scope']['high tension'] / 1000.0,
            'apix': apix,
            'binby': self.params['bin'],
        }

        ### make standard input for ACE 2
        apDisplay.printMsg("Ace2 executable: " + self.ace2exe)
        commandline = (self.ace2exe + " -i " + str(inputparams['input']) +
                       " -b " + str(inputparams['binby']) + " -c " +
                       str(inputparams['cs']) + " -k " +
                       str(inputparams['kv']) + " -a " +
                       str(inputparams['apix']) + " -e " +
                       str(self.params['edge_b']) + "," +
                       str(self.params['edge_t']) + " -r " +
                       str(self.params['rotblur']) + "\n")

        ### run ace2
        apDisplay.printMsg("running ace2 at " + time.asctime())
        apDisplay.printColor(commandline, "purple")

        t0 = time.time()

        if self.params['verbose'] is True:
            ace2proc = subprocess.Popen(commandline, shell=True)
        else:
            aceoutf = open("ace2.out", "a")
            aceerrf = open("ace2.err", "a")
            ace2proc = subprocess.Popen(commandline,
                                        shell=True,
                                        stderr=aceerrf,
                                        stdout=aceoutf)

        ace2proc.wait()

        ### check if ace2 worked
        basename = os.path.basename(ace2inputpath)
        imagelog = basename + ".ctf.txt"
        if not os.path.isfile(imagelog) and self.stats['count'] <= 1:
            ### ace2 always crashes on first image??? .fft_wisdom file??
            time.sleep(1)

            if self.params['verbose'] is True:
                ace2proc = subprocess.Popen(commandline, shell=True)
            else:
                aceoutf = open("ace2.out", "a")
                aceerrf = open("ace2.err", "a")
                ace2proc = subprocess.Popen(commandline,
                                            shell=True,
                                            stderr=aceerrf,
                                            stdout=aceoutf)

            ace2proc.wait()

        if self.params['verbose'] is False:
            aceoutf.close()
            aceerrf.close()
        if not os.path.isfile(imagelog):
            lddcmd = "ldd " + self.ace2exe
            lddproc = subprocess.Popen(lddcmd, shell=True)
            lddproc.wait()
            apDisplay.printError("ace2 did not run")
        apDisplay.printMsg("ace2 completed in " +
                           apDisplay.timeString(time.time() - t0))

        ### parse log file
        self.ctfvalues = {
            'cs': self.params['cs'],
            'volts': imgdata['scope']['high tension'],
        }
        logf = open(imagelog, "r")
        apDisplay.printMsg("reading log file %s" % (imagelog))
        for line in logf:
            sline = line.strip()
            if re.search("^Final Defocus: ", sline):
                ### old ACE2
                apDisplay.printError(
                    "This old version of ACE2 has a bug in the astigmatism calculation; please upgrade ACE2 now"
                )
                #parts = sline.split()
                #self.ctfvalues['defocus1'] = float(parts[2])
                #self.ctfvalues['defocus2'] = float(parts[3])
                ### convert to degrees
                #self.ctfvalues['angle_astigmatism'] = math.degrees(float(parts[4]))
            elif re.search("^Final Defocus \(m,m,deg\):", sline):
                ### new ACE2
                apDisplay.printMsg("Reading new ACE2 defocus")
                parts = sline.split()
                #print parts
                self.ctfvalues['defocus1'] = float(parts[3])
                self.ctfvalues['defocus2'] = float(parts[4])
                # ace2 defines negative angle from +x toward +y
                self.ctfvalues['angle_astigmatism'] = -float(parts[5])
            elif re.search("^Amplitude Contrast:", sline):
                parts = sline.split()
                self.ctfvalues['amplitude_contrast'] = float(parts[2])
            elif re.search("^Confidence:", sline):
                parts = sline.split()
                self.ctfvalues['confidence'] = float(parts[1])
                self.ctfvalues['confidence_d'] = float(parts[1])
        logf.close()

        ### summary stats
        apDisplay.printMsg("============")
        avgdf = (self.ctfvalues['defocus1'] + self.ctfvalues['defocus2']) / 2.0
        ampconst = 100.0 * self.ctfvalues['amplitude_contrast']
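        # percent astigmatism: defocus difference as a percentage of the mean defocus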
        pererror = 100.0 * (self.ctfvalues['defocus1'] -
                            self.ctfvalues['defocus2']) / avgdf
        apDisplay.printMsg(
            "Defocus: %.3f x %.3f um (%.2f percent astigmatism)" %
            (self.ctfvalues['defocus1'] * 1.0e6,
             self.ctfvalues['defocus2'] * 1.0e6, pererror))
        apDisplay.printMsg("Angle astigmatism: %.2f degrees" %
                           (self.ctfvalues['angle_astigmatism']))
        apDisplay.printMsg("Amplitude contrast: %.2f percent" % (ampconst))

        apDisplay.printColor(
            "Final confidence: %.3f" % (self.ctfvalues['confidence']), 'cyan')

        ### double check that the values are reasonable
        if avgdf > self.params['maxdefocus'] or avgdf < self.params['mindefocus']:
            apDisplay.printWarning(
                "bad defocus estimate, not committing values to database")
            self.badprocess = True

        if ampconst < 0.0 or ampconst > 80.0:
            apDisplay.printWarning(
                "bad amplitude contrast, not committing values to database")
            self.badprocess = True

        if self.ctfvalues['confidence'] < 0.2:
            apDisplay.printWarning(
                "bad confidence value, not committing values to database")
            self.badprocess = True

        ## create power spectra jpeg
        mrcfile = imgdata['filename'] + ".mrc.edge.mrc"
        if os.path.isfile(mrcfile):
            jpegfile = os.path.join(
                self.powerspecdir,
                apDisplay.short(imgdata['filename']) + ".jpg")
            ps = apImage.mrcToArray(mrcfile, msg=False)
            c = numpy.array(ps.shape) // 2  # integer center so the array indexing below stays valid
            ps[c[0] - 0, c[1] - 0] = ps.mean()
            ps[c[0] - 1, c[1] - 0] = ps.mean()
            ps[c[0] - 0, c[1] - 1] = ps.mean()
            ps[c[0] - 1, c[1] - 1] = ps.mean()
            #print "%.3f -- %.3f -- %.3f"%(ps.min(), ps.mean(), ps.max())
            ps = numpy.log(ps + 1.0)
            ps = (ps - ps.mean()) / ps.std()
            cutoff = -2.0 * ps.min()
            ps = numpy.where(ps > cutoff, cutoff, ps)
            cutoff = ps.mean()
            ps = numpy.where(ps < cutoff, cutoff, ps)
            #print "%.3f -- %.3f -- %.3f"%(ps.min(), ps.mean(), ps.max())
            apImage.arrayToJpeg(ps, jpegfile, msg=False)
            apFile.removeFile(mrcfile)
            self.ctfvalues['graph3'] = jpegfile
        otherfiles = glob.glob(imgdata['filename'] + ".*.txt")

        ### remove extra debugging files
        for filename in otherfiles:
            if filename[-9:] == ".norm.txt":
                continue
            elif filename[-8:] == ".ctf.txt":
                continue
            else:
                apFile.removeFile(filename)

        if maskhighpass and os.path.isfile(ace2inputpath):
            apFile.removeFile(ace2inputpath)

        return
        def insertRefBasedRun(self, partlist, imagicstack, insert=False):
                apDisplay.printMsg("committing results to DB")

                ### setup alignment run
                alignrunq = appiondata.ApAlignRunData()
                alignrunq['runname'] = self.params['runname']
                alignrunq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                uniquerun = alignrunq.query(results=1)
                if uniquerun:
                        apDisplay.printError("Run name '"+runparams['runname']+"' and path already exist in database")

                ### setup ref based run
                refrunq = appiondata.ApRefBasedRunData()
                refrunq['runname'] = self.params['runname']
                refrunq['xysearch'] = self.params['xysearch']
                refrunq['xystep'] = self.params['xystep']
                refrunq['first_ring'] = self.params['firstring']
                refrunq['last_ring'] = self.params['lastring']
                refrunq['num_iter'] = self.params['numiter']
                refrunq['invert_templs'] = self.params['inverttemplates']
                refrunq['num_templs'] = self.params['numtemplate']
                #refrunq['csym', int),
                refrunq['run_seconds'] = self.params['runtime']

                ### finish alignment run
                alignrunq = appiondata.ApAlignRunData()
                alignrunq['refbasedrun'] = refrunq
                alignrunq['hidden'] = False
                alignrunq['bin'] = self.params['bin']
                alignrunq['hp_filt'] = self.params['highpass']
                alignrunq['lp_filt'] = self.params['lowpass']
                alignrunq['runname'] = self.params['runname']
                alignrunq['description'] = self.params['description']

                ### setup alignment stack
                alignstackq = appiondata.ApAlignStackData()
                alignstackq['alignrun'] = alignrunq

                alignstackq['imagicfile'] = imagicstack
                alignstackq['avgmrcfile'] = "average.mrc"
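		# convert the final SPIDER template stack to IMAGIC format for the reference stack entry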
                emancmd = "proc2d templatestack%02d.spi templatestack%02d.hed"%(self.params['numiter'],self.params['numiter'])
                apEMAN.executeEmanCmd(emancmd)
                alignstackq['refstackfile'] = ("templatestack%02d.hed"%(self.params['numiter']))
                alignstackq['alignrun'] = alignrunq
                alignstackq['iteration'] = self.params['numiter']
                alignstackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                ### check to make sure files exist
                imagicfile = os.path.join(self.params['rundir'], alignstackq['imagicfile'])
                if not os.path.isfile(imagicfile):
                        apDisplay.printError("could not find stack file: "+imagicfile)
                avgmrcfile = os.path.join(self.params['rundir'], alignstackq['avgmrcfile'])
                if not os.path.isfile(avgmrcfile):
                        apDisplay.printError("could not find average mrc file: "+avgmrcfile)
                refstackfile = os.path.join(self.params['rundir'], alignstackq['refstackfile'])
                if not os.path.isfile(refstackfile):
                        apDisplay.printError("could not find reference stack file: "+refstackfile)
                alignstackq['stack'] = self.stack['data']
                alignstackq['boxsize'] = self.boxsize
                alignstackq['pixelsize'] = self.stack['apix']*self.params['bin']
                alignstackq['description'] = self.params['description']
                alignstackq['hidden'] = False
                alignstackq['num_particles'] = self.params['numpart']

                if insert is True:
                        alignstackq.insert()

                ### insert reference data
                reflist = []

                for j in range(self.params['numiter']):
                        iternum = j+1
                        for i in range(len(self.templatelist)):
                                refnum = i+1
                                templateid = self.templatelist[i]
                                refq = appiondata.ApAlignReferenceData()
                                refq['refnum'] = refnum
                                refq['iteration'] = iternum
                                refq['template'] = apTemplate.getTemplateFromId(templateid)
                                refq['mrcfile'] = ("templateavg%02d-%02d.mrc"%(iternum,refnum))
                                refpath = os.path.join(self.params['rundir'], "templates")
                                refq['path'] = appiondata.ApPathData(path=os.path.abspath(refpath))
                                refq['alignrun'] = alignrunq
                                if refnum  in self.resdict:
                                        refq['ssnr_resolution'] = self.resdict[refnum]
                                if insert is True:
                                        refq.insert()
                                if iternum == self.params['numiter']:
                                        reflist.append(refq)
                #refq['varmrcfile', str),
                #refq['frc_resolution', float),

                ### insert particle data
                apDisplay.printColor("Inserting particle alignment data, please wait", "cyan")
                for partdict in partlist:
                        ### see apSpider.alignment.alignStack() for more info
                        """
                        partdict.keys()
                        'num': int(data[0]), #SPIDER NUMBERING: 1,2,3,...
                        'template': int(abs(templatenum)), #SPIDER NUMBERING: 1,2,3,...
                        'mirror': checkMirror(templatenum),
                        'score': float(data[3]),
                        'rot': float(data[4]),
                        'xshift': float(data[5]),
                        'yshift': float(data[6]),
                        """

                        alignpartq = appiondata.ApAlignParticleData()
                        alignpartq['ref'] = reflist[partdict['template']-1]
                        alignpartq['partnum'] = partdict['num']
                        alignpartq['alignstack'] = alignstackq
                        stackpartdata = apStack.getStackParticle(self.params['stackid'], partdict['num'])
                        alignpartq['stackpart'] = stackpartdata
                        alignpartq['xshift'] = partdict['xshift']
                        alignpartq['yshift'] = partdict['yshift']
                        alignpartq['rotation'] = partdict['rot']
                        alignpartq['score'] = partdict['score']
                        alignpartq['mirror'] = partdict['mirror']

                        if insert is True:
                                alignpartq.insert()
                return