def createAlignedReferenceStack(self):
		searchstr = "part"+self.params['timestamp']+"_ref0*.xmp"
		files = glob.glob(searchstr)
		files.sort()
		stack = []
		reflist = self.readRefDocFile()
		for i in range(len(files)):
			fname = files[i]
			refdict = reflist[i]
			if refdict['partnum'] != i+1:
				print i, refdict['partnum']
				apDisplay.printError("sorting error in reflist, see neil")
			refarray = spider.read(fname)
			xyshift = (refdict['xshift'], refdict['yshift'])
			alignrefarray = apImage.xmippTransform(refarray, rot=refdict['inplane'],
				shift=xyshift, mirror=refdict['mirror'])
			stack.append(alignrefarray)
		stackarray = numpy.asarray(stack, dtype=numpy.float32)
		#print stackarray.shape
		avgstack = "part"+self.params['timestamp']+"_average.hed"
		apFile.removeStack(avgstack, warn=False)
		apImagicFile.writeImagic(stackarray, avgstack)
		### create an average mrc
		avgdata = stackarray.mean(0)
		apImage.arrayToMrc(avgdata, "average.mrc")
		return
 def writeToStack(self,partarray):
         if self.partperiter == 0:
                 arrayshape = partarray.shape
                 partperiter = int(1e9/(arrayshape[0]*arrayshape[1])/16.)
                 if partperiter > 4096:
                         partperiter = 4096
                 self.partperiter = partperiter
                 apDisplay.printMsg("Using %d particle per iteration"%(partperiter))
         stackroot = self.outstackfile[:-4]
         imgnum = self.imgnum
         index = imgnum % self.partperiter
         ### Process images
         startmem = mem.active()
         if imgnum % 100 == 0:
                 sys.stderr.write(".")
                 #sys.stderr.write("%03.1fM %d\n"%((mem.active()-startmem)/1024., index))
                 if mem.active()-startmem > 2e6:
                         apDisplay.printWarning("Out of memory")
         if index < 1:
                 ### deal with large stacks, reset loop
                 if imgnum > 0:
                         sys.stderr.write("\n")
                         stackname = "%s-%d.hed"%(stackroot, imgnum)
                         apDisplay.printMsg("writing single particles to file "+stackname)
                         self.stacklist.append(stackname)
                         apFile.removeStack(stackname, warn=False)
                         apImagicFile.writeImagic(self.stackarray, stackname, msg=False)
                         perpart = (time.time()-self.starttime)/imgnum
                         apDisplay.printColor("%d  :: %.1fM mem :: %s/part "%
                                 (imgnum+1, (mem.active()-startmem)/1024. , apDisplay.timeString(perpart)), 
                                 "blue")
                 self.stackarray = []
                 ### merge particles
         self.stackarray.append(partarray)
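# A worked illustration of the particles-per-iteration heuristic above (a sketch with an
# assumed particle size, not part of the original module): the budget is 1e9 divided by
# the number of pixels per particle and a factor of 16, capped at 4096 particles per chunk.
def examplePartPerIter(nrow=256, ncol=256):
        partperiter = int(1e9/(nrow*ncol)/16.)  # 256x256 particles -> 953 per chunk
        return min(partperiter, 4096)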
def boxerRotate(imgfile, parttree, outstack, boxsize):
        """
        boxes the particles with an expanded box size,
        applies a rotation to each particle,
        reduces the box size to the requested size,
        and saves them to an imagic file
        """
        # size needed is sqrt(2)*boxsize, using 1.5 to be extra safe
        bigboxsize = int(math.ceil(1.5*boxsize))
        imgarray = mrc.read(imgfile)
        bigboxedparticles = boxerMemory(imgarray, parttree, bigboxsize)
        
        boxedparticles = []
        boxshape = (boxsize,boxsize)
        apDisplay.printMsg("Rotating particles...")
        for i in range(len(bigboxedparticles)):
                if i % 10 == 0:
                        sys.stderr.write(".")
                bigboxpart = bigboxedparticles[i]
                partdict = parttree[i]
                ### add 90 degrees because database angle is from x-axis not y-axis
                angle = partdict['angle']+90.0
                rotatepart = ndimage.rotate(bigboxpart, angle=angle, reshape=False, order=1)
                boxpart = imagefilter.frame_cut(rotatepart, boxshape)
                boxedparticles.append(boxpart)
        sys.stderr.write("done\n")
        apImagicFile.writeImagic(boxedparticles, outstack)
        return True
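# A minimal usage sketch for boxerRotate (assumed file names; the particle dictionaries must
# also carry the coordinate keys expected by boxerMemory, which are not shown in this file,
# so the 'xcoord'/'ycoord' keys below are illustrative only):
def exampleBoxerRotate():
        parttree = [
                {'xcoord': 512, 'ycoord': 480, 'angle': 15.0},
                {'xcoord': 900, 'ycoord': 700, 'angle': -42.5},
        ]
        boxerRotate("micrograph.mrc", parttree, "rotatedstack.hed", boxsize=128)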
def boxMaskStack(bmstackf, partdatas, box, xmask, ymask, falloff, imask=None, norotate=False):
        from appionlib.apSpider import operations
        from appionlib import apEMAN
        import os

        # create blank image for mask using SPIDER
        maskfile = "boxmask.spi"
        operations.createBoxMask(maskfile,box,xmask,ymask,falloff,imask)

        # convert mask to MRC
        apEMAN.executeEmanCmd("proc2d boxmask.spi boxmask.mrc",verbose=False,showcmd=False)
        os.remove("boxmask.spi")

        maskarray = mrc.read("boxmask.mrc")

        # box particles
        maskedparts = []
        for i in range(len(partdatas)):
                if norotate is True:
                        rotatemask = maskarray
                else:
                        angle = (-partdatas[i]['angle'])-90
                        rotatemask = ndimage.rotate(maskarray, angle=angle, reshape=False, order=1)
                maskedparts.append(rotatemask)

        # write to stack
        apImagicFile.writeImagic(maskedparts, bmstackf)
        os.remove("boxmask.mrc")
        return bmstackf
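# A minimal usage sketch for boxMaskStack (assumed stack name and mask radii; only the
# 'angle' key is read from each particle record here):
def exampleBoxMaskStack():
        partdatas = [{'angle': 30.0}, {'angle': -75.0}]
        boxMaskStack("boxmaskstack.hed", partdatas, box=128,
                xmask=40, ymask=60, falloff=10, imask=None, norotate=False)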
	def createAlignedStacks(self, partlist):
		stackid = self.params['stackid']
		stackdata = apStack.getOnlyStackData(stackid)
		origstackfile = os.path.join(stackdata['path']['path'], stackdata['name'])
		imagesdict = apImagicFile.readImagic(origstackfile)
		i = 0
		t0 = time.time()
		apDisplay.printMsg("rotating and shifting particles at "+time.asctime())
		alignstack = []
		while i < len(partlist):
			partimg = imagesdict['images'][i]
			partdict = partlist[i]
			partnum = i+1
			#print partnum, partdict, partimg.shape
			if partdict['partnum'] != partnum:
				apDisplay.printError("particle shifting "+str(partnum)+" != "+str(partdict))
			xyshift = (partdict['xshift'], partdict['yshift'])
			alignpartimg = apImage.xmippTransform(partimg, rot=partdict['inplane'], 
				shift=xyshift, mirror=partdict['mirror'])
			alignstack.append(alignpartimg)
			i += 1
		apDisplay.printMsg("rotate then shift %d particles in %s"%(i,apDisplay.timeString(time.time()-t0)))
		alignstackarray = numpy.asarray(alignstack)
		self.alignimagicfile = "alignstack.hed"
		apImagicFile.writeImagic(alignstackarray, self.alignimagicfile)
        def runAffinityPropagation(self, alignedstack):
                ### Get initial correlation values
                ### this is really, really slow
                similarfile, simlist = self.fillSimilarityMatrix(alignedstack)

                ### Preference value stats
                preffile = self.setPreferences(simlist)

                ### run apcluster.exe program
                outfile = "clusters.out"
                apDisplay.printMsg("Run apcluster.exe program")
                apclusterexe = os.path.join(apParam.getAppionDirectory(), "bin/apcluster.exe")
                apFile.removeFile(outfile)
                clustercmd = apclusterexe+" "+similarfile+" "+preffile+" "+outfile
                clusttime = time.time()
                proc = subprocess.Popen(clustercmd, shell=True)
                proc.wait()
                apDisplay.printMsg("apCluster time: "+apDisplay.timeString(time.time()-clusttime))

                if not os.path.isfile(outfile):
                        apDisplay.printError("apCluster did not run")

                ### Parse apcluster output file: clusters.out
                apDisplay.printMsg("Parse apcluster output file: "+outfile)
                clustf = open(outfile, "r")
                ### each line is the particle and the number is the class
                partnum = 0
                classes = {}
                for line in clustf:
                        sline = line.strip()
                        if sline:
                                partnum += 1
                                classnum = int(sline)
                                if not classnum in classes:
                                        classes[classnum] = [partnum,]
                                else:
                                        classes[classnum].append(partnum)
                clustf.close()
                apDisplay.printMsg("Found %d classes"%(len(classes.keys())))

                ### Create class averages
                classavgdata = []
                classnames = classes.keys()
                classnames.sort()
                for classnum in classnames:
                        apDisplay.printMsg("Class %d, %d members"%(classnum, len(classes[classnum])))
                        #clsf = open('subcls%04d.lst'%(classnum), 'w')
                        #for partnum in classes[classnum]:
                        #       clsf.write("%d\n"%(partnum))
                        #clsf.close()
                        classdatalist = apImagicFile.readParticleListFromStack(alignedstack, classes[classnum], msg=False)
                        classdatarray = numpy.asarray(classdatalist)
                        classavgarray = classdatarray.mean(0)
                        #mrc.write(classavgarray, 'subcls%04d.mrc'%(classnum))
                        classavgdata.append(classavgarray)
                apFile.removeStack("classaverage-"+self.timestamp+".hed")
                apImagicFile.writeImagic(classavgdata, "classaverage-"+self.timestamp+".hed")

                return classes
def boxer(imgfile, parttree, outstack, boxsize):
        """
        boxes the particles and saves them to an imagic file
        """
        imgarray = mrc.read(imgfile)
        boxedparticles = boxerMemory(imgarray, parttree, boxsize)
        apImagicFile.writeImagic(boxedparticles, outstack)
        return True
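# A minimal usage sketch for boxer (assumed file names; as with boxerRotate, the coordinate
# keys expected by boxerMemory are an assumption here):
def exampleBoxer():
        parttree = [{'xcoord': 100, 'ycoord': 200}, {'xcoord': 300, 'ycoord': 400}]
        boxer("micrograph.mrc", parttree, "rawstack.hed", boxsize=64)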
 def createReferenceStack(self):
         # Create a stack for the class averages at each level
         Nlevels=len(glob.glob("part"+self.params['timestamp']+"_level_??_.sel"))
         for level in range(Nlevels):
                 stack=[]
                 for f in glob.glob("part"+self.params['timestamp']+"_level_%02d_[0-9]*.xmp"%level):
                         stack.append(spider.read(f))
                 apImagicFile.writeImagic(stack, "part"+self.params['timestamp']+"_level_%02d_.hed"%level)
         if self.params['align']:
                 apXmipp.gatherSingleFilesIntoStack("partlist.sel","alignedStack.hed")
         return
	def calcResolution(self, level):
		self.resdict = {}
		D=self.getClassificationAtLevel(level)
		for classref in D:
			stack=[]
			for partnum in D[classref]:
				stack.append(apImagicFile.readSingleParticleFromStack("alignedStack.hed",int(partnum)+1,msg=False))
			apImagicFile.writeImagic(stack,"tmp.hed")

			frcdata = apFourier.spectralSNRStack("tmp.hed", self.apix)
			self.resdict[classref] = apFourier.getResolution(frcdata, self.apix, self.boxsize)
		apFile.removeStack("tmp.hed")
	def calcResolution(self, level):
		self.resdict = {}
		D=self.getClassificationAtLevel(level)
		for classref in D:
			stack=[]
			for partnum in D[classref]:
				### NOTE: RESOLUTION WILL NOT BE CALCULATED IF ALIGNED STACK IS NOT CREATED
				stack.append(apImagicFile.readSingleParticleFromStack(self.params['timestamp']+".hed",int(partnum),msg=False))
			apImagicFile.writeImagic(stack,"tmp.hed")

			frcdata = apFourier.spectralSNRStack("tmp.hed", self.apix)
			self.resdict[classref] = apFourier.getResolution(frcdata, self.apix, self.boxsize)
		apFile.removeStack("tmp.hed")
	def calcResolution(self, alignedStack):
		self.resdict = {}
		for classref, partlist in self.classD.iteritems():
			if len(partlist) == 0:
				continue
			stack=[]
			for partnum in partlist:
				### NOTE: RESOLUTION WILL NOT BE CALCULATED IF ALIGNED STACK IS NOT CREATED
				stack.append(apImagicFile.readSingleParticleFromStack(alignedStack,int(partnum),msg=False))
			apImagicFile.writeImagic(stack,"tmp.hed")

			frcdata = apFourier.spectralSNRStack("tmp.hed", self.apix)
			self.resdict[classref] = apFourier.getResolution(frcdata, self.apix, self.boxsize)
		apFile.removeStack("tmp.hed")
def boxerFrameStack(framestackpath, parttree, outstack, boxsize,framelist):
        """
        boxes the particles from a sum of the selected movie frames and saves them to an imagic file
        """
        start_frame = framelist[0]
        nframe = len(framelist)
        apDisplay.printMsg("boxing %d particles from sum of total %d frames starting from frame %d using mmap" % (len(parttree),nframe,start_frame))
        boxedparticles = []
        stack = mrc.mmap(framestackpath)
        for partdict in parttree:
                x1,x2,y1,y2 = getBoxBoundary(partdict, boxsize)
                apDisplay.printDebug(' crop range of (x,y)=(%d,%d) to (%d,%d)' % (x1,y1,x2-1,y2-1))
                #numpy arrays are rows,cols --> y,x not x,y
                boxpart = numpy.sum(stack[tuple(framelist),y1:y2,x1:x2],axis=0)
                boxedparticles.append(boxpart)
        apImagicFile.writeImagic(boxedparticles, outstack)
        return True
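# A minimal usage sketch for boxerFrameStack (assumed file names and coordinate keys;
# framelist selects which movie frames are summed for each boxed particle):
def exampleBoxerFrameStack():
        parttree = [{'xcoord': 512, 'ycoord': 480}, {'xcoord': 900, 'ycoord': 700}]
        framelist = [2, 3, 4, 5]  # sum frames 2 through 5 of the movie stack
        boxerFrameStack("framestack.mrc", parttree, "framesum.hed", boxsize=128, framelist=framelist)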
	def createReferenceStack(self):
		avgstack = "part"+self.timestamp+"_average.hed"
		apFile.removeStack(avgstack, warn=False)
		searchstr = "part"+self.timestamp+"_ref0*.xmp"
		files = glob.glob(searchstr)
		if len(files) == 0:
			apDisplay.printError("Xmipp did not run")
		files.sort()
		stack = []
		for i in range(len(files)):
			fname = files[i]
			refarray = spider.read(fname)
			stack.append(refarray)
		apImagicFile.writeImagic(stack, avgstack)
		### create an average mrc
		stackarray = numpy.asarray(stack, dtype=numpy.float32)
		avgdata = stackarray.mean(0)
		apImage.arrayToMrc(avgdata, "average.mrc")
		return
        def postLoop(self):
                if len(self.stackarray) > 0:
                        stackroot = self.outstackfile[:-4]
                        stackname = "%s-%d.hed"%(stackroot, self.imgnum)
                        apDisplay.printMsg("writing single particles to file "+stackname)
                        self.stacklist.append(stackname)
                        apFile.removeStack(stackname, warn=False)
                        apImagicFile.writeImagic(self.stackarray, stackname, msg=False)
                ### merge stacks
                apFile.removeStack(self.outstackfile, warn=False)
                apImagicFile.mergeStacks(self.stacklist, self.outstackfile)
                filepart = apFile.numImagesInStack(self.outstackfile)
                if filepart != self.imgnum:
                        apDisplay.printError("number merged particles (%d) not equal number expected particles (%d)"%
                                (filepart, numpart))
                for stackname in self.stacklist:
                        apFile.removeStack(stackname, warn=False)

                ### summarize
                apDisplay.printColor("merged %d particles in %s"%(self.imgnum, apDisplay.timeString(time.time()-self.starttime)), "cyan")
	def scaleTemplates(self):
		reffile = os.path.join(self.params['rundir'], "references.hed")
		if self.params['apix'] != self.templatestack['apix']:
			scalefactor = float(self.templatestack['apix']) / self.params['apix']
			templates = apImagicFile.readImagic(reffile)
			scaledtemplates = []
			for templatearray in templates['images']:
				newarray = apTemplate.scaleTemplate(templatearray, scalefactor)
				scaledtemplates.append(newarray)
			apImagicFile.writeImagic(scaledtemplates, reffile)

		refbox = apFile.getBoxSize(reffile)[0]		
		stbox = self.params['boxsize']		
	
		### now clip the references to get identical boxsizes
		if stbox != refbox:
			while os.path.isfile(reffile+".new.img"):
				apFile.removeStack(reffile+".new.img")
			emancmd = "proc2d "+reffile+" "+reffile+".new.hed clip="+str(stbox)+" edgenorm"
			apParam.runCmd(emancmd, "EMAN")
			os.rename(reffile+".new.hed", reffile)
			os.rename(reffile+".new.img", reffile[:-4]+".img")
			
		return
	def tiltPhaseFlipParticles(self, imgdata, imgstackfile, partdatas):
		apDisplay.printMsg("Applying per-particle CTF")
		ctfvalue = ctfdb.getBestTiltCtfValueForImage(imgdata)
		if ctfvalue is None:
			apDisplay.printError("Failed to get ctf parameters")
		apix = apDatabase.getPixelSize(imgdata)
		ctfimgstackfile = os.path.join(self.params['rundir'], apDisplay.short(imgdata['filename'])+"-ctf.hed")
		ampconst = ctfvalue['amplitude_contrast']

		### calculate defocus at given position
		dimx = imgdata['camera']['dimension']['x']
		dimy = imgdata['camera']['dimension']['y']
		CX = dimx/2
		CY = dimy/2

		if ctfvalue['tilt_axis_angle'] is not None:
			N1 = -1.0 * math.sin( math.radians(ctfvalue['tilt_axis_angle']) )
			N2 = math.cos( math.radians(ctfvalue['tilt_axis_angle']) )
		else:
			N1 = 0.0
			N2 = 1.0
		PSIZE = apix

		### High tension on a CM is given in kV instead of V, so do not divide by 1000 in that case
		if imgdata['scope']['tem']['name'] == "CM":
			voltage = imgdata['scope']['high tension']
		else:
			voltage = (imgdata['scope']['high tension'])/1000

		# find cs
		cs = self.getCS(ctfvalue)

		imagicdata = apImagicFile.readImagic(imgstackfile, msg=False)
		ctfpartstack = []
		for i in range(len(partdatas)):
			partdata = partdatas[i]
			prepartarray = imagicdata['images'][i]
			prepartmrc = "rawpart.dwn.mrc"
			postpartmrc = "ctfpart.dwn.mrc"
			apImage.arrayToMrc(prepartarray, prepartmrc, msg = False)

			### calculate ctf based on position
			NX = partdata['xcoord']
			NY = dimy-partdata['ycoord'] # reverse due to boxer flip

			DX = CX - NX
			DY = CY - NY
			DF = (N1*DX + N2*DY) * PSIZE * math.tan( math.radians(ctfvalue['tilt_angle']) )
			### defocus is in Angstroms
			DFL1 = abs(ctfvalue['defocus1'])*1.0e10 + DF
			DFL2 = abs(ctfvalue['defocus2'])*1.0e10 + DF
			DF_final = (DFL1+DFL2)/2.0

			### convert defocus to microns
			defocus = DF_final*-1.0e-4

			### check to make sure defocus is a reasonable value for applyctf
			self.checkDefocus(defocus, apDisplay.short(imgdata['filename']))

			parmstr = ("parm=%f,200,1,%.3f,0,17.4,9,1.53,%i,%.1f,%f"
				%(defocus, ampconst, voltage, cs, apix))
			emancmd = ("applyctf %s %s %s setparm flipphase" % (prepartmrc, postpartmrc, parmstr))
			apEMAN.executeEmanCmd(emancmd, showcmd = False)

			ctfpartarray = apImage.mrcToArray(postpartmrc, msg=False)
			ctfpartstack.append(ctfpartarray)

		apImagicFile.writeImagic(ctfpartstack, ctfimgstackfile)
		return ctfimgstackfile
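# A standalone sketch of the tilted-defocus calculation used above (assumed example values;
# angles in degrees, apix in Angstroms/pixel, defoci in meters as stored in the database):
def exampleTiltDefocus(xcoord=1024, ycoord=3072, dimx=4096, dimy=4096, apix=1.63,
		tiltaxis=-5.0, tiltangle=45.0, def1=-1.5e-6, def2=-1.7e-6):
	cx, cy = dimx/2, dimy/2
	n1 = -1.0 * math.sin(math.radians(tiltaxis))
	n2 = math.cos(math.radians(tiltaxis))
	dx, dy = cx - xcoord, cy - (dimy - ycoord)  # y is flipped, as in the method above
	df = (n1*dx + n2*dy) * apix * math.tan(math.radians(tiltangle))  # Angstroms
	dfl1 = abs(def1)*1.0e10 + df
	dfl2 = abs(def2)*1.0e10 + df
	return (dfl1 + dfl2)/2.0 * -1.0e-4  # microns, sign flipped as for applyctf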
	def radonAlign(self, stackfile):
		"""
		performs the meat of the program: aligns the particles and creates references
		"""
		### FUTURE: only read a few particles into memory at one time
		imageinfo = apImagicFile.readImagic(stackfile, msg=False)
		imagelist = imageinfo['images']
		reflist = self.createReferences(imagelist)
		radonimagelist = self.getRadons(imagelist)
		
		### a pre-normalization value so the reference pixels do not overflow
		partperref = self.params['numpart'] / float(self.params['numrefs'])
		
		for iternum in range(self.params['numiter']):
			### save references to a file
			apImagicFile.writeImagic(reflist, "reflist%02d.hed"%(iternum), msg=False)
		
			### create Radon transforms for references
			radonreflist = self.getRadons(reflist)
		
			### create empty references
			newreflist = []
			newrefcount = []
			shape = imagelist[0].shape
			for i in range(self.params['numrefs']):
				newrefcount.append(0)
				newreflist.append(numpy.zeros(shape))

			### get alignment parameters
			aligndatalist = []
			cclist = []
			t0 = time.time()
			for i in range(len(imagelist)):
				if i % 50 == 0:
					### FUTURE: add time estimate
					sys.stderr.write(".")
				image = imagelist[i]
				radonimage = radonimagelist[i]
				aligndata = self.getBestAlignForImage(image, radonimage, reflist, radonreflist, None)
				#aligndatalist.append(aligndata)
				refid = aligndata['refid']
				cclist.append(aligndata['bestcc'])

				### create new references
				refimage = reflist[refid]
				alignedimage = self.transformImage(image, aligndata, refimage)
				newreflist[refid] += alignedimage/partperref
				newrefcount[refid] += 1
			sys.stderr.write("\n")
			print "Alignment complete in %s"%(apDisplay.timeString(time.time()-t0))

			### report median cross-correlation, it should get better each iter
			mediancc = numpy.median(numpy.array(cclist))
			apDisplay.printMsg("Iter %02d, Median CC: %.8f"%(iternum, mediancc))
			print newrefcount

			### FUTURE: re-calculate Radon transform for particles with large shift

			### new references are now the old references
			shape = reflist[0].shape
			reflist = []
			for i in range(self.params['numrefs']):
				if newrefcount[i] == 0:
					### reference with no particles -- just add noise
					apDisplay.printWarning("Reference %02d has no particles"%(i+1))
					ref = numpy.random.random(shape)
				else:
					ref = (newreflist[i] / newrefcount[i]) * partperref
				reflist.append(ref)

		return aligndatalist
    def compute_stack_of_class_averages_and_reprojections(self, iteration):
        ''' takes the Xmipp single files, plus the doc and sel files from projection-matching, and creates a stack of class averages in the results directory '''

        if bool(self.runparams['package_params']['CleanUpFiles']) is False:

            os.chdir(
                os.path.join(self.projmatchpath, "Iter_%d" % iteration,
                             "ProjMatchClasses"))

            ### make projections, and put them back into resultspath
            selfile = "proj_match_classes.sel"
            refvolume = "../Iter_%d_reconstruction.vol" % iteration
            docfile = "proj_match_classes.doc"

            #			apXmipp.compute_stack_of_class_averages_and_reprojections(d, selfile, refvolume, docfile, \
            #				self.runparams['boxsize'], self.resultspath, self.params['timestamp'], iteration)

            ### remove "lastdir" component from selfile (created by Xmipp program), then extract header information to docfile
            f = open(selfile, "r")
            lines = f.readlines()
            newlines = [
                re.sub("ProjMatchClasses/", "", line) for line in lines
            ]
            f.close()
            f = open(selfile[:-4] + "_new.sel", "w")
            f.writelines(newlines)
            f.close()

            ### create a projection params file and project the volume along identical Euler angles
            f = open("paramfile.descr", "w")
            f.write("%s\n" % refvolume)
            f.write("tmpproj 1 xmp\n")
            f.write("%d %d\n" %
                    (self.runparams['boxsize'], self.runparams['boxsize']))
            f.write("%s rot tilt psi\n" % docfile)
            f.write("NULL\n")
            f.write("0 0\n")
            f.write("0 0\n")
            f.write("0 0\n")
            f.write("0 0\n")
            f.write("0 0\n")
            f.close()
            projectcmd = "xmipp_project -i paramfile.descr"
            apParam.runCmd(projectcmd, "Xmipp")

            ### get order of projections in docfile
            d = open(docfile, "r")
            lines = d.readlines()[1:]
            d.close()
            projfile_sequence = []
            for i, l in enumerate(lines):
                if i % 2 == 0:
                    filename = os.path.basename(l.split()[1])
                    projfile_sequence.append(filename)
                else:
                    pass

            ### create stack of projections and class averages
            projections = glob.glob("tmpproj**xmp")
            projections.sort()
            if len(projections) != len(projfile_sequence):
                apDisplay.printWarning(
                    "number of projections does not match number of classes")
            stackarray = []
            stackname = os.path.join(
                self.resultspath, "proj-avgs_%s_it%.3d_vol%.3d.hed" %
                (self.params['timestamp'], iteration, 1))
            for i in range(len(projections)):
                stackarray.append(spider.read(projections[i]))
                stackarray.append(spider.read(projfile_sequence[i]))
            apImagicFile.writeImagic(stackarray, stackname, msg=False)

            ### remove unnecessary files
            for file in glob.glob("tmpproj*"):
                apFile.removeFile(file)
            os.chdir(self.params['rundir'])
        else:
            apDisplay.printWarning(
                "all projection-matching files were cleaned up ... NOT creating class-average / re-projection stack"
            )

        return
        def insertRotKerDenSOM(self):
                inserttime = time.time()
                ### Preliminary data
                projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid'])
                alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
                numclass = self.params['xdim']*self.params['ydim']
                pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))

                ### rotKerDen SOM Params object
                rotkerdenson = appiondata.ApRotKerDenSOMParamsData()
                #rotkerdenson['mask_diam'] = 2.0*self.params['maskrad']
                rotkerdenson['x_dimension'] = self.params['xdim']
                rotkerdenson['y_dimension'] = self.params['ydim']
                #rotkerdenson['convergence'] = self.params['converge']
                rotkerdenson['run_seconds'] = time.time()-self.t0
                rotkerdenson['initregulfact'] = self.params['initregulfact']
                rotkerdenson['finalregulfact'] = self.params['finalregulfact']
                rotkerdenson['incrementregulfact'] = self.params['incrementregulfact']
                rotkerdenson['spectrainnerradius'] = self.params['spectrainnerradius']
                rotkerdenson['spectraouterradius'] = self.params['spectraouterradius']
                rotkerdenson['spectralowharmonic'] = self.params['spectralowharmonic']
                rotkerdenson['spectrahighharmonic'] = self.params['spectrahighharmonic']

                ### Analysis Run object
                analysisq = appiondata.ApAlignAnalysisRunData()
                analysisq['runname'] = self.params['runname']
                analysisq['path'] = pathdata
                analysisq['description'] = self.params['description']
                analysisq['alignstack'] = alignstackdata
                analysisq['hidden'] = False

                ### Clustering Run object
                clusterrunq = appiondata.ApClusteringRunData()
                clusterrunq['runname'] = self.params['runname']
                clusterrunq['description'] = self.params['description']
                clusterrunq['boxsize'] = alignstackdata['boxsize']
                clusterrunq['pixelsize'] = alignstackdata['pixelsize']
                clusterrunq['num_particles'] = self.params['numpart']
                clusterrunq['alignstack'] = alignstackdata
                clusterrunq['analysisrun'] = analysisq
                clusterrunq['rotkerdenparams'] = rotkerdenson

                ### Clustering Stack object
                #Stack with cluster averages??????
                template =os.path.join(self.params['rundir'],self.spectraTemporalFilesMask + ".png")
                files = glob.glob(template)
                imglist = []
                for listname in files:
                        a=apImage.readPNG(listname)
                        imglist.append(a)
                apImagicFile.writeImagic(imglist,"rotkerdenstack" +self.timestamp + ".hed")
                clusterstackq = appiondata.ApClusteringStackData()
                clusterstackq['avg_imagicfile'] = "rotkerdenstack"+self.timestamp+".hed"
                clusterstackq['num_classes'] = numclass
                clusterstackq['clusterrun'] = clusterrunq
                clusterstackq['path'] = pathdata
                clusterstackq['hidden'] = False
                imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile'])
                if not os.path.isfile(imagicfile):
                        apDisplay.printError("could not find average stack file: "+imagicfile)

                ### looping over clusters
                apDisplay.printColor("Inserting particle classification data, please wait", "cyan")
                numclass = self.params['xdim']*self.params['ydim']
                for i in range(numclass):
                        classnum = i+1
                        classroot = "%s.%d"% (self.timestamp, classnum-1)
                        classdocfile = os.path.join(self.params['rundir'], classroot)
                        partlist = self.readClassDocFile(classdocfile)
                        ### Clustering Particle object
                        # MRC image for each code node but plot or image
                        clusterrefq = appiondata.ApClusteringReferenceData()
                        clusterrefq['refnum'] = classnum
                        clusterrefq['avg_mrcfile'] = classroot+".mrc"
                        clusterrefq['clusterrun'] = clusterrunq
                        clusterrefq['path'] = pathdata
                        clusterrefq['num_particles'] = len(partlist)

                        ### looping over particles
                        #which particles belong to which code node
                        sys.stderr.write(".")
                        for partnum in partlist:
                                alignpartdata = self.getAlignParticleData(partnum, alignstackdata)

                                ### Clustering Particle objects
                                clusterpartq = appiondata.ApClusteringParticleData()
                                clusterpartq['clusterstack'] = clusterstackq
                                clusterpartq['alignparticle'] = alignpartdata
                                clusterpartq['partnum'] = partnum
                                clusterpartq['refnum'] = classnum
                                clusterpartq['clusterreference'] = clusterrefq

                                ### finally we can insert parameters
                                if self.params['commit'] is True:
                                        clusterpartq.insert()
                sys.stderr.write("\n")
                apDisplay.printMsg("Insertion complete in %s"%(apDisplay.timeString(time.time()-inserttime)))
    def makeNewStacks(self, parttree):
        ### untilted stack
        self.notstackdata = apStack.getOnlyStackData(self.params['notstackid'])
        notstackfile = os.path.join(self.notstackdata['path']['path'],
                                    self.notstackdata['name'])

        ### tilted stack
        if not self.tiltstackdata:
            self.tiltstackdata = apStack.getOnlyStackData(
                self.params['tiltstackid'])
        tiltstackfile = os.path.join(self.tiltstackdata['path']['path'],
                                     self.tiltstackdata['name'])

        ### make doc file of Euler angles
        #eulerfile = self.makeEulerDoc(parttree)
        eulerfile = os.path.join(self.params['rundir'],
                                 "eulersdoc" + self.timestamp + ".spi")
        if os.path.isfile(eulerfile):
            apFile.removeFile(eulerfile)

        count = 0
        notstacklist = []
        tiltstacklist = []
        sizelimit = 2048
        notbox = apImagicFile.getBoxsize(notstackfile)
        tiltbox = apImagicFile.getBoxsize(tiltstackfile)
        tiltstacks = []
        notstacks = []
        t0 = time.time()
        for partdict in parttree:
            ### print friendly message
            if count % 100 == 0:
                backs = "\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b"
                sys.stderr.write(backs + backs + backs + backs)
                if count > sizelimit:
                    esttime = (len(parttree) / float(count) -
                               1.0) * (time.time() - t0)
                    sys.stderr.write(
                        str(count) + " particles of " + str(len(parttree)) +
                        ", " + apDisplay.timeString(esttime) + " remaining")
                else:
                    sys.stderr.write(
                        str(count) + " particles of " + str(len(parttree)))
            ### save stacks to file to save memory
            if count % sizelimit == 0:
                if count > 1:
                    apDisplay.printMsg("Writing stacks to file")
                    t1 = time.time()
                    tiltname = os.path.join(self.params['rundir'],
                                            "tiltstack%d.hed" % (count))
                    apFile.removeStack(tiltname)
                    apImagicFile.writeImagic(tiltstacklist,
                                             tiltname,
                                             msg=False)
                    tiltstacks.append(tiltname)
                    apDisplay.printMsg("finished tilted stack in " +
                                       apDisplay.timeString(time.time() - t1))
                    t1 = time.time()
                    notname = os.path.join(self.params['rundir'],
                                           "notstack%d.hed" % (count))
                    apFile.removeStack(notname)
                    apImagicFile.writeImagic(notstacklist, notname, msg=False)
                    notstacks.append(notname)
                    apDisplay.printMsg("finished untilted stack in " +
                                       apDisplay.timeString(time.time() - t1))
                ### reset stacks
                apDisplay.printMsg("Reset stacks in memory")
                notstacklist = []
                tiltstacklist = []
            ### increment count
            count += 1
            ### write to Euler doc
            self.appendEulerDoc(eulerfile, partdict['tilt'], count)
            ### untilted stack
            notpartarray = apImagicFile.readSingleParticleFromStack(
                notstackfile, partdict['not'], notbox, False)
            notstacklist.append(notpartarray)
            ### tilted stack
            tiltpartarray = apImagicFile.readSingleParticleFromStack(
                tiltstackfile, partdict['tilt'], tiltbox, False)
            tiltstacklist.append(tiltpartarray)
        ### write remaining particles to stack
        if len(notstacklist) > 0:
            apDisplay.printMsg("Writing stacks to file")
            t1 = time.time()
            tiltname = os.path.join(self.params['rundir'],
                                    "tiltstack%d.hed" % (count))
            apFile.removeStack(tiltname)
            apImagicFile.writeImagic(tiltstacklist, tiltname, msg=False)
            tiltstacks.append(tiltname)
            apDisplay.printMsg("finished tilted stack in " +
                               apDisplay.timeString(time.time() - t1))
            t1 = time.time()
            notname = os.path.join(self.params['rundir'],
                                   "notstack%d.hed" % (count))
            apFile.removeStack(notname)
            apImagicFile.writeImagic(notstacklist, notname, msg=False)
            notstacks.append(notname)
            apDisplay.printMsg("finished untilted stack in " +
                               apDisplay.timeString(time.time() - t1))

        ### merge NOT stack
        notname = os.path.join(self.params['rundir'], "notstack.hed")
        apImagicFile.mergeStacks(notstacks, notname)
        for stackname in notstacks:
            apFile.removeStack(stackname, warn=False)

        ### merge TILT stack
        tiltname = os.path.join(self.params['rundir'], "tiltstack.hed")
        apImagicFile.mergeStacks(tiltstacks, tiltname)
        for stackname in tiltstacks:
            apFile.removeStack(stackname, warn=False)

        ### upload results
        if self.params['commit'] is True:
            self.uploadResults()
	def createMontageInMemory(self, apix):
		self.cluster_resolution = []
		apDisplay.printMsg("Converting files")

		### Set binning of images
		boxsize = apImagicFile.getBoxsize(self.instack)
		bin = 1
		while boxsize/bin > 200:
			bin+=1
		binboxsize = boxsize/bin

		### create averages
		files = glob.glob(self.timestamp+".[0-9]*")
		files.sort(self.sortFile)
		montage = []
		montagepngs = []
		i = 0
		for listname in files:
			i += 1
			apDisplay.printMsg("%d of %d classes"%(i,len(files)))
			pngfile = listname+".png"
			if not os.path.isfile(listname) or apFile.fileSize(listname) < 1:
				### create a ghost particle
				sys.stderr.write("skipping "+listname+"\n")
				blank = numpy.ones((binboxsize, binboxsize), dtype=numpy.float32)

				### add to montage stack
				montage.append(blank)
				self.cluster_resolution.append(None)

				### create png
				apImage.arrayToPng(blank, pngfile)

			else:
				### read particle list
				partlist = self.readListFile(listname)

				### average particles
				partdatalist = apImagicFile.readParticleListFromStack(self.instack, partlist, boxsize, msg=False)
				partdataarray = numpy.asarray(partdatalist)
				finaldata = partdataarray.mean(0)
				if bin > 1:
					finaldata = apImage.binImg(finaldata, bin)

				### add to montage stack
				montage.append(finaldata)
				res = apFourier.spectralSNR(partdatalist, apix)
				self.cluster_resolution.append(res)

				### create png
				apImage.arrayToPng(finaldata, pngfile)

			### check for png file
			if os.path.isfile(pngfile):
				montagepngs.append(pngfile)
			else:
				apDisplay.printError("failed to create montage")

		stackname = "kerdenstack"+self.timestamp+".hed"
		apImagicFile.writeImagic(montage, stackname)
		### create montage
		montagecmd = ("montage -geometry +4+4 -tile %dx%d "%(self.params['xdim'], self.params['ydim']))
		for monpng in montagepngs:
			montagecmd += monpng+" "
		montagecmd += "montage.png"
		apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False)
		time.sleep(1)
		apFile.removeFilePattern(self.timestamp+".*.png")
		return bin
        partlist.append(part)
    return partlist


if __name__ == "__main__":
    ### generate random image data
    shape = (128, 128)
    partlist = []
    for i in range(16):
        part = numpy.random.random(shape)
        part = ndimage.gaussian_filter(part, sigma=shape[0] / 16)
        partlist.append(part)

    ### save original data
    apFile.removeStack("original.hed", warn=False)
    apImagicFile.writeImagic(partlist, "original.hed", msg=False)

    ### read and write with Appion
    apFile.removeStack("resave.hed", warn=False)
    imagic = apImagicFile.readImagic("original.hed", msg=False)
    partlist2 = imagic['images']
    apImagicFile.writeImagic(partlist2, "resave.hed", msg=False)
    print "============\nCompare APPION IMAGIC"
    if not isSameStack(partlist, partlist2):
        print "Stacks are different"
        #sys.exit(1)

    ### read and write with EMAN mrc
    emanMrcToStack(partlist)
    imagic = apImagicFile.readImagic("emanmrc.hed", msg=False)
    partlist3 = imagic['images']
def makeStackMeanPlot(stackid, gridpoints=16):
        if gridpoints > 30:
                apDisplay.printError("Too large of a grid")
        apDisplay.printMsg("creating Stack Mean Plot montage for stackid: "+str(stackid))
        t0 = time.time()
        ### big stacks are too slow
        boxsize = apStack.getStackBoxsize(stackid)
        bin = 1
        if boxsize is not None:
                while boxsize/bin > 128:
                        bin+=1
        apDisplay.printMsg("binning stack by "+str(bin))
        stackdata = apStack.getOnlyStackData(stackid, msg=False)
        stackfile = os.path.join(stackdata['path']['path'], stackdata['name'])
        partdatas = apStack.getStackParticlesFromId(stackid, msg=False)
        #check only first 100 particles for now
        #partdatas = partdatas[:500]
        apFile.removeFile("montage"+str(stackid)+".png")

        ### find limits
        limits = {'minmean': 1e12, 'maxmean': -1e12, 'minstdev': 1e12, 'maxstdev': -1e12,}
        for partdata in partdatas:
                if partdata['mean'] is None:
                        continue
                mean = partdata['mean']
                stdev = partdata['stdev']
                if mean < limits['minmean']:
                        limits['minmean'] = mean
                if mean > limits['maxmean']:
                        limits['maxmean'] = mean
                if stdev < limits['minstdev']:
                        limits['minstdev'] = stdev
                if stdev > limits['maxstdev']:
                        limits['maxstdev'] = stdev
        if limits['minmean'] > 1e11:
                apDisplay.printWarning("particles have no mean values in database")
                return
        apDisplay.printMsg(str(limits))

        ### create particle bins
        partlists = {}
        for i in range(gridpoints):
                for j in range(gridpoints):
                        key = ("%02dx%02d"%(i,j))
                        partlists[key] = []

        ### sort particles into bins
        for partdata in partdatas:
                key = meanStdevToKey(partdata['mean'], partdata['stdev'], limits, gridpoints)
                partnum = int(partdata['particleNumber'])
                partlists[key].append(partnum)

        printPlot(partlists, gridpoints)

        ### createStackAverages
        keys = partlists.keys()
        keys.sort()
        count = 0
        backs = "\b\b\b\b\b\b\b\b\b\b\b"
        montagestack = "montage"+str(stackid)+".hed"
        apFile.removeStack(montagestack)
        mystack = []
        for key in keys:
                count += 1
                sys.stderr.write(backs+backs+backs+backs)
                sys.stderr.write("% 3d of % 3d, %s: % 6d"%(count, len(keys), key, len(partlists[key])))
                avgimg = averageSubStack(partlists[key], stackfile, bin)
                if avgimg is not False:
                        mystack.append(avgimg)
        apImagicFile.writeImagic(mystack, montagestack)
        sys.stderr.write("\n")
        assemblePngs(keys, str(stackid), montagestack)
        apDisplay.printMsg("/bin/mv -v montage"+str(stackid)+".??? "+stackdata['path']['path'])
        apDisplay.printMsg("finished in "+apDisplay.timeString(time.time()-t0))
def start():
        ### backup old cls files
        classfile = os.path.join(params['emandir'], "cls.%d.tar"%(params['iter']))
        oldclassfile = os.path.join(params['emandir'], "cls.%d.old.tar"%(params['iter']))
        shutil.move(classfile, oldclassfile)

        projhed = os.path.join(params['emandir'], 'proj.hed')
        projimg = os.path.join(params['emandir'], 'proj.img')
        numproj = apFile.numImagesInStack(projhed)

        ### extract cls files
        tar = tarfile.open(oldclassfile)
        tar.extractall(path=params['rundir'])
        tar.close()
        clslist = glob.glob("cls*.lst")

        if numproj != len(clslist):
                apDisplay.printError("array length mismatch")

        ### loop through classes
        clsnum = 0
        goodavg = []
        for clsfile in clslist:
                clsnum += 1
                clsf = open(clsfile, 'r')
                partlist = clsf.readlines()
                clsf.close()

                ### write the projection???
                #e=projections[clsNum].getEuler()
                #projections[clsNum].setNImg(-1)
                #projections[clsNum].writeImage('goodavgs.hed', -1)

                if len(partlist) < params['minpart']:
                        ### not enough particles skip to next projection
                        #origaverage =
                        goodavg.append()
                        #emanClsAvgs[(clsNum+1)*2 - 1].writeImage('goodavgs.hed',-1)
                        continue

                ### make aligned stack
                if params['eotest'] is False:
                        command='clstoaligned.py -c ' + clsfile
                elif params['eotest']=='odd':
                        fw=open(clsfile,'r')
                        Ptcls = fw.readlines()
                        fw.close()
                        fw = open('cls_odd.lst', 'w')
                        fw.writelines(Ptcls[0])
                        fw.writelines(Ptcls[1])
                        for i1 in range(2,len(Ptcls)):
                                if i1%2==0:
                                        fw.writelines(Ptcls[i1])
                        fw.close()
                        command='clstoaligned.py -c cls_odd.lst'
                elif params['eotest']=='even':
                        fw=open(clsfile,'r')
                        Ptcls = fw.readlines()
                        fw.close()
                        fw = open('cls_even.lst', 'w')
                        fw.writelines(Ptcls[0])
                        fw.writelines(Ptcls[1])
                        for i1 in range(2,len(Ptcls)):
                                if i1%2==1:
                                        fw.writelines(Ptcls[i1])
                        fw.close()
                        command='clstoaligned.py -c cls_even.lst'
                apDisplay.printMsg(command)
                proc = subprocess.Popen(command, shell=True)
                proc.wait()
                #set up cls dir
                clsdir=clsfile.split('.')[0]+'.dir'
                os.mkdir(clsdir)
                os.rename('aligned.spi',os.path.join(clsdir,'aligned.spi'))
                alignedImgsName = os.path.join(clsdir,'aligned.spi')
                #alignedImgs = EMAN.readImages(alignedImgsName,-1,-1,0)
                #N = len(alignedImgs)

                apDisplay.printMsg("Starting cluster process for "+clsdir)
                ### fill similarity matrix with CC values
                similarfile, simlist = fillSimilarityMatrix(alignedImgsName)

                ### set preferences
                preffile = setPreferences(simlist, params['preftype'])

                ### run apcluster.exe program
                outfile = "clusters.out"
                apDisplay.printMsg("Run apcluster.exe program")
                apclusterexe = os.path.join("apcluster.exe")
                if os.path.isfile(outfile):
                        os.remove(outfile)
                clustercmd = apclusterexe+" "+similarfile+" "+preffile+" "+outfile
                proc = subprocess.Popen(clustercmd, shell=True)
                proc.wait()

                if not os.path.isfile(outfile):
                        apDisplay.printError("affinity propagration cluster program did not run")

                ### Parse apcluster output file: clusters.out
                apDisplay.printMsg("Parse apcluster output file: "+outfile)
                clustf = open(outfile, "r")
                ### each line is the particle and the number is the class
                partnum = 0
                classes = {}
                for line in clustf:
                        sline = line.strip()
                        if sline:
                                partnum += 1
                                classnum = int(sline)
                                if not classnum in classes:
                                        classes[classnum] = [partnum,]
                                else:
                                        classes[classnum].append(partnum)
                clustf.close()
                apDisplay.printMsg("Found %d classes"%(len(classes.keys())))

                ### Create class averages
                classavgdata = []
                classnames = classes.keys()
                classnames.sort()
                for classnum in classnames:
                        apDisplay.printMsg("Class %d, %d members"%(classnum, len(classes[classnum])))
                        clsf = open('subcls%03d.lst'%(classnum), 'w')
                        for partnum in classes[classnum]:
                                clsf.write("%d\n"%(partnum))
                        clsf.close()
                        classdatalist = apImagicFile.readParticleListFromStack(stackfile, classes[classnum], msg=False)
                        classdatarray = numpy.asarray(classdatalist)
                        classavgarray = classdatarray.mean(0)
                        classavgdata.append(classavgarray)
                apFile.removeStack("classaverage.hed")
                apImagicFile.writeImagic(classavgdata, "classaverage.hed")

                k=0
                for i in range(0,len(E)):
                        if len(E[i])==0:
                                continue
                        else:
                                f1=open('%s/subcls%02d.lst' % (str1,k), 'w')
                                for j in range(0,len(E[i])):
                                        f1.write('%d aligned.spi clusterCenterImgNum%d\n' % (E[i][j], i))
                                f1.close()
                                proc = subprocess.Popen('proc2d aligned.spi tempClsAvg.hed list=%s/subcls%02d.lst mask=%d average edgenorm' % (str1,k,params['mask']), shell=True)
                                proc.wait()
                                k=k+1

                clsAvgs = EMAN.readImages('tempClsAvg.hed',-1,-1,0)
                j=0
                for i in range(0,len(E)):
                        if len(E[i])==0:
                                continue
                        else:
                                clsAvgs[j].setNImg(len(E[i]))
                                clsAvgs[j].writeImage('subclasses_avg.hed',-1)
                                j=j+1
                os.chdir('../')


                ### Determine best averages

                proc = subprocess.Popen('/bin/rm tempClsAvg.*', shell=True)
                proc.wait()
                proc = subprocess.Popen('proc2d %s/aligned.spi tempClsAvg.hed mask=%d average edgenorm' % (clsdir, params['mask']), shell=True)
                proc.wait()
                class_avg = EMAN.readImages('tempClsAvg.hed',-1,-1,0)

                avgname=os.path.join(clsdir,'subclasses_avg.hed')
                averages=EMAN.readImages(avgname,-1,-1,0)

                cclist=[]
                for avg in averages:
                        cclist.append(cc(projections[clsNum],avg))

                f1 = open('%s/CCValues.txt'%(clsdir), 'w')
                for i in range(len(cclist)):
                        f1.write(str(cclist[i])+'\n')
                f1.close()

                ### Merge top best subclasses

                ccListSort = cclist[:]  # sort a copy so cclist.index() below still maps to subclass numbers
                ccListSort.sort()
                Ptcls = []
                for i in range(0,len(ccListSort)):
                        cci = ccListSort[len(ccListSort)-i-1]
                        if cci>=params['corCutOff']:
                                bestclass_i=cclist.index(cci)
                                classname_i=clslist[clsNum].split('.')[0]+'.dir/subClassAvgs/subcls'+string.zfill(bestclass_i,2)+'.lst'
                                f1=open(classname_i,'r')
                                Ptcls_i = f1.readlines()
                                f1.close()
                                Ptcls.extend(Ptcls_i)
                        else:
                                print "Not included - ", cci
                                pass
                if len(Ptcls)>0:

                        fw=open('mergeClasses.lst', 'w')
                        fw.writelines(Ptcls)
                        fw.close()

                        proc = subprocess.Popen('/bin/rm mergedClsAvg.spi', shell=True)
                        proc.wait()
                        proc = subprocess.Popen('proc2d %s/aligned.spi mergedClsAvg.spi list=mergeClasses.lst mask=%d average' % (clsdir, params['mask']), shell=True)
                        proc.wait()
                        mergedavg=EMAN.readImages('mergedClsAvg.spi',-1,-1,0)

                        mergedavg[0].setNImg(len(Ptcls))
                        mergedavg[0].setRAlign(e)
                        mergedavg[0].writeImage('goodavgs.hed',-1)
                else:
                        pass

                writeNewClsfile(clsfile,pretext,Ptext,Ptcls)

        #Create list of cc values
        for cls in range(0,len(clslist)):
                clsdir=clslist[cls].split('.')[0]+'.dir'
                apDisplay.printMsg("Starting class number %d" %(cls))

                #break
        pad=params['boxsize']*1.25
        if pad%2:
                pad=pad+1
        if params['sym']==None:
                make3dcommand='make3d goodavgs.hed out=threed.%d.mrc mask=%d pad=%d mode=2 hard=%d' % (params['iter'], params['mask'], pad, params['hard'])
        else:
                make3dcommand='make3d goodavgs.hed out=threed.%d.mrc mask=%d sym=%s pad=%d mode=2 hard=%d' % (params['iter'], params['mask'], params['sym'], pad, params['hard'])
        apDisplay.printMsg(make3dcommand)
        proc = subprocess.Popen(make3dcommand, shell=True)
        proc.wait()
        proc3dcommand='proc3d threed.%d.mrc threed.%da.mrc mask=%d norm' % (params['iter'],params['iter'],params['mask'])
        apDisplay.printMsg(proc3dcommand)
        proc = subprocess.Popen(proc3dcommand, shell=True)
        proc.wait()
        if params['eotest'] is False:
                #copy the resulting class average images to the main recon directory
                proc = subprocess.Popen('/bin/cp threed.%da.mrc ../.'%(params['iter']), shell=True)
                proc.wait()
                proc = subprocess.Popen('/bin/cp goodavgs.hed ../classes_msgp.%d.hed' %(params['iter']), shell=True)
                proc.wait()
                proc = subprocess.Popen('/bin/cp goodavgs.img ../classes_msgp.%d.img' %(params['iter']), shell=True)
                proc.wait()
                #link msgp result as the final result for this iteration
                rmcommand='/bin/rm -f ../classes.%d.hed ../classes.%d.img' % (params['iter'], params['iter'])
                proc = subprocess.Popen(rmcommand, shell=True)
                proc.wait()
                lncommand='ln -s classes_msgp.%d.hed ../classes.%d.hed' % (params['iter'], params['iter'])
                proc = subprocess.Popen(lncommand, shell=True)
                proc.wait()
                lncommand='ln -s classes_msgp.%d.img ../classes.%d.img' % (params['iter'], params['iter'])
                proc = subprocess.Popen(lncommand, shell=True)
                proc.wait()
        elif params['eotest']=='odd':
                proc = subprocess.Popen('/bin/cp threed.%da.mrc ../threed.%da.o.mrc' %(params['iter'], params['iter']), shell=True)
                proc.wait()
        elif params['eotest']=='even':
                proc = subprocess.Popen('/bin/cp threed.%da.mrc ../threed.%da.e.mrc' %(params['iter'], params['iter']), shell=True)
                proc.wait()
                proc = subprocess.Popen('proc3d threed.%da.mrc ../threed.%da.o.mrc fsc=../corEO%d.fsc.dat' %(params['iter'], params['iter'], params['iter']), shell=True)
                proc.wait()

        #replace the old cls*.lst with the new extended one
        proc = subprocess.Popen('tar cvzf %s %s' % (newclassfile,"cls*.lst.new"), shell=True)
        proc.wait()
        proc = subprocess.Popen('/bin/cp %s ../%s' %(newclassfile,classfile), shell=True)
        proc.wait()

        apDisplay.printMsg("Done!")
	def compute_stack_of_class_averages_and_reprojections(self, iteration):
		''' takes Xmipp single files, doc and sel files in projection-matching, creates a stack of class averages in the results directory '''

		if bool(self.runparams['package_params']['CleanUpFiles']) is False:

			os.chdir(os.path.join(self.projmatchpath, "Iter_%d" % iteration, "ProjMatchClasses"))

			### make projections, and put them back into resultspath
			selfile = "proj_match_classes.sel"
			refvolume = "../Iter_%d_reconstruction.vol" % iteration
			docfile = "proj_match_classes.doc"

#			apXmipp.compute_stack_of_class_averages_and_reprojections(d, selfile, refvolume, docfile, \
#				self.runparams['boxsize'], self.resultspath, self.params['timestamp'], iteration)
					
			### remove "lastdir" component from selfile (created by Xmipp program), then extract header information to docfile
			f = open(selfile, "r")
			lines = f.readlines()
			newlines = [re.sub("ProjMatchClasses/", "", line) for line in lines]
			f.close()
			f = open(selfile[:-4]+"_new.sel", "w")
			f.writelines(newlines)
			f.close()

			### create a projection params file and project the volume along identical Euler angles
			f = open("paramfile.descr", "w")
			f.write("%s\n" % refvolume)
			f.write("tmpproj 1 xmp\n")
			f.write("%d %d\n" % (self.runparams['boxsize'], self.runparams['boxsize']))
			f.write("%s rot tilt psi\n" % docfile)
			f.write("NULL\n")
			f.write("0 0\n")
			f.write("0 0\n")
			f.write("0 0\n")
			f.write("0 0\n")
			f.write("0 0\n")
			f.close()
			projectcmd = "xmipp_project -i paramfile.descr"
			apParam.runCmd(projectcmd, "Xmipp")
			
			### get order of projections in docfile
			d = open(docfile, "r")
			lines = d.readlines()[1:]
			d.close()
			projfile_sequence = []
			for i, l in enumerate(lines):
				if i % 2 == 0:
					filename = os.path.basename(l.split()[1])
					projfile_sequence.append(filename)
				else: pass
				
			### create stack of projections and class averages
			projections = glob.glob("tmpproj**xmp")
			projections.sort()
			if len(projections) != len(projfile_sequence):
				apDisplay.printWarning("number of projections does not match number of classes")
			stackarray = []
			stackname = os.path.join(self.resultspath, "proj-avgs_%s_it%.3d_vol%.3d.hed" % (self.params['timestamp'], iteration, 1))
			for i in range(len(projections)):
				stackarray.append(spider.read(projections[i]))
				stackarray.append(spider.read(projfile_sequence[i]))
			apImagicFile.writeImagic(stackarray, stackname, msg=False)
			
			### remove unnecessary files
			for file in glob.glob("tmpproj*"):
				apFile.removeFile(file)
			os.chdir(self.params['rundir'])
		else:
			apDisplay.printWarning("all projection-matching files were cleaned up ... NOT creating class-average / re-projection stack")

		return
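	### --- Hypothetical sketch (an assumption, not part of the original class) ---
	### The docfile parse above assumes the Xmipp doc layout in which each image
	### occupies two lines: a comment line whose second whitespace-separated token
	### is the projection file name, followed by a data line of angles/shifts.
	### The helper below isolates that "every other line" extraction.
	def _sketchDocfileImageNames(self, docfile):
		f = open(docfile, "r")
		lines = f.readlines()[1:]
		f.close()
		names = []
		for i, line in enumerate(lines):
			if i % 2 == 0:
				names.append(os.path.basename(line.split()[1]))
		return names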
    def insertRotKerDenSOM(self):
        inserttime = time.time()
        ### Preliminary data
        projectid = apProject.getProjectIdFromAlignStackId(
            self.params['alignstackid'])
        alignstackdata = appiondata.ApAlignStackData.direct_query(
            self.params['alignstackid'])
        numclass = self.params['xdim'] * self.params['ydim']
        pathdata = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))

        ### rotKerDen SOM Params object
        rotkerdenson = appiondata.ApRotKerDenSOMParamsData()
        #rotkerdenson['mask_diam'] = 2.0*self.params['maskrad']
        rotkerdenson['x_dimension'] = self.params['xdim']
        rotkerdenson['y_dimension'] = self.params['ydim']
        #rotkerdenson['convergence'] = self.params['converge']
        rotkerdenson['run_seconds'] = time.time() - self.t0
        rotkerdenson['initregulfact'] = self.params['initregulfact']
        rotkerdenson['finalregulfact'] = self.params['finalregulfact']
        rotkerdenson['incrementregulfact'] = self.params['incrementregulfact']
        rotkerdenson['spectrainnerradius'] = self.params['spectrainnerradius']
        rotkerdenson['spectraouterradius'] = self.params['spectraouterradius']
        rotkerdenson['spectralowharmonic'] = self.params['spectralowharmonic']
        rotkerdenson['spectrahighharmonic'] = self.params[
            'spectrahighharmonic']

        ### Analysis Run object
        analysisq = appiondata.ApAlignAnalysisRunData()
        analysisq['runname'] = self.params['runname']
        analysisq['path'] = pathdata
        analysisq['description'] = self.params['description']
        analysisq['alignstack'] = alignstackdata
        analysisq['hidden'] = False

        ### Clustering Run object
        clusterrunq = appiondata.ApClusteringRunData()
        clusterrunq['runname'] = self.params['runname']
        clusterrunq['description'] = self.params['description']
        clusterrunq['boxsize'] = alignstackdata['boxsize']
        clusterrunq['pixelsize'] = alignstackdata['pixelsize']
        clusterrunq['num_particles'] = self.params['numpart']
        clusterrunq['alignstack'] = alignstackdata
        clusterrunq['analysisrun'] = analysisq
        clusterrunq['rotkerdenparams'] = rotkerdenson

        ### Clustering Stack object
        #Stack with cluster averages??????
        template = os.path.join(self.params['rundir'],
                                self.spectraTemporalFilesMask + ".png")
        files = glob.glob(template)
        imglist = []
        for listname in files:
            a = apImage.readPNG(listname)
            imglist.append(a)
        apImagicFile.writeImagic(imglist,
                                 "rotkerdenstack" + self.timestamp + ".hed")
        clusterstackq = appiondata.ApClusteringStackData()
        clusterstackq['avg_imagicfile'] = "rotkerdenstack" + self.timestamp + ".hed"
        clusterstackq['num_classes'] = numclass
        clusterstackq['clusterrun'] = clusterrunq
        clusterstackq['path'] = pathdata
        clusterstackq['hidden'] = False
        imagicfile = os.path.join(self.params['rundir'],
                                  clusterstackq['avg_imagicfile'])
        if not os.path.isfile(imagicfile):
            apDisplay.printError("could not find average stack file: " +
                                 imagicfile)

        ### looping over clusters
        apDisplay.printColor(
            "Inserting particle classification data, please wait", "cyan")
        numclass = self.params['xdim'] * self.params['ydim']
        for i in range(numclass):
            classnum = i + 1
            classroot = "%s.%d" % (self.timestamp, classnum - 1)
            classdocfile = os.path.join(self.params['rundir'], classroot)
            partlist = self.readClassDocFile(classdocfile)
            ### Clustering Particle object
            # MRC image for each code node but plot or image
            clusterrefq = appiondata.ApClusteringReferenceData()
            clusterrefq['refnum'] = classnum
            clusterrefq['avg_mrcfile'] = classroot + ".mrc"
            clusterrefq['clusterrun'] = clusterrunq
            clusterrefq['path'] = pathdata
            clusterrefq['num_particles'] = len(partlist)

            ### looping over particles
            #which particles belong to which code node
            sys.stderr.write(".")
            for partnum in partlist:
                alignpartdata = self.getAlignParticleData(
                    partnum, alignstackdata)

                ### Clustering Particle objects
                clusterpartq = appiondata.ApClusteringParticleData()
                clusterpartq['clusterstack'] = clusterstackq
                clusterpartq['alignparticle'] = alignpartdata
                clusterpartq['partnum'] = partnum
                clusterpartq['refnum'] = classnum
                clusterpartq['clusterreference'] = clusterrefq

                ### finally we can insert parameters
                if self.params['commit'] is True:
                    clusterpartq.insert()
        sys.stderr.write("\n")
        apDisplay.printMsg("Insertion complete in %s" %
                           (apDisplay.timeString(time.time() - inserttime)))
	def compute_stack_of_class_averages_and_reprojections(self, iteration, reference_number):
		''' takes Xmipp single files, doc and sel files associated with ML3D, creates a stack of class averages in the results directory '''
			
		'''			
		### make projections, and put them back into resultspath
		selfile = "ml3d_it%.6d_vol%.6d.sel" % (iteration, reference_number)
		refvolume = "ml3d_it%.6d_vol%.6d.vol\n" % (iteration, reference_number)
		docfile = "ml3d_it%.6d_vol%.6d.doc" % (iteration, reference_number)
		
		apXmipp.compute_stack_of_class_averages_and_reprojections(self.ml3dpath, selfile, refvolume, docfile, \
			self.runparams['boxsize'], self.resultspath, self.params['timestamp'], iteration, reference_number, extract=True)

		return				
		'''		
		os.chdir(self.ml3dpath)		
				
		### remove "RunML3D/" from selfile (created by ML3D program), then extract header information to docfile
		selfile = "ml3d_it%.6d_vol%.6d.sel" % (iteration, reference_number)
		f = open(selfile, "r")
		lines = f.readlines()
		newlines = [re.sub("RunML3D/", "", line) for line in lines]
		f.close()
		f = open(selfile, "w")
		f.writelines(newlines)
		f.close()
		extractcmd = "xmipp_header_extract -i ml3d_it%.6d_vol%.6d.sel -o ml3d_it%.6d_vol%.6d.doc" \
			% (iteration, reference_number, iteration, reference_number)
		apParam.runCmd(extractcmd, "Xmipp")
		
		### create a projection params file and project the volume along identical Euler angles
		f = open("paramfile.descr", "w")
		f.write("ml3d_it%.6d_vol%.6d.vol\n" % (iteration, reference_number))
		f.write("tmpproj 1 xmp\n")
		f.write("%d %d\n" % (self.runparams['boxsize'], self.runparams['boxsize']))
		f.write("ml3d_it%.6d_vol%.6d.doc rot tilt psi\n" % (iteration, reference_number))
		f.write("NULL\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.close()
		projectcmd = "xmipp_project -i paramfile.descr"
		apParam.runCmd(projectcmd, "Xmipp")
		
		### get order of projections in docfile
		docfile = "ml3d_it%.6d_vol%.6d.doc" % (iteration, reference_number)
		d = open(docfile, "r")
		lines = d.readlines()[1:]
		d.close()
		projfile_sequence = []
		for i, l in enumerate(lines):
			if i % 2 == 0:
				filename = os.path.basename(l.split()[1])
				projfile_sequence.append(filename)
			else: pass
		
		### create stack of projections and class averages
		projections = glob.glob("tmpproj**xmp")
		projections.sort()
		if len(projections) != len(projfile_sequence):
			apDisplay.printWarning("number of projections does not match number of classes for model %d, iteration %d")
		stackarray = []
		stackname = os.path.join(self.resultspath, "proj-avgs_%s_it%.3d_vol%.3d.hed" % (self.params['timestamp'], iteration, reference_number))
		for i in range(len(projections)):
			stackarray.append(spider.read(projections[i]))
			stackarray.append(spider.read(projfile_sequence[i]))
		apImagicFile.writeImagic(stackarray, stackname, msg=False)
		
		### remove unnecessary files
		for file in glob.glob("tmpproj*"):
			apFile.removeFile(file)			
		
		os.chdir(self.params['rundir'])

		return
	def createAlignedStacks(self, partlist, origstackfile):
		partperiter = min(4096,apImagicFile.getPartSegmentLimit(origstackfile))
		numpart = len(partlist)
		if numpart < partperiter:
			partperiter = numpart

		t0 = time.time()
		imgnum = 0
		stacklist = []
		apDisplay.printMsg("rotating and shifting particles at "+time.asctime())
		while imgnum < len(partlist):
			index = imgnum % partperiter
			if imgnum % 100 == 0:
				sys.stderr.write(".")
			if index == 0:
				### deal with large stacks
				if imgnum > 0:
					sys.stderr.write("\n")
					stackname = "alignstack%d.hed"%(imgnum)
					apDisplay.printMsg("writing aligned particles to file "+stackname)
					stacklist.append(stackname)
					apFile.removeStack(stackname, warn=False)
					apImagicFile.writeImagic(alignstack, stackname, msg=False)
					perpart = (time.time()-t0)/imgnum
					apDisplay.printColor("particle %d of %d :: %s per part :: %s remain"%
						(imgnum+1, numpart, apDisplay.timeString(perpart),
						apDisplay.timeString(perpart*(numpart-imgnum))), "blue")
				alignstack = []
				imagesdict = apImagicFile.readImagic(origstackfile, first=imgnum+1, last=imgnum+partperiter, msg=False)

			### align particles
			partimg = imagesdict['images'][index]
			partdict = partlist[imgnum]
			partnum = imgnum+1
			if partdict['partnum'] != partnum:
				apDisplay.printError("particle shifting "+str(partnum)+" != "+str(partdict))
			xyshift = (partdict['xshift'], partdict['yshift'])
			alignpartimg = apImage.xmippTransform(partimg, rot=partdict['inplane'],
				shift=xyshift, mirror=partdict['mirror'])
			alignstack.append(alignpartimg)
			imgnum += 1

		### write remaining particle to file
		sys.stderr.write("\n")
		stackname = "alignstack%d.hed"%(imgnum)
		apDisplay.printMsg("writing aligned particles to file "+stackname)
		stacklist.append(stackname)
		apImagicFile.writeImagic(alignstack, stackname, msg=False)

		### merge stacks
		alignimagicfile = "alignstack.hed"
		apFile.removeStack(alignimagicfile, warn=False)
		apImagicFile.mergeStacks(stacklist, alignimagicfile)
		#for stackname in stacklist:
		#	emancmd = "proc2d %s %s"%(stackname, alignimagicfile)
		#	apEMAN.executeEmanCmd(emancmd, verbose=False)
		filepart = apFile.numImagesInStack(alignimagicfile)
		if filepart != numpart:
			apDisplay.printError("number aligned particles (%d) not equal number expected particles (%d)"%
				(filepart, numpart))
		for stackname in stacklist:
			apFile.removeStack(stackname, warn=False)

		### summarize
		apDisplay.printMsg("rotated and shifted %d particles in %s"%(imgnum, apDisplay.timeString(time.time()-t0)))

		return alignimagicfile
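	### --- Hypothetical sketch (not part of the original class) ----------------
	### createAlignedStacks() above writes particles in fixed-size chunks:
	### whenever imgnum % partperiter wraps back to 0, the in-memory list is
	### flushed to an intermediate .hed file and a fresh chunk is started.
	### The generator below isolates that chunking pattern on a plain list.
	def _sketchChunkList(self, items, chunksize):
		chunk = []
		for i, item in enumerate(items):
			if i % chunksize == 0 and i > 0:
				yield chunk
				chunk = []
			chunk.append(item)
		if len(chunk) > 0:
			yield chunk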
	def mergeImageStackIntoBigStack(self, imgstackfile, imgdata):
		t0 = time.time()
		apDisplay.printMsg("filtering particles and adding to stack")
		# if applying a boxmask, write to a temporary file before adding to main stack
		bigimgstack = os.path.join(self.params['rundir'], self.params['single'])
		if self.params['boxmask'] is not None:
			bigimgstack = os.path.splitext(imgstackfile)[0]+"-premask.hed"
		### here is the craziness
		### step 1: read imgstackfile into memory
		imgstackmemmap = imagic.read(imgstackfile)
		### when only one particle is read it defaults to a 2D array instead of a 3D array
		if len(imgstackmemmap.shape) < 3:
			imgstackmemmap = imgstackmemmap.reshape(1, imgstackmemmap.shape[0], imgstackmemmap.shape[1])
		if self.params['debug'] is True:
			print "imgstackmemmap.shape", imgstackmemmap.shape
		apix = self.params['apix'] #apDatabase.getPixelSize(imgdata)

		boxshape = (self.boxsize, self.boxsize)
		processedParticles = []
		for particle in imgstackmemmap:

			### step 2: filter particles
			### high / low pass filtering
			#if self.params['pixlimit']:
			#	particle = imagefilter.pixelLimitFilter(particle, self.params['pixlimit'])
			if self.params['lowpass']:
				particle = imagefilter.lowPassFilter(particle, apix=apix, radius=self.params['lowpass'])
			if self.params['highpass']:
				particle = imagefilter.highPassFilter2(particle, self.params['highpass'], apix=apix)
			### invert the images if requested
			if self.params['inverted'] is True:
				particle = -1.0 * particle
			if particle.shape != boxshape:
				if self.boxsize <= particle.shape[0] and self.boxsize <= particle.shape[1]:
					particle = imagefilter.frame_cut(particle, boxshape)
				else:
					apDisplay.printError("particle shape (%dx%d) is smaller than boxsize (%d)"
						%(particle.shape[0], particle.shape[1], self.boxsize))

			### step 3: normalize particles
			#self.normoptions = ('none', 'boxnorm', 'edgenorm', 'rampnorm', 'parabolic') #normalizemethod
			if self.params['normalizemethod'] == 'boxnorm':
				particle = imagenorm.normStdev(particle)
			elif self.params['normalizemethod'] == 'edgenorm':
				particle = imagenorm.edgeNorm(particle)
			elif self.params['normalizemethod'] == 'rampnorm':
				particle = imagenorm.rampNorm(particle)
			elif self.params['normalizemethod'] == 'parabolic':
				particle = imagenorm.parabolicNorm(particle)

			### step 4: decimate/bin particles if specified
			### binning is last, so we maintain most detail and do not have to deal with binned apix
			if self.params['bin'] > 1:
				particle = imagefun.bin2(particle, self.params['bin'])

			#from scipy.misc import toimage
			#toimage(particle).show()

			processedParticles.append(particle)

		### step 5: merge particle list with larger stack
		apImagicFile.appendParticleListToStackFile(processedParticles, bigimgstack,
			msg=self.params['debug'])

		#remove original image stack from memory
		del imgstackmemmap
		del processedParticles

		t0 = time.time()
		# if applying boxmask, now mask the particles & append to stack
		if self.params['boxmask'] is not None:
			# normalize particles before boxing, since zeros in mask
			# can affect subsequent processing if not properly normalized
			apEMAN.executeEmanCmd("proc2d %s %s edgenorm inplace"%(bigimgstack,bigimgstack),showcmd=False)
			imgstack = apImagicFile.readImagic(bigimgstack, msg=False)
			maskstack = apImagicFile.readImagic(self.params['boxmaskf'],msg=False)
			for i in range(len(imgstack['images'])):
				imgstack['images'][i]*=maskstack['images'][i]
			maskedpartstack = os.path.splitext(imgstackfile)[0]+"-aftermask.hed"
			apImagicFile.writeImagic(imgstack['images'], maskedpartstack)
			bigimgstack = os.path.join(self.params['rundir'], self.params['single'])
			apEMAN.executeEmanCmd("proc2d %s %s flip"%(maskedpartstack,bigimgstack))

		### count particles
		bigcount = apFile.numImagesInStack(bigimgstack, self.boxsize/self.params['bin'])
		imgcount = apFile.numImagesInStack(imgstackfile, self.boxsize)

		### append to particle log file
		partlogfile = os.path.join(self.params['rundir'], self.timestamp+"-particles.info")
		f = open(partlogfile, 'a')
		for i in range(imgcount):
			partnum = self.particleNumber + i + 1
			line = str(partnum)+'\t'+os.path.join(imgdata['session']['image path'], imgdata['filename']+".mrc")
			f.write(line+"\n")
		f.close()

		self.mergestacktimes.append(time.time()-t0)

		return bigcount
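	### --- Hypothetical sketch (not part of the original class) ----------------
	### mergeImageStackIntoBigStack() applies, per particle and in this order:
	### low/high-pass filtering, optional inversion, cropping to the requested
	### box, normalization, and binning last.  The helper below reproduces that
	### ordering with plain numpy/scipy stand-ins (gaussian blur instead of
	### imagefilter.lowPassFilter, centered slicing instead of
	### imagefilter.frame_cut, stdev normalization instead of imagenorm);
	### it assumes the input is at least boxsize x boxsize.
	def _sketchProcessParticle(self, particle, boxsize, invert=True):
		from scipy import ndimage
		### low-pass filter stand-in
		particle = ndimage.gaussian_filter(particle, sigma=1.0)
		### invert densities if requested
		if invert is True:
			particle = -1.0 * particle
		### crop to boxsize x boxsize about the center
		ny, nx = particle.shape
		y0 = (ny - boxsize) // 2
		x0 = (nx - boxsize) // 2
		particle = particle[y0:y0 + boxsize, x0:x0 + boxsize]
		### normalize to zero mean, unit standard deviation
		return (particle - particle.mean()) / particle.std()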
def compute_stack_of_class_averages_and_reprojections(dir, selfile, refvolume, docfile, boxsize, resultspath, timestamp, iteration, reference_number=1, extract=False):
	''' takes Xmipp single files, doc and sel files in routine, creates a stack of class averages in the results directory '''
	
	workingdir = os.getcwd()
	os.chdir(dir)
	if dir.endswith("/"):
		dir = dir[:-1]
	head, tail = os.path.split(dir)
	
	### remove "lastdir" component from selfile (created by Xmipp program), then extract header information to docfile
	f = open(selfile, "r")
	lines = f.readlines()
	newlines = [re.sub(str(tail)+"/", "", line) for line in lines]
	f.close()
	f = open(selfile[:-4]+"_new.sel", "w")
	f.writelines(newlines)
	f.close()
	if extract is True:
		extractcmd = "xmipp_header_extract -i %s.sel -o %s.doc" % (selfile[:-4], docfile[:-4])
		apParam.runCmd(extractcmd, "Xmipp")

	### create a projection params file and project the volume along identical Euler angles
	f = open("paramfile.descr", "w")
	f.write("%s\n" % refvolume)
	f.write("tmpproj 1 xmp\n")
	f.write("%d %d\n" % (boxsize, boxsize))
	f.write("%s rot tilt psi\n" % docfile)
	f.write("NULL\n")
	f.write("0 0\n")
	f.write("0 0\n")
	f.write("0 0\n")
	f.write("0 0\n")
	f.write("0 0\n")
	f.close()
	projectcmd = "xmipp_project -i paramfile.descr"
	apParam.runCmd(projectcmd, "Xmipp")
	
	### get order of projections in docfile
	d = open(docfile, "r")
	lines = d.readlines()[1:]
	d.close()
	projfile_sequence = []
	for i, l in enumerate(lines):
		if i % 2 == 0:
			filename = os.path.basename(l.split()[1])
			projfile_sequence.append(filename)
		else: pass
		
	### create stack of projections and class averages
	projections = glob.glob("tmpproj**xmp")
	projections.sort()
	if len(projections) != len(projfile_sequence):
		apDisplay.printWarning("number of projections does not match number of classes")
	stackarray = []
	stackname = os.path.join(resultspath, "proj-avgs_%s_it%.3d_vol%.3d.hed" % (timestamp, iteration, reference_number))
	for i in range(len(projections)):
		stackarray.append(spider.read(projections[i]))
		stackarray.append(spider.read(projfile_sequence[i]))
	apImagicFile.writeImagic(stackarray, stackname, msg=False)
	
	### remove unnecessary files
	for file in glob.glob("tmpproj*"):
		apFile.removeFile(file)
	os.chdir(workingdir)

	return 
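### Hypothetical usage sketch for the helper above.  The directory, sel/doc/vol
### file names, box size, and timestamp are illustrative assumptions; in the
### original scripts these values come from the ML3D or projection-matching
### run parameters.
def _exampleComputeAvgReprojStack():
	compute_stack_of_class_averages_and_reprojections(
		dir="RunML3D", selfile="ml3d_it000005_vol000001.sel",
		refvolume="ml3d_it000005_vol000001.vol",
		docfile="ml3d_it000005_vol000001.doc",
		boxsize=128, resultspath="results", timestamp="10jan01a",
		iteration=5, reference_number=1, extract=True)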
	def compute_stack_of_class_averages_and_reprojections(self, iteration, reference_number):
		''' takes Xmipp single files, doc and sel files associated with ML3D, creates a stack of class averages in the results directory '''
			
		'''			
		### make projections, and put them back into resultspath
		selfile = "ml3d_it%.6d_vol%.6d.sel" % (iteration, reference_number)
		refvolume = "ml3d_it%.6d_vol%.6d.vol\n" % (iteration, reference_number)
		docfile = "ml3d_it%.6d_vol%.6d.doc" % (iteration, reference_number)
		
		apXmipp.compute_stack_of_class_averages_and_reprojections(self.ml3dpath, selfile, refvolume, docfile, \
			self.runparams['boxsize'], self.resultspath, self.params['timestamp'], iteration, reference_number, extract=True)

		return				
		'''		
		os.chdir(self.ml3dpath)		
				
		### remove "RunML3D/" from selfile (created by ML3D program), then extract header information to docfile
		selfile = "ml3d_it%.6d_vol%.6d.sel" % (iteration, reference_number)
		f = open(selfile, "r")
		lines = f.readlines()
		newlines = [re.sub("RunML3D/", "", line) for line in lines]
		f.close()
		f = open(selfile, "w")
		f.writelines(newlines)
		f.close()
		extractcmd = "xmipp_header_extract -i ml3d_it%.6d_vol%.6d.sel -o ml3d_it%.6d_vol%.6d.doc" \
			% (iteration, reference_number, iteration, reference_number)
		apParam.runCmd(extractcmd, "Xmipp")
		
		### create a projection params file and project the volume along identical Euler angles
		f = open("paramfile.descr", "w")
		f.write("ml3d_it%.6d_vol%.6d.vol\n" % (iteration, reference_number))
		f.write("tmpproj 1 xmp\n")
		f.write("%d %d\n" % (self.runparams['boxsize'], self.runparams['boxsize']))
		f.write("ml3d_it%.6d_vol%.6d.doc rot tilt psi\n" % (iteration, reference_number))
		f.write("NULL\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.write("0 0\n")
		f.close()
		projectcmd = "xmipp_project -i paramfile.descr"
		apParam.runCmd(projectcmd, "Xmipp")
		
		### get order of projections in docfile
		docfile = "ml3d_it%.6d_vol%.6d.doc" % (iteration, reference_number)
		d = open(docfile, "r")
		lines = d.readlines()[1:]
		d.close()
		projfile_sequence = []
		for i, l in enumerate(lines):
			if i % 2 == 0:
				filename = os.path.basename(l.split()[1])
				projfile_sequence.append(filename)
			else: pass
		
		### create stack of projections and class averages
		projections = glob.glob("tmpproj**xmp")
		projections.sort()
		if len(projections) != len(projfile_sequence):
			apDisplay.printWarning("number of projections does not match number of classes for model %d, iteration %d")
		stackarray = []
		stackname = os.path.join(self.resultspath, "proj-avgs_%s_it%.3d_vol%.3d.hed" % (self.params['timestamp'], iteration, reference_number))
		for i in range(len(projections)):
			stackarray.append(spider.read(projections[i]))
			stackarray.append(spider.read(projfile_sequence[i]))
		apImagicFile.writeImagic(stackarray, stackname, msg=False)
		
		### remove unnecessary files
		for file in glob.glob("tmpproj*"):
			apFile.removeFile(file)			
		
		os.chdir(self.params['rundir'])

		return
        def makeNewStacks(self, parttree):
                ### untilted stack
                self.notstackdata = apStack.getOnlyStackData(self.params['notstackid'])
                notstackfile = os.path.join(self.notstackdata['path']['path'], self.notstackdata['name'])

                ### tilted stack
                if not self.tiltstackdata:
                        self.tiltstackdata = apStack.getOnlyStackData(self.params['tiltstackid'])
                tiltstackfile = os.path.join(self.tiltstackdata['path']['path'], self.tiltstackdata['name'])

                ### make doc file of Euler angles
                #eulerfile = self.makeEulerDoc(parttree)
                eulerfile = os.path.join(self.params['rundir'], "eulersdoc"+self.timestamp+".spi")
                if os.path.isfile(eulerfile):
                        apFile.removeFile(eulerfile)

                count = 0
                notstacklist = []
                tiltstacklist = []
                sizelimit = 2048
                notbox = apImagicFile.getBoxsize(notstackfile)
                tiltbox  = apImagicFile.getBoxsize(tiltstackfile)
                tiltstacks = []
                notstacks = []
                t0 = time.time()
                for partdict in parttree:
                        ### print friendly message
                        if count % 100 == 0:
                                backs = "\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b"
                                sys.stderr.write(backs+backs+backs+backs)
                                if count > sizelimit:
                                        esttime = (len(parttree)/float(count)-1.0)*(time.time()-t0)
                                        sys.stderr.write(str(count)+" particles of "+str(len(parttree))
                                                +", "+apDisplay.timeString(esttime)+" remaining")
                                else:
                                        sys.stderr.write(str(count)+" particles of "+str(len(parttree)))
                        ### save stacks to file to save memory
                        if count%sizelimit == 0:
                                if count > 1:
                                        apDisplay.printMsg("Writing stacks to file")
                                        t1 = time.time()
                                        tiltname = os.path.join(self.params['rundir'], "tiltstack%d.hed"%(count))
                                        apFile.removeStack(tiltname)
                                        apImagicFile.writeImagic(tiltstacklist, tiltname, msg=False)
                                        tiltstacks.append(tiltname)
                                        apDisplay.printMsg("finished tilted stack in "+apDisplay.timeString(time.time()-t1))
                                        t1 = time.time()
                                        notname = os.path.join(self.params['rundir'], "notstack%d.hed"%(count))
                                        apFile.removeStack(notname)
                                        apImagicFile.writeImagic(notstacklist, notname, msg=False)
                                        notstacks.append(notname)
                                        apDisplay.printMsg("finished untilted stack in "+apDisplay.timeString(time.time()-t1))
                                ### reset stacks
                                apDisplay.printMsg("Reset stacks in memory")
                                notstacklist = []
                                tiltstacklist = []
                        ### increment count
                        count += 1
                        ### write to Euler doc
                        self.appendEulerDoc(eulerfile, partdict['tilt'], count)
                        ### untilted stack
                        notpartarray = apImagicFile.readSingleParticleFromStack(notstackfile, partdict['not'], notbox, False)
                        notstacklist.append(notpartarray)
                        ### tilted stack
                        tiltpartarray = apImagicFile.readSingleParticleFromStack(tiltstackfile, partdict['tilt'], tiltbox, False)
                        tiltstacklist.append(tiltpartarray)
                ### write remaining particles to stack
                if len(notstacklist) > 0:
                        apDisplay.printMsg("Writing stacks to file")
                        t1 = time.time()
                        tiltname = os.path.join(self.params['rundir'], "tiltstack%d.hed"%(count))
                        apFile.removeStack(tiltname)
                        apImagicFile.writeImagic(tiltstacklist, tiltname, msg=False)
                        tiltstacks.append(tiltname)
                        apDisplay.printMsg("finished tilted stack in "+apDisplay.timeString(time.time()-t1))
                        t1 = time.time()
                        notname = os.path.join(self.params['rundir'], "notstack%d.hed"%(count))
                        apFile.removeStack(notname)
                        apImagicFile.writeImagic(notstacklist, notname, msg=False)
                        notstacks.append(notname)
                        apDisplay.printMsg("finished untilted stack in "+apDisplay.timeString(time.time()-t1))

                ### merge NOT stack
                notname = os.path.join(self.params['rundir'], "notstack.hed")
                apImagicFile.mergeStacks(notstacks, notname)
                for stackname in notstacks:
                        apFile.removeStack(stackname, warn=False)

                ### merge TILT stack
                tiltname = os.path.join(self.params['rundir'], "tiltstack.hed")
                apImagicFile.mergeStacks(tiltstacks, tiltname)
                for stackname in tiltstacks:
                        apFile.removeStack(stackname, warn=False)

                ### upload results
                if self.params['commit'] is True:
                        self.uploadResults()
	def createAlignedStacks(self, partlist, origstackfile):
		partperiter = min(4096,apImagicFile.getPartSegmentLimit(origstackfile))
		numpart = len(partlist)
		if numpart < partperiter:
			partperiter = numpart

		t0 = time.time()
		imgnum = 0
		stacklist = []
		apDisplay.printMsg("rotating and shifting particles at "+time.asctime())
		while imgnum < len(partlist):
			index = imgnum % partperiter
			if imgnum % 100 == 0:
				sys.stderr.write(".")
			if index == 0:
				### deal with large stacks
				if imgnum > 0:
					sys.stderr.write("\n")
					stackname = "alignstack%d.hed"%(imgnum)
					apDisplay.printMsg("writing aligned particles to file "+stackname)
					stacklist.append(stackname)
					apFile.removeStack(stackname, warn=False)
					apImagicFile.writeImagic(alignstack, stackname, msg=False)
					perpart = (time.time()-t0)/imgnum
					apDisplay.printColor("particle %d of %d :: %s per part :: %s remain"%
						(imgnum+1, numpart, apDisplay.timeString(perpart),
						apDisplay.timeString(perpart*(numpart-imgnum))), "blue")
				alignstack = []
				imagesdict = apImagicFile.readImagic(origstackfile, first=imgnum+1, last=imgnum+partperiter, msg=False)

			### align particles
			partimg = imagesdict['images'][index]
			partdict = partlist[imgnum]
			partnum = imgnum+1
			if partdict['partnum'] != partnum:
				apDisplay.printError("particle shifting "+str(partnum)+" != "+str(partdict))
			xyshift = (partdict['xshift'], partdict['yshift'])
			alignpartimg = apImage.xmippTransform(partimg, rot=partdict['inplane'],
				shift=xyshift, mirror=partdict['mirror'])
			alignstack.append(alignpartimg)
			imgnum += 1

		### write remaining particle to file
		sys.stderr.write("\n")
		stackname = "alignstack%d.hed"%(imgnum)
		apDisplay.printMsg("writing aligned particles to file "+stackname)
		stacklist.append(stackname)
		apImagicFile.writeImagic(alignstack, stackname, msg=False)

		### merge stacks
		alignimagicfile = "alignstack.hed"
		apFile.removeStack(alignimagicfile, warn=False)
		apImagicFile.mergeStacks(stacklist, alignimagicfile)
		#for stackname in stacklist:
		#	emancmd = "proc2d %s %s"%(stackname, alignimagicfile)
		#	apEMAN.executeEmanCmd(emancmd, verbose=False)
		filepart = apFile.numImagesInStack(alignimagicfile)
		if filepart != numpart:
			apDisplay.printError("number aligned particles (%d) not equal number expected particles (%d)"%
				(filepart, numpart))
		for stackname in stacklist:
			apFile.removeStack(stackname, warn=False)

		### summarize
		apDisplay.printMsg("rotated and shifted %d particles in %s"%(imgnum, apDisplay.timeString(time.time()-t0)))

		return alignimagicfile
                apFile.removeFile(spifile)
                partlist.append(part)
        return partlist

if __name__ == "__main__":
        ### generate random image data
        shape = (128,128)
        partlist = []
        for i in range(16):
                part = numpy.random.random(shape)
                part = ndimage.gaussian_filter(part, sigma=shape[0]/16)
                partlist.append(part)

        ### save original data
        apFile.removeStack("original.hed", warn=False)
        apImagicFile.writeImagic(partlist, "original.hed", msg=False)

        ### read and write with Appion
        apFile.removeStack("resave.hed", warn=False)
        imagic = apImagicFile.readImagic("original.hed", msg=False)
        partlist2 = imagic['images']
        apImagicFile.writeImagic(partlist2, "resave.hed", msg=False)
        print "============\nCompare APPION IMAGIC"
        if not isSameStack(partlist, partlist2):
                print "Stacks are different"
                #sys.exit(1)
        
        ### read and write with EMAN mrc
        emanMrcToStack(partlist)
        imagic = apImagicFile.readImagic("emanmrc.hed", msg=False)
        partlist3 = imagic['images']
def start():
	### backup old cls files
	classfile = os.path.join(params['emandir'], "cls.%d.tar"%(params['iter']))
	oldclassfile = os.path.join(params['emandir'], "cls.%d.old.tar"%(params['iter']))
	shutil.move(classfile, oldclassfile)

	projhed = os.path.join(params['emandir'], 'proj.hed')
	projimg = os.path.join(params['emandir'], 'proj.img')
	numproj = apFile.numImagesInStack(projhed)

	### extract cls files
	tar = tarfile.open(oldclassfile)
	tar.extractall(path=params['rundir'])
	tar.close()
	clslist = glob.glob("cls*.lst")

	if numproj != len(clslist):
		apDisplay.printError("array length mismatch")

	### loop through classes
	clsnum = 0
	goodavg = []
	for clsfile in clslist:
		clsnum += 1
		clsf = open(clsfile, 'r')
		partlist = clsf.readlines()
		clsf.close()

		### write the projection???
		#e=projections[clsNum].getEuler()
		#projections[clsNum].setNImg(-1)
		#projections[clsNum].writeImage('goodavgs.hed', -1)

		if len(partlist) < params['minpart']:
			### not enough particles, skip to next projection
			### the original class average would be appended here, e.g.:
			#origaverage =
			#goodavg.append(origaverage)
			#emanClsAvgs[(clsNum+1)*2 - 1].writeImage('goodavgs.hed',-1)
			continue

		### make aligned stack
		if params['eotest'] is False:
			command='clstoaligned.py -c ' + clsfile
		### (a standalone sketch of the even/odd eotest split below is given after this function)
		elif params['eotest']=='odd':
			fw=open(clsfile,'r')
			Ptcls = fw.readlines()
			fw.close()
			fw = open('cls_odd.lst', 'w')
			fw.writelines(Ptcls[0])
			fw.writelines(Ptcls[1])
			for i1 in range(2,len(Ptcls)):
				if i1%2==0:
					fw.writelines(Ptcls[i1])
			fw.close()
			command='clstoaligned.py -c cls_odd.lst'
		elif params['eotest']=='even':
			fw=open(clsfile,'r')
			Ptcls = fw.readlines()
			fw.close()
			fw = open('cls_even.lst', 'w')
			fw.writelines(Ptcls[0])
			fw.writelines(Ptcls[1])
			for i1 in range(2,len(Ptcls)):
				if i1%2==1:
					fw.writelines(Ptcls[i1])
			fw.close()
			command='clstoaligned.py -c cls_even.lst'
		apDisplay.printMsg(command)
		proc = subprocess.Popen(command, shell=True)
		proc.wait()
		#set up cls dir
		clsdir=clsfile.split('.')[0]+'.dir'
		os.mkdir(clsdir)
		os.rename('aligned.spi',os.path.join(clsdir,'aligned.spi'))
		alignedImgsName = os.path.join(clsdir,'aligned.spi')
		#alignedImgs = EMAN.readImages(alignedImgsName,-1,-1,0)
		#N = len(alignedImgs)

		apDisplay.printMsg("Starting cluster process for "+clsdir)
		### fill similarity matrix with CC values
		similarfile, simlist = fillSimilarityMatrix(alignedImgsName)

		### set preferences
		preffile = setPreferences(simlist, params['preftype'])

		### run apcluster.exe program
		outfile = "clusters.out"
		apDisplay.printMsg("Run apcluster.exe program")
		apclusterexe = os.path.join("apcluster.exe")
		if os.path.isfile(outfile):
			os.remove(outfile)
		clustercmd = apclusterexe+" "+similarfile+" "+preffile+" "+outfile
		proc = subprocess.Popen(clustercmd, shell=True)
		proc.wait()

		if not os.path.isfile(outfile):
			apDisplay.printError("affinity propagration cluster program did not run")

		### Parse apcluster output file: clusters.out
		apDisplay.printMsg("Parse apcluster output file: "+outfile)
		clustf = open(outfile, "r")
		### each line is the particle and the number is the class
		partnum = 0
		classes = {}
		for line in clustf:
			sline = line.strip()
			if sline:
				partnum += 1
				classnum = int(sline)
				if not classnum in classes:
					classes[classnum] = [partnum,]
				else:
					classes[classnum].append(partnum)
		clustf.close()
		apDisplay.printMsg("Found %d classes"%(len(classes.keys())))

		### Create class averages
		classavgdata = []
		classnames = classes.keys()
		classnames.sort()
		for classnum in classnames:
			apDisplay.printMsg("Class %d, %d members"%(classnum, len(classes[classnum])))
			clsf = open('subcls%03d.lst'%(classnum), 'w')
			for partnum in classes[classnum]:
				clsf.write("%d\n"%(partnum))
			clsf.close()
			classdatalist = apImagicFile.readParticleListFromStack(stackfile, classes[classnum], msg=False)
			classdatarray = numpy.asarray(classdatalist)
			classavgarray = classdatarray.mean(0)
			classavgdata.append(classavgarray)
		apFile.removeStack("classaverage.hed")
		apImagicFile.writeImagic(classavgdata, "classaverage.hed")

		k=0
		for i in range(0,len(E)):
			if len(E[i])==0:
				continue
			else:
				f1=open('%s/subcls%02d.lst' % (str1,k), 'w')
				for j in range(0,len(E[i])):
					f1.write('%d aligned.spi clusterCenterImgNum%d\n' % (E[i][j], i))
				f1.close()
				proc = subprocess.Popen('proc2d aligned.spi tempClsAvg.hed list=%s/subcls%02d.lst mask=%d average edgenorm' % (str1,k,params['mask']), shell=True)
				proc.wait()
				k=k+1

		clsAvgs = EMAN.readImages('tempClsAvg.hed',-1,-1,0)
		j=0
		for i in range(0,len(E)):
			if len(E[i])==0:
				continue
			else:
				clsAvgs[j].setNImg(len(E[i]))
				clsAvgs[j].writeImage('subclasses_avg.hed',-1)
				j=j+1
		os.chdir('../')


		### Determine best averages

		proc = subprocess.Popen('/bin/rm tempClsAvg.*', shell=True)
		proc.wait()
		proc = subprocess.Popen('proc2d %s/aligned.spi tempClsAvg.hed mask=%d average edgenorm' % (clsdir, params['mask']), shell=True)
		proc.wait()
		class_avg = EMAN.readImages('tempClsAvg.hed',-1,-1,0)

		avgname=os.path.join(clsdir,'subclasses_avg.hed')
		averages=EMAN.readImages(avgname,-1,-1,0)

		cclist=[]
		for avg in averages:
			cclist.append(cc(projections[clsnum-1],avg))

		f1 = open('%s/CCValues.txt'%(clsdir), 'w')
		for i in range(len(cclist)):
			f1.write(str(cclist[i])+'\n')
		f1.close()

		### Merge top best subclasses

		ccListSort = list(cclist)
		ccListSort.sort()
		Ptcls = []
		for i in range(0,len(ccListSort)):
			cci = ccListSort[len(ccListSort)-i-1]
			if cci>=params['corCutOff']:
				bestclass_i=cclist.index(cci)
				classname_i=clslist[clsnum-1].split('.')[0]+'.dir/subClassAvgs/subcls'+string.zfill(bestclass_i,2)+'.lst'
				f1=open(classname_i,'r')
				Ptcls_i = f1.readlines()
				f1.close()
				Ptcls.extend(Ptcls_i)
			else:
				print "Not included - ", cci
				pass
		if len(Ptcls)>0:

			fw=open('mergeClasses.lst', 'w')
			fw.writelines(Ptcls)
			fw.close()

			proc = subprocess.Popen('/bin/rm mergedClsAvg.spi', shell=True)
			proc.wait()
			proc = subprocess.Popen('proc2d %s/aligned.spi mergedClsAvg.spi list=mergeClasses.lst mask=%d average' % (clsdir, params['mask']), shell=True)
			proc.wait()
			mergedavg=EMAN.readImages('mergedClsAvg.spi',-1,-1,0)

			mergedavg[0].setNImg(len(Ptcls))
			mergedavg[0].setRAlign(e)
			mergedavg[0].writeImage('goodavgs.hed',-1)
		else:
			pass

		writeNewClsfile(clsfile,pretext,Ptext,Ptcls)

	#Create list of cc values
	for cls in range(0,len(clslist)):
		clsdir=clslist[cls].split('.')[0]+'.dir'
		apDisplay.printMsg("Starting class number %d" %(cls))

		#break
	pad=int(params['boxsize']*1.25)
	if pad%2:
		pad=pad+1
	if params['sym'] is None:
		make3dcommand='make3d goodavgs.hed out=threed.%d.mrc mask=%d pad=%d mode=2 hard=%d' % (params['iter'], params['mask'], pad, params['hard'])
	else:
		make3dcommand='make3d goodavgs.hed out=threed.%d.mrc mask=%d sym=%s pad=%d mode=2 hard=%d' % (params['iter'], params['mask'], params['sym'], pad, params['hard'])
	apDisplay.printMsg(make3dcommand)
	proc = subprocess.Popen(make3dcommand, shell=True)
	proc.wait()
	proc3dcommand='proc3d threed.%d.mrc threed.%da.mrc mask=%d norm' % (params['iter'],params['iter'],params['mask'])
	apDisplay.printMsg(proc3dcommand)
	proc = subprocess.Popen(proc3dcommand, shell=True)
	proc.wait()
	if params['eotest'] is False:
		#copy the resulting class average images to the main recon directory
		proc = subprocess.Popen('/bin/cp threed.%da.mrc ../.'%(params['iter']), shell=True)
		proc.wait()
		proc = subprocess.Popen('/bin/cp goodavgs.hed ../classes_msgp.%d.hed' %(params['iter']), shell=True)
		proc.wait()
		proc = subprocess.Popen('/bin/cp goodavgs.img ../classes_msgp.%d.img' %(params['iter']), shell=True)
		proc.wait()
		#link msgp result as the final result for this iteration
		rmcommand='/bin/rm -f ../classes.%d.hed ../classes.%d.img' % (params['iter'], params['iter'])
		proc = subprocess.Popen(rmcommand, shell=True)
		proc.wait()
		lncommand='ln -s classes_msgp.%d.hed ../classes.%d.hed' % (params['iter'], params['iter'])
		proc = subprocess.Popen(lncommand, shell=True)
		proc.wait()
		lncommand='ln -s classes_msgp.%d.img ../classes.%d.img' % (params['iter'], params['iter'])
		proc = subprocess.Popen(lncommand, shell=True)
		proc.wait()
	elif params['eotest']=='odd':
		proc = subprocess.Popen('/bin/cp threed.%da.mrc ../threed.%da.o.mrc' %(params['iter'], params['iter']), shell=True)
		proc.wait()
	elif params['eotest']=='even':
		proc = subprocess.Popen('/bin/cp threed.%da.mrc ../threed.%da.e.mrc' %(params['iter'], params['iter']), shell=True)
		proc.wait()
		proc = subprocess.Popen('proc3d threed.%da.mrc ../threed.%da.o.mrc fsc=../corEO%d.fsc.dat' %(params['iter'], params['iter'], params['iter']), shell=True)
		proc.wait()

	#replace the old cls*.lst with the new extended one
	proc = subprocess.Popen('tar cvzf %s %s' % (newclassfile,"cls*.lst.new"), shell=True)
	proc.wait()
	proc = subprocess.Popen('/bin/cp %s ../%s' %(newclassfile,classfile), shell=True)
	proc.wait()

	apDisplay.printMsg("Done!")
def gatherSingleFilesIntoStack(selfile, stackfile, filetype="spider"):
	"""
	takes a selfile and creates an EMAN stack
	"""
	selfile = os.path.abspath(selfile)
	stackfile = os.path.abspath(stackfile)
	if stackfile[-4:] != ".hed":
		apDisplay.printWarning("Stack file does not end in .hed")
		stackfile = os.path.splitext(stackfile)[0]+".hed"

	apDisplay.printColor("Merging files into a stack, this can take a while", "cyan")

	starttime = time.time()

	if not os.path.isfile(selfile):
		apDisplay.printError("selfile does not exist: "+selfile)

	### Process selfile
	fh = open(selfile, 'r')
	filelist = []
	for line in fh:
		sline = line.strip()
		if sline:
			args=sline.split()
			if (len(args)>1):
				filename = args[0].strip()
				filelist.append(filename)
	fh.close()

	### Set variables
	boxsize = apFile.getBoxSize(filelist[0])
	partperiter = int(1e9/(boxsize[0]**2)/16.)
	if partperiter > 4096:
		partperiter = 4096
	apDisplay.printMsg("Using %d particle per iteration"%(partperiter))
	numpart = len(filelist)
	if numpart < partperiter:
		partperiter = numpart

	### Process images
	imgnum = 0
	stacklist = []
	stackroot = stackfile[:-4]
	### get memory in kB
	startmem = mem.active()
	while imgnum < len(filelist):
		filename = filelist[imgnum]
		index = imgnum % partperiter
		if imgnum % 100 == 0:
			sys.stderr.write(".")
			#sys.stderr.write("%03.1fM %d\n"%((mem.active()-startmem)/1024., index))
			if mem.active()-startmem > 2e6:
				apDisplay.printWarning("Out of memory")
		if index < 1:
			#print "img num", imgnum
			### deal with large stacks, reset loop
			if imgnum > 0:
				sys.stderr.write("\n")
				stackname = "%s-%d.hed"%(stackroot, imgnum)
				apDisplay.printMsg("writing single particles to file "+stackname)
				stacklist.append(stackname)
				apFile.removeStack(stackname, warn=False)
				apImagicFile.writeImagic(stackarray, stackname, msg=False)
				perpart = (time.time()-starttime)/imgnum
				apDisplay.printColor("part %d of %d :: %.1fM mem :: %s/part :: %s remain"%
					(imgnum+1, numpart, (mem.active()-startmem)/1024. , apDisplay.timeString(perpart), 
					apDisplay.timeString(perpart*(numpart-imgnum))), "blue")
			stackarray = []
		### merge particles
		if filetype == "mrc":
			partimg = mrc.read(filename)
		else:
			partimg = spider.read(filename)
		stackarray.append(partimg)
		imgnum += 1

	### write remaining particles to file
	sys.stderr.write("\n")
	stackname = "%s-%d.hed"%(stackroot, imgnum)
	apDisplay.printMsg("writing particles to file "+stackname)
	stacklist.append(stackname)
	apImagicFile.writeImagic(stackarray, stackname, msg=False)

	### merge stacks
	apFile.removeStack(stackfile, warn=False)
	apImagicFile.mergeStacks(stacklist, stackfile)
	print stackfile
	filepart = apFile.numImagesInStack(stackfile)
	if filepart != numpart:
		apDisplay.printError("number merged particles (%d) not equal number expected particles (%d)"%
			(filepart, numpart))
	for stackname in stacklist:
		apFile.removeStack(stackname, warn=False)

	### summarize
	apDisplay.printColor("merged %d particles in %s"%(imgnum, apDisplay.timeString(time.time()-starttime)), "cyan")
def makeStackMeanPlot(stackid, gridpoints=16):
	if gridpoints > 30:
		apDisplay.printError("Too large of a grid")
	apDisplay.printMsg("creating Stack Mean Plot montage for stackid: "+str(stackid))
	t0 = time.time()
	### big stacks are too slow
	boxsize = apStack.getStackBoxsize(stackid)
	bin = 1
	if boxsize is not None:
		while boxsize/bin > 128:
			bin+=1
	apDisplay.printMsg("binning stack by "+str(bin))
	stackdata = apStack.getOnlyStackData(stackid, msg=False)
	stackpath = stackdata['path']['path']
	stackfile = os.path.join(stackpath, stackdata['name'])
	# if no stackfile, likely virtual stack
	if not os.path.isfile(stackfile):
		apDisplay.printMsg("possible virtual stack, searching for original stack")
		vstackdata = apStack.getVirtualStackParticlesFromId(stackid)
		partdatas = vstackdata['particles']
		stackfile = vstackdata['filename']
		stackdata = apStack.getOnlyStackData(vstackdata['stackid'], msg=False)
	# otherwise get stack info
	else:
		# get stats from stack:
		sqlcmd = "SELECT " + \
			"particleNumber, mean, stdev " + \
			"FROM ApStackParticleData " + \
			"WHERE `REF|ApStackData|stack` = %i"%(stackid)
		partdatas = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)
	#check only first 100 particles for now
	#partdatas = partdatas[:500]
	apFile.removeFile("montage"+str(stackid)+".png")

	### find limits
	limits = {'minmean': 1e12, 'maxmean': -1e12, 'minstdev': 1e12, 'maxstdev': -1e12,}
	for partdata in partdatas:
		if partdata['mean'] is None:
			continue
		mean = partdata['mean']
		stdev = partdata['stdev']
		if mean < limits['minmean']:
			limits['minmean'] = mean
		if mean > limits['maxmean']:
			limits['maxmean'] = mean
		if stdev < limits['minstdev']:
			limits['minstdev'] = stdev
		if stdev > limits['maxstdev']:
			limits['maxstdev'] = stdev
	if limits['minmean'] > 1e11:
		apDisplay.printWarning("particles have no mean values in database")
		return
	apDisplay.printMsg(str(limits))

	### create particle bins
	partlists = {}
	for i in range(gridpoints):
		for j in range(gridpoints):
			key = ("%02dx%02d"%(i,j))
			partlists[key] = []

	### sort particles into bins
	for partdata in partdatas:
		key = meanStdevToKey(partdata['mean'], partdata['stdev'], limits, gridpoints)
		partnum = int(partdata['particleNumber'])
		partlists[key].append(partnum)

	printPlot(partlists, gridpoints)

	### createStackAverages
	keys = partlists.keys()
	keys.sort()
	count = 0
	backs = "\b\b\b\b\b\b\b\b\b\b\b"
	montagestack = os.path.join(stackpath,"montage"+str(stackid)+".hed")
	apFile.removeStack(montagestack)
	mystack = []
	for key in keys:
		count += 1
		sys.stderr.write(backs+backs+backs+backs)
		sys.stderr.write("% 3d of % 3d, %s: % 6d"%(count, len(keys), key, len(partlists[key])))
		avgimg = averageSubStack(partlists[key], stackfile, bin)
		if avgimg is not False:
			avgimg = numpy.fliplr(avgimg)
			mystack.append(avgimg)
	apImagicFile.writeImagic(mystack, montagestack)
	sys.stderr.write("\n")
	assemblePngs(keys, str(stackid), montagestack)
	apDisplay.printMsg("/bin/mv -v montage"+str(stackid)+".??? "+stackpath)
	apDisplay.printMsg("finished in "+apDisplay.timeString(time.time()-t0))
    def createMontageInMemory(self, apix):
        self.cluster_resolution = []
        apDisplay.printMsg("Converting files")

        ### Set binning of images
        boxsize = apImagicFile.getBoxsize(self.instack)
        bin = 1
        while boxsize / bin > 200:
            bin += 1
        binboxsize = boxsize / bin

        ### create averages
        files = glob.glob(self.timestamp + ".[0-9]*")
        files.sort(self.sortFile)
        montage = []
        montagepngs = []
        i = 0
        for listname in files:
            i += 1
            apDisplay.printMsg("%d of %d classes" % (i, len(files)))
            pngfile = listname + ".png"
            if not os.path.isfile(listname) or apFile.fileSize(listname) < 1:
                ### create a ghost particle
                sys.stderr.write("skipping " + listname + "\n")
                blank = numpy.ones((binboxsize, binboxsize),
                                   dtype=numpy.float32)

                ### add to montage stack
                montage.append(blank)
                self.cluster_resolution.append(None)

                ### create png
                apImage.arrayToPng(blank, pngfile)

            else:
                ### read particle list
                partlist = self.readListFile(listname)

                ### average particles
                partdatalist = apImagicFile.readParticleListFromStack(
                    self.instack, partlist, boxsize, msg=False)
                partdataarray = numpy.asarray(partdatalist)
                finaldata = partdataarray.mean(0)
                if bin > 1:
                    finaldata = apImage.binImg(finaldata, bin)

                ### add to montage stack
                montage.append(finaldata)
                res = apFourier.spectralSNR(partdatalist, apix)
                self.cluster_resolution.append(res)

                ### create png
                apImage.arrayToPng(finaldata, pngfile)

            ### check for png file
            if os.path.isfile(pngfile):
                montagepngs.append(pngfile)
            else:
                apDisplay.printError("failed to create montage")

        stackname = "kerdenstack" + self.timestamp + ".hed"
        apImagicFile.writeImagic(montage, stackname)
        ### create montage
        montagecmd = ("montage -geometry +4+4 -tile %dx%d " %
                      (self.params['xdim'], self.params['ydim']))
        for monpng in montagepngs:
            montagecmd += monpng + " "
        montagecmd += "montage.png"
        apEMAN.executeEmanCmd(montagecmd, showcmd=True, verbose=False)
        time.sleep(1)
        apFile.removeFilePattern(self.timestamp + ".*.png")
        return bin
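### --- Hypothetical sketch (standalone, not part of the class above) ---------
### createMontageInMemory() picks the smallest integer bin factor that brings
### the class-average box size under ~200 pixels, then bins each average.
### The helpers below reproduce that choice and a plain numpy block-average
### binning as a stand-in for apImage.binImg.
def _sketchChooseBin(boxsize, maxdim=200):
    binfactor = 1
    while boxsize / binfactor > maxdim:
        binfactor += 1
    return binfactor

def _sketchBinImage(img, binfactor):
    import numpy
    ny, nx = img.shape
    ny -= ny % binfactor
    nx -= nx % binfactor
    img = numpy.asarray(img[:ny, :nx], dtype=numpy.float32)
    ### reshape into (rows, bin, cols, bin) blocks and average each block
    return img.reshape(ny // binfactor, binfactor,
                       nx // binfactor, binfactor).mean(axis=3).mean(axis=1)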