def postLoopFunctions(self):
		"""Post-processing after the image loop: optionally delete intermediate
		CTF-corrected/downsampled images, average the final stack, create the
		stack-mean plot (when committing), and print per-stage timing stats.
		"""
		### Delete CTF corrected images
		if self.params['keepall'] is False:
			pattern = os.path.join(self.params['rundir'], self.params['sessionname']+'*.dwn.mrc')
			apFile.removeFilePattern(pattern)
			### remove Ace2 images
			pattern = os.path.join(self.params['rundir'], self.params['sessionname']+'*mrc.corrected.mrc')
			apFile.removeFilePattern(pattern)
			### remove Spider images
			if self.params['fliptype'] == 'spiderimage':
				pattern = os.path.join(self.params['rundir'], self.params['sessionname']+'*_out.spi')
				apFile.removeFilePattern(pattern)
				pattern = os.path.join(self.params['rundir'], self.params['sessionname']+'*_tf.spi')
				apFile.removeFilePattern(pattern)
		# nothing was processed; skip averaging/plotting entirely
		if self.noimages is True:
			return

		stackpath = os.path.join(self.params['rundir'], self.params['single'])
		### delete this after testing
		apStack.averageStack(stack = stackpath)
		### Create Stack Mean Plot
		if self.params['commit'] is True and self.params['meanplot'] is True:
			stackid = apStack.getStackIdFromPath(stackpath)
			if stackid is not None:
				apStackMeanPlot.makeStackMeanPlot(stackid)

		apDisplay.printColor("Timing stats", "blue")
		self.printTimeStats("Batch Boxer", self.batchboxertimes)
		self.printTimeStats("Ctf Correction", self.ctftimes)
		self.printTimeStats("Stack Merging", self.mergestacktimes)
		self.printTimeStats("Mean/Std Read", self.meanreadtimes)
		self.printTimeStats("DB Insertion", self.insertdbtimes)
	def start(self):
		"""Combine several existing stacks into a single stack file, optionally
		committing the merged particles to the database, then average the result.

		Refuses to run if the target stack file already exists.
		"""
		### universal particle counter
		self.partnum = 1

		### final stack file
		self.combinefile = os.path.join( self.params['rundir'], self.params['stackfilename'] )
		if os.path.isfile(self.combinefile):
			apDisplay.printError("A stack with name "+self.params['stackfilename']+" and path "
				+self.params['rundir']+" already exists.")

		### loop through stacks
		for stackstr in self.params['stackids']:
			# stackids may arrive as strings; normalize to int for the db lookup
			stackid = int(stackstr)

			### get stack data
			stackdata = apStack.getOnlyStackData(stackid)

			### append particle to stack file
			self.appendToStack(stackdata)

			if self.params['commit'] is True:
				### insert stack data
				apDisplay.printColor("inserting new stack particles from stackid="+str(stackid), "cyan")
				self.commitStack(stackid)
			else:
				apDisplay.printWarning("not committing data to database")

		apStack.averageStack(stack=self.combinefile)
# Exemple #3
# 0
    def start(self):
        """Copy an input stack into the run directory via EMAN proc2d
        (optionally edge-normalizing), validate its box size and particle
        count, average it, and register the stack data.

        Fixes: the original indexed ``boxsize[0]`` before checking that
        ``boxsize`` was valid, and compared the sequence itself (not its
        first element) against 0.
        """
        ### copy stack to rundir
        newstack = os.path.join(self.params['rundir'], "start.hed")
        if os.path.isfile(newstack):
            apDisplay.printError("Stack already exists")
        emancmd = "proc2d %s %s" % (self.params['stackfile'], newstack)
        if self.params['normalize'] is True:
            emancmd += " edgenorm"
        apEMAN.executeEmanCmd(emancmd)

        ### set final parameters
        boxsize = apFile.getBoxSize(newstack)
        # validate BEFORE indexing; compare the element, not the sequence
        if not boxsize or boxsize[0] <= 0:
            apDisplay.printError("Could not determine stack size")
        self.boxsize = boxsize[0]
        apDisplay.printMsg("Boxsize: %i" % self.boxsize)
        self.numpart = apFile.numImagesInStack(newstack)
        if not self.numpart or self.numpart <= 0:
            apDisplay.printError("Could not determine number of particles")
        apDisplay.printMsg("Num part: %i" % self.numpart)
        if self.numpart <= 1:
            apDisplay.printError("Not enough particles to upload")

        apStack.averageStack(newstack)

        self.createStackData()
    def start(self):
        """Center the particles of an existing stack into a new substack,
        commit the centered substack, and average it (plus the rejects).

        Fixes: ``numparticles`` was referenced in the description string but
        never defined (NameError); it is now derived from the keep list
        returned by centerParticles.
        """
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

        ### checks
        centerstack = os.path.join(self.params['rundir'], 'align.img')
        badstack = os.path.join(self.params['rundir'], 'bad.img')
        apStack.checkForPreviousStack(centerstack)

        ### run centering algorithm
        # assumes centerParticles returns the list of kept particles -- TODO confirm
        keeplist = self.centerParticles(oldstack, centerstack, badstack)
        if not os.path.isfile(centerstack):
            apDisplay.printError("No stack was created")

        self.params['keepfile'] = os.path.join(self.params['rundir'],
                                               'keepfile.txt')

        ### get number of particles
        # BUG FIX: numparticles was previously undefined here
        numparticles = len(keeplist)
        self.params['description'] += (
            (" ... %d eman centered substack id %d" %
             (numparticles, self.params['stackid'])))

        apStack.commitSubStack(self.params,
                               newname=os.path.basename(centerstack),
                               centered=True)
        apStack.averageStack(stack=centerstack)
        if os.path.isfile(badstack):
            apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
        def start(self):
                """Center particles with EMAN, write a keepfile, commit the
                centered substack ('ali.hed'), and average the good and bad
                stacks.

                Fixes: stray trailing comma in the os.path.isfile() call, and
                the keepfile handle is now closed via a context manager.
                """
                #new stack path
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

                #make sure that old stack is numbered
                apEMAN.checkStackNumbering(oldstack)

                alignedstack = os.path.join(self.params['rundir'], 'ali.img')
                badstack = os.path.join(self.params['rundir'], 'bad.img')
                apStack.checkForPreviousStack(alignedstack)

                #run centering algorithm
                apStack.centerParticles(oldstack, self.params['mask'], self.params['maxshift'])
                self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')
                apEMAN.writeStackParticlesToFile(alignedstack, self.params['keepfile'])
                if not os.path.isfile(alignedstack):
                        apDisplay.printError("No stack was created")

                #get number of particles: one keepfile line per kept particle
                with open(self.params['keepfile'], "r") as f:
                        numparticles = len(f.readlines())
                self.params['description'] += (
                        " ... %d eman centered substack id %d"
                        % (numparticles, self.params['stackid']))

                apStack.commitSubStack(self.params, newname='ali.hed', centered=True)
                apStack.averageStack(stack=alignedstack)
                if os.path.exists(badstack):
                        apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
	def start(self):
		"""Run maximum-likelihood alignment on a stack, build the aligned
		stack ('alignstack.hed'), write a fake keepfile covering all
		particles, commit the centered substack, and average it.
		"""
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		# NOTE(review): oldstack is computed but never used below -- confirm intent
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		### make sure that old stack is numbered
		alignedstack = os.path.join(self.params['rundir'], 'alignstack.hed')
		apStack.checkForPreviousStack(alignedstack)

		### run centering algorithm
		self.runMaxlike()

		### create aligned stacks
		partlist = self.readPartDocFile()
		# NOTE(review): stackfile is unused; the existence check below relies on
		# createAlignedStacks producing 'alignstack.hed' -- confirm
		stackfile = self.createAlignedStacks(partlist)
		if not os.path.isfile(alignedstack):
			apDisplay.printError("No stack was created")

		### get number of particles
		numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
		self.writeFakeKeepFile(numpart)
		self.params['description'] += (
			" ... %d maxlike centered substack id %d" 
			% (numpart, self.params['stackid']))
		
		apStack.commitSubStack(self.params, newname='alignstack.hed', centered=True)
		apStack.averageStack(stack=alignedstack)
	def start(self):
		"""Center particles with EMAN, write a keepfile, commit the centered
		substack ('ali.hed'), and average the good and bad stacks.

		Fixes: stray trailing comma in the os.path.isfile() call, and the
		keepfile handle is now closed via a context manager.
		"""
		#new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		#make sure that old stack is numbered
		apEMAN.checkStackNumbering(oldstack)

		alignedstack = os.path.join(self.params['rundir'], 'ali.img')
		badstack = os.path.join(self.params['rundir'], 'bad.img')
		apStack.checkForPreviousStack(alignedstack)

		#run centering algorithm
		apStack.centerParticles(oldstack, self.params['mask'], self.params['maxshift'])
		self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')
		apEMAN.writeStackParticlesToFile(alignedstack, self.params['keepfile'])
		if not os.path.isfile(alignedstack):
			apDisplay.printError("No stack was created")

		#get number of particles: one keepfile line per kept particle
		with open(self.params['keepfile'], "r") as f:
			numparticles = len(f.readlines())
		self.params['description'] += (
			" ... %d eman centered substack id %d"
			% (numparticles, self.params['stackid']))

		apStack.commitSubStack(self.params, newname='ali.hed', centered=True)
		apStack.averageStack(stack=alignedstack)
		if os.path.exists(badstack):
			apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
	def start(self):
		"""Copy an input stack into the run directory via EMAN proc2d
		(optionally edge-normalizing), validate its box size and particle
		count, average it, and register the stack data.

		Fixes: the original indexed ``boxsize[0]`` before checking that
		``boxsize`` was valid, and compared the sequence itself (not its
		first element) against 0.
		"""
		### copy stack to rundir
		newstack = os.path.join(self.params['rundir'], "start.hed")
		if os.path.isfile(newstack):
			apDisplay.printError("Stack already exists")
		emancmd = "proc2d %s %s"%(self.params['stackfile'], newstack)
		if self.params['normalize'] is True:
			emancmd += " edgenorm"
		apEMAN.executeEmanCmd(emancmd)

		### set final parameters
		boxsize = apFile.getBoxSize(newstack)
		# validate BEFORE indexing; compare the element, not the sequence
		if not boxsize or boxsize[0] <= 0:
			apDisplay.printError("Could not determine stack size")
		self.boxsize = boxsize[0]
		apDisplay.printMsg("Boxsize: %i"%self.boxsize)
		self.numpart = apFile.numImagesInStack(newstack)
		if not self.numpart or self.numpart <= 0:
			apDisplay.printError("Could not determine number of particles")
		apDisplay.printMsg("Num part: %i"%self.numpart)
		if self.numpart <= 1:
			apDisplay.printError("Not enough particles to upload")

		apStack.averageStack(newstack)

		self.createStackData()
	def start(self):
		"""Create a substack of a full stack containing only the tilt-pair
		particles selected by getGoodParticles, commit it as a substack of
		the full stack, average it, and optionally make the mean plot.
		"""
		knownstackdata = apStack.getOnlyStackData(self.params['knownstackid'])
		fullstackdata = apStack.getOnlyStackData(self.params['fullstackid'])

		### get good particle numbers
		includeParticle, tiltParticlesData = self.getGoodParticles()
		self.numpart = len(includeParticle)

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile"+self.timestamp+".lst")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### make new stack of tilted particle from that run
		fullstackfile = os.path.join(fullstackdata['path']['path'], fullstackdata['name'])
		# reuse the full stack's file extension for the new stack name
		sb = os.path.splitext(fullstackdata['name'])
		newname = "tiltpairsub%d" % self.params['knownstackid']+sb[-1]
		newstackfile = os.path.join(self.params['rundir'], newname)
		apFile.removeStack(newstackfile, warn=False)
		apStack.makeNewStack(fullstackfile, newstackfile, self.params['keepfile'])
		if not os.path.isfile(newstackfile):
			apDisplay.printError("No stack was created")
		# commit against the FULL stack id, not the known-particle stack
		self.params['stackid'] = self.params['fullstackid']
		apStack.commitSubStack(self.params, newname, sorted=False)
		apStack.averageStack(stack=newstackfile)
		newstackid = apStack.getStackIdFromPath(newstackfile)
		if self.params['meanplot'] is True:
			apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
			apStackMeanPlot.makeStackMeanPlot(newstackid)
# Exemple #10
# 0
	def start(self):
		"""Post-process a reference-based alignment run: align the references,
		build aligned stacks, compute per-reference resolution, insert results
		into the database, and clean up intermediate files.
		"""
		### load parameters
		runparams = self.readRunParameters()

		### align references
		self.alignReferences(runparams)

		### create an aligned stack
		self.createAlignedReferenceStack()

		### read particles
		self.lastiter = self.findLastIterNumber()
		if self.params['sort'] is True:
			self.sortFolder()
		reflist = self.readRefDocFile()
		partlist = self.readPartDocFile(reflist)
		self.writePartDocFile(partlist)

		### create aligned stacks
		alignimagicfile = self.createAlignedStacks(partlist, runparams['localstack'])
		apStack.averageStack(alignimagicfile)

		### calculate resolution for each reference
		apix = apStack.getStackPixelSizeFromStackId(runparams['stackid'])*runparams['bin']
		self.calcResolution(partlist, alignimagicfile, apix)

		### insert into databse
		self.insertRunIntoDatabase(alignimagicfile, runparams)
		self.insertParticlesIntoDatabase(runparams['stackid'], partlist)

		apFile.removeStack(runparams['localstack'], warn=False)
		# NOTE(review): the shell 'rm -fr' and removeFilePattern below both
		# target partfiles/* -- the second call looks redundant; confirm
		rmcmd = "/bin/rm -fr partfiles/*"
		apEMAN.executeEmanCmd(rmcmd, verbose=False, showcmd=False)
		apFile.removeFilePattern("partfiles/*")
# Exemple #11
# 0
    def start(self):
        """Run maximum-likelihood alignment on a stack, build the aligned
        stack ('alignstack.hed'), write a fake keepfile covering all
        particles, commit the centered substack, and average it.
        """
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        # NOTE(review): oldstack is computed but never used below -- confirm intent
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

        ### make sure that old stack is numbered
        alignedstack = os.path.join(self.params['rundir'], 'alignstack.hed')
        apStack.checkForPreviousStack(alignedstack)

        ### run centering algorithm
        self.runMaxlike()

        ### create aligned stacks
        partlist = self.readPartDocFile()
        # NOTE(review): stackfile is unused; the existence check below relies on
        # createAlignedStacks producing 'alignstack.hed' -- confirm
        stackfile = self.createAlignedStacks(partlist)
        if not os.path.isfile(alignedstack):
            apDisplay.printError("No stack was created")

        ### get number of particles
        numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
        self.writeFakeKeepFile(numpart)
        self.params['description'] += (
            " ... %d maxlike centered substack id %d" %
            (numpart, self.params['stackid']))

        apStack.commitSubStack(self.params,
                               newname='alignstack.hed',
                               centered=True)
        apStack.averageStack(stack=alignedstack)
    def start(self):
        """Merge several existing stacks into one combined stack file,
        optionally committing the merged particles to the database, then
        average the combined stack.

        Aborts if the target stack file is already present.
        """
        ### universal particle counter
        self.partnum = 1

        ### final stack file
        self.combinefile = os.path.join(self.params['rundir'],
                                        self.params['stackfilename'])
        if os.path.isfile(self.combinefile):
            apDisplay.printError("A stack with name " +
                                 self.params['stackfilename'] + " and path " +
                                 self.params['rundir'] + " already exists.")

        ### loop through stacks
        for raw_stack_id in self.params['stackids']:
            # ids may arrive as strings; normalize for the db lookup
            sid = int(raw_stack_id)

            ### get stack data
            sdata = apStack.getOnlyStackData(sid)

            ### append particle to stack file
            self.appendToStack(sdata)

            if self.params['commit'] is True:
                ### insert stack data
                apDisplay.printColor(
                    "inserting new stack particles from stackid=" +
                    str(sid), "cyan")
                self.commitStack(sid)
            else:
                apDisplay.printWarning("not committing data to database")

        apStack.averageStack(stack=self.combinefile)
        def start(self):
                """Sort a stack's particles by statistics using Xmipp
                (xmipp_sort_by_statistics), rebuild the sorted stack, average
                it, and upload the results.
                """
                # Path of the stack
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                fn_oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

                # Convert the stack to Xmipp
                apXmipp.breakupStackIntoSingleFiles(fn_oldstack)

                # Run sort junk
                # NOTE(review): shell=True with a fixed command string -- no
                # untrusted input here, but a list-form command would be safer
                cmd = "xmipp_sort_by_statistics -i partlist.sel"
                apDisplay.printColor(cmd, "cyan")
                proc = subprocess.Popen(cmd, shell=True)
                proc.wait()

                # Create sorted stack
                apXmipp.gatherSingleFilesIntoStack("sort_junk.sel","sorted.hed")

                # Create average MRC
                apStack.averageStack("sorted.hed")

                # Remove intermediate stuff
                #os.unlink("partlist.sel")
                #shutil.rmtree("partfiles")

                # Upload results
                self.uploadResults()

                time.sleep(1)
                return
	def start(self):
		"""Center the particles of an existing stack into a new substack,
		commit the centered substack, and average it (plus the rejects).

		Fixes: ``numparticles`` was referenced in the description string but
		never defined (NameError); it is now derived from the keep list
		returned by centerParticles.
		"""
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		### checks
		centerstack = os.path.join(self.params['rundir'], 'align.img')
		badstack = os.path.join(self.params['rundir'], 'bad.img')
		apStack.checkForPreviousStack(centerstack)

		### run centering algorithm
		# assumes centerParticles returns the list of kept particles -- TODO confirm
		keeplist = self.centerParticles(oldstack, centerstack, badstack)
		if not os.path.isfile(centerstack):
			apDisplay.printError("No stack was created")

		self.params['keepfile'] = os.path.join(self.params['rundir'], 'keepfile.txt')

		### get number of particles
		# BUG FIX: numparticles was previously undefined here
		numparticles = len(keeplist)
		self.params['description'] += (
			(" ... %d eman centered substack id %d" 
			% (numparticles, self.params['stackid']))
		)
		
		apStack.commitSubStack(self.params, newname=os.path.basename(centerstack), centered=True)
		apStack.averageStack(stack=centerstack)
		if os.path.isfile(badstack):
			apStack.averageStack(stack=badstack, outfile='badaverage.mrc')
    def start(self):
        """Sort a stack's particles by statistics using Xmipp
        (xmipp_sort_by_statistics), rebuild the sorted stack, average it,
        and upload the results.
        """
        # Path of the stack
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        fn_oldstack = os.path.join(stackdata['path']['path'],
                                   stackdata['name'])

        # Convert the stack to Xmipp
        apXmipp.breakupStackIntoSingleFiles(fn_oldstack)

        # Run sort junk
        # NOTE(review): shell=True with a fixed command string -- no untrusted
        # input here, but a list-form command would be safer
        cmd = "xmipp_sort_by_statistics -i partlist.sel"
        apDisplay.printColor(cmd, "cyan")
        proc = subprocess.Popen(cmd, shell=True)
        proc.wait()

        # Create sorted stack
        apXmipp.gatherSingleFilesIntoStack("sort_junk.sel", "sorted.hed")

        # Create average MRC
        apStack.averageStack("sorted.hed")

        # Remove intermediate stuff
        #os.unlink("partlist.sel")
        #shutil.rmtree("partfiles")

        # Upload results
        self.uploadResults()

        time.sleep(1)
        return
    def start(self):
        """Apply a box mask to every particle of a stack, optionally rotating
        each particle upright first (using rotations from a prior alignment
        run), then average the masked stack and commit it.
        """
        self.stackdata = appiondata.ApStackData.direct_query(
            self.params['stackid'])
        if self.params['vertical'] is not True:
            self.alignstackdata = appiondata.ApAlignStackData.direct_query(
                self.params['alignstackid'])

        # Path of the stack
        self.stackdata = apStack.getOnlyStackData(self.params['stackid'])
        fn_oldstack = os.path.join(self.stackdata['path']['path'],
                                   self.stackdata['name'])

        rotfile = None
        if self.params['vertical'] is not True:
            # get averaged image:
            self.alignstackdata = appiondata.ApAlignStackData.direct_query(
                self.params['alignstackid'])
            avgimg = os.path.join(self.alignstackdata['path']['path'],
                                  self.alignstackdata['avgmrcfile'])

            # Convert averaged aligned mrcfile to spider
            spiavg = os.path.join(self.params['rundir'], "avg.spi")
            emancmd = "proc2d %s %s spiderswap edgenorm" % (avgimg, spiavg)
            apEMAN.executeEmanCmd(emancmd, verbose=True)

            # find rotation for vertical alignment
            rot = self.findRotation(spiavg)
            apDisplay.printMsg("found average rotation: %.2f" % rot)

            # combine the global rotation with each particle's in-plane angle
            rotlist = self.getInplaneRotations()
            rotfile = self.createRotationSpiList(rotlist, rot)

        # Convert the original stack to spider
        spistack = self.convertStackToSpider(fn_oldstack)
        # boxmask the particles
        spimaskfile = "masked" + self.timestamp + ".spi"
        self.boxMask(spistack, spimaskfile, rotfile)
        # Convert the spider stack to imagic
        imgstack = self.convertStackToImagic(spimaskfile)

        # Create average MRC
        apStack.averageStack(imgstack)

        # Clean up
        apDisplay.printMsg("deleting temporary processing files")
        os.remove(spistack)
        os.remove(spimaskfile)

        # Upload results
        if self.params['commit'] is True:
            oldstackparts = apStack.getStackParticlesFromId(
                self.params['stackid'])
            apStack.commitMaskedStack(self.params,
                                      oldstackparts,
                                      newname='start.hed')

        time.sleep(1)
        return
# Exemple #17
# 0
    def start(self):
        """Build a substack from a downloaded stack file: count its records,
        map each record's embedded particle db id back to the stack position,
        write a proc2d-style keepfile, then create, commit, and average the
        new substack.

        Fixes: the stack file was opened in text mode ('r'), which corrupts
        the record count on Python 3 and on Windows, and the handle was
        never closed; both files now use binary mode / context managers.
        """
        self.stackdata = apStack.getOnlyStackData(self.params['stackid'],
                                                  msg=False)

        # creating a keepfile, fixed filename
        self.params['keepfile'] = os.path.join(self.params['newstackpath'],
                                               "keepfile.lst")

        #path to the old stack
        oldstack = os.path.join(self.stackdata['path']['path'],
                                self.stackdata['name'])

        #path to the new stack. the stack path will be provided by the db in the future
        newstack = os.path.join(self.params['newstackpath'],
                                self.params['newstack'])

        # count the records by reading the binary file in 1024-byte blocks
        numimg = 0
        with open(newstack, 'rb') as h:
            while h.read(1024):
                numimg += 1

        #have to use this function to make sure i get the same particle number like in the download
        stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])

        # map unique particle db id -> zero-based stack position
        partdict = {}
        for part in stackpartdata:
            partdict[int(part.dbid)] = int(part['particleNumber'] - 1)

        #writing the keepfile
        with open(self.params['keepfile'], 'w') as f:
            for i in range(0, numimg):
                # the float32 at byte offset i*1024 + 19*4 holds the particle
                # db id -- presumably a header field of each record; TODO confirm
                partnumber = partdict[int(
                    numpy.memmap(newstack,
                                 dtype="float32",
                                 offset=i * 1024 + 19 * 4)[0])]
                f.write('%d\n' % partnumber)

        newcreatestack = os.path.join(self.params['rundir'],
                                      self.params['newstack'])
        apStack.makeNewStack(oldstack,
                             newcreatestack,
                             self.params['keepfile'],
                             bad=True)
        apStack.commitSubStack(self.params,
                               self.params['newstack'],
                               sorted=False)
        apStack.averageStack(stack=newcreatestack)
        newstackid = apStack.getStackIdFromPath(newcreatestack)
    def start(self):
        """Filter stack particles by their mean/stdev statistics against a
        user-defined line (from two (x,y) points), write the kept particle
        numbers to a keepfile, build and commit the substack, average it,
        and create the stack-mean plot.
        """
        stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        newname = stackdata['name']

        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
        newstack = os.path.join(self.params['rundir'], newname)

        # calculate slop and intercept from the four points given
        # NOTE(review): divides by (maxx - minx) -- equal x values would raise
        # ZeroDivisionError; confirm inputs are validated upstream
        slope = (self.params['maxy'] - self.params['miny']) / (
            self.params['maxx'] - self.params['minx'])
        intercept = self.params['miny'] - (slope * self.params['minx'])

        #               print slope
        #               print intercept

        numparticles = 0

        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        f = open(self.params['keepfile'], 'w')

        for stackpart in stackparts:
            #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
            if stackpart['mean'] > self.params['minx'] and stackpart[
                    'mean'] < self.params['maxx']:
                #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                calcY = slope * stackpart['mean'] + intercept
                # keep below the line by default; 'keepabove' inverts the test
                if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
                        (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
                    # proc2d-style keepfiles are zero-based
                    emanpartnum = stackpart['particleNumber'] - 1
                    f.write('%i\n' % emanpartnum)
                    numparticles += 1

        f.close()
        self.params['description'] += (
            (" ... %d particle substack of stackid %d" %
             (numparticles, self.params['stackid'])))

        #create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=True)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
        apStack.averageStack(stack=newstack)

        # stack mean plot
        newstackid = apStack.getStackIdFromPath(newstack)
        apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
        apStackMeanPlot.makeStackMeanPlot(newstackid)
	def start(self):
		"""Apply a box mask to every particle of a stack, optionally rotating
		each particle upright first (using rotations from a prior alignment
		run), then average the masked stack and commit it.
		"""
		self.stackdata = appiondata.ApStackData.direct_query(self.params['stackid'])
		if self.params['vertical'] is not True:
			self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])

		# Path of the stack
		self.stackdata = apStack.getOnlyStackData(self.params['stackid'])
		fn_oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		rotfile = None
		if self.params['vertical'] is not True:
			# get averaged image:
			self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
			avgimg = os.path.join(self.alignstackdata['path']['path'], self.alignstackdata['avgmrcfile'])

			# Convert averaged aligned mrcfile to spider
			spiavg = os.path.join(self.params['rundir'],"avg.spi")
			emancmd = "proc2d %s %s spiderswap edgenorm"%(avgimg,spiavg)
			apEMAN.executeEmanCmd(emancmd, verbose=True)

			# find rotation for vertical alignment
			rot = self.findRotation(spiavg)
			apDisplay.printMsg("found average rotation: %.2f"%rot)

			# combine the global rotation with each particle's in-plane angle
			rotlist = self.getInplaneRotations()
			rotfile = self.createRotationSpiList(rotlist,rot)

		# Convert the original stack to spider
		spistack = self.convertStackToSpider(fn_oldstack)
		# boxmask the particles
		spimaskfile = "masked"+self.timestamp+".spi"
		self.boxMask(spistack,spimaskfile,rotfile)
		# Convert the spider stack to imagic
		imgstack = self.convertStackToImagic(spimaskfile)

		# Create average MRC
		apStack.averageStack(imgstack)

		# Clean up
		apDisplay.printMsg("deleting temporary processing files")
		os.remove(spistack)
		os.remove(spimaskfile)

		# Upload results
		if self.params['commit'] is True:
			oldstackparts = apStack.getStackParticlesFromId(self.params['stackid'])
			apStack.commitMaskedStack(self.params, oldstackparts, newname='start.hed')

		time.sleep(1)
		return
        def start(self):
                """Filter stack particles by their mean/stdev statistics
                against a user-defined line (from two (x,y) points), write the
                kept particle numbers to a keepfile, build and commit the
                substack, average it, and create the stack-mean plot.
                """
                stackparts = apStack.getStackParticlesFromId(self.params['stackid'])
                
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                newname = stackdata['name']
                
                oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
                newstack = os.path.join(self.params['rundir'], newname)

                # calculate slop and intercept from the four points given       
                # NOTE(review): divides by (maxx - minx) -- equal x values would
                # raise ZeroDivisionError; confirm inputs are validated upstream
                slope = (self.params['maxy'] - self.params['miny']) / (self.params['maxx'] - self.params['minx'])
                intercept = self.params['miny'] - (slope*self.params['minx'])
                
#               print slope
#               print intercept
                
                numparticles = 0
                
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                f=open(self.params['keepfile'],'w')
                
                for stackpart in stackparts:
                        #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                        if stackpart['mean'] > self.params['minx'] and stackpart['mean'] < self.params['maxx']:
                                #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                                calcY = slope*stackpart['mean']+intercept 
                                # keep below the line by default; 'keepabove' inverts the test
                                if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
                                        (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
                                        # proc2d-style keepfiles are zero-based
                                        emanpartnum = stackpart['particleNumber']-1
                                        f.write('%i\n' % emanpartnum)
                                        numparticles+=1
                                        
                f.close()
                self.params['description'] +=(
                                (" ... %d particle substack of stackid %d" 
                                 % (numparticles, self.params['stackid']))
                        )

                #create the new sub stack
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")
                apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
                apStack.averageStack(stack=newstack)

                # stack mean plot
                newstackid = apStack.getStackIdFromPath(newstack)
                apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
                apStackMeanPlot.makeStackMeanPlot(newstackid)
	def start(self):
		"""Build a substack from a downloaded stack file: count its records,
		map each record's embedded particle db id back to the stack position,
		write a proc2d-style keepfile, then create, commit, and average the
		new substack.

		Fixes: the stack file was opened in text mode ('r'), which corrupts
		the record count on Python 3 and on Windows, and the handle was
		never closed; both files now use binary mode / context managers.
		"""
		self.stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)

		# creating a keepfile, fixed filename
		self.params['keepfile'] = os.path.join(self.params['newstackpath'],"keepfile.lst")

		#path to the old stack
		oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		#path to the new stack. the stack path will be provided by the db in the future
		newstack = os.path.join(self.params['newstackpath'], self.params['newstack'])

		# count the records by reading the binary file in 1024-byte blocks
		numimg = 0
		with open(newstack, 'rb') as h:
			while h.read(1024):
				numimg += 1

		#have to use this function to make sure i get the same particle number like in the download
		stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])

		# map unique particle db id -> zero-based stack position
		partdict = {}
		for part in stackpartdata:
			partdict[int(part.dbid)] = int(part['particleNumber']-1)

		#writing the keepfile
		with open(self.params['keepfile'], 'w') as f:
			for i in range(0, numimg):
				# the float32 at byte offset i*1024 + 19*4 holds the particle
				# db id -- presumably a header field of each record; TODO confirm
				partnumber = partdict[int(numpy.memmap(newstack, dtype="float32", offset=i*1024+19*4)[0])]
				f.write('%d\n' % partnumber)

		newcreatestack = os.path.join(self.params['rundir'],self.params['newstack'])
		apStack.makeNewStack(oldstack, newcreatestack, self.params['keepfile'], bad=True)
		apStack.commitSubStack(self.params, self.params['newstack'], sorted=False)
		apStack.averageStack(stack=newcreatestack)
		newstackid = apStack.getStackIdFromPath(newcreatestack)
    def start(self):
        """Post-process an ISAC run: align the class averages, align the
        particles to those classes to build aligned stacks, compute
        per-reference resolution, and insert everything into the database.
        """
        ### load parameters

        runparams = self.readRunParameters()
        runparams['localstack'] = "start1.hdf"
        self.params.update(runparams)

        alignedClassStackHDF = "alignedClasses.hdf"
        alignedPartStackHDF = "alignedParticles.hdf"
        # remove stale outputs from a previous run
        apFile.removeStack(alignedClassStackHDF, warn=False)
        apFile.removeStack(alignedPartStackHDF, warn=False)

        ISACParser = parseISAC()
        ISACParser.setAlignParams(self.params)
        ISACParser.trackParticlesInISAC()

        ###  align classes
        ISACParser.alignClassAverages(alignedClassStackHDF)
        alignedClassStack = apEMAN2.stackHDFToIMAGIC(alignedClassStackHDF)
        apStack.averageStack(alignedClassStack)
        self.lastiter = ISACParser.numGenerations

        ###  align particles to classes AND create aligned stacks
        ISACParser.alignParticlesToClasses(self.params['localstack'],
                                           alignedClassStackHDF,
                                           alignedPartStackHDF)
        alignedPartStack = apEMAN2.stackHDFToIMAGIC(alignedPartStackHDF)
        self.numPart = sparx.EMUtil.get_image_count(alignedPartStackHDF)

        partList = ISACParser.createPartList()

        ### calculate resolution for each reference
        apix = apStack.getStackPixelSizeFromStackId(
            self.params['stackid']) * self.params['bin']
        self.calcResolution(partList, alignedPartStack, apix)

        ### insert into database
        self.insertRunIntoDatabase(alignedPartStack, alignedClassStack,
                                   self.params)

        self.insertParticlesIntoDatabase(self.params['stackid'], partList)
	def start(self):
		"""Drive the post-ISAC pipeline: parse, align, measure, commit."""
		### merge the saved run parameters into self.params
		savedparams = self.readRunParameters()
		savedparams['localstack'] = "start1.hdf"
		self.params.update(savedparams)

		### output stacks; clear leftovers from any earlier attempt
		classStackHDF, partStackHDF = "alignedClasses.hdf", "alignedParticles.hdf"
		apFile.removeStack(classStackHDF, warn=False)
		apFile.removeStack(partStackHDF, warn=False)

		### parse the ISAC generations
		parser = parseISAC()
		parser.setAlignParams(self.params)
		parser.trackParticlesInISAC()

		### class averages: align, convert to IMAGIC, make an average image
		parser.alignClassAverages(classStackHDF)
		classStackIMAGIC = apEMAN2.stackHDFToIMAGIC(classStackHDF)
		apStack.averageStack(classStackIMAGIC)
		self.lastiter = parser.numGenerations

		### particles: align against the classes, then convert to IMAGIC
		parser.alignParticlesToClasses(self.params['localstack'], classStackHDF, partStackHDF)
		partStackIMAGIC = apEMAN2.stackHDFToIMAGIC(partStackHDF)
		self.numPart = sparx.EMUtil.get_image_count(partStackHDF)

		partList = parser.createPartList()

		### resolution per reference (pixel size corrected for binning)
		binnedApix = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		binnedApix = binnedApix * self.params['bin']
		self.calcResolution(partList, partStackIMAGIC, binnedApix)

		### database commit
		self.insertRunIntoDatabase(partStackIMAGIC, classStackIMAGIC, self.params)
		self.insertParticlesIntoDatabase(self.params['stackid'], partList)
    def commitToDatabase(self):
        """
        Insert the results of an EMAN refine2d alignment run into the database.

        Expected results for an alignment run:
          1. aligned particle stack in IMAGIC format
          2. rotation, shift, and quality parameters for each particle
          3. which particle belongs to which class
          4. stack file with the class averages
        """
        alignedstack = os.path.join(self.params['rundir'], "ptcl.hed")
        refstack = os.path.join(self.params['rundir'], "iter.final.hed")
        averagemrc = os.path.join(self.params['rundir'], "average.mrc")
        apStack.averageStack(alignedstack, averagemrc)
        particlemapping = self.determineClassOwnership()

        ### setup alignment run; refuse to overwrite an existing run
        alignrunq = appiondata.ApAlignRunData()
        alignrunq['runname'] = self.params['runname']
        alignrunq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        uniquerun = alignrunq.query(results=1)
        if uniquerun:
            # bugfix: previously referenced the undefined name 'runparams'
            # (NameError) and carried a garbled message string
            apDisplay.printError(
                "Run name '" + self.params['runname'] +
                "' and path already exist in database")

        ### setup eman refine2d run
        emanrefinetwodq = appiondata.ApEMANRefine2dRunData()
        emanrefinetwodq['runname'] = self.params['runname']
        emanrefinetwodq['run_seconds'] = time.time() - self.t0
        emanrefinetwodq['num_iters'] = self.params['numiter']
        emanrefinetwodq['num_classes'] = self.params['numclasses']

        ### finish alignment run
        alignrunq['refine2drun'] = emanrefinetwodq
        alignrunq['hidden'] = False
        alignrunq['runname'] = self.params['runname']
        alignrunq['description'] = self.params['description']
        alignrunq['lp_filt'] = self.params['lowpass']
        alignrunq['hp_filt'] = self.params['highpass']
        alignrunq['bin'] = self.params['bin']

        ### setup alignment stack
        alignstackq = appiondata.ApAlignStackData()
        alignstackq['imagicfile'] = os.path.basename(alignedstack)
        alignstackq['avgmrcfile'] = os.path.basename(averagemrc)
        alignstackq['refstackfile'] = os.path.basename(refstack)
        alignstackq['iteration'] = self.params['numiter']
        alignstackq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        alignstackq['alignrun'] = alignrunq

        ### check to make sure files exist
        alignimagicfilepath = os.path.join(self.params['rundir'],
                                           alignstackq['imagicfile'])
        if not os.path.isfile(alignimagicfilepath):
            apDisplay.printError("could not find stack file: " +
                                 alignimagicfilepath)
        avgmrcfile = os.path.join(self.params['rundir'],
                                  alignstackq['avgmrcfile'])
        if not os.path.isfile(avgmrcfile):
            apDisplay.printError("could not find average mrc file: " +
                                 avgmrcfile)
        refstackfile = os.path.join(self.params['rundir'],
                                    alignstackq['refstackfile'])
        if not os.path.isfile(refstackfile):
            # bugfix: was a typo'd call "printErrrefqor" (AttributeError)
            apDisplay.printError("could not find reference stack file: " +
                                 refstackfile)

        ### continue setting values
        alignstackq['stack'] = apStack.getOnlyStackData(self.params['stackid'])
        alignstackq['boxsize'] = apFile.getBoxSize(alignimagicfilepath)[0]
        alignstackq['pixelsize'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid']) * self.params['bin']
        alignstackq['description'] = self.params['description']
        alignstackq['hidden'] = False
        alignstackq['num_particles'] = apFile.numImagesInStack(
            alignimagicfilepath)

        ### inserting particles and references
        apDisplay.printColor("Inserting particle alignment data, please wait",
                             "cyan")
        for emanpartnum in range(self.params['numpart']):
            # EMAN numbering is 0-based; database particle numbers are 1-based
            partnum = emanpartnum + 1
            if partnum % 100 == 0:
                sys.stderr.write(".")

            ### setup reference
            refq = appiondata.ApAlignReferenceData()
            refnum = particlemapping[emanpartnum]
            refq['refnum'] = refnum
            refq['iteration'] = self.params['numiter']
            refq['path'] = appiondata.ApPathData(
                path=os.path.abspath(self.params['rundir']))
            refq['alignrun'] = alignrunq

            ### TODO: create mrc file
            #refq['mrcfile'] = refbase+".mrc"
            #reffile = os.path.join(self.params['rundir'], refq['mrcfile'])
            #if not os.path.isfile(reffile):
            #	emancmd = "proc2d "+refstack+".xmp "+refstack+".mrc"
            #	apEMAN.executeEmanCmd(emancmd, verbose=False)
            #if not os.path.isfile(reffile):
            #	apDisplay.printError("could not find reference file: "+reffile)

            ### TODO: get resolution
            #refq['ssnr_resolution'] = TODO

            ### setup particle
            alignpartq = appiondata.ApAlignParticleData()
            alignpartq['partnum'] = partnum
            alignpartq['alignstack'] = alignstackq
            stackpartdata = apStack.getStackParticle(self.params['stackid'],
                                                     partnum)
            alignpartq['stackpart'] = stackpartdata
            ### TODO: get the alignment parameters
            #alignpartq['xshift'] = partdict['xshift']
            #alignpartq['yshift'] = partdict['yshift']
            #alignpartq['rotation'] = partdict['inplane']
            #alignpartq['mirror'] = partdict['mirror']
            alignpartq['ref'] = refq
            ### TODO: get the score
            #alignpartq['score'] = partdict['score']

            ### insert only when a commit was requested
            if self.params['commit'] is True:
                alignpartq.insert()

        return
Exemple #25
0
    def start(self):
        """
        Build a substack of an existing stack by keeping or dropping
        particles according to class membership and optional shift/score/
        spread cutoffs, then create, average, and (optionally) commit it.
        """
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
        newstack = os.path.join(self.params['rundir'], stackdata['name'])
        apStack.checkForPreviousStack(newstack)

        includelist = []
        excludelist = []
        ### list of classes to be excluded
        if self.params['dropclasslist'] is not None:
            excludestrlist = self.params['dropclasslist'].split(",")
            for excludeitem in excludestrlist:
                excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))

        ### list of classes to be included
        if self.params['keepclasslist'] is not None:
            includestrlist = self.params['keepclasslist'].split(",")
            for includeitem in includestrlist:
                includelist.append(int(includeitem.strip()))

        ### or read from keepfile
        elif self.params['keepfile'] is not None:
            keeplistfile = open(self.params['keepfile'])
            for line in keeplistfile:
                if self.params['excludefrom'] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
            keeplistfile.close()
        apDisplay.printMsg("Include list: " + str(includelist))

        ### get particles from align or cluster stack
        apDisplay.printMsg("Querying database for particles")
        q0 = time.time()
        if self.params['alignid'] is not None:
            alignpartq = appiondata.ApAlignParticleData()
            alignpartq['alignstack'] = self.alignstackdata
            particles = alignpartq.query()
        elif self.params['clusterid'] is not None:
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq['clusterstack'] = self.clusterstackdata
            particles = clusterpartq.query()
        apDisplay.printMsg("Complete in " +
                           apDisplay.timeString(time.time() - q0))

        ### decide per particle; log every decision to test.log
        includeParticle = []
        excludeParticle = 0
        badscore = 0
        badshift = 0
        badspread = 0
        f = open("test.log", "w")
        count = 0
        for part in particles:
            count += 1
            #partnum = part['partnum']-1
            if 'alignparticle' in part:
                alignpart = part['alignparticle']
                classnum = int(part['refnum']) - 1
            else:
                alignpart = part
                classnum = int(part['ref']['refnum']) - 1
            # EMAN numbering is 0-based; DB particleNumber is 1-based
            emanstackpartnum = alignpart['stackpart']['particleNumber'] - 1

            ### check shift
            if self.params['maxshift'] is not None:
                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                if shift > self.params['maxshift']:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badshift += 1
                    continue

            if self.params['minscore'] is not None:
                ### check score
                if (alignpart['score'] is not None
                        and alignpart['score'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badscore += 1
                    continue

                ### check spread (compared against the same minscore cutoff)
                if (alignpart['spread'] is not None
                        and alignpart['spread'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badspread += 1
                    continue

            # NOTE(review): when both lists are empty, every particle falls
            # through to the exclude branch - confirm intended
            if includelist and classnum in includelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" %
                        (count, emanstackpartnum, classnum))
            elif excludelist and classnum not in excludelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" %
                        (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n" %
                        (count, emanstackpartnum, classnum))

        f.close()
        includeParticle.sort()
        # typo fix: these messages previously read "paricles"
        if badshift > 0:
            apDisplay.printMsg("%d particles had a large shift" % (badshift))
        if badscore > 0:
            apDisplay.printMsg("%d particles had a low score" % (badscore))
        if badspread > 0:
            apDisplay.printMsg("%d particles had a low spread" % (badspread))
        apDisplay.printMsg("Keeping " + str(len(includeParticle)) +
                           " and excluding " + str(excludeParticle) +
                           " particles")

        #print includeParticle

        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack with %s classes excluded" %
                (numparticles, self.params['dropclasslist']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack with %s classes included" %
                (numparticles, self.params['keepclasslist']))

        ### create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=self.params['savebad'])

        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        apStack.averageStack(stack=newstack)
        if self.params['commit'] is True:
            apStack.commitSubStack(self.params)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
	def start(self):
		self.params['output_fileformat'] = 'mrc'
		newstackname='framealigned.hed'
		stackdata=apStack.getStackParticlesFromId(self.params['stackid'])
		stackrundata=apStack.getOnlyStackData(self.params['stackid'])
		apix=stackrundata['pixelsize']*1e10
		kev=stackdata[0]['particle']['image']['scope']['high tension']/1000
		origstackpath=os.path.join(stackrundata['path']['path'],stackrundata['name'])
		boxsize=stackdata[0]['stackRun']['stackParams']['boxSize']
		binning=stackdata[0]['stackRun']['stackParams']['bin']	
		
		#determine camera type
		cameratype=stackdata[0]['particle']['image']['camera']['ccdcamera']['name']
		if self.params['override_camera'] is not None:
			cameratype=self.params['override_camera']
		
		#create sorted boxfiles
		imagedict={}
		masterlist=[]
		for particle in stackdata:
			parentimage=particle['particle']['image']['filename']
			if parentimage in imagedict.keys():
				imagedict[parentimage].append(particle['particle'])
			else:
				imagedict[parentimage]=[]
				imagedict[parentimage].append(particle['particle'])
			index=len(imagedict[parentimage])-1
			masterlist.append({'particle':particle,'key':parentimage,'index':index})
		#print masterlist
		
		for key in imagedict:
			particlelst=imagedict[key]
			parentimage=key
			framespath=particlelst[0]['image']['session']['frame path']
			
			print cameratype
			if 'Gatan' in cameratype:
				#prepare frames
				print framespath
				
				#prepare frame directory
				framespathname=os.path.join(self.params['rundir'],parentimage+'.frames')
				if os.path.exists(framespathname):
					pass
				else:
					os.mkdir(framespathname)
				print framespathname
				
				mrcframestackname=parentimage+'.frames.mrc'
				
				print mrcframestackname
				
				nframes=particlelst[0]['image']['camera']['nframes']
				
				print "Extracting frames for", mrcframestackname
				for n in range(nframes):
					a=mrc.read(os.path.join(framespath,mrcframestackname),n)
					numpil.write(a,imfile=os.path.join(framespathname,'RawImage_%d.tif' % (n)), format='tiff')
				
			elif 'DE' in cameratype:
				framespathname=os.path.join(framespath,parentimage+'.frames')
			
			print os.getcwd()
			print framespathname
			#generate DE script call
			if os.path.exists(framespathname):
				print "found frames for", parentimage

				nframes=particlelst[0]['image']['camera']['nframes']
				boxname=parentimage + '.box'
				boxpath=os.path.join(framespathname,boxname)
				shiftdata={'scale':1,'shiftx':0,'shifty':0}

				#flatfield references
				brightrefpath=particlelst[0]['image']['bright']['session']['image path']
				brightrefname=particlelst[0]['image']['bright']['filename']
				brightnframes=particlelst[0]['image']['bright']['camera']['nframes']
				darkrefpath=particlelst[0]['image']['dark']['session']['image path']
				darkrefname=particlelst[0]['image']['dark']['filename']
				darknframes=particlelst[0]['image']['dark']['camera']['nframes']
				brightref=os.path.join(brightrefpath,brightrefname+'.mrc')
				darkref=os.path.join(darkrefpath,darkrefname+'.mrc')
				print brightref
				print darkref			
				apBoxer.processParticleData(particle['particle']['image'],boxsize,particlelst,shiftdata,boxpath)
				print framespathname			

				#set appion specific options
				self.params['gainreference_filename']=brightref
				self.params['gainreference_framecount']=brightnframes
				self.params['darkreference_filename']=darkref
				self.params['darkreference_framecount']=darknframes
				self.params['input_framecount']=nframes
				self.params['boxes_fromfiles']=1
				#self.params['run_verbosity']=3
				self.params['output_invert']=0
				#self.params['radiationdamage_apix=']=apix
				self.params['radiationdamage_voltage']=kev
				#self.params['boxes_boxsize']=boxsize

				outpath=os.path.join(self.params['rundir'],key)
				if os.path.exists(outpath):
					shutil.rmtree(outpath)
				os.mkdir(outpath)
				
				command=['deProcessFrames.py']
				keys=self.params.keys()
				keys.sort()
				for key in keys:
					param=self.params[key]
					#print key, param, type(param)
					if param == None or param=='':
						pass
					else:
						option='--%s=%s' % (key,param)
						command.append(option)
				command.append(outpath)
				command.append(framespathname)
				print command
				if self.params['dryrun'] is False:
					subprocess.call(command)
					
		
		#recreate particle stack
		for n,particledict in enumerate(masterlist):
			parentimage=particledict['key']
			correctedpath=os.path.join(self.params['rundir'],parentimage)
			print correctedpath
			if os.path.exists(correctedpath):
			
				correctedparticle=glob.glob(os.path.join(correctedpath,('%s.*.region_%03d.*' % (parentimage,particledict['index']))))
				print os.path.join(correctedpath,('%s.*.region_%03d.*' % (parentimage,particledict['index'])))
				print correctedparticle
				#sys.exit()
				command=['proc2d',correctedparticle[0], newstackname]
				if self.params['output_rotation'] !=0:
					command.append('rot=%d' % self.params['output_rotation'])
				
				if self.params['show_DE_command'] is True:
					print command
				subprocess.call(command)
			else:
				print "did not find frames for ", parentimage
				command=['proc2d', origstackpath, newstackname,('first=%d' % n), ('last=%d' % n)]
				print command
				if self.params['dryrun'] is False:
					subprocess.call(command)
				
		#upload stack
		
		#make keep file
		self.params['keepfile']='keepfile.txt'
		f=open(self.params['keepfile'],'w')
		for n in range(len(masterlist)):
			f.write('%d\n' % (n))
		f.close()
		
		apStack.commitSubStack(self.params, newname=newstackname)
		apStack.averageStack(stack=newstackname)
		
		print "Done!!!!"
	def start(self):
		### get stack parameteres
		self.stack = {}
		self.stack['data'] = apStack.getOnlyStackData(self.params['stackId'])
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackId'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackId'])
		self.stack['file'] = os.path.join(self.stack['data']['path']['path'], self.stack['data']['name'])

		### copy stack into working directory	
		if os.path.isfile(self.stack['file']):
			apDisplay.printColor("copying stack into running directoroy", "cyan")
			if self.stack['file'][-4:] == ".img" or self.stack['file'][-4:] == ".hed":
				strippedfile = self.stack['file'][:-4]
			else:
				strippedfile = self.stack['file']
			while os.path.isfile(os.path.join(self.params['rundir'], "start.img")):
				apFile.removeStack(os.path.join(self.params['rundir'], "start.img"))
			emancmd = "proc2d "+strippedfile+".hed "+os.path.join(self.params['rundir'], "start.hed ")+\
				"first=0 last="+str(self.params['numpart']-1)
			apParam.runCmd(emancmd, "EMAN")
		else:
			apDisplay.printError("stack not found in database")
	
		### get template stack parameters
		self.templatestack = {}
		self.templatestack['data'] = appiondata.ApTemplateStackData.direct_query(self.params['templateStackId'])
		self.templatestack['apix'] = self.templatestack['data']['apix']
		self.templatestack['boxsize'] = self.templatestack['data']['boxsize']	
		self.templatestack['file'] = os.path.join(self.templatestack['data']['path']['path'], self.templatestack['data']['templatename'])
		self.templatestack['numimages'] = self.templatestack['data']['numimages']

		### copy templates into working directory
		if os.path.isfile(self.templatestack['file']):
			apDisplay.printColor("copying templates into running directoroy", "cyan")
			ts = os.path.join(self.params['rundir'], "references.img")		
			while os.path.isfile(ts):
				apFile.removeStack(ts)
			if self.templatestack['file'][-4:] == ".img" or self.templatestack['file'][-4:] == ".hed":
				strippedfile = self.templatestack['file'][:-4]
			else:
				strippedfile = self.templatestack['file']
			emancmd = "proc2d "+strippedfile+".img "+ts
			apParam.runCmd(emancmd, "EMAN")
		else:
			apDisplay.printError("template stack not found in database")

		### set new pixelsize
		if self.params['bin'] is not None and self.params['bin'] != 0:
			self.params['apix'] = float(self.stack['apix']) * int(self.params['bin'])
		else:
			self.params['apix'] = self.stack['apix']

		### scale, low-pass, and high-pass filter stack ... do this with imagic, because it determines the appropriate boxsizes
		scalingbatchfile = self.createImagicBatchFileScaling()
		preptime = time.time()
		proc = subprocess.Popen("chmod 775 "+str(scalingbatchfile), shell=True)
		proc.wait()
		os.chdir(self.params['rundir'])
		apParam.runCmd(scalingbatchfile, "IMAGIC")
		apIMAGIC.checkLogFileForErrors(os.path.join(self.params['rundir'], "prepareStack.log"))
               	apDisplay.printColor("finished IMAGIC in "+apDisplay.timeString(time.time()-preptime), "cyan")

		### set new boxsize, done only after scaling is complete
		if self.params['bin'] is not None:
			self.params['boxsize'] = apFile.getBoxSize(os.path.join(self.params['rundir'], "start.hed"))[0]
		else:
			self.params['boxsize'] = self.stack['boxsize']

		### make sure template stack boxsize matches that of the input stack
		if self.params['apix'] != self.templatestack['apix'] or self.params['boxsize'] != self.templatestack['boxsize']:
			self.scaleTemplates()

		starttime=time.time()
		print self.params
		print "... stack pixel size: "+str(self.params['apix'])
		print "... stack box size: "+str(self.params['boxsize'])	
		apDisplay.printColor("Running IMAGIC .batch file: See multiReferenceAlignment.log file for details", "cyan")
	
		### create IMAGIC batch file
		batchfile = self.createImagicBatchFileMRA()

		### execute IMAGIC batch file
		aligntime0 = time.time()
		proc = subprocess.Popen("chmod 775 "+str(batchfile), shell=True)
		proc.wait()
		os.chdir(self.params['rundir'])
		apParam.runCmd(batchfile, "IMAGIC")
		apIMAGIC.checkLogFileForErrors(os.path.join(self.params['rundir'], "multiReferenceAlignment.log"))
               	apDisplay.printColor("finished IMAGIC in "+apDisplay.timeString(time.time()-aligntime0), "cyan")

		### get particle parameters (shift, rotate, refnum, mirror, ccc)
		partparams = self.getParticleParams()

		### average stack
		alignstack = os.path.join(self.params['rundir'], "alignstack.hed")
		apStack.averageStack(alignstack)	

		### normalize particles (otherwise found problems in viewing with stackviewer)
		emancmd = "proc2d "+alignstack+" "+alignstack+".norm.hed norm"
		while os.path.isfile(alignstack+".norm.img"):
			apFile.removeStack(alignstack+".norm.img")
		apParam.runCmd(emancmd, "EMAN")
		os.rename(alignstack+".norm.hed", alignstack)
		os.rename(alignstack+".norm.img", alignstack[:-4]+".img")

		### normalize references
		emancmd = "proc2d "+ts+" "+ts+".norm.hed norm"
		while os.path.isfile(ts+".norm.img"):
			apFile.removeStack(ts+".norm.img")
		apParam.runCmd(emancmd, "EMAN")
		os.rename(ts+".norm.hed", ts)
		os.rename(ts+".norm.img", ts[:-4]+".img")

		### remove copied stack
		while os.path.isfile(os.path.join(self.params['rundir'], "start.img")):
			apFile.removeStack(os.path.join(self.params['rundir'], "start.img"))

		### insert run into database
		self.insertAlignmentRun(insert=True)
		self.insertParticlesIntoDatabase(partparams, insert=True)
Exemple #28
0
	def start(self):
		"""
		Run iterative reference-free classification/alignment on a stack
		(IMAGIC MSA or CAN for classification; IMAGIC or EMAN for the
		multi-reference alignment step), then upload the results.
		"""
		self.insertTopolRepJob()
		self.stack = {}
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		if self.params['virtualdata'] is not None:
			self.stack['file'] = self.params['virtualdata']['filename']
		else:
			self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
		self.dumpParameters()

		self.params['canexe'] = self.getCANPath()

		### process stack to local file
		self.params['localstack'] = os.path.join(self.params['rundir'], self.timestamp+".hed")

		# build the proc2d-style preprocessing job (bin/filter/invert/mask)
		a = proc2dLib.RunProc2d()
		a.setValue('infile',self.stack['file'])
		a.setValue('outfile',self.params['localstack'])
		a.setValue('apix',self.stack['apix'])
		a.setValue('bin',self.params['bin'])
		a.setValue('last',self.params['numpart']-1)
		a.setValue('append',False)

		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			a.setValue('lowpass',self.params['lowpass'])
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			a.setValue('highpass',self.params['highpass'])
		if self.params['invert'] is True:
			a.setValue('invert',True)
		if self.params['premask'] is True and self.params['mramethod'] != 'imagic':
			a.setValue('mask',self.params['mask'])

		# virtual stacks carry an explicit particle subset (0-based list)
		if self.params['virtualdata'] is not None:
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)

		if self.params['uploadonly'] is not True:
			# reuse an already-preprocessed stack if one exists in rundir
			if os.path.isfile(os.path.join(self.params['rundir'],"stack.hed")):
				self.params['localstack']=os.path.join(self.params['rundir'],"stack.hed")
			else:
				a.run()
			if self.params['numpart'] != apFile.numImagesInStack(self.params['localstack']):
				apDisplay.printError("Missing particles in stack")

			### IMAGIC mask particles before alignment
			if self.params['premask'] is True and self.params['mramethod'] == 'imagic':
				# convert mask to fraction for imagic
				# NOTE(review): under Python 2 this is integer division if
				# both operands are ints - confirm intended
				maskfrac = self.workingmask*2/self.workingboxsize
				maskstack = imagicFilters.softMask(self.params['localstack'],mask=maskfrac)
				shutil.move(maskstack+".hed",os.path.splitext(self.params['localstack'])[0]+".hed")
				shutil.move(maskstack+".img",os.path.splitext(self.params['localstack'])[0]+".img")

		origstack = self.params['localstack']
		### find number of processors
#		if self.params['nproc'] is None:
		self.params['nproc'] = apParam.getNumProcessors()

		if self.params['uploadonly'] is not True:
			aligntime = time.time()
			# run through iterations
			for i in range(0,self.params['iter']+1):
				# move back to starting directory
				os.chdir(self.params['rundir'])

				# set up next iteration directory
				self.params['currentiter'] = i
				self.params['iterdir'] = os.path.abspath("iter%02i" % i)
				# strip site-specific storage prefix from the path
				self.params['iterdir'] = string.replace(self.params['iterdir'],"/jetstor/APPION","")
				if os.path.exists(self.params['iterdir']):
					apDisplay.printError("Error: directory '%s' exists, aborting alignment" % self.params['iterdir'])

				# create directory for iteration
				os.makedirs(self.params['iterdir'])
				os.chdir(self.params['iterdir'])

				# if at first iteration, create initial class averages
				if i == 0:
					# first rewrite localstack headers if particles not pre-masked
					if self.params['premask'] is False and self.params['mramethod'] == "imagic":
						imagicFilters.takeoverHeaders(self.params['localstack'],self.params['numpart'],self.workingboxsize)
					self.params['alignedstack'] = os.path.splitext(self.params['localstack'])[0]
					if self.params['msamethod']=='imagic':
						self.runIMAGICmsa()
					else:
						self.runCAN()
					continue

				# using references from last iteration, run multi-ref alignment
				if self.params['mramethod'] == "imagic":
					# rewrite class headers
					imagicFilters.takeoverHeaders(self.params['currentcls'],self.params['currentnumclasses'],self.workingboxsize)
					self.runIMAGICmra()
				else:
					self.runEMANmra()

				# create class averages from aligned stack
				if self.params['msamethod']=='imagic':
					self.runIMAGICmsa()
				else:
					self.runCAN()

			aligntime = time.time() - aligntime
			apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))

		## set upload information params:
		else:
			## get last iteration
			alliters = glob.glob("iter*")
			alliters.sort()

			## get iteration number from iter dir
			self.params['currentiter'] = int(alliters[-1][-2:])
			self.params['iterdir'] = os.path.join(self.params['rundir'],alliters[-1])
			self.params['currentcls'] = "classes%02i"%(self.params['currentiter'])

			## go into last iteration directory
			os.chdir(self.params['iterdir'])
			self.params['alignedstack'] = os.path.abspath("mrastack")
			# if class averages were left in rundir, move them into iterdir
			if os.path.isfile(os.path.join(self.params['rundir'],self.params['currentcls']+".hed")):
				p1 = os.path.join(self.params['rundir'],self.params['currentcls'])
				p2 = os.path.join(self.params['iterdir'],self.params['currentcls'])
				shutil.move(p1+".hed",p2+".hed")
				shutil.move(p1+".img",p2+".img")

		## sort the class averages
		self.sortClassAverages()

		### get particle information from last iteration
		if self.params['mramethod']=='imagic':
			partlist = self.readPartIMAGICFile()
		else:
			partlist = self.readPartEMANFile()
		if self.params['msamethod']=='imagic':
			partrefdict = self.imagicClassificationToDict()
		else:
			partrefdict = self.canClassificationToDict()

		# move back to starting directory
		os.chdir(self.params['rundir'])

		# move aligned stack to current directory for appionweb
		if not os.path.isfile("mrastack.hed"):
			shutil.move(self.params['alignedstack']+".hed","mrastack.hed")
			shutil.move(self.params['alignedstack']+".img","mrastack.img")
			# rewrite header
			if self.params['mramethod'] == "imagic" or self.params['msamethod'] == 'imagic':
				imagicFilters.takeoverHeaders("mrastack",self.params['numpart'],self.workingboxsize)

		# move actual averages to current directory
		if self.params['msamethod']=='can':
			if not os.path.isfile("classes_avg.hed"):
				self.applySort()
			# save actual class averages as refs in database
			self.params['currentcls']="classes_avg"

		### create an average mrc of final references
		if not os.path.isfile("average.mrc"):
			apStack.averageStack(stack=self.params['currentcls']+".hed")
			self.dumpParameters()

		### remove the filtered stack
		apFile.removeStack(origstack)

		### save to database
		if self.params['commit'] is True:
			self.insertRunIntoDatabase()
			self.insertParticlesIntoDatabase(partlist, partrefdict)
    def start(self):
        """Create a sub-stack of the particles flagged as good in a refinement.

        Queries ApRefineParticleData for the given iteration, keeps particles
        with postRefine_keep == 1, writes them to a keepfile, builds the new
        stack from it, and optionally commits the stack (plus a mean/stdev
        plot) to the database.

        Fix over previous version: the log and keepfile handles are now
        managed with 'with' blocks so they are closed even if a database
        query or write raises mid-loop.
        """
        ### new stack path
        oldstack = os.path.join(self.stackdata['path']['path'],
                                self.stackdata['name'])
        newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
        # refuse to overwrite an existing stack of the same name
        apStack.checkForPreviousStack(newstack)

        ### get particles from stack
        apDisplay.printMsg("Querying stack particles")
        t0 = time.time()
        stackpartq = appiondata.ApRefineParticleData()
        stackpartq['refineIter'] = self.iterdata
        particles = stackpartq.query()
        apDisplay.printMsg("Finished in " +
                           apDisplay.timeString(time.time() - t0))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        count = 0
        apDisplay.printMsg("Processing stack particles")
        t0 = time.time()
        with open("test.log", "w") as f:
            for part in particles:
                count += 1
                if count % 500 == 0:
                    # progress dot every 500 particles
                    sys.stderr.write(".")
                # EMAN particle numbering is zero-based; database is one-based
                emanstackpartnum = part['particle']['particleNumber'] - 1

                if part['postRefine_keep'] == 1:
                    ### good particle
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\tinclude\n" % (count, emanstackpartnum))
                else:
                    ### bad particle
                    excludeParticle += 1
                    f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
        sys.stderr.write("\n")
        apDisplay.printMsg("Finished in " +
                           apDisplay.timeString(time.time() - t0))

        includeParticle.sort()
        apDisplay.printMsg("Keeping " + str(len(includeParticle)) +
                           " and excluding " + str(excludeParticle) +
                           " particles")

        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        with open(self.params['keepfile'], "w") as kf:
            for partnum in includeParticle:
                kf.write(str(partnum) + "\n")

        ### get number of particles
        numparticles = len(includeParticle)
        self.params['description'] += (" ... %d no jumpers substack" %
                                       (numparticles, ))

        ### create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=True)

        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        apStack.averageStack(stack=newstack)
        if self.params['commit'] is True:
            apStack.commitSubStack(self.params)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
	def start(self):
		"""Iterative reference-based (SPIDER/GPU) alignment of a stack.

		Converts the stack to SPIDER format, aligns it against a template
		stack for 'numiter' iterations (regenerating templates each pass),
		converts the final aligned stack to IMAGIC format, computes the
		per-reference resolution, and optionally commits the run to the DB.
		"""
		self.stack = {}
		self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
		self.stack['file'] = os.path.join(self.stack['data']['path']['path'], self.stack['data']['name'])

		### test insert to make sure data is not overwritten
		self.params['runtime'] = 0
		#self.checkDuplicateRefBasedRun()

		### set box size
		# binned boxsize rounded down to the nearest even number
		self.boxsize = int(math.floor(self.stack['boxsize']/self.params['bin']/2.0))*2

		### convert stack to spider
		spiderstack = self.createSpiderFile()

		### create template stack
		templatestack = self.createTemplateStack()

		### run the alignment
		aligntime = time.time()
		usestack = spiderstack
		oldpartlist = None
		# NOTE(review): if numiter < 1 this loop body never runs and
		# 'alignedstack'/'partlist' used below would be undefined --
		# confirm numiter is validated upstream
		for i in range(self.params['numiter']):
			iternum = i+1
			apDisplay.printColor("\n\nITERATION "+str(iternum), "green")
			alignedstack, partlist = self.runAlignmentGPU(
				usestack, templatestack, spiderstack,
				self.params['xysearch'], self.params['xystep'],
				self.params['firstring'], self.params['lastring'],
				iternum=iternum, oldpartlist=oldpartlist)
			oldpartlist = partlist
			# each iteration aligns the output of the previous one
			usestack = alignedstack
			# regenerate templates from the freshly aligned particles
			templatestack = self.updateTemplateStack(alignedstack, partlist, iternum)
		aligntime = time.time() - aligntime
		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))

		### remove large, worthless stack
		# assumes createSpiderFile() wrote "start.spi" in the run dir -- TODO confirm
		spiderstack = os.path.join(self.params['rundir'], "start.spi")
		apDisplay.printMsg("Removing un-aligned stack: "+spiderstack)
		apFile.removeFile(spiderstack, warn=True)

		### convert aligned stack to imagic
		finalspistack = "aligned.spi"
		shutil.move(alignedstack, finalspistack)
		imagicstack = "aligned.hed"
		# remove any stale output pair before proc2d writes the new one
		apFile.removeStack(imagicstack)
		emancmd = "proc2d "+finalspistack+" "+imagicstack
		apEMAN.executeEmanCmd(emancmd, verbose=True)

		### average stack
		apStack.averageStack(imagicstack)

		### calculate resolution for each reference
		# effective pixel size after binning
		apix = self.stack['apix']*self.params['bin']
		self.calcResolution(partlist, imagicstack, apix)

		if self.params['commit'] is True:
			apDisplay.printMsg("committing results to DB")
			self.params['runtime'] = aligntime
			self.insertRefBasedRun(partlist, imagicstack, insert=True)
		else:
			apDisplay.printWarning("not committing results to DB")

		### remove temporary files
		apFile.removeFilePattern("alignments/alignedstack*.spi")
		apFile.removeFile(finalspistack)
    def start(self):
        """IMAGIC multi-reference alignment (MRA) of a particle stack.

        Copies the stack and template stack into the run directory, scales
        and filters them via a generated IMAGIC batch file, runs the MRA
        batch file, normalizes the resulting stacks, and inserts the run
        and per-particle alignment parameters into the database.
        """
        ### get stack parameters
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackId'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackId'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackId'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])

        ### copy stack into working directory
        if os.path.isfile(self.stack['file']):
            apDisplay.printColor("copying stack into running directoroy",
                                 "cyan")
            # strip the .img/.hed extension to get the stack base name
            if self.stack['file'][-4:] == ".img" or self.stack['file'][
                    -4:] == ".hed":
                strippedfile = self.stack['file'][:-4]
            else:
                strippedfile = self.stack['file']
            # loop until the old copy (.img/.hed pair) is really gone
            while os.path.isfile(
                    os.path.join(self.params['rundir'], "start.img")):
                apFile.removeStack(
                    os.path.join(self.params['rundir'], "start.img"))
            # copy only the first 'numpart' particles via EMAN proc2d
            emancmd = "proc2d "+strippedfile+".hed "+os.path.join(self.params['rundir'], "start.hed ")+\
                    "first=0 last="+str(self.params['numpart']-1)
            apParam.runCmd(emancmd, "EMAN")
        else:
            # NOTE(review): printError appears to abort the run here --
            # nothing below executes when the stack is missing; confirm
            apDisplay.printError("stack not found in database")

        ### get template stack parameters
        self.templatestack = {}
        self.templatestack[
            'data'] = appiondata.ApTemplateStackData.direct_query(
                self.params['templateStackId'])
        self.templatestack['apix'] = self.templatestack['data']['apix']
        self.templatestack['boxsize'] = self.templatestack['data']['boxsize']
        self.templatestack['file'] = os.path.join(
            self.templatestack['data']['path']['path'],
            self.templatestack['data']['templatename'])
        self.templatestack['numimages'] = self.templatestack['data'][
            'numimages']

        ### copy templates into working directory
        if os.path.isfile(self.templatestack['file']):
            apDisplay.printColor("copying templates into running directoroy",
                                 "cyan")
            # 'ts' (references.img path) is reused below when normalizing
            ts = os.path.join(self.params['rundir'], "references.img")
            while os.path.isfile(ts):
                apFile.removeStack(ts)
            if self.templatestack['file'][-4:] == ".img" or self.templatestack[
                    'file'][-4:] == ".hed":
                strippedfile = self.templatestack['file'][:-4]
            else:
                strippedfile = self.templatestack['file']
            emancmd = "proc2d " + strippedfile + ".img " + ts
            apParam.runCmd(emancmd, "EMAN")
        else:
            apDisplay.printError("template stack not found in database")

        ### set new pixelsize
        # binning scales the effective pixel size
        if self.params['bin'] is not None and self.params['bin'] != 0:
            self.params['apix'] = float(self.stack['apix']) * int(
                self.params['bin'])
        else:
            self.params['apix'] = self.stack['apix']

        ### scale, low-pass, and high-pass filter stack ... do this with imagic, because it determines the appropriate boxsizes
        scalingbatchfile = self.createImagicBatchFileScaling()
        preptime = time.time()
        # make the generated batch file executable before running it
        proc = subprocess.Popen("chmod 775 " + str(scalingbatchfile),
                                shell=True)
        proc.wait()
        os.chdir(self.params['rundir'])
        apParam.runCmd(scalingbatchfile, "IMAGIC")
        apIMAGIC.checkLogFileForErrors(
            os.path.join(self.params['rundir'], "prepareStack.log"))
        apDisplay.printColor(
            "finished IMAGIC in " +
            apDisplay.timeString(time.time() - preptime), "cyan")

        ### set new boxsize, done only after scaling is complete
        if self.params['bin'] is not None:
            self.params['boxsize'] = apFile.getBoxSize(
                os.path.join(self.params['rundir'], "start.hed"))[0]
        else:
            self.params['boxsize'] = self.stack['boxsize']

        ### make sure template stack boxsize matches that of the input stack
        if self.params['apix'] != self.templatestack['apix'] or self.params[
                'boxsize'] != self.templatestack['boxsize']:
            self.scaleTemplates()

        starttime = time.time()
        # Python 2 print statements (debug output of run parameters)
        print self.params
        print "... stack pixel size: " + str(self.params['apix'])
        print "... stack box size: " + str(self.params['boxsize'])
        apDisplay.printColor(
            "Running IMAGIC .batch file: See multiReferenceAlignment.log file for details",
            "cyan")

        ### create IMAGIC batch file
        batchfile = self.createImagicBatchFileMRA()

        ### execute IMAGIC batch file
        aligntime0 = time.time()
        proc = subprocess.Popen("chmod 775 " + str(batchfile), shell=True)
        proc.wait()
        os.chdir(self.params['rundir'])
        apParam.runCmd(batchfile, "IMAGIC")
        apIMAGIC.checkLogFileForErrors(
            os.path.join(self.params['rundir'], "multiReferenceAlignment.log"))
        apDisplay.printColor(
            "finished IMAGIC in " +
            apDisplay.timeString(time.time() - aligntime0), "cyan")

        ### get particle parameters (shift, rotate, refnum, mirror, ccc)
        partparams = self.getParticleParams()

        ### average stack
        alignstack = os.path.join(self.params['rundir'], "alignstack.hed")
        apStack.averageStack(alignstack)

        ### normalize particles (otherwise found problems in viewing with stackviewer)
        emancmd = "proc2d " + alignstack + " " + alignstack + ".norm.hed norm"
        while os.path.isfile(alignstack + ".norm.img"):
            apFile.removeStack(alignstack + ".norm.img")
        apParam.runCmd(emancmd, "EMAN")
        # replace the un-normalized .hed/.img pair with the normalized one
        os.rename(alignstack + ".norm.hed", alignstack)
        os.rename(alignstack + ".norm.img", alignstack[:-4] + ".img")

        ### normalize references
        emancmd = "proc2d " + ts + " " + ts + ".norm.hed norm"
        while os.path.isfile(ts + ".norm.img"):
            apFile.removeStack(ts + ".norm.img")
        apParam.runCmd(emancmd, "EMAN")
        os.rename(ts + ".norm.hed", ts)
        os.rename(ts + ".norm.img", ts[:-4] + ".img")

        ### remove copied stack
        while os.path.isfile(os.path.join(self.params['rundir'], "start.img")):
            apFile.removeStack(os.path.join(self.params['rundir'],
                                            "start.img"))

        ### insert run into database
        self.insertAlignmentRun(insert=True)
        self.insertParticlesIntoDatabase(partparams, insert=True)
	def start(self):
#		self.insertCL2DJob()
		self.stack = {}
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])

		if self.params['virtualdata'] is not None:
			self.stack['file'] = self.params['virtualdata']['filename']
		else:
			self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		### process stack to local file
		if self.params['timestamp'] is None:
			apDisplay.printMsg("creating timestamp")
			self.params['timestamp'] = self.timestamp
		self.params['localstack'] = os.path.join(self.params['rundir'], self.params['timestamp']+".hed")
 		if os.path.isfile(self.params['localstack']):
 			apFile.removeStack(self.params['localstack'])

		a = proc2dLib.RunProc2d()
		a.setValue('infile',self.stack['file'])
		a.setValue('outfile',self.params['localstack'])
		a.setValue('apix',self.stack['apix'])
		a.setValue('bin',self.params['bin'])
		a.setValue('last',self.params['numpart']-1)

		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			a.setValue('lowpass',self.params['lowpass'])
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			a.setValue('highpass',self.params['highpass'])
		if self.params['invert'] is True:
			a.setValue('invert',True)

		# clip not yet implemented
#		if self.params['clipsize'] is not None:
#			clipsize = int(self.clipsize)*self.params['bin']
#			if clipsize % 2 == 1:
#				clipsize += 1 ### making sure that clipped boxsize is even
#			a.setValue('clip',clipsize)

		if self.params['virtualdata'] is not None:
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)

		#run proc2d
		a.run()

 		if self.params['numpart'] != apFile.numImagesInStack(self.params['localstack']):
 			apDisplay.printError("Missing particles in stack")

		### setup Xmipp command
		aligntime = time.time()
 		xmippopts = (" -i "+os.path.join(self.params['rundir'], self.params['localstack'])
 			+" --nref "+str(self.params['numrefs'])
 			+" --iter "+str(self.params['maxiter'])
 			+" --odir "+str(self.params['rundir'])
 			+" --oroot "+ "part"+str(self.params['timestamp'])
			+" --classifyAllImages"
 		)
 
 		if self.params['correlation']:
 			xmippopts += " --distance correlation"
 		if self.params['classical']:
 			xmippopts += " --classicalMultiref"		
 
 
 		### use multi-processor command
 		apDisplay.printColor("Using "+str(self.params['nproc'])+" processors!", "green")
 		xmippexe = apParam.getExecPath(self.execFile, die=True)
 		mpiruncmd = self.mpirun+" -np "+str(self.params['nproc'])+" "+xmippexe+" "+xmippopts
 		self.writeXmippLog(mpiruncmd)
 		apParam.runCmd(mpiruncmd, package="Xmipp 3", verbose=True, showcmd=True, logfile="xmipp.std")
 		self.params['runtime'] = time.time() - aligntime
 		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(self.params['runtime']))
 
 		### post-processing
 		# Create a stack for the class averages at each level
 		Nlevels=glob.glob("level_*")
 		for level in Nlevels:
 			digits = level.split("_")[1]
 			apParam.runCmd("xmipp_image_convert -i "+level+"/part"+self.params['timestamp']+"*xmd -o part"
 						+self.params['timestamp']+"_level_"+digits+"_.hed", package="Xmipp 3", verbose=True)
 			
 		if self.params['align']:
			apParam.runCmd("xmipp_transform_geometry -i images.xmd -o %s_aligned.stk --apply_transform" % self.params['timestamp'], package="Xmipp 3", verbose=True)
 			apParam.runCmd("xmipp_image_convert -i %s_aligned.xmd -o alignedStack.hed" % self.params['timestamp'], package="Xmipp 3", verbose=True)
			apFile.removeFile("%s_aligned.xmd" % self.params['timestamp'])
			apFile.removeFile("%s_aligned.stk" % self.params['timestamp'])
 		
 		self.parseOutput()
 		apParam.dumpParameters(self.params, "cl2d-"+self.params['timestamp']+"-params.pickle")

		### upload results ... this used to be two separate operations, I'm combining into one
		self.runparams = apParam.readRunParameters("cl2d-"+self.params['timestamp']+"-params.pickle")
		self.apix = apStack.getStackPixelSizeFromStackId(self.runparams['stackid'])*self.runparams['bin']
		self.Nlevels=len(glob.glob("part"+self.params['timestamp']+"_level_??_.hed"))

		### create average of aligned stacks & insert aligned stack info
		lastLevelStack = "part"+self.params['timestamp']+"_level_%02d_.hed"%(self.Nlevels-1)
		apStack.averageStack(lastLevelStack)
		self.boxsize = apFile.getBoxSize(lastLevelStack)[0]
		self.insertCL2DParamsIntoDatabase()
		if self.runparams['align'] is True:
			self.insertAlignStackRunIntoDatabase("alignedStack.hed")
			self.calcResolution(self.Nlevels-1)
			self.insertAlignParticlesIntoDatabase(level=self.Nlevels-1)
		
		### loop over each class average stack & insert as clustering stacks
		self.insertClusterRunIntoDatabase()
		for level in range(self.Nlevels):
			### NOTE: RESOLUTION CAN ONLY BE CALCULATED IF ALIGNED STACK EXISTS TO EXTRACT / READ THE PARTICLES
			if self.params['align'] is True:
				self.calcResolution(level)
			partdict = self.getClassificationAtLevel(level)
			for classnum in partdict: 
				self.insertClusterStackIntoDatabase(
					"part"+self.params['timestamp']+"_level_%02d_.hed"%level,
					classnum+1, partdict[classnum], len(partdict))
		self.clearIntermediateFiles()
        def start(self):
                """Create a substack excluding particles with large Euler jumps.

                For each particle in the stack, looks up its ApEulerJumpData;
                particles whose median jump is unknown (None) or exceeds
                'maxjump' are excluded.  The kept particles are written to a
                keepfile, the new stack is built from it, and the result is
                optionally committed to the database.

                Fix over previous version: when jumpdata['median'] was None,
                the exclude branch formatted it with "%.1f" and raised
                TypeError; it now logs -1.0 for unknown medians.  File handles
                are also managed with 'with' blocks.
                """
                ### new stack path
                oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
                newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
                apStack.checkForPreviousStack(newstack)

                ### get particles from stack
                apDisplay.printMsg("Querying stack particles")
                t0 = time.time()
                stackpartq = appiondata.ApStackParticleData()
                stackpartq['stack'] = self.stackdata
                particles = stackpartq.query()
                apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

                ### write included particles to text file
                includeParticle = []
                excludeParticle = 0
                count = 0
                apDisplay.printMsg("Processing stack particles")
                t0 = time.time()
                with open("test.log", "w") as f:
                        for part in particles:
                                count += 1
                                if count%500 == 0:
                                        # progress dot every 500 particles
                                        sys.stderr.write(".")
                                # EMAN numbering is zero-based; database is one-based
                                emanstackpartnum = part['particleNumber']-1

                                ### get euler jump data
                                jumpq = appiondata.ApEulerJumpData()
                                jumpq['particle'] = part
                                jumpq['refineRun'] = self.recondata
                                jumpdatas = jumpq.query(results=1)
                                if not jumpdatas:
                                        ### no jump data for this particle; skip it
                                        continue
                                jumpdata = jumpdatas[0]

                                median = jumpdata['median']
                                if median is None or median > self.params['maxjump']:
                                        ### bad particle
                                        excludeParticle += 1
                                        # median may be None here; "%.1f" % None
                                        # raises TypeError, so log -1.0 instead
                                        if median is None:
                                                median = -1.0
                                        f.write("%d\t%d\t%.1f\texclude\n"%(count, emanstackpartnum, median))
                                else:
                                        ### good particle
                                        includeParticle.append(emanstackpartnum)
                                        f.write("%d\t%d\t%.1f\tinclude\n"%(count, emanstackpartnum, median))
                sys.stderr.write("\n")
                apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

                includeParticle.sort()
                apDisplay.printMsg("Keeping "+str(len(includeParticle))
                        +" and excluding "+str(excludeParticle)+" particles")

                ### write kept particles to file
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
                with open(self.params['keepfile'], "w") as kf:
                        for partnum in includeParticle:
                                kf.write(str(partnum)+"\n")

                ### get number of particles
                numparticles = len(includeParticle)
                self.params['description'] += ( " ... %d no jumpers substack" % (numparticles,))

                ### create the new sub stack
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)

                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")

                apStack.averageStack(stack=newstack)
                if self.params['commit'] is True:
                        apStack.commitSubStack(self.params)
                        newstackid = apStack.getStackIdFromPath(newstack)
                        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
    def start(self):
        """Create one or more sub-stacks from an existing stack.

        The particle selection comes from one of: --first/--last range,
        --random sampling, --split partitioning, an exclude-from keepfile,
        a plain keepfile, or explicit --include/--exclude particle lists.
        Each resulting stack is committed to the database and averaged.

        Fixes over previous version: include/exclude lists are initialized
        up front (previously an empty-string --exclude caused a NameError in
        the selection loop); per-particle membership tests now use sets
        (O(1) instead of O(n) list scans); files are opened with 'with'; the
        bare 'except' around int() is narrowed to ValueError.
        """
        # old stack size (number of particles in the source stack)
        stacksize = apStack.getNumberStackParticlesFromId(
            self.params['stackid'])

        # pre-initialize so the selection loop below cannot hit a NameError
        # when e.g. --exclude is passed as an empty string
        excludelist = []
        includelist = []

        # if exclude or include lists are not defined...
        if self.params['exclude'] is None and self.params['include'] is None:
            # if first and last are specified, create a keepfile for the range
            if self.params['first'] is not None and self.params[
                    'last'] is not None:
                stp = str(self.params['first'])
                enp = str(self.params['last'])
                fname = 'sub' + str(
                    self.params['stackid']) + '_' + stp + '-' + enp + '.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                       fname)
                apDisplay.printMsg("Creating keep list: " +
                                   self.params['keepfile'])
                # keepfile particle numbers are zero-based, hence the -1
                with open(self.params['keepfile'], 'w') as f:
                    for i in range(self.params['first'],
                                   self.params['last'] + 1):
                        f.write('%d\n' % (int(i) - 1))

            # generate a random particle list of the requested size
            elif self.params['random'] is not None:
                fname = "random%d_%d.lst" % (self.params['stackid'],
                                             self.params['random'])
                self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                       fname)
                apDisplay.printMsg("Creating keep list: " +
                                   self.params['keepfile'])
                # NOTE(review): samples from xrange(last), not from the full
                # stacksize -- confirm --last is required alongside --random
                randomList = random.sample(xrange(self.params['last']),
                                           self.params['random'])
                randomList.sort()
                with open(self.params['keepfile'], 'w') as f:
                    for partnum in randomList:
                        f.write('%d\n' % partnum)

            # if splitting, create one keepfile per split
            elif self.params['split'] > 1:
                for i in range(self.params['split']):
                    fname = 'sub' + str(
                        self.params['stackid']) + '.' + str(i + 1) + '.lst'
                    self.params['keepfile'] = os.path.join(
                        self.params['rundir'], fname)
                    apDisplay.printMsg("Creating keep list: " +
                                       self.params['keepfile'])
                    # particle p goes to split file i when p % split == i
                    with open(self.params['keepfile'], 'w') as f:
                        for p in range(stacksize):
                            if (p % self.params['split']) - i == 0:
                                f.write('%i\n' % p)

            # if exclude-from option is specified, invert the keepfile
            elif self.params['excludefile'] is True:
                partlist = []
                with open(self.params['keepfile']) as oldkf:
                    for line in oldkf:
                        particle = line.strip()
                        try:
                            particle = int(particle)
                        except ValueError:
                            # skip blank or non-numeric lines
                            continue
                        partlist.append(particle)
                # create new list excluding the listed particles
                apDisplay.printMsg("Converting keep file to exclude file")
                newkeepfile = "tmpnewkeepfile.txt"
                # set gives O(1) membership tests over the stacksize loop
                excluded = set(partlist)
                with open(newkeepfile, 'w') as newkf:
                    for p in range(stacksize):
                        if p not in excluded:
                            newkf.write("%i\n" % p)
                self.params['keepfile'] = os.path.abspath(newkeepfile)

            # otherwise, just copy the keepfile into the run directory
            elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
                shutil.copy(self.params['keepfile'],
                            os.path.basename(self.params['keepfile']))

        # if either exclude or include lists is defined
        elif self.params['exclude'] or self.params['include']:

            ### parse comma-separated list of particles to be excluded
            if self.params['exclude'] is not None:
                excludestrlist = self.params['exclude'].split(",")
                for excld in excludestrlist:
                    excludelist.append(int(excld.strip()))
            apDisplay.printMsg("Exclude list: " + str(excludelist))

            ### parse comma-separated list of particles to be included
            if self.params['include'] is not None:
                includestrlist = self.params['include'].split(",")
                for incld in includestrlist:
                    includelist.append(int(incld.strip()))
            apDisplay.printMsg("Include list: " + str(includelist))

        # new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        newname = stackdata['name']

        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

        # if include or exclude list is given, build the keepfile from it
        if self.params['include'] is not None or self.params[
                'exclude'] is not None:

            includeParticle = []
            excludeParticle = 0

            # sets make the per-particle membership tests O(1)
            includeset = set(includelist)
            excludeset = set(excludelist)
            for partnum in range(stacksize):
                if includeset and partnum in includeset:
                    includeParticle.append(partnum)
                elif excludeset and partnum not in excludeset:
                    includeParticle.append(partnum)
                else:
                    excludeParticle += 1
            includeParticle.sort()

            ### write kept particles to file
            self.params['keepfile'] = os.path.join(
                self.params['rundir'], "keepfile-" + self.timestamp + ".list")
            apDisplay.printMsg("writing to keepfile " +
                               self.params['keepfile'])
            with open(self.params['keepfile'], "w") as kf:
                for partnum in includeParticle:
                    kf.write(str(partnum) + "\n")

            # get number of particles
            numparticles = len(includeParticle)
            if excludelist or includelist:
                self.params['description'] += (
                    " ... %d particle substack of stackid %d" %
                    (numparticles, self.params['stackid']))

        ogdescr = self.params['description']
        for i in range(self.params['split']):
            ### always do this; if not splitting, split == 1
            sb = os.path.splitext(stackdata['name'])
            if self.params['first'] is not None and self.params[
                    'last'] is not None:
                newname = sb[0] + '.' + str(self.params['first']) + '-' + str(
                    self.params['last']) + sb[-1]
            elif self.params['random'] is not None:
                newname = "%s-random%d%s" % (sb[0], self.params['random'],
                                             sb[-1])
            elif self.params['split'] > 1:
                fname = 'sub' + str(
                    self.params['stackid']) + '.' + str(i + 1) + '.lst'
                self.params['keepfile'] = os.path.join(self.params['rundir'],
                                                       fname)
                newname = sb[0] + '.' + str(i + 1) + 'of' + str(
                    self.params['split']) + sb[-1]
            newstack = os.path.join(self.params['rundir'], newname)
            apStack.checkForPreviousStack(newstack)

            # count the particles actually kept for this (split) stack
            with open(self.params['keepfile'], "r") as f:
                numparticles = len(f.readlines())
            # rebuild the description from the original each pass so the
            # split annotation is not appended repeatedly
            self.params['description'] = ogdescr
            self.params['description'] += (
                (" ... %d particle substack of stackid %d" %
                 (numparticles, self.params['stackid'])))
            # if splitting, add to description
            if self.params['split'] > 1:
                self.params['description'] += (" (%i of %i)" %
                                               (i + 1, self.params['split']))

            # create the new sub stack
            if not self.params['correctbeamtilt']:
                apStack.makeNewStack(oldstack,
                                     newstack,
                                     self.params['keepfile'],
                                     bad=True)
            else:
                apBeamTilt.makeCorrectionStack(self.params['stackid'],
                                               oldstack, newstack)
            if not os.path.isfile(newstack):
                apDisplay.printError("No stack was created")
            apStack.commitSubStack(self.params, newname, sorted=False)
            apStack.averageStack(stack=newstack)
            newstackid = apStack.getStackIdFromPath(newstack)
            if self.params['meanplot'] is True:
                apDisplay.printMsg(
                    "creating Stack Mean Plot montage for stackid")
                apStackMeanPlot.makeStackMeanPlot(newstackid)
        def start(self):
                """Create a substack of an existing stack by class membership.

                Builds include/exclude class-number lists from run parameters
                (or a keep file), queries the align- or cluster-stack particles,
                filters them by shift/score/spread, writes the kept particle
                numbers to a keep file, makes the new stack file on disk, and
                (if committing) inserts it into the database with a mean plot.
                """
                ### new stack path
                stackdata = apStack.getOnlyStackData(self.params['stackid'])
                oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
                newstack = os.path.join(self.params['rundir'], stackdata['name'])
                apStack.checkForPreviousStack(newstack)

                includelist = []
                excludelist = []
                ### list of classes to be excluded
                if self.params['dropclasslist'] is not None:
                        excludestrlist = self.params['dropclasslist'].split(",")
                        for excludeitem in excludestrlist:
                                excludelist.append(int(excludeitem.strip()))
                apDisplay.printMsg("Exclude list: "+str(excludelist))

                ### list of classes to be included
                if self.params['keepclasslist'] is not None:
                        includestrlist = self.params['keepclasslist'].split(",")
                        for includeitem in includestrlist:
                                includelist.append(int(includeitem.strip()))

                ### or read from keepfile
                elif self.params['keepfile'] is not None:
                        keeplistfile = open(self.params['keepfile'])
                        for line in keeplistfile:
                                # 'excludefrom' flips the keepfile's meaning to an exclude list
                                if self.params['excludefrom'] is True:
                                        excludelist.append(int(line.strip()))
                                else:
                                        includelist.append(int(line.strip()))
                        keeplistfile.close()
                apDisplay.printMsg("Include list: "+str(includelist))

                ### get particles from align or cluster stack
                apDisplay.printMsg("Querying database for particles")
                q0 = time.time()
                # NOTE(review): if neither alignid nor clusterid is set,
                # `particles` is never assigned and the loop below raises a
                # NameError -- presumably validated upstream; confirm.
                if self.params['alignid'] is not None:
                        alignpartq =  appiondata.ApAlignParticleData()
                        alignpartq['alignstack'] = self.alignstackdata
                        particles = alignpartq.query()
                elif self.params['clusterid'] is not None:
                        clusterpartq = appiondata.ApClusteringParticleData()
                        clusterpartq['clusterstack'] = self.clusterstackdata
                        particles = clusterpartq.query()
                apDisplay.printMsg("Complete in "+apDisplay.timeString(time.time()-q0))

                ### write included particles to text file
                includeParticle = []
                excludeParticle = 0
                badscore = 0
                badshift = 0
                badspread = 0
                # audit log of every include/exclude decision
                # (hard-coded name, written to the current working directory)
                f = open("test.log", "w")
                count = 0
                for part in particles:
                        count += 1
                        #partnum = part['partnum']-1
                        # ClusteringParticles carry their alignment data under
                        # 'alignparticle'; AlignParticles are used directly
                        if 'alignparticle' in part:
                                alignpart = part['alignparticle']
                                classnum = int(part['refnum'])-1
                        else:
                                alignpart = part
                                classnum = int(part['ref']['refnum'])-1
                        # EMAN stack particle numbers are zero-indexed
                        emanstackpartnum = alignpart['stackpart']['particleNumber']-1

                        ### check shift
                        if self.params['maxshift'] is not None:
                                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                                if shift > self.params['maxshift']:
                                        excludeParticle += 1
                                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                                        badshift += 1
                                        continue


                        if self.params['minscore'] is not None:
                                ### check score
                                if ( alignpart['score'] is not None
                                 and alignpart['score'] < self.params['minscore'] ):
                                        excludeParticle += 1
                                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                                        badscore += 1
                                        continue

                                ### check spread
                                # NOTE(review): spread is compared against the
                                # *minscore* threshold (there is no separate
                                # spread parameter here) -- confirm intended
                                if ( alignpart['spread'] is not None
                                 and alignpart['spread'] < self.params['minscore'] ):
                                        excludeParticle += 1
                                        f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                                        badspread += 1
                                        continue

                        if includelist and classnum in includelist:
                                includeParticle.append(emanstackpartnum)
                                f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
                        elif excludelist and not classnum in excludelist:
                                includeParticle.append(emanstackpartnum)
                                f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
                        else:
                                excludeParticle += 1
                                f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))

                f.close()
                includeParticle.sort()
                if badshift > 0:
                        apDisplay.printMsg("%d paricles had a large shift"%(badshift))
                if badscore > 0:
                        apDisplay.printMsg("%d paricles had a low score"%(badscore))
                if badspread > 0:
                        apDisplay.printMsg("%d paricles had a low spread"%(badspread))
                apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

                #print includeParticle

                ### write kept particles to file
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
                kf = open(self.params['keepfile'], "w")
                for partnum in includeParticle:
                        kf.write(str(partnum)+"\n")
                kf.close()

                ### get number of particles
                numparticles = len(includeParticle)
                if excludelist:
                        self.params['description'] += ( " ... %d particle substack with %s classes excluded"
                                % (numparticles, self.params['dropclasslist']))
                elif includelist:
                        self.params['description'] += ( " ... %d particle substack with %s classes included"
                                % (numparticles, self.params['keepclasslist']))

                ### create the new sub stack
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])

                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")

                apStack.averageStack(stack=newstack)
                if self.params['commit'] is True:
                        apStack.commitSubStack(self.params)
                        newstackid = apStack.getStackIdFromPath(newstack)
                        apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
# Example #36
# 0
    def start(self):
        """Re-extract stack particles from aligned camera frames.

        For every parent image of the stack's particles: extract the raw
        camera frames to TIFFs (Gatan) or locate the existing frames
        directory (DE), write a per-image box file, run deProcessFrames.py
        over the frames, then rebuild the particle stack (framealigned.hed)
        from the aligned particles -- falling back to the original stack
        particle when no frames are found -- and commit it as a substack.
        """
        self.params['output_fileformat'] = 'mrc'
        newstackname = 'framealigned.hed'
        stackdata = apStack.getStackParticlesFromId(self.params['stackid'])
        stackrundata = apStack.getOnlyStackData(self.params['stackid'])
        # pixel size stored in meters; convert to Angstroms
        apix = stackrundata['pixelsize'] * 1e10
        # high tension stored in volts; convert to kV
        kev = stackdata[0]['particle']['image']['scope']['high tension'] / 1000
        origstackpath = os.path.join(stackrundata['path']['path'],
                                     stackrundata['name'])
        boxsize = stackdata[0]['stackRun']['stackParams']['boxSize']
        # NOTE(review): binning is read but never used below
        binning = stackdata[0]['stackRun']['stackParams']['bin']

        #determine camera type
        cameratype = stackdata[0]['particle']['image']['camera']['ccdcamera'][
            'name']
        if self.params['override_camera'] is not None:
            cameratype = self.params['override_camera']

        #create sorted boxfiles
        # imagedict: parent image filename -> list of its particle records
        # masterlist: one entry per stack particle, remembering its parent
        # image and its index within that image's particle list
        imagedict = {}
        masterlist = []
        for particle in stackdata:
            parentimage = particle['particle']['image']['filename']
            if parentimage in imagedict.keys():
                imagedict[parentimage].append(particle['particle'])
            else:
                imagedict[parentimage] = []
                imagedict[parentimage].append(particle['particle'])
            index = len(imagedict[parentimage]) - 1
            masterlist.append({
                'particle': particle,
                'key': parentimage,
                'index': index
            })
        #print masterlist

        for key in imagedict:
            particlelst = imagedict[key]
            parentimage = key
            framespath = particlelst[0]['image']['session']['frame path']

            print cameratype
            if 'Gatan' in cameratype:
                #prepare frames
                print framespath

                #prepare frame directory
                framespathname = os.path.join(self.params['rundir'],
                                              parentimage + '.frames')
                if os.path.exists(framespathname):
                    pass
                else:
                    os.mkdir(framespathname)
                print framespathname

                mrcframestackname = parentimage + '.frames.mrc'

                print mrcframestackname

                nframes = particlelst[0]['image']['camera']['nframes']

                # unpack each frame of the Gatan MRC frame stack into a
                # numbered TIFF (the layout deProcessFrames.py consumes)
                print "Extracting frames for", mrcframestackname
                for n in range(nframes):
                    a = mrc.read(os.path.join(framespath, mrcframestackname),
                                 n)
                    numpil.write(a,
                                 imfile=os.path.join(framespathname,
                                                     'RawImage_%d.tif' % (n)),
                                 format='tiff')

            elif 'DE' in cameratype:
                # DE cameras already store frames in a per-image directory
                framespathname = os.path.join(framespath,
                                              parentimage + '.frames')

            print os.getcwd()
            print framespathname
            #generate DE script call
            if os.path.exists(framespathname):
                print "found frames for", parentimage

                nframes = particlelst[0]['image']['camera']['nframes']
                boxname = parentimage + '.box'
                boxpath = os.path.join(framespathname, boxname)
                shiftdata = {'scale': 1, 'shiftx': 0, 'shifty': 0}

                #flatfield references
                brightrefpath = particlelst[0]['image']['bright']['session'][
                    'image path']
                brightrefname = particlelst[0]['image']['bright']['filename']
                brightnframes = particlelst[0]['image']['bright']['camera'][
                    'nframes']
                darkrefpath = particlelst[0]['image']['dark']['session'][
                    'image path']
                darkrefname = particlelst[0]['image']['dark']['filename']
                darknframes = particlelst[0]['image']['dark']['camera'][
                    'nframes']
                brightref = os.path.join(brightrefpath, brightrefname + '.mrc')
                darkref = os.path.join(darkrefpath, darkrefname + '.mrc')
                print brightref
                print darkref
                # NOTE(review): `particle` here is the stale loop variable
                # left over from the stackdata loop above (i.e. the *last*
                # stack particle), not one belonging to this parent image --
                # looks like it should be particlelst[0]; confirm before fixing
                apBoxer.processParticleData(particle['particle']['image'],
                                            boxsize, particlelst, shiftdata,
                                            boxpath)
                print framespathname

                #set appion specific options
                self.params['gainreference_filename'] = brightref
                self.params['gainreference_framecount'] = brightnframes
                self.params['darkreference_filename'] = darkref
                self.params['darkreference_framecount'] = darknframes
                self.params['input_framecount'] = nframes
                self.params['boxes_fromfiles'] = 1
                #self.params['run_verbosity']=3
                self.params['output_invert'] = 0
                #self.params['radiationdamage_apix=']=apix
                self.params['radiationdamage_voltage'] = kev
                #self.params['boxes_boxsize']=boxsize

                outpath = os.path.join(self.params['rundir'], key)
                if os.path.exists(outpath):
                    shutil.rmtree(outpath)
                os.mkdir(outpath)

                # every non-empty param becomes a --key=value option
                # NOTE(review): the inner `key` shadows the outer image-key
                # loop variable; harmless here because the outer `key` is not
                # used again this iteration, but fragile
                command = ['deProcessFrames.py']
                keys = self.params.keys()
                keys.sort()
                for key in keys:
                    param = self.params[key]
                    #print key, param, type(param)
                    if param == None or param == '':
                        pass
                    else:
                        option = '--%s=%s' % (key, param)
                        command.append(option)
                command.append(outpath)
                command.append(framespathname)
                print command
                if self.params['dryrun'] is False:
                    subprocess.call(command)

        #recreate particle stack
        for n, particledict in enumerate(masterlist):
            parentimage = particledict['key']
            correctedpath = os.path.join(self.params['rundir'], parentimage)
            print correctedpath
            if os.path.exists(correctedpath):

                correctedparticle = glob.glob(
                    os.path.join(correctedpath,
                                 ('%s.*.region_%03d.*' %
                                  (parentimage, particledict['index']))))
                print os.path.join(correctedpath,
                                   ('%s.*.region_%03d.*' %
                                    (parentimage, particledict['index'])))
                print correctedparticle
                #sys.exit()
                # append the aligned particle image to the new stack
                command = ['proc2d', correctedparticle[0], newstackname]
                if self.params['output_rotation'] != 0:
                    command.append('rot=%d' % self.params['output_rotation'])

                if self.params['show_DE_command'] is True:
                    print command
                subprocess.call(command)
            else:
                # no aligned frames found: copy particle n unchanged from the
                # original stack so indexing stays consistent
                print "did not find frames for ", parentimage
                command = [
                    'proc2d', origstackpath, newstackname, ('first=%d' % n),
                    ('last=%d' % n)
                ]
                print command
                if self.params['dryrun'] is False:
                    subprocess.call(command)

        #upload stack

        #make keep file
        # keep every particle: one zero-indexed entry per stack particle
        self.params['keepfile'] = 'keepfile.txt'
        f = open(self.params['keepfile'], 'w')
        for n in range(len(masterlist)):
            f.write('%d\n' % (n))
        f.close()

        apStack.commitSubStack(self.params, newname=newstackname)
        apStack.averageStack(stack=newstackname)

        print "Done!!!!"
    # NOTE(review): orphaned function body -- it references `stackdata` and
    # `params`, which are not defined in the surrounding visible code; the
    # enclosing `def` line appears to be missing from this chunk.
    # Materializes a virtual stack: writes its particles to a real stack
    # file, plus an average image and a mean-plot montage.
    stackpath = stackdata['path']['path']
    # generate stack if it doesn't exist.
    if not os.path.isdir(stackpath):
        os.makedirs(stackpath)
    fname = os.path.join(stackpath, stackdata['name'])

    # check if stack file already exists
    if os.path.isfile(fname):
        apDisplay.printError("file: '%s' already exists" % fname)

    # zero-indexed particle numbers making up the virtual stack
    vstackdata = apStack.getVirtualStackParticlesFromId(params['stackid'])
    plist = [int(p['particleNumber']) - 1 for p in vstackdata['particles']]

    a = proc2dLib.RunProc2d()
    a.setValue('infile', vstackdata['filename'])
    a.setValue('outfile', fname)
    a.setValue('list', plist)
    a.setValue('apix', apStack.getStackPixelSizeFromStackId(params['stackid']))

    apDisplay.printMsg("generating stack: '%s' with %i particles" %
                       (fname, len(plist)))
    a.run()

    # write the stack average image if not already present
    outavg = os.path.join(stackpath, "average.mrc")
    if not os.path.isfile(outavg):
        apStack.averageStack(stack=fname, outfile=outavg)

    # create the mean/stdev montage plot if not already present
    montageimg = os.path.join(stackpath, "montage%i.png" % params['stackid'])
    if not os.path.isfile(montageimg):
        apStackMeanPlot.makeStackMeanPlot(params['stackid'], gridpoints=4)
    # NOTE(review): orphaned function body, a near-duplicate of the fragment
    # above (double-quoted string style); `params` is not defined in the
    # visible scope -- the enclosing `def` line is missing from this chunk.
    # Materializes a virtual stack into a real stack file plus an average
    # image and a mean-plot montage.
    stackdata = apStack.getOnlyStackData(params["stackid"])
    stackpath = stackdata["path"]["path"]
    # generate stack if it doesn't exist.
    if not os.path.isdir(stackpath):
        os.makedirs(stackpath)
    fname = os.path.join(stackpath, stackdata["name"])

    # check if stack file already exists
    if os.path.isfile(fname):
        apDisplay.printError("file: '%s' already exists" % fname)

    # zero-indexed particle numbers making up the virtual stack
    vstackdata = apStack.getVirtualStackParticlesFromId(params["stackid"])
    plist = [int(p["particleNumber"]) - 1 for p in vstackdata["particles"]]

    a = proc2dLib.RunProc2d()
    a.setValue("infile", vstackdata["filename"])
    a.setValue("outfile", fname)
    a.setValue("list", plist)
    a.setValue("apix", apStack.getStackPixelSizeFromStackId(params["stackid"]))

    apDisplay.printMsg("generating stack: '%s' with %i particles" % (fname, len(plist)))
    a.run()

    # write the stack average image if not already present
    outavg = os.path.join(stackpath, "average.mrc")
    if not os.path.isfile(outavg):
        apStack.averageStack(stack=fname, outfile=outavg)

    # create the mean/stdev montage plot if not already present
    montageimg = os.path.join(stackpath, "montage%i.png" % params["stackid"])
    if not os.path.isfile(montageimg):
        apStackMeanPlot.makeStackMeanPlot(params["stackid"], gridpoints=4)
# Example #39
# 0
    def start(self):
        partdict = {}
        partlist = []
        ### get Euler angles for each particle
        for iternum in self.iternums:
            ### get recon iter data
            reconiterq = appiondata.ApRefineIterData()
            reconiterq['refineRun'] = self.reconrundata
            reconiterq['iteration'] = iternum
            reconiterdata = reconiterq.query(
                results=1)[0]  #this should be unique

            ### get particle data
            reconpartq = appiondata.ApRefineParticleData()
            reconpartq['refineIter'] = reconiterdata
            apDisplay.printMsg("Querying for particles at " + time.asctime())
            reconpartdatas = reconpartq.query()

            ### group particle data
            for partdata in reconpartdatas:
                partnum = partdata['particle']['particleNumber']
                if not partnum in partlist:
                    partlist.append(partnum)
                partdict[(partnum, iternum)] = partdata

        ### run through particles and check Euler angles
        partlist.sort()
        eulerdict = {}
        eulercount = {}
        reject = 0
        for partnum in partlist:
            e1d = {}
            e2d = {}
            for iternum in self.iternums:
                if not (partnum, iternum) in partdict:
                    continue
                partdata = partdict[(partnum, iternum)]
                euler1 = "%.2f" % (partdata['euler1'])
                if not euler1 in e1d:
                    e1d[euler1] = 1
                else:
                    e1d[euler1] += 1
                euler2 = "%.2f" % (partdata['euler2'])
                if not euler2 in e2d:
                    e2d[euler2] = 1
                else:
                    e2d[euler2] += 1
                #print partnum, euler1, euler2
            counts = [(val, key) for key, val in e1d.items()]
            e1count, euler1 = max(counts)
            counts = [(val, key) for key, val in e2d.items()]
            e2count, euler2 = max(counts)

            # reject indeterminant particles
            if e2count < 2 or e1count < 2:
                reject += 1
                continue

            ### group particles by their Euler angles
            if not (euler1, euler2) in eulerdict:
                eulerdict[(euler1, euler2)] = []
                eulercount[(euler1, euler2)] = 0
            eulerdict[(euler1, euler2)].append(partnum)
            eulercount[(euler1, euler2)] += 1

        print "Rejected %d particles" % (reject)

        values = eulercount.values()
        values.sort()
        print values

        ### run through Euler angles and count particles
        counts = [(val, key) for key, val in eulercount.items()]
        mincount, val = min(counts)
        self.params['mincount'] = max(self.params['mincount'], mincount)
        #print "Keeping %d of %d particles"%(mincount*len(eulercount.keys()), len(partlist))
        print "Keeping %d of %d particles" % (
            self.params['mincount'] * len(eulercount.keys()), len(partlist))

        keeplist = []
        for key in eulerdict.keys():
            eulerpartlist = eulerdict[key]
            if len(partlist) < self.params['mincount']:
                keeplist.extend(eulerpartlist)
            else:
                keeplist.extend(eulerpartlist[:self.params['mincount']])
        keeplist.sort()
        print "Keeping %d of %d particles" % (len(keeplist), len(partlist))

        #need to set keepfile for commitSubStack
        self.params['keepfile'] = os.path.join(self.params['rundir'],
                                               "equalviews.lst")
        f = open(self.params['keepfile'], "w")
        for partnum in keeplist:
            f.write("%d\n" % (partnum - 1))
        f.close()

        ### make a new stack using the keep particles
        oldstackdata = self.reconrundata['stack']

        oldstack = os.path.join(oldstackdata['path']['path'],
                                oldstackdata['name'])
        newstack = os.path.join(self.params['rundir'], "start.hed")
        apStack.makeNewStack(oldstack,
                             newstack,
                             listfile=self.params['keepfile'],
                             remove=True,
                             bad=True)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        self.params[
            'stackid'] = oldstackdata.dbid  #need to set stackid for commitSubStack
        apStack.commitSubStack(self.params, "start.hed")
        apStack.averageStack(stack=newstack)
    def start(self):
        """Create a substack by filtering align/cluster particles by class.

        Same pipeline as the class-based substack method above, but aligned
        particles are fetched with a direct SQL query for speed, virtual
        stacks are supported, and writing the new stack file to disk is
        optional (``writefile``).  Particles that were never assigned a class
        are always excluded.
        """
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params["stackid"])
        oldstack = os.path.join(stackdata["path"]["path"], stackdata["name"])
        newstack = os.path.join(self.params["rundir"], stackdata["name"])
        apStack.checkForPreviousStack(newstack)

        includelist = []
        excludelist = []
        ### list of classes to be excluded
        if self.params["dropclasslist"] is not None:
            excludestrlist = self.params["dropclasslist"].split(",")
            for excludeitem in excludestrlist:
                excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))

        ### list of classes to be included
        if self.params["keepclasslist"] is not None:
            includestrlist = self.params["keepclasslist"].split(",")
            for includeitem in includestrlist:
                includelist.append(int(includeitem.strip()))

        ### or read from keepfile
        elif self.params["keepfile"] is not None:
            keeplistfile = open(self.params["keepfile"])
            for line in keeplistfile:
                # 'excludefrom' flips the keepfile's meaning to an exclude list
                if self.params["excludefrom"] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
            keeplistfile.close()
        apDisplay.printMsg("Include list: " + str(includelist))

        ### get particles from align or cluster stack
        apDisplay.printMsg("Querying database for particles")
        q0 = time.time()

        if self.params["alignid"] is not None:
            # DIRECT SQL STUFF
            sqlcmd = (
                "SELECT "
                + "apd.partnum, "
                + "apd.xshift, apd.yshift, "
                + "apd.rotation, apd.mirror, "
                + "apd.spread, apd.correlation, "
                + "apd.score, apd.bad, "
                + "spd.particleNumber, "
                + "ard.refnum "
                + "FROM ApAlignParticleData apd "
                + "LEFT JOIN ApStackParticleData spd ON "
                + "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) "
                + "LEFT JOIN ApAlignReferenceData ard ON"
                + "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) "
                + "WHERE `REF|ApAlignStackData|alignstack` = %i" % (self.params["alignid"])
            )
            # These are AlignParticles
            particles = sinedon.directq.complexMysqlQuery("appiondata", sqlcmd)

        elif self.params["clusterid"] is not None:
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq["clusterstack"] = self.clusterstackdata
            # These are ClusteringParticles
            particles = clusterpartq.query()
        apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - q0)))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        badscore = 0
        badshift = 0
        badspread = 0

        # audit log of every include/exclude decision (cwd-relative)
        f = open("test.log", "w")
        count = 0
        t0 = time.time()
        apDisplay.printMsg("Parsing particle information")

        # find out if there is alignparticle info:
        is_cluster_p = False
        # 'alignparticle' is a key of any particle in `particles` if the
        # latter is a ClusteringParticle
        if "alignparticle" in particles[0]:
            is_cluster_p = True

        for part in particles:
            count += 1
            if is_cluster_p:
                # alignpart is an item of ClusteringParticle
                alignpart = part["alignparticle"]
                try:
                    classnum = int(part["refnum"]) - 1
                except:
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    # BUG FIX: classnum was left unset on this path (only the
                    # direct-query branch below set it to None), producing a
                    # NameError on the first particle or silently reusing the
                    # previous particle's class number afterwards.
                    classnum = None
                emanstackpartnum = alignpart["stackpart"]["particleNumber"] - 1
            else:
                # particle has info from AlignedParticle as results of direct query
                alignpart = part
                try:
                    classnum = int(alignpart["refnum"]) - 1
                except:
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    classnum = None
                emanstackpartnum = int(alignpart["particleNumber"]) - 1

            ### check shift
            if self.params["maxshift"] is not None:
                shift = math.hypot(alignpart["xshift"], alignpart["yshift"])
                if shift > self.params["maxshift"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badshift += 1
                    continue

            if self.params["minscore"] is not None:
                ### check score
                if alignpart["score"] is not None and alignpart["score"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badscore += 1
                    continue

                ### check spread (compared against the minscore threshold;
                ### there is no separate spread parameter)
                if alignpart["spread"] is not None and alignpart["spread"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badspread += 1
                    continue

            if classnum is not None:
                if includelist and (classnum in includelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                elif excludelist and not (classnum in excludelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                else:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
            else:
                # unclassified particles are always excluded
                excludeParticle += 1
                f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))

        f.close()

        includeParticle.sort()
        if badshift > 0:
            apDisplay.printMsg("%d paricles had a large shift" % (badshift))
        if badscore > 0:
            apDisplay.printMsg("%d paricles had a low score" % (badscore))
        if badspread > 0:
            apDisplay.printMsg("%d paricles had a low spread" % (badspread))
        apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - t0)))
        apDisplay.printMsg(
            "Keeping " + str(len(includeParticle)) + " and excluding " + str(excludeParticle) + " particles"
        )

        ### write kept particles to file
        self.params["keepfile"] = os.path.join(self.params["rundir"], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params["keepfile"])
        kf = open(self.params["keepfile"], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params["description"] += " ... %d particle substack with %s classes excluded" % (
                numparticles,
                self.params["dropclasslist"],
            )
        elif includelist:
            self.params["description"] += " ... %d particle substack with %s classes included" % (
                numparticles,
                self.params["keepclasslist"],
            )

        outavg = os.path.join(self.params["rundir"], "average.mrc")

        ### create the new sub stack
        # first check if virtual stack
        if not os.path.isfile(oldstack):
            vstackdata = apStack.getVirtualStackParticlesFromId(self.params["stackid"])
            vparts = vstackdata["particles"]
            oldstack = vstackdata["filename"]
            # get subset of virtualstack (map to zero-indexed source numbers)
            vpartlist = [int(vparts[p]["particleNumber"]) - 1 for p in includeParticle]

            if self.params["writefile"] is True:
                apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params["savebad"])

            apStack.averageStack(stack=oldstack, outfile=outavg, partlist=vpartlist)
        else:
            if self.params["writefile"] is True:
                apStack.makeNewStack(oldstack, newstack, self.params["keepfile"], bad=self.params["savebad"])
            apStack.averageStack(stack=oldstack, outfile=outavg, partlist=includeParticle)

        if self.params["writefile"] is True and not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        if self.params["commit"] is True:
            apStack.commitSubStack(self.params, included=includeParticle)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
	def start(self):
		partdict = {}
		partlist = []
		### get Euler angles for each particle
		for iternum in self.iternums:
			### get recon iter data
			reconiterq = appiondata.ApRefineIterData()
			reconiterq['refineRun'] = self.reconrundata
			reconiterq['iteration'] = iternum
			reconiterdata = reconiterq.query(results=1)[0] #this should be unique

			### get particle data
			reconpartq = appiondata.ApRefineParticleData()
			reconpartq['refineIter'] = reconiterdata
			apDisplay.printMsg("Querying for particles at "+time.asctime())
			reconpartdatas = reconpartq.query()

			### group particle data
			for partdata in reconpartdatas:
				partnum = partdata['particle']['particleNumber']
				if not partnum in partlist:
					partlist.append(partnum)
				partdict[(partnum, iternum)] = partdata

		### run through particles and check Euler angles
		partlist.sort()
		eulerdict = {}
		eulercount = {}
		reject = 0
		for partnum in partlist:
			e1d = {}
			e2d = {}
			for iternum in self.iternums:
				if not (partnum, iternum) in partdict:
					continue
				partdata = partdict[(partnum, iternum)]
				euler1 = "%.2f"%(partdata['euler1'])
				if not euler1 in e1d:
					e1d[euler1] = 1
				else:
					e1d[euler1] += 1
				euler2 = "%.2f"%(partdata['euler2'])
				if not euler2 in e2d:
					e2d[euler2] = 1
				else:
					e2d[euler2] += 1
				#print partnum, euler1, euler2
			counts = [(val,key) for key,val in e1d.items()]
			e1count, euler1 = max(counts)
			counts = [(val,key) for key,val in e2d.items()]
			e2count, euler2 = max(counts)

			# reject indeterminant particles
			if e2count < 2 or e1count < 2:
				reject += 1
				continue

			### group particles by their Euler angles
			if not (euler1,euler2) in eulerdict:
				eulerdict[(euler1,euler2)] = []
				eulercount[(euler1,euler2)] = 0
			eulerdict[(euler1,euler2)].append(partnum)
			eulercount[(euler1,euler2)] += 1

		print "Rejected %d particles"%(reject)

		values = eulercount.values()
		values.sort()
		print values

		### run through Euler angles and count particles
		counts = [(val,key) for key,val in eulercount.items()]
		mincount, val = min(counts)
		self.params['mincount'] = max(self.params['mincount'], mincount)
		#print "Keeping %d of %d particles"%(mincount*len(eulercount.keys()), len(partlist))
		print "Keeping %d of %d particles"%(self.params['mincount']*len(eulercount.keys()), len(partlist))

		keeplist = []
		for key in eulerdict.keys():
			eulerpartlist = eulerdict[key]
			if len(partlist) < self.params['mincount']:
				keeplist.extend(eulerpartlist)
			else:
				keeplist.extend(eulerpartlist[:self.params['mincount']])
		keeplist.sort()
		print "Keeping %d of %d particles"%(len(keeplist), len(partlist))

		#need to set keepfile for commitSubStack
		self.params['keepfile'] = os.path.join(self.params['rundir'], "equalviews.lst")
		f = open(self.params['keepfile'], "w")
		for partnum in keeplist:
			f.write("%d\n"%(partnum-1))
		f.close()

		### make a new stack using the keep particles
		oldstackdata = self.reconrundata['stack']

		oldstack = os.path.join(oldstackdata['path']['path'], oldstackdata['name'])
		newstack = os.path.join(self.params['rundir'], "start.hed")
		apStack.makeNewStack(oldstack, newstack, listfile=self.params['keepfile'], remove=True, bad=True)
		if not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")
		self.params['stackid'] = oldstackdata.dbid #need to set stackid for commitSubStack
		apStack.commitSubStack(self.params, "start.hed")
		apStack.averageStack(stack=newstack)
	def start(self):
		"""
		Create a substack of particles filtered by alignment/clustering class.

		Builds class include/exclude lists from the run parameters (or a
		keep file), queries the aligned particles (direct SQL for speed)
		or the clustered particles, drops particles failing the optional
		maxshift / minscore / spread cutoffs, writes the surviving
		particle numbers to a keep file, then creates and averages the
		new substack and commits it to the database if requested.
		"""
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
		newstack = os.path.join(self.params['rundir'], stackdata['name'])
		apStack.checkForPreviousStack(newstack)

		includelist = []
		excludelist = []
		### list of classes to be excluded
		if self.params['dropclasslist'] is not None:
			excludestrlist = self.params['dropclasslist'].split(",")
			for excludeitem in excludestrlist:
				excludelist.append(int(excludeitem.strip()))
		apDisplay.printMsg("Exclude list: "+str(excludelist))

		### list of classes to be included
		if self.params['keepclasslist'] is not None:
			includestrlist = self.params['keepclasslist'].split(",")
			for includeitem in includestrlist:
				includelist.append(int(includeitem.strip()))

		### or read class numbers from a keep file
		elif self.params['keepfile'] is not None:
			keeplistfile = open(self.params['keepfile'])
			for line in keeplistfile:
				if self.params['excludefrom'] is True:
					excludelist.append(int(line.strip()))
				else:
					includelist.append(int(line.strip()))
			keeplistfile.close()
		apDisplay.printMsg("Include list: "+str(includelist))

		### get particles from align or cluster stack
		apDisplay.printMsg("Querying database for particles")
		q0 = time.time()

		particles = []
		if self.params['alignid'] is not None:
			### direct SQL query: much faster than sinedon for large stacks
			sqlcmd = "SELECT " + \
				"apd.partnum, " + \
				"apd.xshift, apd.yshift, " + \
				"apd.rotation, apd.mirror, " + \
				"apd.spread, apd.correlation, " + \
				"apd.score, apd.bad, " + \
				"spd.particleNumber, " + \
				"ard.refnum "+ \
				"FROM ApAlignParticleData apd " + \
				"LEFT JOIN ApStackParticleData spd ON " + \
				"(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) " + \
				"LEFT JOIN ApAlignReferenceData ard ON" + \
				"(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) " + \
				"WHERE `REF|ApAlignStackData|alignstack` = %i"%(self.params['alignid'])
			# These are AlignParticles
			particles = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)

		elif self.params['clusterid'] is not None:
			clusterpartq = appiondata.ApClusteringParticleData()
			clusterpartq['clusterstack'] = self.clusterstackdata
			# These are ClusteringParticles
			particles = clusterpartq.query()
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-q0)))
		### fail with a clear message instead of IndexError/NameError below
		if not particles:
			apDisplay.printError("no particles found; provide a valid alignid or clusterid")

		### write included particles to text file
		includeParticle = []
		excludeParticle = 0
		badscore = 0
		badshift = 0
		badspread = 0

		### debug log of the include/exclude decision for every particle
		f = open("test.log", "w")
		count = 0
		t0 = time.time()
		apDisplay.printMsg("Parsing particle information")

		# find out if there is alignparticle info:
		# 'alignparticle' is a key of any particle in particles only if
		# the latter is a ClusteringParticle
		is_cluster_p = False
		if 'alignparticle' in particles[0]:
			is_cluster_p = True

		for part in particles:
			count += 1
			### BUG FIX: reset classnum every iteration; previously a
			### failed refnum lookup in the cluster branch left classnum
			### unbound (NameError) or stale from the previous particle
			classnum = None
			if is_cluster_p:
				# alignpart is an item of ClusteringParticle
				alignpart = part['alignparticle']
				try:
					classnum = int(part['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
				emanstackpartnum = alignpart['stackpart']['particleNumber']-1
			else:
				# particle has info from AlignedParticle as results of direct query
				alignpart = part
				try:
					classnum = int(alignpart['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
				emanstackpartnum = int(alignpart['particleNumber'])-1

			### check shift
			if self.params['maxshift'] is not None:
				shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
				if shift > self.params['maxshift']:
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badshift += 1
					continue

			if self.params['minscore'] is not None:
				### check score
				if ( alignpart['score'] is not None
				 and alignpart['score'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badscore += 1
					continue

				### check spread
				### NOTE(review): spread is compared against 'minscore';
				### there is no separate spread cutoff parameter -- confirm intended
				if ( alignpart['spread'] is not None
				 and alignpart['spread'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badspread += 1
					continue

			### class membership filter; particles with no class are excluded
			if classnum is not None:
				if includelist and (classnum in includelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				elif excludelist and not (classnum in excludelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				else:
					excludeParticle += 1
					f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
			else:
				excludeParticle += 1
				f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))

		f.close()

		includeParticle.sort()
		if badshift > 0:
			apDisplay.printMsg("%d particles had a large shift"%(badshift))
		if badscore > 0:
			apDisplay.printMsg("%d particles had a low score"%(badscore))
		if badspread > 0:
			apDisplay.printMsg("%d particles had a low spread"%(badspread))
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-t0)))
		apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### get number of particles
		numparticles = len(includeParticle)
		if excludelist:
			self.params['description'] += ( " ... %d particle substack with %s classes excluded"
				% (numparticles, self.params['dropclasslist']))
		elif includelist:
			self.params['description'] += ( " ... %d particle substack with %s classes included"
				% (numparticles, self.params['keepclasslist']))

		outavg = os.path.join(self.params['rundir'],"average.mrc")

		### create the new sub stack
		# first check if virtual stack
		if not os.path.isfile(oldstack):
			vstackdata=apStack.getVirtualStackParticlesFromId(self.params['stackid'])
			vparts = vstackdata['particles']
			oldstack = vstackdata['filename']
			# map keep list through the virtual stack's particle numbers
			vpartlist = [int(vparts[p]['particleNumber'])-1 for p in includeParticle]

			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params['savebad'])

			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=vpartlist)
		else:
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=includeParticle)

		if self.params['writefile'] is True and not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")

		if self.params['commit'] is True:
			apStack.commitSubStack(self.params,included=includeParticle)
			newstackid = apStack.getStackIdFromPath(newstack)
			apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
        def commitToDatabase(self):
                """
                Insert the EMAN refine2d alignment results into the database.

                Expected results for an alignment run:
                1. aligned particle stack in IMAGIC format
                2. rotation, shift, and quality parameters for each particle
                3. which particle belongs to which class
                4. stack file with the class averages
                """
                alignedstack = os.path.join(self.params['rundir'], "ptcl.hed")
                refstack = os.path.join(self.params['rundir'], "iter.final.hed")
                averagemrc = os.path.join(self.params['rundir'], "average.mrc")
                apStack.averageStack(alignedstack, averagemrc)
                ### map each EMAN particle index to its final class average
                particlemapping = self.determineClassOwnership()

                ### setup alignment run; refuse to overwrite an existing run
                alignrunq = appiondata.ApAlignRunData()
                alignrunq['runname'] = self.params['runname']
                alignrunq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                uniquerun = alignrunq.query(results=1)
                if uniquerun:
                        ### BUG FIX: referenced undefined name 'runparams' and
                        ### had a garbled error message
                        apDisplay.printError("Run name '"+self.params['runname']
                                +"' and path already exist in database")

                ### setup eman refine2d run
                emanrefinetwodq = appiondata.ApEMANRefine2dRunData()
                emanrefinetwodq['runname'] = self.params['runname']
                emanrefinetwodq['run_seconds'] = time.time() - self.t0
                emanrefinetwodq['num_iters'] = self.params['numiter']
                emanrefinetwodq['num_classes'] = self.params['numclasses']

                ### finish alignment run
                alignrunq['refine2drun'] = emanrefinetwodq
                alignrunq['hidden'] = False
                alignrunq['runname'] = self.params['runname']
                alignrunq['description'] = self.params['description']
                alignrunq['lp_filt'] = self.params['lowpass']
                alignrunq['hp_filt'] = self.params['highpass']
                alignrunq['bin'] = self.params['bin']

                ### setup alignment stack
                alignstackq = appiondata.ApAlignStackData()
                alignstackq['imagicfile'] = os.path.basename(alignedstack)
                alignstackq['avgmrcfile'] = os.path.basename(averagemrc)
                alignstackq['refstackfile'] = os.path.basename(refstack)
                alignstackq['iteration'] = self.params['numiter']
                alignstackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                alignstackq['alignrun'] = alignrunq

                ### check to make sure files exist
                alignimagicfilepath = os.path.join(self.params['rundir'], alignstackq['imagicfile'])
                if not os.path.isfile(alignimagicfilepath):
                        apDisplay.printError("could not find stack file: "+alignimagicfilepath)
                avgmrcfile = os.path.join(self.params['rundir'], alignstackq['avgmrcfile'])
                if not os.path.isfile(avgmrcfile):
                        apDisplay.printError("could not find average mrc file: "+avgmrcfile)
                refstackfile = os.path.join(self.params['rundir'], alignstackq['refstackfile'])
                if not os.path.isfile(refstackfile):
                        ### BUG FIX: was a garbled attribute name 'printErrrefqor'
                        ### that would have raised AttributeError
                        apDisplay.printError("could not find reference stack file: "+refstackfile)

                ### continue setting values
                alignstackq['stack'] = apStack.getOnlyStackData(self.params['stackid'])
                alignstackq['boxsize'] = apFile.getBoxSize(alignimagicfilepath)[0]
                alignstackq['pixelsize'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])*self.params['bin']
                alignstackq['description'] = self.params['description']
                alignstackq['hidden'] =  False
                alignstackq['num_particles'] = apFile.numImagesInStack(alignimagicfilepath)

                ### inserting particles and references
                apDisplay.printColor("Inserting particle alignment data, please wait", "cyan")
                for emanpartnum in range(self.params['numpart']):
                        partnum = emanpartnum+1
                        ### progress dot every 100 particles
                        if partnum % 100 == 0:
                                sys.stderr.write(".")

                        ### setup reference for this particle's class
                        refq = appiondata.ApAlignReferenceData()
                        refnum = particlemapping[emanpartnum]
                        refq['refnum'] = refnum
                        refq['iteration'] = self.params['numiter']
                        refq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                        refq['alignrun'] = alignrunq

                        ### TODO: create an mrc file for the reference and
                        ### set refq['mrcfile'] / refq['ssnr_resolution']

                        ### setup particle
                        alignpartq = appiondata.ApAlignParticleData()
                        alignpartq['partnum'] = partnum
                        alignpartq['alignstack'] = alignstackq
                        stackpartdata = apStack.getStackParticle(self.params['stackid'], partnum)
                        alignpartq['stackpart'] = stackpartdata
                        ### TODO: fill in the alignment parameters (xshift,
                        ### yshift, rotation, mirror) and the score
                        alignpartq['ref'] = refq

                        ### insert; sinedon inserts the referenced run, stack,
                        ### and reference records recursively as needed
                        if self.params['commit'] is True:
                                alignpartq.insert()

                return
    def start(self):
        """
        Run the Xmipp CL2D 2D classification pipeline and upload results.

        Steps: pre-process the stack into a local binned/filtered IMAGIC
        file with EMAN proc2d, break it into single spider files, run the
        MPI xmipp_mpi_class_averages program, post-process the output, then
        insert the CL2D parameters, aligned stack, and per-level clustering
        stacks into the database.
        """
        #               self.insertCL2DJob()
        # gather stack metadata needed for pre-processing
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid'])
        self.stack['part'] = apStack.getOneParticleFromStackId(
            self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])

        ### process stack to local file
        if self.params['timestamp'] is None:
            apDisplay.printMsg("creating timestamp")
            self.params['timestamp'] = self.timestamp
        self.params['localstack'] = os.path.join(
            self.params['rundir'], self.params['timestamp'] + ".hed")
        if os.path.isfile(self.params['localstack']):
            apFile.removeStack(self.params['localstack'])
        # EMAN proc2d: bin, clip, filter, and truncate the input stack
        proccmd = "proc2d " + self.stack['file'] + " " + self.params[
            'localstack'] + " apix=" + str(self.stack['apix'])
        if self.params['bin'] > 1 or self.params['clipsize'] is not None:
            # NOTE(review): reads self.clipsize (set elsewhere), not
            # self.params['clipsize'] -- confirm both are kept in sync
            clipsize = int(self.clipsize) * self.params['bin']
            if clipsize % 2 == 1:
                clipsize += 1  ### making sure that clipped boxsize is even
            proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'],
                                                   clipsize, clipsize)
        proccmd += " last=" + str(self.params['numpart'] - 1)
        if self.params['highpass'] is not None and self.params['highpass'] > 1:
            proccmd += " hp=" + str(self.params['highpass'])
        if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
            proccmd += " lp=" + str(self.params['lowpass'])
        apParam.runCmd(proccmd, "EMAN", verbose=True)
        # sanity check: proc2d must have produced exactly numpart images
        if self.params['numpart'] != apFile.numImagesInStack(
                self.params['localstack']):
            apDisplay.printError("Missing particles in stack")

        ### convert stack into single spider files
        self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(
            self.params['localstack'])

        ### setup Xmipp command
        aligntime = time.time()
        xmippopts = (
            " " + " -i " +
            os.path.join(self.params['rundir'], self.partlistdocfile) +
            " -codes " + str(self.params['numrefs']) + " -iter " +
            str(self.params['maxiter']) + " -o " + os.path.join(
                self.params['rundir'], "part" + self.params['timestamp']))
        if self.params['fast']:
            xmippopts += " -fast "
        if self.params['correlation']:
            xmippopts += " -useCorrelation "
        if self.params['classical']:
            xmippopts += " -classicalMultiref "
        if self.params['align']:
            xmippopts += " -alignImages "

        ### use multi-processor command
        apDisplay.printColor(
            "Using " + str(self.params['nproc']) + " processors!", "green")
        xmippexe = apParam.getExecPath("xmipp_mpi_class_averages", die=True)
        mpiruncmd = self.mpirun + " -np " + str(
            self.params['nproc']) + " " + xmippexe + " " + xmippopts
        self.writeXmippLog(mpiruncmd)
        apParam.runCmd(mpiruncmd,
                       package="Xmipp",
                       verbose=True,
                       showcmd=True,
                       logfile="xmipp.std")
        self.params['runtime'] = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(self.params['runtime']))

        ### minor post-processing
        self.createReferenceStack()
        self.parseOutput()
        self.clearIntermediateFiles()
        #               self.readyUploadFlag()
        # persist parameters so the upload phase can re-read them
        apParam.dumpParameters(
            self.params, "cl2d-" + self.params['timestamp'] + "-params.pickle")

        ### upload results ... this used to be two separate operations, I'm combining into one
        self.runparams = apParam.readRunParameters("cl2d-" +
                                                   self.params['timestamp'] +
                                                   "-params.pickle")
        self.apix = apStack.getStackPixelSizeFromStackId(
            self.runparams['stackid']) * self.runparams['bin']
        # number of CL2D hierarchy levels = number of level output stacks
        # (glob is relative, so this assumes cwd is the run directory)
        self.Nlevels = len(
            glob.glob("part" + self.params['timestamp'] + "_level_??_.hed"))

        ### create average of aligned stacks & insert aligned stack info
        lastLevelStack = "part" + self.params[
            'timestamp'] + "_level_%02d_.hed" % (self.Nlevels - 1)
        apStack.averageStack(lastLevelStack)
        self.boxsize = apFile.getBoxSize(lastLevelStack)[0]
        self.insertCL2DParamsIntoDatabase()
        if self.runparams['align'] is True:
            self.insertAlignStackRunIntoDatabase("alignedStack.hed")
            self.calcResolution(self.Nlevels - 1)
            self.insertAlignParticlesIntoDatabase(level=self.Nlevels - 1)

        ### loop over each class average stack & insert as clustering stacks
        self.insertClusterRunIntoDatabase()
        for level in range(self.Nlevels):
            ### NOTE: RESOLUTION CAN ONLY BE CALCULATED IF ALIGNED STACK EXISTS TO EXTRACT / READ THE PARTICLES
            if self.params['align'] is True:
                self.calcResolution(level)
            partdict = self.getClassificationAtLevel(level)
            for classnum in partdict:
                self.insertClusterStackIntoDatabase(
                    "part" + self.params['timestamp'] +
                    "_level_%02d_.hed" % level, classnum + 1,
                    partdict[classnum], len(partdict))
	def start(self):
		"""
		Create one or more substacks from an existing stack.

		The keep list can come from a --first/--last range, a random
		sample, an N-way split, an exclude-from file, an explicit keep
		file, or comma-separated include/exclude particle lists.  Each
		resulting stack is created, averaged, and committed to the
		database.
		"""
		#old stack size
		stacksize = apStack.getNumberStackParticlesFromId(self.params['stackid'])

		# if exclude or include lists are not defined...
		if self.params['exclude'] is None and self.params['include'] is None:
			# if first and last are specified, create a file
			# (keep file entries are zero-indexed, hence int(i)-1)
			if self.params['first'] is not None and self.params['last'] is not None:
				stp = str(self.params['first'])
				enp = str(self.params['last'])
				fname = 'sub'+str(self.params['stackid'])+'_'+stp+'-'+enp+'.lst'
				self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
				apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
				f=open(self.params['keepfile'],'w')
				for i in range(self.params['first'],self.params['last']+1):
					f.write('%d\n' % (int(i)-1))
				f.close()
				# generate the random list by giving number and create the file
			elif self.params['random'] is not None:
				#numOfRandomParticles = str(self.params['random'])
				#fname = 'random'+str(self.params['stackid'])+'_'+numOfRandomParticles+'.lst'
				fname = "random%d_%d.lst"%(self.params['stackid'], self.params['random'])
				self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
				apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
				# create a file
				f=open(self.params['keepfile'],'w')
				# generate a random sequence by giving size
				# NOTE(review): samples from xrange(self.params['last']);
				# if --last was not given this is None and raises a
				# TypeError -- presumably should be stacksize. Confirm.
				randomList = random.sample(xrange(self.params['last']), self.params['random'])
				randomList.sort()
				for partnum in randomList:
					f.write('%d\n' % partnum)
				f.close()

			# if splitting, create files containing the split values
			elif self.params['split'] > 1:
				for i in range(self.params['split']):
					fname = 'sub'+str(self.params['stackid'])+'.'+str(i+1)+'.lst'
					self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
					apDisplay.printMsg("Creating keep list: "+self.params['keepfile'])
					f = open(self.params['keepfile'],'w')
					for p in range(stacksize):
						# round-robin assignment: particle p goes to
						# split i when p % split == i
						if (p % self.params['split'])-i==0:
							f.write('%i\n' % p)
					f.close()

			# if exclude-from option is specified, convert particles to exclude
			elif self.params['excludefile'] is True:
				# read the existing keep file as a list of particles to drop
				oldkf = open(self.params['keepfile'])
				partlist = []
				for line in oldkf:
					particle=line.strip()
					try:
						particle = int(particle)
					except:
						# skip non-numeric lines (headers, blanks)
						continue
					partlist.append(particle)
				oldkf.close()
				# create new list excluding the particles
				apDisplay.printMsg("Converting keep file to exclude file")
				newkeepfile = "tmpnewkeepfile.txt"
				newkf = open(newkeepfile,'w')
				for p in range(stacksize):
					if p not in partlist:
						newkf.write("%i\n"%p)
				newkf.close()
				self.params['keepfile'] = os.path.abspath(newkeepfile)

			# otherwise, just copy the file
			elif not os.path.isfile(os.path.basename(self.params['keepfile'])):
				shutil.copy(self.params['keepfile'], os.path.basename(self.params['keepfile']))

		# if either exclude or include lists is defined
		elif self.params['exclude'] or self.params['include']:

			### list of particles to be excluded
			excludelist = []
			if self.params['exclude'] is not None:
				excludestrlist = self.params['exclude'].split(",")
				for excld in excludestrlist:
					excludelist.append(int(excld.strip()))
			apDisplay.printMsg("Exclude list: "+str(excludelist))

			### list of particles to be included
			includelist = []
			if self.params['include'] is not None:
				includestrlist = self.params['include'].split(",")
				for incld in includestrlist:
					includelist.append(int(incld.strip()))
			apDisplay.printMsg("Include list: "+str(includelist))


		#new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		newname = stackdata['name']

		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])

		#if include or exclude list is given...
		if self.params['include'] is not None or self.params['exclude'] is not None:

			includeParticle = []
			excludeParticle = 0

			# keep a particle when it is on the include list, or when an
			# exclude list is used and it is not on it
			for partnum in range(stacksize):
				if includelist and partnum in includelist:
					includeParticle.append(partnum)
				elif excludelist and not partnum in excludelist:
					includeParticle.append(partnum)
				else:
					excludeParticle += 1
			includeParticle.sort()

			### write kept particles to file
			self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
			apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
			kf = open(self.params['keepfile'], "w")
			for partnum in includeParticle:
				kf.write(str(partnum)+"\n")
			kf.close()

			#get number of particles
			numparticles = len(includeParticle)
			if excludelist:
				self.params['description'] += ( " ... %d particle substack of stackid %d" 
				% (numparticles, self.params['stackid']))
			elif includelist:
				self.params['description'] += ( " ... %d particle substack of stackid %d" 
				% (numparticles, self.params['stackid']))

		# remember the original description; each split appends its own suffix
		ogdescr = self.params['description']
		for i in range(self.params['split']):
			### always do this, if not splitting split=1
			sb = os.path.splitext(stackdata['name'])
			if self.params['first'] is not None and self.params['last'] is not None:
				newname = sb[0]+'.'+str(self.params['first'])+'-'+str(self.params['last'])+sb[-1]
			elif self.params['random'] is not None:
				newname = "%s-random%d%s"%(sb[0], self.params['random'], sb[-1])
			elif self.params['split'] > 1:
				# point keepfile at this split's list (created above)
				fname = 'sub'+str(self.params['stackid'])+'.'+str(i+1)+'.lst'
				self.params['keepfile'] = os.path.join(self.params['rundir'],fname)
				newname = sb[0]+'.'+str(i+1)+'of'+str(self.params['split'])+sb[-1]
			newstack = os.path.join(self.params['rundir'], newname)
			apStack.checkForPreviousStack(newstack)

			#get number of particles by counting keepfile lines
			f = open(self.params['keepfile'], "r")
			numparticles = len(f.readlines())
			f.close()
			self.params['description'] = ogdescr
			self.params['description'] += (
				(" ... %d particle substack of stackid %d" 
				 % (numparticles, self.params['stackid']))
			)
			#if splitting, add to description
			if self.params['split'] > 1:
				self.params['description'] += (" (%i of %i)" % (i+1, self.params['split']))

			#create the new sub stack
			if not self.params['correctbeamtilt']:
				apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)
			else:
				apBeamTilt.makeCorrectionStack(self.params['stackid'], oldstack, newstack)
			if not os.path.isfile(newstack):
				apDisplay.printError("No stack was created")
			apStack.commitSubStack(self.params, newname, sorted=False)
			apStack.averageStack(stack=newstack)
			newstackid = apStack.getStackIdFromPath(newstack)
			if self.params['meanplot'] is True:
				apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
				apStackMeanPlot.makeStackMeanPlot(newstackid)
	def start(self):
		"""Build a synthetic particle stack from projections of a density map.

		Pipeline: create projections, randomly shift/rotate them, add a first
		noise layer (pre-CTF, simulating beam/structural damage), apply
		envelope + CTF with ACE2-style correction, add a second noise layer to
		reach the requested total SNR, optionally filter/normalize via EMAN
		proc2d, then average the final stack and, when committing, upload the
		data and create a stack mean plot.
		"""
		self.ace2correct = self.getACE2Path()

		### determine amount of memory needed for entire stack
		# cap in-memory work at ~0.3 GB assuming float32 (4 bytes/pixel) images;
		# split reading/writing into multiple iterations when over the cap
		memorylimit = 0.3
		bytelimit = memorylimit*(1024**3)
		writeiters = 1
		partbytes = 4*self.params['box']*self.params['box']
		if partbytes*self.params['projcount'] > bytelimit:
			writeiters = int(math.ceil(float(partbytes)*self.params['projcount'] / bytelimit))
		partsperiter = int(float(self.params['projcount']) / writeiters) ### number of particles read each time

		### some defaults, and workarounds for now
		self.params['projpergraph'] = 100
		self.params['filesperdir'] = partsperiter
		if self.params['filesperdir'] > 2048:
			self.params['filesperdir'] = 2048

		### first create projections (preferred-orientation or EMAN prop-distributed)
		if self.params['preforient'] is True:
			filename = self.createProjections(pad=self.params['pad'], invert=self.params['invert'])
		else:
			filename = self.createProjectionsEmanProp(pad=self.params['pad'], invert=self.params['invert'])

		### shift & rotate randomly (skipped when both ranges are zero)
		if self.params['rotang']!=0 or self.params['shiftrad']!=0:
			shiftstackname = self.shift_images(filename)
		else:
			shiftstackname = filename

		### read MRC stats to figure out noise level addition
		mean1, stdev1 = self.readFileStats(shiftstackname)

		### determine noise multiplication factor to ensure that appropriate amount of noise gets added to particles inside circular mask
		# 1 / (fractional area of the circular mask within the box)
		multfactor = 1.0/((float(self.params['radius'])/self.params['box'])*(float(self.params['radius'])/self.params['box'])*math.pi)

		### calculate noiselevel additions and add noise to an initial ratio of 1.8, simulating beam and structural damage
		### NOTE: THERE ARE DIFFERENT DEFINITIONS OF SNR, see below
		noiselevel1 = math.sqrt((float(stdev1)*float(stdev1)) / float(self.params['snr1']))
#		noiselevel1 = float(stdev1) / float(self.params['snr1'])
		noiselevel1 = noiselevel1 * multfactor 
		noisystack = self.addNoise(shiftstackname, noiselevel1, SNR=self.params['snr1'])

		### get list of defocus values
		self.getListOfDefoci(self.params['projcount'])

		### apply envelope and ctf to each .mrc file, then correct based on how well ace2 works on raw micrographs
		ctfstack, ctfpartlist = self.applyEnvelopeAndCTF(noisystack)

		#recoverlists = self.recoverLists()

		### read IMAGIC stats to figure out noise level addition
		mean2, stdev2 = self.readFileStats(ctfstack)

		### cascading of noise processes according to Frank and Al-Ali (1975) & Baxter (2009)
#		snr2 = 1 / ((1+1/float(self.params['snrtot'])) / (1/float(self.params['snr1']) + 1) - 1)
		# NOTE(review): this cascading formula differs from the commented-out
		# variant above, and relies on snr1/snrtot being floats for true
		# division — verify which convention is intended
		snr2 = (1+1/self.params['snr1'])/(1/self.params['snrtot']-1/self.params['snr1'])

		### NOTE: THERE ARE DIFFERENT DEFINITIONS OF SNR, see below
		noiselevel2 = math.sqrt((float(stdev2)*float(stdev2)) / float(snr2))
#		noiselevel2 = float(stdev2) / float(snr2)
		noiselevel2 = noiselevel2 * multfactor

		### add a last layer of noise
		noisystack2 = self.addNoise(ctfstack, noiselevel2, SNR=self.params['snrtot'])

		### low-pass / high-pass filter resulting stack, if specified
		if self.params['hpfilt'] is not None or self.params['lpfilt'] is not None or self.params['norm'] is True:
			# assumes a 4-character extension (e.g. ".hed") — TODO confirm
			filtstack = noisystack2[:-4]
			if self.params['norm'] is True:
				filtstack = filtstack+"_norm.hed"
			else:
				filtstack = filtstack+"_filt.hed"
			apFile.removeStack(filtstack)
			emancmd = "proc2d "+noisystack2+" "+filtstack+" apix="+str(self.params['apix'])+" "
			if self.params['hpfilt'] is not None:
				emancmd = emancmd+"hp="+str(self.params['hpfilt'])+" "
			if self.params['lpfilt'] is not None:
				emancmd = emancmd+"lp="+str(self.params['lpfilt'])+" "
			if self.params['norm'] is True:
				emancmd = emancmd+"norm="+str(self.params['norm'])+" "
			apParam.runCmd(emancmd, "EMAN")
			self.params['finalstack'] = os.path.basename(filtstack)
			finalstack = filtstack
		else:
			self.params['finalstack'] = os.path.basename(noisystack2)
			finalstack = noisystack2

		### post-processing: create average file for viewing on webpages
		apStack.averageStack(finalstack)

		### upload if commit is checked
		self.uploadData(ctfpartlist)

		### post-processing: Create Stack Mean Plot
		if self.params['commit'] is True:
			stackid = apStack.getStackIdFromPath(finalstack)
			if stackid is not None:
				apStackMeanPlot.makeStackMeanPlot(stackid, gridpoints=8)
	def start(self):
		"""Run Xmipp CL2D 2D classification on a particle stack.

		Preprocesses the stack to a local IMAGIC file with proc2d
		(bin/filter/invert/subset), breaks it into single spider files, runs
		xmipp_mpi_class_averages under MPI, then parses the output and inserts
		the alignment and per-level clustering results into the database.
		"""
#		self.insertCL2DJob()
		self.stack = {}
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])

		if self.params['virtualdata'] is not None:
			self.stack['file'] = self.params['virtualdata']['filename']
		else:
			# NOTE(review): self.stackdata is not assigned anywhere in this
			# method — presumably populated during setup elsewhere; verify
			self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		### process stack to local file
		if self.params['timestamp'] is None:
			apDisplay.printMsg("creating timestamp")
			self.params['timestamp'] = self.timestamp
		self.params['localstack'] = os.path.join(self.params['rundir'], self.params['timestamp']+".hed")
		if os.path.isfile(self.params['localstack']):
			apFile.removeStack(self.params['localstack'])

		a = proc2dLib.RunProc2d()
		a.setValue('infile',self.stack['file'])
		a.setValue('outfile',self.params['localstack'])
		a.setValue('apix',self.stack['apix'])
		a.setValue('bin',self.params['bin'])
		# 'last' is an index, hence numpart-1 keeps exactly numpart particles
		a.setValue('last',self.params['numpart']-1)

		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			a.setValue('lowpass',self.params['lowpass'])
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			a.setValue('highpass',self.params['highpass'])
		if self.params['invert'] is True:
			a.setValue('invert',True)

		# clip not yet implemented
#		if self.params['clipsize'] is not None:
#			clipsize = int(self.clipsize)*self.params['bin']
#			if clipsize % 2 == 1:
#				clipsize += 1 ### making sure that clipped boxsize is even
#			a.setValue('clip',clipsize)

		if self.params['virtualdata'] is not None:
			# restrict processing to the particles of the virtual stack
			# (particleNumber is 1-based; proc2d lists are 0-based)
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)

		#run proc2d
		a.run()

		if self.params['numpart'] != apFile.numImagesInStack(self.params['localstack']):
			apDisplay.printError("Missing particles in stack")

		### convert stack into single spider files
		self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(self.params['localstack'])

		### setup Xmipp command
		aligntime = time.time()
		xmippopts = ( " "
			+" -i "+os.path.join(self.params['rundir'], self.partlistdocfile)
			+" -codes "+str(self.params['numrefs'])
			+" -iter "+str(self.params['maxiter'])
			+" -o "+os.path.join(self.params['rundir'], "part"+self.params['timestamp'])
		)
		if self.params['fast']:
			xmippopts += " -fast "
		if self.params['correlation']:
			xmippopts += " -useCorrelation "
		if self.params['classical']:
			xmippopts += " -classicalMultiref "
		if self.params['align']:
			xmippopts += " -alignImages "

		### use multi-processor command
		apDisplay.printColor("Using "+str(self.params['nproc'])+" processors!", "green")
		xmippexe = apParam.getExecPath("xmipp_mpi_class_averages", die=True)
		mpiruncmd = self.mpirun+" -np "+str(self.params['nproc'])+" "+xmippexe+" "+xmippopts
		self.writeXmippLog(mpiruncmd)
		apParam.runCmd(mpiruncmd, package="Xmipp", verbose=True, showcmd=True, logfile="xmipp.std")
		self.params['runtime'] = time.time() - aligntime
		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(self.params['runtime']))

		### minor post-processing
		self.createReferenceStack()
		self.parseOutput()
		self.clearIntermediateFiles()
#		self.readyUploadFlag()
		apParam.dumpParameters(self.params, "cl2d-"+self.params['timestamp']+"-params.pickle")

		### upload results ... this used to be two separate operations, I'm combining into one
		self.runparams = apParam.readRunParameters("cl2d-"+self.params['timestamp']+"-params.pickle")
		self.apix = apStack.getStackPixelSizeFromStackId(self.runparams['stackid'])*self.runparams['bin']
		# count the classification levels Xmipp wrote (one .hed per level)
		self.Nlevels=len(glob.glob("part"+self.params['timestamp']+"_level_??_.hed"))

		### create average of aligned stacks & insert aligned stack info
		lastLevelStack = "part"+self.params['timestamp']+"_level_%02d_.hed"%(self.Nlevels-1)
		apStack.averageStack(lastLevelStack)
		self.boxsize = apFile.getBoxSize(lastLevelStack)[0]
		self.insertCL2DParamsIntoDatabase()
		if self.runparams['align'] is True:
			self.insertAlignStackRunIntoDatabase("alignedStack.hed")
			self.calcResolution(self.Nlevels-1)
			self.insertAlignParticlesIntoDatabase(level=self.Nlevels-1)

		### loop over each class average stack & insert as clustering stacks
		self.insertClusterRunIntoDatabase()
		for level in range(self.Nlevels):
			### NOTE: RESOLUTION CAN ONLY BE CALCULATED IF ALIGNED STACK EXISTS TO EXTRACT / READ THE PARTICLES
			if self.params['align'] is True:
				self.calcResolution(level)
			partdict = self.getClassificationAtLevel(level)
			for classnum in partdict: 
				self.insertClusterStackIntoDatabase(
					"part"+self.params['timestamp']+"_level_%02d_.hed"%level,
					classnum+1, partdict[classnum], len(partdict))
	def start(self):
		"""Iteratively align a stack against reference templates on the GPU.

		Converts the stack to spider format, runs numiter rounds of GPU
		alignment (updating the templates each round), converts the aligned
		result to IMAGIC, averages it, computes per-reference resolution, and
		optionally commits the run to the database before cleaning up.
		"""
		stackid = self.params['stackid']
		self.stack = {}
		self.stack['data'] = apStack.getOnlyStackData(stackid)
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(stackid)
		self.stack['part'] = apStack.getOneParticleFromStackId(stackid)
		self.stack['boxsize'] = apStack.getStackBoxsize(stackid)
		stackinfo = self.stack['data']
		self.stack['file'] = os.path.join(stackinfo['path']['path'], stackinfo['name'])

		### test insert to make sure data is not overwritten
		self.params['runtime'] = 0
		#self.checkDuplicateRefBasedRun()

		### binned box size, forced to be even
		self.boxsize = int(math.floor(self.stack['boxsize']/self.params['bin']/2.0))*2

		### convert stack to spider format
		spiderstack = self.createSpiderFile()

		### build the initial template stack
		templatestack = self.createTemplateStack()

		### iterative alignment: each round aligns against the current
		### templates, then rebuilds the templates from the aligned particles
		starttime = time.time()
		usestack = spiderstack
		oldpartlist = None
		for iternum in range(1, self.params['numiter']+1):
			apDisplay.printColor("\n\nITERATION "+str(iternum), "green")
			alignedstack, partlist = self.runAlignmentGPU(
				usestack, templatestack, spiderstack,
				self.params['xysearch'], self.params['xystep'],
				self.params['firstring'], self.params['lastring'],
				iternum=iternum, oldpartlist=oldpartlist)
			oldpartlist, usestack = partlist, alignedstack
			templatestack = self.updateTemplateStack(alignedstack, partlist, iternum)
		aligntime = time.time() - starttime
		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))

		### remove large, worthless stack
		spiderstack = os.path.join(self.params['rundir'], "start.spi")
		apDisplay.printMsg("Removing un-aligned stack: "+spiderstack)
		apFile.removeFile(spiderstack, warn=True)

		### convert the final aligned stack to IMAGIC format with EMAN
		finalspistack = "aligned.spi"
		shutil.move(alignedstack, finalspistack)
		imagicstack = "aligned.hed"
		apFile.removeStack(imagicstack)
		apEMAN.executeEmanCmd("proc2d "+finalspistack+" "+imagicstack, verbose=True)

		### create average image of the aligned stack
		apStack.averageStack(imagicstack)

		### calculate resolution for each reference at the binned pixel size
		binnedapix = self.stack['apix']*self.params['bin']
		self.calcResolution(partlist, imagicstack, binnedapix)

		if self.params['commit'] is True:
			apDisplay.printMsg("committing results to DB")
			self.params['runtime'] = aligntime
			self.insertRefBasedRun(partlist, imagicstack, insert=True)
		else:
			apDisplay.printWarning("not committing results to DB")

		### remove temporary files
		apFile.removeFilePattern("alignments/alignedstack*.spi")
		apFile.removeFile(finalspistack)
	def start(self):
		"""Create a synthetic particle stack from projections of a model.

		Projects the density map, applies random shifts/rotations, adds a
		pre-CTF noise layer (beam/structural damage), applies envelope + CTF
		with ACE2-style correction, adds a post-CTF noise layer to reach the
		requested total SNR, optionally filters/normalizes with EMAN proc2d,
		then averages the result and uploads/plots when committing.
		"""
		self.ace2correct = self.getACE2Path()

		### determine amount of memory needed for entire stack
		memorylimit = 0.3  # GB allowed in memory at once
		bytelimit = memorylimit*(1024**3)
		# float32 images: 4 bytes per pixel
		partbytes = 4*self.params['box']*self.params['box']
		writeiters = 1
		if partbytes*self.params['projcount'] > bytelimit:
			writeiters = int(math.ceil(float(partbytes)*self.params['projcount'] / bytelimit))
		### number of particles read each time
		partsperiter = int(float(self.params['projcount']) / writeiters)

		### some defaults, and workarounds for now
		self.params['projpergraph'] = 100
		self.params['filesperdir'] = min(partsperiter, 2048)

		### first create projections
		if self.params['preforient'] is True:
			projfile = self.createProjections(pad=self.params['pad'], invert=self.params['invert'])
		else:
			projfile = self.createProjectionsEmanProp(pad=self.params['pad'], invert=self.params['invert'])

		### shift & rotate randomly, unless both ranges are zero
		if self.params['rotang'] != 0 or self.params['shiftrad'] != 0:
			shiftstack = self.shift_images(projfile)
		else:
			shiftstack = projfile

		### read MRC stats to figure out noise level addition
		mean1, stdev1 = self.readFileStats(shiftstack)

		### scale noise so the requested SNR holds inside the circular particle mask
		maskfrac = float(self.params['radius'])/self.params['box']
		multfactor = 1.0/(maskfrac*maskfrac*math.pi)

		### first noise layer, simulating beam and structural damage
		### NOTE: THERE ARE DIFFERENT DEFINITIONS OF SNR, see below
		noiselevel1 = math.sqrt((float(stdev1)*float(stdev1)) / float(self.params['snr1'])) * multfactor
		noisystack = self.addNoise(shiftstack, noiselevel1, SNR=self.params['snr1'])

		### get list of defocus values
		self.getListOfDefoci(self.params['projcount'])

		### apply envelope and ctf to each .mrc file, then correct based on how well ace2 works on raw micrographs
		ctfstack, ctfpartlist = self.applyEnvelopeAndCTF(noisystack)

		### read IMAGIC stats to figure out the second noise level addition
		mean2, stdev2 = self.readFileStats(ctfstack)

		### cascading of noise processes according to Frank and Al-Ali (1975) & Baxter (2009)
		snr2 = (1+1/self.params['snr1'])/(1/self.params['snrtot']-1/self.params['snr1'])

		### NOTE: THERE ARE DIFFERENT DEFINITIONS OF SNR, see above
		noiselevel2 = math.sqrt((float(stdev2)*float(stdev2)) / float(snr2)) * multfactor

		### add a last layer of noise
		noisystack2 = self.addNoise(ctfstack, noiselevel2, SNR=self.params['snrtot'])

		### low-pass / high-pass filter resulting stack, if specified
		wantfilter = (self.params['hpfilt'] is not None
			or self.params['lpfilt'] is not None
			or self.params['norm'] is True)
		if wantfilter:
			if self.params['norm'] is True:
				filtstack = noisystack2[:-4]+"_norm.hed"
			else:
				filtstack = noisystack2[:-4]+"_filt.hed"
			apFile.removeStack(filtstack)
			pieces = ["proc2d "+noisystack2+" "+filtstack+" apix="+str(self.params['apix'])+" "]
			if self.params['hpfilt'] is not None:
				pieces.append("hp="+str(self.params['hpfilt'])+" ")
			if self.params['lpfilt'] is not None:
				pieces.append("lp="+str(self.params['lpfilt'])+" ")
			if self.params['norm'] is True:
				pieces.append("norm="+str(self.params['norm'])+" ")
			apParam.runCmd("".join(pieces), "EMAN")
			finalstack = filtstack
		else:
			finalstack = noisystack2
		self.params['finalstack'] = os.path.basename(finalstack)

		### post-processing: create average file for viewing on webpages
		apStack.averageStack(finalstack)

		### upload if commit is checked
		self.uploadData(ctfpartlist)

		### post-processing: Create Stack Mean Plot
		if self.params['commit'] is True:
			stackid = apStack.getStackIdFromPath(finalstack)
			if stackid is not None:
				apStackMeanPlot.makeStackMeanPlot(stackid, gridpoints=8)