Example #1
    def insertClusterStack(self,
                           classavg=None,
                           classvar=None,
                           numclass=None,
                           insert=False):
        clusterstackq = appiondata.ApClusteringStackData()
        clusterstackq['avg_imagicfile'] = classavg + ".hed"
        clusterstackq['var_imagicfile'] = classvar + ".hed"
        clusterstackq['num_classes'] = numclass
        clusterstackq['clusterrun'] = self.clusterrun
        clusterstackq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        clusterstackq['hidden'] = False

        imagicfile = os.path.join(self.params['rundir'],
                                  clusterstackq['avg_imagicfile'])
        if not os.path.isfile(imagicfile):
            apDisplay.printError("could not find average stack file: " +
                                 imagicfile)
        imagicfile = os.path.join(self.params['rundir'],
                                  clusterstackq['var_imagicfile'])
        if not os.path.isfile(imagicfile):
            apDisplay.printError("could not find variance stack file: " +
                                 imagicfile)

        apDisplay.printMsg("inserting clustering stack into database")
        if insert is True:
            clusterstackq.insert()

        ### particle class data
        apDisplay.printColor(
            "Inserting particle classification data, please wait", "cyan")
        for i in range(numclass):
            classnum = i + 1
            classdocfile = os.path.join(
                self.params['rundir'],
                "cluster/classdoc_%s_%04d.spi" % (self.timestamp, classnum))
            partlist = self.readClassDocFile(classdocfile)
            sys.stderr.write(".")
            for partnum in partlist:
                alignpartdata = self.getAlignParticleData(partnum)
                cpartq = appiondata.ApClusteringParticleData()
                cpartq['clusterstack'] = clusterstackq
                cpartq['alignparticle'] = alignpartdata
                cpartq['partnum'] = partnum
                cpartq['refnum'] = classnum
                cpartq['clusterreference'] = None
                # actual parameters
                if insert is True:
                    cpartq.insert()
        return
    def insertClusterStackIntoDatabase(self, clusterstackfile, classnum,
                                       partlist, num_classes):
        clusterstackq = appiondata.ApClusteringStackData()
        clusterstackq['avg_imagicfile'] = clusterstackfile
        clusterstackq['var_imagicfile'] = None
        clusterstackq['num_classes'] = num_classes
        clusterstackq['clusterrun'] = self.clusterrun
        clusterstackq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        clusterstackq['hidden'] = False

        if not os.path.isfile(clusterstackfile):
            apDisplay.printError("could not find average stack file: " +
                                 clusterstackfile)

        apDisplay.printMsg("inserting clustering stack into database")
        if self.params['commit'] is True:
            clusterstackq.insert()

        ### insert particle class & reference data
        clusterrefq = appiondata.ApClusteringReferenceData()
        clusterrefq['refnum'] = classnum
        clusterrefq['clusterrun'] = self.clusterrun
        clusterrefq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        clusterrefq['num_particles'] = len(partlist)
        if self.params['align'] is True:
            if (classnum - 1) in self.resdict:
                clusterrefq['ssnr_resolution'] = self.resdict[classnum - 1]
        apDisplay.printColor(
            "Inserting particle classification data, please wait", "cyan")
        for i, partnum in enumerate(partlist):
            cpartq = appiondata.ApClusteringParticleData()
            if self.runparams['align'] is True and self.params['commit'] is True:
                alignpartdata = self.getAlignParticleData(int(partnum))
                cpartq['alignparticle'] = alignpartdata
            else:
                cpartq['alignparticle'] = None
            cpartq['clusterstack'] = clusterstackq
            cpartq['partnum'] = int(partnum)
            cpartq['refnum'] = classnum
            cpartq['clusterreference'] = clusterrefq
            # actual parameters
            if self.params['commit'] is True:
                cpartq.insert()

        return
        def insertAffinityPropagationRun(self, classes):
                ### Preliminary data
                numclass = len(classes.keys())
                projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid'])
                alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
                pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))

                ### Affinity Propagation Params object
                affpropq = appiondata.ApAffinityPropagationClusterParamsData()
                affpropq['mask_diam'] = 2.0*self.params['maskrad']
                affpropq['run_seconds'] = time.time()-self.t0
                affpropq['preference_type'] = self.params['preftype']

                ### Align Analysis Run object
                analysisq = appiondata.ApAlignAnalysisRunData()
                analysisq['runname'] = self.params['runname']
                analysisq['path'] = pathdata
                analysisq['description'] = self.params['description']
                analysisq['alignstack'] = alignstackdata
                analysisq['hidden'] = False
                ### linked through cluster not analysis

                ### Clustering Run object
                clusterrunq = appiondata.ApClusteringRunData()
                clusterrunq['runname'] = self.params['runname']
                clusterrunq['description'] = self.params['description']
                clusterrunq['boxsize'] = alignstackdata['boxsize']
                clusterrunq['pixelsize'] = alignstackdata['pixelsize']
                clusterrunq['num_particles'] = self.params['numpart']
                clusterrunq['alignstack'] = alignstackdata
                clusterrunq['analysisrun'] = analysisq
                clusterrunq['affpropparams'] = affpropq

                ### Clustering Stack object
                clusterstackq = appiondata.ApClusteringStackData()
                clusterstackq['avg_imagicfile'] = "classaverage-"+self.timestamp+".hed"
                clusterstackq['num_classes'] = numclass
                clusterstackq['clusterrun'] = clusterrunq
                clusterstackq['path'] = pathdata
                clusterstackq['hidden'] = False
                imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile'])
                if not os.path.isfile(imagicfile):
                        apDisplay.printError("could not find average stack file: "+imagicfile)

                ### looping over clusters
                apDisplay.printColor("Inserting particle classification data, please wait", "cyan")
                for i,classkey in enumerate(classes.keys()):
                        classnum = i+1
                        partlist = classes[classkey]
                        #print "MINIMUM: ", min(partlist)
                        classroot = "%s.%d"% (self.timestamp, classnum-1)
                        classdocfile = os.path.join(self.params['rundir'], classroot)

                        ### Clustering Particle object
                        clusterrefq = appiondata.ApClusteringReferenceData()
                        clusterrefq['refnum'] = classnum
                        clusterrefq['clusterrun'] = clusterrunq
                        clusterrefq['path'] = pathdata
                        clusterrefq['num_particles'] = len(partlist)
                        #clusterrefq['ssnr_resolution'] = self.cluster_resolution[i]

                        ### looping over particles
                        sys.stderr.write(".")
                        for partnum in partlist:
                                alignpartdata = self.getAlignParticleData(partnum, alignstackdata)

                                ### Clustering Particle objects
                                clusterpartq = appiondata.ApClusteringParticleData()
                                clusterpartq['clusterstack'] = clusterstackq
                                clusterpartq['alignparticle'] = alignpartdata
                                clusterpartq['partnum'] = partnum
                                clusterpartq['refnum'] = classnum
                                clusterpartq['clusterreference'] = clusterrefq

                                ### finally we can insert parameters
                                if self.params['commit'] is True:
                                        clusterpartq.insert()
                return
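A minimal usage sketch, assuming insertAffinityPropagationRun is called from another method of the same script (the classes mapping and its contents below are made up for illustration): only the values of the mapping are read, as lists of aligned-particle numbers, while the keys are merely enumerated to assign class numbers 1..N.

# hypothetical affinity propagation result: cluster key -> particle numbers
classes = {
    0: [1, 5, 12, 40],
    1: [2, 3, 7, 9, 22],
}
self.insertAffinityPropagationRun(classes)
# particle rows are inserted only when self.params['commit'] is True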
Example #4
	def insertKerDenSOM(self, binned=None):
		### Preliminary data
		projectid = apProject.getProjectIdFromAlignStackId(self.params['alignstackid'])
		alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
		numclass = self.params['xdim']*self.params['ydim']
		pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))

		### KerDen SOM Params object
		kerdenq = appiondata.ApKerDenSOMParamsData()
		kerdenq['mask_diam'] = 2.0*self.params['maskrad']
		kerdenq['x_dimension'] = self.params['xdim']
		kerdenq['y_dimension'] = self.params['ydim']
		kerdenq['convergence'] = self.params['converge']
		kerdenq['run_seconds'] = time.time()-self.t0

		### Align Analysis Run object
		analysisq = appiondata.ApAlignAnalysisRunData()
		analysisq['runname'] = self.params['runname']
		analysisq['path'] = pathdata
		analysisq['description'] = self.params['description']
		analysisq['alignstack'] = alignstackdata
		analysisq['hidden'] = False
		### linked through cluster not analysis
		#analysisq['kerdenparams'] = kerdenq

		### Clustering Run object
		clusterrunq = appiondata.ApClusteringRunData()
		clusterrunq['runname'] = self.params['runname']
		clusterrunq['description'] = self.params['description']
		# what if we binned the aligned stack to get the new one
		if binned is None:
			boxsize = alignstackdata['boxsize']
			pixelsize = alignstackdata['pixelsize']
		else:
			boxsize = alignstackdata['boxsize'] / binned
			pixelsize = alignstackdata['pixelsize'] * binned
		clusterrunq['boxsize'] = boxsize
		clusterrunq['pixelsize'] = pixelsize
		clusterrunq['num_particles'] = self.params['numpart']
		clusterrunq['alignstack'] = alignstackdata
		clusterrunq['analysisrun'] = analysisq
		clusterrunq['kerdenparams'] = kerdenq

		### Clustering Stack object
		clusterstackq = appiondata.ApClusteringStackData()
		clusterstackq['avg_imagicfile'] = "kerdenstack"+self.timestamp+".hed"
		clusterstackq['num_classes'] = numclass
		clusterstackq['clusterrun'] = clusterrunq
		clusterstackq['path'] = pathdata
		clusterstackq['hidden'] = False
		imagicfile = os.path.join(self.params['rundir'], clusterstackq['avg_imagicfile'])
		if not os.path.isfile(imagicfile):
			apDisplay.printError("could not find average stack file: "+imagicfile)

		### looping over clusters
		apDisplay.printColor("Inserting particle classification data, please wait", "cyan")
		for i in range(numclass):
			classnum = i+1
			classroot = "%s.%d"% (self.timestamp, classnum-1)
			classdocfile = os.path.join(self.params['rundir'], classroot)
			partlist = self.readClassDocFile(classdocfile)

			### Clustering Particle object
			clusterrefq = appiondata.ApClusteringReferenceData()
			clusterrefq['refnum'] = classnum
			clusterrefq['avg_mrcfile'] = classroot+".mrc"
			clusterrefq['clusterrun'] = clusterrunq
			clusterrefq['path'] = pathdata
			clusterrefq['num_particles'] = len(partlist)
			clusterrefq['ssnr_resolution'] = self.cluster_resolution[i]

			### looping over particles
			sys.stderr.write(".")
			for partnum in partlist:
				alignpartdata = self.getAlignParticleData(partnum, alignstackdata)

				### Clustering Particle objects
				clusterpartq = appiondata.ApClusteringParticleData()
				clusterpartq['clusterstack'] = clusterstackq
				clusterpartq['alignparticle'] = alignpartdata
				clusterpartq['partnum'] = partnum
				clusterpartq['refnum'] = classnum
				clusterpartq['clusterreference'] = clusterrefq

				### finally we can insert parameters
				if self.params['commit'] is True:
					clusterpartq.insert()
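A note on the binned argument above: it compensates for classifying a binned copy of the aligned stack, so the recorded box size shrinks and the pixel size grows by the same factor. A minimal sketch of the arithmetic, with made-up numbers:

# hypothetical values: 128-pixel boxes at 1.5 A/pixel, binned by 2 after alignment
binned = 2
boxsize = 128 / binned      # 64 pixels stored in ApClusteringRunData
pixelsize = 1.5 * binned    # 3.0 A/pixel stored in ApClusteringRunData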
Example #5
    def getGoodAlignParticles(self):
        includeParticle = []
        tiltParticlesData = []
        nopairParticle = 0
        excludeParticle = 0
        badmirror = 0
        badscore = 0
        apDisplay.printMsg("Sorting particles from classes at " +
                           time.asctime())
        count = 0
        startmem = mem.active()
        t0 = time.time()
        if self.params['clusterid'] is not None:
            ### method 1: get particles from clustering data
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq['clusterstack'] = appiondata.ApClusteringStackData.direct_query(
                self.params['clusterid'])
            clusterpartdatas = clusterpartq.query()
            apDisplay.printMsg("Sorting " + str(len(clusterpartdatas)) +
                               " clustered particles")

            for clustpart in clusterpartdatas:
                count += 1
                if count % 50 == 0:
                    sys.stderr.write(".")
                    memdiff = (mem.active() - startmem) / count / 1024.0
                    if memdiff > 3:
                        apDisplay.printColor(
                            "Memory increase: %d MB/part" % (memdiff), "red")
                #write to text file
                clustnum = clustpart['refnum'] - 1
                if self.params['minscore'] is not None:
                    if (clustpart['alignparticle']['score'] is not None
                            and clustpart['alignparticle']['score'] <
                            self.params['minscore']):
                        badscore += 1
                        continue
                    elif (clustpart['alignparticle']['spread'] is not None
                          and clustpart['alignparticle']['spread'] <
                          self.params['minscore']):
                        badscore += 1
                        continue
                if clustnum in self.classlist:
                    notstackpartnum = clustpart['alignparticle']['stackpart']['particleNumber']
                    tiltstackpartdata = apTiltPair.getStackParticleTiltPair(
                        self.params['notstackid'], notstackpartnum,
                        self.params['tiltstackid'])

                    if tiltstackpartdata is None:
                        nopairParticle += 1
                        continue
                    tiltrot, theta, notrot, tiltangle = apTiltPair.getParticleTiltRotationAngles(
                        tiltstackpartdata)
                    if tiltrot is None:
                        apDisplay.printWarning("BAD particle  " +
                                               str(tiltstackpartdata))
                        nopairParticle += 1
                        continue
                    else:
                        inplane, mirror = self.getParticleInPlaneRotation(
                            tiltstackpartdata)
                        if (self.params['mirror'] == "all" or
                            (self.params['mirror'] == "no" and mirror is False)
                                or (self.params['mirror'] == "yes"
                                    and mirror is True)):
                            emantiltstackpartnum = tiltstackpartdata['particleNumber'] - 1
                            includeParticle.append(emantiltstackpartnum)
                            tiltParticlesData.append(tiltstackpartdata)
                            if self.params['numpart'] is not None and len(
                                    includeParticle) > self.params['numpart']:
                                break
                        else:
                            badmirror += 1
                else:
                    excludeParticle += 1
        else:
            ### method 2: get particles from alignment data
            alignpartq = appiondata.ApAlignParticleData()
            alignpartq['alignstack'] = self.alignstackdata
            alignpartdatas = alignpartq.query()
            apDisplay.printMsg("Sorting " + str(len(alignpartdatas)) +
                               " aligned particles")

            for alignpart in alignpartdatas:
                count += 1
                if count % 50 == 0:
                    sys.stderr.write(".")
                    memdiff = (mem.active() - startmem) / count / 1024.0
                    if memdiff > 3:
                        apDisplay.printColor(
                            "Memory increase: %d MB/part" % (memdiff), "red")
                #write to text file
                alignnum = alignpart['ref']['refnum'] - 1
                if (self.params['minscore'] is not None
                        and alignpart['score'] is not None
                        and alignpart['score'] < self.params['minscore']):
                    badscore += 1
                    continue
                if alignnum in self.classlist:
                    notstackpartnum = alignpart['stackpart']['particleNumber']
                    tiltstackpartdata = apTiltPair.getStackParticleTiltPair(
                        self.params['notstackid'], notstackpartnum,
                        self.params['tiltstackid'])
                    if tiltstackpartdata is None:
                        nopairParticle += 1
                    else:
                        inplane, mirror = self.getParticleInPlaneRotation(
                            tiltstackpartdata)
                        if (self.params['mirror'] == "all" or
                            (self.params['mirror'] == "no" and mirror is False)
                                or (self.params['mirror'] == "yes"
                                    and mirror is True)):
                            emantiltstackpartnum = tiltstackpartdata['particleNumber'] - 1
                            includeParticle.append(emantiltstackpartnum)
                            tiltParticlesData.append(tiltstackpartdata)
                            if self.params['numpart'] is not None and len(
                                    includeParticle) > self.params['numpart']:
                                break
                        else:
                            badmirror += 1
                else:
                    excludeParticle += 1
        ### end methods

        includeParticle.sort()
        ### messages
        if time.time() - t0 > 1.0:
            apDisplay.printMsg("\nSorting time: " +
                               apDisplay.timeString(time.time() - t0))
        apDisplay.printMsg("Keeping " + str(len(includeParticle)) +
                           " and excluding \n\t" + str(excludeParticle) +
                           " particles with " + str(nopairParticle) +
                           " unpaired particles")
        if badmirror > 0:
            apDisplay.printMsg("Particles with bad mirrors: %d" % (badmirror))
        if badscore > 0:
            apDisplay.printColor("Particles with bad scores: %d" % (badscore),
                                 "cyan")
        if len(includeParticle) < 1:
            apDisplay.printError("No particles were kept")
        memdiff = (mem.active() - startmem) / count / 1024.0
        if memdiff > 0.1:
            apDisplay.printColor("Memory increase: %.2f MB/part" % (memdiff),
                                 "red")

        return includeParticle, tiltParticlesData
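A hedged sketch of how a caller might consume the two return values (hypothetical caller code). Note that includeParticle is sorted before returning while tiltParticlesData keeps the order in which particles were accepted, so the two lists are not index-aligned.

# includeParticle: sorted, zero-based EMAN particle numbers in the tilted stack
# tiltParticlesData: the matching tilt-pair particle records, in acceptance order
includeParticle, tiltParticlesData = self.getGoodAlignParticles()
apDisplay.printMsg("kept %d tilt-pair particles" % len(includeParticle))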
Example #6
    def start(self):
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
        newstack = os.path.join(self.params['rundir'], stackdata['name'])
        apStack.checkForPreviousStack(newstack)

        includelist = []
        excludelist = []
        ### list of classes to be excluded
        if self.params['dropclasslist'] is not None:
            excludestrlist = self.params['dropclasslist'].split(",")
            for excludeitem in excludestrlist:
                excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))

        ### list of classes to be included
        if self.params['keepclasslist'] is not None:
            includestrlist = self.params['keepclasslist'].split(",")
            for includeitem in includestrlist:
                includelist.append(int(includeitem.strip()))

        ### or read from keepfile
        elif self.params['keepfile'] is not None:
            keeplistfile = open(self.params['keepfile'])
            for line in keeplistfile:
                if self.params['excludefrom'] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
            keeplistfile.close()
        apDisplay.printMsg("Include list: " + str(includelist))

        ### get particles from align or cluster stack
        apDisplay.printMsg("Querying database for particles")
        q0 = time.time()
        if self.params['alignid'] is not None:
            alignpartq = appiondata.ApAlignParticleData()
            alignpartq['alignstack'] = self.alignstackdata
            particles = alignpartq.query()
        elif self.params['clusterid'] is not None:
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq['clusterstack'] = self.clusterstackdata
            particles = clusterpartq.query()
        apDisplay.printMsg("Complete in " +
                           apDisplay.timeString(time.time() - q0))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        badscore = 0
        badshift = 0
        badspread = 0
        f = open("test.log", "w")
        count = 0
        for part in particles:
            count += 1
            #partnum = part['partnum']-1
            if 'alignparticle' in part:
                alignpart = part['alignparticle']
                classnum = int(part['refnum']) - 1
            else:
                alignpart = part
                classnum = int(part['ref']['refnum']) - 1
            emanstackpartnum = alignpart['stackpart']['particleNumber'] - 1

            ### check shift
            if self.params['maxshift'] is not None:
                shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
                if shift > self.params['maxshift']:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badshift += 1
                    continue

            if self.params['minscore'] is not None:
                ### check score
                if (alignpart['score'] is not None
                        and alignpart['score'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badscore += 1
                    continue

                ### check spread
                if (alignpart['spread'] is not None
                        and alignpart['spread'] < self.params['minscore']):
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" %
                            (count, emanstackpartnum, classnum))
                    badspread += 1
                    continue

            if includelist and classnum in includelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" %
                        (count, emanstackpartnum, classnum))
            elif excludelist and classnum not in excludelist:
                includeParticle.append(emanstackpartnum)
                f.write("%d\t%d\t%d\tinclude\n" %
                        (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\t%d\texclude\n" %
                        (count, emanstackpartnum, classnum))

        f.close()
        includeParticle.sort()
        if badshift > 0:
            apDisplay.printMsg("%d particles had a large shift" % (badshift))
        if badscore > 0:
            apDisplay.printMsg("%d particles had a low score" % (badscore))
        if badspread > 0:
            apDisplay.printMsg("%d particles had a low spread" % (badspread))
        apDisplay.printMsg("Keeping " + str(len(includeParticle)) +
                           " and excluding " + str(excludeParticle) +
                           " particles")

        #print includeParticle

        ### write kept particles to file
        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params['keepfile'])
        kf = open(self.params['keepfile'], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params['description'] += (
                " ... %d particle substack with %s classes excluded" %
                (numparticles, self.params['dropclasslist']))
        elif includelist:
            self.params['description'] += (
                " ... %d particle substack with %s classes included" %
                (numparticles, self.params['keepclasslist']))

        ### create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=self.params['savebad'])

        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        apStack.averageStack(stack=newstack)
        if self.params['commit'] is True:
            apStack.commitSubStack(self.params)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
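For reference, the keepfile written above and passed to apStack.makeNewStack is a plain newline-separated list of zero-based (EMAN-style) particle numbers, exactly as the loop writes it; a minimal sketch of its contents:

# keepfile-<timestamp>.list
# 0
# 3
# 17
# one zero-based particle number per line, consumed by apStack.makeNewStack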
Example #7
    def insertRotKerDenSOM(self):
        inserttime = time.time()
        ### Preliminary data
        projectid = apProject.getProjectIdFromAlignStackId(
            self.params['alignstackid'])
        alignstackdata = appiondata.ApAlignStackData.direct_query(
            self.params['alignstackid'])
        numclass = self.params['xdim'] * self.params['ydim']
        pathdata = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))

        ### rotKerDen SOM Params object
        rotkerdenson = appiondata.ApRotKerDenSOMParamsData()
        #rotkerdenson['mask_diam'] = 2.0*self.params['maskrad']
        rotkerdenson['x_dimension'] = self.params['xdim']
        rotkerdenson['y_dimension'] = self.params['ydim']
        #rotkerdenson['convergence'] = self.params['converge']
        rotkerdenson['run_seconds'] = time.time() - self.t0
        rotkerdenson['initregulfact'] = self.params['initregulfact']
        rotkerdenson['finalregulfact'] = self.params['finalregulfact']
        rotkerdenson['incrementregulfact'] = self.params['incrementregulfact']
        rotkerdenson['spectrainnerradius'] = self.params['spectrainnerradius']
        rotkerdenson['spectraouterradius'] = self.params['spectraouterradius']
        rotkerdenson['spectralowharmonic'] = self.params['spectralowharmonic']
        rotkerdenson['spectrahighharmonic'] = self.params['spectrahighharmonic']

        ### Analysis Run object
        analysisq = appiondata.ApAlignAnalysisRunData()
        analysisq['runname'] = self.params['runname']
        analysisq['path'] = pathdata
        analysisq['description'] = self.params['description']
        analysisq['alignstack'] = alignstackdata
        analysisq['hidden'] = False

        ### Clustering Run object
        clusterrunq = appiondata.ApClusteringRunData()
        clusterrunq['runname'] = self.params['runname']
        clusterrunq['description'] = self.params['description']
        clusterrunq['boxsize'] = alignstackdata['boxsize']
        clusterrunq['pixelsize'] = alignstackdata['pixelsize']
        clusterrunq['num_particles'] = self.params['numpart']
        clusterrunq['alignstack'] = alignstackdata
        clusterrunq['analysisrun'] = analysisq
        clusterrunq['rotkerdenparams'] = rotkerdenson

        ### Clustering Stack object
        #Stack with cluster averages??????
        template = os.path.join(self.params['rundir'],
                                self.spectraTemporalFilesMask + ".png")
        files = glob.glob(template)
        imglist = []
        for listname in files:
            a = apImage.readPNG(listname)
            imglist.append(a)
        apImagicFile.writeImagic(imglist,
                                 "rotkerdenstack" + self.timestamp + ".hed")
        clusterstackq = appiondata.ApClusteringStackData()
        clusterstackq['avg_imagicfile'] = "rotkerdenstack" + self.timestamp + ".hed"
        clusterstackq['num_classes'] = numclass
        clusterstackq['clusterrun'] = clusterrunq
        clusterstackq['path'] = pathdata
        clusterstackq['hidden'] = False
        imagicfile = os.path.join(self.params['rundir'],
                                  clusterstackq['avg_imagicfile'])
        if not os.path.isfile(imagicfile):
            apDisplay.printError("could not find average stack file: " +
                                 imagicfile)

        ### looping over clusters
        apDisplay.printColor(
            "Inserting particle classification data, please wait", "cyan")
        numclass = self.params['xdim'] * self.params['ydim']
        for i in range(numclass):
            classnum = i + 1
            classroot = "%s.%d" % (self.timestamp, classnum - 1)
            classdocfile = os.path.join(self.params['rundir'], classroot)
            partlist = self.readClassDocFile(classdocfile)
            ### Clustering Particle object
            # MRC image for each code node but plot or image
            clusterrefq = appiondata.ApClusteringReferenceData()
            clusterrefq['refnum'] = classnum
            clusterrefq['avg_mrcfile'] = classroot + ".mrc"
            clusterrefq['clusterrun'] = clusterrunq
            clusterrefq['path'] = pathdata
            clusterrefq['num_particles'] = len(partlist)

            ### looping over particles
            #which particles belong to which code node
            sys.stderr.write(".")
            for partnum in partlist:
                alignpartdata = self.getAlignParticleData(
                    partnum, alignstackdata)

                ### Clustering Particle objects
                clusterpartq = appiondata.ApClusteringParticleData()
                clusterpartq['clusterstack'] = clusterstackq
                clusterpartq['alignparticle'] = alignpartdata
                clusterpartq['partnum'] = partnum
                clusterpartq['refnum'] = classnum
                clusterpartq['clusterreference'] = clusterrefq

                ### finally we can insert parameters
                if self.params['commit'] is True:
                    clusterpartq.insert()
        sys.stderr.write("\n")
        apDisplay.printMsg("Insertion complete in %s" %
                           (apDisplay.timeString(time.time() - inserttime)))
        def getClusterParticles(self):
                """
                get selected particles from cluster stack
                """
                ### list of classes to be excluded
                excludelist = []
                if self.params['excludelist'] is not None:
                        excludestrlist = self.params['excludelist'].split(",")
                        for excludeitem in excludestrlist:
                                excludelist.append(int(excludeitem.strip()))
                apDisplay.printMsg("Exclude list: "+str(excludelist))

                ### list of classes to be included
                includelist = []
                if self.params['includelist'] is not None:
                        includestrlist = self.params['includelist'].split(",")
                        for includeitem in includestrlist:
                                includelist.append(int(includeitem.strip()))
                apDisplay.printMsg("Include list: "+str(includelist))

                apDisplay.printMsg("Querying for clustered particles")
                clusterpartq = appiondata.ApClusteringParticleData()
                clusterpartq['clusterstack'] = self.clusterstackdata
                particles = clusterpartq.query()
                apDisplay.printMsg("Sorting "+str(len(particles))+" clustered particles")

                ### write included particles to text file
                includeParticle = []
                excludeParticle = 0
                #f = open("test.log", "w")
                count = 0
                for part in particles:
                        count += 1
                        if count % 250 == 0:
                                sys.stderr.write(".")
                        alignpart = part['alignparticle']
                        classnum = int(part['refnum'])-1
                        emanstackpartnum = alignpart['stackpart']['particleNumber']-1

                        if includelist and classnum not in includelist:
                                excludeParticle += 1
                                #f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                        elif excludelist and classnum in excludelist:
                                excludeParticle += 1
                                #f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
                        else:
                                includeParticle.append(emanstackpartnum)
                                #f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))

                #f.close()
                sys.stderr.write("\n")
                includeParticle.sort()
                apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

                ### write kept particles to file
                self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
                apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
                kf = open(self.params['keepfile'], "w")
                for partnum in includeParticle:
                        kf.write(str(partnum)+"\n")
                kf.close()

                ### get number of particles
                numparticles = len(includeParticle)

                ### create the new sub stack
                #stackdata = apStack.getOnlyStackData(self.params['stackid'])
                #oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
                oldstack = os.path.join(self.alignstackdata['path']['path'], self.alignstackdata['imagicfile'])
                newstack = os.path.join(self.params['rundir'], "rawparticles.hed")
                apFile.removeStack(newstack)
                apStack.makeNewStack(oldstack, newstack, self.params['keepfile'])

                if not os.path.isfile(newstack):
                        apDisplay.printError("No stack was created")

                return newstack, numparticles
	def insertClusterStack(self, numclusters):
		pathdata = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))

		### clusterStack object
		clusterstackq = appiondata.ApClusteringStackData()
		clusterstackq['num_classes'] = numclusters
		clusterstackq['avg_imagicfile'] = self.params['classumfile']
		clusterstackq['clusterrun'] = self.clusterrun
		clusterstackq['ignore_images'] = self.params['ignore_images']
		clusterstackq['ignore_members'] = self.params['ignore_members']
		clusterstackq['num_factors'] = self.params['num_eigenimages']
		clusterstackq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
		clusterstackq['hidden'] = False

		### first insertion into database, if commit is checked
		apDisplay.printMsg("inserting clustering stack into database")
		if self.params['commit'] is True:
			clusterstackq.insert()
		else:
			apDisplay.printWarning("not committing results to DB")

		### inserting particles into database
		if self.params['commit'] is True:
			apDisplay.printColor("Inserting particle classification data, please wait", "cyan")

		### read .cls file that contains information regarding particle classification
		clsfile = os.path.join(self.params['rundir'], self.params['classfile'])
		cls_particle_data, classqualities, particles_in_class = self.readClassesFile(clsfile, numclusters)

		for i in range(numclusters):
			### insert the particles
			cls_num = i + 1
			cls_quality = classqualities[i]
			num_particles = particles_in_class[i]
			particles = cls_particle_data[i]
			
#			### calculate SSNR resolution for class
#			partlist = ""
#			stackfile = self.params['classumfile']
#			self.calcResolution(partlist, stackfile, self.params['apix'])
			
			### Clustering Particle object
			clusterrefq = appiondata.ApClusteringReferenceData()
			clusterrefq['refnum'] = cls_num
			clusterrefq['clusterrun'] = self.clusterrun
			clusterrefq['path'] = pathdata
			clusterrefq['num_particles'] = num_particles			
			
			for partnum in particles:
				alignpartdata = self.getAlignParticleData(partnum)
				cpartq = appiondata.ApClusteringParticleData()
				cpartq['clusterstack'] = clusterstackq
				cpartq['alignparticle'] = alignpartdata
				cpartq['refnum'] = cls_num
				cpartq['partnum'] = partnum
				cpartq['clusterreference'] = clusterrefq
				cpartq['imagic_cls_quality'] = cls_quality
				if self.params['commit'] is True:
					cpartq.insert()
				lastnum = partnum
		return
	def start(self):
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
		newstack = os.path.join(self.params['rundir'], stackdata['name'])
		apStack.checkForPreviousStack(newstack)

		includelist = []
		excludelist = []
		### list of classes to be excluded
		if self.params['dropclasslist'] is not None:
			excludestrlist = self.params['dropclasslist'].split(",")
			for excludeitem in excludestrlist:
				excludelist.append(int(excludeitem.strip()))
		apDisplay.printMsg("Exclude list: "+str(excludelist))

		### list of classes to be included
		if self.params['keepclasslist'] is not None:
			includestrlist = self.params['keepclasslist'].split(",")
			for includeitem in includestrlist:
				includelist.append(int(includeitem.strip()))

		### or read from keepfile
		elif self.params['keepfile'] is not None:
			keeplistfile = open(self.params['keepfile'])
			for line in keeplistfile:
				if self.params['excludefrom'] is True:
					excludelist.append(int(line.strip()))
				else:
					includelist.append(int(line.strip()))
			keeplistfile.close()
		apDisplay.printMsg("Include list: "+str(includelist))

		### get particles from align or cluster stack
		apDisplay.printMsg("Querying database for particles")
		q0 = time.time()

		if self.params['alignid'] is not None:
			# DIRECT SQL STUFF
			sqlcmd = "SELECT " + \
				"apd.partnum, " + \
				"apd.xshift, apd.yshift, " + \
				"apd.rotation, apd.mirror, " + \
				"apd.spread, apd.correlation, " + \
				"apd.score, apd.bad, " + \
				"spd.particleNumber, " + \
				"ard.refnum "+ \
				"FROM ApAlignParticleData apd " + \
				"LEFT JOIN ApStackParticleData spd ON " + \
				"(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) " + \
				"LEFT JOIN ApAlignReferenceData ard ON" + \
				"(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) " + \
				"WHERE `REF|ApAlignStackData|alignstack` = %i"%(self.params['alignid'])
			# These are AlignParticles
			particles = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)

		elif self.params['clusterid'] is not None:
			clusterpartq = appiondata.ApClusteringParticleData()
			clusterpartq['clusterstack'] = self.clusterstackdata
			# These are ClusteringParticles
			particles = clusterpartq.query()
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-q0)))

		### write included particles to text file
		includeParticle = []
		excludeParticle = 0
		badscore = 0
		badshift = 0
		badspread = 0

		f = open("test.log", "w")
		count = 0
		t0 = time.time()
		apDisplay.printMsg("Parsing particle information")

		# find out if there is alignparticle info:
		is_cluster_p = False
		# alignparticle is a key of any particle in particles if the latter is
		# a ClusteringParticle
		if 'alignparticle' in particles[0]:
			is_cluster_p = True

		for part in particles:
			count += 1
			if is_cluster_p:
				# alignpart is an item of ClusteringParticle
				alignpart = part['alignparticle']
				try:
					classnum = int(part['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
					classnum = None
				emanstackpartnum = alignpart['stackpart']['particleNumber']-1
			else:
				# particle has info from AlignedParticle as results of direct query
				alignpart = part
				try:
					classnum = int(alignpart['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
					classnum = None
				emanstackpartnum = int(alignpart['particleNumber'])-1

			### check shift
			if self.params['maxshift'] is not None:
				shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
				if shift > self.params['maxshift']:
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badshift += 1
					continue

			if self.params['minscore'] is not None:
				### check score
				if ( alignpart['score'] is not None
				 and alignpart['score'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badscore += 1
					continue

				### check spread
				if ( alignpart['spread'] is not None
				 and alignpart['spread'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badspread += 1
					continue

			if classnum is not None:
				if includelist and (classnum in includelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				elif excludelist and classnum not in excludelist:
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				else:
					excludeParticle += 1
					f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
			else:
				excludeParticle += 1
				f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
			
		f.close()

		includeParticle.sort()
		if badshift > 0:
			apDisplay.printMsg("%d particles had a large shift"%(badshift))
		if badscore > 0:
			apDisplay.printMsg("%d particles had a low score"%(badscore))
		if badspread > 0:
			apDisplay.printMsg("%d particles had a low spread"%(badspread))
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-t0)))
		apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### get number of particles
		numparticles = len(includeParticle)
		if excludelist:
			self.params['description'] += ( " ... %d particle substack with %s classes excluded"
				% (numparticles, self.params['dropclasslist']))
		elif includelist:
			self.params['description'] += ( " ... %d particle substack with %s classes included"
				% (numparticles, self.params['keepclasslist']))

		outavg = os.path.join(self.params['rundir'],"average.mrc")

		### create the new sub stack
		# first check if virtual stack
		if not os.path.isfile(oldstack):
			vstackdata=apStack.getVirtualStackParticlesFromId(self.params['stackid'])
			vparts = vstackdata['particles']
			oldstack = vstackdata['filename']
			# get subset of virtualstack
			vpartlist = [int(vparts[p]['particleNumber'])-1 for p in includeParticle]
	
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params['savebad'])

			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=vpartlist)
		else:
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=includeParticle)

		if self.params['writefile'] is True and not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")

		if self.params['commit'] is True:
			apStack.commitSubStack(self.params,included=includeParticle)
			newstackid = apStack.getStackIdFromPath(newstack)
			apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)