Example #1
	def checkConflicts(self):
		if self.params['stackid'] is None:
			apDisplay.printError("stack id was not defined")
		if self.params['start'] is None:
			apDisplay.printError("a number of starting classes was not provided")
		if self.params['end'] is None:
			apDisplay.printError("a number of ending classes was not provided")
		if self.params['runname'] is None:
			apDisplay.printError("run name was not defined")
		self.stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)
		stackfile = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
		# check for virtual stack
		self.params['virtualdata'] = None
		if not os.path.isfile(stackfile):
			vstackdata = apStack.getVirtualStackParticlesFromId(self.params['stackid'])
			npart = len(vstackdata['particles'])
			self.params['virtualdata'] = vstackdata
		else:
			npart = apFile.numImagesInStack(stackfile)

		if self.params['numpart'] is None:
			self.params['numpart'] = npart
		elif self.params['numpart'] > npart:
			apDisplay.printError("trying to use more particles "+str(self.params['numpart'])
				+" than available "+str(apFile.numImagesInStack(stackfile)))

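		# workingboxsize/workingmask are the box and mask sizes in binned pixels;
		# the default mask radius sits just inside the edge of the unbinned box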
		self.boxsize = apStack.getStackBoxsize(self.params['stackid'])
		self.workingboxsize = math.floor(self.boxsize/self.params['bin'])
		if not self.params['mask']:
			self.params['mask'] = (self.boxsize/2)-2
		self.workingmask = math.floor(self.params['mask']/self.params['bin'])
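		# the 'imagic' mramethod needs a local IMAGIC installation; record its
		# root path and version for later use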
		if self.params['mramethod'] == 'imagic':
			self.imagicroot = apIMAGIC.checkImagicExecutablePath()
			self.imagicversion = apIMAGIC.getImagicVersion(self.imagicroot)
    def checkConflicts(self):
        if self.params['stackid'] is None:
            apDisplay.printError("stack id was not defined")
        if self.params['numrefs'] is None:
            apDisplay.printError("a number of classes was not provided")
        maxparticles = 500000
        if self.params['numpart'] > maxparticles:
            apDisplay.printError("too many particles requested, max: " +
                                 str(maxparticles) + " requested: " +
                                 str(self.params['numpart']))
        self.stackdata = apStack.getOnlyStackData(self.params['stackid'],
                                                  msg=False)
        stackfile = os.path.join(self.stackdata['path']['path'],
                                 self.stackdata['name'])
        # check for virtual stack
        self.params['virtualdata'] = None
        if not os.path.isfile(stackfile):
            vstackdata = apStack.getVirtualStackParticlesFromId(
                self.params['stackid'])
            npart = len(vstackdata['particles'])
            self.params['virtualdata'] = vstackdata
        else:
            npart = apFile.numImagesInStack(stackfile)
        if self.params['numpart'] > npart:
            apDisplay.printError("trying to use more particles " +
                                 str(self.params['numpart']) +
                                 " than available " +
                                 str(apFile.numImagesInStack(stackfile)))

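        # clipsize defaults to the largest even box dimension after binning; a
        # user-requested clipsize may only be smaller, never larger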
        boxsize = apStack.getStackBoxsize(self.params['stackid'])
        self.clipsize = int(math.floor(
            boxsize / float(self.params['bin'] * 2))) * 2
        if self.params['clipsize'] is not None:
            if self.params['clipsize'] > self.clipsize:
                apDisplay.printError("requested clipsize is too big %d > %d" %
                                     (self.params['clipsize'], self.clipsize))
            self.clipsize = self.params['clipsize']
        if self.params['numpart'] is None:
            self.params['numpart'] = npart

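        # CL2D is run through MPI only, so an mpirun binary must exist and the
        # total processor count (nodes * ppn) must be at least 2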
        self.mpirun = self.checkMPI()
        if self.mpirun is None:
            apDisplay.printError("There is no MPI installed")

        if self.params['nproc'] is None:
            self.params['nproc'] = self.params['nodes'] * self.params['ppn']
        if self.params['nproc'] < 2:
            apDisplay.printError(
                "Only the MPI version of CL2D is currently supported, must run with > 1 CPU"
            )
    def checkConflicts(self):
        if self.params['stackid'] is None:
            apDisplay.printError("stack id was not defined")
        #if self.params['description'] is None:
        #	apDisplay.printError("run description was not defined")
        if self.params['numrefs'] is None:
            apDisplay.printError("a number of classes was not provided")
        if self.params['runname'] is None:
            apDisplay.printError("run name was not defined")
        if self.params['fastmode'] not in self.fastmodes:
            apDisplay.printError("fast mode must be one of: " +
                                 str(self.fastmodes))
        if self.params['converge'] not in self.convergemodes:
            apDisplay.printError("converge mode must be one of: " +
                                 str(self.convergemodes))
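        # self.fastmodes and self.convergemodes list the allowed values for these
        # options and are defined elsewhere in this class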
        maxparticles = 500000
        if self.params['numpart'] > maxparticles:
            apDisplay.printError("too many particles requested, max: " +
                                 str(maxparticles) + " requested: " +
                                 str(self.params['numpart']))
        self.stackdata = apStack.getOnlyStackData(self.params['stackid'],
                                                  msg=False)
        stackfile = os.path.join(self.stackdata['path']['path'],
                                 self.stackdata['name'])
        # check for virtual stack
        self.params['virtualdata'] = None
        if not os.path.isfile(stackfile):
            vstackdata = apStack.getVirtualStackParticlesFromId(
                self.params['stackid'])
            npart = len(vstackdata['particles'])
            self.params['virtualdata'] = vstackdata
        else:
            npart = apFile.numImagesInStack(stackfile)

        if self.params['numpart'] > npart:
            apDisplay.printError("trying to use more particles " +
                                 str(self.params['numpart']) +
                                 " than available " +
                                 str(apFile.numImagesInStack(stackfile)))

        boxsize = apStack.getStackBoxsize(self.params['stackid'])
        self.clipsize = int(math.floor(
            boxsize / float(self.params['bin'] * 2))) * 2
        if self.params['clipsize'] is not None:
            if self.params['clipsize'] > self.clipsize:
                apDisplay.printError("requested clipsize is too big %d > %d" %
                                     (self.params['clipsize'], self.clipsize))
            self.clipsize = self.params['clipsize']
        if self.params['numpart'] is None:
            self.params['numpart'] = npart
if __name__ == "__main__":
    params = parseOptions()
    apProject.setDBfromProjectId(params["projectid"])

    stackdata = apStack.getOnlyStackData(params["stackid"])
    stackpath = stackdata["path"]["path"]
    # create the stack directory if it doesn't exist
    if not os.path.isdir(stackpath):
        os.makedirs(stackpath)
    fname = os.path.join(stackpath, stackdata["name"])

    # check if stack file already exists
    if os.path.isfile(fname):
        apDisplay.printError("file: '%s' already exists" % fname)

    vstackdata = apStack.getVirtualStackParticlesFromId(params["stackid"])
    plist = [int(p["particleNumber"]) - 1 for p in vstackdata["particles"]]

    a = proc2dLib.RunProc2d()
    a.setValue("infile", vstackdata["filename"])
    a.setValue("outfile", fname)
    a.setValue("list", plist)
    a.setValue("apix", apStack.getStackPixelSizeFromStackId(params["stackid"]))

    apDisplay.printMsg("generating stack: '%s' with %i particles" % (fname, len(plist)))
    a.run()

    outavg = os.path.join(stackpath, "average.mrc")
    if not os.path.isfile(outavg):
        apStack.averageStack(stack=fname, outfile=outavg)
if __name__ == "__main__":
    params = parseOptions()
    apProject.setDBfromProjectId(params['projectid'])

    stackdata = apStack.getOnlyStackData(params['stackid'])
    stackpath = stackdata['path']['path']
    # generate stack if it doesn't exist.
    if not os.path.isdir(stackpath):
        os.makedirs(stackpath)
    fname = os.path.join(stackpath, stackdata['name'])

    # check if stack file already exists
    if os.path.isfile(fname):
        apDisplay.printError("file: '%s' already exists" % fname)

    vstackdata = apStack.getVirtualStackParticlesFromId(params['stackid'])
    plist = [int(p['particleNumber']) - 1 for p in vstackdata['particles']]

    a = proc2dLib.RunProc2d()
    a.setValue('infile', vstackdata['filename'])
    a.setValue('outfile', fname)
    a.setValue('list', plist)
    a.setValue('apix', apStack.getStackPixelSizeFromStackId(params['stackid']))

    apDisplay.printMsg("generating stack: '%s' with %i particles" %
                       (fname, len(plist)))
    a.run()

    outavg = os.path.join(stackpath, "average.mrc")
    if not os.path.isfile(outavg):
        apStack.averageStack(stack=fname, outfile=outavg)
Example #8
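# builds a montage of per-bin particle averages laid out on a mean vs. stdev grid;
# the helpers meanStdevToKey, averageSubStack, printPlot, and assemblePngs are
# defined elsewhere in the same module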
def makeStackMeanPlot(stackid, gridpoints=16):
	if gridpoints > 30:
		apDisplay.printError("Too large of a grid")
	apDisplay.printMsg("creating Stack Mean Plot montage for stackid: "+str(stackid))
	t0 = time.time()
	### big stacks are too slow
	boxsize = apStack.getStackBoxsize(stackid)
	bin = 1
	if boxsize is not None:
		while boxsize/bin > 128:
			bin+=1
	apDisplay.printMsg("binning stack by "+str(bin))
	stackdata = apStack.getOnlyStackData(stackid, msg=False)
	stackpath = stackdata['path']['path']
	stackfile = os.path.join(stackpath, stackdata['name'])
	# if no stackfile, likely virtual stack
	if not os.path.isfile(stackfile):
		apDisplay.printMsg("possible virtual stack, searching for original stack")
		vstackdata = apStack.getVirtualStackParticlesFromId(stackid)
		partdatas = vstackdata['particles']
		stackfile = vstackdata['filename']
		stackdata = apStack.getOnlyStackData(vstackdata['stackid'], msg=False)
	# otherwise get stack info
	else:
		# get stats from stack:
		sqlcmd = "SELECT " + \
			"particleNumber, mean, stdev " + \
			"FROM ApStackParticleData " + \
			"WHERE `REF|ApStackData|stack` = %i"%(stackid)
		partdatas = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)
	# to test on a subset of particles, uncomment the next line
	#partdatas = partdatas[:500]
	apFile.removeFile("montage"+str(stackid)+".png")

	### find limits
	limits = {'minmean': 1e12, 'maxmean': -1e12, 'minstdev': 1e12, 'maxstdev': -1e12,}
	for partdata in partdatas:
		if partdata['mean'] is None:
			continue
		mean = partdata['mean']
		stdev = partdata['stdev']
		if mean < limits['minmean']:
			limits['minmean'] = mean
		if mean > limits['maxmean']:
			limits['maxmean'] = mean
		if stdev < limits['minstdev']:
			limits['minstdev'] = stdev
		if stdev > limits['maxstdev']:
			limits['maxstdev'] = stdev
	if limits['minmean'] > 1e11:
		apDisplay.printWarning("particles have no mean values in database")
		return
	apDisplay.printMsg(str(limits))

	### create particle bins
	partlists = {}
	for i in range(gridpoints):
		for j in range(gridpoints):
			key = ("%02dx%02d"%(i,j))
			partlists[key] = []

	### sort particles into bins
	for partdata in partdatas:
		key = meanStdevToKey(partdata['mean'], partdata['stdev'], limits, gridpoints)
		partnum = int(partdata['particleNumber'])
		partlists[key].append(partnum)

	printPlot(partlists, gridpoints)

	### createStackAverages
	keys = sorted(partlists.keys())
	count = 0
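	# backspace string used to rewrite the stderr progress line in place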
	backs = "\b\b\b\b\b\b\b\b\b\b\b"
	montagestack = os.path.join(stackpath,"montage"+str(stackid)+".hed")
	apFile.removeStack(montagestack)
	mystack = []
	for key in keys:
		count += 1
		sys.stderr.write(backs+backs+backs+backs)
		sys.stderr.write("% 3d of % 3d, %s: % 6d"%(count, len(keys), key, len(partlists[key])))
		avgimg = averageSubStack(partlists[key], stackfile, bin)
		if avgimg is not False:
			avgimg = numpy.fliplr(avgimg)
			mystack.append(avgimg)
	apImagicFile.writeImagic(mystack, montagestack)
	sys.stderr.write("\n")
	assemblePngs(keys, str(stackid), montagestack)
	apDisplay.printMsg("/bin/mv -v montage"+str(stackid)+".??? "+stackpath)
	apDisplay.printMsg("finished in "+apDisplay.timeString(time.time()-t0))
    def start(self):
        ### new stack path
        stackdata = apStack.getOnlyStackData(self.params["stackid"])
        oldstack = os.path.join(stackdata["path"]["path"], stackdata["name"])
        newstack = os.path.join(self.params["rundir"], stackdata["name"])
        apStack.checkForPreviousStack(newstack)

        includelist = []
        excludelist = []
        ### list of classes to be excluded
        if self.params["dropclasslist"] is not None:
            excludestrlist = self.params["dropclasslist"].split(",")
            for excludeitem in excludestrlist:
                excludelist.append(int(excludeitem.strip()))
        apDisplay.printMsg("Exclude list: " + str(excludelist))

        ### list of classes to be included
        if self.params["keepclasslist"] is not None:
            includestrlist = self.params["keepclasslist"].split(",")
            for includeitem in includestrlist:
                includelist.append(int(includeitem.strip()))

                ### or read from keepfile
        elif self.params["keepfile"] is not None:
            keeplistfile = open(self.params["keepfile"])
            for line in keeplistfile:
                if self.params["excludefrom"] is True:
                    excludelist.append(int(line.strip()))
                else:
                    includelist.append(int(line.strip()))
            keeplistfile.close()
        apDisplay.printMsg("Include list: " + str(includelist))

        ### get particles from align or cluster stack
        apDisplay.printMsg("Querying database for particles")
        q0 = time.time()

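        # a direct SQL query is used for speed: it joins the alignment particles
        # to their stack particle numbers and reference (class) numbers in one pass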
        if self.params["alignid"] is not None:
            # DIRECT SQL STUFF
            sqlcmd = (
                "SELECT "
                + "apd.partnum, "
                + "apd.xshift, apd.yshift, "
                + "apd.rotation, apd.mirror, "
                + "apd.spread, apd.correlation, "
                + "apd.score, apd.bad, "
                + "spd.particleNumber, "
                + "ard.refnum "
                + "FROM ApAlignParticleData apd "
                + "LEFT JOIN ApStackParticleData spd ON "
                + "(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) "
                + "LEFT JOIN ApAlignReferenceData ard ON"
                + "(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) "
                + "WHERE `REF|ApAlignStackData|alignstack` = %i" % (self.params["alignid"])
            )
            # These are AlignParticles
            particles = sinedon.directq.complexMysqlQuery("appiondata", sqlcmd)

        elif self.params["clusterid"] is not None:
            clusterpartq = appiondata.ApClusteringParticleData()
            clusterpartq["clusterstack"] = self.clusterstackdata
            # These are ClusteringParticles
            particles = clusterpartq.query()
        apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - q0)))

        ### write included particles to text file
        includeParticle = []
        excludeParticle = 0
        badscore = 0
        badshift = 0
        badspread = 0

        f = open("test.log", "w")
        count = 0
        t0 = time.time()
        apDisplay.printMsg("Parsing particle information")

        # find out if there is alignparticle info:
        is_cluster_p = False
        # 'alignparticle' is a key of each particle in particles if the latter
        # are ClusteringParticles
        if "alignparticle" in particles[0]:
            is_cluster_p = True

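        # refnum and particleNumber are 1-based in the database; both are
        # converted to 0-based (EMAN-style) numbering below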
        for part in particles:
            count += 1
            if is_cluster_p:
                # alignpart is an item of ClusteringParticle
                alignpart = part["alignparticle"]
                try:
                    classnum = int(part["refnum"]) - 1
                except:
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    classnum = None
                emanstackpartnum = alignpart["stackpart"]["particleNumber"] - 1
            else:
                # this particle carries the aligned-particle fields returned by the direct SQL query
                alignpart = part
                try:
                    classnum = int(alignpart["refnum"]) - 1
                except:
                    apDisplay.printWarning("particle %d was not put into any class" % (part["partnum"]))
                    classnum = None
                emanstackpartnum = int(alignpart["particleNumber"]) - 1

            ### check shift
            if self.params["maxshift"] is not None:
                shift = math.hypot(alignpart["xshift"], alignpart["yshift"])
                if shift > self.params["maxshift"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badshift += 1
                    continue

            if self.params["minscore"] is not None:
                ### check score
                if alignpart["score"] is not None and alignpart["score"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badscore += 1
                    continue

                ### check spread
                if alignpart["spread"] is not None and alignpart["spread"] < self.params["minscore"]:
                    excludeParticle += 1
                    if classnum is not None:
                        f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
                    else:
                        f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))
                    badspread += 1
                    continue

            if classnum is not None:
                if includelist and (classnum in includelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                elif excludelist and not (classnum in excludelist):
                    includeParticle.append(emanstackpartnum)
                    f.write("%d\t%d\t%d\tinclude\n" % (count, emanstackpartnum, classnum))
                else:
                    excludeParticle += 1
                    f.write("%d\t%d\t%d\texclude\n" % (count, emanstackpartnum, classnum))
            else:
                excludeParticle += 1
                f.write("%d\t%d\texclude\n" % (count, emanstackpartnum))

        f.close()

        includeParticle.sort()
        if badshift > 0:
            apDisplay.printMsg("%d paricles had a large shift" % (badshift))
        if badscore > 0:
            apDisplay.printMsg("%d paricles had a low score" % (badscore))
        if badspread > 0:
            apDisplay.printMsg("%d paricles had a low spread" % (badspread))
        apDisplay.printMsg("Completed in %s\n" % (apDisplay.timeString(time.time() - t0)))
        apDisplay.printMsg(
            "Keeping " + str(len(includeParticle)) + " and excluding " + str(excludeParticle) + " particles"
        )

        ### write kept particles to file
        self.params["keepfile"] = os.path.join(self.params["rundir"], "keepfile-" + self.timestamp + ".list")
        apDisplay.printMsg("writing to keepfile " + self.params["keepfile"])
        kf = open(self.params["keepfile"], "w")
        for partnum in includeParticle:
            kf.write(str(partnum) + "\n")
        kf.close()

        ### get number of particles
        numparticles = len(includeParticle)
        if excludelist:
            self.params["description"] += " ... %d particle substack with %s classes excluded" % (
                numparticles,
                self.params["dropclasslist"],
            )
        elif includelist:
            self.params["description"] += " ... %d particle substack with %s classes included" % (
                numparticles,
                self.params["keepclasslist"],
            )

        outavg = os.path.join(self.params["rundir"], "average.mrc")

        ### create the new sub stack
        # first check if virtual stack
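        # for a virtual stack, includeParticle indexes the virtual particle list
        # and must be remapped to particle numbers in the original stack file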
        if not os.path.isfile(oldstack):
            vstackdata = apStack.getVirtualStackParticlesFromId(self.params["stackid"])
            vparts = vstackdata["particles"]
            oldstack = vstackdata["filename"]
            # get subset of virtualstack
            vpartlist = [int(vparts[p]["particleNumber"]) - 1 for p in includeParticle]

            if self.params["writefile"] is True:
                apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params["savebad"])

            apStack.averageStack(stack=oldstack, outfile=outavg, partlist=vpartlist)
        else:
            if self.params["writefile"] is True:
                apStack.makeNewStack(oldstack, newstack, self.params["keepfile"], bad=self.params["savebad"])
            apStack.averageStack(stack=oldstack, outfile=outavg, partlist=includeParticle)

        if self.params["writefile"] is True and not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")

        if self.params["commit"] is True:
            apStack.commitSubStack(self.params, included=includeParticle)
            newstackid = apStack.getStackIdFromPath(newstack)
            apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)