Example #1
	def runCommand(self, cmd):
		t0 = time.time()
		if self.params['showcmd'] is True:
			apDisplay.printColor("###################################", "magenta")
			sys.stderr.write(
				apDisplay.colorString("COMMAND: \n","magenta")
				+apDisplay.colorString(cmd, "cyan")+"\n")
			apDisplay.printColor("###################################", "cyan")
		try:
			if self.params['verbose'] is False:
				logf = open('testsuite-programs.log' ,'a')
				proc = subprocess.Popen(cmd, shell=True, 
					stdout=logf, stderr=logf)
			else:
				proc = subprocess.Popen(cmd, shell=True)
			proc.wait()
		except:
			apDisplay.printError("could not run command: "+cmd)
		runtime = time.time() - t0
		if self.params['showcmd'] is True:
			apDisplay.printColor("###################################", "cyan")
			apDisplay.printColor("command ran in "+apDisplay.timeString(runtime), "cyan")
		if runtime < 1:
			apDisplay.printError("command runtime was too short: "
				+apDisplay.timeString(runtime))
		elif runtime < 10:
			apDisplay.printWarning("command runtime was very short: "
				+apDisplay.timeString(runtime))
			return False

		if self.params['verbose'] is False:
			logf.close()

		return True
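
The pattern runCommand wraps above can be reduced to a small standalone sketch using only the standard library: run a shell command, send its output to a log file, and measure the elapsed wall time. The command string and log file name below are placeholders.

# Minimal standalone sketch of the runCommand pattern; the command and
# log file name are placeholders, not values from any real test suite.
import subprocess
import sys
import time

def runLogged(cmd, logname="testsuite-programs.log"):
	t0 = time.time()
	logf = open(logname, 'a')
	proc = subprocess.Popen(cmd, shell=True, stdout=logf, stderr=logf)
	proc.wait()
	logf.close()
	return time.time() - t0

elapsed = runLogged("echo hello")
sys.stderr.write("command ran in %.2f seconds\n" % (elapsed))
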
	def start(self):
		self.runtime = 0
		self.partlist = []
		self.stack = {}
		self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
		self.stack['file'] = os.path.join(self.stack['data']['path']['path'], self.stack['data']['name'])

		self.checkNoRefRun()

		### convert stack to spider
		spiderstack = self.createSpiderFile()

		### create initialization template
		if self.params['initmethod'] == 'allaverage':
			templatefile = self.averageTemplate()
		elif self.params['initmethod'] == 'selectrand':
			templatefile = self.selectRandomParticles()
		elif self.params['initmethod'] == 'randpart':
			templatefile = self.pickRandomParticle()
		elif self.params['initmethod'] == 'template':
			templatefile = self.getTemplate()
		else:
			apDisplay.printError("unknown initialization method defined: "
				+str(self.params['initmethod'])+" not in "+str(self.initmethods))

		apDisplay.printColor("Running spider this can take awhile","cyan")

		### run the alignment
		aligntime = time.time()
		pixrad = int(round(self.params['partrad']/self.stack['apix']/self.params['bin']))
		alignedstack, self.partlist = alignment.refFreeAlignParticles(
			spiderstack, templatefile,
			self.params['numpart'], pixrad,
			self.params['firstring'], self.params['lastring'],
			rundir = ".")
		aligntime = time.time() - aligntime
		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))

		### remove large, worthless stack
		spiderstack = os.path.join(self.params['rundir'], "start.spi")
		apDisplay.printMsg("Removing un-aligned stack: "+spiderstack)
		apFile.removeFile(spiderstack, warn=False)

		### convert stack to imagic
		imagicstack = self.convertSpiderStack(alignedstack)
		apFile.removeFile(alignedstack)

		inserttime = time.time()
		if self.params['commit'] is True:
			self.runtime = aligntime
			self.insertNoRefRun(imagicstack, insert=True)
		else:
			apDisplay.printWarning("not committing results to DB")
		inserttime = time.time() - inserttime

		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))
		apDisplay.printMsg("Database Insertion time: "+apDisplay.timeString(inserttime))
	def runCommand(self, cmd):
		t0 = time.time()
		if self.params['showcmd'] is True:
			apDisplay.printColor("###################################", "magenta")
			sys.stderr.write(
				apDisplay.colorString("COMMAND: \n","magenta")
				+apDisplay.colorString(cmd, "cyan")+"\n")
			apDisplay.printColor("###################################", "cyan")
		try:
			if self.params['verbose'] is False:
				proc = subprocess.Popen(cmd, shell=True, 
					stdout=subprocess.PIPE)#, stderr=subprocess.PIPE)
			else:
				proc = subprocess.Popen(cmd, shell=True)
			proc.wait()
		except:
			apDisplay.printWarning("could not run command: "+cmd)
			sys.exit(1)
			return False
		runtime = time.time() - t0
		if self.params['showcmd'] is True:
			apDisplay.printColor("###################################", "cyan")
			apDisplay.printColor("command ran in "+apDisplay.timeString(runtime), "cyan")
		if runtime < 10:
			apDisplay.printWarning("command runtime was very short: "
				+apDisplay.timeString(runtime))
			sys.exit(1)
			return False

		#self.timestamp = apParam.makeTimestamp()
		return True
	def _printSummary(self):
		"""
		print summary statistics on last image
		"""
		### COP OUT
		if self.params['background'] is True:
			self.stats['count'] += 1
			return

		### THIS NEEDS TO BECOME MUCH MORE GENERAL, e.g. Peaks
		tdiff = time.time()-self.stats['startseries']
		if not self.params['continue'] or tdiff > 0.1:
			count = self.stats['count']
			#if(count != self.stats['lastcount']):
			sys.stderr.write("\n\tSUMMARY: "+self.functionname+"\n")
			self._printLine()
			sys.stderr.write("\tTIME:     \t"+apDisplay.timeString(tdiff)+"\n")
			self.stats['timesum'] = self.stats['timesum'] + tdiff
			self.stats['timesumsq'] = self.stats['timesumsq'] + (tdiff**2)
			timesum = self.stats['timesum']
			timesumsq = self.stats['timesumsq']
			if(count > 1):
				timeavg = float(timesum)/float(count)
				timestdev = math.sqrt(float(count*timesumsq - timesum**2) / float(count*(count-1)))
				timeremain = (float(timeavg)+float(timestdev))*self.stats['seriesleft']
				sys.stderr.write("\tAVG TIME: \t"+apDisplay.timeString(timeavg,timestdev)+"\n")
				#print "\t(- TOTAL:",apDisplay.timeString(timesum)," -)"
				if(self.stats['seriesleft'] > 0):
					sys.stderr.write("\t(- REMAINING TIME: "+apDisplay.timeString(timeremain)+" for "
						+str(self.stats['seriesleft'])+" series -)\n")
			#print "\tMEM: ",(mem.active()-startmem)/1024,"M (",(mem.active()-startmem)/(1024*count),"M)"
			self.stats['count'] += 1
			self._printLine()
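
The summary above recovers the mean and standard deviation of per-image times from only a running count, sum, and sum of squares. A self-contained sketch of that arithmetic, with made-up timings:

# Standalone check of the running-statistics arithmetic used in _printSummary:
# keep count, sum, and sum of squares, then recover mean and sample stdev.
# The timing values are made up for illustration.
import math

times = [2.1, 1.9, 2.4, 2.0]
count = len(times)
timesum = sum(times)
timesumsq = sum(t**2 for t in times)

timeavg = timesum / float(count)
timestdev = math.sqrt((count*timesumsq - timesum**2) / float(count*(count-1)))
# timeavg = 2.1, timestdev ~= 0.22, matching the direct two-pass calculation
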
def setVolumeMass(volumefile, apix=1.0, mass=1.0, rna=0.0):
        """
        set the contour of 1.0 to the desired mass (in kDa) of the
        macromolecule based on its density
        
        use RNA to set the percentage of RNA in the structure
        """
        if isValidVolume(volumefile) is False:
                apDisplay.printError("Volume file is not valid")

        procbin = apParam.getExecPath("proc2d")
        emandir = os.path.dirname(procbin)
        volumebin = os.path.join(emandir, "volume")
        if not os.path.isfile(volumebin):
                apDisplay.printWarning("failed to find volume program")
                return False
        command = "%s %s %.3f set=%.3f"%(       
                volumebin, volumefile, apix, mass
        )
        t0 = time.time()
        proc = subprocess.Popen(command, shell=True)
        proc.wait()
        if time.time()-t0 < 0.01:
                apDisplay.printWarning("failed to scale by mass in "+apDisplay.timeString(time.time()-t0))
                return False
        apDisplay.printMsg("finished scaling by mass in "+apDisplay.timeString(time.time()-t0))
        return True
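
A hypothetical call to setVolumeMass; the map name, pixel size, and molecular mass below are placeholders, not values from a real reconstruction:

# Hypothetical usage of setVolumeMass(); file name and numbers are placeholders.
ok = setVolumeMass("threed.20a.mrc", apix=1.63, mass=450.0)
if ok is False:
        apDisplay.printWarning("mass scaling failed; check that EMAN's volume program is in the path")
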
	def applyEnvelopToDocFile(self, indocfile):
		apDisplay.printMsg("Applying CTF to particles")

		inf = open(indocfile, 'r')
		outdocfile = os.path.splitext(indocfile)[0]+".envelop.lst"
		outf = open(outdocfile, 'w')
		cmdlist = []
		partnum = 0
		if self.params['pad'] is True:
			scaleFactor =  float(self.params['box']*self.params['padF']) / 4096.0
		else:
			scaleFactor = float(self.params['box']) / 4096.0
		t0 = time.time()
		for line in inf:
			### get filename
			partnum += 1
			filename = line.strip().split()[0]
			newfile = os.path.splitext(filename)[0]+".envelop.mrc"

			self.applyEnvelope(filename, newfile, scaleFactor=scaleFactor, msg=False)

			if not os.path.isfile(newfile):
				apDisplay.printError("Ace 2 failed")
			outf.write(newfile+"\t1\n")
		numpart = partnum
		inf.close()
		outf.close()
		timeper = (time.time()-t0)/float(numpart)
		apDisplay.printColor("Total time %s"%(apDisplay.timeString(time.time()-t0)), "green")
		apDisplay.printColor("Time per particle %s"%(apDisplay.timeString(timeper)), "green")


		return outdocfile
	def start(self):
		self.runtime = 0

		self.checkCoranRun()

		### convert stack to spider
		self.alignstackdata = self.getAlignedStack()
		maskpixrad = self.params['maskrad']/self.alignstackdata['pixelsize']/self.params['bin']
		boxpixdiam = int(math.ceil(maskpixrad)+1)*2
		if boxpixdiam*self.params['bin'] > self.alignstackdata['boxsize']:
			boxpixdiam = math.floor(self.alignstackdata['boxsize']/self.params['bin'])
		clippixdiam = boxpixdiam*self.params['bin']
		apDisplay.printMsg("Pixel mask radius="+str(maskpixrad))

		oldalignedstack = os.path.join(self.alignstackdata['path']['path'], self.alignstackdata['imagicfile'])
		alignedstackname = re.sub("\.", "_", self.alignstackdata['imagicfile'])+".spi"
		alignedstack = os.path.join(self.params['rundir'], alignedstackname)
		apFile.removeFile(alignedstack)
		emancmd = ("proc2d %s %s spiderswap shrink=%d clip=%d,%d edgenorm"
			%(oldalignedstack,alignedstack,self.params['bin'],clippixdiam,clippixdiam))
		if self.params['numpart'] is not None:
			emancmd += " last=%d"%(self.params['numpart']-1)
			numpart = self.params['numpart']
		else:
			numpart = self.getNumAlignedParticles()
		apEMAN.executeEmanCmd(emancmd, verbose=True)

		### get number of processors
		nproc = apParam.getNumProcessors()

		esttime = classification.estimateTime(numpart, maskpixrad)
		apDisplay.printColor("Running spider this can take awhile, estimated time: "+\
			apDisplay.timeString(esttime),"cyan")

		### do correspondence analysis
		corantime = time.time()
		self.contriblist = classification.correspondenceAnalysis( alignedstack,
			boxsize=boxpixdiam, maskpixrad=maskpixrad,
			numpart=numpart, numfactors=self.params['numfactors'], nproc=nproc)
		corantime = time.time() - corantime

		### make dendrogram
		dendrotime = time.time()
		classification.makeDendrogram(numfactors=min(3,self.params['numfactors']),nproc=nproc)
		dendrotime = time.time() - dendrotime

		inserttime = time.time()
		if self.params['commit'] is True:
			self.runtime = corantime
			self.insertCoranRun(insert=True)
		else:
			apDisplay.printWarning("not committing results to DB")
		inserttime = time.time() - inserttime

		apFile.removeFile(alignedstack, warn=True)

		apDisplay.printMsg("Correspondence Analysis time: "+apDisplay.timeString(corantime))
		apDisplay.printMsg("Make Dendrogram time: "+apDisplay.timeString(dendrotime))
		apDisplay.printMsg("Database Insertion time: "+apDisplay.timeString(inserttime))
	def alignParticlesToClasses(self, partStack, alignedClassStack, alignedPartStack):
		t1 = time.time()
		numPart = sparx.EMUtil.get_image_count(partStack)
		# for some reason this reports more classes than exist		
		numClasses = sparx.EMUtil.get_image_count(alignedClassStack)
		apDisplay.printMsg("aligning %d particles to %d classes"%(numPart, numClasses))		
		combinePartList = []
		self.particleAlignData = {}
		self.particleClass = {}
		totalPart = 0
		for newClassNum in range(numClasses):
			genId, genClassNum = self.newClassToGenClass[newClassNum]
			particleList = self.classMembersDict[genId][genClassNum]
			apDisplay.printMsg("aligning %d particles to class %d of %d (gen %d, num %d)"
				%(len(particleList), newClassNum, numClasses, genId, genClassNum))
			partEMDataList = sparx.EMData.read_images(partStack, particleList, not self.headerOnly)
			classEMData = sparx.get_im(alignedClassStack, newClassNum)
			totalPart += len(particleList)
			print newClassNum, particleList
			for i in range(len(particleList)):
				partEMData = partEMDataList[i]
				partId = particleList[i]
				self.particleClass[partId] = newClassNum
				alignData = sparx.align2d(partEMData, classEMData, 
					self.xrange, self.yrange, self.transStep, self.firstRing, 
					self.lastRing, self.ringStep, self.mode)
				self.particleAlignData[partId] = alignData
				combinePartList.append(partId)
		apDisplay.printColor("Finished aligning %d particles in %s"
			%(totalPart, apDisplay.timeString(time.time() - t1)), "cyan")
		
		t1 = time.time()
		### write out complete alignment parameters for all generations & aligned stack
		f = open("alignParticles.csv", "w")
		count = 0
		sys.stderr.write("writing %d aligned particles to file"%(len(combinePartList)))
		self.origPartToAlignPartDict = {}
		self.alignPartToOrigPartDict = {}
		for partId in combinePartList:
			self.origPartToAlignPartDict[partId] = count
			self.alignPartToOrigPartDict[count] = partId
			if count % 100 == 0:
				sys.stderr.write(".")
			# write alignments to file
			alignData = self.particleAlignData[partId]
			alpha, x, y, mirror, peak = alignData

			f.write("%.3f\t%.3f\t%.3f\t%d\t%d\n" % (alpha, x, y, mirror, peak))
			partEMData = sparx.EMData.read_images(partStack, [partId], not self.headerOnly)[0]
			alignPartEMData = sparx.rot_shift2D(partEMData, alpha, x, y, mirror)
			#we have to use count instead of partId, because not all images were aligned
			alignPartEMData.write_image(alignedPartStack, count)
			count += 1
		f.close()
		sys.stderr.write("\n")
		apDisplay.printColor("Finished creating aligned stack of %d particles in %s"
			%(count, apDisplay.timeString(time.time() - t1)), "cyan")	
		return
	def removePtclsByJumps(self, particles, rejectlst):
		eulerjump = apEulerJump.ApEulerJump()
		numparts = len(particles)
		apDisplay.printMsg("finding euler jumps for "+str(numparts)+" particles")

		### check symmetry
		symmetry = eulerjump.getSymmetry(self.params['reconid'], msg=True)
		if not re.match("^[cd][0-9]+$", symmetry.lower()) and not re.match("^icos", symmetry.lower()):
			apDisplay.printError("Cannot calculate euler jumps for symmetry: "+symmetry)
			return
		self.params['sym']=symmetry.lower()

		### prepare file
		f = open('jumps.txt', 'w')
		f.write("#pnum\t")
		headerlist = ('mean', 'median', 'stdev', 'min', 'max')
		for key in headerlist:
			f.write(key+"\t")
		f.write("\n")

		### get stack particles
		stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

		### start loop
		t0 = time.time()
		medians = []
		count = 0
		apDisplay.printMsg("processing euler jumps for recon run="+str(self.params['reconid']))
		for stackpart in stackparts:
			count += 1
			partnum = stackpart['particleNumber']
			f.write('%d\t' % partnum)
			jumpdata = eulerjump.getEulerJumpData(self.params['reconid'], stackpartid=stackpart.dbid, stackid=self.params['stackid'], sym=symmetry)
			medians.append(jumpdata['median'])
			if (jumpdata['median'] > self.params['avgjump']) and partnum not in rejectlst:
				rejectlst.append(partnum)
			for key in headerlist:
				f.write("%.3f\t" % (jumpdata[key]))
			f.write("\n")
			if count % 1000 == 0:
				timeremain = (time.time()-t0)/(count+1)*(numparts-count)
				apDisplay.printMsg("particle=% 5d; median jump=% 3.2f, remain time= %s" % (partnum, jumpdata['median'],
					apDisplay.timeString(timeremain)))
				#f.flush()
		### print stats
		apDisplay.printMsg("-- median euler jumper stats --")
		medians = numpy.asarray(medians, dtype=numpy.float32)
		apDisplay.printMsg("mean/std :: "+str(round(medians.mean(),2))+" +/- "
			+str(round(medians.std(),2)))
		apDisplay.printMsg("min/max  :: "+str(round(medians.min(),2))+" <> "
			+str(round(medians.max(),2)))

		perrej = round(100.0*float(numparts-len(rejectlst))/float(numparts),2)
		apDisplay.printMsg("keeping "+str(numparts-len(rejectlst))+" of "+str(numparts)
			+" particles ("+str(perrej)+"%) so far "
			+" in "+apDisplay.timeString(time.time()-t0))

		return rejectlst
def fillSimilarityMatrix(stackfile):
        ### Get initial correlation values
        ### this is really, really slow

        numpart = apFile.numImagesInStack(stackfile)

        similarfile = "similarities.dat"
        if os.path.isfile(similarfile):
                simf = open(similarfile, 'r')
                simlist = []
                count = 0
                for line in simf:
                        count += 1
                        sline = line.strip()
                        slist = sline.split()
                        ccval = float(slist[2])
                        simlist.append(ccval)
                simf.close()
                apDisplay.printMsg("There are %d lines in the sim file: %s"%(count, similarfile))
                if count == numpart*(numpart-1):
                        ### we have a valid file already
                        return similarfile, simlist

        ### read data and estimate time
        imagicdict = apImagicFile.readImagic(stackfile)
        partarray = imagicdict['images']
        numpart = partarray.shape[0]
        boxsize = partarray.shape[1]
        #timeper = 27.0e-9
        timeper = 17.0e-9
        apDisplay.printMsg("Computing CC values in about %s"
                %(apDisplay.timeString(timeper*numpart**2*boxsize**2)))

        ### Computing CC values
        simf = open(similarfile, 'w')
        cctime = time.time()
        simlist = []
        for i in range(0, numpart):
                if i % 100 == 99:
                        sys.stderr.write(".")
                for j in range(i+1, numpart):
                        ccval = self.getCCValue(partarray[i],partarray[j])
                        str1 = "%05d %05d %.10f\n" % (i+1, j+1, ccval)
                        simf.write(str1)
                        str2 = "%05d %05d %.10f\n" % (j+1, i+1, ccval)
                        simf.write(str2)
                        simlist.append(ccval)
        sys.stderr.write("\n")
        simf.close()
        del partarray
        del imagicdict['images']
        apDisplay.printMsg("CC calc time: %s :: %s per part :: %s per part per pixel"
                %(apDisplay.timeString(time.time()-cctime),
                apDisplay.timeString((time.time()-cctime)/numpart**2),
                apDisplay.timeString((time.time()-cctime)/numpart**2/boxsize**2)))

        return similarfile, simlist
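
The progress estimate printed above scales with the square of both the particle count and the box size; a quick back-of-the-envelope check with assumed numbers:

# Back-of-the-envelope check of the CC time estimate above; the particle count
# and box size are assumed values, not taken from a real stack.
timeper = 17.0e-9     # seconds per particle pair per pixel, as used above
numpart = 5000
boxsize = 64
esttime = timeper * numpart**2 * boxsize**2
# 17e-9 * 5000^2 * 64^2 ~= 1740 seconds, i.e. roughly 29 minutes
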
	def applyEnvelopeAndCTF(self, stack):
		### get defocus lists
		numpart = self.params['projcount']
		cut = int(numpart/80.0)+1
		apDisplay.printMsg("%d particles per dot"%(cut))

		if len(self.deflist1) == 0:
			self.getListOfDefoci(numpart)

		### break up particles
		partlistdocfile = apXmipp.breakupStackIntoSingleFiles(stack, filetype="mrc")

		t0 = time.time()
		apDisplay.printMsg("Applying CTF and Envelop to particles")

		### apply CTF using ACE2
		ctfapplydocfile = self.applyCTFToDocFile(partlistdocfile)

		### apply Envelop using ACE2
		envelopdocfile = self.applyEnvelopToDocFile(ctfapplydocfile)

		### correct CTF using ACE2
		if self.params['ace2correct'] is True or self.params['ace2correct_rand'] is True:
			ctfcorrectdocfile = self.correctCTFToDocFile(envelopdocfile)
		else:
			ctfcorrectdocfile = envelopdocfile

		timeper = (time.time()-t0)/float(numpart)
		apDisplay.printColor("Total time %s"%(apDisplay.timeString(time.time()-t0)), "green")
		apDisplay.printColor("Time per particle %s"%(apDisplay.timeString(timeper)), "green")

		### write corrected particle list to doc file
		ctfpartlist = []
		ctfpartlistfile = os.path.join(self.params['rundir'], "ctfpartlist.lst")
		inf = open(ctfcorrectdocfile, 'r')
		outf = open(ctfpartlistfile, "w")
		for line in inf:
			### get filename
			filename = line.strip().split()[0]
			if not os.path.isfile(filename):
				apDisplay.printError("CTF and envelop apply failed")
			ctfpartlist.append(filename)
			outf.write(filename+"\t1\n")
		inf.close()
		outf.close()

		### merge individual files into a common stack
		ctfstack = os.path.join(self.params['rundir'], "ctfstack.hed")
		apXmipp.gatherSingleFilesIntoStack(ctfpartlistfile, ctfstack, filetype="mrc")
		if self.params['pad'] is True:
			emancmd = "proc2d %s %s.clip.hed clip=%d,%d" % (ctfstack, ctfstack[:-4], self.params['box'], self.params['box'])
			apParam.runCmd(emancmd, "EMAN")
			shutil.move("%s.clip.hed" % ctfstack[:-4], "%s.hed" % ctfstack[:-4])
			shutil.move("%s.clip.img" % ctfstack[:-4], "%s.img" % ctfstack[:-4])

		return ctfstack, ctfpartlist
Example #12
	def projMatchRefine(self, classnum, volfile, alignstack, eulerfile, boxsize, numpart, pixrad, iternum):

		APSHout = backproject.alignAPSH(volfile, alignstack, eulerfile, classnum, boxsize, numpart, pixrad, self.timestamp, iternum)

		### check APSH output
		if (os.path.isfile(APSHout) is False):
			apDisplay.printError("AP SH alignment did not generate a valid output file. Please check parameters and rerun!")

		apsh = open(APSHout, "r")

		neweulerdoc = os.path.join(self.params['rundir'], str(classnum),"newEulersdoc-%03d.spi"%(iternum))
		neweulerfile = open(neweulerdoc, "w")
		rotshiftdoc = os.path.join(self.params['rundir'], str(classnum),"rotShiftdoc-%03d.spi"%(iternum))
		rotshiftfile = open(rotshiftdoc, "w")

		starttime = time.time()

		count = 0
		for line in apsh.readlines():
			value = line.split()
			try:
				int(value[0])
			except:
				#apDisplay.printMsg(line)
				continue
			key = int(float(value[6]))
			rot = float(value[7])
			cumX = float(value[14]) #float(value[8])
			cumY = float(value[15]) #float(value[9])
			psi = float(value[2])
			theta = float(value[3])
			phi = float(value[4])
			mirror = int(float(value[16]))

			### rotate and shift particle
			APSHstack = backproject.rotshiftParticle(alignstack, key, rot, cumX, cumY, mirror, iternum, self.timestamp, str(classnum))

			### write out new euler file
			eulerline = operations.spiderOutLine(key, [psi, theta, phi])
			neweulerfile.write(eulerline)

			rotshiftline = operations.spiderOutLine(key, [rot, 1.00, cumX, cumY])
			rotshiftfile.write(rotshiftline)
			count+=1

			if (count%20) == 0:
				apDisplay.printColor(str(numpart-count)+" particles left", "cyan")
				apDisplay.printColor("Estimated time left is "+apDisplay.timeString(((time.time()-starttime)/count)*(numpart-count)), "cyan")

		apDisplay.printColor("finished rotating and shifting particles "+apDisplay.timeString(time.time()-starttime), "cyan")

		neweulerfile.close()
		rotshiftfile.close()
		return APSHout, APSHstack, neweulerdoc
    def start(self):
        """
                This is the core of your function.
                You decide what happens here!
                """
        ### try and get the appion instruments
        self.getAppionInstruments()

        ### create new session, so we have a place to store the log file
        self.createNewSession()

        mrclist = self.getImagesInDirectory(self.params["imagedir"])

        for i in range(min(len(mrclist), 6)):
            print mrclist[i]

        numinseries = 1
        seriescount = 1
        count = 1
        t0 = time.time()
        for mrcfile in mrclist:
            ### rename image
            newimagepath = self.newImagePath(mrcfile, numinseries)

            ### get image dimensions
            dims = self.getImageDimensions(mrcfile)

            ### set pixel size in database
            self.updatePixelSizeCalibration()

            ## read the file. images should be small enough to read into memory
            imagearray = self.readFile(mrcfile)

            ### upload image
            self.uploadImageInformation(imagearray, newimagepath, dims, seriescount, numinseries)

            ### counting
            numinseries += 1
            if numinseries % (self.params["seriessize"] + 1) == 0:
                ### reset series counter
                seriescount += 1
                numinseries = 1

            # print count, seriescount, numinseries
            timeperimage = (time.time() - t0) / float(count)
            apDisplay.printMsg("time per image: %s" % (apDisplay.timeString(timeperimage)))
            esttime = timeperimage * (len(mrclist) - count)
            apDisplay.printMsg(
                "estimated time remaining for %d of %d images: %s"
                % (len(mrclist) - count, len(mrclist), apDisplay.timeString(esttime))
            )
            ### counting
            count += 1
        def start(self):
                ### check for existing run
                selectrunq = appiondata.ApSelectionRunData()
                selectrunq['name'] = self.params['runname']
                selectrunq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                selectrundata = selectrunq.query(readimages=False)
                if selectrundata:
                        apDisplay.printError("Runname already exists")

                ### stack data
                stackdata = apStack.getOnlyStackData(self.params['stackid'])

                ### stack particles
                stackparts = apStack.getStackParticlesFromId(self.params['stackid'], msg=True)
                stackparts.reverse()

                ### selection run for first particle
                oldselectrun = stackparts[0]['particle']['selectionrun']

                ### set selection run
                manualparamsq = appiondata.ApManualParamsData()
                manualparamsq['diam'] = self.getDiamFromSelectionRun(oldselectrun)
                manualparamsq['oldselectionrun'] = oldselectrun
                manualparamsq['trace'] = False
                selectrunq = appiondata.ApSelectionRunData()
                selectrunq['name'] = self.params['runname']
                selectrunq['hidden'] = False
                selectrunq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                selectrunq['session'] = apStack.getSessionDataFromStackId(self.params['stackid'])
                selectrunq['manparams'] = manualparamsq

                ### insert particles
                apDisplay.printMsg("Inserting particles into database")
                count = 0
                t0 = time.time()
                startmem = mem.active()
                numpart = len(stackparts)
                for stackpart in stackparts:
                        count += 1
                        if count > 10 and count%100 == 0:
                                perpart = (time.time()-t0)/float(count+1)
                                apDisplay.printColor("part %d of %d :: %.1fM mem :: %s/part :: %s remain"%
                                        (count, numpart, (mem.active()-startmem)/1024. , apDisplay.timeString(perpart),
                                        apDisplay.timeString(perpart*(numpart-count))), "blue")
                        oldpartdata = stackpart['particle']
                        newpartq = appiondata.ApParticleData(initializer=oldpartdata)
                        newpartq['selectionrun'] = selectrunq
                        if self.params['commit'] is True:
                                newpartq.insert()
                apDisplay.printMsg("Completed in %s"%(apDisplay.timeString(time.time()-t0)))
	def calcResolution(self, partlist, stackfile, apix):
		### group particles by refnum
		reflistsdict = {}
		for partdict in partlist:
			refnum = partdict['template']
			partnum = partdict['num']
			if not refnum in reflistsdict:
					reflistsdict[refnum] = []
			reflistsdict[refnum].append(partnum)

		### get resolution
		self.resdict = {}
		boxsizetuple = apFile.getBoxSize(stackfile)
		boxsize = boxsizetuple[0]
		for refnum in reflistsdict.keys():
			partlist = reflistsdict[refnum]
			esttime = 3e-6 * len(partlist) * boxsize**2
			apDisplay.printMsg("Ref num %d; %d parts; est time %s"
				%(refnum, len(partlist), apDisplay.timeString(esttime)))

			frcdata = apFourier.spectralSNRStack(stackfile, apix, partlist, msg=False)
			frcfile = "frcplot-%03d.dat"%(refnum)
			apFourier.writeFrcPlot(frcfile, frcdata, apix, boxsize)
			res = apFourier.getResolution(frcdata, apix, boxsize)

			self.resdict[refnum] = res

		return
Example #16
	def convertSQLtoEulerTree(self, results):
		t0 = time.time()
		eulertree = []
		for row in results:
			if len(row) < 14:
				apDisplay.printError("delete MySQL cache file and run again")
			try:
				eulerpair = { 'part1': {}, 'part2': {} }
				eulerpair['part1']['partid'] = int(row[0])
				eulerpair['part1']['dbid']   = int(row[1])
				eulerpair['part1']['euler1'] = float(row[2])
				eulerpair['part1']['euler2'] = float(row[3])
				eulerpair['part1']['euler3'] = float(row[4])
				eulerpair['part1']['mirror'] = self.nullOrValue(row[5])
				eulerpair['part1']['reject'] = not self.nullOrValue(row[6])
				eulerpair['part1']['tilt']   = apStack.getStackParticleTilt(eulerpair['part1']['dbid'])

				eulerpair['part2']['partid'] = int(row[7])
				eulerpair['part2']['dbid']   = int(row[8])
				eulerpair['part2']['euler1'] = float(row[9])
				eulerpair['part2']['euler2'] = float(row[10])
				eulerpair['part2']['euler3'] = float(row[11])
				eulerpair['part2']['mirror'] = self.nullOrValue(row[12])
				eulerpair['part2']['reject'] = not self.nullOrValue(row[13])
				eulerpair['part2']['tilt']   = apStack.getStackParticleTilt(eulerpair['part2']['dbid'])
				eulertree.append(eulerpair)
			except:
				print row
				apDisplay.printError("bad row entry")

		apDisplay.printMsg("Converted "+str(len(eulertree))+" eulers in "+apDisplay.timeString(time.time()-t0))
		return eulertree
 def getNumAlignedParticles(self):
         t0 = time.time()
         self.alignstackdata = appiondata.ApAlignStackData.direct_query(self.params['alignstackid'])
         oldalignedstack = os.path.join(self.alignstackdata['path']['path'], self.alignstackdata['imagicfile'])
         numpart = apFile.numImagesInStack(oldalignedstack)
         apDisplay.printMsg("numpart="+str(numpart)+" in "+apDisplay.timeString(time.time()-t0))
         return numpart
        def xmippNormStack(self, inStackPath, outStackPath):
                ### convert stack into single spider files
                selfile = apXmipp.breakupStackIntoSingleFiles(inStackPath)      

                ### setup Xmipp command
                xmippexe = apParam.getExecPath("xmipp_normalize", die=True)
                apDisplay.printMsg("Using Xmipp to normalize particle stack")
                normtime = time.time()
                xmippopts = ( " "
                        +" -i %s"%os.path.join(self.params['rundir'],selfile)
                        +" -method Ramp "
                        +" -background circle %i"%(self.stack['boxsize']/self.params['bin']*0.4)
                        +" -remove_black_dust"
                        +" -remove_white_dust"
                        +" -thr_black_dust -%.2f"%(self.params['xmipp-norm'])
                        +" -thr_white_dust %.2f"%(self.params['xmipp-norm'])
                )
                xmippcmd = xmippexe+" "+xmippopts
                apParam.runCmd(xmippcmd, package="Xmipp", verbose=True, showcmd=True)
                normtime = time.time() - normtime
                apDisplay.printMsg("Xmipp normalization time: "+apDisplay.timeString(normtime))

                ### recombine particles to a single imagic stack
                tmpstack = "tmp.xmippStack.hed"
                apXmipp.gatherSingleFilesIntoStack(selfile,tmpstack)
                apFile.moveStack(tmpstack,outStackPath)

                ### clean up directory
                apFile.removeFile(selfile)
                apFile.removeDir("partfiles")
Example #19
	def printTimeStats(self, name, timelist):
		if len(timelist) < 2:
			return
		meantime = self.stats['timesum']/float(self.stats['count'])
		timearray = numpy.array(timelist, dtype=numpy.float64)
		apDisplay.printColor("%s: %s (%.2f percent)"%
			(name, apDisplay.timeString(timearray.mean(), timearray.std()), 100*timearray.mean()/meantime), "blue")
	def importPicks(self, picks1, picks2, tight=False, msg=True):
		t0 = time.time()
		#print picks1
		#print self.currentpicks1
		curpicks1 = numpy.asarray(self.currentpicks1)
		curpicks2 = numpy.asarray(self.currentpicks2)
		#print curpicks1

		# get picks
		apTiltTransform.setPointsFromArrays(curpicks1, curpicks2, self.data)
		pixdiam = self.data['pixdiam']
		if tight is True:
			pixdiam /= 4.0
		#print self.data, pixdiam
		list1, list2 = apTiltTransform.alignPicks2(picks1, picks2, self.data, limit=pixdiam, msg=msg)
		if list1.shape[0] == 0 or list2.shape[0] == 0:
			apDisplay.printWarning("No new picks were found")

		# merge picks
		newpicks1, newpicks2 = apTiltTransform.betterMergePicks(curpicks1, list1, curpicks2, list2, msg=msg)
		newparts = newpicks1.shape[0] - curpicks1.shape[0]

		# copy over picks
		self.currentpicks1 = newpicks1
		self.currentpicks2 = newpicks2

		if msg is True:
			apDisplay.printMsg("Inserted "+str(newparts)+" new particles in "+apDisplay.timeString(time.time()-t0))

		return True
	def createSpiderFile(self):
		"""
		takes the stack file and creates a spider file ready for processing
		"""
		emancmd  = "proc2d "
		if not os.path.isfile(self.stack['file']):
			apDisplay.printError("stackfile does not exist: "+self.stack['file'])
		emancmd += self.stack['file']+" "

		spiderstack = os.path.join(self.params['rundir'], "start.spi")
		apFile.removeFile(spiderstack, warn=True)
		emancmd += spiderstack+" "

		emancmd += "apix="+str(self.stack['apix'])+" "
		if self.params['lowpass'] > 0:
			emancmd += "lp="+str(self.params['lowpass'])+" "
		if self.params['highpass'] > 0:
			emancmd += "hp="+str(self.params['highpass'])+" "
		if self.params['bin'] > 1:
			clipboxsize = self.boxsize*self.params['bin']
			emancmd += "shrink="+str(self.params['bin'])+" "
			emancmd += "clip="+str(clipboxsize)+","+str(clipboxsize)+" "
		emancmd += "last="+str(self.params['numpart']-1)+" "
		emancmd += "spider edgenorm"
		starttime = time.time()
		apDisplay.printColor("Running spider stack conversion this can take a while", "cyan")
		apEMAN.executeEmanCmd(emancmd, verbose=True)
		apDisplay.printColor("finished eman in "+apDisplay.timeString(time.time()-starttime), "cyan")
		return spiderstack
def getTransformImageIds(transformdata):
        t0 = time.time()
        img1 = transformdata.special_getitem('image1', dereference=False).dbid
        img2 = transformdata.special_getitem('image2', dereference=False).dbid
        if time.time()-t0 > 0.3:
                apDisplay.printMsg("long image query "+apDisplay.timeString(time.time()-t0))
        return img1, img2
	def optimizeAngles(self, msg=True):
		t0 = time.time()
		### run find theta
		na1 = numpy.array(self.currentpicks1, dtype=numpy.int32)
		na2 = numpy.array(self.currentpicks2, dtype=numpy.int32)
		fittheta = radermacher.tiltang(na1, na2)
		if not fittheta or not 'wtheta' in fittheta:
			return
		theta = fittheta['wtheta']
		thetadev = fittheta['wthetadev']
		if msg is True:
			thetastr = ("%3.3f +/- %2.2f" % (theta, thetadev))
			tristr = apDisplay.orderOfMag(fittheta['numtri'])+" of "+apDisplay.orderOfMag(fittheta['tottri'])
			tristr = (" (%3.1f " % (100.0 * fittheta['numtri'] / float(fittheta['tottri'])))+"%) "
			apDisplay.printMsg("Tilt angle "+thetastr+tristr)
		self.data['theta'] = fittheta['wtheta']
		### run optimize angles
		lastiter = [80,80,80]
		count = 0
		totaliter = 0
		while max(lastiter) > 75 and count < 30:
			count += 1
			lsfit = self.runLeastSquares()
			lastiter[2] = lastiter[1]
			lastiter[1] = lastiter[0]
			lastiter[0] = lsfit['iter']
			totaliter += lsfit['iter']
			if msg is True:
				apDisplay.printMsg("Least Squares: "+str(count)+" rounds, "+str(totaliter)
				+" iters, rmsd of "+str(round(lsfit['rmsd'],4))+" pixels in "+apDisplay.timeString(time.time()-t0))
		return
Example #24
	def convertStackToSpider(self, emanstackfile, classnum):
		"""
		takes the stack file and creates a spider file ready for processing
		"""
		if not os.path.isfile(emanstackfile):
			apDisplay.printError("stackfile does not exist: "+emanstackfile)

		### first high pass filter particles
		apDisplay.printMsg("pre-filtering particles")
		apix = apStack.getStackPixelSizeFromStackId(self.params['tiltstackid'])
		emancmd = ("proc2d "+emanstackfile+" "+emanstackfile
			+" apix="+str(apix)+" hp="+str(self.params['highpasspart'])
			+" inplace")
		apEMAN.executeEmanCmd(emancmd, verbose=True)

		### convert imagic stack to spider
		emancmd  = "proc2d "
		emancmd += emanstackfile+" "
		spiderstack = os.path.join(self.params['rundir'], str(classnum), "otrstack"+self.timestamp+".spi")
		apFile.removeFile(spiderstack, warn=True)
		emancmd += spiderstack+" "

		emancmd += "spiderswap edgenorm"
		starttime = time.time()
		apDisplay.printColor("Running spider stack conversion this can take a while", "cyan")
		apEMAN.executeEmanCmd(emancmd, verbose=True)
		apDisplay.printColor("finished eman in "+apDisplay.timeString(time.time()-starttime), "cyan")
		return spiderstack
def getTiltTransformFromParticle(partdata):
        t0 = time.time()

        ### figure out if its particle 1 or 2
        tiltpartq1 = appiondata.ApTiltParticlePairData()
        tiltpartq1['particle1'] = partdata
        tiltpartdatas1 = tiltpartq1.query(results=1, readimages=False)

        tiltpartq2 = appiondata.ApTiltParticlePairData()
        tiltpartq2['particle2'] = partdata
        tiltpartdatas2 = tiltpartq2.query(results=1, readimages=False)

        if tiltpartdatas1 and not tiltpartdatas2:
                imgnum = 1
                transformdata = tiltpartdatas1[0]['transform']
                otherpartdata = tiltpartdatas1[0]['particle2']
        elif not tiltpartdatas1 and tiltpartdatas2:
                imgnum = 2
                transformdata = tiltpartdatas2[0]['transform']
                otherpartdata = tiltpartdatas2[0]['particle1']
        else:
                print partdata
                print tiltpartdatas1
                print tiltpartdatas2
                apDisplay.printError("failed to get tilt pair data")

        if time.time()-t0 > 1.0:
                apDisplay.printMsg("long getTiltTransFromPart1 "+apDisplay.timeString(time.time()-t0))
        return imgnum, transformdata, otherpartdata
def getParticleTiltRotationAnglesOTR(stackpartdata):
        partdata = stackpartdata['particle']
        imgnum, transformdata, otherpartdata = getTiltTransformFromParticle(partdata)

        t0 = time.time()
        tiltangle1, tiltangle2 = apDatabase.getTiltAnglesDegFromTransform(transformdata)
        if time.time()-t0 > 1.0:
                apDisplay.printMsg("long angle query "+apDisplay.timeString(time.time()-t0))

        if imgnum == 1:
                ### negative case, tilt picker theta < 0
                tiltrot = transformdata['image1_rotation']
                theta = transformdata['tilt_angle']
                notrot   = transformdata['image2_rotation']
                tiltangle = tiltangle1 - tiltangle2
        elif imgnum == 2:
                ### positive case, tilt picker theta > 0
                tiltrot = transformdata['image2_rotation']
                theta = transformdata['tilt_angle']
                notrot   = transformdata['image1_rotation']
                tiltangle = tiltangle2 - tiltangle1
        else:
                #no particle pair info was found or some other problem
                print partdata
                apDisplay.printError("failed to get tilt pair data or some other problem")

        if transformdata.timestamp < datetime.datetime(2009, 2, 19, 0, 0, 0):
                ### bugfix for switched tilt axis angles, before Feb 19, 2009
                #apDisplay.printWarning("Switching angles")
                temprot = notrot
                notrot = tiltrot
                tiltrot = temprot

        #print "tr=%.2f, th=%.2f, nr=%.2f, tilt=%.2f"%(tiltrot, theta, notrot, tiltangle)
        return tiltrot, theta, notrot, tiltangle
Example #27
def rctParticleShift(volfile, origstackfile, eulerdocfile, iternum, numpart, pixrad, timestamp, dataext=".spi"):
	"""
	inputs:
		stack, in spider format
		eulerdocfile
	outputs:
		volume
	"""
	starttime = time.time()
	### create corresponding projections
	projstackfile = "projstack%s-%03d.spi"%(timestamp, iternum)
	projectVolume(volfile, eulerdocfile, projstackfile, numpart, pixrad, dataext)

	### clean up files
	ccdocfile = "ccdocfile%s-%03d.spi"%(timestamp, iternum)
	apFile.removeFile(ccdocfile)
	alignstackfile = "alignstack%s-%03d.spi"%(timestamp, iternum)
	apFile.removeFile(alignstackfile)

	### align particles to projection
	apDisplay.printMsg("Shifting particles")
	crossCorrelateAndShift(origstackfile, projstackfile, alignstackfile, ccdocfile, numpart)

	if not os.path.isfile(alignstackfile):
		apDisplay.printError("aligned stack file not found: "+alignstackfile)
	apDisplay.printColor("finished correlations in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return alignstackfile
Example #28
def backprojectCG(stackfile, eulerdocfile, volfile, numpart, pixrad, dataext=".spi"):
	"""
	inputs:
		stack, in spider format
		eulerdocfile
	outputs:
		volume
	"""
	### setup
	starttime = time.time()
	stackfile = spyder.fileFilter(stackfile)
	eulerdocfile = spyder.fileFilter(eulerdocfile)
	volfile = spyder.fileFilter(volfile)
	if not os.path.isfile(stackfile+dataext):
		apDisplay.printError("stack file not found: "+stackfile+dataext)
	if not os.path.isfile(eulerdocfile+dataext):
		apDisplay.printError("euler doc file not found: "+eulerdocfile+dataext)
	apFile.removeFile(volfile+dataext)
	nproc = apParam.getNumProcessors()
	mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)
	mySpider.toSpider("BP CG", 
		stackfile+"@*****", #stack file
		"1-%d"%(numpart), #number of particles
		str(pixrad), #particle radius
		eulerdocfile, #angle doc file
		"N", #has symmetry?, does not work
		volfile, #filename for volume
 		"%.1e,%.1f" % (1.0e-5, 0.0), #error, chi^2 limits
 		"%d,%d" % (25,1), #iterations, 1st derivative mode
 		"2000", #lambda - higher=less sensitive to noise
	)
	mySpider.close()
	apDisplay.printColor("finished backprojection in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return
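
A hypothetical call to backprojectCG; the SPIDER file names, particle count, and pixel radius are placeholders, and the stack plus Euler doc file must already exist in SPIDER format:

# Hypothetical usage of backprojectCG(); names and numbers are placeholders.
backprojectCG("alignstack.spi", "eulerdoc.spi", "rawvolume.spi",
	numpart=3000, pixrad=30)
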
Example #29
def projectVolume(volfile, eulerdocfile, projstackfile, numpart, pixrad, dataext=".spi"):
	"""
	project 3D volumes using given Euler angles
	"""
	starttime = time.time()

	volfile = spyder.fileFilter(volfile)
	eulerdocfile = spyder.fileFilter(eulerdocfile)
	projstackfile = spyder.fileFilter(projstackfile)
	if not os.path.isfile(volfile+dataext):
		apDisplay.printError("volume file not found: "+volfile+dataext)
	if not os.path.isfile(eulerdocfile+dataext):
		apDisplay.printError("euler doc file not found: "+eulerdocfile+dataext)

	apFile.removeFile(projstackfile)
	nproc = apParam.getNumProcessors()
	mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)
	mySpider.toSpider("PJ 3Q", 
		volfile, #input vol file
		str(pixrad), #pixel radius
		"1-%d"%(numpart), #number of particles		
		eulerdocfile, #Euler DOC file
		projstackfile+"@*****", #output projections
	)
	mySpider.close()
	apDisplay.printColor("finished projections in "+apDisplay.timeString(time.time()-starttime), "cyan")
	return
	def getOverlap(self, image1, image2, msg=True):
		t0 = time.time()
		bestOverlap, tiltOverlap = apTiltTransform.getOverlapPercent(image1, image2, self.data)
		overlapStr = str(round(100*bestOverlap,2))+"% and "+str(round(100*tiltOverlap,2))+"%"
		if msg is True:
			apDisplay.printMsg("Found overlaps of "+overlapStr+" in "+apDisplay.timeString(time.time()-t0))
		self.data['overlap'] = bestOverlap
Example #31
    def processTiltPair(self,
                        imgfile1,
                        imgfile2,
                        picks1,
                        picks2,
                        tiltangle,
                        outfile,
                        pixdiam=20.0,
                        tiltaxis=-7.0,
                        msg=True):
        """
		Inputs:
			imgfile1
			imgfile2
			picks1, 2xN numpy array
			picks2, 2xN numpy array
			tiltangle
			outfile
		Modifies:
			outfile
		Output:
			None, failed
			True, success
		"""

        ### pre-load particle picks
        if len(picks1) < 10 or len(picks2) < 10:
            if msg is True:
                apDisplay.printWarning(
                    "Not enough particles ot run program on image pair")
            return None

        ### setup tilt data
        self.data['theta'] = tiltangle
        self.data['shiftx'] = 0.0
        self.data['shifty'] = 0.0
        self.data['gamma'] = tiltaxis
        self.data['phi'] = tiltaxis
        self.data['scale'] = 1.0
        self.data['pixdiam'] = pixdiam

        ### open image file 1
        img1 = self.openImageFile(imgfile1)
        if img1 is None:
            apDisplay.printWarning("Could not read image: " + imgfile1)
            return None

        ### open tilt file 2
        img2 = self.openImageFile(imgfile2)
        if img2 is None:
            apDisplay.printWarning("Could not read image: " + imgfile2)
            return None

        ### guess the shift
        t0 = time.time()
        if msg is True:
            apDisplay.printMsg("Refining tilt axis angles")
        origin, newpart, snr, bestang = apTiltShift.getTiltedCoordinates(
            img1, img2, tiltangle, picks1, True, tiltaxis, msg=msg)
        self.data['gamma'] = float(bestang)
        self.data['phi'] = float(bestang)
        if snr < 2.0:
            if msg is True:
                apDisplay.printWarning("Low confidence in initial shift")
            return None
        self.currentpicks1 = [origin]
        self.currentpicks2 = [newpart]

        ### search for the correct particles
        self.importPicks(picks1, picks2, tight=False, msg=msg)
        if len(self.currentpicks1) < 4:
            apDisplay.printWarning("Failed to find any particle matches")
            return None
        self.deleteFirstPick()
        self.printData(msg)
        for i in range(4):
            self.clearBadPicks(msg)
            if len(self.currentpicks1) < 5 or len(self.currentpicks2) < 5:
                if msg is True:
                    apDisplay.printWarning(
                        "Not enough particles to optimize angles")
                return None
            self.optimizeAngles(msg)
            self.printData(msg)
            self.clearBadPicks(msg)
            self.clearBadPicks(msg)
            if len(self.currentpicks1) < 5 or len(self.currentpicks2) < 5:
                if msg is True:
                    apDisplay.printWarning(
                        "Not enough particles to optimize angles")
                return None
            self.optimizeAngles(msg)
            self.printData(msg)
            self.clearBadPicks(msg)
            self.importPicks(picks1, picks2, tight=False, msg=msg)
        self.clearBadPicks(msg)
        self.printData(msg)
        if len(self.currentpicks1) < 5 or len(self.currentpicks2) < 5:
            if msg is True:
                apDisplay.printWarning(
                    "Not enough particles to optimize angles")
            return None
        self.optimizeAngles(msg)
        self.printData(msg)
        self.getOverlap(img1, img2, msg)
        if msg is True:
            apDisplay.printMsg("Completed alignment of " +
                               str(len(self.currentpicks1)) +
                               " particle pairs in " +
                               apDisplay.timeString(time.time() - t0))

        self.saveData(imgfile1, imgfile2, outfile)
        self.printData(msg)

        return True
def refBasedAlignParticles(stackfile,
                           templatestack,
                           origstackfile,
                           xysearch,
                           xystep,
                           numpart,
                           numtemplate,
                           firstring=2,
                           lastring=100,
                           dataext=".spi",
                           iternum=1,
                           oldpartlist=None):
    """
        inputs:
                stack
                template
                search params
        outputs:
                aligned stack
                rotation/shift params
        """
    ### setup
    if dataext in templatestack:
        templatestack = templatestack[:-4]
    if dataext in stackfile:
        stackfile = stackfile[:-4]
    if dataext in origstackfile:
        origstackfile = origstackfile[:-4]
    t0 = time.time()
    rundir = "alignments"
    apParam.createDirectory(rundir)
    nproc = apParam.getNumProcessors()

    ### remove previous iterations
    apFile.removeFile(rundir + "/paramdoc%02d%s" % (iternum, dataext))

    ### perform alignment, should I use 'AP SH' instead?
    mySpider = spyder.SpiderSession(dataext=dataext,
                                    logo=True,
                                    nproc=nproc,
                                    log=False)
    mySpider.toSpider(
        "AP MQ",
        spyder.fileFilter(templatestack) + "@**",  # reference image series
        "1-" + str(numtemplate),  # enter number of templates of doc file
        str(int(xysearch)) + "," +
        str(int(xystep)),  # translation search range, step size
        str(int(firstring)) + "," +
        str(int(lastring)),  # first and last ring for rotational correlation
        spyder.fileFilter(stackfile) + "@******",  # unaligned image series
        "1-" + str(numpart),  # enter number of particles of doc file
        rundir + ("/paramdoc%02d" % (iternum)),  # output angles document file
    )
    mySpider.close()
    """
        ### perform alignment, should I use 'AP SH' instead?
        mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)
        mySpider.toSpider("AP SH",
                spyder.fileFilter(templatestack)+"@**",     # reference image series
                "1-"+str(numtemplate),                      # enter number of templates of doc file
                str(int(xysearch))+","+str(int(xystep)),    # translation search range, step size
                str(int(firstring))+","+str(int(lastring)), # first and last ring for rotational correlation
                "*",                                                                                                      # initial angles
                spyder.fileFilter(stackfile)+"@******",     # unaligned image series
                "1-"+str(numpart),                          # enter number of particles of doc file
                "*",                                                                                                      # initial angles
                "0.0",                                                                                            # angle search
                "1",                                                                                              # check mirrors and shift/rotate input
                rundir+("/paramdoc%02d" % (iternum)),       # output angles document file
        )
        mySpider.close()
        """

    ### convert spider rotation, shift data to python
    docfile = rundir + ("/paramdoc%02d" % (iternum)) + dataext
    picklefile = rundir + ("/paramdoc%02d" % (iternum)) + ".pickle"
    if oldpartlist is not None and iternum > 1:
        apDisplay.printMsg("updating particle doc info")
        partlist = updateRefBasedDocFile(oldpartlist, docfile, picklefile)
    elif iternum == 1:
        apDisplay.printMsg("reading initial particle doc info")
        partlist = readRefBasedDocFile(docfile, picklefile)
    else:
        apDisplay.printError(
            "reading (not updating) particle doc info on iteration " +
            str(iternum))

    ### write aligned stack -- with python loop
    alignedstack = rundir + ("/alignedstack%02d" % (iternum))
    alignStack(origstackfile, alignedstack, partlist, dataext)

    ### average stack
    emancmd = ("proc2d " + alignedstack + dataext + " " + rundir +
               ("/avgimg%02d" % (iternum)) + ".mrc " + " average")
    apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=True)

    td1 = time.time() - t0

    apDisplay.printMsg("completed alignment of " + str(numpart) +
                       " particles in " + apDisplay.timeString(td1))
    if numpart < 1:
        apDisplay.printError("Failed to find any particles")

    return alignedstack + dataext, partlist
def refFreeAlignParticles(stackfile,
                          template,
                          numpart,
                          pixrad,
                          firstring=2,
                          lastring=100,
                          dataext=".spi",
                          rundir="alignment"):
    """
        inputs:
                stack
                template
                search params
        outputs:
                aligned stack
                rotation/shift params
        """
    ### setup
    if dataext in template:
        template = template[:-4]
    if dataext in stackfile:
        stackfile = stackfile[:-4]
    t0 = time.time()
    apParam.createDirectory(rundir)

    ### remove previous iterations
    numiter = 0
    while os.path.isfile(rundir + "/avgimg%02d%s" % (numiter + 1, dataext)):
        apFile.removeFile(rundir + "/avgimg%02d%s" % (numiter + 1, dataext))
        pngfile = rundir + "/avgimg%02d%s" % (numiter + 1, ".png")
        apFile.removeFile(pngfile)
        numiter += 1

    ### perform alignment
    mySpider = spyder.SpiderSession(dataext=dataext, logo=True, log=False)
    apDisplay.printMsg("Performing particle alignment")
    # copy template to memory
    mySpider.toSpiderQuiet("CP", (template + "@1"), "_9")
    mySpider.toSpider("AP SR",
                      spyder.fileFilter(stackfile) + "@******",
                      "1-" + str(numpart), str(int(pixrad)),
                      str(int(firstring)) + "," + str(int(lastring)), "_9",
                      rundir + "/avgimg**", rundir + "/paramdoc**")
    mySpider.close()

    ### find number of iterations
    numiter = 0
    while os.path.isfile(rundir + "/avgimg%02d%s" % (numiter + 1, dataext)):
        emancmd = ("proc2d " + " " + rundir + "/avgimg" +
                   ("%02d%s" % (numiter + 1, dataext)) + " " + rundir +
                   "/avgimg" + ("%02d%s" % (numiter + 1, ".png")))
        apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=False)
        numiter += 1
    if numiter == 0:
        apDisplay.printError("alignment failed, no iterations were found")
    emancmd = ("proc2d " + " " + rundir + "/avgimg" +
               ("%02d%s" % (numiter, dataext)) + " " + rundir + "/average.mrc")
    apEMAN.executeEmanCmd(emancmd, verbose=False, showcmd=False)
    apDisplay.printMsg(
        str(numiter) + " alignment iterations were run by spider")

    ### convert spider rotation, shift data to python
    docfile = rundir + ("/paramdoc%02d" % (numiter)) + dataext
    picklefile = rundir + ("/paramdoc%02d" % (numiter)) + ".pickle"
    partlist = readRefFreeDocFile(docfile, picklefile)

    ### write aligned stack -- with python loop
    alignedstack = "alignedstack"
    alignStack(stackfile, alignedstack, partlist, dataext)

    td1 = time.time() - t0
    apDisplay.printMsg("completed alignment of " + str(numpart) +
                       " particles in " + apDisplay.timeString(td1))

    return ("alignedstack.spi", partlist)
def alignStack(oldstack, alignedstack, partlist, dataext=".spi"):
    """
        write aligned stack -- with python loop

        inputs:
                oldstack
                newstack (empty)
                list of particle dictionaries for operations
        modifies:
                newstack
        output:
                none

        I tried this loop in both spider and python;
        python was faster?!? -neil
        """
    if not os.path.isfile(oldstack + dataext):
        apDisplay.printError("Could not find original stack: " + oldstack +
                             dataext)
    boxsize = apFile.getBoxSize(oldstack + dataext)

    apDisplay.printMsg("applying alignment parameters to stack")
    apFile.removeFile(alignedstack + dataext)
    count = 0
    t0 = time.time()
    nproc = apParam.getNumProcessors()

    mySpider = spyder.SpiderSession(dataext=dataext,
                                    logo=True,
                                    nproc=nproc,
                                    log=False)
    #create stack in core
    numpart = len(partlist)
    mySpider.toSpiderQuiet(
        "MS I",  #command
        "_2@",  #name
        "%d,%d,%d" % (boxsize),  #boxsize
        str(numpart + 1),  #num part to create in memory
        str(numpart + 1),  #max particle number
    )
    for partdict in partlist:
        partnum = partdict['num']
        #if partdict['num'] in [3,6,7]:
        #       print partdict['num'], partdict['template'], partdict['mirror'], round(partdict['rot'],3)

        ### Rotate and Shift operations
        count += 1
        #rotate/shift
        mySpider.toSpiderQuiet(
            "RT SQ",
            spyder.fileFilter(oldstack) + "@" + ("%06d" % (partnum)),
            "_1",
            str(partdict['rot']),
            str(partdict['xshift']) + "," + str(partdict['yshift']),
        )
        #mirror, if necessary
        if 'mirror' in partdict and partdict['mirror'] is True:
            mySpider.toSpiderQuiet(
                "MR",
                "_1",
                "_2@" + ("%06d" % (partnum)),
                "Y",
            )
        else:
            mySpider.toSpiderQuiet(
                "CP",
                "_1",
                "_2@" + ("%06d" % (partnum)),
            )

    ### finish up
    #save stack to file
    mySpider.toSpiderQuiet(
        "CP",
        "_2@",
        spyder.fileFilter(alignedstack) + "@",
    )
    #delete stack
    mySpider.toSpiderQuiet(
        "DE",
        "_2",
    )
    mySpider.close()

    apDisplay.printMsg("Completed transforming %d particles in %s" %
                       (count, apDisplay.timeString(time.time() - t0)))
    if count < 1:
        apDisplay.printError("Failed to transform any particles")

    if not os.path.isfile(alignedstack + dataext):
        apDisplay.printError("Failed to create stack " + alignedstack +
                             dataext)

    return
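A minimal usage sketch of alignStack(); the stack names and parameter values below are placeholders, and the dictionary keys simply mirror the ones the loop above reads ('num', 'rot', 'xshift', 'yshift', plus the optional 'mirror'). Note that the stack names are passed without the ".spi" extension, since the function appends dataext itself.

# hypothetical example; stack names and values are illustrative only
examplepartlist = [
    {'num': 1, 'rot': 12.5, 'xshift': 1.0, 'yshift': -2.0, 'mirror': False},
    {'num': 2, 'rot': -48.0, 'xshift': 0.5, 'yshift': 3.0, 'mirror': True},
]
alignStack("stack", "alignedstack", examplepartlist, dataext=".spi")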
    def start(self):
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid'])
        self.stack['part'] = apStack.getOneParticleFromStackId(
            self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])

        ### test insert to make sure data is not overwritten
        self.params['runtime'] = 0
        #self.checkDuplicateRefBasedRun()

        ### set box size
        self.boxsize = int(
            math.floor(self.stack['boxsize'] / self.params['bin'] / 2.0)) * 2

        ### convert stack to spider
        spiderstack = self.createSpiderFile()

        ### create template stack
        templatestack = self.createTemplateStack()

        ### run the alignment
        aligntime = time.time()
        usestack = spiderstack
        oldpartlist = None
        for i in range(self.params['numiter']):
            iternum = i + 1
            apDisplay.printColor("\n\nITERATION " + str(iternum), "green")
            alignedstack, partlist = alignment.refBasedAlignParticles(
                usestack,
                templatestack,
                spiderstack,
                self.params['xysearch'],
                self.params['xystep'],
                self.params['numpart'],
                self.params['numtemplate'],
                self.params['firstring'],
                self.params['lastring'],
                iternum=iternum,
                oldpartlist=oldpartlist)
            oldpartlist = partlist
            usestack = alignedstack
            templatestack = self.updateTemplateStack(alignedstack, partlist,
                                                     iternum)
        aligntime = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(aligntime))

        ### remove large, worthless stack
        spiderstack = os.path.join(self.params['rundir'], "start.spi")
        apDisplay.printMsg("Removing un-aligned stack: " + spiderstack)
        apFile.removeFile(spiderstack, warn=True)

        ### convert aligned stack to imagic
        finalspistack = "aligned.spi"
        shutil.move(alignedstack, finalspistack)
        imagicstack = "aligned.hed"
        apFile.removeStack(imagicstack)
        emancmd = "proc2d " + finalspistack + " " + imagicstack
        apEMAN.executeEmanCmd(emancmd, verbose=True)

        ### average stack
        apStack.averageStack(imagicstack)

        ### calculate resolution for each reference
        apix = self.stack['apix'] * self.params['bin']
        self.calcResolution(partlist, imagicstack, apix)

        if self.params['commit'] is True:
            apDisplay.printMsg("committing results to DB")
            self.params['runtime'] = aligntime
            self.insertRefBasedRun(partlist, imagicstack, insert=True)
        else:
            apDisplay.printWarning("not committing results to DB")

        ### remove temporary files
        apFile.removeFilePattern("alignments/alignedstack*.spi")
        apFile.removeFile(finalspistack)
def runFindEM(imgdict, params, thread=False):
    """
        runs a separate thread of findem.exe for each template
        to get cross-correlation maps
        """
    ### check image
    apFindEM.processAndSaveImage(imgdict, params)
    dwnimgname = imgdict['filename'] + ".dwn.mrc"
    if not os.path.isfile(dwnimgname):
        apDisplay.printError("cound not find image to process: " + dwnimgname)

    ### check template
    if len(params['templatelist']) < 1:
        apDisplay.printError("templatelist == 0; there are no templates")

    joblist = []
    ccmapfilelist = []

    t0 = time.time()
    for i, templatename in enumerate(params['templatelist']):
        classavg = i + 1

        #DETERMINE OUTPUT FILE NAME
        #CHANGE THIS TO BE 00%i in future
        numstr = "%03d" % classavg
        #numstr = str(classavg%10)+"00"
        ccmapfile = "cccmaxmap" + numstr + ".mrc"
        apFile.removeFile(ccmapfile)

        #GET FINDEM RUN COMMANDS
        feed = findEMString(classavg, templatename, dwnimgname, ccmapfile,
                            params)

        #RUN THE PROGRAM
        if thread is True:
            job = findemjob(feed)
            joblist.append(job)
            job.start()
        else:
            execFindEM(feed)

        #TO REMOVE LATER: Fake output
        fakeOutput(dwnimgname, ccmapfile, params)

        #STORE OUTPUT FILE
        ccmapfilelist.append(ccmapfile)

    ### WAIT FOR THREADS TO COMPLETE
    if thread is True:
        apDisplay.printMsg("waiting for " + str(len(joblist)) +
                           " findem threads to complete")
        for i, job in enumerate(joblist):
            while job.isAlive():
                sys.stderr.write(".")
                time.sleep(1.5)
        sys.stderr.write("\n")
    apDisplay.printMsg("FindEM finished in " +
                       apDisplay.timeString(time.time() - t0))

    for ccmapfile in ccmapfilelist:
        if not os.path.isfile(ccmapfile):
            apDisplay.printError("findem.exe did not run or crashed.\n")

    return ccmapfilelist
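The threaded branch above relies on a findemjob class that is not shown in this excerpt. A minimal sketch of such a worker, assuming findem.exe reads its parameter feed from standard input; the real Appion class may differ.

import subprocess
import threading

class findemjob(threading.Thread):
    """Hypothetical FindEM worker thread (sketch only); assumes findem.exe
    takes its parameters from stdin, as the 'feed' string suggests."""
    def __init__(self, feed):
        threading.Thread.__init__(self)
        self.feed = feed

    def run(self):
        # pipe the parameter feed into findem.exe and wait for it to finish
        proc = subprocess.Popen("findem.exe", shell=True,
                                stdin=subprocess.PIPE)
        proc.communicate(self.feed)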
Example No. 37
    def _getAllSeries(self):
        startt = time.time()
        self.seriestree = []
        if self.params['mrcnames'] is not None:
            mrcfileroot = self.params['mrcnames'].split(",")
            if self.params['sessionname'] is not None:
                images = apDatabase.getSpecificImagesFromSession(
                    mrcfileroot, self.params['sessionname'])
            else:
                images = apDatabase.getSpecificImagesFromDB(mrcfileroot)
        elif self.params['sessionname'] is not None:
            if self.params['preset'] is not None:
                images = apDatabase.getImagesFromDB(self.params['sessionname'],
                                                    self.params['preset'])
            else:
                self.seriestree = apDatabase.getAllTiltSeriesFromSessionName(
                    self.params['sessionname'])
        else:
            if self.params['mrcnames'] is not None:
                apDisplay.printMsg("MRC List: " +
                                   str(len(self.params['mrcnames'])) + " : " +
                                   str(self.params['mrcnames']))
            apDisplay.printMsg("Session: " + str(self.params['sessionname']) +
                               " : " + str(self.params['preset']))
            apDisplay.printError("no files specified")
        # Only use finished tilt series
        if len(self.seriestree) > 0:
            indices_to_pop = []
            for tiltseriesdata in self.seriestree:
                if not apDatabase.getTiltSeriesDoneStatus(tiltseriesdata):
                    indices_to_pop.append(
                        self.seriestree.index(tiltseriesdata))
            indices_to_pop.sort()
            indices_to_pop.reverse()
            for index in indices_to_pop:
                self.seriestree.pop(index)
        else:
            for image in images:
                tiltseriesdata = image['tilt series']
                if tiltseriesdata and tiltseriesdata not in self.seriestree and apDatabase.getTiltSeriesDoneStatus(
                        tiltseriesdata):
                    self.seriestree.append(tiltseriesdata)
        precount = len(self.seriestree)
        apDisplay.printMsg("Found " + str(precount) + " tilt series in " +
                           apDisplay.timeString(time.time() - startt))

        ### REMOVE PROCESSED IMAGES
        apDisplay.printMsg("Remove processed series")
        self._removeProcessedSeries()

        ### SET SERIES ORDER
        if self.params['reverse'] is True:
            apDisplay.printMsg("Process series new to old")
        else:
            # by default series are new to old
            apDisplay.printMsg("Process series old to new")
            self.seriestree.sort(self._reverseSortSeriesTree)

        ### LIMIT NUMBER
        if self.params['limit'] is not None:
            lim = self.params['limit']
            if len(self.seriestree) > lim:
                apDisplay.printMsg("Limiting number of series to " + str(lim))
                self.seriestree = self.seriestree[:lim]
        self.stats['seriescount'] = len(self.seriestree)
	def start(self):
		### new stack path
		stackdata = apStack.getOnlyStackData(self.params['stackid'])
		oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
		newstack = os.path.join(self.params['rundir'], stackdata['name'])
		apStack.checkForPreviousStack(newstack)

		includelist = []
		excludelist = []
		### list of classes to be excluded
		if self.params['dropclasslist'] is not None:
			excludestrlist = self.params['dropclasslist'].split(",")
			for excludeitem in excludestrlist:
				excludelist.append(int(excludeitem.strip()))
		apDisplay.printMsg("Exclude list: "+str(excludelist))

		### list of classes to be included
		if self.params['keepclasslist'] is not None:
			includestrlist = self.params['keepclasslist'].split(",")
			for includeitem in includestrlist:
				includelist.append(int(includeitem.strip()))

		### or read from keepfile
		elif self.params['keepfile'] is not None:
			keeplistfile = open(self.params['keepfile'])
			for line in keeplistfile:
				if self.params['excludefrom'] is True:
					excludelist.append(int(line.strip()))
				else:
					includelist.append(int(line.strip()))
			keeplistfile.close()
		apDisplay.printMsg("Include list: "+str(includelist))

		### get particles from align or cluster stack
		apDisplay.printMsg("Querying database for particles")
		q0 = time.time()

		if self.params['alignid'] is not None:
			# DIRECT SQL STUFF
			sqlcmd = "SELECT " + \
				"apd.partnum, " + \
				"apd.xshift, apd.yshift, " + \
				"apd.rotation, apd.mirror, " + \
				"apd.spread, apd.correlation, " + \
				"apd.score, apd.bad, " + \
				"spd.particleNumber, " + \
				"ard.refnum "+ \
				"FROM ApAlignParticleData apd " + \
				"LEFT JOIN ApStackParticleData spd ON " + \
				"(apd.`REF|ApStackParticleData|stackpart` = spd.DEF_id) " + \
				"LEFT JOIN ApAlignReferenceData ard ON" + \
				"(apd.`REF|ApAlignReferenceData|ref` = ard.DEF_id) " + \
				"WHERE `REF|ApAlignStackData|alignstack` = %i"%(self.params['alignid'])
			# These are AlignParticles
			particles = sinedon.directq.complexMysqlQuery('appiondata',sqlcmd)

		elif self.params['clusterid'] is not None:
			clusterpartq = appiondata.ApClusteringParticleData()
			clusterpartq['clusterstack'] = self.clusterstackdata
			# These are ClusteringParticles
			particles = clusterpartq.query()
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-q0)))

		### write included particles to text file
		includeParticle = []
		excludeParticle = 0
		badscore = 0
		badshift = 0
		badspread = 0

		f = open("test.log", "w")
		count = 0
		t0 = time.time()
		apDisplay.printMsg("Parsing particle information")

		# find out if there is alignparticle info:
		is_cluster_p = False
		# 'alignparticle' is a key of each particle in particles if the
		# latter is a ClusteringParticle
		if 'alignparticle' in particles[0]:
			is_cluster_p = True

		for part in particles:
			count += 1
			if is_cluster_p:
				# alignpart is an item of ClusteringParticle
				alignpart = part['alignparticle']
				try:
					classnum = int(part['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
				emanstackpartnum = alignpart['stackpart']['particleNumber']-1
			else:
				# particle has info from AlignedParticle as results of direct query
				alignpart = part
				try:
					classnum = int(alignpart['refnum'])-1
				except:
					apDisplay.printWarning("particle %d was not put into any class" % (part['partnum']))
					classnum = None
				emanstackpartnum = int(alignpart['particleNumber'])-1

			### check shift
			if self.params['maxshift'] is not None:
				shift = math.hypot(alignpart['xshift'], alignpart['yshift'])
				if shift > self.params['maxshift']:
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badshift += 1
					continue

			if self.params['minscore'] is not None:
				### check score
				if ( alignpart['score'] is not None
				 and alignpart['score'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badscore += 1
					continue

				### check spread
				if ( alignpart['spread'] is not None
				 and alignpart['spread'] < self.params['minscore'] ):
					excludeParticle += 1
					if classnum is not None:
						f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
					else:
						f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
					badspread += 1
					continue

			if classnum is not None:
				if includelist and (classnum in includelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				elif excludelist and not (classnum in excludelist):
					includeParticle.append(emanstackpartnum)
					f.write("%d\t%d\t%d\tinclude\n"%(count, emanstackpartnum, classnum))
				else:
					excludeParticle += 1
					f.write("%d\t%d\t%d\texclude\n"%(count, emanstackpartnum, classnum))
			else:
				excludeParticle += 1
				f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
			
		f.close()

		includeParticle.sort()
		if badshift > 0:
			apDisplay.printMsg("%d paricles had a large shift"%(badshift))
		if badscore > 0:
			apDisplay.printMsg("%d paricles had a low score"%(badscore))
		if badspread > 0:
			apDisplay.printMsg("%d paricles had a low spread"%(badspread))
		apDisplay.printMsg("Completed in %s\n"%(apDisplay.timeString(time.time()-t0)))
		apDisplay.printMsg("Keeping "+str(len(includeParticle))+" and excluding "+str(excludeParticle)+" particles")

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### get number of particles
		numparticles = len(includeParticle)
		if excludelist:
			self.params['description'] += ( " ... %d particle substack with %s classes excluded"
				% (numparticles, self.params['dropclasslist']))
		elif includelist:
			self.params['description'] += ( " ... %d particle substack with %s classes included"
				% (numparticles, self.params['keepclasslist']))

		outavg = os.path.join(self.params['rundir'],"average.mrc")

		### create the new sub stack
		# first check if virtual stack
		if not os.path.isfile(oldstack):
			vstackdata=apStack.getVirtualStackParticlesFromId(self.params['stackid'])
			vparts = vstackdata['particles']
			oldstack = vstackdata['filename']
			# get subset of virtualstack
			vpartlist = [int(vparts[p]['particleNumber'])-1 for p in includeParticle]
	
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, vpartlist, bad=self.params['savebad'])

			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=vpartlist)
		else:
			if self.params['writefile'] is True:
				apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=self.params['savebad'])
			apStack.averageStack(stack=oldstack,outfile=outavg,partlist=includeParticle)

		if self.params['writefile'] is True and not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")

		if self.params['commit'] is True:
			apStack.commitSubStack(self.params,included=includeParticle)
			newstackid = apStack.getStackIdFromPath(newstack)
			apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=4)
Example No. 39
    def start(self):
        ### get original aligned stack name
        astack = self.analysisdata['alignstack']['imagicfile']
        ### spider has problems with file name if it includes an "x#"
        astack = re.sub(r'x(\d)', r'x-\1', astack)
        ### get original align stack
        imagicalignedstack = os.path.join(
            self.analysisdata['alignstack']['path']['path'], astack)
        alignedstack = re.sub("\.", "_", imagicalignedstack) + ".spi"
        while os.path.isfile(alignedstack):
            apFile.removeFile(alignedstack)
        emancmd = "proc2d %s %s spiderswap" % (imagicalignedstack,
                                               alignedstack)
        apEMAN.executeEmanCmd(emancmd, showcmd=True, verbose=True)

        ### get database information
        numpart = self.analysisdata['alignstack']['num_particles']
        corandata = os.path.join(self.analysisdata['path']['path'],
                                 "coran/corandata")

        ### parse factor list
        factorlist = self.params['factorstr'].split(",")
        factorstr, factorkey = operations.intListToString(factorlist)
        factorstr = re.sub(",", ", ", factorstr)
        apDisplay.printMsg("using factorlist " + factorstr)
        if len(factorlist) > self.analysisdata['coranrun']['num_factors']:
            apDisplay.printError(
                "Requested factor list is longer than available factors")

        if self.params['commit'] is True:
            self.insertClusterRun(insert=True)
        else:
            apDisplay.printWarning("not committing results to DB")

        numclasslist = self.params['numclasslist'].split(",")
        if self.params['method'] != "kmeans":
            rundir = "cluster"
            apParam.createDirectory(rundir)
            ### step 1: use coran data to create hierarchy
            dendrogramfile = classification.hierarchClusterProcess(
                numpart, factorlist, corandata, rundir, dataext=".spi")
            ### step 2: asssign particles to groups based on hierarchy

        for item in numclasslist:
            t0 = time.time()
            if not item or not re.match("^[0-9]+$", item):
                continue
            numclass = int(item)
            apDisplay.printColor(
                "\n============================\nprocessing class averages for "
                + str(numclass) + " classes\n============================\n",
                "green")

            #run the classification
            if self.params['method'] == "kmeans":
                apDisplay.printMsg("Using the k-means clustering method")
                classavg, classvar = classification.kmeansCluster(
                    alignedstack,
                    numpart,
                    numclasses=numclass,
                    timestamp=self.timestamp,
                    factorlist=factorlist,
                    corandata=corandata,
                    dataext=".spi")
            else:
                apDisplay.printMsg("Using the hierarch clustering method")
                classavg, classvar = classification.hierarchClusterClassify(
                    alignedstack,
                    dendrogramfile,
                    numclass,
                    self.timestamp,
                    rundir,
                    dataext=".spi")
                #classavg,classvar = classification.hierarchCluster(alignedstack, numpart, numclasses=numclass,
                #       timestamp=self.timestamp, factorlist=factorlist, corandata=corandata, dataext=".spi")
            if self.params['commit'] is True:
                self.insertClusterStack(classavg,
                                        classvar,
                                        numclass,
                                        insert=True)
            else:
                apDisplay.printWarning("not committing results to DB")

            apDisplay.printMsg("Completed " + str(numclass) + " classes in " +
                               apDisplay.timeString(time.time() - t0))
def normalizeVol(volfile, dataext=".spi"):
        """
        inputs:
                volume
        outputs:
                volume
        """
        ### setup
        starttime = time.time()
        volfile = spyder.fileFilter(volfile)
        if not os.path.isfile(volfile+dataext):
                apDisplay.printError("volume file not found: "+volfile+dataext)
                
        mySpider = spyder.SpiderSession(dataext=dataext, logo=True)
        ### read out the statistics of the volume
        mySpider.toSpider("FS x11,x12", 
                volfile, #filename for volume
        )
        mySpider.toSpider("IF(x12.LT.0.0)x12=-x12")
        ### set all values to positive
        mySpider.toSpider("AR",
                volfile, #filename for volume
                "_1",
                "(P1+x12)",
        )
        ### save file
        mySpider.toSpider("CP",
                "_1",
                volfile, #filename for volume
        )
        
        mySpider.close()
        apDisplay.printColor("finished normalizing the volume to set all values to be positive"+apDisplay.timeString(time.time()-starttime), "cyan")
        return  
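For reference, the FS/AR sequence above reads the volume statistics into registers and then adds the absolute value of the minimum to every voxel. A numpy sketch of the same arithmetic (illustration only, not part of the SPIDER pipeline):

def normalizeVolNumpy(vol):
    """Add |min| to every voxel, mirroring the FS/AR SPIDER sequence above;
    vol is a numpy array rather than a SPIDER file (sketch only)."""
    return vol + abs(vol.min())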
Example No. 41
    def removePtclsByJumps(self, particles, rejectlst):
        eulerjump = apEulerJump.ApEulerJump()
        numparts = len(particles)
        apDisplay.printMsg("finding euler jumps for " + str(numparts) +
                           " particles")

        ### check symmetry
        symmetry = eulerjump.getSymmetry(self.params['reconid'], msg=True)
        if not re.match("^[cd][0-9]+$", symmetry.lower()) and not re.match(
                "^icos", symmetry.lower()):
            apDisplay.printError(
                "Cannot calculate euler jumps for symmetry: " + symmetry)
            return
        self.params['sym'] = symmetry.lower()

        ### prepare file
        f = open('jumps.txt', 'w')
        f.write("#pnum\t")
        headerlist = ('mean', 'median', 'stdev', 'min', 'max')
        for key in headerlist:
            f.write(key + "\t")
        f.write("\n")

        ### get stack particles
        stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

        ### start loop
        t0 = time.time()
        medians = []
        count = 0
        apDisplay.printMsg("processing euler jumps for recon run=" +
                           str(self.params['reconid']))
        for stackpart in stackparts:
            count += 1
            partnum = stackpart['particleNumber']
            f.write('%d\t' % partnum)
            jumpdata = eulerjump.getEulerJumpData(
                self.params['reconid'],
                stackpartid=stackpart.dbid,
                stackid=self.params['stackid'],
                sym=symmetry)
            medians.append(jumpdata['median'])
            if (jumpdata['median'] >
                    self.params['avgjump']) and partnum not in rejectlst:
                rejectlst.append(partnum)
            for key in headerlist:
                f.write("%.3f\t" % (jumpdata[key]))
            f.write("\n")
            if count % 1000 == 0:
                timeremain = (time.time() - t0) / (count + 1) * (numparts -
                                                                 count)
                apDisplay.printMsg(
                    "particle=% 5d; median jump=% 3.2f, remain time= %s" %
                    (partnum, jumpdata['median'],
                     apDisplay.timeString(timeremain)))
                #f.flush()
        ### print stats
        apDisplay.printMsg("-- median euler jumper stats --")
        medians = numpy.asarray(medians, dtype=numpy.float32)
        apDisplay.printMsg("mean/std :: " + str(round(medians.mean(), 2)) +
                           " +/- " + str(round(medians.std(), 2)))
        apDisplay.printMsg("min/max  :: " + str(round(medians.min(), 2)) +
                           " <> " + str(round(medians.max(), 2)))

        perrej = round(
            100.0 * float(numparts - len(rejectlst)) / float(numparts), 2)
        apDisplay.printMsg("keeping " + str(numparts - len(rejectlst)) +
                           " of " + str(numparts) + " particles (" +
                           str(perrej) + "%) so far " + " in " +
                           apDisplay.timeString(time.time() - t0))

        return rejectlst
    def radonAlign(self, stackfile):
        """
                performs the meat of the program aligning the particles and creating references
                """
        ### FUTURE: only read a few particles into memory at one time
        imageinfo = apImagicFile.readImagic(stackfile, msg=False)
        imagelist = imageinfo['images']
        reflist = self.createReferences(imagelist)
        radonimagelist = self.getRadons(imagelist)

        ### a pre-normalization value so the reference pixels do not overflow
        partperref = self.params['numpart'] / float(self.params['numrefs'])

        for iternum in range(self.params['numiter']):
            ### save references to a file
            apImagicFile.writeImagic(reflist,
                                     "reflist%02d.hed" % (iternum),
                                     msg=False)

            ### create Radon transforms for references
            radonreflist = self.getRadons(reflist)

            ### create empty references
            newreflist = []
            newrefcount = []
            shape = imagelist[0].shape
            for i in range(self.params['numrefs']):
                newrefcount.append(0)
                newreflist.append(numpy.zeros(shape))

            ### get alignment parameters
            aligndatalist = []
            cclist = []
            t0 = time.time()
            for i in range(len(imagelist)):
                if i % 50 == 0:
                    ### FUTURE: add time estimate
                    sys.stderr.write(".")
                image = imagelist[i]
                radonimage = radonimagelist[i]
                aligndata = self.getBestAlignForImage(image, radonimage,
                                                      reflist, radonreflist,
                                                      None)
                #aligndatalist.append(aligndata)
                refid = aligndata['refid']
                cclist.append(aligndata['bestcc'])

                ### create new references
                refimage = reflist[refid]
                alignedimage = self.transformImage(image, aligndata, refimage)
                newreflist[refid] += alignedimage / partperref
                newrefcount[refid] += 1
            sys.stderr.write("\n")
            print "Alignment complete in %s" % (
                apDisplay.timeString(time.time() - t0))

            ### report median cross-correlation, it should get better each iter
            mediancc = numpy.median(numpy.array(cclist))
            apDisplay.printMsg("Iter %02d, Median CC: %.8f" %
                               (iternum, mediancc))
            print newrefcount

            ### FUTURE: re-calculate Radon transform for particles with large shift

            ### new references are now the old references
            shape = reflist[0].shape
            reflist = []
            for i in range(self.params['numrefs']):
                if newrefcount[i] == 0:
                    ### reference with no particles -- just add noise
                    apDisplay.printWarning("Reference %02d has no particles" %
                                           (i + 1))
                    ref = numpy.random.random(shape)
                else:
                    ref = (newreflist[i] / newrefcount[i]) * partperref
                reflist.append(ref)

        return aligndatalist
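The core idea behind getBestAlignForImage() is that rotating an image circularly shifts its Radon transform along the angle axis, so the in-plane rotation can be found with a one-dimensional correlation over that axis. A rough sketch of such a search, assuming scikit-image is available; this illustrates the idea only and is not the class's actual implementation.

import numpy
from skimage.transform import radon

def bestRotationByRadon(image, refimage, anglestep=2.0):
    """Estimate the in-plane rotation between two images by correlating
    their Radon transforms along the angle axis (sketch only; the sign of
    the returned angle depends on the rotation convention in use)."""
    thetas = numpy.arange(0.0, 360.0, anglestep)
    sino = radon(image, theta=thetas)
    refsino = radon(refimage, theta=thetas)
    bestcc = None
    bestangle = 0.0
    for shift in range(len(thetas)):
        # circularly shift the sinogram by 'shift' angle steps and correlate
        cc = (numpy.roll(sino, shift, axis=1) * refsino).sum()
        if bestcc is None or cc > bestcc:
            bestcc = cc
            bestangle = thetas[shift]
    return bestangle, bestcc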
    def start(self):
        self.runtime = 0

        self.checkCoranRun()

        ### convert stack to spider
        self.alignstackdata = self.getAlignedStack()
        maskpixrad = self.params['maskrad'] / self.alignstackdata[
            'pixelsize'] / self.params['bin']
        boxpixdiam = int(math.ceil(maskpixrad) + 1) * 2
        if boxpixdiam * self.params['bin'] > self.alignstackdata['boxsize']:
            boxpixdiam = math.floor(self.alignstackdata['boxsize'] /
                                    self.params['bin'])
        clippixdiam = boxpixdiam * self.params['bin']
        apDisplay.printMsg("Pixel mask radius=" + str(maskpixrad))

        oldalignedstack = os.path.join(self.alignstackdata['path']['path'],
                                       self.alignstackdata['imagicfile'])
        alignedstackname = re.sub("\.", "_",
                                  self.alignstackdata['imagicfile']) + ".spi"
        alignedstack = os.path.join(self.params['rundir'], alignedstackname)
        apFile.removeFile(alignedstack)
        emancmd = ("proc2d %s %s spiderswap shrink=%d clip=%d,%d edgenorm" %
                   (oldalignedstack, alignedstack, self.params['bin'],
                    clippixdiam, clippixdiam))
        if self.params['numpart'] is not None:
            emancmd += " last=%d" % (self.params['numpart'] - 1)
            numpart = self.params['numpart']
        else:
            numpart = self.getNumAlignedParticles()
        apEMAN.executeEmanCmd(emancmd, verbose=True)

        esttime = classification.estimateTime(numpart, maskpixrad)
        apDisplay.printColor("Running spider this can take awhile, estimated time: "+\
                apDisplay.timeString(esttime),"cyan")

        ### do correspondence analysis
        corantime = time.time()
        self.contriblist = classification.correspondenceAnalysis(
            alignedstack,
            boxsize=boxpixdiam,
            maskpixrad=maskpixrad,
            numpart=numpart,
            numfactors=self.params['numfactors'])
        corantime = time.time() - corantime

        ### make dendrogram
        dendrotime = time.time()
        classification.makeDendrogram(
            numfactors=min(3, self.params['numfactors']))
        dendrotime = time.time() - dendrotime

        inserttime = time.time()
        if self.params['commit'] is True:
            self.runtime = corantime
            self.insertCoranRun(insert=True)
        else:
            apDisplay.printWarning("not committing results to DB")
        inserttime = time.time() - inserttime

        apFile.removeFile(alignedstack, warn=True)

        apDisplay.printMsg("Correspondence Analysis time: " +
                           apDisplay.timeString(corantime))
        apDisplay.printMsg("Make Dendrogram time: " +
                           apDisplay.timeString(dendrotime))
        apDisplay.printMsg("Database Insertion time: " +
                           apDisplay.timeString(inserttime))
Example No. 44
    def start(self):
        self.runtime = 0
        self.partlist = []
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid'])
        self.stack['part'] = apStack.getOneParticleFromStackId(
            self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])
        self.clipsize = int(
            math.floor(self.stack['boxsize'] / self.params['bin'] / 2.0) *
            self.params['bin'] * 2.0)

        #self.checkRunNamePath()

        ### convert stack to spider
        spiderstack = self.createSpiderFile()

        #create template stack
        templatestack = self.createTemplateStack()

        #create orientation reference
        orientref = self.createOrientationReference()

        ###################################################################
        aligntime = time.time()

        ### create batch file
        batchfilepath = self.setupBatchFile(spiderstack, templatestack,
                                            orientref)

        ### run the spider alignment
        apDisplay.printColor(
            "Running iterative ref-classification and free-alignment with spider",
            "cyan")
        self.runSpiderBatch(batchfilepath, spiderstack)

        aligntime = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(aligntime))
        ###################################################################

        ### remove unaligned spider stack
        apDisplay.printMsg("Removing un-aligned stack: " + spiderstack)
        apFile.removeFile(spiderstack, warn=False)

        ### check to be sure files exist
        avgfile = os.path.join(self.params['rundir'],
                               "alignstack.spi")  #class averages
        if not os.path.isfile(avgfile):
            apDisplay.printError(
                "Final stack of aligned particles does not exist.")

        ### convert stacks to imagic
        self.convertStack2Imagic("alignstack.spi")
        self.convertStack2Imagic("avg.spi")

        ### make alignment average in mrc format
        emancmd = "proc2d avg.spi average.mrc average"
        apEMAN.executeEmanCmd(emancmd)

        inserttime = time.time()
        if self.params['commit'] is True:
            apDisplay.printWarning("committing results to DB")
            self.runtime = aligntime
            self.insertEdIterRun(insert=True)
        else:
            apDisplay.printWarning("not committing results to DB")
        inserttime = time.time() - inserttime

        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(aligntime))
        apDisplay.printMsg("Database Insertion time: " +
                           apDisplay.timeString(inserttime))
    def convertSQLtoTree(self, results, notstackid):
        t0 = time.time()
        parttree = []
        minpartnum1 = 1e6
        minpartnum2 = 1e6
        count = 0
        tiltangles = []
        notangles = []
        for row in results:
            if len(row) < 6:
                apDisplay.printError("delete MySQL cache file and run again")

            try:
                partnum1 = self.intOrNone(row[0])
                stackid1 = self.intOrNone(row[1])
                angle1 = self.floatOrNone(row[2])
                partnum2 = self.intOrNone(row[3])
                stackid2 = self.intOrNone(row[4])
                angle2 = self.floatOrNone(row[5])
                if partnum1 and partnum1 < minpartnum1:
                    minpartnum1 = partnum1
                if partnum2 and partnum2 < minpartnum2:
                    minpartnum2 = partnum2
                if partnum1 is not None and partnum2 is not None:
                    if stackid1 == notstackid:
                        partpair1 = {
                            'part1': partnum1,
                            'part2': -partnum2,
                            'stackid1': stackid1,
                            'stackid2': stackid2,
                            'tilt': angle1
                        }
                        notangles.append(angle1)
                        partpair2 = {
                            'part1': -partnum2,
                            'part2': partnum1,
                            'stackid1': stackid2,
                            'stackid2': stackid1,
                            'tilt': angle2
                        }
                        tiltangles.append(angle2)
                    else:
                        partpair1 = {
                            'part1': -partnum1,
                            'part2': partnum2,
                            'stackid1': stackid1,
                            'stackid2': stackid2,
                            'tilt': angle1
                        }
                        tiltangles.append(angle1)
                        partpair2 = {
                            'part1': partnum2,
                            'part2': -partnum1,
                            'stackid1': stackid2,
                            'stackid2': stackid1,
                            'tilt': angle2
                        }
                        notangles.append(angle2)
                elif partnum1 is not None:
                    if stackid1 == notstackid:
                        partpair1 = {
                            'part1': partnum1,
                            'part2': None,
                            'stackid1': stackid1,
                            'stackid2': None,
                            'tilt': angle1
                        }
                    else:
                        partpair1 = {
                            'part1': -partnum1,
                            'part2': None,
                            'stackid1': stackid1,
                            'stackid2': None,
                            'tilt': angle1
                        }
                elif partnum2 is not None:
                    if stackid2 == notstackid:
                        partpair2 = {
                            'part1': partnum2,
                            'part2': None,
                            'stackid1': stackid2,
                            'stackid2': None,
                            'tilt': angle2
                        }
                    else:
                        partpair2 = {
                            'part1': -partnum2,
                            'part2': None,
                            'stackid1': stackid2,
                            'stackid2': None,
                            'tilt': angle2
                        }
                if partnum1 is not None:
                    parttree.append(partpair1)
                    count += 1
                if partnum2 is not None:
                    parttree.append(partpair2)
                    count += 1
            except:
                print count, row
                apDisplay.printError("bad row entry")
        print count, minpartnum1, minpartnum2
        apDisplay.printMsg("Converted " + str(len(parttree)) +
                           " particles in " +
                           apDisplay.timeString(time.time() - t0))
        tiltarray = numpy.array(tiltangles, dtype=numpy.float32)
        self.tiltangle = tiltarray.mean()
        apDisplay.printColor(
            "  Tilted angle: %.3f +/- %.3f" %
            (self.tiltangle, tiltarray.std()), "cyan")
        print tiltarray
        notarray = numpy.array(notangles, dtype=numpy.float32)
        self.notangle = notarray.mean()
        apDisplay.printColor(
            "Untilted angle: %.3f +/- %.3f" % (self.notangle, notarray.std()),
            "green")
        print notarray
        return parttree
    def getParticles(self, notstackid, tiltstackid):
        cachefile = "parttree-" + str(notstackid) + "_" + str(
            tiltstackid) + ".cache"
        if os.path.isfile(cachefile):
            cachef = open(cachefile, "r")
            parttree = cPickle.load(cachef)
            cachef.close()
            return parttree

        t0 = time.time()
        query = (
            "SELECT \n" + "  stpart1.`particleNumber` AS partnum1, \n" +
            "  stpart1.`REF|ApStackData|stack` AS stackid1, \n" +
            "  DEGREES(scoped1.`SUBD|stage position|a`) AS alpha1, \n" +
            "  stpart2.`particleNumber` AS partnum2, \n" +
            "  stpart2.`REF|ApStackData|stack` AS stackid2, \n" +
            "  DEGREES(scoped2.`SUBD|stage position|a`) AS alpha2 \n" +
            "FROM `ApTiltParticlePairData` AS tiltd \n" +
            "LEFT JOIN `ApStackParticleData` AS stpart1 \n" +
            "  ON stpart1.`REF|ApParticleData|particle` = tiltd.`REF|ApParticleData|particle1` \n"
            + "LEFT JOIN `ApParticleData` AS part1 \n" +
            "  ON stpart1.`REF|ApParticleData|particle` = part1.`DEF_id` \n" +
            "LEFT JOIN dbemdata.`AcquisitionImageData` AS imaged1 \n" +
            "  ON part1.`REF|leginondata|AcquisitionImageData|image` = imaged1.`DEF_id` \n"
            + "LEFT JOIN dbemdata.`ScopeEMData` AS scoped1 \n" +
            "  ON imaged1.`REF|ScopeEMData|scope` = scoped1.`DEF_id` \n" +
            "LEFT JOIN `ApStackParticleData` AS stpart2 \n" +
            "  ON stpart2.`REF|ApParticleData|particle` = tiltd.`REF|ApParticleData|particle2` \n"
            + "LEFT JOIN `ApParticleData` AS part2 \n" +
            "  ON stpart2.`REF|ApParticleData|particle` = part2.`DEF_id` \n" +
            "LEFT JOIN dbemdata.`AcquisitionImageData` AS imaged2 \n" +
            "  ON part2.`REF|leginondata|AcquisitionImageData|image` = imaged2.`DEF_id` \n"
            + "LEFT JOIN dbemdata.`ScopeEMData` AS scoped2 \n" +
            "  ON imaged2.`REF|ScopeEMData|scope` = scoped2.`DEF_id` \n" +
            "WHERE \n" + "     ( ( stpart1.`REF|ApStackData|stack` = " +
            str(notstackid) + " \n" +
            "      OR stpart1.`REF|ApStackData|stack` IS NULL ) \n" +
            "   AND ( stpart2.`REF|ApStackData|stack` = " + str(tiltstackid) +
            " \n" + "      OR stpart2.`REF|ApStackData|stack` IS NULL ) )\n" +
            "  OR ( ( stpart1.`REF|ApStackData|stack` = " + str(tiltstackid) +
            " \n" + "      OR stpart1.`REF|ApStackData|stack` IS NULL ) \n" +
            "   AND ( stpart2.`REF|ApStackData|stack` = " + str(notstackid) +
            " \n" + "      OR stpart2.`REF|ApStackData|stack` IS NULL ) )\n" +
            "ORDER BY stpart1.`particleNumber` ASC \n"
            #+"LIMIT 21 \n"
        )
        apDisplay.printMsg("particle query at " + time.asctime())
        self.cursor.execute(query)
        results = self.cursor.fetchall()
        if not results:
            apDisplay.printError("Failed to get stack particles")
        apDisplay.printMsg("Fetched " + str(len(results)) + " data pairs in " +
                           apDisplay.timeString(time.time() - t0))
        parttree = self.convertSQLtoTree(results, notstackid)
        parttree.sort(self.compPart)

        ### save to file
        cachef = open(cachefile, "w")
        cPickle.dump(parttree, cachef)
        cachef.close()

        #for part in parttree:
        #       if part['part2'] is None:
        #               print part
        #print ""
        #print ""

        return parttree
Example No. 47
    def preprocessStack(self):
        '''
                Use database particle stack file to create a stack file with binning and filtering
                ready for processing.
                '''
        need_modify = False
        emancmd = "proc2d "
        if not os.path.isfile(self.stack['file']):
            apDisplay.printError("stackfile does not exist: " +
                                 self.stack['file'])
        emancmd += self.stack['file'] + " "

        extname, addformat = self.proc2dFormatConversion()
        outstack = os.path.join(self.params['rundir'], "start.%s" % extname)
        apFile.removeFile(outstack, warn=True)
        emancmd += outstack + " "
        stackfilenamebits = self.stack['file'].split('.')
        oldextname = stackfilenamebits[-1]
        if extname != oldextname:
            need_modify = True
        if addformat:
            need_modify = True
            emancmd += addformat + " "

        emancmd += "apix=" + str(self.stack['apix']) + " "
        if self.params['lowpass'] > 0:
            need_modify = True
            emancmd += "lp=" + str(self.params['lowpass']) + " "
        if self.params['highpass'] > 0:
            need_modify = True
            emancmd += "hp=" + str(self.params['highpass']) + " "
        if self.params['last'] > 0 and self.params['last'] < self.params[
                'totalpart']:
            need_modify = True
            emancmd += "last=" + str(self.params['last'] - 1) + " "
        if self.params['bin'] > 1:
            need_modify = True
            emancmd += "shrink=" + str(self.params['bin']) + " "
            clipsize = self.calcClipSize(self.stack['boxsize'],
                                         self.params['bin'])
            emancmd += "clip=" + str(clipsize) + "," + str(
                clipsize) + "  edgenorm" + " "
            self.stack['boxsize'] = clipsize / self.params['bin']
        if self.invert:
            need_modify = True
            emancmd += 'invert '

        if need_modify:
            apFile.removeStack(outstack, warn=False)
            starttime = time.time()
            apDisplay.printColor(
                "Running particle stack conversion.... This can take a while",
                "cyan")
            apEMAN.executeEmanCmd(emancmd, verbose=True)
            apDisplay.printColor(
                "finished eman in " +
                apDisplay.timeString(time.time() - starttime), "cyan")
        else:
            # no need to execute EmanCmd if the stack is not modified
            apDisplay.printColor(
                "No stack pre-processing is needed. Copying stack to run directory.",
                "cyan")
            shutil.copy(self.stack['file'], outstack)
            outstackimg = outstack.replace('hed', 'img')
            if not os.path.isfile(outstackimg):
                # only copy if not exist to save time
                shutil.copy(self.stack['file'].replace('hed', 'img'),
                            outstackimg)
        self.stack['file'] = outstack
        if not os.path.isfile(self.stack['file']):
            apDisplay.printColor(
                "Could not locate stack: %s" % self.stack['file'], "cyan")
        self.stack['apix'] = self.stack['apix'] * self.params['bin']
        return outstack
    def start(self):
        ### SIMPLE is written in Fortran and cannot handle overly long input paths,
        ### so change to the run directory to keep filenames short, in particular for the stack
        os.chdir(self.params['rundir'])

        ### stack needs to be centered
        if self.params['no_center'] is False:
            if os.path.isfile(os.path.join(self.params['rundir'], "ali.hed")):
                apFile.removeStack(
                    os.path.join(self.params['rundir'], "ali.hed"))
            centstack = os.path.join(self.params['rundir'], "ali.hed")
            centcmd = "cenalignint %s > cenalignint.log" % (self.stack['file'])
            apParam.runCmd(centcmd, "EMAN")

        ### process stack to local file
        if self.params['timestamp'] is None:
            apDisplay.printMsg("creating timestamp")
            self.params['timestamp'] = self.timestamp
        self.params['localstack'] = os.path.join(
            self.params['rundir'], self.params['timestamp'] + ".spi")

        if os.path.isfile(self.params['localstack']):
            apFile.removeFile(self.params['localstack'])
        if self.params['no_center'] is False:
            proccmd = "proc2d " + centstack + " " + self.params[
                'localstack'] + " apix=" + str(self.stack['apix'])
        else:
            proccmd = "proc2d " + self.stack['file'] + " " + self.params[
                'localstack'] + " apix=" + str(self.stack['apix'])
        if self.params['bin'] > 1 or self.params['clipsize'] is not None:
            proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'],
                                                   self.boxsize, self.boxsize)
        proccmd += " last=" + str(self.params['numpart'] - 1)
        proccmd += " spiderswap"
        #		if self.params['highpass'] is not None and self.params['highpass'] > 1:
        #			proccmd += " hp="+str(self.params['highpass'])
        #		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
        #			proccmd += " lp="+str(self.params['lowpass'])
        apParam.runCmd(proccmd, "EMAN", verbose=True)

        #		if self.params['numpart'] != int(spider.getSpiderHeader(self.params['localstack'])[-2]):
        #			apDisplay.printError("Missing particles in stack")

        ### setup Simple command
        aligntime = time.time()
        simpleopts = (
            "" + " stk=%s" % os.path.basename(self.params['localstack']) +
            " box=%d" % self.boxsize + " nptcls=%d" % self.params['numpart'] +
            " smpd=%.3f" % self.apix + " ring2=%d" % self.params['ring2'] +
            " ncls=%d" % self.params['ncls'] +
            " minp=%d" % self.params['minp'] +
            " nvars=%d" % self.params['nvars'] +
            " nthr=%d" % self.params['nproc'])
        if self.params['no_kmeans'] is True:
            simpleopts += " kmeans=off"
        if self.params['nran'] is not None:
            simpleopts += "nran=%d" % self.params['nran']

        ### SIMPLE 2D clustering
        apDisplay.printColor(
            "Using " + str(self.params['nproc']) + " processors!", "green")
        simpleexe = apParam.getExecPath("cluster", die=True)
        simplecmd = "%s %s" % (simpleexe, simpleopts)
        self.writeSimpleLog(simplecmd)
        apParam.runCmd(simplecmd,
                       package="SIMPLE",
                       verbose=True,
                       showcmd=True,
                       logfile="cluster.std")
        self.params['runtime'] = time.time() - aligntime
        apDisplay.printMsg("Alignment & Classification time: " +
                           apDisplay.timeString(self.params['runtime']))

        ### SIMPLE spider to Fourier format
        clsavgs = "cavgstk.spi"
        if not os.path.isfile(os.path.join(self.params['rundir'], clsavgs)):
            apDisplay.printError(
                "class averages were not created! try rerunning with centering, more particles, or less ppc"
            )
        try:
            nptcls = spider.getSpiderHeader(clsavgs)[-2]
        except:
            nptcls = self.params['ncls']
            apDisplay.printWarning(
                "class average file may not have been created! Please check existence of file cavgstk.spi"
            )
        projfile = "projs"
        projext = ".fim"
        simpleexe = apParam.getExecPath("spi_to_fim", die=True)
        simpleopts = ("" + " stk=%s" % clsavgs + " box=%d" % self.boxsize +
                      " nptcls=%d" % nptcls + " smpd=%.3f" % self.apix +
                      " outbdy=%s" % projfile +
                      " msk=%d" % self.params['mask'])
        simplecmd = "%s %s" % (simpleexe, simpleopts)
        self.writeSimpleLog(simplecmd)
        apParam.runCmd(simplecmd,
                       package="SIMPLE",
                       verbose=True,
                       showcmd=True,
                       logfile="spi_to_fim.std")

        ### SIMPLE origami, ab initio 3D reconstruction
        refinetime = time.time()
        simpleexe = apParam.getExecPath("origami", die=True)
        simpleopts = (
            "" + " fstk=%s" % projfile + projext +
            " froms=%d" % self.params['froms'] +
            " tos=%d" % self.params['tos'] + " lp=%d" % self.params['lp'] +
            " hp=%d" % self.params['hp'] +
            " maxits=%d" % self.params['maxits'] +
            " msk=%d" % self.params['mask'] + " mw=%d" % self.params['mw'] +
            " frac=%.3f" % self.params['frac'] +
            " amsklp=%d" % self.params['amsklp'] +
            " edge=%d" % self.params['edge'] + " trs=%d" % self.params['trs'] +
            " nthr=%d" % self.params['nproc'])
        simplecmd = "%s %s" % (simpleexe, simpleopts)
        self.writeSimpleLog(simplecmd)
        apParam.runCmd(simplecmd,
                       package="SIMPLE",
                       verbose=True,
                       showcmd=True,
                       logfile="origami.std")
        refinetime = time.time() - refinetime
        apDisplay.printMsg("Origami reconstruction time: " +
                           apDisplay.timeString(refinetime))

        #		'''

        ### minor post-processing
        self.clearIntermediateFiles()
        apParam.dumpParameters(
            self.params,
            "simple-" + self.params['timestamp'] + "-params.pickle")

        ### upload results
        self.runparams = apParam.readRunParameters("simple-" +
                                                   self.params['timestamp'] +
                                                   "-params.pickle")

        ### create average of aligned and clustered stacks, convert to IMAGIC
        alignedStackSpi = "inplalgnstk.spi"
        alignedStack = "inplalgnstk.hed"
        if os.path.isfile(alignedStack):
            apFile.removeStack(alignedStack)
        emancmd = "proc2d %s %s flip" % (alignedStackSpi, alignedStack)
        apParam.runCmd(emancmd, "EMAN")
        clusterStackSpi = "cavgstk.spi"
        clusterStack = "cavgstk.hed"
        if os.path.isfile(clusterStack):
            apFile.removeStack(clusterStack)
        emancmd = "proc2d %s %s flip" % (clusterStackSpi, clusterStack)
        apParam.runCmd(emancmd, "EMAN")
        #		apStack.averageStack(alignedStack)

        ### parse alignment and classification results
        if self.params['no_center'] is False:
            self.alignD = self.getAlignParameters(centparams="cenalignint.log")
        else:
            self.alignD = self.getAlignParameters()
        if self.params['no_kmeans'] is False:
            self.classD = self.getClassification("kmeans.spi", clusterStack)
        else:
            self.classD = self.getClassification("hcl.spi", clusterStack)

        ### upload to database
        self.insertSIMPLEAlignParamsIntoDatabase()
        self.insertAlignStackRunIntoDatabase(alignedStack, clusterStack)
        self.calcResolution(alignedStack)
        self.insertAlignParticlesIntoDatabase()
        self.insertClusterRunIntoDatabase()
        self.insertClusterStackIntoDatabase(clusterStack, len(self.classD))
        self.insertSIMPLEOrigamiParamsIntoDatabase()
Example No. 49
def power(image, pixelsize, fieldsize=None, mask_radius=0.5, msg=True):
	"""
	computes power spectra of image using sub-field averaging

	image - (2d numpy float array) image to compute power spectra
	pixelsize - (float) used to compute frequency, freq. 
		can be either Angstroms or meters, but freq will have same inverse units
	fieldsize - (integer) size of box
	mask_radius - (float) passed to imagefun.power(), 
		creates a mask of size mask_radius in the center

	TODO: add median flag, requires saving individual power spectra rather than summing
	"""
	if fieldsize is None:
		fieldsize = getFieldSize(image.shape)

	t0 = time.time()
	xsize, ysize = image.shape
	xnumstep = int(math.floor(xsize/float(fieldsize)))*2-1
	ynumstep = int(math.floor(ysize/float(fieldsize)))*2-1
	f = fieldsize
	#powersum = numpy.zeros((fieldsize,fieldsize))
	#envelop = numpy.ones((fieldsize,fieldsize)) 
	envelop = twodHann(fieldsize)
	count = 0
	psdlist = []
	if msg is True:
		sys.stderr.write("Computing power spectra in %dx%d blocks"%(fieldsize,fieldsize))
	for i in range(xnumstep):
		for j in range(ynumstep):
			count += 1
			x1 = f*i/2
			x2 = x1 + f
			y1 = f*j/2
			y2 = y1 + f
			if debug is True:
				print "%03d: %d:%d, %d:%d"%(count, x1, x2, y1, y2)
			elif msg is True:
				sys.stderr.write(".")
			cutout = image[x1:x2, y1:y2]
			powerspec = imagefun.power(cutout*envelop, mask_radius)
			psdlist.append(powerspec)
	if xsize%fieldsize > fieldsize*0.1:
		for j in range(ynumstep):
			count += 1
			x1 = xsize-f
			x2 = xsize
			y1 = f*j/2
			y2 = y1 + f
			if debug is True:
				print "%03d: %d:%d, %d:%d"%(count, x1, x2, y1, y2)
			elif msg is True:
				sys.stderr.write(".")
			cutout = image[x1:x2, y1:y2]
			powerspec = imagefun.power(cutout*envelop, mask_radius)
			psdlist.append(powerspec)
	if ysize%fieldsize > fieldsize*0.1:
		for i in range(xnumstep):
			count += 1
			x1 = f*i/2
			x2 = x1 + f
			y1 = ysize-f
			y2 = ysize
			if debug is True:
				print "%03d: %d:%d, %d:%d"%(count, x1, x2, y1, y2)
			elif msg is True:
				sys.stderr.write(".")
			cutout = image[x1:x2, y1:y2]
			powerspec = imagefun.power(cutout*envelop, mask_radius)
			psdlist.append(powerspec)
	sys.stderr.write("\n")
	freq = 1.0/(powerspec.shape[0]*pixelsize)

	#poweravg = numpy.array(psdlist).mean(0)
	apDisplay.printMsg("Computing median of power spectra series")
	poweravg = numpy.median(psdlist, axis=0)
	if msg is True:
		apDisplay.printMsg("Compute PSD with fieldsize %d and %d images complete in %s"
			%(fieldsize, count, apDisplay.timeString(time.time()-t0)))
	return poweravg, freq
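
### A minimal usage sketch for power() above, assuming the module-level
### imports and globals (numpy, sys, imagefun, apDisplay, debug) are
### available; the image and the 1.5 A/pixel size are illustrative stand-ins.
def _example_power_usage():
	import numpy
	img = numpy.random.standard_normal((2048, 2048)).astype(numpy.float32)
	# median PSD of overlapping 512x512 sub-fields; freq is the frequency
	# step (inverse Angstroms per PSD pixel) implied by the pixel size
	poweravg, freq = power(img, pixelsize=1.5, fieldsize=512, msg=False)
	return poweravg, freq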
Ejemplo n.º 50
0
def mergeStacks(stacklist, mergestack, msg=True):
    ### initialization
    t0 = time.time()
    apFile.removeStack(mergestack, warn=msg)
    root = os.path.splitext(mergestack)[0]
    mergeheader = root + ".hed"
    mergedata = root + ".img"

    ### merge data files
    fout = open(mergedata, 'wb')
    numpart = 0
    totalsize = 0
    for stackfile in stacklist:
        stackdatafile = os.path.splitext(stackfile)[0] + ".img"
        ### size checks
        npart = apFile.numImagesInStack(stackdatafile)
        size = apFile.fileSize(stackdatafile)
        if msg is True:
            apDisplay.printMsg("%d particles in %s (%s)" %
                               (npart, stackdatafile, apDisplay.bytes(size)))
        totalsize += size
        numpart += npart

        fin = open(stackdatafile, 'rb')
        shutil.copyfileobj(fin, fout, 65536)
        fin.close()
    fout.close()
    if numpart < 1:
        apDisplay.printError("found %d particles" % (numpart))
    if msg is True:
        apDisplay.printMsg("found %d particles" % (numpart))
    finalsize = apFile.fileSize(mergedata)
    if finalsize != totalsize:
        apDisplay.printError(
            "size mismatch %s vs. %s" %
            (apDisplay.bytes(finalsize), apDisplay.bytes(totalsize)))
    if msg is True:
        apDisplay.printMsg(
            "size match %s vs. %s" %
            (apDisplay.bytes(finalsize), apDisplay.bytes(totalsize)))

    ### merge header files
    #apDisplay.printError("not finished")
    mergehead = open(mergeheader, 'wb')
    partnum = 1
    totalsize = 0
    for stackfile in stacklist:
        headerfilename = os.path.splitext(stackfile)[0] + ".hed"
        headfile = open(headerfilename, 'rb')
        ### size checks
        size = apFile.fileSize(headerfilename)
        if msg is True:
            apDisplay.printMsg("%s (%d kB)" % (headerfilename, size / 1024))
        totalsize += size

        #apDisplay.printMsg("%d\t%s"%(npart, stackfile))
        i = 0
        npart = apFile.numImagesInStack(stackfile)
        while i < npart:
            #print i, npart, partnum
            ### read old header
            data = headfile.read(1024)
            ### start new string
            headerstr = ""
            ### first image number
            headerstr += intToFourByte(partnum)
            ### number of images, less one
            headerstr += intToFourByte(numpart - partnum)
            ### always 0,1 ???
            headerstr += intToFourByte(0)
            headerstr += intToFourByte(1)
            ### creation date: day, month, year, hour, min, sec
            headerstr += intToFourByte(time.localtime()[2])
            headerstr += intToFourByte(
                time.localtime()[1])  #eman always uses month-1?
            headerstr += intToFourByte(time.localtime()[0])
            headerstr += intToFourByte(time.localtime()[3])
            headerstr += intToFourByte(time.localtime()[4])
            headerstr += intToFourByte(time.localtime()[5])
            ### append other header info, 4 characters per item
            headerstr += data[10 * 4:60 * 4]
            ### number of z slices
            headerstr += intToFourByte(1)
            ### first image number, EMAN does this
            headerstr += intToFourByte(partnum)
            ### append other header info, 4 characters per item
            headerstr += data[62 * 4:68 * 4]
            headerstr += intToFourByte(33686018)
            headerstr += data[69 * 4:]
            mergehead.write(headerstr)
            partnum += 1
            i += 1
    mergehead.close()
    if msg is True:
        apDisplay.printMsg("wrote %d particles to file %s" %
                           (numpart, mergestack))
    finalsize = apFile.fileSize(mergeheader)
    if finalsize != totalsize:
        apDisplay.printError(
            "size mismatch %s vs. %s" %
            (apDisplay.bytes(finalsize), apDisplay.bytes(totalsize)))
    if msg is True:
        apDisplay.printMsg(
            "size match %s vs. %s" %
            (apDisplay.bytes(finalsize), apDisplay.bytes(totalsize)))
        apDisplay.printMsg(
            "finished stack merge of %s in %s" %
            (mergestack, apDisplay.timeString(time.time() - t0)))
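
### A minimal usage sketch for mergeStacks() above, assuming the listed
### .hed/.img stack pairs already exist on disk; the file names are
### illustrative stand-ins.
def _example_mergeStacks_usage():
    stacklist = ["stack1.hed", "stack2.hed", "stack3.hed"]
    # concatenates the .img data files and rewrites each IMAGIC header so the
    # particle numbering runs continuously across the merged stack
    mergeStacks(stacklist, "merged.hed", msg=True)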
Ejemplo n.º 51
0

#===================
#===================
#===================
if __name__ == "__main__":
	a = mrc.read("/home/vosslab/test.mrc")
	a = imagefilter.planeRegression(a)
	fullpower = imagefun.power(a)
	#imagestat.printImageInfo(a)
	t0 = time.time()
	x = numpy.arange(6, 13)
	N = 2**x
	print N
	for n in N:
		print "====================================="
		# power() above returns (psd, freq); the 1.0 A/pixel size is an assumed placeholder
		b, freq = power(a, 1.0, fieldsize=n)
		b = imagefilter.frame_cut(b, numpy.array(b.shape)/2)
		imagefile.arrayToPng(b, "%04d-field.png"%(n))
		imagestat.printImageInfo(b)

		bin = int(round(2**12/n))
		b = imagefun.bin2(fullpower, bin)
		b = imagefilter.frame_cut(b, numpy.array(b.shape)/2)
		imagefile.arrayToPng(b, "%04d-binned.png"%(n))
		imagestat.printImageInfo(b)

	print "complete in %s"%(apDisplay.timeString(time.time()-t0))
	#imagestat.printImageInfo(b)
Ejemplo n.º 52
0
	def insertIteration(self, iteration):
		refineparamsq=appiondata.ApEmanRefineIterData()
		refineparamsq['ang']=iteration['ang']
		refineparamsq['lpfilter']=iteration['lpfilter']
		refineparamsq['hpfilter']=iteration['hpfilter']
		refineparamsq['pad']=iteration['pad']
		refineparamsq['EMAN_maxshift']=iteration['maxshift']
		refineparamsq['EMAN_hard']=iteration['hard']
		refineparamsq['EMAN_classkeep']=iteration['classkeep']
		refineparamsq['EMAN_classiter']=iteration['classiter']
		refineparamsq['EMAN_filt3d']=iteration['filt3d']
		refineparamsq['EMAN_shrink']=iteration['shrink']
		refineparamsq['EMAN_euler2']=iteration['euler2']
		refineparamsq['EMAN_xfiles']=iteration['xfiles']
		refineparamsq['EMAN_median']=iteration['median']
		refineparamsq['EMAN_phasecls']=iteration['phasecls']
		refineparamsq['EMAN_fscls']=iteration['fscls']
		refineparamsq['EMAN_refine']=iteration['refine']
		refineparamsq['EMAN_goodbad']=iteration['goodbad']
		refineparamsq['EMAN_perturb']=iteration['perturb']
		refineparamsq['MsgP_cckeep']=iteration['msgpasskeep']
		refineparamsq['MsgP_minptls']=iteration['msgpassminp']

		#create Chimera snapshots
		fscfile = os.path.join(self.params['rundir'], "fsc.eotest."+iteration['num'])
		halfres = apRecon.calcRes(fscfile, self.params['boxsize'], self.params['apix'])
		if self.params['snapfilter']:
			halfres = self.params['snapfilter']
		volumeDensity = 'threed.'+iteration['num']+'a.mrc'
		volDensPath = os.path.join(self.params['rundir'], volumeDensity)

		apChimera.filterAndChimera(volDensPath, halfres, self.params['apix'], 
			self.params['boxsize'], 'snapshot', self.params['contour'], self.params['zoom'],
			sym=iteration['sym']['eman_name'], mass=self.params['mass'])

		## uncomment this for chimera image only runs...
		if self.params['chimeraonly'] is True:
			return

		# insert resolution data
		if halfres != True:
			resData = self.getResolutionData(iteration)
		else:
			apDisplay.printWarning("resolution reported as nan, not committing results to database")
			return

		if self.params['package']== 'EMAN':
			refineclassavg='classes_eman.'+iteration['num']+'.img'
			postrefineclassavg=None
		elif self.params['package']== 'EMAN/SpiCoran':
			refineclassavg='classes_eman.'+iteration['num']+'.img'
			postrefineclassavg='classes_coran.'+iteration['num']+'.img'
		elif self.params['package']== 'EMAN/MsgP':
			refineclassavg='classes_eman.'+iteration['num']+'.img'
			postrefineclassavg='classes_msgp.'+iteration['num']+'.img'
		else:
			apDisplay.printError("Refinement Package Not Valid")

		# insert refinement results
		refineq = appiondata.ApRefineIterData()
		refineq['refineRun'] = self.params['refineRun']
		refineq['emanParams'] = refineparamsq
		refineq['iteration'] = iteration['num']
		refineq['resolution'] = resData
		refineq['rMeasure'] = self.getRMeasureData(iteration)
		refineq['mask'] = iteration['mask']
		refineq['imask'] = iteration['imask']
		refineq['symmetry']=iteration['sym']
		refineq['exemplar'] = False
		classvar = 'classes.'+iteration['num']+'.var.img'
		refineq['refineClassAverages'] = refineclassavg
		refineq['postRefineClassAverages'] = postrefineclassavg
		if classvar in self.params['classvars']:
			refineq['classVariance'] = classvar
		if volumeDensity in self.params['volumes']:
			refineq['volumeDensity'] = volumeDensity

		apDisplay.printMsg("inserting Refinement Data into database")
		if self.params['commit'] is True:
			refineq.insert()
		else:
			apDisplay.printWarning("not committing results to database")

		#insert FSC data
		fscfile = os.path.join(self.params['rundir'], "fsc.eotest."+iteration['num'])
		self.insertFSC(fscfile, refineq, self.params['commit'])
		halfres = apRecon.calcRes(fscfile, self.params['boxsize'], self.params['apix'])
		apDisplay.printColor("FSC 0.5 Resolution: "+str(halfres), "cyan")

		# get projections eulers for iteration:
		eulers = self.getEulersFromProj(iteration['num'])

		# get list of bad particles for this iteration
		badprtls = self.readParticleLog(self.params['rundir'], iteration['num'])

		# expand cls.*.tar into temp file
		clsf=os.path.join(self.params['rundir'], "cls."+iteration['num']+".tar")
		#print "reading",clsf
		clstar=tarfile.open(clsf)
		clslist=clstar.getmembers()
		clsnames=clstar.getnames()
		#print "extracting",clsf,"into temp directory"
		for clsfile in clslist:
			clstar.extract(clsfile,self.params['tmpdir'])
		clstar.close()

		# for each class, insert particle alignment info into database
		apDisplay.printColor("Inserting Particle Classification Data for "
			+str(len(clsnames))+" classes", "magenta")
		t0 = time.time()
		for cls in clsnames:
			self.insertRefineParticleData(cls, iteration, eulers, badprtls, refineq, len(clsnames))
		apDisplay.printColor("\nFinished in "+apDisplay.timeString(time.time()-t0), "magenta")

		# remove temp directory
		for tmpfile in os.listdir(self.params['tmpdir']):
			os.remove(os.path.join(self.params['tmpdir'], tmpfile))
		os.rmdir(self.params['tmpdir'])

		#create euler freq map
		apDisplay.printMsg("creating euler frequency map")
		refrunid = int(self.params['refineRun'].dbid)
		iternum = int(iteration['num'])
		if self.params['package'] != 'EMAN':
			postrefine = True
		else:
			postrefine = False

		apEulerDraw.createEulerImages(refrunid, iternum, path=self.params['rundir'], postrefine=postrefine)
		return
Ejemplo n.º 53
0
def readImagic(filename, first=1, last=None, msg=True):
    """
	Rudimentary Imagic stack reader
	Could be improved with more sophisticated error testing and header parsing
	Currently  only reads image data as floats
	Currently reads header information for only first image in stack
	"""
    t0 = time.time()
    if first < 1:
        apDisplay.printError("particle numbering starts at 1")
    if last is not None and first > last:
        apDisplay.printError(
            "requested first particle %d is greater than last particle %d" %
            (first, last))
    if msg is True:
        apDisplay.printMsg("reading stack from disk into memory: " +
                           os.path.basename(filename))
        if last is not None:
            apDisplay.printMsg("particles %d through %d" % (first, last))
    root = os.path.splitext(filename)[0]
    headerfilename = root + ".hed"
    datafilename = root + ".img"

    ### check file size, no more than 2 GB is possible
    ### it takes double memory on machine to read stack
    filesize = apFile.fileSize(datafilename)
    if first is None and last is None and filesize > bytelimit:
        apDisplay.printError("Stack is too large to read %s" %
                             (apDisplay.bytes(filesize)))

    ### read stack header

    headerdict = readImagicHeader(headerfilename)

    ### determine amount of memory needed
    partbytes = 4 * headerdict['rows'] * headerdict['lines']
    if last is None:
        last = headerdict['nimg']
    elif last > headerdict['nimg']:
        apDisplay.printWarning(
            "requested particle %d from stack of length %d" %
            (last, headerdict['nimg']))
        last = headerdict['nimg']
    numpart = last - first + 1
    if partbytes * numpart > filesize:
        apDisplay.printError("requested particle %d from stack of length %d" %
                             (last, filesize / partbytes))
    if partbytes * numpart > bytelimit:
        apDisplay.printError(
            "Stack is too large to read %d particles, requesting %s" %
            (numpart, apDisplay.bytes(partbytes * numpart)))

    ### read stack images
    images = readImagicData(datafilename, headerdict, first, numpart)
    stack = {'header': headerdict, 'images': images}

    if msg is True:
        apDisplay.printMsg("read %d particles equaling %s in size" %
                           (numpart, apDisplay.bytes(partbytes * numpart)))
        apDisplay.printMsg("finished in " +
                           apDisplay.timeString(time.time() - t0))

    return stack
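
### A minimal usage sketch for readImagic() above, assuming start.hed/start.img
### exist on disk; the stack name and particle range are illustrative.
def _example_readImagic_usage():
    # read particles 1 through 100 only, keeping memory use below bytelimit
    stack = readImagic("start.hed", first=1, last=100, msg=False)
    header = stack['header']    # dict parsed from the .hed file
    images = stack['images']    # numpy array of the requested particle images
    return header, images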
Ejemplo n.º 54
0
def writeImagic(array, filename, msg=True):
    """
	Rudimentary Imagic stack writer: requires 2D images to be in list format
	Could be improved with more sophisticated error testing and header parsing
	Currently only reads image data as floats
	Currently reads header information for only first image in stack

	Inputs:
		3d numpy array (numimg x row x col) OR python list of 2d numpy arrays (row x col)
		filename
	Modifies:
		overwrites files on disk
	Outputs:
		none
	"""
    if isinstance(array, list):
        ### python list of 2d numpy arrays (row x col)
        if len(array) == 0:
            apDisplay.printWarning("writeImagic: no particles to write")
            return
        try:
            array = numpy.asarray(array, dtype=numpy.float32)
            array = numpy.fliplr(array)
        except:
            boxsizes = []
            for part in array:
                shape = part.shape
                if not shape in boxsizes:
                    boxsizes.append(shape)
            if len(boxsizes) > 1:
                apDisplay.printError(
                    "your particles have different boxsizes: " + str(boxsizes))
            apDisplay.printError(
                "unknown error in particle list to numpy array conversion")

    t0 = time.time()
    if msg is True:
        apDisplay.printMsg("writing stack to disk from memory: " + filename)
    root = os.path.splitext(filename)[0]
    headerfilename = root + ".hed"
    datafilename = root + ".img"
    if os.path.isfile(headerfilename) or os.path.isfile(datafilename):
        apDisplay.printWarning("stack files '" + headerfilename +
                               "' already exist")

    ### write header file info, and dump images to image file
    i = 0
    headfile = open(headerfilename, 'wb')
    datafile = open(datafilename, 'wb')
    partnum = 0
    while i < array.shape[0]:
        partimg = array[i]
        avg1, stdev1, min1, max1 = getImageInfo(partimg)
        partnum = i + 1
        headerstr = makeHeaderStr(partnum, array.shape, avg1, stdev1, min1,
                                  max1)
        headfile.write(headerstr)
        # write to imagic file
        datafile.write(partimg.tostring())
        i += 1
    headfile.close()
    datafile.close()
    if partnum < 1:
        apDisplay.printWarning("did not write any particles to file")
    if msg is True:
        apDisplay.printMsg("wrote " + str(partnum) +
                           " particles to header file")
        apDisplay.printMsg("finished in " +
                           apDisplay.timeString(time.time() - t0))
    return
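
### A minimal usage sketch for writeImagic() above, assuming its module-level
### helpers (getImageInfo, makeHeaderStr) are available; the output name is
### illustrative and any existing out.hed/out.img pair would be overwritten.
def _example_writeImagic_usage():
    import numpy
    # a list of same-sized 2D float32 images (a 3D numpy array also works)
    particles = [numpy.zeros((64, 64), dtype=numpy.float32) for i in range(10)]
    writeImagic(particles, "out.hed", msg=False)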
Ejemplo n.º 55
0
def peakExtender(raddata, rotdata, extrema, extrematype="below"):
        """
        raddata - x data in inverse Angstroms
        rotdata - powerspectra data, almost normalized to 0 and 1
        extrema - numpy array of peak or valley locations in inverse Angstroms
        extrematype - type of extrema, must be either below or above

        this program looks at the CTF data using the "known" location of the extrema
        extracts their extreme values 
        and does a linear interpolation between the extreme points
        """
        t0 = time.time()
        apDisplay.printMsg("starting peak extension")
        extremeindices = numpy.searchsorted(raddata, extrema)

        raddatasq = raddata**2

        xdata = []
        ydata = []
        minx = extremeindices[0]
        for i in range(extremeindices.shape[0]-1):
                if extremeindices[i] > raddata.shape[0]-2:
                        break
                if extremeindices[i+1] > raddata.shape[0]-1:
                        extremeindices[i+1] = raddata.shape[0]-1
                eindex = extremeindices[i]
                if i == 0:
                        preveindex = int(eindex/2)
                else:
                        preveindex = extremeindices[i-1]
                nexteindex = extremeindices[i+1]
                eindex1 = int(round(eindex - abs(preveindex-eindex)/2.0))
                eindex2 = int(round(eindex + abs(nexteindex-eindex)/2.0))

                values = rotdata[eindex1:eindex2]
                if extrematype is "below":
                        value = values.min()
                elif extrematype is "above":
                        value = values.max()

                maxx = eindex
                xdata.append(raddatasq[eindex])
                ydata.append(value)

        if len(xdata) < 2:
                #not enough indices
                if extrematype is "below":
                        return numpy.zeros(raddata.shape)
                elif extrematype is "above":            
                        return numpy.ones(raddata.shape)

        func = scipy.interpolate.interp1d(xdata, ydata, kind='linear')
        extremedata = func(raddatasq[minx:maxx])
        if extrematype is "below":
                if minx < 3:
                        startvalue = 0.0
                else:
                        startvalue = rotdata[int(minx*0.5):minx].min()
                endvalue = rotdata[maxx:].min()
        elif extrematype is "above":
                if minx < 3:
                        startvalue = 1.0
                else:
                        startvalue = rotdata[int(minx*0.5):minx].max()
                endvalue = rotdata[maxx:].max()

        #print startvalue, endvalue

        startdata = numpy.ones((minx)) * startvalue
        enddata = numpy.ones((raddata.shape[0]-maxx)) * endvalue

        extremedata = numpy.hstack( (startdata, extremedata, enddata) )

        apDisplay.printColor("Peak Extension Complete in %s"
                %(apDisplay.timeString(time.time()-t0)), "cyan")

        return extremedata
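
### A minimal usage sketch for peakExtender() above with synthetic data; the
### profile and valley locations are illustrative stand-ins for a radially
### averaged CTF curve and its expected zero positions (module-level numpy,
### scipy, and apDisplay imports are assumed).
def _example_peakExtender_usage():
        import numpy
        raddata = numpy.linspace(0.001, 0.25, 1000)          # inverse Angstroms
        rotdata = 0.5 + 0.5 * numpy.sin(200 * raddata**2)    # fake CTF-like curve
        valleys = numpy.array([0.07, 0.12, 0.16, 0.19, 0.22])
        # traces the valley floor: takes the minimum around each given valley
        # and linearly interpolates between those minima in s^2 space
        envelope = peakExtender(raddata, rotdata, valleys, extrematype="below")
        return envelope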
Ejemplo n.º 56
0
    def start(self, stackfile, partlist=None):
        self.stackfile = stackfile
        self.starttime = time.time()
        if partlist is not None:
            partlist.sort()
            numrequest = len(partlist)
        else:
            numrequest = None
        self.initValues(stackfile, numrequest)

        ### custom pre-loop command
        self.preLoop()

        first = 1
        last = self.stepsize
        self.index = 0
        t0 = time.time()

        while self.index < self.numpart and first <= self.numpart:
            ### print message
            if self.index > 10:
                esttime = (time.time() - t0) / float(self.index +
                                                     1) * float(self.numpart -
                                                                self.index)
                self.message(
                    "partnum %d to %d of %d, %s remain" %
                    (first, last, self.numpart, apDisplay.timeString(esttime)))
            else:
                self.message("partnum %d to %d of %d" %
                             (first, last, self.numpart))

            ### read images
            if partlist is None:
                stackdata = readImagic(stackfile,
                                       first=first,
                                       last=last,
                                       msg=False)
                stackarray = stackdata['images']
            else:
                sublist = partlist[first - 1:last]
                self.message("actual partnum %d to %d" %
                             (sublist[0], sublist[len(sublist) - 1]))
                stackarray = readParticleListFromStack(stackfile,
                                                       sublist,
                                                       msg=False)

            ### process images
            self.processStack(stackarray)

            ### check for proper implementation
            if self.index == 0:
                apDisplay.printError(
                    "No particles were processed in stack loop")

            ### setup for next iteration
            first = last + 1
            last += self.stepsize
            if last > self.numpart:
                last = self.numpart
            ### END LOOP

        ### check for off-one reading errors
        if self.index < self.numpart - 1:
            print "INDEX %d -- NUMPART %d" % (self.index, self.numpart)
            apDisplay.printError("Did not properly process all particles")

        ### custom post-loop command
        self.postLoop()

        self.message("finished processing stack in " +
                     apDisplay.timeString(time.time() - self.starttime))
        return
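
    ### A hedged sketch of how a subclass of this stack-looping class might
    ### implement processStack(); the loop above appears to expect
    ### processStack() to advance self.index by the number of particles it
    ### handled, which is an inference from the "No particles were processed"
    ### check rather than a documented contract.
    def processStack(self, stackarray):
        for partarray in stackarray:
            # per-particle work would go here (statistics, filtering, etc.)
            self.index += 1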
        def modelCTFNoise(self, xdata, ctfdata, contraint="below"):
                """
                Master control function to fit the CTF noise function

                xdata - should be in inverse Angstroms
                """
                t0 = time.time()
                ### need to reduce precision of the xdata
                ### otherwise it takes too long, with no better of a fit
                xdata = xdata.astype(numpy.float32)

                if self.debug is True:
                        apDisplay.printColor("CTF limits %.1f A -->> %.1fA"
                                %(1./xdata.min(), 1./xdata.max()), "cyan")

                if contraint == "above":
                        if self.debug is True:
                                print "constrained above function"
                        contraintFunction = self.modelConstFunAbove
                        #filterctfdata = scipy.ndimage.maximum_filter(ctfdata, size=2)
                        #for i in range(1):
                        #       filterctfdata = (filterctfdata + scipy.ndimage.maximum_filter(filterctfdata, size=2))/2.0
                        #firstmax = filterctfdata[0:250].max()
                        #filterctfdata = numpy.where(filterctfdata>firstmax, firstmax, filterctfdata)
                        #filterctfdata = self.upwardLeftMonotonicFilter(ctfdata)
                        filterctfdata = ctfdata
                else:
                        if self.debug is True:
                                print "constrained below function"
                        contraintFunction = self.modelConstFunBelow
                        #filterctfdata = scipy.ndimage.minimum_filter(ctfdata, size=2)
                        #for i in range(1):
                        #       filterctfdata = (filterctfdata + scipy.ndimage.minimum_filter(filterctfdata, size=2))/2.0
                        #firstmin = filterctfdata[0:250].min()
                        #filterctfdata = numpy.where(filterctfdata>firstmin, firstmin, filterctfdata)
                        #filterctfdata = self.downwardRightMonotonicFilter(ctfdata)
                        filterctfdata = ctfdata

                ### run the initial minimizations
                namelist, valuelist, fitparamslist = self.getAllInitialParameters(xdata, 
                        filterctfdata, contraintFunction)

                ### figure out which initial fit was best
                if self.debug is True:
                        namestr = "|"
                        valstr = "|"
                        conststr = "|"
                        for i in range(len(valuelist)):
                                constrainval = contraintFunction(fitparamslist[i], xdata, filterctfdata)
                                namestr += apDisplay.rightPadString("%s"%(namelist[i][:15]), 15)+"|"
                                valstr += apDisplay.leftPadString("%.4f"%(valuelist[i]), 15)+"|"
                                conststr += apDisplay.leftPadString("%.4e"%(constrainval), 15)+"|"
                        print namestr
                        print valstr
                        print conststr

                ### lowest is best
                minvalindex = numpy.argmin(valuelist)
                constrainval = contraintFunction(fitparamslist[minvalindex], xdata, filterctfdata)
                valuelist = numpy.array(valuelist)
                if contraint == "below":
                        minconval = -1e-2
                elif contraint == "above":
                        minconval = -1e-4
                else:
                        minconval = -1e-3
                while constrainval < minconval and valuelist.min() < 1e6:
                        if constrainval < 0.1 and self.debug is True:
                                apDisplay.printMsg("Constraint violation: %.3e < %.3e"%(constrainval, minconval))
                        valuelist[minvalindex] *= 1e10
                        minvalindex = numpy.argmin(valuelist)
                        constrainval = contraintFunction(fitparamslist[minvalindex], xdata, filterctfdata)
                if self.debug is True:
                        apDisplay.printColor( namelist[minvalindex]+" is best" , "cyan")
                midfitparams = fitparamslist[minvalindex]

                if self.debug is True:
                        print ( "middle parameters (%.5e, %.5e, %.5e, %.5e, %.5e)"
                                %(midfitparams[0], midfitparams[1], midfitparams[2], midfitparams[3], midfitparams[4]))
                midvalue = self.modelFitFun(midfitparams, xdata, ctfdata)
                if self.debug is True:
                        print "middle function value %.10f"%(midvalue)
                        constrainval = contraintFunction(midfitparams, xdata, ctfdata)
                        print "constrained value %.10e"%(constrainval)

                ### run the full minimization
                rhobeg = (numpy.where(numpy.abs(midfitparams)<1e-20, 1e20, numpy.abs(midfitparams))).min()/1e7
                if self.debug: print "RHO begin", rhobeg
                fitparams = scipy.optimize.fmin_cobyla( self.modelFitFun, midfitparams, 
                        args=(xdata, ctfdata), cons=[contraintFunction,],
                        consargs=(xdata, ctfdata), rhobeg=rhobeg, rhoend=rhobeg/1e4, iprint=0, maxfun=1e8)
                if self.debug is True: 
                        print ( "final parameters (%.4e, %.4e, %.4e, %.4e, %.4e)"
                                %(fitparams[0], fitparams[1], fitparams[2], fitparams[3], fitparams[4]))
                finalvalue = self.modelFitFun(fitparams, xdata, ctfdata)
                if self.debug is True: 
                        print "final function value %.10f"%(finalvalue)
                #writeDatFile("finalvalue.dat", fitparams, xdata, ctfdata)
                
                if finalvalue <= midvalue:
                        if self.debug is True:
                                apDisplay.printColor("Final value is better", "green")
                        bestfitparams = fitparams
                else:
                        if self.debug is True:
                                apDisplay.printColor("Final value is worse", "red")
                        bestfitparams = midfitparams

                z = numpy.polyfit(xdata, filterctfdata, 3)
                polyfitparams = [z[3], 0.0, z[2], z[1], z[0]]

                if self.debug is True:
                        xdatasq = xdata**2
                        xdatasq = numpy.arange(0, len(xdata), 1)

                        from matplotlib import pyplot
                        pyplot.plot(xdatasq, ctfdata, 'r-', )
                        pyplot.plot(xdatasq, filterctfdata, 'b-', )
                        midfitdata = self.noiseModel(midfitparams, xdata)
                        pyplot.plot(xdatasq, midfitdata, 'm:', )
                        polyfitdata = self.noiseModel(polyfitparams, xdata)
                        pyplot.plot(xdatasq, polyfitdata, 'y-', )
                        finalfitdata = self.noiseModel(fitparams, xdata)
                        pyplot.plot(xdatasq, finalfitdata, 'k-', )
                        pyplot.show()
                        pyplot.clf()

                        """
                        datadiff1  = scipy.ndimage.median_filter(numpy.diff(ctfdata), 3)
                        datadiff2  = scipy.ndimage.median_filter(numpy.diff(datadiff1), 27)
                        pyplot.plot(xdatasq[:500], (datadiff2/datadiff2.std())[:500], 'y-', )
                        pyplot.plot(xdatasq[:500], (ctfdata - ctfdata.mean())[:500], 'r-', )
                        pyplot.plot(xdatasq[:500], (datadiff1/datadiff1.std())[:500], 'c-', )
                        pyplot.show()
                        pyplot.clf()
                        """

                if self.debug is True:
                        apDisplay.printColor("Noise Model Complete in %s"
                                %(apDisplay.timeString(time.time()-t0)), "cyan")

                return bestfitparams
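
### A hedged standalone sketch of the constrained-minimization pattern used by
### modelCTFNoise() above: scipy.optimize.fmin_cobyla minimizes an objective
### while keeping each constraint function non-negative. The quadratic noise
### model and the synthetic data are illustrative only.
def _example_cobyla_noise_fit():
	import numpy
	import scipy.optimize

	xdata = numpy.linspace(0.01, 0.25, 200)
	ctfdata = 0.2 + 0.5 * xdata + 0.1 * numpy.random.rand(200)

	def noise_model(params, x):
		# simple quadratic noise floor: a + b*x + c*x**2
		return params[0] + params[1] * x + params[2] * x**2

	def objective(params, x, data):
		# least-squares distance between model and data
		return ((noise_model(params, x) - data)**2).sum()

	def stay_below(params, x, data):
		# COBYLA keeps this >= 0, i.e. the fit never rises above the data
		return (data - noise_model(params, x)).min()

	fitparams = scipy.optimize.fmin_cobyla(objective, [0.0, 0.0, 0.0],
		cons=[stay_below], args=(xdata, ctfdata), consargs=(xdata, ctfdata),
		rhobeg=0.1, rhoend=1e-6)
	return fitparams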
	def start(self):
		### new stack path
		oldstack = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
		newstack = os.path.join(self.params['rundir'], self.stackdata['name'])
		apStack.checkForPreviousStack(newstack)

		### get particles from stack
		apDisplay.printMsg("Querying stack particles")
		t0 = time.time()
		stackpartq =  appiondata.ApRefineParticleData()
		stackpartq['refineIter'] = self.iterdata
		particles = stackpartq.query()
		apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

		### write included particles to text file
		includeParticle = []
		excludeParticle = 0
		f = open("test.log", "w")
		count = 0
		apDisplay.printMsg("Processing stack particles")
		t0 = time.time()
		for part in particles:
			count += 1
			if count%500 == 0:
				sys.stderr.write(".")
			emanstackpartnum = part['particle']['particleNumber']-1

			if part['postRefine_keep'] == 1:
				### good particle
				includeParticle.append(emanstackpartnum)
				f.write("%d\t%d\tinclude\n"%(count, emanstackpartnum))
			else:
				### bad particle
				excludeParticle += 1
				f.write("%d\t%d\texclude\n"%(count, emanstackpartnum))
		sys.stderr.write("\n")
		apDisplay.printMsg("Finished in "+apDisplay.timeString(time.time()-t0))

		f.close()
		includeParticle.sort()
		apDisplay.printMsg("Keeping "+str(len(includeParticle))
			+" and excluding "+str(excludeParticle)+" particles")

		### write kept particles to file
		self.params['keepfile'] = os.path.join(self.params['rundir'], "keepfile-"+self.timestamp+".list")
		apDisplay.printMsg("writing to keepfile "+self.params['keepfile'])
		kf = open(self.params['keepfile'], "w")
		for partnum in includeParticle:
			kf.write(str(partnum)+"\n")
		kf.close()

		### get number of particles
		numparticles = len(includeParticle)
		self.params['description'] += ( " ... %d no jumpers substack" % (numparticles,))

		### create the new sub stack
		apStack.makeNewStack(oldstack, newstack, self.params['keepfile'], bad=True)

		if not os.path.isfile(newstack):
			apDisplay.printError("No stack was created")

		apStack.averageStack(stack=newstack)
		if self.params['commit'] is True:
			apStack.commitSubStack(self.params)
			newstackid = apStack.getStackIdFromPath(newstack)
			apStackMeanPlot.makeStackMeanPlot(newstackid, gridpoints=6)
def crossCorrelateAndShift(infile, reffile, alignfile, ccdocfile, numpart, dataext=".spi"):
        ### rewritten to do the whole thing in memory in SPIDER; it should be faster
        starttime = time.time()
        infile = spyder.fileFilter(infile)
        reffile = spyder.fileFilter(reffile)
        alignfile = spyder.fileFilter(alignfile)
        partimg = "_4"
        ccmap = "_5"
        windccmap = "_6"

        boxsize = apFile.getBoxSize(infile+dataext)

        if not os.path.isfile(infile+dataext):
                apDisplay.printError("input stack file not found: "+infile+dataext)
        if not os.path.isfile(reffile+dataext):
                apDisplay.printError("reference stack file not found: "+reffile+dataext)
        nproc = apParam.getNumProcessors()
        mySpider = spyder.SpiderSession(dataext=dataext, logo=True, nproc=nproc, log=False)

        ### Allocate empty stack
        mySpider.toSpiderQuiet(
                "MS I", #command
                "_2@", #name
                "%d,%d,%d"%(boxsize), #boxsize
                str(numpart+1), #num part to create in memory
                str(numpart+1), #max particle number
        )

        partnum = 0
        while partnum < numpart:
                partnum+=1

                mySpider.toSpiderQuiet("CP", 
                        infile+("@%05d"%(partnum)), #picture
                        partimg,
                )

                ### cross correlate images; reversed order to avoid -1*shift

                mySpider.toSpiderQuiet("CC N", 
                        reffile+("@%05d"%(partnum)), #reference
                        partimg, #picture
                        ccmap, #output file
                )

                ### cannot shift more than 1/4 the size of the image
                mySpider.toSpiderQuiet("FI x52", partimg, "12" )
                mySpider.toSpiderQuiet("x54=int(x52/2)") #window size
                mySpider.toSpiderQuiet("x55=int(x52/4)") #window topleft
                mySpider.toSpiderQuiet("WI", 
                        ccmap, #input file
                        windccmap, #output file
                        "x54,x54", #window size
                        "x55,x55", #window origin
                )

                ### find the cross-correlation peak
                mySpider.toSpiderQuiet("x56=int(x52/4)+1") #center of window
                mySpider.toSpiderQuiet("PK M x11,x12,x13,x14", 
                        windccmap, #input ccmap file
                        "x56,x56", #origin coordinates
                )

                ### save info to doc file
                mySpider.toSpiderQuiet("SD %d,x13,x14"%(partnum), 
                        ccdocfile, #input ccmap file
                )

                ### shift the images
                mySpider.toSpiderQuiet("SH", 
                        partimg, #old stack
                        ("_2@%05d"%(partnum)), #new stack
                        "x13,x14", #shift value file
                )
        ### finish up
        #save stack to file
        mySpider.toSpiderQuiet(
                "CP", "_2@",
                alignfile+"@",  
        )
        #delete stack
        mySpider.toSpiderQuiet(
                "DE", "_2",
        )
        mySpider.close()

        apDisplay.printColor("finished shifting particles in "+apDisplay.timeString(time.time()-starttime), "cyan")

        return
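
### A hedged usage sketch for crossCorrelateAndShift() above; the file name
### stems and particle count are illustrative, SPIDER must be available, and
### start.spi / references.spi must already exist on disk.
def _example_crossCorrelateAndShift_usage():
        crossCorrelateAndShift("start", "references", "aligned", "ccdoc",
                numpart=100, dataext=".spi")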
    def getStackParticleParams(self):
        """
		for each particle in the stack, get the information that RELION needs
		"""
        stackPartList = apStack.getStackParticlesFromId(self.params['stackid'])

        if 'last' not in self.params:
            self.params['last'] = len(stackPartList)

        firstImageId = stackPartList[0]['particle']['image'].dbid
        count = 0
        lastImageId = -1
        lastCtfData = None
        lastKv = -1
        partParamsList = []
        sys.stderr.write("reading stack particle data\n")
        t0 = time.time()
        for stackPart in stackPartList:
            count += 1
            if count % 100 == 0:
                sys.stderr.write(".")
            if count % 10000 == 0:
                sys.stderr.write("\nparticle %d of %d\n" %
                                 (count, self.params['last']))

            # extra particle number information not read by Relion
            if count != stackPart['particleNumber']:
                apDisplay.printWarning(
                    "particle number in database is not in sync")

            if count > self.params['last']:
                break

            partParams = {}
            partParams['ptclnum'] = count
            partParams['filmNum'] = self.getFilmNumber(stackPart, firstImageId)
            #print partParams['filmNum']
            ### get image data
            imagedata = stackPart['particle']['image']
            if self.originalStackData.defocpair is True:
                imagedata = apDefocalPairs.getDefocusPair(imagedata)

            if lastImageId == imagedata.dbid:
                ctfdata = lastCtfData
                partParams['kv'] = lastKv
            else:
                ctfdata = ctfdb.getBestCtfValue(
                    imagedata, msg=False, method=self.params['ctfmethod'])
                partParams['kv'] = imagedata['scope']['high tension'] / 1000.0
            lastCtfData = ctfdata
            lastImageId = imagedata.dbid
            lastKv = partParams['kv']

            ### get CTF data from image
            if ctfdata is not None:
                # use defocus & astigmatism values
                partParams['defocus1'] = abs(ctfdata['defocus1'] * 1e10)
                partParams['defocus2'] = abs(ctfdata['defocus2'] * 1e10)
                partParams['angle_astigmatism'] = ctfdata['angle_astigmatism']
                partParams['amplitude_contrast'] = ctfdata[
                    'amplitude_contrast']
            else:
                apDisplay.printWarning(
                    "No ctf information for particle %d in image %d" %
                    (count, imagedata.dbid))
                partParams['defocus1'] = 0.1
                partParams['defocus2'] = 0.1
                partParams['angle_astigmatism'] = 0.0
                partParams['amplitude_contrast'] = 0.07

            if self.params['reconiterid'] is not None:
                eulerDict = self.getStackParticleEulersForIteration(stackPart)
                partParams.update(eulerDict)

            partParamsList.append(partParams)
        print "no class %d ; mismatch %d" % (self.noClassification,
                                             self.mismatch)
        sys.stderr.write("\ndone in %s\n\n" %
                         (apDisplay.timeString(time.time() - t0)))
        return partParamsList
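
    ### A hedged sketch showing how the per-particle records returned by
    ### getStackParticleParams() above might be consumed; the field names match
    ### the dictionary keys built in that method, but the output format here is
    ### illustrative, not the exact RELION .star layout.
    def _example_writeParticleParams(self, partParamsList, outname="particles.txt"):
        f = open(outname, "w")
        for partParams in partParamsList:
            f.write("%s\t%s\t%.1f\t%.1f\t%.2f\t%.3f\t%.1f\n" % (
                partParams['ptclnum'], partParams['filmNum'],
                partParams['defocus1'], partParams['defocus2'],
                partParams['angle_astigmatism'],
                partParams['amplitude_contrast'], partParams['kv']))
        f.close()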