	def start(self):
		stack1data=apStack.getStackParticlesFromId(self.params['sibstack1'])
		stack2data=apStack.getStackParticlesFromId(self.params['sibstack2'])
		
		path1=os.path.join(stack1data[0]['stack']['path']['path'],stack1data[0]['stack']['name'])
		path2=os.path.join(stack2data[0]['stack']['path']['path'],stack2data[0]['stack']['name'])
		
		stack2dict={}
		stack2values=range(0,len(stack2data))
		### A little hacky, but this ensures stack2data is looped through only once
		print "making ptcl dict\n"
		for ptcl in stack2values:
			stack2dict[stack2data[ptcl]['particle'].dbid]=ptcl
			
		
		syncstack1name=os.path.splitext(stack1data[0]['stack']['name'])[0]+'.sync1.hed'
		syncstack2name=os.path.splitext(stack2data[0]['stack']['name'])[0]+'.sync2.hed'
		for ptcl1 in stack1data:
			ptcl1_id = ptcl1['particle'].dbid
			if ptcl1_id in stack2dict:
				ptcl2number=stack2dict[ptcl1_id]
				ptcl2=stack2data[ptcl2number]
				command1='proc2d %s %s first=%d last=%d' % (path1,syncstack1name,ptcl1['particleNumber']-1, ptcl1['particleNumber']-1)
				command2='proc2d %s %s first=%d last=%d' % (path2,syncstack2name,ptcl2['particleNumber']-1, ptcl2['particleNumber']-1)
				print command1
				print command2
				os.system(command1)
				os.system(command2)
				print '\n'
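
The example above pays a one-time cost to index the second stack by particle database id, then walks the first stack and pulls matching particles out of both files with proc2d. Below is a minimal, stand-alone sketch of just that matching step, using toy records in place of the Appion stack-particle data; match_sibling_particles is an illustrative helper, not an Appion function.

def match_sibling_particles(stack1data, stack2data):
    """Return (index1, index2) pairs for particles whose database id appears
    in both stacks, preserving the order of the first stack."""
    # one pass over stack2: dbid -> position within stack2
    stack2dict = {}
    for pos, ptcl in enumerate(stack2data):
        stack2dict[ptcl['dbid']] = pos
    pairs = []
    for pos1, ptcl1 in enumerate(stack1data):
        pos2 = stack2dict.get(ptcl1['dbid'])
        if pos2 is not None:
            pairs.append((pos1, pos2))
    return pairs

# toy data standing in for the two sibling stacks
stack1 = [{'dbid': 11}, {'dbid': 12}, {'dbid': 13}]
stack2 = [{'dbid': 13}, {'dbid': 11}]
print(match_sibling_particles(stack1, stack2))   # [(0, 1), (2, 0)]

Testing membership with "ptcl1_id in stack2dict" (rather than in stack2dict.keys()) keeps each lookup constant-time, which matters when both stacks hold many particles.
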
    def checkConflicts(self):

        ### setup correct database after we have read the project id
        if 'projectid' in self.params and self.params['projectid'] is not None:
            apDisplay.printMsg("Using split database")
            # use a project database
            newdbname = apProject.getAppionDBFromProjectId(
                self.params['projectid'])
            sinedon.setConfig('appiondata', db=newdbname)
            apDisplay.printColor("Connected to database: '" + newdbname + "'",
                                 "green")

        # DD processes
        self.dd = apDDprocess.DDStackProcessing()
        print self.dd

        # get stack data
        self.stackdata = appiondata.ApStackData.direct_query(
            self.params['stackid'])
        self.stackparts = apStack.getStackParticlesFromId(
            self.params['stackid'], msg=True)
        self.sessiondata = apStack.getSessionDataFromStackId(
            self.params['stackid'])

        # query image
        qimage = self.stackparts[0]['particle']['image']

        # DD info
        self.dd.setImageData(qimage)
        self.dd.setDDStackRun(self.params['ddstackid'])
        self.ddstackpath = self.dd.getDDStackRun()['path']['path']
    def start(self):
        ### final stack file
        self.dbstackfile = os.path.join(self.params['rundir'],
                                        self.params['stackfilename'])
        if os.path.isfile(self.dbstackfile):
            apDisplay.printError("A stack with name " +
                                 self.params['stackfilename'] + " and path " +
                                 self.params['rundir'] + " already exists.")

        #self.stackid = int(self.params['stackid'])
        #self.stackdata = apStack.getOnlyStackData(self.stackid)

        stackdata = apStack.getOnlyStackData(self.params['stackid'], msg=False)
        originalstack = os.path.join(stackdata['path']['path'],
                                     stackdata['name'])
        #self.dbstackfile #new file
        shutil.copyfile(originalstack, self.dbstackfile)
        shutil.copyfile(originalstack[:-3] + "img",
                        self.dbstackfile[:-3] + "img")

        stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])
        dbids = [part.dbid for part in stackpartdata]

        for i, id in enumerate(dbids):
            numpy.memmap(self.dbstackfile,
                         dtype="float32",
                         offset=i * 1024 + 19 * 4)[0] = id
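
The loop above tags each particle's header record in the copied IMAGIC .hed file with its database id, writing a float32 at offset i*1024 + 19*4, i.e. word 19 (zero-based) of what the code assumes is a 1024-byte header record per particle. Here is a companion sketch for reading those tags back under the same layout assumption (a later example in this listing does essentially this to rebuild a keep file). Note that a float32 only represents integers exactly up to 2**24, so very large dbids would lose precision under this scheme.

import numpy

def read_header_tags(hedfile, numpart, record_bytes=1024, word=19):
    """Read back the float32 tag stored at the given word of each header
    record; the snippet above stores the particle dbid there."""
    tags = []
    for i in range(numpart):
        # same offset arithmetic as the writer: start of record i plus word*4 bytes
        value = numpy.memmap(hedfile, dtype="float32", mode="r",
                             offset=i * record_bytes + word * 4, shape=(1,))[0]
        tags.append(int(value))
    return tags
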
def getStackParticlesInOrder(params):
        partorderfile = os.path.join(params['rundir'],'stackpartorder.list')
        stackid = params['stackid']
        if not os.path.isfile(partorderfile):
                return apStack.getStackParticlesFromId(stackid)
        partfile = open(partorderfile,'r')
        lines = partfile.readlines()
        partorder = map((lambda x:int(x[:-1])),lines)
        partsort = list(partorder)
        partsort.sort()
        if partsort == partorder:
                return apStack.getStackParticlesFromId(stackid)
        apDisplay.printMsg("Prepped stack has a different order from the original stack.  Getting information in the order of the prepped stack")
        stpartdatas = []
        for partnum in partorder:
                stpartdatas.append(apStack.getStackParticle(stackid, partnum))
        return stpartdatas
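
getStackParticlesInOrder falls back to the plain database order unless rundir contains a stackpartorder.list file, assumed here to hold one particle number per line; the map(lambda x: int(x[:-1]), lines) parse depends on every line ending with a newline. A slightly more forgiving reader for the same one-number-per-line format, purely as an illustrative sketch:

def read_part_order(partorderfile):
    """Read one particle number per line, skipping blank lines and
    tolerating a missing newline on the final line."""
    order = []
    with open(partorderfile, 'r') as f:
        for line in f:
            line = line.strip()
            if line:
                order.append(int(line))
    return order
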
    def start(self):
        self.stackdata = appiondata.ApStackData.direct_query(
            self.params['stackid'])
        if self.params['vertical'] is not True:
            self.alignstackdata = appiondata.ApAlignStackData.direct_query(
                self.params['alignstackid'])

        # Path of the stack
        self.stackdata = apStack.getOnlyStackData(self.params['stackid'])
        fn_oldstack = os.path.join(self.stackdata['path']['path'],
                                   self.stackdata['name'])

        rotfile = None
        if self.params['vertical'] is not True:
            # get averaged image:
            self.alignstackdata = appiondata.ApAlignStackData.direct_query(
                self.params['alignstackid'])
            avgimg = os.path.join(self.alignstackdata['path']['path'],
                                  self.alignstackdata['avgmrcfile'])

            # Convert averaged aligned mrcfile to spider
            spiavg = os.path.join(self.params['rundir'], "avg.spi")
            emancmd = "proc2d %s %s spiderswap edgenorm" % (avgimg, spiavg)
            apEMAN.executeEmanCmd(emancmd, verbose=True)

            # find rotation for vertical alignment
            rot = self.findRotation(spiavg)
            apDisplay.printMsg("found average rotation: %.2f" % rot)

            rotlist = self.getInplaneRotations()
            rotfile = self.createRotationSpiList(rotlist, rot)

        # Convert the original stack to spider
        spistack = self.convertStackToSpider(fn_oldstack)
        # boxmask the particles
        spimaskfile = "masked" + self.timestamp + ".spi"
        self.boxMask(spistack, spimaskfile, rotfile)
        # Convert the spider stack to imagic
        imgstack = self.convertStackToImagic(spimaskfile)

        # Create average MRC
        apStack.averageStack(imgstack)

        # Clean up
        apDisplay.printMsg("deleting temporary processing files")
        os.remove(spistack)
        os.remove(spimaskfile)

        # Upload results
        if self.params['commit'] is True:
            oldstackparts = apStack.getStackParticlesFromId(
                self.params['stackid'])
            apStack.commitMaskedStack(self.params,
                                      oldstackparts,
                                      newname='start.hed')

        time.sleep(1)
        return
	def removePtclsByJumps(self, particles, rejectlst):
		eulerjump = apEulerJump.ApEulerJump()
		numparts = len(particles)
		apDisplay.printMsg("finding euler jumps for "+str(numparts)+" particles")

		### check symmetry
		symmetry = eulerjump.getSymmetry(self.params['reconid'], msg=True)
		if not re.match("^[cd][0-9]+$", symmetry.lower()) and not re.match("^icos", symmetry.lower()):
			apDisplay.printError("Cannot calculate euler jumps for symmetry: "+symmetry)
			return
		self.params['sym']=symmetry.lower()

		### prepare output file (note: the original passed 0666 as a third argument,
		### which open() treats as a buffer size, not a permission mode)
		f = open('jumps.txt', 'w')
		f.write("#pnum\t")
		headerlist = ('mean', 'median', 'stdev', 'min', 'max')
		for key in headerlist:
			f.write(key+"\t")
		f.write("\n")

		### get stack particles
		stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

		### start loop
		t0 = time.time()
		medians = []
		count = 0
		apDisplay.printMsg("processing euler jumps for recon run="+str(self.params['reconid']))
		for stackpart in stackparts:
			count += 1
			partnum = stackpart['particleNumber']
			f.write('%d\t' % partnum)
			jumpdata = eulerjump.getEulerJumpData(self.params['reconid'], stackpartid=stackpart.dbid, stackid=self.params['stackid'], sym=symmetry)
			medians.append(jumpdata['median'])
			if (jumpdata['median'] > self.params['avgjump']) and partnum not in rejectlst:
				rejectlst.append(partnum)
			for key in headerlist:
				f.write("%.3f\t" % (jumpdata[key]))
			f.write("\n")
			if count % 1000 == 0:
				timeremain = (time.time()-t0)/(count+1)*(numparts-count)
				apDisplay.printMsg("particle=% 5d; median jump=% 3.2f, remain time= %s" % (partnum, jumpdata['median'],
					apDisplay.timeString(timeremain)))
				#f.flush()
		### print stats
		apDisplay.printMsg("-- median euler jumper stats --")
		medians = numpy.asarray(medians, dtype=numpy.float32)
		apDisplay.printMsg("mean/std :: "+str(round(medians.mean(),2))+" +/- "
			+str(round(medians.std(),2)))
		apDisplay.printMsg("min/max  :: "+str(round(medians.min(),2))+" <> "
			+str(round(medians.max(),2)))

		perrej = round(100.0*float(numparts-len(rejectlst))/float(numparts),2)
		apDisplay.printMsg("keeping "+str(numparts-len(rejectlst))+" of "+str(numparts)
			+" particles ("+str(perrej)+"%) so far "
			+" in "+apDisplay.timeString(time.time()-t0))

		return rejectlst
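
removePtclsByJumps marks a particle for rejection when the median of its Euler-jump values exceeds the avgjump threshold. The decision rule in isolation, with made-up jump values standing in for eulerjump.getEulerJumpData():

import numpy

def reject_by_median_jump(jumps_per_particle, avgjump):
    """jumps_per_particle: dict mapping particle number -> list of euler jump angles.
    Returns the particle numbers whose median jump exceeds the threshold."""
    rejects = []
    for partnum, jumps in jumps_per_particle.items():
        if numpy.median(jumps) > avgjump:
            rejects.append(partnum)
    return sorted(rejects)

# toy data: particle 3 jumps between very different orientations
jumps = {1: [2.0, 3.0, 2.5], 2: [1.0, 0.5, 1.5], 3: [40.0, 55.0, 38.0]}
print(reject_by_median_jump(jumps, avgjump=20.0))   # [3]
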
    def start(self):

        self.stackdata = apStack.getOnlyStackData(self.params['stackid'],
                                                  msg=False)

        # creating a keepfile, fixed filename
        self.params['keepfile'] = os.path.join(self.params['newstackpath'],
                                               "keepfile.lst")

        #path to the old stack
        oldstack = os.path.join(self.stackdata['path']['path'],
                                self.stackdata['name'])

        #path to the new stack. the stack path will be provided by the db in the future
        newstack = os.path.join(self.params['newstackpath'],
                                self.params['newstack'])

        #messy way to count the number of particles in a stack
        h = open(newstack, 'r')
        numimg = 0
        while h.read(1024):
            numimg += 1

        #have to use this function to make sure I get the same particle number as in the download
        stackpartdata = apStack.getStackParticlesFromId(self.params['stackid'])

        #since the keepfile has to be a proc2d-like file, I create a dictionary to map the
        #unique particle id onto its stack position. Subtract 1 so positions count from 0
        #to numimg - 1
        partdict = {}
        dbids = [(part.dbid, part['particleNumber']) for part in stackpartdata]
        for part in dbids:
            partdict[int(part[0])] = int(part[1] - 1)

        #writing the keepfile
        f = open(self.params['keepfile'], 'w')
        for i in range(0, numimg):
            partnumber = partdict[int(
                numpy.memmap(newstack,
                             dtype="float32",
                             offset=i * 1024 + 19 * 4)[0])]
            f.write('%d\n' % partnumber)
        f.close()

        newcreatestack = os.path.join(self.params['rundir'],
                                      self.params['newstack'])
        apStack.makeNewStack(oldstack,
                             newcreatestack,
                             self.params['keepfile'],
                             bad=True)
        apStack.commitSubStack(self.params,
                               self.params['newstack'],
                               sorted=False)
        apStack.averageStack(stack=newcreatestack)
        newstackid = apStack.getStackIdFromPath(newcreatestack)
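
The "messy way to count the number of particles" above reads the header file 1024 bytes at a time, one read per IMAGIC header record. Under the same one-record-per-particle assumption, the count can also be derived from the file size; a small sketch, not an Appion helper:

import os

def count_imagic_particles(hedfile, record_bytes=1024):
    """Count particles in an IMAGIC header file, assuming one fixed-size
    header record per particle."""
    size = os.path.getsize(hedfile)
    if size % record_bytes != 0:
        raise ValueError("%s is not a whole number of %d-byte records"
                         % (hedfile, record_bytes))
    return size // record_bytes
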
    def start(self):
        stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        newname = stackdata['name']

        oldstack = os.path.join(stackdata['path']['path'], stackdata['name'])
        newstack = os.path.join(self.params['rundir'], newname)

        # calculate slop and intercept from the four points given
        slope = (self.params['maxy'] - self.params['miny']) / (
            self.params['maxx'] - self.params['minx'])
        intercept = self.params['miny'] - (slope * self.params['minx'])

        #               print slope
        #               print intercept

        numparticles = 0

        self.params['keepfile'] = os.path.join(
            self.params['rundir'], "keepfile-" + self.timestamp + ".list")
        f = open(self.params['keepfile'], 'w')

        for stackpart in stackparts:
            #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
            if stackpart['mean'] > self.params['minx'] and stackpart[
                    'mean'] < self.params['maxx']:
                #print str(stackpart['particleNumber'])+","+ str(stackpart['mean'])+","+str(stackpart['stdev'])
                calcY = slope * stackpart['mean'] + intercept
                if (calcY >= stackpart['stdev'] and self.params['keepabove'] is not True) or \
                        (calcY <= stackpart['stdev'] and self.params['keepabove'] is True):
                    emanpartnum = stackpart['particleNumber'] - 1
                    f.write('%i\n' % emanpartnum)
                    numparticles += 1

        f.close()
        self.params['description'] += (
            (" ... %d particle substack of stackid %d" %
             (numparticles, self.params['stackid'])))

        #create the new sub stack
        apStack.makeNewStack(oldstack,
                             newstack,
                             self.params['keepfile'],
                             bad=True)
        if not os.path.isfile(newstack):
            apDisplay.printError("No stack was created")
        apStack.commitSubStack(self.params, newname, oldstackparts=stackparts)
        apStack.averageStack(stack=newstack)

        # stack mean plot
        newstackid = apStack.getStackIdFromPath(newstack)
        apDisplay.printMsg("creating Stack Mean Plot montage for stackid")
        apStackMeanPlot.makeStackMeanPlot(newstackid)
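
The filter above keeps particles relative to the straight line through (minx, miny) and (maxx, maxy) in the mean/stdev plane: only particles whose mean lies strictly between minx and maxx are considered, and they are kept when their stdev falls on the chosen side of the line (on or below it by default, on or above it when keepabove is set). The same decision pulled out into a small function with a worked example (all values made up):

def keep_particle(mean, stdev, minx, miny, maxx, maxy, keepabove=False):
    """Return True if a particle with this (mean, stdev) passes the line filter."""
    if not (minx < mean < maxx):
        return False
    slope = (maxy - miny) / float(maxx - minx)
    intercept = miny - slope * minx
    line_y = slope * mean + intercept
    # keepabove=False keeps points on or below the line, keepabove=True on or above
    return line_y <= stdev if keepabove else line_y >= stdev

# line from (0, 0) to (10, 10): keep particles below the diagonal
print(keep_particle(mean=5.0, stdev=3.0, minx=0, miny=0, maxx=10, maxy=10))   # True
print(keep_particle(mean=5.0, stdev=8.0, minx=0, miny=0, maxx=10, maxy=10))   # False
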
    def start(self):
        #find stack
        stackparticles = apStack.getStackParticlesFromId(
            self.params['stackid'])

        if self.params['logsplit']:
            #stacklist = oldLogSplit(self.params['logstart'], len(stackparticles), self.params['logdivisions'])
            stacklist = evenLogSplit(self.params['logstart'],
                                     len(stackparticles))
        elif self.params['nptcls']:
            stacklist = [self.params['nptcls']]
        else:
            apDisplay.printError("Please specify nptlcs or logsplit")

        oldstackdata = apStack.getOnlyStackData(self.params['stackid'])
        oldstack = os.path.join(oldstackdata['path']['path'],
                                oldstackdata['name'])
        #create run directory
        if self.params['rundir'] is None:
            path = oldstackdata['path']['path']
            path = os.path.split(os.path.abspath(path))[0]
            self.params['rundir'] = path
        apDisplay.printMsg("Out directory: " + self.params['rundir'])

        origdescription = self.params['description']
        for stack in stacklist:
            self.params['description'] = (
                origdescription +
                (" ... split %d particles from original stackid=%d" %
                 (stack, self.params['stackid'])))
            workingdir = os.path.join(self.params['rundir'], str(stack))

            #check for previously commited stacks
            newstack = os.path.join(workingdir, self.params['stackname'])
            apStack.checkForPreviousStack(newstack)

            #create rundir and change to that directory
            apDisplay.printMsg("Run directory: " + workingdir)
            apParam.createDirectory(workingdir)
            os.chdir(workingdir)

            #create random list
            lstfile = makeRandomLst(stack, stackparticles, self.params)
            #shutil.copy(lstfile, workingdir)

            #make new stack
            apStack.makeNewStack(oldstack, newstack, lstfile)
            #apStack.makeNewStack(lstfile, self.params['stackname'])

            #commit new stack
            self.params['keepfile'] = os.path.abspath(lstfile)
            self.params['rundir'] = os.path.abspath(workingdir)
            apStack.commitSubStack(self.params)
        def start(self):
                ### check for existing run
                selectrunq = appiondata.ApSelectionRunData()
                selectrunq['name'] = self.params['runname']
                selectrunq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                selectrundata = selectrunq.query(readimages=False)
                if selectrundata:
                        apDisplay.printError("Runname already exists")

                ### stack data
                stackdata = apStack.getOnlyStackData(self.params['stackid'])

                ### stack particles
                stackparts = apStack.getStackParticlesFromId(self.params['stackid'], msg=True)
                stackparts.reverse()

                ### selection run for first particle
                oldselectrun = stackparts[0]['particle']['selectionrun']

                ### set selection run
                manualparamsq = appiondata.ApManualParamsData()
                manualparamsq['diam'] = self.getDiamFromSelectionRun(oldselectrun)
                manualparamsq['oldselectionrun'] = oldselectrun
                manualparamsq['trace'] = False
                selectrunq = appiondata.ApSelectionRunData()
                selectrunq['name'] = self.params['runname']
                selectrunq['hidden'] = False
                selectrunq['path'] = appiondata.ApPathData(path=os.path.abspath(self.params['rundir']))
                selectrunq['session'] = apStack.getSessionDataFromStackId(self.params['stackid'])
                selectrunq['manparams'] = manualparamsq

                ### insert particles
                apDisplay.printMsg("Inserting particles into database")
                count = 0
                t0 = time.time()
                startmem = mem.active()
                numpart = len(stackparts)
                for stackpart in stackparts:
                        count += 1
                        if count > 10 and count%100 == 0:
                                perpart = (time.time()-t0)/float(count+1)
                                apDisplay.printColor("part %d of %d :: %.1fM mem :: %s/part :: %s remain"%
                                        (count, numpart, (mem.active()-startmem)/1024. , apDisplay.timeString(perpart),
                                        apDisplay.timeString(perpart*(numpart-count))), "blue")
                        oldpartdata = stackpart['particle']
                        newpartq = appiondata.ApParticleData(initializer=oldpartdata)
                        newpartq['selectionrun'] = selectrunq
                        if self.params['commit'] is True:
                                newpartq.insert()
                apDisplay.printMsg("Completed in %s"%(apDisplay.timeString(time.time()-t0)))
	def calculateEulerJumpsForEntireRecon(self, reconrunid, stackid=None, sym=None, multimodelrunid=None):
		if sym is None:
			sym = self.getSymmetry(reconrunid)
		if re.match("^icos", sym.lower()):
			apDisplay.printWarning("Processing Icos symmetry "+sym)
		if not re.match("^[cd][0-9]+$", sym.lower()) and not re.match("^icos", sym.lower()):
			apDisplay.printError("Cannot calculate euler jumps for symmetry: "+sym)
			return
		### get stack particles
		if stackid is None:
			stackid = apStack.getStackIdFromRecon(reconrunid, msg=False)
		stackparts = apStack.getStackParticlesFromId(stackid)
		stackparts.sort(self.sortStackParts)
		numparts = len(stackparts)
		### start loop
		t0 = time.time()
		medians = []
		count = 0
		miscount = 0
		for stackpart in stackparts:
			count += 1
			if multimodelrunid is None:
				jumpdata = self.getEulerJumpData(reconrunid, stackpartid=stackpart.dbid, stackid=stackid, sym=sym)
			else:
				jumpdata = self.getEulerJumpData(reconrunid, stackpartid=stackpart.dbid, stackid=stackid, sym=sym, multimodelrunid=multimodelrunid)
			if jumpdata is None:
				if miscount < 25:
					continue
				else:
					break
			medians.append(jumpdata['median'])
			if count % 500 == 0:
				timeremain = (time.time()-t0)/(count+1)*(numparts-count)
				print ("particle=% 5d; median jump=% 3.2f, remain time= %s" % (stackpart['particleNumber'], jumpdata['median'],
					apDisplay.timeString(timeremain)))
		if len(medians) > 0:
			### print stats
			apDisplay.printMsg("complete "+str(len(stackparts))+" particles in "+apDisplay.timeString(time.time()-t0))
			print "-- median euler jumper stats --"
			medians = numpy.asarray(medians, dtype=numpy.float32)
			print ("mean/std :: "+str(round(medians.mean(),2))+" +/- "
				+str(round(medians.std(),2)))
			print ("min/max  :: "+str(round(medians.min(),2))+" <> "
				+str(round(medians.max(),2)))
		else:
			apDisplay.printWarning("no Euler jumpers inserted into the database, make sure that the angles are read by the recon uploader")			
		return
    def commitStack(self, stackid):

        startpart = self.partnum

        stackq = appiondata.ApStackData()
        oldstackdata = apStack.getOnlyStackData(stackid)
        stackq['name'] = self.params['stackfilename']
        stackq['path'] = appiondata.ApPathData(
            path=os.path.abspath(self.params['rundir']))
        stackq['description'] = self.params[
            'description'] + " ... combined stack ids " + str(
                self.params['stacks'])
        stackq['substackname'] = self.params['runname']
        stackq['hidden'] = False
        stackq['pixelsize'] = self.newpixelsize * 1e-10
        stackq['boxsize'] = self.newboxsize

        rinstackdata = apStack.getRunsInStack(stackid)
        for run in rinstackdata:
            rinstackq = appiondata.ApRunsInStackData()
            rinstackq['stack'] = stackq
            rinstackq['stackRun'] = run['stackRun']
            rinstackq.insert()

        stpartsdata = apStack.getStackParticlesFromId(stackid)
        apDisplay.printMsg("inserting " + str(len(stpartsdata)) +
                           " particles into DB")
        for particle in stpartsdata:
            stpartq = appiondata.ApStackParticleData()
            stpartq['particleNumber'] = self.partnum
            stpartq['stack'] = stackq
            stpartq['stackRun'] = particle['stackRun']
            stpartq['particle'] = particle['particle']
            stpartq.insert()
            self.partnum += 1
            if self.partnum % 1000 == 0:
                sys.stderr.write(".")
        sys.stderr.write("\n")

        apDisplay.printMsg("commited particles " + str(startpart) + "-" +
                           str(self.partnum))

        return
	def getExistingStackInfo(self):
		stackfile=os.path.join(self.params['rundir'], self.params['single'])
		stackid = apStack.getStackIdFromPath(stackfile)
		numdbpart = len(apStack.getStackParticlesFromId(stackid))

		if numdbpart == 0:
			self.params['continue'] = False
			apDisplay.printWarning("file exists but no particles in database, deleting stack file")
			apFile.removeStack(stackfile)
			return 0

		### we now have particles in the database
		if self.params['continue'] is False:
			apDisplay.printWarning("particles exist in database, must force continue")
			self.params['continue'] = True

		### we better have the same number of particles in the file and the database
		numfilepart = apFile.numImagesInStack(stackfile)
		if numfilepart != numdbpart:
			apDisplay.printError("database and file have different number of particles, \n"+
				"create a new stack this one is corrupt")

		return numfilepart
    def start(self):
        self.params['output_fileformat'] = 'mrc'
        newstackname = 'framealigned.hed'
        stackdata = apStack.getStackParticlesFromId(self.params['stackid'])
        stackrundata = apStack.getOnlyStackData(self.params['stackid'])
        apix = stackrundata['pixelsize'] * 1e10
        kev = stackdata[0]['particle']['image']['scope']['high tension'] / 1000
        origstackpath = os.path.join(stackrundata['path']['path'],
                                     stackrundata['name'])
        boxsize = stackdata[0]['stackRun']['stackParams']['boxSize']
        binning = stackdata[0]['stackRun']['stackParams']['bin']

        #determine camera type
        cameratype = stackdata[0]['particle']['image']['camera']['ccdcamera'][
            'name']
        if self.params['override_camera'] is not None:
            cameratype = self.params['override_camera']

        #create sorted boxfiles
        imagedict = {}
        masterlist = []
        for particle in stackdata:
            parentimage = particle['particle']['image']['filename']
            if parentimage in imagedict.keys():
                imagedict[parentimage].append(particle['particle'])
            else:
                imagedict[parentimage] = []
                imagedict[parentimage].append(particle['particle'])
            index = len(imagedict[parentimage]) - 1
            masterlist.append({
                'particle': particle,
                'key': parentimage,
                'index': index
            })
        #print masterlist

        for key in imagedict:
            particlelst = imagedict[key]
            parentimage = key
            framespath = particlelst[0]['image']['session']['frame path']

            print cameratype
            if 'Gatan' in cameratype:
                #prepare frames
                print framespath

                #prepare frame directory
                framespathname = os.path.join(self.params['rundir'],
                                              parentimage + '.frames')
                if os.path.exists(framespathname):
                    pass
                else:
                    os.mkdir(framespathname)
                print framespathname

                mrcframestackname = parentimage + '.frames.mrc'

                print mrcframestackname

                nframes = particlelst[0]['image']['camera']['nframes']

                print "Extracting frames for", mrcframestackname
                for n in range(nframes):
                    a = mrc.read(os.path.join(framespath, mrcframestackname),
                                 n)
                    numpil.write(a,
                                 imfile=os.path.join(framespathname,
                                                     'RawImage_%d.tif' % (n)),
                                 format='tiff')

            elif 'DE' in cameratype:
                framespathname = os.path.join(framespath,
                                              parentimage + '.frames')

            print os.getcwd()
            print framespathname
            #generate DE script call
            if os.path.exists(framespathname):
                print "found frames for", parentimage

                nframes = particlelst[0]['image']['camera']['nframes']
                boxname = parentimage + '.box'
                boxpath = os.path.join(framespathname, boxname)
                shiftdata = {'scale': 1, 'shiftx': 0, 'shifty': 0}

                #flatfield references
                brightrefpath = particlelst[0]['image']['bright']['session'][
                    'image path']
                brightrefname = particlelst[0]['image']['bright']['filename']
                brightnframes = particlelst[0]['image']['bright']['camera'][
                    'nframes']
                darkrefpath = particlelst[0]['image']['dark']['session'][
                    'image path']
                darkrefname = particlelst[0]['image']['dark']['filename']
                darknframes = particlelst[0]['image']['dark']['camera'][
                    'nframes']
                brightref = os.path.join(brightrefpath, brightrefname + '.mrc')
                darkref = os.path.join(darkrefpath, darkrefname + '.mrc')
                print brightref
                print darkref
                apBoxer.processParticleData(particle['particle']['image'],
                                            boxsize, particlelst, shiftdata,
                                            boxpath)
                print framespathname

                #set appion specific options
                self.params['gainreference_filename'] = brightref
                self.params['gainreference_framecount'] = brightnframes
                self.params['darkreference_filename'] = darkref
                self.params['darkreference_framecount'] = darknframes
                self.params['input_framecount'] = nframes
                self.params['boxes_fromfiles'] = 1
                #self.params['run_verbosity']=3
                self.params['output_invert'] = 0
                #self.params['radiationdamage_apix=']=apix
                self.params['radiationdamage_voltage'] = kev
                #self.params['boxes_boxsize']=boxsize

                outpath = os.path.join(self.params['rundir'], key)
                if os.path.exists(outpath):
                    shutil.rmtree(outpath)
                os.mkdir(outpath)

                command = ['deProcessFrames.py']
                keys = self.params.keys()
                keys.sort()
                for key in keys:
                    param = self.params[key]
                    #print key, param, type(param)
                    if param is None or param == '':
                        continue
                    option = '--%s=%s' % (key, param)
                    command.append(option)
                command.append(outpath)
                command.append(framespathname)
                print command
                if self.params['dryrun'] is False:
                    subprocess.call(command)

        #recreate particle stack
        for n, particledict in enumerate(masterlist):
            parentimage = particledict['key']
            correctedpath = os.path.join(self.params['rundir'], parentimage)
            print correctedpath
            if os.path.exists(correctedpath):

                correctedparticle = glob.glob(
                    os.path.join(correctedpath,
                                 ('%s.*.region_%03d.*' %
                                  (parentimage, particledict['index']))))
                print os.path.join(correctedpath,
                                   ('%s.*.region_%03d.*' %
                                    (parentimage, particledict['index'])))
                print correctedparticle
                #sys.exit()
                command = ['proc2d', correctedparticle[0], newstackname]
                if self.params['output_rotation'] != 0:
                    command.append('rot=%d' % self.params['output_rotation'])

                if self.params['show_DE_command'] is True:
                    print command
                subprocess.call(command)
            else:
                print "did not find frames for ", parentimage
                command = [
                    'proc2d', origstackpath, newstackname, ('first=%d' % n),
                    ('last=%d' % n)
                ]
                print command
                if self.params['dryrun'] is False:
                    subprocess.call(command)

        #upload stack

        #make keep file
        self.params['keepfile'] = 'keepfile.txt'
        f = open(self.params['keepfile'], 'w')
        for n in range(len(masterlist)):
            f.write('%d\n' % (n))
        f.close()

        apStack.commitSubStack(self.params, newname=newstackname)
        apStack.averageStack(stack=newstackname)

        print "Done!!!!"
	def start(self):
		"""
		for each particle in the stack, get the information that RELION needs
		"""
		stackPartList = apStack.getStackParticlesFromId(self.params['stackid'])
		nptcls=len(stackPartList)
		currentImageId = stackPartList[0]['particle']['image'].dbid
		count = 0
		imagenumber=1
		partParamsList = []
		sys.stderr.write("reading stack particle data\n")
		
		#create list of dictionaries that will be passed to starFile maker later
		for stackPart in stackPartList:
			count += 1
			if count % 100 == 0:
				sys.stderr.write(".")
			if count % 10000 == 0:
				sys.stderr.write("\nparticle %d of %d\n"%(count, nptcls))
			
			# extra particle number information not read by Relion
			if count != stackPart['particleNumber']:
				apDisplay.printWarning("particle number in database is not in sync")
							
			partParams = {}
			partParams['ptclnum'] = count
			
			### get image data
			imagedata = stackPart['particle']['image']
			if imagedata.dbid != currentImageId:
				imagenumber+=1
				currentImageId=imagedata.dbid
			partParams['filmNum'] = imagenumber
			#print partParams['filmNum']
			partParams['kv'] = imagedata['scope']['high tension']/1000.0
			partParams['cs'] =imagedata['scope']['tem']['cs']*1000
			### get CTF data from image			
			ctfdata = ctfdb.getBestCtfValue(imagedata, msg=False, sortType='maxconf')
			if ctfdata is not None:
				# use defocus & astigmatism values
				partParams['defocus1'] = abs(ctfdata['defocus1']*1e10)
				partParams['defocus2'] = abs(ctfdata['defocus2']*1e10)
				partParams['angle_astigmatism'] = ctfdata['angle_astigmatism']
				partParams['amplitude_contrast'] = ctfdata['amplitude_contrast']
			else:
				apDisplay.printError("No ctf information for particle %d in image %s"%(count, imagedata['filename']))
			partParamsList.append(partParams)
		
		###now make star file
		
		#first make header
		star = starFile.StarFile(self.params['outstar'])
		labels = ['_rlnImageName', '_rlnMicrographName',
			'_rlnDefocusU', '_rlnDefocusV', '_rlnDefocusAngle', '_rlnVoltage',
			'_rlnSphericalAberration', '_rlnAmplitudeContrast', 
		]

		valueSets = [] #list of strings for star file
		
		###now make particle data
		for partParams in partParamsList:
			# NOTE: the image-name token on the next line was mangled by an email-obfuscation
			# filter in the source listing ("%[email protected] ..."); "%06d@<stack file>" is the
			# assumed original form, with "start.mrcs" standing in for the real stack file name
			relionDataLine = ("%06d@start.mrcs mic%d %.6f %.6f %.6f %d %.6f %.6f"
				%( partParams['ptclnum'], partParams['filmNum'],
					partParams['defocus2'], partParams['defocus1'], partParams['angle_astigmatism'],
					partParams['kv'], partParams['cs'], partParams['amplitude_contrast'],
				))
			valueSets.append(relionDataLine)
		star.buildLoopFile( "data_", labels, valueSets )
		star.write()
	def getStackParticleParams(self):
		"""
		for each particle in the stack, get the information that RELION needs
		"""
		stackPartList = apStack.getStackParticlesFromId(self.params['stackid'])
		
		if 'last' not in self.params:
			self.params['last'] = len(stackPartList)

		firstImageId = stackPartList[0]['particle']['image'].dbid
		count = 0
		lastImageId = -1
		lastCtfData = None
		lastKv = -1
		partParamsList = []
		sys.stderr.write("reading stack particle data\n")
		t0 = time.time()
		for stackPart in stackPartList:
			count += 1
			if count % 100 == 0:
				sys.stderr.write(".")
			if count % 10000 == 0:
				sys.stderr.write("\nparticle %d of %d\n"%(count, self.params['last']))
			
			# extra particle number information not read by Relion
			if count != stackPart['particleNumber']:
				apDisplay.printWarning("particle number in database is not in sync")
							
			if count > self.params['last']:
				break
				
			partParams = {}
			partParams['ptclnum'] = count
			partParams['filmNum'] = self.getFilmNumber(stackPart, firstImageId)
			#print partParams['filmNum']
			### get image data
			imagedata = stackPart['particle']['image']
			if self.originalStackData.defocpair is True:
				imagedata = apDefocalPairs.getDefocusPair(imagedata)

			if lastImageId == imagedata.dbid:
				ctfdata = lastCtfData
				partParams['kv'] = lastKv
			else:
				ctfdata = ctfdb.getBestCtfValue(imagedata, msg=False, method=self.params['ctfmethod'])
				partParams['kv'] = imagedata['scope']['high tension']/1000.0
			lastCtfData = ctfdata
			lastImageId = imagedata.dbid
			lastKv = partParams['kv']

			### get CTF data from image			
			if ctfdata is not None:
				# use defocus & astigmatism values
				partParams['defocus1'] = abs(ctfdata['defocus1']*1e10)
				partParams['defocus2'] = abs(ctfdata['defocus2']*1e10)
				partParams['angle_astigmatism'] = ctfdata['angle_astigmatism']
				partParams['amplitude_contrast'] = ctfdata['amplitude_contrast']
			else:
				apDisplay.printWarning("No ctf information for particle %d in image %d"%(count, imagedata.dbid))
				partParams['defocus1'] = 0.1
				partParams['defocus2'] = 0.1
				partParams['angle_astigmatism'] = 0.0
				partParams['amplitude_contrast'] = 0.07

			if self.params['reconiterid'] is not None:
				eulerDict = self.getStackParticleEulersForIteration(stackPart)
				partParams.update(eulerDict)

			partParamsList.append(partParams)
		print "no class %d ; mismatch %d"%(self.noClassification, self.mismatch)
		sys.stderr.write("\ndone in %s\n\n"%(apDisplay.timeString(time.time()-t0)))	
		return partParamsList			
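
Because stack particles from the same micrograph are adjacent in the list, getStackParticleParams caches the last image's CTF result and only queries the database again when the image id changes. The caching pattern on its own, with a stand-in lookup function:

def collect_ctf(particles, lookup_ctf):
    """particles: iterable of dicts carrying an 'image_id' key.
    lookup_ctf(image_id) is the expensive call; it runs once per image,
    not once per particle, as long as particles are grouped by image."""
    last_image_id = None
    last_ctf = None
    results = []
    for part in particles:
        if part['image_id'] != last_image_id:
            last_ctf = lookup_ctf(part['image_id'])
            last_image_id = part['image_id']
        results.append(last_ctf)
    return results
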
    def checkConflicts(self):

        ### setup correct database after we have read the project id
        if 'projectid' in self.params and self.params['projectid'] is not None:
            apDisplay.printMsg("Using split database")
            # use a project database
            newdbname = apProject.getAppionDBFromProjectId(
                self.params['projectid'])
            sinedon.setConfig('appiondata', db=newdbname)
            apDisplay.printColor("Connected to database: '" + newdbname + "'",
                                 "green")

        if self.params['stackid'] is None:
            apDisplay.printError("stackid was not defined")

        if self.params['expweight'] is False:
            apDisplay.printWarning(
                "Exposure weighting is turned off, make sure this is what you want"
            )
        if self.params['localavg'] is False:
            apDisplay.printWarning(
                "Trajectory local averaging is turned off, make sure this is what you want"
            )

        # DD processes
        self.dd = apDDprocess.DDStackProcessing()
        print self.dd

        # get stack data
        self.stackdata = appiondata.ApStackData.direct_query(
            self.params['stackid'])
        self.stackparts = apStack.getStackParticlesFromId(
            self.params['stackid'], msg=True)
        self.sessiondata = apStack.getSessionDataFromStackId(
            self.params['stackid'])

        # query image
        qimage = self.stackparts[0]['particle']['image']

        # pixel size info
        self.params['apix'] = apStack.getMicrographPixelSizeFromStackId(
            self.params['stackid'])
        self.params['box'] = self.stackdata['boxsize']
        self.params['particleradius'] = self.params[
            'particleradius'] / self.params['apix']
        if self.params['particleradius'] > self.params['box'] / 2.0:
            apDisplay.printWarning(
                "specified particle radius greater than box radius, \
				setting particle radius to 0.8 * boxsize")

        # micrograph & frame info
        frames = qimage['use frames']
        nframes = len(frames)
        if self.params['framelastali'] is None:
            self.params['framelastali'] = frames[-1]
        if self.params['framelastave'] is None:
            self.params['framelastave'] = frames[-1]

        # microscope kV
        self.params['kv'] = qimage['scope']['high tension'] / 1000.0

        # query exposure per frame, if not set here
        if self.params['total_dose'] is not None:
            dose = self.params['total_dose']
        else:
            try:
                dose = apDatabase.getDoseFromImageData(qimage)
            except:
                apDisplay.printError(
                    "dose not specified and not in database, please specify explicitly"
                )
        if self.params['expperframe'] is None and self.params[
                'expweight'] is True:
            if dose is not None:
                self.params['expperframe'] = dose / nframes
            else:
                apDisplay.printError(
                    "exposure per frame needs to be specified, cannot find in database"
                )

        # dimensions
        self.params['framex'] = int(
            apDatabase.getDimensionsFromImageData(qimage)['x'])
        self.params['framey'] = int(
            apDatabase.getDimensionsFromImageData(qimage)['y'])

        # DD info
        self.dd.setImageData(qimage)
        self.dd.setDDStackRun(self.params['ddstackid'])
        self.ddstackpath = self.dd.getDDStackRun()['path']['path']
	def start(self):
		### default parameters
		starfile = self.params['starfile']
		images = self.params['images']
		dbnum = self.params['dbnum']
		ctfrunid = self.params['ctfrunid']
		stackid = self.params['stackid']
		voltage = self.params['voltage']
		cs = self.params['cs']
		wgh = self.params['wgh']
	
		### particles, angles
		appiondata.sinedon.setConfig('appiondata', db="ap%d" % dbnum)
		particledata = apStack.getStackParticlesFromId(stackid)

		### write Relion starfile header
		sf = open(starfile, "w")
		sf.write("data_images\n")
		sf.write("loop_\n")
		sf.write("_rlnImageName\n")
		sf.write("_rlnMicrographName\n")
		sf.write("_rlnDefocusU\n")
		sf.write("_rlnDefocusV\n")
		sf.write("_rlnDefocusAngle\n")
		sf.write("_rlnVoltage\n")
		sf.write("_rlnSphericalAberration\n")
		sf.write("_rlnAmplitudeContrast\n")
	
		### write info to starfile
		olddx = 0
		micn = 0
		oldimgid = None
		for i in range(len(particledata)):
			if i % 1000 == 0:
				print "done with %d particles" % i
	
			### CTF info
			image = particledata[i]['particle']['image']
			imgid = image.dbid
			try:
				ctf = ctfdb.getCtfValueForCtfRunId(image, ctfrunid, msg=False)
				dx = ctf['defocus1'] * 10e9 
				dy = ctf['defocus2'] * 10e9 
				astig = ctf['angle_astigmatism']
			except: 
				ctf = None
#				print "didn't find CTF values for image ", image.dbid
	
			if ctf is None:
				if oldimgid != imgid:
					print "particle %d: " % i, "getting best value for image: %d" % imgid
					ctf = ctfdb.getBestCtfValueForImage(image, msg=False, method='ctffind')
					dx = ctf[0]['defocus1'] * 10e9 
					dy = ctf[0]['defocus2'] * 10e9 
					astig = ctf[0]['angle_astigmatism']		
					oldctf = ctf
					oldimgid = imgid
				else:
					try:
						ctf = oldctf
						dx = oldctf[0]['defocus1'] * 10e9
						dy = oldctf[0]['defocus2'] * 10e9
						astig = oldctf[0]['angle_astigmatism']
					except:
						apDisplay.printError("no CTF information for image")
	
			if dx != olddx:
				micn += 1
				olddx = dx
	
			### write input Relion parameters 
			sf.write("%06d@%s%10d%12.3f%12.3f%12.3f%8.3f%8.3f%8.3f\n" 
				% (i+1, images, micn, dx, dy, astig, voltage, cs, wgh)
			)
		sf.close()
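
The * 10e9 factors above read like a typo but are numerically identical to * 1e10, i.e. the conversion from meters (the unit the other examples in this collection assume for defocus stored in the database) to Angstroms. A small sketch of that conversion, under the same assumption:

def defocus_meters_to_angstroms(defocus_m):
    # 1 m = 1e10 Angstroms; note 10e9 == 1e10, so both spellings give the same value
    return defocus_m * 1e10

# example: a 2.5 um underfocus stored as 2.5e-06 m comes out near 25000 Angstroms
# print defocus_meters_to_angstroms(2.5e-06)
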
        sys.exit()

    #parse params
    params = {}
    params['stackid'] = int(sys.argv[1])
    params['newstackname'] = sys.argv[2]
    params['bin'] = int(sys.argv[3])
    params['newstackpath'] = os.getcwd()
    params['description'] = "stackid %d was scaled by a factor of %d" % (
        params['stackid'], params['bin'])

    #check for multiple runs in stack
    runs_in_stack = apStack.getRunsInStack(params['stackid'])
    if len(runs_in_stack) > 1:
        print "scalestack.py can't scale this stack because it is a combination of multiple makestack runs."
        print "Instead, use makestack to create a new single scaled stack"
        sys.exit()

    #get stackdata
    stackdata = apStack.getStackParticlesFromId(params['stackid'])

    #do operations on stack
    print "Scaling stack"
    scaleStack(stackdata, params)

    #commit new stack to db
    print "Commiting new stack to db"
    commitScaledStack(stackdata, params)

    print "Done!"
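
The fragment above parses its arguments positionally; assuming it is the __main__ block of scalestack.py (the script named in its own error message), a call would look like this:

# usage sketch (assumption: this fragment is the __main__ block of scalestack.py):
#   python scalestack.py <stackid> <newstackname> <bin>
# e.g. scale stack 312 by a binning factor of 2 into a new stack file:
#   python scalestack.py 312 stack312_bin2.hed 2
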
Example 29
def makeStackMeanPlot(stackid, gridpoints=16):
        if gridpoints > 30:
                apDisplay.printError("Too large of a grid")
        apDisplay.printMsg("creating Stack Mean Plot montage for stackid: "+str(stackid))
        t0 = time.time()
        ### big stacks are too slow
        boxsize = apStack.getStackBoxsize(stackid)
        bin = 1
        if boxsize is not None:
                while boxsize/bin > 128:
                        bin+=1
        apDisplay.printMsg("binning stack by "+str(bin))
        stackdata = apStack.getOnlyStackData(stackid, msg=False)
        stackfile = os.path.join(stackdata['path']['path'], stackdata['name'])
        partdatas = apStack.getStackParticlesFromId(stackid, msg=False)
        #check only first 500 particles for now
        #partdatas = partdatas[:500]
        apFile.removeFile("montage"+str(stackid)+".png")

        ### find limits
        limits = {'minmean': 1e12, 'maxmean': -1e12, 'minstdev': 1e12, 'maxstdev': -1e12,}
        for partdata in partdatas:
                if partdata['mean'] is None:
                        continue
                mean = partdata['mean']
                stdev = partdata['stdev']
                if mean < limits['minmean']:
                        limits['minmean'] = mean
                if mean > limits['maxmean']:
                        limits['maxmean'] = mean
                if stdev < limits['minstdev']:
                        limits['minstdev'] = stdev
                if stdev > limits['maxstdev']:
                        limits['maxstdev'] = stdev
        if limits['minmean'] > 1e11:
                apDisplay.printWarning("particles have no mean values in database")
                return
        apDisplay.printMsg(str(limits))

        ### create particle bins
        partlists = {}
        for i in range(gridpoints):
                for j in range(gridpoints):
                        key = ("%02dx%02d"%(i,j))
                        partlists[key] = []

        ### sort particles into bins
        for partdata in partdatas:
                key = meanStdevToKey(partdata['mean'], partdata['stdev'], limits, gridpoints)
                partnum = int(partdata['particleNumber'])
                partlists[key].append(partnum)

        printPlot(partlists, gridpoints)

        ### createStackAverages
        keys = partlists.keys()
        keys.sort()
        count = 0
        backs = "\b\b\b\b\b\b\b\b\b\b\b"
        montagestack = "montage"+str(stackid)+".hed"
        apFile.removeStack(montagestack)
        mystack = []
        for key in keys:
                count += 1
                sys.stderr.write(backs+backs+backs+backs)
                sys.stderr.write("% 3d of % 3d, %s: % 6d"%(count, len(keys), key, len(partlists[key])))
                avgimg = averageSubStack(partlists[key], stackfile, bin)
                if avgimg is not False:
                        mystack.append(avgimg)
        apImagicFile.writeImagic(mystack, montagestack)
        sys.stderr.write("\n")
        assemblePngs(keys, str(stackid), montagestack)
        apDisplay.printMsg("/bin/mv -v montage"+str(stackid)+".??? "+stackdata['path']['path'])
        apDisplay.printMsg("finished in "+apDisplay.timeString(time.time()-t0))
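
meanStdevToKey() is defined elsewhere in Appion and not shown here; a plausible sketch of how a (mean, stdev) pair could be binned onto the "%02dx%02d" grid keys used above, assuming simple linear binning between the observed limits (an illustration, not the actual Appion implementation):

def mean_stdev_to_key(mean, stdev, limits, gridpoints):
    # linearly bin mean and stdev between their observed min/max;
    # clamp to the last bin so the maximum value still maps to a valid key
    meanrange = (limits['maxmean'] - limits['minmean']) or 1.0
    stdevrange = (limits['maxstdev'] - limits['minstdev']) or 1.0
    i = int((mean - limits['minmean']) / meanrange * gridpoints)
    j = int((stdev - limits['minstdev']) / stdevrange * gridpoints)
    i = min(max(i, 0), gridpoints - 1)
    j = min(max(j, 0), gridpoints - 1)
    return "%02dx%02d" % (i, j)
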
	def start(self):
		self.params['output_fileformat'] = 'mrc'
		newstackname='framealigned.hed'
		stackdata=apStack.getStackParticlesFromId(self.params['stackid'])
		stackrundata=apStack.getOnlyStackData(self.params['stackid'])
		apix=stackrundata['pixelsize']*1e10
		kev=stackdata[0]['particle']['image']['scope']['high tension']/1000
		origstackpath=os.path.join(stackrundata['path']['path'],stackrundata['name'])
		boxsize=stackdata[0]['stackRun']['stackParams']['boxSize']
		binning=stackdata[0]['stackRun']['stackParams']['bin']	
		
		#determine camera type
		cameratype=stackdata[0]['particle']['image']['camera']['ccdcamera']['name']
		if self.params['override_camera'] is not None:
			cameratype=self.params['override_camera']
		
		#create sorted boxfiles
		imagedict={}
		masterlist=[]
		for particle in stackdata:
			parentimage=particle['particle']['image']['filename']
			if parentimage in imagedict.keys():
				imagedict[parentimage].append(particle['particle'])
			else:
				imagedict[parentimage]=[]
				imagedict[parentimage].append(particle['particle'])
			index=len(imagedict[parentimage])-1
			masterlist.append({'particle':particle,'key':parentimage,'index':index})
		#print masterlist
		
		for key in imagedict:
			particlelst=imagedict[key]
			parentimage=key
			framespath=particlelst[0]['image']['session']['frame path']
			
			print cameratype
			if 'Gatan' in cameratype:
				#prepare frames
				print framespath
				
				#prepare frame directory
				framespathname=os.path.join(self.params['rundir'],parentimage+'.frames')
				if os.path.exists(framespathname):
					pass
				else:
					os.mkdir(framespathname)
				print framespathname
				
				mrcframestackname=parentimage+'.frames.mrc'
				
				print mrcframestackname
				
				nframes=particlelst[0]['image']['camera']['nframes']
				
				print "Extracting frames for", mrcframestackname
				for n in range(nframes):
					a=mrc.read(os.path.join(framespath,mrcframestackname),n)
					numpil.write(a,imfile=os.path.join(framespathname,'RawImage_%d.tif' % (n)), format='tiff')
				
			elif 'DE' in cameratype:
				framespathname=os.path.join(framespath,parentimage+'.frames')
			
			print os.getcwd()
			print framespathname
			#generate DE script call
			if os.path.exists(framespathname):
				print "found frames for", parentimage

				nframes=particlelst[0]['image']['camera']['nframes']
				boxname=parentimage + '.box'
				boxpath=os.path.join(framespathname,boxname)
				shiftdata={'scale':1,'shiftx':0,'shifty':0}

				#flatfield references
				brightrefpath=particlelst[0]['image']['bright']['session']['image path']
				brightrefname=particlelst[0]['image']['bright']['filename']
				brightnframes=particlelst[0]['image']['bright']['camera']['nframes']
				darkrefpath=particlelst[0]['image']['dark']['session']['image path']
				darkrefname=particlelst[0]['image']['dark']['filename']
				darknframes=particlelst[0]['image']['dark']['camera']['nframes']
				brightref=os.path.join(brightrefpath,brightrefname+'.mrc')
				darkref=os.path.join(darkrefpath,darkrefname+'.mrc')
				print brightref
				print darkref			
				# use the parent image of this particle group; the loop variable 'particle' from the earlier loop would be stale here
				apBoxer.processParticleData(particlelst[0]['image'],boxsize,particlelst,shiftdata,boxpath)
				print framespathname			

				#set appion specific options
				self.params['gainreference_filename']=brightref
				self.params['gainreference_framecount']=brightnframes
				self.params['darkreference_filename']=darkref
				self.params['darkreference_framecount']=darknframes
				self.params['input_framecount']=nframes
				self.params['boxes_fromfiles']=1
				#self.params['run_verbosity']=3
				self.params['output_invert']=0
				#self.params['radiationdamage_apix=']=apix
				self.params['radiationdamage_voltage']=kev
				#self.params['boxes_boxsize']=boxsize

				outpath=os.path.join(self.params['rundir'],key)
				if os.path.exists(outpath):
					shutil.rmtree(outpath)
				os.mkdir(outpath)
				
				command=['deProcessFrames.py']
				keys=self.params.keys()
				keys.sort()
				for key in keys:
					param=self.params[key]
					#print key, param, type(param)
					if param == None or param=='':
						pass
					else:
						option='--%s=%s' % (key,param)
						command.append(option)
				command.append(outpath)
				command.append(framespathname)
				print command
				if self.params['dryrun'] is False:
					subprocess.call(command)
					
		
		#recreate particle stack
		for n,particledict in enumerate(masterlist):
			parentimage=particledict['key']
			correctedpath=os.path.join(self.params['rundir'],parentimage)
			print correctedpath
			if os.path.exists(correctedpath):
			
				correctedparticle=glob.glob(os.path.join(correctedpath,('%s.*.region_%03d.*' % (parentimage,particledict['index']))))
				print os.path.join(correctedpath,('%s.*.region_%03d.*' % (parentimage,particledict['index'])))
				print correctedparticle
				#sys.exit()
				command=['proc2d',correctedparticle[0], newstackname]
				if self.params['output_rotation'] !=0:
					command.append('rot=%d' % self.params['output_rotation'])
				
				if self.params['show_DE_command'] is True:
					print command
				subprocess.call(command)
			else:
				print "did not find frames for ", parentimage
				command=['proc2d', origstackpath, newstackname,('first=%d' % n), ('last=%d' % n)]
				print command
				if self.params['dryrun'] is False:
					subprocess.call(command)
				
		#upload stack
		
		#make keep file
		self.params['keepfile']='keepfile.txt'
		f=open(self.params['keepfile'],'w')
		for n in range(len(masterlist)):
			f.write('%d\n' % (n))
		f.close()
		
		apStack.commitSubStack(self.params, newname=newstackname)
		apStack.averageStack(stack=newstackname)
		
		print "Done!!!!"
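
The deProcessFrames.py command above is assembled by turning every non-empty entry of self.params into a --key=value option. The same pattern as a small standalone helper (a sketch; the option names are simply whatever keys the dictionary holds):

def params_to_options(params):
    # skip None/empty values and sort the keys for a reproducible command line
    options = []
    for key in sorted(params.keys()):
        value = params[key]
        if value is None or value == '':
            continue
        options.append('--%s=%s' % (key, value))
    return options

# example:
# params_to_options({'input_framecount': 7, 'darkreference_filename': None})
# -> ['--input_framecount=7']
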
    def generateParticleParams(self):
        paramfile = os.path.join(self.params['rundir'], 'params.iter000.par')
        apDisplay.printMsg("Creating parameter file: " + paramfile)

        numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
        if os.path.isfile(paramfile):
            f = open(paramfile, 'r')
            numparam = len(f.readlines())
            if numparam > self.params['last']:
                apDisplay.printMsg("Param file exists")
                return paramfile
            else:
                apDisplay.printWarning(
                    "Param file exists with too few particles: %d vs %d" %
                    (numparam, numpart))

        stackdata = apStack.getStackParticlesFromId(self.params['stackid'])
        particleparams = {}

        f = open(paramfile, 'w')
        f.write(
            "C           PSI   THETA     PHI     SHX     SHY    MAG   FILM      DF1      DF2  ANGAST  PRESA\n"
        )
        apDisplay.printMsg("Writing out particle parameters")
        count = 0
        t0 = time.time()
        self.noeulers = 0
        for particle in stackdata:
            count += 1
            if (count % 200 == 0):
                estime = (time.time() - t0) * (numpart - count) / float(count)
                apDisplay.printMsg("particle %d -- %s remain" %
                                   (count, apDisplay.timeString(estime)))
            if count > self.params['last']:
                break
            # defaults
            ## Niko says that if the defocus is negative it will not perform CTF correction
            ## But it will also not correct the amplitudes
            particleparams = {
                'ptclnum': particle['particleNumber'],
                'psi': 0.0,
                'theta': 0.0,
                'phi': 0.0,
                'df1': -1.0,  # workaround if no ctf correction
                'df2': -1.0,  # set defocus to -1.0 Angstroms
                'angast': 0.0,
                'mag': 10000,  # workaround to get around dstep
                'shx': 0.0,
                'shy': 0.0,
                'film': 1,
                'presa': 0.0,
                'dpres': 0.0,
            }
            imagedata = particle['particle']['image']
            if self.params['noctf'] is False:
                ctfdata, confidence = ctfdb.getBestCtfValueForImage(
                    imagedata, msg=False, method=self.params['ctfmethod'])
                if ctfdata is not None:
                    ### use defocus and astigmatism values
                    particleparams['df1'] = abs(ctfdata['defocus1'] * 1e10)
                    particleparams['df2'] = abs(ctfdata['defocus2'] * 1e10)
                    particleparams['angast'] = ctfdata['angle_astigmatism']
            # if using parameters from previous reconstruction
            if self.params['reconiterid'] is not None:
                emaneuler = self.getStackParticleEulersForIteration(
                    particle['particleNumber'])
                frealigneulers = apFrealign.convertEmanEulersToFrealign(
                    emaneuler, sym=self.params['sym'])
                particleparams['psi'] = frealigneulers['psi']
                particleparams['theta'] = frealigneulers['theta']
                particleparams['phi'] = frealigneulers['phi']
                particleparams['shx'] = emaneuler['shiftx']
                particleparams['shy'] = emaneuler['shifty']
                if emaneuler['mirror'] is True:
                    particleparams['shx'] *= -1
            self.writeParticleParamLine(particleparams, f)
        f.close()
        return paramfile
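
writeParticleParamLine() is not shown in this example. One plausible fixed-width layout for a single line, matching the column header written above (the exact field widths are an assumption, not the Appion implementation):

def write_particle_param_line(p, fh):
    # p is the particleparams dictionary built in the loop above
    fh.write("%7d%8.2f%8.2f%8.2f%8.2f%8.2f%8d%6d%9.1f%9.1f%8.2f%7.2f\n" % (
        p['ptclnum'], p['psi'], p['theta'], p['phi'], p['shx'], p['shy'],
        p['mag'], p['film'], p['df1'], p['df2'], p['angast'], p['presa']))
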
    def generateParticleParams(self):
        paramfile = os.path.join(self.params["rundir"], "params.iter000.par")
        apDisplay.printMsg("Creating parameter file: " + paramfile)

        numpart = apStack.getNumberStackParticlesFromId(self.params["stackid"])
        if os.path.isfile(paramfile):
            f = open(paramfile, "r")
            numparam = len(f.readlines())
            if numparam > self.params["last"]:
                apDisplay.printMsg("Param file exists")
                return paramfile
            else:
                apDisplay.printWarning("Param file exists with too few particles: %d vs %d" % (numparam, numpart))

        stackdata = apStack.getStackParticlesFromId(self.params["stackid"])
        particleparams = {}

        f = open(paramfile, "w")
        f.write("C           PSI   THETA     PHI     SHX     SHY    MAG   FILM      DF1      DF2  ANGAST  PRESA\n")
        apDisplay.printMsg("Writing out particle parameters")
        count = 0
        t0 = time.time()
        self.noeulers = 0
        for particle in stackdata:
            count += 1
            if count % 200 == 0:
                estime = (time.time() - t0) * (numpart - count) / float(count)
                apDisplay.printMsg("particle %d -- %s remain" % (count, apDisplay.timeString(estime)))
            if count > self.params["last"]:
                break
            # defaults
            ## Niko says that if the defocus is negative it will not perform CTF correction
            ## But it will also not correct the amplitudes
            particleparams = {
                "ptclnum": particle["particleNumber"],
                "psi": 0.0,
                "theta": 0.0,
                "phi": 0.0,
                "df1": -1.0,  # workaround if no ctf correction
                "df2": -1.0,  # set defocus to -1.0 Angstroms
                "angast": 0.0,
                "mag": 10000,  # workaround to get around dstep
                "shx": 0.0,
                "shy": 0.0,
                "film": 1,
                "presa": 0.0,
                "dpres": 0.0,
            }
            imagedata = particle["particle"]["image"]
            if self.params["noctf"] is False:
                ctfdata, confidence = ctfdb.getBestCtfValueForImage(
                    imagedata, msg=False, method=self.params["ctfmethod"]
                )
                if ctfdata is not None:
                    ### use defocus and astigmatism values
                    particleparams["df1"] = abs(ctfdata["defocus1"] * 1e10)
                    particleparams["df2"] = abs(ctfdata["defocus2"] * 1e10)
                    particleparams["angast"] = ctfdata["angle_astigmatism"]
            # if using parameters from previous reconstruction
            if self.params["reconiterid"] is not None:
                emaneuler = self.getStackParticleEulersForIteration(particle["particleNumber"])
                frealigneulers = apFrealign.convertEmanEulersToFrealign(emaneuler, sym=self.params["sym"])
                particleparams["psi"] = frealigneulers["psi"]
                particleparams["theta"] = frealigneulers["theta"]
                particleparams["phi"] = frealigneulers["phi"]
                particleparams["shx"] = emaneuler["shiftx"]
                particleparams["shy"] = emaneuler["shifty"]
                if emaneuler["mirror"] is True:
                    particleparams["shx"] *= -1
            self.writeParticleParamLine(particleparams, f)
        f.close()
        return paramfile
Example 33
    def getStackParticleParams(self):
        """
        for each particle in the stack, get the information that RELION needs
        """
        stackPartList = apStack.getStackParticlesFromId(self.params['stackid'])

        if 'last' not in self.params:
            self.params['last'] = len(stackPartList)

        firstImageId = stackPartList[0]['particle']['image'].dbid
        count = 0
        lastImageId = -1
        lastCtfData = None
        lastKv = -1
        partParamsList = []
        sys.stderr.write("reading stack particle data\n")
        t0 = time.time()
        for stackPart in stackPartList:
            count += 1
            if count % 100 == 0:
                sys.stderr.write(".")
            if count % 10000 == 0:
                sys.stderr.write("\nparticle %d of %d\n" %
                                 (count, self.params['last']))

            # extra particle number information not read by Relion
            if count != stackPart['particleNumber']:
                apDisplay.printWarning(
                    "particle number in database is not in sync")

            if count > self.params['last']:
                break

            partParams = {}
            partParams['ptclnum'] = count
            partParams['filmNum'] = self.getFilmNumber(stackPart, firstImageId)
            #print partParams['filmNum']
            ### get image data
            imagedata = stackPart['particle']['image']
            if self.originalStackData.defocpair is True:
                imagedata = apDefocalPairs.getDefocusPair(imagedata)

            if lastImageId == imagedata.dbid:
                ctfdata = lastCtfData
                partParams['kv'] = lastKv
            else:
                ctfdata = ctfdb.getBestCtfValue(
                    imagedata, msg=False, method=self.params['ctfmethod'])
                partParams['kv'] = imagedata['scope']['high tension'] / 1000.0
            lastCtfData = ctfdata
            lastImageId = imagedata.dbid
            lastKv = partParams['kv']

            ### get CTF data from image
            if ctfdata is not None:
                # use defocus & astigmatism values
                partParams['defocus1'] = abs(ctfdata['defocus1'] * 1e10)
                partParams['defocus2'] = abs(ctfdata['defocus2'] * 1e10)
                partParams['angle_astigmatism'] = ctfdata['angle_astigmatism']
                partParams['amplitude_contrast'] = ctfdata[
                    'amplitude_contrast']
            else:
                apDisplay.printWarning(
                    "No ctf information for particle %d in image %d" %
                    (count, imagedata.dbid))
                partParams['defocus1'] = 0.1
                partParams['defocus2'] = 0.1
                partParams['angle_astigmatism'] = 0.0
                partParams['amplitude_contrast'] = 0.07

            if self.params['reconiterid'] is not None:
                eulerDict = self.getStackParticleEulersForIteration(stackPart)
                partParams.update(eulerDict)

            partParamsList.append(partParams)
        print "no class %d ; mismatch %d" % (self.noClassification,
                                             self.mismatch)
        sys.stderr.write("\ndone in %s\n\n" %
                         (apDisplay.timeString(time.time() - t0)))
        return partParamsList
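
Because stack particles come back grouped by parent image, the loop above issues the expensive CTF query only when the image changes and otherwise reuses the previous result. The same memoize-on-last-key pattern in isolation (a sketch; lookup stands in for the database call):

def per_image_cached(stack_parts, lookup):
    # call lookup(imagedata) only when the parent image id changes
    # between consecutive particles; reuse the last result otherwise
    last_image_id = None
    last_value = None
    results = []
    for part in stack_parts:
        imagedata = part['particle']['image']
        if imagedata.dbid != last_image_id:
            last_value = lookup(imagedata)
            last_image_id = imagedata.dbid
        results.append(last_value)
    return results
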
		sys.exit()

	#parse params
	params={}
	params['stackid']=int(sys.argv[1])
	params['newstackname']=sys.argv[2]
	params['bin']=int(sys.argv[3])
	params['newstackpath']=os.getcwd()
	params['description']="stackid %d was scaled by a factor of %d" % (params['stackid'],params['bin'])

	#check for multiple runs in stack
	runs_in_stack=apStack.getRunsInStack(params['stackid'])
	if len(runs_in_stack) > 1:
		print "scalestack.py can't scale this stack because it is a combination of multiple makestack runs."
		print "Instead, use makestack to create a new single scaled stack"
		sys.exit()

	#get stackdata
	stackdata=apStack.getStackParticlesFromId(params['stackid'])

	#do operations on stack
	print "Scaling stack"
	scaleStack(stackdata,params)

	#commit new stack to db
	print "Commiting new stack to db"
	commitScaledStack(stackdata,params)

	print "Done!"

Example 35
    def start(self):
        subtomorunq = appiondata.ApSubTomogramRunData()
        subtomorundata = subtomorunq.direct_query(self.params['subtomoId'])
        volshape, totalbin, pixelsize = apTomo.getSubvolumeInfo(subtomorundata)
        if volshape is None:
            apDisplay.printError('No subvolume exists for the subtomoId')
        sessionname = subtomorundata['session']['name']
        stackq = appiondata.ApStackData()
        stackdata = stackq.direct_query(self.params['stackId'])
        diameter = apStack.getStackParticleDiameter(stackdata)
        diameterpixel = diameter * 1e-10 / pixelsize
        halfwidth = diameterpixel / 4
        ztolerance = halfwidth
        zbackgroundrange = max(((volshape[0] - diameterpixel * 3) / 2, 10))
        if self.params['commit']:
            avgrundata = apTomo.insertTomoAverageRun(
                self.params['runname'],
                self.params['rundir'],
                subtomorundata,
                stackdata,
                halfwidth,
                self.params['description'],
            )
        profiles = {}
        sumvol = numpy.zeros(volshape)
        substacktype, conditionstackdata = apStack.findSubStackConditionData(
            stackdata)
        if substacktype in ['clustersub', 'alignsub']:
            alignstack = apStack.getAlignStack(substacktype,
                                               conditionstackdata)
            alignpackage = apAlignment.getAlignPackage(alignstack['alignrun'])
            stackprtls = apStack.getStackParticlesFromId(stackdata.dbid)
            i = 0
            for stackp in stackprtls:
                alignp = apAlignment.getAlignParticle(stackp, alignstack)
                shift = apAlignment.getAlignShift(alignp, alignpackage)
                subtomodata = apTomo.getSubTomogramData(subtomorundata, stackp)
                subtomofile = os.path.join(subtomodata['path']['path'],
                                           subtomodata['name'] + '.rec')
                subvolume = apTomo.getTomoVolume(subtomodata)
                if subvolume is not None:
                    zcenter = volshape[0] / 2
                    profile = apTomo.getParticleCenterZProfile(
                        subvolume, shift, halfwidth, zbackgroundrange)
                    subtomoid = subtomodata.dbid
                    profiles[subtomoid] = profile
                    center = apTomo.gaussianCenter(profile)
                    if center > zcenter - ztolerance and center < zcenter + ztolerance:
                        i += 1
                        shiftz = zcenter - center
                        transformedvolume = apTomo.transformTomo(
                            subvolume, subtomofile, alignpackage, alignp,
                            shiftz, totalbin)
                        ## write transformed mrc file to check the result
                        filename = os.path.join(
                            self.params['rundir'],
                            './transformed%05d.mrc' % subtomoid)
                        mrc.write(transformedvolume, filename)
                        sumvol += transformedvolume
                        t = numpy.sum(transformedvolume, axis=0)
                        filename = os.path.join(self.params['rundir'],
                                                './p%05d.mrc' % subtomoid)
                        mrc.write(transformedvolume, filename)
                        if self.params['commit']:
                            apTomo.insertTomoAvgParticle(
                                avgrundata, subtomodata, alignp, shiftz)
            if i < 1:
                apDisplay.printError('no subtomogram qualifies for averaging')
            else:
                avgvol = sumvol / i
            avgvolfilename = sessionname + "_" + self.params['runname'] + ".mrc"
            avgvolpath = os.path.join(self.params['rundir'], avgvolfilename)
            mrc.write(avgvol, avgvolpath)
            if not os.path.isfile(avgvolpath):
                apDisplay.printError("tomogram does not exist")
            apTomo.makeMovie(avgvolpath, self.params['maxsize'])
            apTomo.makeProjection(avgvolpath, self.params['maxsize'])

        proshape = profile.shape
        for id in profiles.keys():
            out = open('profile_%05d.txt' % id, 'w')
            for z in range(0, proshape[0]):
                str = "%5d\t" % z
                str += "%6.3f\t" % profiles[id][z]
                str += "\n"
                out.write(str)
            out.close()
	def start(self):
		subtomorunq = appiondata.ApSubTomogramRunData()
		subtomorundata = subtomorunq.direct_query(self.params['subtomoId'])
		volshape,totalbin,pixelsize = apTomo.getSubvolumeInfo(subtomorundata)
		if volshape is None:
			apDisplay.printError('No subvolume exists for the subtomoId')
		sessionname = subtomorundata['session']['name']
		stackq = appiondata.ApStackData()
		stackdata = stackq.direct_query(self.params['stackId'])
		diameter = apStack.getStackParticleDiameter(stackdata)
		diameterpixel = diameter * 1e-10 / pixelsize
		halfwidth = diameterpixel / 4
		ztolerance = halfwidth
		zbackgroundrange = max(((volshape[0] - diameterpixel*3)/2,10))
		if self.params['commit']:
			avgrundata = apTomo.insertTomoAverageRun(self.params['runname'],
					self.params['rundir'],
					subtomorundata,
					stackdata,
					halfwidth,
					self.params['description'],
			)
		profiles = {}
		sumvol = numpy.zeros(volshape)
		substacktype,conditionstackdata = apStack.findSubStackConditionData(stackdata)
		if substacktype in ['clustersub','alignsub']:
			alignstack = apStack.getAlignStack(substacktype,conditionstackdata)
			alignpackage = apAlignment.getAlignPackage(alignstack['alignrun'])
			stackprtls = apStack.getStackParticlesFromId(stackdata.dbid)
			i = 0
			for stackp in stackprtls:
				alignp = apAlignment.getAlignParticle(stackp,alignstack)
				shift = apAlignment.getAlignShift(alignp,alignpackage)
				subtomodata = apTomo.getSubTomogramData(subtomorundata,stackp)
				subtomofile = os.path.join(subtomodata['path']['path'],subtomodata['name']+'.rec')
				subvolume = apTomo.getTomoVolume(subtomodata)
				if subvolume is not None:
					zcenter = volshape[0] / 2
					profile = apTomo.getParticleCenterZProfile(subvolume,shift,halfwidth,zbackgroundrange)
					subtomoid = subtomodata.dbid
					profiles[subtomoid] = profile
					center = apTomo.gaussianCenter(profile)
					if center > zcenter - ztolerance and center < zcenter + ztolerance:
						i += 1
						shiftz = zcenter - center
						transformedvolume = apTomo.transformTomo(subvolume,subtomofile,alignpackage,alignp,shiftz,totalbin)
						## write transformed mrc file to check the result
						filename = os.path.join(self.params['rundir'],'./transformed%05d.mrc' %subtomoid)
						mrc.write(transformedvolume,filename)
						sumvol += transformedvolume
						t = numpy.sum(transformedvolume,axis=0)
						filename = os.path.join(self.params['rundir'],'./p%05d.mrc' %subtomoid)
						mrc.write(transformedvolume,filename)
						if self.params['commit']:
							apTomo.insertTomoAvgParticle(avgrundata,subtomodata,alignp,shiftz)
			if i < 1:
				apDisplay.printError('no subtomogram qualifies for averaging')
			else:
				avgvol = sumvol / i
			avgvolfilename = sessionname+"_"+self.params['runname']+".mrc"
			avgvolpath = os.path.join(self.params['rundir'],avgvolfilename)
			mrc.write(avgvol,avgvolpath)
			if not os.path.isfile(avgvolpath):
				apDisplay.printError("tomogram does not exist")
			apTomo.makeMovie(avgvolpath,self.params['maxsize'])
			apTomo.makeProjection(avgvolpath,self.params['maxsize'])

		proshape = profile.shape
		for id in profiles.keys():
			out = open('profile_%05d.txt'%id,'w')
			for z in range(0,proshape[0]):
				str = "%5d\t" % z
				str += "%6.3f\t" % profiles[id][z]
				str += "\n"
				out.write(str)
			out.close()
	def checkConflicts(self):
		
		### setup correct database after we have read the project id
		if 'projectid' in self.params and self.params['projectid'] is not None:
			apDisplay.printMsg("Using split database")
			# use a project database
			newdbname = apProject.getAppionDBFromProjectId(self.params['projectid'])
			sinedon.setConfig('appiondata', db=newdbname)
			apDisplay.printColor("Connected to database: '"+newdbname+"'", "green")
		
		if self.params['stackid'] is None:
			apDisplay.printError("stackid was not defined")

		if self.params['expweight'] is False:
			apDisplay.printWarning("Exposure weighting is turned off, make sure this is what you want")
		if self.params['localavg'] is False:
			apDisplay.printWarning("Trajectory local averaging is turned off, make sure this is what you want")

		# DD processes
		self.dd = apDDprocess.DDStackProcessing()
		print self.dd
	
		# get stack data
		self.stackdata = appiondata.ApStackData.direct_query(self.params['stackid'])
		self.stackparts = apStack.getStackParticlesFromId(self.params['stackid'], msg=True)
		self.sessiondata = apStack.getSessionDataFromStackId(self.params['stackid'])
		
		# query image
		qimage = self.stackparts[0]['particle']['image']

		# pixel size info
		self.params['apix'] = apStack.getMicrographPixelSizeFromStackId(self.params['stackid'])
		self.params['box'] = self.stackdata['boxsize']
		self.params['particleradius'] = self.params['particleradius'] / self.params['apix']
		if self.params['particleradius'] > self.params['box'] / 2.0:
			apDisplay.printWarning("specified particle radius greater than box radius, \
				setting particle radius to 0.8 * boxsize")

		# micrograph & frame info
		frames = qimage['use frames']
		nframes = len(frames)
		if self.params['framelastali'] is None:
			self.params['framelastali'] = frames[-1]
		if self.params['framelastave'] is None:
			self.params['framelastave'] = frames[-1]

		# microscope kV
		self.params['kv'] = qimage['scope']['high tension']/1000.0

		# query exposure per frame, if not set here
		if self.params['total_dose'] is not None:
			dose = self.params['total_dose']
		else:
			try:
				dose = apDatabase.getDoseFromImageData(qimage)
			except:
				apDisplay.printError("dose not specified and not in database, please specify explicitly")
		if self.params['expperframe'] is None and self.params['expweight'] is True:
			if dose is not None:
				self.params['expperframe'] = dose / nframes
			else:
				apDisplay.printError("exposure per frame needs to be specified, cannot find in database")
	
		# dimensions
		self.params['framex'] = int(apDatabase.getDimensionsFromImageData(qimage)['x'])
		self.params['framey'] = int(apDatabase.getDimensionsFromImageData(qimage)['y'])

		# DD info
		self.dd.setImageData(qimage)
		self.dd.setDDStackRun(self.params['ddstackid'])
		self.ddstackpath = self.dd.getDDStackRun()['path']['path']
    def start(self):
        ### check for existing run
        selectrunq = appiondata.ApSelectionRunData()
        selectrunq["name"] = self.params["runname"]
        selectrunq["path"] = appiondata.ApPathData(path=os.path.abspath(self.params["rundir"]))
        selectrundata = selectrunq.query(readimages=False)
        if selectrundata:
            apDisplay.printError("Runname already exists")

        if self.params["ddstack"]:
            self.other_ddstack_used = []
            self.dd = apDDprocess.DDStackProcessing()
            self.dd.setDDStackRun(self.params["ddstack"])
            self.newddstackrun = self.dd.getDDStackRun(show_msg=True)
        ### stack data
        stackdata = apStack.getOnlyStackData(self.params["stackid"])

        ### stack particles
        stackparts = apStack.getStackParticlesFromId(self.params["stackid"], msg=True)
        stackparts.reverse()

        ### selection run for first particle
        oldselectrun = stackparts[0]["particle"]["selectionrun"]

        ### set selection run
        manualparamsq = appiondata.ApManualParamsData()
        manualparamsq["diam"] = self.getDiamFromSelectionRun(oldselectrun)
        manualparamsq["oldselectionrun"] = oldselectrun
        manualparamsq["trace"] = False
        selectrunq = appiondata.ApSelectionRunData()
        selectrunq["name"] = self.params["runname"]
        selectrunq["hidden"] = False
        selectrunq["path"] = appiondata.ApPathData(path=os.path.abspath(self.params["rundir"]))
        selectrunq["session"] = apStack.getSessionDataFromStackId(self.params["stackid"])
        selectrunq["manparams"] = manualparamsq

        ### insert particles
        apDisplay.printMsg("Inserting particles into database")
        count = 0
        t0 = time.time()
        startmem = mem.active()
        numpart = len(stackparts)
        for stackpart in stackparts:
            count += 1
            if count > 10 and count % 100 == 0:
                perpart = (time.time() - t0) / float(count + 1)
                apDisplay.printColor(
                    "part %d of %d :: %.1fM mem :: %s/part :: %s remain"
                    % (
                        count,
                        numpart,
                        (mem.active() - startmem) / 1024.0,
                        apDisplay.timeString(perpart),
                        apDisplay.timeString(perpart * (numpart - count)),
                    ),
                    "blue",
                )
            oldpartdata = stackpart["particle"]
            newpartq = appiondata.ApParticleData(initializer=oldpartdata)
            newpartq["selectionrun"] = selectrunq
            if self.params["ddstack"]:
                newimagedata = self.getNewImageFromDDStack(oldpartdata["image"])
                if newimagedata is False:
                    # no pick transferred
                    continue
                newpartq["image"] = newimagedata
            if self.params["commit"] is True:
                newpartq.insert()
        apDisplay.printMsg("Completed in %s" % (apDisplay.timeString(time.time() - t0)))
 def calculateEulerJumpsForEntireRecon(self,
                                       reconrunid,
                                       stackid=None,
                                       sym=None,
                                       multimodelrunid=None):
     if sym is None:
         sym = self.getSymmetry(reconrunid)
     if re.match("^icos", sym.lower()):
         apDisplay.printWarning("Processing Icos symmetry " + sym)
     if not re.match("^[cd][0-9]+$", sym.lower()) and not re.match(
             "^icos", sym.lower()):
         apDisplay.printError(
             "Cannot calculate euler jumps for symmetry: " + sym)
         return
     ### get stack particles
     if stackid is None:
         stackid = apStack.getStackIdFromRecon(reconrunid, msg=False)
     stackparts = apStack.getStackParticlesFromId(stackid)
     stackparts.sort(self.sortStackParts)
     numparts = len(stackparts)
     ### start loop
     t0 = time.time()
     medians = []
     count = 0
     miscount = 0
     for stackpart in stackparts:
         count += 1
         if multimodelrunid is None:
             jumpdata = self.getEulerJumpData(reconrunid,
                                              stackpartid=stackpart.dbid,
                                              stackid=stackid,
                                              sym=sym)
         else:
             jumpdata = self.getEulerJumpData(
                 reconrunid,
                 stackpartid=stackpart.dbid,
                 stackid=stackid,
                 sym=sym,
                 multimodelrunid=multimodelrunid)
         if jumpdata is None:
             miscount += 1
             if miscount < 25:
                 continue
             else:
                 break
         medians.append(jumpdata['median'])
         if count % 500 == 0:
             timeremain = (time.time() - t0) / (count + 1) * (numparts -
                                                              count)
             print("particle=% 5d; median jump=% 3.2f, remain time= %s" %
                   (stackpart['particleNumber'], jumpdata['median'],
                    apDisplay.timeString(timeremain)))
     if len(medians) > 0:
         ### print stats
         apDisplay.printMsg("complete " + str(len(stackparts)) +
                            " particles in " +
                            apDisplay.timeString(time.time() - t0))
         print "-- median euler jumper stats --"
         medians = numpy.asarray(medians, dtype=numpy.float32)
         print("mean/std :: " + str(round(medians.mean(), 2)) + " +/- " +
               str(round(medians.std(), 2)))
         print("min/max  :: " + str(round(medians.min(), 2)) + " <> " +
               str(round(medians.max(), 2)))
     else:
         apDisplay.printWarning(
             "no Euler jumpers inserted into the database, make sure that the angles are read by the recon uploader"
         )
     return
Example 40
    def start(self):
        """
        for each particle in the stack, get the information that RELION needs
        """
        stackPartList = apStack.getStackParticlesFromId(self.params['stackid'])
        nptcls = len(stackPartList)
        currentImageId = stackPartList[0]['particle']['image'].dbid
        count = 0
        imagenumber = 1
        partParamsList = []
        sys.stderr.write("reading stack particle data\n")

        #create list of dictionaries that will be passed to starFile maker later
        for stackPart in stackPartList:
            count += 1
            if count % 100 == 0:
                sys.stderr.write(".")
            if count % 10000 == 0:
                sys.stderr.write("\nparticle %d of %d\n" % (count, nptcls))

            # extra particle number information not read by Relion
            if count != stackPart['particleNumber']:
                apDisplay.printWarning(
                    "particle number in database is not in sync")

            partParams = {}
            partParams['ptclnum'] = count

            ### get image data
            imagedata = stackPart['particle']['image']
            if imagedata.dbid != currentImageId:
                imagenumber += 1
                currentImageId = imagedata.dbid
            partParams['filmNum'] = imagenumber
            #print partParams['filmNum']
            partParams['kv'] = imagedata['scope']['high tension'] / 1000.0
            partParams['cs'] = imagedata['scope']['tem']['cs'] * 1000
            ### get CTF data from image
            ctfdata = ctfdb.getBestCtfValue(imagedata,
                                            msg=False,
                                            sortType='maxconf')
            if ctfdata is not None:
                # use defocus & astigmatism values
                partParams['defocus1'] = abs(ctfdata['defocus1'] * 1e10)
                partParams['defocus2'] = abs(ctfdata['defocus2'] * 1e10)
                partParams['angle_astigmatism'] = ctfdata['angle_astigmatism']
                partParams['amplitude_contrast'] = ctfdata[
                    'amplitude_contrast']
            else:
                apDisplay.printError(
                    "No ctf information for particle %d in image %s" %
                    (count, imagedata['filename']))
            partParamsList.append(partParams)

        ###now make star file

        #first make header
        star = starFile.StarFile(self.params['outstar'])
        labels = [
            '_rlnImageName',
            '_rlnMicrographName',
            '_rlnDefocusU',
            '_rlnDefocusV',
            '_rlnDefocusAngle',
            '_rlnVoltage',
            '_rlnSphericalAberration',
            '_rlnAmplitudeContrast',
        ]

        valueSets = []  #list of strings for star file

        ###now make particle data
        for partParams in partParamsList:
            relionDataLine = (
                "%[email protected] mic%d %.6f %.6f %.6f %d %.6f %.6f" % (
                    partParams['ptclnum'],
                    partParams['filmNum'],
                    partParams['defocus2'],
                    partParams['defocus1'],
                    partParams['angle_astigmatism'],
                    partParams['kv'],
                    partParams['cs'],
                    partParams['amplitude_contrast'],
                ))
            valueSets.append(relionDataLine)
        star.buildLoopFile("data_", labels, valueSets)
        star.write()
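
For reference, each string appended to valueSets above is one whitespace-separated row whose fields follow the label order in the header; with placeholder values (and a placeholder stack file name), a row would look roughly like this:

# illustrative row only; the stack file name and numbers are placeholders:
# 000001@mystack.mrcs mic1 21000.000000 20500.000000 45.000000 300 2.700000 0.070000
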
    def removePtclsByJumps(self, particles, rejectlst):
        eulerjump = apEulerJump.ApEulerJump()
        numparts = len(particles)
        apDisplay.printMsg("finding euler jumps for " + str(numparts) +
                           " particles")

        ### check symmetry
        symmetry = eulerjump.getSymmetry(self.params['reconid'], msg=True)
        if not re.match("^[cd][0-9]+$", symmetry.lower()) and not re.match(
                "^icos", symmetry.lower()):
            apDisplay.printError(
                "Cannot calculate euler jumps for symmetry: " + symmetry)
            return
        self.params['sym'] = symmetry.lower()

        ### prepare file
        f = open('jumps.txt', 'w')
        f.write("#pnum\t")
        headerlist = ('mean', 'median', 'stdev', 'min', 'max')
        for key in headerlist:
            f.write(key + "\t")
        f.write("\n")

        ### get stack particles
        stackparts = apStack.getStackParticlesFromId(self.params['stackid'])

        ### start loop
        t0 = time.time()
        medians = []
        count = 0
        apDisplay.printMsg("processing euler jumps for recon run=" +
                           str(self.params['reconid']))
        for stackpart in stackparts:
            count += 1
            partnum = stackpart['particleNumber']
            f.write('%d\t' % partnum)
            jumpdata = eulerjump.getEulerJumpData(
                self.params['reconid'],
                stackpartid=stackpart.dbid,
                stackid=self.params['stackid'],
                sym=symmetry)
            medians.append(jumpdata['median'])
            if (jumpdata['median'] >
                    self.params['avgjump']) and partnum not in rejectlst:
                rejectlst.append(partnum)
            for key in headerlist:
                f.write("%.3f\t" % (jumpdata[key]))
            f.write("\n")
            if count % 1000 == 0:
                timeremain = (time.time() - t0) / (count + 1) * (numparts -
                                                                 count)
                apDisplay.printMsg(
                    "particle=% 5d; median jump=% 3.2f, remain time= %s" %
                    (partnum, jumpdata['median'],
                     apDisplay.timeString(timeremain)))
                #f.flush()
        ### print stats
        apDisplay.printMsg("-- median euler jumper stats --")
        medians = numpy.asarray(medians, dtype=numpy.float32)
        apDisplay.printMsg("mean/std :: " + str(round(medians.mean(), 2)) +
                           " +/- " + str(round(medians.std(), 2)))
        apDisplay.printMsg("min/max  :: " + str(round(medians.min(), 2)) +
                           " <> " + str(round(medians.max(), 2)))

        perrej = round(
            100.0 * float(numparts - len(rejectlst)) / float(numparts), 2)
        apDisplay.printMsg("keeping " + str(numparts - len(rejectlst)) +
                           " of " + str(numparts) + " particles (" +
                           str(perrej) + "%) so far " + " in " +
                           apDisplay.timeString(time.time() - t0))

        return rejectlst
Example 42
    def start(self):
        ### default parameters
        starfile = self.params['starfile']
        images = self.params['images']
        dbnum = self.params['dbnum']
        ctfrunid = self.params['ctfrunid']
        stackid = self.params['stackid']
        voltage = self.params['voltage']
        cs = self.params['cs']
        wgh = self.params['wgh']

        ### particles, angles
        appiondata.sinedon.setConfig('appiondata', db="ap%d" % dbnum)
        particledata = apStack.getStackParticlesFromId(stackid)

        ### write Relion starfile header
        sf = open(starfile, "w")
        sf.write("data_images\n")
        sf.write("loop_\n")
        sf.write("_rlnImageName\n")
        sf.write("_rlnMicrographName\n")
        sf.write("_rlnDefocusU\n")
        sf.write("_rlnDefocusV\n")
        sf.write("_rlnDefocusAngle\n")
        sf.write("_rlnVoltage\n")
        sf.write("_rlnSphericalAberration\n")
        sf.write("_rlnAmplitudeContrast\n")

        ### write info to starfile
        olddx = 0
        micn = 0
        oldimgid = None
        for i in range(len(particledata)):
            if i % 1000 == 0:
                print "done with %d particles" % i

            ### CTF info
            image = particledata[i]['particle']['image']
            imgid = image.dbid
            try:
                ctf = ctfdb.getCtfValueForCtfRunId(image, ctfrunid, msg=False)
                dx = ctf['defocus1'] * 10e9
                dy = ctf['defocus2'] * 10e9
                astig = ctf['angle_astigmatism']
            except:
                ctf = None
                # print "didn't find CTF values for image ", image.dbid

            if ctf is None:
                if oldimgid != imgid:
                    print "particle %d: " % i, "getting best value for image: %d" % imgid
                    ctf = ctfdb.getBestCtfValueForImage(image,
                                                        msg=False,
                                                        method='ctffind')
                    dx = ctf[0]['defocus1'] * 10e9
                    dy = ctf[0]['defocus2'] * 10e9
                    astig = ctf[0]['angle_astigmatism']
                    oldctf = ctf
                    oldimgid = imgid
                else:
                    try:
                        ctf = oldctf
                        dx = oldctf[0]['defocus1'] * 10e9
                        dy = oldctf[0]['defocus2'] * 10e9
                        astig = oldctf[0]['angle_astigmatism']
                    except:
                        apDisplay.printError("no CTF information for image")

            if dx != olddx:
                micn += 1
                olddx = dx

            ### write input Relion parameters
            sf.write("%06d@%s%10d%12.3f%12.3f%12.3f%8.3f%8.3f%8.3f\n" %
                     (i + 1, images, micn, dx, dy, astig, voltage, cs, wgh))
        sf.close()
	def generateParticleParams(self):
		paramfile = os.path.join(self.params['rundir'], 'params.iter000.par')
		apDisplay.printMsg("Creating parameter file: "+paramfile)

		numpart = apStack.getNumberStackParticlesFromId(self.params['stackid'])
		if os.path.isfile(paramfile):
			f = open(paramfile, 'r')
			numparam = len(f.readlines())
			if numparam > self.params['last']:
				apDisplay.printMsg("Param file exists")
				return paramfile
			else:
				apDisplay.printWarning("Param file exists with too few particles: %d vs %d"%(numparam,numpart))

		stackdata = apStack.getStackParticlesFromId(self.params['stackid'])
		particleparams={}

		f = open(paramfile, 'w')
		f.write("C           PSI   THETA     PHI     SHX     SHY    MAG   FILM      DF1      DF2  ANGAST  PRESA\n")
		apDisplay.printMsg("Writing out particle parameters")
		count = 0
		t0 = time.time()
		self.noeulers = 0
		for particle in stackdata:
			count += 1
			if (count % 200 == 0):
				estime = (time.time() - t0) * (numpart-count) / float(count)
				apDisplay.printMsg("particle %d -- %s remain"%(count, apDisplay.timeString(estime)))
			if count > self.params['last']:
				break
			# defaults
			## Niko says that if the defocus is negative it will not perform CTF correction
			## But it will also not correct the amplitudes
			particleparams = {
				'ptclnum': particle['particleNumber'],
				'psi': 0.0,
				'theta': 0.0,
				'phi': 0.0,
				'df1': -1.0, # workaround if no ctf correction
				'df2': -1.0, # set defocus to -1.0 Angstroms
				'angast': 0.0,
				'mag': 10000, # workaround to get around dstep
				'shx': 0.0,
				'shy': 0.0,
				'film': 1,
				'presa': 0.0,
				'dpres': 0.0,
			}
			imagedata = particle['particle']['image']
			if self.params['noctf'] is False:
				ctfdata, confidence = ctfdb.getBestCtfValueForImage(imagedata, msg=False, method=self.params['ctfmethod'])
				if ctfdata is not None:
					### use defocus and astigmatism values
					particleparams['df1'] = abs(ctfdata['defocus1']*1e10)
					particleparams['df2'] = abs(ctfdata['defocus2']*1e10)
					particleparams['angast'] = ctfdata['angle_astigmatism']
			# if using parameters from previous reconstruction
			if self.params['reconiterid'] is not None:
				emaneuler = self.getStackParticleEulersForIteration(particle['particleNumber'])
				frealigneulers = apFrealign.convertEmanEulersToFrealign(emaneuler, sym=self.params['sym'])
				particleparams['psi'] = frealigneulers['psi']
				particleparams['theta'] = frealigneulers['theta']
				particleparams['phi'] = frealigneulers['phi']
				particleparams['shx'] = emaneuler['shiftx']
				particleparams['shy'] = emaneuler['shifty']
				if emaneuler['mirror'] is True:
					particleparams['shx'] *= -1
			self.writeParticleParamLine(particleparams,f)
		f.close()
		return paramfile