Example #1
0
	def runProjalign(self, paramfile):
		"""
		Run Hans Peters' projalign via the tomo-refine.sh and tomo-fit.sh
		wrapper scripts.

		Takes individual MRC files, not an MRC stack.

		paramfile -- path to the parameter file handed to both scripts.
		"""
		### tomo-refine.sh iteratively runs projalign; it changes the
		### origin and rotation for each image.
		### results go to runname-iter-numimgs.tlt
		### (dead "if True:" scaffold from an old retry loop removed)
		tomorefineexe = apParam.getExecPath("tomo-refine.sh", die=True)
		cmd = tomorefineexe + " " + paramfile
		print(cmd)
		proc = subprocess.Popen(cmd, shell=True)
		proc.wait()
		### tomo-fit.sh fits the tilt geometry produced above
		tomofitexe = apParam.getExecPath("tomo-fit.sh", die=True)
		cmd = tomofitexe + " " + paramfile
		print(cmd)
		proc = subprocess.Popen(cmd, shell=True)
		proc.wait()
    def runrefine(self):
        """
        Build the option string for xmipp (mpi_)ml_refine3d from
        self.params and run the 3D refinement, using MPI when more than
        one processor and an mpirun binary are available.
        """
        ### setup Xmipp command
        recontime = time.time()

        xmippopts = (
            " "
            + " -i "
            + os.path.join(self.params["rundir"], self.partlistdocfile)
            + " -vol "
            + os.path.join(self.params["rundir"], self.voldocfile)
            + " -iter "
            + str(self.params["maxiter"])
            + " -o "
            + os.path.join(self.params["rundir"], "part" + self.timestamp)
            + " -psi_step "
            + str(self.params["psi"])
            + " -ang "
            + str(self.params["phi"])
        )
        ### fast mode
        if self.params["fast"] is True:
            xmippopts += " -fast "
            if self.params["fastmode"] == "narrow":
                xmippopts += " -C 1e-10 "
            elif self.params["fastmode"] == "wide":
                xmippopts += " -C 1e-18 "
        ### convergence criteria
        if self.params["converge"] == "fast":
            xmippopts += " -eps 5e-3 "
        elif self.params["converge"] == "slow":
            xmippopts += " -eps 5e-8 "
        else:
            xmippopts += " -eps 5e-5 "
        ### mirrors
        if self.params["mirror"] is True:
            xmippopts += " -mirror "
        ### normalization
        if self.params["norm"] is True:
            xmippopts += " -norm "
        ### symmetry
        if self.params["symmetry"] is not None:
            xmippopts += " -sym " + self.params["symmetry"] + " "

        if self.nproc > 1 and self.mpirun is not None:
            ### use multi-processor
            apDisplay.printColor("Using " + str(self.nproc) + " processors!", "green")
            xmippexe = apParam.getExecPath("xmipp_mpi_ml_refine3d", die=True)
            mpiruncmd = self.mpirun + " -np " + str(self.nproc) + " " + xmippexe + " " + xmippopts
            self.writeXmippLog(mpiruncmd)
            apEMAN.executeEmanCmd(mpiruncmd, verbose=True, showcmd=True)
        else:
            ### use single processor
            xmippexe = apParam.getExecPath("xmipp_ml_refine3d", die=True)
            xmippcmd = xmippexe + " " + xmippopts
            self.writeXmippLog(xmippcmd)
            apEMAN.executeEmanCmd(xmippcmd, verbose=True, showcmd=True)
        apDisplay.printMsg("Reconstruction time: " + apDisplay.timeString(time.time() - recontime))
    def runrefine(self):
        """
        Assemble the xmipp (mpi_)ml_refine3d command line from
        self.params and execute it, preferring MPI when available.
        """
        ### setup Xmipp command
        recontime = time.time()

        rundir = self.params['rundir']
        optparts = [
            " ",
            " -i ", os.path.join(rundir, self.partlistdocfile),
            " -vol ", os.path.join(rundir, self.voldocfile),
            " -iter ", str(self.params['maxiter']),
            " -o ", os.path.join(rundir, "part" + self.timestamp),
            " -psi_step ", str(self.params['psi']),
            " -ang ", str(self.params['phi']),
        ]
        xmippopts = "".join(optparts)
        ### fast mode
        if self.params['fast'] is True:
            xmippopts += " -fast "
            fastmode = self.params['fastmode']
            if fastmode == "narrow":
                xmippopts += " -C 1e-10 "
            elif fastmode == "wide":
                xmippopts += " -C 1e-18 "
        ### convergence criteria
        epsflags = {"fast": " -eps 5e-3 ", "slow": " -eps 5e-8 "}
        xmippopts += epsflags.get(self.params['converge'], " -eps 5e-5 ")
        ### mirrors
        if self.params['mirror'] is True:
            xmippopts += " -mirror "
        ### normalization
        if self.params['norm'] is True:
            xmippopts += " -norm "
        ### symmetry
        if self.params['symmetry'] is not None:
            xmippopts += " -sym " + self.params['symmetry'] + " "

        if self.nproc > 1 and self.mpirun is not None:
            ### use multi-processor
            apDisplay.printColor("Using " + str(self.nproc) + " processors!",
                                 "green")
            exepath = apParam.getExecPath("xmipp_mpi_ml_refine3d", die=True)
            fullcmd = (self.mpirun + " -np " + str(self.nproc) + " "
                       + exepath + " " + xmippopts)
        else:
            ### use single processor
            exepath = apParam.getExecPath("xmipp_ml_refine3d", die=True)
            fullcmd = exepath + " " + xmippopts
        self.writeXmippLog(fullcmd)
        apEMAN.executeEmanCmd(fullcmd, verbose=True, showcmd=True)
        apDisplay.printMsg("Reconstruction time: " +
                           apDisplay.timeString(time.time() - recontime))
	def checkMPI(self):
		"""
		Return the mpirun path if both mpirun and an MPI-linked
		simple_mpi_class_averages binary are present, otherwise None.
		"""
		mpiexe = apParam.getExecPath("mpirun")
		if mpiexe is None:
			return None
		simpleexe = apParam.getExecPath("simple_mpi_class_averages")
		if simpleexe is None:
			return None
		### confirm the binary is actually linked against an MPI library;
		### capture stderr so ldd noise does not reach the terminal
		lddcmd = "ldd "+simpleexe+" | grep mpi"
		proc = subprocess.Popen(lddcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
		proc.wait()
		lines = proc.stdout.readlines()
		if lines:
			return mpiexe
		### explicit None on failure (was an implicit fall-through)
		apDisplay.printWarning("Failed to find MPI-linked simple_mpi_class_averages")
		return None
	def checkMPI(self):
		"""
		Return the mpirun path if both mpirun and an MPI-linked
		xmipp_mpi_ml_align2d binary are present, otherwise None.
		"""
		mpiexe = apParam.getExecPath("mpirun")
		if mpiexe is None:
			return None
		xmippexe = apParam.getExecPath("xmipp_mpi_ml_align2d")
		if xmippexe is None:
			return None
		### confirm the binary is actually linked against an MPI library;
		### capture stderr so ldd noise does not reach the terminal
		lddcmd = "ldd "+xmippexe+" | grep mpi"
		proc = subprocess.Popen(lddcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
		proc.wait()
		lines = proc.stdout.readlines()
		if lines:
			return mpiexe
		### explicit None on failure (was an implicit fall-through)
		apDisplay.printWarning("Failed to find MPI-linked xmipp_mpi_ml_align2d")
		return None
 def checkMPI(self):
     """
     Return the mpirun path if both mpirun and an MPI-linked
     simple_mpi_class_averages binary are present, otherwise None.
     """
     mpiexe = apParam.getExecPath("mpirun")
     if mpiexe is None:
         return None
     simpleexe = apParam.getExecPath("simple_mpi_class_averages")
     if simpleexe is None:
         return None
     ### confirm the binary is actually linked against an MPI library;
     ### capture stderr so ldd noise does not reach the terminal
     lddcmd = "ldd " + simpleexe + " | grep mpi"
     proc = subprocess.Popen(lddcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     proc.wait()
     lines = proc.stdout.readlines()
     if lines:
         return mpiexe
     ### explicit None on failure (was an implicit fall-through)
     apDisplay.printWarning("Failed to find MPI-linked simple_mpi_class_averages")
     return None
Example #7
0
def setVolumeMass(volumefile, apix=1.0, mass=1.0, rna=0.0):
    """
    Set the contour of 1.0 to the desired mass (in kDa) of the
    macromolecule based on its density.

    volumefile -- MRC volume to rescale in place
    apix -- pixel size in Angstroms
    mass -- target mass in kDa
    rna -- percentage of RNA in the structure
           NOTE(review): not used by this implementation — confirm intent

    Returns True on success; False when the EMAN 'volume' program
    cannot be found or exits suspiciously fast.
    """
    if isValidVolume(volumefile) is False:
        apDisplay.printError("Volume file is not valid")

    procbin = apParam.getExecPath("proc2d")
    ### BUG FIX: getExecPath without die=True can return None, and
    ### os.path.dirname(None) would raise a TypeError
    if procbin is None:
        apDisplay.printWarning("failed to find EMAN proc2d program")
        return False
    ### the 'volume' program lives in the same EMAN bin directory
    emandir = os.path.dirname(procbin)
    volumebin = os.path.join(emandir, "volume")
    if not os.path.isfile(volumebin):
        apDisplay.printWarning("failed to find volume program")
        return False
    command = "%s %s %.3f set=%.3f" % (volumebin, volumefile, apix, mass)
    t0 = time.time()
    proc = subprocess.Popen(command, shell=True)
    proc.wait()
    ### a sub-10ms run means the program bailed out immediately
    if time.time() - t0 < 0.01:
        apDisplay.printWarning("failed to scale by mass in " +
                               apDisplay.timeString(time.time() - t0))
        return False
    apDisplay.printMsg("finished scaling by mass in " +
                       apDisplay.timeString(time.time() - t0))
    return True
def setVolumeMass(volumefile, apix=1.0, mass=1.0, rna=0.0):
        """
        Set the contour of 1.0 to the desired mass (in kDa) of the
        macromolecule based on its density.

        use RNA to set the percentage of RNA in the structure
        (NOTE(review): rna is not referenced below — confirm intent)

        Returns True on success, False when the EMAN 'volume' program
        is missing or finishes suspiciously fast.
        """
        if isValidVolume(volumefile) is False:
                apDisplay.printError("Volume file is not valid")

        ### locate the EMAN bin directory via proc2d; 'volume' lives next to it
        ### NOTE(review): getExecPath may return None here (no die=True);
        ### os.path.dirname(None) would then raise — confirm proc2d is required
        procbin = apParam.getExecPath("proc2d")
        emandir = os.path.dirname(procbin)
        volumebin = os.path.join(emandir, "volume")
        if not os.path.isfile(volumebin):
                apDisplay.printWarning("failed to find volume program")
                return False
        command = "%s %s %.3f set=%.3f"%(       
                volumebin, volumefile, apix, mass
        )
        t0 = time.time()
        proc = subprocess.Popen(command, shell=True)
        proc.wait()
        ### a sub-10ms run means the program exited immediately (failure)
        if time.time()-t0 < 0.01:
                apDisplay.printWarning("failed to scale by mass in "+apDisplay.timeString(time.time()-t0))
                return False
        apDisplay.printMsg("finished scaling by mass in "+apDisplay.timeString(time.time()-t0))
        return True
Example #9
0
    def runMaxlike(self):
        """
        Filter the stack into a local copy, split it into single spider
        files, run xmipp_ml_align2d with a single reference class, and
        convert the resulting class average to MRC.
        """
        stackdata = apStack.getOnlyStackData(self.params['stackid'])
        apix = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
        stackfile = os.path.join(stackdata['path']['path'], stackdata['name'])

        ### process stack to local file (optional high/low-pass filtering)
        self.params['localstack'] = os.path.join(self.params['rundir'],
                                                 self.timestamp + ".hed")
        proccmd = "proc2d " + stackfile + " " + self.params[
            'localstack'] + " apix=" + str(apix)
        if self.params['highpass'] > 1:
            proccmd += " hp=" + str(self.params['highpass'])
        if self.params['lowpass'] > 1:
            proccmd += " lp=" + str(self.params['lowpass'])
        apEMAN.executeEmanCmd(proccmd, verbose=True)

        ### convert stack into single spider files
        self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(
            self.params['localstack'])

        ### setup Xmipp command
        aligntime = time.time()
        xmippopts = (
            " " + " -i " +
            os.path.join(self.params['rundir'], self.partlistdocfile) +
            " -nref 1 " + " -iter 10 " + " -o " +
            os.path.join(self.params['rundir'], "part" + self.timestamp) +
            " -fast -C 1e-18 ")
        ### angle step
        if self.params['rotate'] is True:
            xmippopts += " -psi_step 90 "
        else:
            xmippopts += " -psi_step 360 "
        ### convergence criteria
        if self.params['converge'] == "fast":
            xmippopts += " -eps 5e-3 "
        elif self.params['converge'] == "slow":
            xmippopts += " -eps 5e-8 "
        else:
            xmippopts += " -eps 5e-5 "
        ### mirrors
        if self.params['mirror'] is True:
            xmippopts += " -mirror "
        if self.params['maxshift'] is not None:
            xmippopts += " -max_shift %d " % (self.params['maxshift'])

        ### use single processor
        xmippexe = apParam.getExecPath("xmipp_ml_align2d", die=True)
        xmippcmd = xmippexe + " " + xmippopts
        self.writeXmippLog(xmippcmd)
        apEMAN.executeEmanCmd(xmippcmd, verbose=True, showcmd=True)
        aligntime = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(aligntime))

        ### create a quick mrc from the first reference class average
        emancmd = "proc2d part" + self.timestamp + "_ref000001.xmp average.mrc"
        apEMAN.executeEmanCmd(emancmd, verbose=True)
        apFile.removeStack(self.params['localstack'])
        apFile.removeFilePattern("partfiles/*")
    def runMaxlike(self):
        """
        Copy and filter the stack locally, break it into single spider
        files, run a one-reference xmipp_ml_align2d, then turn the class
        average into a quick MRC.
        """
        stackdata = apStack.getOnlyStackData(self.params["stackid"])
        apix = apStack.getStackPixelSizeFromStackId(self.params["stackid"])
        stackfile = os.path.join(stackdata["path"]["path"], stackdata["name"])

        ### process stack to local file
        localstack = os.path.join(self.params["rundir"], self.timestamp + ".hed")
        self.params["localstack"] = localstack
        proccmd = "".join(["proc2d ", stackfile, " ", localstack, " apix=", str(apix)])
        if self.params["highpass"] > 1:
            proccmd += " hp=" + str(self.params["highpass"])
        if self.params["lowpass"] > 1:
            proccmd += " lp=" + str(self.params["lowpass"])
        apEMAN.executeEmanCmd(proccmd, verbose=True)

        ### convert stack into single spider files
        self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(localstack)

        ### setup Xmipp command
        aligntime = time.time()
        optpieces = [
            " ",
            " -i ", os.path.join(self.params["rundir"], self.partlistdocfile),
            " -nref 1 ",
            " -iter 10 ",
            " -o ", os.path.join(self.params["rundir"], "part" + self.timestamp),
            " -fast -C 1e-18 ",
        ]
        xmippopts = "".join(optpieces)
        ### angle step
        xmippopts += " -psi_step 90 " if self.params["rotate"] is True else " -psi_step 360 "
        ### convergence criteria
        epsflags = {"fast": " -eps 5e-3 ", "slow": " -eps 5e-8 "}
        xmippopts += epsflags.get(self.params["converge"], " -eps 5e-5 ")
        ### mirrors
        if self.params["mirror"] is True:
            xmippopts += " -mirror "
        if self.params["maxshift"] is not None:
            xmippopts += " -max_shift %d " % (self.params["maxshift"])

        ### use single processor
        xmippexe = apParam.getExecPath("xmipp_ml_align2d", die=True)
        xmippcmd = xmippexe + " " + xmippopts
        self.writeXmippLog(xmippcmd)
        apEMAN.executeEmanCmd(xmippcmd, verbose=True, showcmd=True)
        aligntime = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " + apDisplay.timeString(aligntime))

        ### create a quick mrc
        emancmd = "proc2d part" + self.timestamp + "_ref000001.xmp average.mrc"
        apEMAN.executeEmanCmd(emancmd, verbose=True)
        apFile.removeStack(localstack)
        apFile.removeFilePattern("partfiles/*")
Example #11
0
	def getCANPath(self):
		"""Locate the CAN executable matching this machine's word size."""
		### os.uname()[-1] is the machine hardware name, e.g. 'x86_64'
		machine = os.uname()[-1]
		exename = 'can64_mp.exe' if '64' in machine else 'can32.exe'
		return apParam.getExecPath(exename, die=True)
Example #12
0
 def checkMPI(self):
         """
         Return the mpirun path if both mpirun and an MPI-linked
         xmipp_mpi_ml_refine3d binary are present, otherwise None.
         """
         mpiexe = apParam.getExecPath("mpirun")
         if mpiexe is None:
                 return None
         xmippexe = apParam.getExecPath("xmipp_mpi_ml_refine3d")
         if xmippexe is None:
                 return None
         ### confirm the binary is actually linked against an MPI library
         lddcmd = "ldd "+xmippexe+" | grep mpi"
         proc = subprocess.Popen(lddcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         proc.wait()
         lines = proc.stdout.readlines()
         #print "lines=", lines
         if lines and len(lines) > 0:
                 return mpiexe
         else:
                 apDisplay.printWarning("Failed to find mpirun")
                 print "lines=", lines
                 return None
	def checkMPI(self):
		mpiexe = apParam.getExecPath("mpirun")
		if mpiexe is None:
			return None
		xmippexe = apParam.getExecPath("xmipp_mpi_ml_refine3d")
		if xmippexe is None:
			return None
		lddcmd = "ldd "+xmippexe+" | grep mpi"
		proc = subprocess.Popen(lddcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
		proc.wait()
		lines = proc.stdout.readlines()
		#print "lines=", lines
		if lines and len(lines) > 0:
			return mpiexe
		else:
			apDisplay.printWarning("Failed to find mpirun")
			print "lines=", lines
			return None
Example #14
0
	def alignReferences(self, runparams):
		"""Align the reference images with xmipp_ml_align2d."""
		### build the xmipp_ml_align2d option string
		rundir = self.params['rundir']
		stamp = self.params['timestamp']
		optparts = [
			" ",
			" -i "+os.path.join(rundir, "part"+stamp+".sel"),
			" -nref 1 ",
			" -iter 10 ",
			" -o "+os.path.join(rundir, "ref"+stamp),
			" -psi_step 1 ",
			#" -fast -C 1e-18 ",
			" -eps 5e-8 ",
		]
		xmippopts = "".join(optparts)
		if runparams['mirror'] is True:
			xmippopts += " -mirror "
		xmippexe = apParam.getExecPath("xmipp_ml_align2d")
		xmippcmd = xmippexe+" "+xmippopts
		self.writeXmippLog(xmippcmd)
		apEMAN.executeEmanCmd(xmippcmd, verbose=True, showcmd=True)
def applyBfactor(infile, fscfile, apix, mass=None, outfile=None):
        """
        Sharpen a volume with the embfactor program using an FSC curve.

        infile -- input volume
        fscfile -- FSC curve file passed to -FSC
        apix -- pixel size in Angstroms (passed to -sampling)
        mass -- unused; the -molweight option is disabled below
        outfile -- output path; defaults to <infile>-bfactor.mrc

        Returns the corrected volume path, or the original infile when
        the program is missing or produced an invalid volume.
        """
        embfactorfile = "embfactor64.exe"
        embfactorexe = apParam.getExecPath("embfactor64.exe")
        if embfactorexe is None:
                apDisplay.printWarning("Could not find %s"%(embfactorfile))
                return infile
        if outfile is None:
                outfile = os.path.splitext(infile)[0]+"-bfactor.mrc"
        cmd = embfactorexe
        cmd += " -FSC %s"%(fscfile)
        cmd += " -sampling %.3f"%(apix)
        ### this option always failed for me -neil
        #if mass is not None:
        #       cmd += " -molweight %d"%(mass*1000)
        cmd += " %s"%(infile)
        cmd += " %s"%(outfile)
        apParam.runCmd(cmd, package="B-factor", verbose=True, showcmd=True)

        ### fall back to the uncorrected volume if the output is bad
        if not apVolume.isValidVolume(outfile):
                apDisplay.printWarning("B-factor correction failed %s"%(embfactorfile))
                return infile

        return outfile
def applyBfactor(infile, fscfile, apix, mass=None, outfile=None):
    """
    Sharpen *infile* with the embfactor program using the FSC curve.

    Returns the corrected volume path, or the original *infile* when the
    program is missing or the output volume is invalid.
    """
    embfactorfile = "embfactor64.exe"
    embfactorexe = apParam.getExecPath("embfactor64.exe")
    if embfactorexe is None:
        apDisplay.printWarning("Could not find %s" % (embfactorfile))
        return infile
    if outfile is None:
        outfile = os.path.splitext(infile)[0] + "-bfactor.mrc"
    ### assemble the command line piece by piece
    argparts = [
        " -FSC %s" % (fscfile),
        " -sampling %.3f" % (apix),
        ### this option always failed for me -neil
        #" -molweight %d"%(mass*1000),
        " %s" % (infile),
        " %s" % (outfile),
    ]
    cmd = embfactorexe + "".join(argparts)
    apParam.runCmd(cmd, package="B-factor", verbose=True, showcmd=True)

    if not apVolume.isValidVolume(outfile):
        apDisplay.printWarning("B-factor correction failed %s" %
                               (embfactorfile))
        return infile

    return outfile
	def start(self):
		self.insertMaxLikeJob()
		self.stack = {}
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
		self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
		if self.params['virtualdata'] is not None:
			self.stack['file'] = self.params['virtualdata']['filename']
		else:
			self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])
	
		self.estimateIterTime()
		self.dumpParameters()

		### process stack to local file
		self.params['localstack'] = os.path.join(self.params['rundir'], self.timestamp+".hed")

		a = proc2dLib.RunProc2d()
		a.setValue('infile',self.stack['file'])
		a.setValue('outfile',self.params['localstack'])
		a.setValue('apix',self.stack['apix'])
		a.setValue('bin',self.params['bin'])
		a.setValue('last',self.params['numpart']-1)
		a.setValue('append',False)

		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			a.setValue('lowpass',self.params['lowpass'])
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			a.setValue('highpass',self.params['highpass'])
		if self.params['invert'] is True:
			a.setValue('invert') is True

		if self.params['virtualdata'] is not None:
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)


		# clip not yet implemented
#		self.params['clipsize'] is not None:
#			clipsize = int(self.clipsize)*self.params['bin']
#			if clipsize % 2 == 1:
#				clipsize += 1 ### making sure that clipped boxsize is even
#			a.setValue('clip',clipsize)

		if self.params['virtualdata'] is not None:
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)

		#run proc2d
		a.run()

		if self.params['numpart'] != apFile.numImagesInStack(self.params['localstack']):
			apDisplay.printError("Missing particles in stack")


		### convert stack into single spider files
		self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(self.params['localstack'])
		### stack is needed by uploadMaxlike.py
		#apFile.removeStack(self.params['localstack'])

		### setup Xmipp command
		aligntime = time.time()

		xmippopts = ( " "
			+" -i "+os.path.join(self.params['rundir'], self.partlistdocfile)
			+" -nref "+str(self.params['numrefs'])
			+" -iter "+str(self.params['maxiter'])
			+" -o "+os.path.join(self.params['rundir'], "part"+self.timestamp)
			+" -psi_step "+str(self.params['psistep'])
		)
		### fast mode
		if self.params['fast'] is True:
			xmippopts += " -fast "
			if self.params['fastmode'] == "narrow":
				xmippopts += " -C 1e-10 "
			elif self.params['fastmode'] == "wide":
				xmippopts += " -C 1e-17 "
		### convergence criteria
		if self.params['converge'] == "fast":
			xmippopts += " -eps 5e-3 "
		elif self.params['converge'] == "slow":
			xmippopts += " -eps 5e-8 "
		else:
			xmippopts += " -eps 5e-5 "
		### mirrors
		if self.params['mirror'] is True:
			xmippopts += " -mirror "
		### normalization
		if self.params['norm'] is True:
			xmippopts += " -norm "
		### use student's T distribution
		if self.params['student'] is True:
			xmippopts += " -student "

		### write cluster job file
		if self.params['cluster'] is True:
			self.writeClusterJobFile()

		### find number of processors
		if self.params['nproc'] is None:
			nproc = nproc = apParam.getNumProcessors()
		else:
			nproc = self.params['nproc']
		mpirun = self.checkMPI()
		self.estimateIterTime()
		if nproc > 2 and mpirun is not None:
			### use multi-processor
			apDisplay.printColor("Using "+str(nproc)+" processors!", "green")
			xmippexe = apParam.getExecPath("xmipp_mpi_ml_align2d", die=True)
			mpiruncmd = mpirun+" -np "+str(nproc)+" "+xmippexe+" "+xmippopts
			self.writeXmippLog(mpiruncmd)
			apParam.runCmd(mpiruncmd, package="Xmipp", verbose=True, showcmd=True)
		else:
			### use single processor
			xmippexe = apParam.getExecPath("xmipp_ml_align2d", die=True)
			xmippcmd = xmippexe+" "+xmippopts
			self.writeXmippLog(xmippcmd)
			apParam.runCmd(xmippcmd, package="Xmipp", verbose=True, showcmd=True)
		aligntime = time.time() - aligntime
		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))

		### minor post-processing
		self.createReferenceStack()
		self.readyUploadFlag()
		self.dumpParameters()
               "xmipp_mpi_ml_refine3d" : (package,""),
               "xmipp_protocols" : (package,""),
        })
### Grigorieff Lab programs (CTF estimation, resolution, picking)
package = "Grigorieff Lab"
packageDict.update({
               "ctffind64.exe" : (package,""),
               "ctftilt64.exe" : (package,""),
               "rmeasure64.exe" : (package,"Try 'which rmeasure32.exe'(or rmeasure.exe, or rmeasure) at a command promt."),
               "signature64.exe" : (package,"For Signature Particle Picking. Try 'which signature32.exe' at a command prompt."),
        })
### IMOD tomography suite
package = "IMOD"
packageDict.update({
               "imod" : (package,"For Tomography Alignment and Reconstruction"),
        })
### PROTOMO tomography alignment scripts
package = "PROTOMO"
packageDict.update({
               "tomo-refine.sh" : (package,"For Tomography Alignment"),
               "tomo-fit.sh" : (package,"For Tomography Alignment"),
        })

outString = "The following third party processing packages could not be found...\n\n"

### report every registered executable that is missing from the PATH
# NOTE: dict.iteritems() is Python 2 only (use .items() under Python 3)
for nameKey, desc in packageDict.iteritems():
    pathValue = apParam.getExecPath(nameKey, die=False)
    if pathValue is None:
        outString += "|\tFrom %s, (%s) %s\n|\n"%(desc[0], nameKey,  desc[1])
        
outString += "For installation instructions visit:\n\t http://ami.scripps.edu/redmine/projects/appion/wiki/Processing_Server_Installation\n"
apDisplay.printColor(outString,"cyan")

	def checkMPI(self):
		"""Return the path of mpirun, or None when it is not installed."""
		### getExecPath already yields None when the binary is missing
		return apParam.getExecPath("mpirun")
	def start(self):
		self.addToLog('.... Setting up new ISAC job ....')
		self.addToLog('.... Making command for stack pre-processing ....')
		self.stack = {}
		self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
		self.stack['file'] = os.path.join(self.stack['data']['path']['path'], self.stack['data']['name'])

		### send file to remotehost
		tasks = {}
		sfhed = self.stack['file'][:-4]+".hed"
		sfimg = self.stack['file'][:-4]+".img"
		tasks = self.addToTasks(tasks,"rsync -e 'ssh -o StrictHostKeyChecking=no' -rotouv --partial %s %s:%s/%s" % (sfhed,self.params['localhost'],self.params['remoterundir'],"start1.hed"))
		tasks = self.addToTasks(tasks,"rsync -e 'ssh -o StrictHostKeyChecking=no' -rotouv --partial %s %s:%s/%s" % (sfimg,self.params['localhost'],self.params['remoterundir'],"start1.img"))

		### write Sparx jobfile: process stack to local file
		if self.params['timestamp'] is None:
			apDisplay.printMsg("creating timestamp")
			self.params['timestamp'] = self.timestamp
		self.params['localstack'] = os.path.join(self.params['rundir'], self.params['timestamp']+".hed")
		if os.path.isfile(self.params['localstack']):
			apFile.removeStack(self.params['localstack'])
		proccmd = "proc2d "+self.stack['file']+" "+self.params['localstack']+" apix="+str(self.stack['apix'])
		if self.params['bin'] > 1 or self.params['clipsize'] is not None:
			clipsize = int(self.clipsize)*self.params['bin']
			if clipsize % 2 == 1:
				clipsize += 1 ### making sure that clipped boxsize is even
			proccmd += " shrink=%d clip=%d,%d "%(self.params['bin'],clipsize,clipsize)
		proccmd += " last="+str(self.params['numpart']-1)
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			proccmd += " hp="+str(self.params['highpass'])
		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			proccmd += " lp="+str(self.params['lowpass'])
#		apParam.runCmd(proccmd, "EMAN", verbose=True)
		self.addSimpleCommand('cd %s' % self.params['rundir'])
		self.addSimpleCommand(proccmd)
		sparxcmd = "sxcpy.py %s %s_1.hdf" % (self.params['localstack'], self.params['localstack'][:-4])
#		apParam.runCmd(sparxcmd, "SPARX", verbose=True)
		self.addSimpleCommand(sparxcmd)
		self.addSimpleCommand("")

		### write Sparx jobfile: run ISAC
		for i in range(self.params['generations']):
			sparxopts = " %s_%d.hdf" % (os.path.join(self.params['localstack'][:-4]), (i+1))
			if self.params['ir'] is not None:
				sparxopts += " --ir %d" % int(float(apRecon.getComponentFromVector(self.params['ir'], i, splitter=":")))
			if self.params['ou'] is not None:
				sparxopts += " --ou %d" % int(float(apRecon.getComponentFromVector(self.params['ou'], i, splitter=":")))
			if self.params['rs'] is not None:
				sparxopts += " --rs %d" % int(float(apRecon.getComponentFromVector(self.params['rs'], i, splitter=":")))
			if self.params['ts'] is not None:
				sparxopts += " --ts %.1f" % int(float(apRecon.getComponentFromVector(self.params['ts'], i, splitter=":")))
			if self.params['xr'] is not None:
				sparxopts += " --xr %.1f" % int(float(apRecon.getComponentFromVector(self.params['xr'], i, splitter=":")))
			if self.params['yr'] is not None:
				sparxopts += " --yr %.1f" % int(float(apRecon.getComponentFromVector(self.params['yr'], i, splitter=":")))
			if self.params['maxit'] is not None:
				sparxopts += " --maxit %d" % int(float(apRecon.getComponentFromVector(self.params['maxit'], i, splitter=":")))
			if self.params['FL'] is not None:
				sparxopts += " --FL %.2f" % int(float(apRecon.getComponentFromVector(self.params['FL'], i, splitter=":")))
			if self.params['FH'] is not None:
				sparxopts += " --FH %.2f" % int(float(apRecon.getComponentFromVector(self.params['FH'], i, splitter=":")))
			if self.params['FF'] is not None:
				sparxopts += " --FF %.2f" % int(float(apRecon.getComponentFromVector(self.params['FF'], i, splitter=":")))
			if self.params['init_iter'] is not None:
				sparxopts += " --init_iter %d" % int(float(apRecon.getComponentFromVector(self.params['init_iter'], i, splitter=":")))
			if self.params['main_iter'] is not None:
				sparxopts += " --main_iter %d" % int(float(apRecon.getComponentFromVector(self.params['main_iter'], i, splitter=":")))
			if self.params['iter_reali'] is not None:
				sparxopts += " --iter_reali %d" % int(float(apRecon.getComponentFromVector(self.params['iter_reali'], i, splitter=":")))
			if self.params['match_first'] is not None:
				sparxopts += " --match_first %d" % int(float(apRecon.getComponentFromVector(self.params['match_first'], i, splitter=":")))
			if self.params['max_round'] is not None:
				sparxopts += " --max_round %d" % int(float(apRecon.getComponentFromVector(self.params['max_round'], i, splitter=":")))
			if self.params['match_second'] is not None:
				sparxopts += " --match_second %d" % int(float(apRecon.getComponentFromVector(self.params['match_second'], i, splitter=":")))
			if self.params['stab_ali'] is not None:
				sparxopts += " --stab_ali %d" % int(float(apRecon.getComponentFromVector(self.params['stab_ali'], i, splitter=":")))
			if self.params['thld_err'] is not None:
				sparxopts += " --thld_err %.2f" % int(float(apRecon.getComponentFromVector(self.params['thld_err'], i, splitter=":")))
			if self.params['indep_run'] is not None:
				sparxopts += " --indep_run %d" % int(float(apRecon.getComponentFromVector(self.params['indep_run'], i, splitter=":")))
			if self.params['thld_grp'] is not None:
				sparxopts += " --thld_grp %d" % int(float(apRecon.getComponentFromVector(self.params['thld_grp'], i, splitter=":")))
			if self.params['img_per_grp'] is not None:
				sparxopts += " --img_per_grp %d" % int(apRecon.getComponentFromVector(self.params['img_per_grp'], i))
			sparxopts += " --generation %d" % (i+1)
			
			sparxexe = apParam.getExecPath("sxisac.py", die=True)
			mpiruncmd = self.mpirun+" -np "+str(self.params['nproc'])+" "+sparxexe+" "+sparxopts
			bn = os.path.basename(self.params['localstack'])[:-4]
			e2cmd = "e2proc2d.py %s_%d.hdf %s_%d.hdf --list=\"generation_%d_unaccounted.txt\"" % \
				(bn, i+1, bn, i+2, i+1)
			self.addSimpleCommand(mpiruncmd)
			self.addSimpleCommand(e2cmd)

#		print self.tasks
#		print self.command_list
#		self.writeCommandListToFile()
		apParam.dumpParameters(self.params, "isac-"+self.params['timestamp']+"-params.pickle")
	def start(self):
		"""
		Run the SIMPLE single-particle pipeline end to end:
		optional stack centering, conversion to SPIDER format,
		2D alignment/clustering ('cluster'), conversion of the class
		averages to Fourier stack format ('spi_to_fim'), ab initio 3D
		reconstruction ('origami'), and upload of all results to the
		database.
		"""
		### simple is written in Fortran, which cannot take inputs of certain length, therefore one needs
		### to change to the directory to minimize the filename length, in particular for the stack
		os.chdir(self.params['rundir'])

		### stack needs to be centered
		if self.params['no_center'] is False:
			if os.path.isfile(os.path.join(self.params['rundir'], "ali.hed")):
				apFile.removeStack(os.path.join(self.params['rundir'], "ali.hed"))
			centstack = os.path.join(self.params['rundir'], "ali.hed")
			centcmd = "cenalignint %s > cenalignint.log" % (self.stack['file'])
			apParam.runCmd(centcmd, "EMAN")

		### process stack to local file
		if self.params['timestamp'] is None:
			apDisplay.printMsg("creating timestamp")
			self.params['timestamp'] = self.timestamp
		self.params['localstack'] = os.path.join(self.params['rundir'], self.params['timestamp']+".spi")

		if os.path.isfile(self.params['localstack']):
			apFile.removeFile(self.params['localstack'])
		if self.params['no_center'] is False:
			proccmd = "proc2d "+centstack+" "+self.params['localstack']+" apix="+str(self.stack['apix'])
		else:
			proccmd = "proc2d "+self.stack['file']+" "+self.params['localstack']+" apix="+str(self.stack['apix'])
		if self.params['bin'] > 1 or self.params['clipsize'] is not None:
			proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'], self.boxsize, self.boxsize)
		proccmd += " last="+str(self.params['numpart']-1)
		proccmd += " spiderswap"
		apParam.runCmd(proccmd, "EMAN", verbose=True)

		### setup SIMPLE 2D clustering command
		aligntime = time.time()
		simpleopts = (""
			+" stk=%s" % os.path.basename(self.params['localstack'])
			+" box=%d" % self.boxsize
			+" nptcls=%d" % self.params['numpart']
			+" smpd=%.3f" % self.apix
			+" ring2=%d" % self.params['ring2']
			+" ncls=%d" % self.params['ncls']
			+" minp=%d" % self.params['minp']
			+" nvars=%d" % self.params['nvars']
			+" nthr=%d" % self.params['nproc']
		)
		if self.params['no_kmeans'] is True:
			simpleopts += " kmeans=off"
		if self.params['nran'] is not None:
			# BUG FIX: the option previously lacked a leading space and was
			# concatenated onto the preceding option (e.g. "kmeans=offnran=...")
			simpleopts += " nran=%d" % self.params['nran']

		### SIMPLE 2D clustering
		apDisplay.printColor("Using "+str(self.params['nproc'])+" processors!", "green")
		simpleexe = apParam.getExecPath("cluster", die=True)
		simplecmd = "%s %s" % (simpleexe, simpleopts)
		self.writeSimpleLog(simplecmd)
		apParam.runCmd(simplecmd, package="SIMPLE", verbose=True, showcmd=True, logfile="cluster.std")
		self.params['runtime'] = time.time() - aligntime
		apDisplay.printMsg("Alignment & Classification time: "+apDisplay.timeString(self.params['runtime']))

		### SIMPLE spider to Fourier format
		clsavgs = "cavgstk.spi"
		if not os.path.isfile(os.path.join(self.params['rundir'], clsavgs)):
			apDisplay.printError("class averages were not created! try rerunning with centering, more particles, or less ppc")
		try:
			nptcls = spider.getSpiderHeader(clsavgs)[-2]
		except Exception:
			# header could not be read; fall back to the requested class count
			nptcls = self.params['ncls']
			apDisplay.printWarning("class average file may not have been created! Please check existence of file cavgstk.spi")
		projfile = "projs"
		projext = ".fim"
		simpleexe = apParam.getExecPath("spi_to_fim", die=True)
		simpleopts = (""
			+" stk=%s" % clsavgs
			+" box=%d" % self.boxsize
			+" nptcls=%d" % nptcls
			+" smpd=%.3f" % self.apix
			+" outbdy=%s" % projfile
			+" msk=%d" % self.params['mask']
		)
		simplecmd = "%s %s" % (simpleexe, simpleopts)
		self.writeSimpleLog(simplecmd)
		apParam.runCmd(simplecmd, package="SIMPLE", verbose=True, showcmd=True, logfile="spi_to_fim.std")

		### SIMPLE origami, ab initio 3D reconstruction
		refinetime = time.time()
		simpleexe = apParam.getExecPath("origami", die=True)
		simpleopts = (""
			+" fstk=%s" % (projfile+projext)
			+" froms=%d" % self.params['froms']
			+" tos=%d" % self.params['tos']
			+" lp=%d" % self.params['lp']
			+" hp=%d" % self.params['hp']
			+" maxits=%d" % self.params['maxits']
			+" msk=%d" % self.params['mask']
			+" mw=%d" % self.params['mw']
			+" frac=%.3f" % self.params['frac']
			+" amsklp=%d" % self.params['amsklp']
			+" edge=%d" % self.params['edge']
			+" trs=%d" % self.params['trs']
			+" nthr=%d" % self.params['nproc']
		)
		simplecmd = "%s %s" % (simpleexe, simpleopts)
		self.writeSimpleLog(simplecmd)
		apParam.runCmd(simplecmd, package="SIMPLE", verbose=True, showcmd=True, logfile="origami.std")
		refinetime = time.time() - refinetime
		apDisplay.printMsg("Origami reconstruction time: "+apDisplay.timeString(refinetime))

		### minor post-processing
		self.clearIntermediateFiles()
		apParam.dumpParameters(self.params, "simple-"+self.params['timestamp']+"-params.pickle")

		### upload results
		self.runparams = apParam.readRunParameters("simple-"+self.params['timestamp']+"-params.pickle")

		### create average of aligned and clustered stacks, convert to IMAGIC
		alignedStackSpi = "inplalgnstk.spi"
		alignedStack = "inplalgnstk.hed"
		if os.path.isfile(alignedStack):
			apFile.removeStack(alignedStack)
		emancmd = "proc2d %s %s flip" % (alignedStackSpi, alignedStack)
		apParam.runCmd(emancmd, "EMAN")
		clusterStackSpi = "cavgstk.spi"
		clusterStack = "cavgstk.hed"
		if os.path.isfile(clusterStack):
			apFile.removeStack(clusterStack)
		emancmd = "proc2d %s %s flip" % (clusterStackSpi, clusterStack)
		apParam.runCmd(emancmd, "EMAN")

		### parse alignment and classification results
		if self.params['no_center'] is False:
			self.alignD = self.getAlignParameters(centparams="cenalignint.log")
		else:
			self.alignD = self.getAlignParameters()
		if self.params['no_kmeans'] is False:
			self.classD = self.getClassification("kmeans.spi", clusterStack)
		else:
			self.classD = self.getClassification("hcl.spi", clusterStack)

		### upload to database
		self.insertSIMPLEAlignParamsIntoDatabase()
		self.insertAlignStackRunIntoDatabase(alignedStack, clusterStack)
		self.calcResolution(alignedStack)
		self.insertAlignParticlesIntoDatabase()
		self.insertClusterRunIntoDatabase()
		self.insertClusterStackIntoDatabase(clusterStack, len(self.classD))
		self.insertSIMPLEOrigamiParamsIntoDatabase()
               "xmipp_mpi_ml_refine3d" : (package,""),
               "xmipp_protocols" : (package,""),
	})
# Grigorieff-lab tools: CTF estimation (ctffind/ctftilt), resolution
# measurement (rmeasure) and template-based particle picking (signature).
package = "Grigorieff Lab"
packageDict.update({
               "ctffind64.exe" : (package,""),
               "ctftilt64.exe" : (package,""),
               "rmeasure64.exe" : (package,"Try 'which rmeasure32.exe'(or rmeasure.exe, or rmeasure) at a command promt."),
               "signature64.exe" : (package,"For Signature Particle Picking. Try 'which signature32.exe' at a command prompt."),
	})
# IMOD tomography package.
package = "IMOD"
packageDict.update({
               "imod" : (package,"For Tomography Alignment and Reconstruction"),
	})
# PROTOMO tilt-series alignment scripts.
package = "PROTOMO"
packageDict.update({
               "tomo-refine.sh" : (package,"For Tomography Alignment"),
               "tomo-fit.sh" : (package,"For Tomography Alignment"),
	})

# Report every third-party executable that cannot be located on this host,
# together with the per-package installation hint stored in packageDict.
outString = "The following third party processing packages could not be found...\n\n"
outString += "".join(
    "|\tFrom %s, (%s) %s\n|\n" % (info[0], execName, info[1])
    for execName, info in packageDict.iteritems()
    if apParam.getExecPath(execName, die=False) is None
)
outString += "For installation instructions visit:\n\t http://ami.scripps.edu/redmine/projects/appion/wiki/Processing_Server_Installation\n"
apDisplay.printColor(outString, "cyan")

	def normalizeVolume(self, volfile):
		"""
mkdir CorrectGreyscale

xmipp_header_extract -i experimental_images.sel -o experimental_images.doc

xmipp_angular_project_library  -i bad_greyscale.vol -experimental_images experimental_images.doc -o CorrectGreyscale/ref -sampling_rate 15 -sym c1h -compute_neighbors -angular_distance -1

xmipp_angular_projection_matching  -i experimental_images.doc -o CorrectGreyscale/corrected_reference -ref CorrectGreyscale/ref

xmipp_mpi_angular_class_average  -i CorrectGreyscale/corrected_reference.doc -lib CorrectGreyscale/ref_angles.doc -o CorrectGreyscale/corrected_reference

xmipp_mpi_reconstruct_wbp  -i CorrectGreyscale/corrected_reference_classes.sel -o corrected_reference.vol -threshold 0.02 -sym c1  -use_each_image -weight
		"""
		volroot = os.path.splitext(volfile)[0]
		volroot = re.sub("\.", "_", volroot)
		normfolder = os.path.join(self.params['rundir'], volroot)
		apParam.createDirectory(normfolder)

		### Create Euler doc file for particles
		partselfile = os.path.join(self.params['rundir'], self.partlistdocfile)
		parteulerdoc = os.path.join(normfolder, "parteulers.doc")
		xmippcmd = "xmipp_header_extract -i %s -o %s"%(partselfile, parteulerdoc)
		apEMAN.executeEmanCmd(xmippcmd, verbose=False)
		if not os.path.isfile(parteulerdoc):
			apDisplay.printError("Could not normalize volume for file: "+volfile)

		### Create projections
		refprefix = os.path.join(normfolder, "refproj"+self.timestamp)
		if self.nproc > 1 and self.mpirun is not None:
			xmipppath = apParam.getExecPath("xmipp_mpi_angular_project_library", die=True)
			xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
		else:
			xmippexe = "xmipp_angular_project_library"
		xmippcmd = ("%s -i %s -experimental_images %s -o %s"
			%(xmippexe, volfile, parteulerdoc, refprefix))
		xmippcmd += " -sampling_rate %d -compute_neighbors -angular_distance -1 -perturb 0.5"%(self.params['phi'])
		if self.params['symmetry'] is not None:
			xmippcmd += " -sym "+str(self.params['symmetry'])
		apEMAN.executeEmanCmd(xmippcmd, verbose=False)
		refs = glob.glob(refprefix+"*.xmp")
		if not refs:
			apDisplay.printError("Could not normalize volume for file: "+volfile)

		### Match projections
		fixprefix = os.path.join(normfolder, "match"+self.timestamp)
		if self.nproc > 1 and self.mpirun is not None:
			xmipppath = apParam.getExecPath("xmipp_mpi_angular_projection_matching", die=True)
			xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
		else:
			xmippexe = "xmipp_angular_projection_matching"
		xmippcmd = ("%s -i %s -o %s -ref %s"
			%(xmippexe, parteulerdoc, fixprefix, refprefix))
		apEMAN.executeEmanCmd(xmippcmd, verbose=False)
		docfile = fixprefix+".doc"
		if not os.path.isfile(docfile):
			apDisplay.printError("Could not normalize volume for file: "+volfile)

		### Create projection averages
		correctprefix = os.path.join(normfolder, "correctproj"+self.timestamp)
		if self.nproc > 1 and self.mpirun is not None:
			xmipppath = apParam.getExecPath("xmipp_mpi_angular_class_average", die=True)
			xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
		else:
			xmippexe = "xmipp_angular_class_average"
		xmippcmd = ("%s -i %s.doc -lib %s_angles.doc -o %s"
			%(xmippexe, fixprefix, refprefix, correctprefix))
		apEMAN.executeEmanCmd(xmippcmd, verbose=False)
		refs = glob.glob(correctprefix+"*.xmp")
		if not refs:
			apDisplay.printError("Could not normalize volume for file: "+volfile)

		### Backproject
		correctvolfile = os.path.join(normfolder, "volume"+self.timestamp+".spi")
		if self.nproc > 1 and self.mpirun is not None:
			xmipppath = apParam.getExecPath("xmipp_mpi_reconstruct_wbp", die=True)
			xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
		else:
			xmippexe = "xmipp_reconstruct_wbp"
		xmippcmd = ("%s -i %s_classes.sel -o %s"
			%(xmippexe, correctprefix, correctvolfile))
		xmippcmd += " -threshold 0.02 -use_each_image -weight"
		if self.params['symmetry'] is not None:
			xmippcmd += " -sym "+str(self.params['symmetry'])
		apEMAN.executeEmanCmd(xmippcmd, verbose=False)

		if not os.path.isfile(correctvolfile):
			apDisplay.printError("Could not normalize volume for file: "+volfile)
		return correctvolfile
    def start(self):
        """
        Write a SPARX ISAC job file: queue rsync of the stack to the remote
        host, add the proc2d/sxcpy.py stack pre-processing commands, then one
        sxisac.py + e2proc2d.py command pair per generation; finally pickle
        the run parameters.
        """
        self.addToLog('.... Setting up new ISAC job ....')
        self.addToLog('.... Making command for stack pre-processing ....')
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid'])
        self.stack['part'] = apStack.getOneParticleFromStackId(
            self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])

        ### send file to remotehost
        tasks = {}
        sfhed = self.stack['file'][:-4] + ".hed"
        sfimg = self.stack['file'][:-4] + ".img"
        tasks = self.addToTasks(
            tasks,
            "rsync -e 'ssh -o StrictHostKeyChecking=no' -rotouv --partial %s %s:%s/%s"
            % (sfhed, self.params['localhost'], self.params['remoterundir'],
               "start1.hed"))
        tasks = self.addToTasks(
            tasks,
            "rsync -e 'ssh -o StrictHostKeyChecking=no' -rotouv --partial %s %s:%s/%s"
            % (sfimg, self.params['localhost'], self.params['remoterundir'],
               "start1.img"))

        ### write Sparx jobfile: process stack to local file
        if self.params['timestamp'] is None:
            apDisplay.printMsg("creating timestamp")
            self.params['timestamp'] = self.timestamp
        self.params['localstack'] = os.path.join(
            self.params['rundir'], self.params['timestamp'] + ".hed")
        if os.path.isfile(self.params['localstack']):
            apFile.removeStack(self.params['localstack'])
        proccmd = ("proc2d " + self.stack['file'] + " "
                   + self.params['localstack'] + " apix="
                   + str(self.stack['apix']))
        if self.params['bin'] > 1 or self.params['clipsize'] is not None:
            clipsize = int(self.clipsize) * self.params['bin']
            if clipsize % 2 == 1:
                clipsize += 1  ### making sure that clipped boxsize is even
            proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'],
                                                   clipsize, clipsize)
        proccmd += " last=" + str(self.params['numpart'] - 1)
        self.addSimpleCommand('cd %s' % self.params['rundir'])
        self.addSimpleCommand(proccmd)
        # convert the IMAGIC stack to HDF for SPARX
        sparxcmd = "sxcpy.py %s %s_1.hdf" % (self.params['localstack'],
                                             self.params['localstack'][:-4])
        self.addSimpleCommand(sparxcmd)
        self.addSimpleCommand("")

        ### write Sparx jobfile: run ISAC
        # (option name, printf format) pairs, in command-line order; the
        # format decides whether the per-generation component is an int
        # or a float value
        isacOptions = (
            ('ir', '%d'), ('ou', '%d'), ('rs', '%d'),
            ('ts', '%.1f'), ('xr', '%.1f'), ('yr', '%.1f'),
            ('maxit', '%d'), ('FL', '%.2f'), ('FH', '%.2f'), ('FF', '%.2f'),
            ('init_iter', '%d'), ('main_iter', '%d'), ('iter_reali', '%d'),
            ('match_first', '%d'), ('max_round', '%d'), ('match_second', '%d'),
            ('stab_ali', '%d'), ('thld_err', '%.2f'), ('indep_run', '%d'),
            ('thld_grp', '%d'),
        )
        for i in range(self.params['generations']):
            sparxopts = " %s_%d.hdf" % (os.path.join(
                self.params['localstack'][:-4]), (i + 1))
            for name, fmt in isacOptions:
                if self.params[name] is None:
                    continue
                value = float(apRecon.getComponentFromVector(
                    self.params[name], i, splitter=":"))
                # BUG FIX: float-formatted options (ts, xr, yr, FL, FH, FF,
                # thld_err) were previously truncated through int() before
                # formatting, zeroing fractional values such as FL=0.2
                if fmt == '%d':
                    value = int(value)
                sparxopts += (" --%s " + fmt) % (name, value)
            if self.params['img_per_grp'] is not None:
                sparxopts += " --img_per_grp %d" % int(
                    apRecon.getComponentFromVector(self.params['img_per_grp'],
                                                   i))
            sparxopts += " --generation %d" % (i + 1)

            sparxexe = apParam.getExecPath("sxisac.py", die=True)
            mpiruncmd = (self.mpirun + " -np " + str(self.params['nproc'])
                         + " " + sparxexe + " " + sparxopts)
            bn = os.path.basename(self.params['localstack'])[:-4]
            # carry the unaccounted particles forward into the next generation
            e2cmd = ("e2proc2d.py %s_%d.hdf %s_%d.hdf "
                     "--list=\"generation_%d_unaccounted.txt\""
                     % (bn, i + 1, bn, i + 2, i + 1))
            self.addSimpleCommand(mpiruncmd)
            self.addSimpleCommand(e2cmd)

        apParam.dumpParameters(
            self.params, "isac-" + self.params['timestamp'] + "-params.pickle")
        def start(self):
                """
                Run Xmipp maximum-likelihood 2D alignment (ml_align2d).

                Pre-processes the stack with EMAN proc2d (binning/clipping,
                then high/low-pass filtering), splits it into single SPIDER
                files, assembles the ml_align2d option string and executes
                it, via MPI when more than two processors and an mpirun
                binary are available.
                """
                self.insertMaxLikeJob()
                self.stack = {}
                self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
                self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
                self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])
                self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
                self.stack['file'] = os.path.join(self.stack['data']['path']['path'], self.stack['data']['name'])
                self.estimateIterTime()
                self.dumpParameters()

                ### process stack to local temp file (binning/clipping only)
                proccmd = "proc2d "+self.stack['file']+" temp.hed apix="+str(self.stack['apix'])
                if self.params['bin'] > 1 or self.params['clipsize'] is not None:
                        clipsize = int(self.clipsize)*self.params['bin']
                        proccmd += " shrink=%d clip=%d,%d "%(self.params['bin'],clipsize,clipsize)
                proccmd += " last="+str(self.params['numpart']-1)
                apEMAN.executeEmanCmd(proccmd, verbose=True)

                ### process stack to final file (filtering), then drop temp
                self.params['localstack'] = os.path.join(self.params['rundir'], self.timestamp+".hed")
                proccmd = "proc2d temp.hed "+self.params['localstack']+" apix="+str(self.stack['apix']*self.params['bin'])
                if self.params['highpass'] is not None and self.params['highpass'] > 1:
                        proccmd += " hp="+str(self.params['highpass'])
                if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
                        proccmd += " lp="+str(self.params['lowpass'])
                apEMAN.executeEmanCmd(proccmd, verbose=True)
                apFile.removeStack("temp.hed")

                ### convert stack into single spider files
                self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(self.params['localstack'])

                ### initialize template references
                templateselfile = self.initializeTemplates()

                ### setup Xmipp command
                aligntime = time.time()

                xmippopts = ( " "
                        +" -i "+os.path.join(self.params['rundir'], self.partlistdocfile)
                        +" -iter "+str(self.params['maxiter'])
                        +" -ref "+templateselfile
                        +" -o "+os.path.join(self.params['rundir'], "part"+self.timestamp)
                        +" -psi_step "+str(self.params['psistep'])
                )
                ### fast mode
                if self.params['fast'] is True:
                        xmippopts += " -fast "
                        if self.params['fastmode'] == "narrow":
                                xmippopts += " -C 1e-10 "
                        elif self.params['fastmode'] == "wide":
                                xmippopts += " -C 1e-18 "
                ### convergence criteria
                if self.params['converge'] == "fast":
                        xmippopts += " -eps 5e-3 "
                elif self.params['converge'] == "slow":
                        xmippopts += " -eps 5e-8 "
                else:
                        xmippopts += " -eps 5e-5 "
                ### mirrors
                if self.params['mirror'] is True:
                        xmippopts += " -mirror "
                ### normalization
                if self.params['norm'] is True:
                        xmippopts += " -norm "

                ### find number of processors
                if self.params['nproc'] is None:
                        # BUG FIX: was the redundant "nproc = nproc = ..." double assignment
                        nproc = apParam.getNumProcessors()
                else:
                        nproc = self.params['nproc']
                mpirun = self.checkMPI()
                if nproc > 2 and mpirun is not None:
                        ### use multi-processor
                        apDisplay.printColor("Using "+str(nproc-1)+" processors!", "green")
                        xmippexe = apParam.getExecPath("xmipp_mpi_ml_align2d", die=True)
                        mpiruncmd = mpirun+" -np "+str(nproc-1)+" "+xmippexe+" "+xmippopts
                        self.writeXmippLog(mpiruncmd)
                        apEMAN.executeEmanCmd(mpiruncmd, verbose=True, showcmd=True)
                else:
                        ### use single processor
                        xmippexe = apParam.getExecPath("xmipp_ml_align2d", die=True)
                        xmippcmd = xmippexe+" "+xmippopts
                        self.writeXmippLog(xmippcmd)
                        apEMAN.executeEmanCmd(xmippcmd, verbose=True, showcmd=True)
                aligntime = time.time() - aligntime
                apDisplay.printMsg("Alignment time: "+apDisplay.timeString(aligntime))

                ### minor post-processing
                self.createReferenceStack()
                self.readyUploadFlag()
                self.dumpParameters()
 def checkMPI(self):
     """Return the path to the mpirun executable, or None when MPI is absent."""
     # getExecPath already yields None when the binary cannot be located,
     # so its result can be returned directly.
     return apParam.getExecPath("mpirun")
Example #27
0
    def start(self):
        """
        Run Xmipp maximum-likelihood 2D alignment (ml_align2d).

        Pre-processes the stack with EMAN proc2d (binning/clipping, then
        high/low-pass filtering), splits it into single SPIDER files,
        assembles the ml_align2d option string and executes it, via MPI
        when more than two processors and an mpirun binary are available.
        """
        self.insertMaxLikeJob()
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid'])
        self.stack['part'] = apStack.getOneParticleFromStackId(
            self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])
        self.estimateIterTime()
        self.dumpParameters()

        ### process stack to local temp file (binning/clipping only)
        proccmd = "proc2d " + self.stack['file'] + " temp.hed apix=" + str(
            self.stack['apix'])
        if self.params['bin'] > 1 or self.params['clipsize'] is not None:
            clipsize = int(self.clipsize) * self.params['bin']
            proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'],
                                                   clipsize, clipsize)
        proccmd += " last=" + str(self.params['numpart'] - 1)
        apEMAN.executeEmanCmd(proccmd, verbose=True)

        ### process stack to final file (filtering), then drop the temp stack
        self.params['localstack'] = os.path.join(self.params['rundir'],
                                                 self.timestamp + ".hed")
        proccmd = "proc2d temp.hed " + self.params[
            'localstack'] + " apix=" + str(
                self.stack['apix'] * self.params['bin'])
        if self.params['highpass'] is not None and self.params['highpass'] > 1:
            proccmd += " hp=" + str(self.params['highpass'])
        if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
            proccmd += " lp=" + str(self.params['lowpass'])
        apEMAN.executeEmanCmd(proccmd, verbose=True)
        apFile.removeStack("temp.hed")

        ### convert stack into single spider files
        self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(
            self.params['localstack'])

        ### initialize template references
        templateselfile = self.initializeTemplates()

        ### setup Xmipp command
        aligntime = time.time()

        xmippopts = (
            " " + " -i " +
            os.path.join(self.params['rundir'], self.partlistdocfile) +
            " -iter " + str(self.params['maxiter']) + " -ref " +
            templateselfile + " -o " +
            os.path.join(self.params['rundir'], "part" + self.timestamp) +
            " -psi_step " + str(self.params['psistep']))
        ### fast mode
        if self.params['fast'] is True:
            xmippopts += " -fast "
            if self.params['fastmode'] == "narrow":
                xmippopts += " -C 1e-10 "
            elif self.params['fastmode'] == "wide":
                xmippopts += " -C 1e-18 "
        ### convergence criteria
        if self.params['converge'] == "fast":
            xmippopts += " -eps 5e-3 "
        elif self.params['converge'] == "slow":
            xmippopts += " -eps 5e-8 "
        else:
            xmippopts += " -eps 5e-5 "
        ### mirrors
        if self.params['mirror'] is True:
            xmippopts += " -mirror "
        ### normalization
        if self.params['norm'] is True:
            xmippopts += " -norm "

        ### find number of processors
        if self.params['nproc'] is None:
            # BUG FIX: was the redundant "nproc = nproc = ..." double assignment
            nproc = apParam.getNumProcessors()
        else:
            nproc = self.params['nproc']
        mpirun = self.checkMPI()
        if nproc > 2 and mpirun is not None:
            ### use multi-processor
            apDisplay.printColor("Using " + str(nproc - 1) + " processors!",
                                 "green")
            xmippexe = apParam.getExecPath("xmipp_mpi_ml_align2d", die=True)
            mpiruncmd = mpirun + " -np " + str(
                nproc - 1) + " " + xmippexe + " " + xmippopts
            self.writeXmippLog(mpiruncmd)
            apEMAN.executeEmanCmd(mpiruncmd, verbose=True, showcmd=True)
        else:
            ### use single processor
            xmippexe = apParam.getExecPath("xmipp_ml_align2d", die=True)
            xmippcmd = xmippexe + " " + xmippopts
            self.writeXmippLog(xmippcmd)
            apEMAN.executeEmanCmd(xmippcmd, verbose=True, showcmd=True)
        aligntime = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(aligntime))

        ### minor post-processing
        self.createReferenceStack()
        self.readyUploadFlag()
        self.dumpParameters()
	def start(self):
#		self.insertCL2DJob()
		self.stack = {}
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])
		self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])

		if self.params['virtualdata'] is not None:
			self.stack['file'] = self.params['virtualdata']['filename']
		else:
			self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		### process stack to local file
		if self.params['timestamp'] is None:
			apDisplay.printMsg("creating timestamp")
			self.params['timestamp'] = self.timestamp
		self.params['localstack'] = os.path.join(self.params['rundir'], self.params['timestamp']+".hed")
 		if os.path.isfile(self.params['localstack']):
 			apFile.removeStack(self.params['localstack'])

		a = proc2dLib.RunProc2d()
		a.setValue('infile',self.stack['file'])
		a.setValue('outfile',self.params['localstack'])
		a.setValue('apix',self.stack['apix'])
		a.setValue('bin',self.params['bin'])
		a.setValue('last',self.params['numpart']-1)

		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			a.setValue('lowpass',self.params['lowpass'])
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			a.setValue('highpass',self.params['highpass'])
		if self.params['invert'] is True:
			a.setValue('invert',True)

		# clip not yet implemented
#		if self.params['clipsize'] is not None:
#			clipsize = int(self.clipsize)*self.params['bin']
#			if clipsize % 2 == 1:
#				clipsize += 1 ### making sure that clipped boxsize is even
#			a.setValue('clip',clipsize)

		if self.params['virtualdata'] is not None:
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)

		#run proc2d
		a.run()

 		if self.params['numpart'] != apFile.numImagesInStack(self.params['localstack']):
 			apDisplay.printError("Missing particles in stack")

		### setup Xmipp command
		aligntime = time.time()
 		xmippopts = (" -i "+os.path.join(self.params['rundir'], self.params['localstack'])
 			+" --nref "+str(self.params['numrefs'])
 			+" --iter "+str(self.params['maxiter'])
 			+" --odir "+str(self.params['rundir'])
 			+" --oroot "+ "part"+str(self.params['timestamp'])
			+" --classifyAllImages"
 		)
 
 		if self.params['correlation']:
 			xmippopts += " --distance correlation"
 		if self.params['classical']:
 			xmippopts += " --classicalMultiref"		
 
 
 		### use multi-processor command
 		apDisplay.printColor("Using "+str(self.params['nproc'])+" processors!", "green")
 		xmippexe = apParam.getExecPath(self.execFile, die=True)
 		mpiruncmd = self.mpirun+" -np "+str(self.params['nproc'])+" "+xmippexe+" "+xmippopts
 		self.writeXmippLog(mpiruncmd)
 		apParam.runCmd(mpiruncmd, package="Xmipp 3", verbose=True, showcmd=True, logfile="xmipp.std")
 		self.params['runtime'] = time.time() - aligntime
 		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(self.params['runtime']))
 
 		### post-processing
 		# Create a stack for the class averages at each level
 		Nlevels=glob.glob("level_*")
 		for level in Nlevels:
 			digits = level.split("_")[1]
 			apParam.runCmd("xmipp_image_convert -i "+level+"/part"+self.params['timestamp']+"*xmd -o part"
 						+self.params['timestamp']+"_level_"+digits+"_.hed", package="Xmipp 3", verbose=True)
 			
 		if self.params['align']:
			apParam.runCmd("xmipp_transform_geometry -i images.xmd -o %s_aligned.stk --apply_transform" % self.params['timestamp'], package="Xmipp 3", verbose=True)
 			apParam.runCmd("xmipp_image_convert -i %s_aligned.xmd -o alignedStack.hed" % self.params['timestamp'], package="Xmipp 3", verbose=True)
			apFile.removeFile("%s_aligned.xmd" % self.params['timestamp'])
			apFile.removeFile("%s_aligned.stk" % self.params['timestamp'])
 		
 		self.parseOutput()
 		apParam.dumpParameters(self.params, "cl2d-"+self.params['timestamp']+"-params.pickle")

		### upload results ... this used to be two separate operations, I'm combining into one
		self.runparams = apParam.readRunParameters("cl2d-"+self.params['timestamp']+"-params.pickle")
		self.apix = apStack.getStackPixelSizeFromStackId(self.runparams['stackid'])*self.runparams['bin']
		self.Nlevels=len(glob.glob("part"+self.params['timestamp']+"_level_??_.hed"))

		### create average of aligned stacks & insert aligned stack info
		lastLevelStack = "part"+self.params['timestamp']+"_level_%02d_.hed"%(self.Nlevels-1)
		apStack.averageStack(lastLevelStack)
		self.boxsize = apFile.getBoxSize(lastLevelStack)[0]
		self.insertCL2DParamsIntoDatabase()
		if self.runparams['align'] is True:
			self.insertAlignStackRunIntoDatabase("alignedStack.hed")
			self.calcResolution(self.Nlevels-1)
			self.insertAlignParticlesIntoDatabase(level=self.Nlevels-1)
		
		### loop over each class average stack & insert as clustering stacks
		self.insertClusterRunIntoDatabase()
		for level in range(self.Nlevels):
			### NOTE: RESOLUTION CAN ONLY BE CALCULATED IF ALIGNED STACK EXISTS TO EXTRACT / READ THE PARTICLES
			if self.params['align'] is True:
				self.calcResolution(level)
			partdict = self.getClassificationAtLevel(level)
			for classnum in partdict: 
				self.insertClusterStackIntoDatabase(
					"part"+self.params['timestamp']+"_level_%02d_.hed"%level,
					classnum+1, partdict[classnum], len(partdict))
		self.clearIntermediateFiles()
    def start(self):
        """
        Align and classify a particle stack with Xmipp CL2D, then upload
        the resulting class averages and classifications to the database.

        Pipeline: fetch stack info from the database -> preprocess into a
        local IMAGIC stack with EMAN proc2d -> break the stack into single
        SPIDER files -> run the MPI Xmipp class-averages program -> parse
        the output and insert one clustering stack per CL2D level.
        """
        #               self.insertCL2DJob()
        ### gather stack metadata from the database
        self.stack = {}
        self.stack['data'] = apStack.getOnlyStackData(self.params['stackid'])
        self.stack['apix'] = apStack.getStackPixelSizeFromStackId(
            self.params['stackid'])
        self.stack['part'] = apStack.getOneParticleFromStackId(
            self.params['stackid'])
        self.stack['boxsize'] = apStack.getStackBoxsize(self.params['stackid'])
        self.stack['file'] = os.path.join(self.stack['data']['path']['path'],
                                          self.stack['data']['name'])

        ### process stack to local file
        if self.params['timestamp'] is None:
            apDisplay.printMsg("creating timestamp")
            self.params['timestamp'] = self.timestamp
        self.params['localstack'] = os.path.join(
            self.params['rundir'], self.params['timestamp'] + ".hed")
        if os.path.isfile(self.params['localstack']):
            apFile.removeStack(self.params['localstack'])
        # build the EMAN proc2d command: copy/bin/filter into the local stack
        proccmd = "proc2d " + self.stack['file'] + " " + self.params[
            'localstack'] + " apix=" + str(self.stack['apix'])
        if self.params['bin'] > 1 or self.params['clipsize'] is not None:
            # NOTE(review): reads self.clipsize (an attribute), not
            # self.params['clipsize'], although the condition tests the
            # param -- presumably set earlier by the runner; confirm it
            # exists when only 'bin' > 1 triggers this branch.
            clipsize = int(self.clipsize) * self.params['bin']
            if clipsize % 2 == 1:
                clipsize += 1  ### making sure that clipped boxsize is even
            proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'],
                                                   clipsize, clipsize)
        proccmd += " last=" + str(self.params['numpart'] - 1)
        if self.params['highpass'] is not None and self.params['highpass'] > 1:
            proccmd += " hp=" + str(self.params['highpass'])
        if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
            proccmd += " lp=" + str(self.params['lowpass'])
        apParam.runCmd(proccmd, "EMAN", verbose=True)
        # sanity check: all requested particles made it into the local stack
        if self.params['numpart'] != apFile.numImagesInStack(
                self.params['localstack']):
            apDisplay.printError("Missing particles in stack")

        ### convert stack into single spider files
        self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(
            self.params['localstack'])

        ### setup Xmipp command
        aligntime = time.time()
        xmippopts = (
            " " + " -i " +
            os.path.join(self.params['rundir'], self.partlistdocfile) +
            " -codes " + str(self.params['numrefs']) + " -iter " +
            str(self.params['maxiter']) + " -o " + os.path.join(
                self.params['rundir'], "part" + self.params['timestamp']))
        ### optional CL2D flags
        if self.params['fast']:
            xmippopts += " -fast "
        if self.params['correlation']:
            xmippopts += " -useCorrelation "
        if self.params['classical']:
            xmippopts += " -classicalMultiref "
        if self.params['align']:
            xmippopts += " -alignImages "

        ### use multi-processor command
        apDisplay.printColor(
            "Using " + str(self.params['nproc']) + " processors!", "green")
        xmippexe = apParam.getExecPath("xmipp_mpi_class_averages", die=True)
        mpiruncmd = self.mpirun + " -np " + str(
            self.params['nproc']) + " " + xmippexe + " " + xmippopts
        self.writeXmippLog(mpiruncmd)
        apParam.runCmd(mpiruncmd,
                       package="Xmipp",
                       verbose=True,
                       showcmd=True,
                       logfile="xmipp.std")
        self.params['runtime'] = time.time() - aligntime
        apDisplay.printMsg("Alignment time: " +
                           apDisplay.timeString(self.params['runtime']))

        ### minor post-processing
        self.createReferenceStack()
        self.parseOutput()
        self.clearIntermediateFiles()
        #               self.readyUploadFlag()
        # persist run parameters so the upload step below can re-read them
        apParam.dumpParameters(
            self.params, "cl2d-" + self.params['timestamp'] + "-params.pickle")

        ### upload results ... this used to be two separate operations, I'm combining into one
        self.runparams = apParam.readRunParameters("cl2d-" +
                                                   self.params['timestamp'] +
                                                   "-params.pickle")
        self.apix = apStack.getStackPixelSizeFromStackId(
            self.runparams['stackid']) * self.runparams['bin']
        # one "_level_??_" stack exists per CL2D hierarchy level
        self.Nlevels = len(
            glob.glob("part" + self.params['timestamp'] + "_level_??_.hed"))

        ### create average of aligned stacks & insert aligned stack info
        lastLevelStack = "part" + self.params[
            'timestamp'] + "_level_%02d_.hed" % (self.Nlevels - 1)
        apStack.averageStack(lastLevelStack)
        self.boxsize = apFile.getBoxSize(lastLevelStack)[0]
        self.insertCL2DParamsIntoDatabase()
        if self.runparams['align'] is True:
            self.insertAlignStackRunIntoDatabase("alignedStack.hed")
            self.calcResolution(self.Nlevels - 1)
            self.insertAlignParticlesIntoDatabase(level=self.Nlevels - 1)

        ### loop over each class average stack & insert as clustering stacks
        self.insertClusterRunIntoDatabase()
        for level in range(self.Nlevels):
            ### NOTE: RESOLUTION CAN ONLY BE CALCULATED IF ALIGNED STACK EXISTS TO EXTRACT / READ THE PARTICLES
            if self.params['align'] is True:
                self.calcResolution(level)
            partdict = self.getClassificationAtLevel(level)
            for classnum in partdict:
                self.insertClusterStackIntoDatabase(
                    "part" + self.params['timestamp'] +
                    "_level_%02d_.hed" % level, classnum + 1,
                    partdict[classnum], len(partdict))
Example #30
0
        def normalizeVolume(self, volfile):
                """
mkdir CorrectGreyscale

xmipp_header_extract -i experimental_images.sel -o experimental_images.doc

xmipp_angular_project_library  -i bad_greyscale.vol -experimental_images experimental_images.doc -o CorrectGreyscale/ref -sampling_rate 15 -sym c1h -compute_neighbors -angular_distance -1

xmipp_angular_projection_matching  -i experimental_images.doc -o CorrectGreyscale/corrected_reference -ref CorrectGreyscale/ref

xmipp_mpi_angular_class_average  -i CorrectGreyscale/corrected_reference.doc -lib CorrectGreyscale/ref_angles.doc -o CorrectGreyscale/corrected_reference

xmipp_mpi_reconstruct_wbp  -i CorrectGreyscale/corrected_reference_classes.sel -o corrected_reference.vol -threshold 0.02 -sym c1  -use_each_image -weight
                """
                volroot = os.path.splitext(volfile)[0]
                volroot = re.sub("\.", "_", volroot)
                normfolder = os.path.join(self.params['rundir'], volroot)
                apParam.createDirectory(normfolder)

                ### Create Euler doc file for particles
                partselfile = os.path.join(self.params['rundir'], self.partlistdocfile)
                parteulerdoc = os.path.join(normfolder, "parteulers.doc")
                xmippcmd = "xmipp_header_extract -i %s -o %s"%(partselfile, parteulerdoc)
                apEMAN.executeEmanCmd(xmippcmd, verbose=False)
                if not os.path.isfile(parteulerdoc):
                        apDisplay.printError("Could not normalize volume for file: "+volfile)

                ### Create projections
                refprefix = os.path.join(normfolder, "refproj"+self.timestamp)
                if self.nproc > 1 and self.mpirun is not None:
                        xmipppath = apParam.getExecPath("xmipp_mpi_angular_project_library", die=True)
                        xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
                else:
                        xmippexe = "xmipp_angular_project_library"
                xmippcmd = ("%s -i %s -experimental_images %s -o %s"
                        %(xmippexe, volfile, parteulerdoc, refprefix))
                xmippcmd += " -sampling_rate %d -compute_neighbors -angular_distance -1 -perturb 0.5"%(self.params['phi'])
                if self.params['symmetry'] is not None:
                        xmippcmd += " -sym "+str(self.params['symmetry'])
                apEMAN.executeEmanCmd(xmippcmd, verbose=False)
                refs = glob.glob(refprefix+"*.xmp")
                if not refs:
                        apDisplay.printError("Could not normalize volume for file: "+volfile)

                ### Match projections
                fixprefix = os.path.join(normfolder, "match"+self.timestamp)
                if self.nproc > 1 and self.mpirun is not None:
                        xmipppath = apParam.getExecPath("xmipp_mpi_angular_projection_matching", die=True)
                        xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
                else:
                        xmippexe = "xmipp_angular_projection_matching"
                xmippcmd = ("%s -i %s -o %s -ref %s"
                        %(xmippexe, parteulerdoc, fixprefix, refprefix))
                apEMAN.executeEmanCmd(xmippcmd, verbose=False)
                docfile = fixprefix+".doc"
                if not os.path.isfile(docfile):
                        apDisplay.printError("Could not normalize volume for file: "+volfile)

                ### Create projection averages
                correctprefix = os.path.join(normfolder, "correctproj"+self.timestamp)
                if self.nproc > 1 and self.mpirun is not None:
                        xmipppath = apParam.getExecPath("xmipp_mpi_angular_class_average", die=True)
                        xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
                else:
                        xmippexe = "xmipp_angular_class_average"
                xmippcmd = ("%s -i %s.doc -lib %s_angles.doc -o %s"
                        %(xmippexe, fixprefix, refprefix, correctprefix))
                apEMAN.executeEmanCmd(xmippcmd, verbose=False)
                refs = glob.glob(correctprefix+"*.xmp")
                if not refs:
                        apDisplay.printError("Could not normalize volume for file: "+volfile)

                ### Backproject
                correctvolfile = os.path.join(normfolder, "volume"+self.timestamp+".spi")
                if self.nproc > 1 and self.mpirun is not None:
                        xmipppath = apParam.getExecPath("xmipp_mpi_reconstruct_wbp", die=True)
                        xmippexe = self.mpirun+" -np "+str(self.nproc)+" "+xmipppath
                else:
                        xmippexe = "xmipp_reconstruct_wbp"
                xmippcmd = ("%s -i %s_classes.sel -o %s"
                        %(xmippexe, correctprefix, correctvolfile))
                xmippcmd += " -threshold 0.02 -use_each_image -weight"
                if self.params['symmetry'] is not None:
                        xmippcmd += " -sym "+str(self.params['symmetry'])
                apEMAN.executeEmanCmd(xmippcmd, verbose=False)

                if not os.path.isfile(correctvolfile):
                        apDisplay.printError("Could not normalize volume for file: "+volfile)
                return correctvolfile
    def start(self):
        """
        Run the SIMPLE ab initio reconstruction pipeline on a stack:
        optionally center with EMAN cenalignint, preprocess into a local
        SPIDER stack, 2D-cluster with SIMPLE 'cluster', convert the class
        averages to Fourier format with 'spi_to_fim', reconstruct ab
        initio in 3D with 'origami', then convert results to IMAGIC,
        parse alignment/classification and upload to the database.
        """
        ### simple is written in Fortran, which cannot take inputs of certain length, therefore one needs
        ### to change to the directory to minimize the filename length, in particular for the stack
        os.chdir(self.params['rundir'])

        ### stack needs to be centered
        if self.params['no_center'] is False:
            if os.path.isfile(os.path.join(self.params['rundir'], "ali.hed")):
                apFile.removeStack(
                    os.path.join(self.params['rundir'], "ali.hed"))
            # cenalignint produces the centered stack ali.hed used below
            centstack = os.path.join(self.params['rundir'], "ali.hed")
            centcmd = "cenalignint %s > cenalignint.log" % (self.stack['file'])
            apParam.runCmd(centcmd, "EMAN")

        ### process stack to local file
        if self.params['timestamp'] is None:
            apDisplay.printMsg("creating timestamp")
            self.params['timestamp'] = self.timestamp
        self.params['localstack'] = os.path.join(
            self.params['rundir'], self.params['timestamp'] + ".spi")

        if os.path.isfile(self.params['localstack']):
            apFile.removeFile(self.params['localstack'])
        # convert the (optionally centered) stack to SPIDER byte order
        if self.params['no_center'] is False:
            proccmd = "proc2d " + centstack + " " + self.params[
                'localstack'] + " apix=" + str(self.stack['apix'])
        else:
            proccmd = "proc2d " + self.stack['file'] + " " + self.params[
                'localstack'] + " apix=" + str(self.stack['apix'])
        if self.params['bin'] > 1 or self.params['clipsize'] is not None:
            proccmd += " shrink=%d clip=%d,%d " % (self.params['bin'],
                                                   self.boxsize, self.boxsize)
        proccmd += " last=" + str(self.params['numpart'] - 1)
        proccmd += " spiderswap"
        #               if self.params['highpass'] is not None and self.params['highpass'] > 1:
        #                       proccmd += " hp="+str(self.params['highpass'])
        #               if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
        #                       proccmd += " lp="+str(self.params['lowpass'])
        apParam.runCmd(proccmd, "EMAN", verbose=True)

        #               if self.params['numpart'] != int(spider.getSpiderHeader(self.params['localstack'])[-2]):
        #                       apDisplay.printError("Missing particles in stack")

        ### setup Simple command
        aligntime = time.time()
        simpleopts = (
            "" + " stk=%s" % os.path.basename(self.params['localstack']) +
            " box=%d" % self.boxsize + " nptcls=%d" % self.params['numpart'] +
            " smpd=%.3f" % self.apix + " ring2=%d" % self.params['ring2'] +
            " ncls=%d" % self.params['ncls'] +
            " minp=%d" % self.params['minp'] +
            " nvars=%d" % self.params['nvars'] +
            " nthr=%d" % self.params['nproc'])
        if self.params['no_kmeans'] is True:
            simpleopts += " kmeans=off"
        if self.params['nran'] is not None:
            # BUG FIX: the leading space was missing here, which fused this
            # option onto the previous token (e.g. "nthr=8nran=100")
            simpleopts += " nran=%d" % self.params['nran']

        ### SIMPLE 2D clustering
        apDisplay.printColor(
            "Using " + str(self.params['nproc']) + " processors!", "green")
        simpleexe = apParam.getExecPath("cluster", die=True)
        simplecmd = "%s %s" % (simpleexe, simpleopts)
        self.writeSimpleLog(simplecmd)
        apParam.runCmd(simplecmd,
                       package="SIMPLE",
                       verbose=True,
                       showcmd=True,
                       logfile="cluster.std")
        self.params['runtime'] = time.time() - aligntime
        apDisplay.printMsg("Alignment & Classification time: " +
                           apDisplay.timeString(self.params['runtime']))

        ### SIMPLE spider to Fourier format
        clsavgs = "cavgstk.spi"
        if not os.path.isfile(os.path.join(self.params['rundir'], clsavgs)):
            apDisplay.printError(
                "class averages were not created! try rerunning with centering, more particles, or less ppc"
            )
        try:
            nptcls = spider.getSpiderHeader(clsavgs)[-2]
        except Exception:
            # header read failed; fall back to the requested class count
            # (narrowed from a bare except so Ctrl-C still propagates)
            nptcls = self.params['ncls']
            apDisplay.printWarning(
                "class average file may not have been created! Please check existence of file cavgstk.spi"
            )
        projfile = "projs"
        projext = ".fim"
        simpleexe = apParam.getExecPath("spi_to_fim", die=True)
        simpleopts = ("" + " stk=%s" % clsavgs + " box=%d" % self.boxsize +
                      " nptcls=%d" % nptcls + " smpd=%.3f" % self.apix +
                      " outbdy=%s" % projfile +
                      " msk=%d" % self.params['mask'])
        simplecmd = "%s %s" % (simpleexe, simpleopts)
        self.writeSimpleLog(simplecmd)
        apParam.runCmd(simplecmd,
                       package="SIMPLE",
                       verbose=True,
                       showcmd=True,
                       logfile="spi_to_fim.std")

        ### SIMPLE origami, ab initio 3D reconstruction
        refinetime = time.time()
        simpleexe = apParam.getExecPath("origami", die=True)
        simpleopts = (
            "" + " fstk=%s" % projfile + projext +
            " froms=%d" % self.params['froms'] +
            " tos=%d" % self.params['tos'] + " lp=%d" % self.params['lp'] +
            " hp=%d" % self.params['hp'] +
            " maxits=%d" % self.params['maxits'] +
            " msk=%d" % self.params['mask'] + " mw=%d" % self.params['mw'] +
            " frac=%.3f" % self.params['frac'] +
            " amsklp=%d" % self.params['amsklp'] +
            " edge=%d" % self.params['edge'] + " trs=%d" % self.params['trs'] +
            " nthr=%d" % self.params['nproc'])
        simplecmd = "%s %s" % (simpleexe, simpleopts)
        self.writeSimpleLog(simplecmd)
        apParam.runCmd(simplecmd,
                       package="SIMPLE",
                       verbose=True,
                       showcmd=True,
                       logfile="origami.std")
        refinetime = time.time() - refinetime
        apDisplay.printMsg("Origami reconstruction time: " +
                           apDisplay.timeString(refinetime))

        #               '''

        ### minor post-processing
        self.clearIntermediateFiles()
        # persist run parameters so the upload step below can re-read them
        apParam.dumpParameters(
            self.params,
            "simple-" + self.params['timestamp'] + "-params.pickle")

        ### upload results
        self.runparams = apParam.readRunParameters("simple-" +
                                                   self.params['timestamp'] +
                                                   "-params.pickle")

        ### create average of aligned and clustered stacks, convert to IMAGIC
        alignedStackSpi = "inplalgnstk.spi"
        alignedStack = "inplalgnstk.hed"
        if os.path.isfile(alignedStack):
            apFile.removeStack(alignedStack)
        emancmd = "proc2d %s %s flip" % (alignedStackSpi, alignedStack)
        apParam.runCmd(emancmd, "EMAN")
        clusterStackSpi = "cavgstk.spi"
        clusterStack = "cavgstk.hed"
        if os.path.isfile(clusterStack):
            apFile.removeStack(clusterStack)
        emancmd = "proc2d %s %s flip" % (clusterStackSpi, clusterStack)
        apParam.runCmd(emancmd, "EMAN")
        #               apStack.averageStack(alignedStack)

        ### parse alignment and classification results
        if self.params['no_center'] is False:
            self.alignD = self.getAlignParameters(centparams="cenalignint.log")
        else:
            self.alignD = self.getAlignParameters()
        if self.params['no_kmeans'] is False:
            self.classD = self.getClassification("kmeans.spi", clusterStack)
        else:
            self.classD = self.getClassification("hcl.spi", clusterStack)

        ### upload to database
        self.insertSIMPLEAlignParamsIntoDatabase()
        self.insertAlignStackRunIntoDatabase(alignedStack, clusterStack)
        self.calcResolution(alignedStack)
        self.insertAlignParticlesIntoDatabase()
        self.insertClusterRunIntoDatabase()
        self.insertClusterStackIntoDatabase(clusterStack, len(self.classD))
        self.insertSIMPLEOrigamiParamsIntoDatabase()
	def start(self):
		"""
		Align and classify a particle stack (optionally a virtual
		substack) with Xmipp CL2D, then upload class averages and
		classifications to the database, one clustering stack per
		CL2D level.
		"""
#		self.insertCL2DJob()
		### gather stack metadata from the database
		self.stack = {}
		self.stack['apix'] = apStack.getStackPixelSizeFromStackId(self.params['stackid'])
		self.stack['part'] = apStack.getOneParticleFromStackId(self.params['stackid'])

		if self.params['virtualdata'] is not None:
			# virtual stacks carry their own source filename
			self.stack['file'] = self.params['virtualdata']['filename']
		else:
			# NOTE(review): reads self.stackdata, presumably set by the
			# runner before start() -- confirm against the caller
			self.stack['file'] = os.path.join(self.stackdata['path']['path'], self.stackdata['name'])

		### process stack to local file
		if self.params['timestamp'] is None:
			apDisplay.printMsg("creating timestamp")
			self.params['timestamp'] = self.timestamp
		self.params['localstack'] = os.path.join(self.params['rundir'], self.params['timestamp']+".hed")
		if os.path.isfile(self.params['localstack']):
			apFile.removeStack(self.params['localstack'])

		# preprocess (bin/filter/invert) into the local IMAGIC stack
		a = proc2dLib.RunProc2d()
		a.setValue('infile',self.stack['file'])
		a.setValue('outfile',self.params['localstack'])
		a.setValue('apix',self.stack['apix'])
		a.setValue('bin',self.params['bin'])
		a.setValue('last',self.params['numpart']-1)

		if self.params['lowpass'] is not None and self.params['lowpass'] > 1:
			a.setValue('lowpass',self.params['lowpass'])
		if self.params['highpass'] is not None and self.params['highpass'] > 1:
			a.setValue('highpass',self.params['highpass'])
		if self.params['invert'] is True:
			a.setValue('invert',True)

		# clip not yet implemented
#		if self.params['clipsize'] is not None:
#			clipsize = int(self.clipsize)*self.params['bin']
#			if clipsize % 2 == 1:
#				clipsize += 1 ### making sure that clipped boxsize is even
#			a.setValue('clip',clipsize)

		if self.params['virtualdata'] is not None:
			# restrict proc2d to the virtual stack's particles;
			# particleNumber is 1-based, the proc2d list is 0-based
			vparts = self.params['virtualdata']['particles']
			plist = [int(p['particleNumber'])-1 for p in vparts]
			a.setValue('list',plist)

		#run proc2d
		a.run()

		# sanity check: all requested particles made it into the stack
		if self.params['numpart'] != apFile.numImagesInStack(self.params['localstack']):
			apDisplay.printError("Missing particles in stack")

		### convert stack into single spider files
		self.partlistdocfile = apXmipp.breakupStackIntoSingleFiles(self.params['localstack'])

		### setup Xmipp command
		aligntime = time.time()
		xmippopts = ( " "
			+" -i "+os.path.join(self.params['rundir'], self.partlistdocfile)
			+" -codes "+str(self.params['numrefs'])
			+" -iter "+str(self.params['maxiter'])
			+" -o "+os.path.join(self.params['rundir'], "part"+self.params['timestamp'])
		)
		### optional CL2D flags
		if self.params['fast']:
			xmippopts += " -fast "
		if self.params['correlation']:
			xmippopts += " -useCorrelation "
		if self.params['classical']:
			xmippopts += " -classicalMultiref "
		if self.params['align']:
			xmippopts += " -alignImages "

		### use multi-processor command
		apDisplay.printColor("Using "+str(self.params['nproc'])+" processors!", "green")
		xmippexe = apParam.getExecPath("xmipp_mpi_class_averages", die=True)
		mpiruncmd = self.mpirun+" -np "+str(self.params['nproc'])+" "+xmippexe+" "+xmippopts
		self.writeXmippLog(mpiruncmd)
		apParam.runCmd(mpiruncmd, package="Xmipp", verbose=True, showcmd=True, logfile="xmipp.std")
		self.params['runtime'] = time.time() - aligntime
		apDisplay.printMsg("Alignment time: "+apDisplay.timeString(self.params['runtime']))

		### minor post-processing
		self.createReferenceStack()
		self.parseOutput()
		self.clearIntermediateFiles()
#		self.readyUploadFlag()
		# persist run parameters so the upload step below can re-read them
		apParam.dumpParameters(self.params, "cl2d-"+self.params['timestamp']+"-params.pickle")

		### upload results ... this used to be two separate operations, I'm combining into one
		self.runparams = apParam.readRunParameters("cl2d-"+self.params['timestamp']+"-params.pickle")
		self.apix = apStack.getStackPixelSizeFromStackId(self.runparams['stackid'])*self.runparams['bin']
		# one "_level_??_" stack exists per CL2D hierarchy level
		self.Nlevels=len(glob.glob("part"+self.params['timestamp']+"_level_??_.hed"))

		### create average of aligned stacks & insert aligned stack info
		lastLevelStack = "part"+self.params['timestamp']+"_level_%02d_.hed"%(self.Nlevels-1)
		apStack.averageStack(lastLevelStack)
		self.boxsize = apFile.getBoxSize(lastLevelStack)[0]
		self.insertCL2DParamsIntoDatabase()
		if self.runparams['align'] is True:
			self.insertAlignStackRunIntoDatabase("alignedStack.hed")
			self.calcResolution(self.Nlevels-1)
			self.insertAlignParticlesIntoDatabase(level=self.Nlevels-1)

		### loop over each class average stack & insert as clustering stacks
		self.insertClusterRunIntoDatabase()
		for level in range(self.Nlevels):
			### NOTE: RESOLUTION CAN ONLY BE CALCULATED IF ALIGNED STACK EXISTS TO EXTRACT / READ THE PARTICLES
			if self.params['align'] is True:
				self.calcResolution(level)
			partdict = self.getClassificationAtLevel(level)
			for classnum in partdict:
				self.insertClusterStackIntoDatabase(
					"part"+self.params['timestamp']+"_level_%02d_.hed"%level,
					classnum+1, partdict[classnum], len(partdict))