Example #1
 def computeModesStep(self, fnPseudoatoms, numberOfModes, cutoffStr):
     (baseDir, fnBase) = os.path.split(fnPseudoatoms)
     fnBase = fnBase.replace(".pdb", "")
     fnDistanceHist = os.path.join(baseDir, 'extra',
                                   fnBase + '_distance.hist')
     rc = self._getRc(fnDistanceHist)
     self._enterWorkingDir()
     self.runJob('nma_record_info.py',
                 "%d %s.pdb %d" % (numberOfModes, fnBase, rc),
                 env=getNMAEnviron())
     self.runJob("nma_pdbmat.pl", "pdbmat.dat", env=getNMAEnviron())
     self.runJob("nma_diag_arpack", "", env=getNMAEnviron())
     if not exists("fort.11"):
         self._printWarnings(
             redStr(
                 'Modes cannot be computed. Check the number of '
                 'modes you asked to compute and/or consider increasing '
                 'cut-off distance. The maximum number of modes allowed by '
                 'the method for pseudoatomic normal mode analysis is 3 times '
                 'the number of pseudoatoms but the protocol allows only up to '
                 '200 modes as 20-100 modes are usually enough.  '
                 'If the number of modes is below the minimum between 200 and 3 '
                 'times the number of pseudoatoms, consider increasing cut-off distance.'
             ))
     cleanPath("diag_arpack.in", "pdbmat.dat")
     self._leaveWorkingDir()
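The step above runs the NMA helper scripts inside the protocol's working directory, checks that the expected fort.11 output appeared, and removes the pdbmat intermediates before leaving. Below is a minimal standard-library sketch of that run/verify/clean pattern; the command, output name and intermediate names are placeholders supplied by the caller, not part of the original protocol.

import os
import subprocess
from contextlib import contextmanager

@contextmanager
def working_dir(path):
    """Temporarily chdir into path, restoring the previous cwd on exit."""
    prev = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(prev)

def run_and_clean(run_dir, cmd, expected_output, intermediates):
    """Run cmd in run_dir, warn if expected_output is missing, delete intermediates."""
    with working_dir(run_dir):
        subprocess.run(cmd, check=False)
        if not os.path.exists(expected_output):
            print("Warning: expected output '%s' was not produced" % expected_output)
        for fn in intermediates:
            if os.path.exists(fn):
                os.remove(fn)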
Example #2
    def __createTemporaryCtfs(self, obj, setOfMics):
        """ Create a temporary .sqlite file to visualize CTF while the
             protocol has not finished yet.
            """
        cleanPath(obj._getPath("ctfs_temporary.sqlite"))
        ctfSet = self.protocol._createSetOfCTF("_temporary")

        for mic in setOfMics:
            micFn = mic.getFileName()
            micDir = obj._getExtraPath(removeBaseExt(mic.getFileName()))
            samplingRate = mic.getSamplingRate(
            ) * self.protocol.ctfDownFactor.get()
            mic.setSamplingRate(samplingRate)
            out = self.protocol._getCtfOutPath(micDir)
            psdFile = self.protocol._getPsdPath(micDir)

            if exists(out) and exists(psdFile):
                ctfModel = em.CTFModel()
                readCtfModel(ctfModel,
                             out,
                             ctf4=self.protocol.useCtffind4.get())
                ctfModel.setPsdFile(psdFile)
                ctfModel.setMicrograph(mic)
                ctfSet.append(ctfModel)

        if not ctfSet.isEmpty():
            ctfSet.write()
            ctfSet.close()

        return ctfSet
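This viewer helper refreshes a temporary set while the protocol is still running: it deletes the stale .sqlite and then appends only the CTFs whose output and PSD files already exist. A rough standard-library sketch of that "drop the stale cache, collect only finished results" idea follows; the file names ctf.txt and psd.mrc are made up for illustration, not the protocol's real outputs.

import os

def rebuild_temporary_results(tmp_file, result_dirs,
                              out_name="ctf.txt", psd_name="psd.mrc"):
    """Remove a stale temporary file and rebuild it from finished results only."""
    if os.path.exists(tmp_file):
        os.remove(tmp_file)  # roughly what cleanPath does for a single file
    finished = []
    for d in result_dirs:
        out = os.path.join(d, out_name)
        psd = os.path.join(d, psd_name)
        if os.path.exists(out) and os.path.exists(psd):
            finished.append((out, psd))
    if finished:  # only write when there is something to show
        with open(tmp_file, "w") as fh:  # stand-in for writing the temporary set
            for out, psd in finished:
                fh.write("%s\t%s\n" % (out, psd))
    return finished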
Example #3
    def createOutputStep(self):
        fnImgs = self._getExtraPath('images.stk')
        if os.path.exists(fnImgs):
            cleanPath(fnImgs)

        outputSet = self._createSetOfParticles()
        imgSet = self.inputSet.get()
        imgFn = self._getExtraPath("anglesCont.xmd")
        self.newAssignmentPerformed = os.path.exists(
            self._getExtraPath("angles.xmd"))
        self.samplingRate = self.inputSet.get().getSamplingRate()
        if isinstance(imgSet, SetOfClasses2D):
            outputSet = self._createSetOfClasses2D(imgSet)
            outputSet.copyInfo(imgSet.getImages())
        elif isinstance(imgSet, SetOfAverages):
            outputSet = self._createSetOfAverages()
            outputSet.copyInfo(imgSet)
        else:
            outputSet = self._createSetOfParticles()
            outputSet.copyInfo(imgSet)
            if not self.newAssignmentPerformed:
                outputSet.setAlignmentProj()
        outputSet.copyItems(imgSet,
                            updateItemCallback=self._processRow,
                            itemDataIterator=md.iterRows(
                                imgFn, sortByLabel=md.MDL_ITEM_ID))
        self._defineOutputs(outputParticles=outputSet)
        self._defineSourceRelation(self.inputSet, outputSet)
Example #4
    def symStep(self):
        inputVol = self.inputVol.get()
        fnVol = inputVol.getFileName()
        samplingRate = inputVol.getSamplingRate()
        volName = os.path.basename(fnVol)
        volName = os.path.splitext(volName)[0]
        tmpDir = self._getTmpPath(volName)
        fnVol = os.path.abspath(fnVol)
        makePath(tmpDir)

        maskRadius = self.mask.get()
        if maskRadius<0:
            Xdim = inputVol.getDim()[0]
            maskRadius=Xdim/2-1
        lpCutoff = inputVol.getSamplingRate()/self.lp.get()

        paramsSym = ' prg=symmetry_test vol1=%s smpd=%f msk=%d lp=%f nthr=%d' \
                 %(fnVol, samplingRate, maskRadius, lpCutoff, self.numberOfThreads.get())

        self.runJob(simple.Plugin.sim_exec(), 'prg=new_project projname=temp', cwd=os.path.abspath(tmpDir),
                    env=simple.Plugin.getEnviron())
        self.runJob(simple.Plugin.sim_exec(), paramsSym, cwd=os.path.abspath(tmpDir)+'/temp', env=simple.Plugin.getEnviron())


        #Move output files to ExtraPath and rename them properly
        mvRoot1 = os.path.join(tmpDir+'/temp/1_symmetry_test', "symmetry_test_output.txt")
        moveFile(mvRoot1, self._getExtraPath('point_group_symmetry_.txt'))
        cleanPath(tmpDir)
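Examples like this one follow a common temporary-directory workflow: create a tmp folder, run the external program with cwd set to it, move the file of interest into the protocol's extra path under a stable name, and delete the tmp folder. Here is a small sketch of that workflow using only the standard library; makedirs, shutil.move and shutil.rmtree stand in for what makePath, moveFile and cleanPath do above.

import os
import shutil
import subprocess

def run_in_tmp_and_collect(cmd, tmp_dir, produced_name, final_path):
    """Run cmd inside tmp_dir, keep one produced file, then drop tmp_dir."""
    os.makedirs(tmp_dir, exist_ok=True)            # ~ makePath(tmpDir)
    subprocess.run(cmd, cwd=tmp_dir, check=True)   # the tool writes produced_name here
    shutil.move(os.path.join(tmp_dir, produced_name), final_path)  # ~ moveFile(...)
    shutil.rmtree(tmp_dir, ignore_errors=True)     # ~ cleanPath(tmpDir)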
Example #5
    def processMovieStep(self, movieId, movieFn, *args):
        movieFolder = self._getMovieFolder(movieId)
        movieName = basename(movieFn)
        #export SCIPION_DEBUG=1 # passwd=a
        #startDebugger()

        if self._filterMovie(movieId, movieFn):
            makePath(movieFolder)
            createLink(movieFn, join(movieFolder, movieName))
            toDelete = [movieName]
    
            if movieName.endswith('bz2'):
                movieMrc = movieName.replace('.bz2', '') # we assume that if compressed the name ends with .mrc.bz2
                toDelete.append(movieMrc)
                if not exists(movieMrc):
                    self.runJob('bzip2', '-d -f %s' % movieName, cwd=movieFolder)
            else:
                movieMrc = movieName
            
            self.info("Processing movie: %s" % movieMrc)
            
            if movieMrc.endswith('.em'):
                movieMrc = movieMrc + ":ems"

            self._processMovie(movieId, movieMrc, movieFolder, *args)
            
            if self.cleanMovieData:
                cleanPath(movieFolder)
            else:
                self.info('Clean movie data DISABLED. Movie folder will remain on disk!!!')
Example #6
    def _calculateAuxiliaryFile(self):
        """ Create a new CTF set with the CTFs that satisfy the
        resolution constraint and persist it.
        """
        try:
            self.setOfCTFsConst
        except AttributeError:
            pass
        else:
            # TODO: close the mapper; otherwise the object cannot be reused (although it should be possible)
            self.setOfCTFsConst.close()

        #metadata file with protocol output
        #get temporary fileName for metadata file
        self.targetFile = self.protocol._getTmpPath(self.tmpMetadataFile)
        resolutionThreshold = self.resolutionThreshold.get()
        print "TODO: this should be closer to the mapper. Here it does not make any sense. ROB"
        #TODO check if this is necessary
        cleanPath(self.targetFile)

        #metadata with selected CTFs
        self.setOfCTFsConst = data.SetOfCTF(filename=self.targetFile)
        #object read metadata file
        ctfs = data.SetOfCTF(filename=self.pairsFile)
        # condition to be satisfied by the CTFs
        for ctf in ctfs:
            if ctf.resolution < resolutionThreshold:
                self.setOfCTFsConst.append(ctf)
        #new file with selected CTFs
        self.setOfCTFsConst.write()
        #check if empty
        if self.setOfCTFsConst.getSize() < 1:
            print "WARNING: Empty set of CTFs."
Example #7
 def resizeStep(self,fnRoot,Xdim):
     if os.path.exists(fnRoot+".vol"):
         self.runJob("xmipp_image_resize","-i %s.vol -o %s.vol --dim %d %d" %(fnRoot,fnRoot,Xdim,Xdim))
         self.runJob("xmipp_image_convert","-i %s.vol -o %s.mrc -t vol" %(fnRoot,fnRoot))
         cleanPath("%s.vol"%fnRoot)
         self.runJob("xmipp_image_header", "-i %s.mrc --sampling_rate %f" %\
                     (fnRoot, self.inputSet.get().getSamplingRate()))
Example #8
 def getBestVolumesStep(self):
     volumes = []
     inliers = []
     
     for n in range(self.nRansac.get()):
         fnAngles = self._getTmpPath("angles_ransac%05d"%n+".xmd")
         md=xmipp.MetaData("inliers@"+fnAngles)
         numInliers=md.getValue(xmipp.MDL_WEIGHT,md.firstObject())
         volumes.append(fnAngles)
         inliers.append(numInliers)
     
     index = sorted(range(len(inliers)), key=lambda k: inliers[k])
     fnBestAngles = ''
     threshold=self.getCCThreshold()
  
     i = self.nRansac.get()-1
     indx = 0
     while i >= 0 and indx < self.numVolumes:
         fnBestAngles = volumes[index[i]]
         fnBestAnglesOut = self._getPath("proposedVolume%05d"%indx+".xmd")
         copyFile(fnBestAngles, fnBestAnglesOut)
         self._log.info("Best volume %d = %s" % (indx, fnBestAngles))
         if not self.useAll:
             self.runJob("xmipp_metadata_utilities","-i %s -o %s --query select \"maxCC>%f \" --mode append" %(fnBestAnglesOut,fnBestAnglesOut,threshold))
             if not isMdEmpty(fnBestAnglesOut):
                 indx += 1
         else:
             indx += 1
         i -= 1
         
     # Remove unnecessary files
     for n in range(self.nRansac.get()):
         fnAngles = self._getTmpPath("angles_ransac%05d"%n+".xmd")
         cleanPath(fnAngles)
Example #9
 def getBestVolumesStep(self):
     volumes = []
     inliers = []
     
     for n in range(self.nRansac.get()):
         fnAngles = self._getTmpPath("angles_ransac%05d"%n+".xmd")
         md=emlib.MetaData("inliers@"+fnAngles)
         numInliers=md.getValue(emlib.MDL_WEIGHT,md.firstObject())
         volumes.append(fnAngles)
         inliers.append(numInliers)
     
     index = sorted(range(len(inliers)), key=lambda k: inliers[k])
     fnBestAngles = ''
     threshold=self.getCCThreshold()
  
     i = self.nRansac.get()-1
     indx = 0
     while i >= 0 and indx < self.numVolumes:
         fnBestAngles = volumes[index[i]]
         fnBestAnglesOut = self._getPath("proposedVolume%05d"%indx+".xmd")
         copyFile(fnBestAngles, fnBestAnglesOut)
         self._log.info("Best volume %d = %s" % (indx, fnBestAngles))
         if not self.useAll:
             self.runJob("xmipp_metadata_utilities","-i %s -o %s --query select \"maxCC>%f \" --mode append" %(fnBestAnglesOut,fnBestAnglesOut,threshold))
             if not isMdEmpty(fnBestAnglesOut):
                 indx += 1
         else:
             indx += 1
         i -= 1
         
     # Remove unnecessary files
     for n in range(self.nRansac.get()):
         fnAngles = self._getTmpPath("angles_ransac%05d"%n+".xmd")
         cleanPath(fnAngles)
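Both RANSAC variants above rank the candidate volumes by their number of inliers: they sort the index list in ascending order and then walk it from the end, copying up to numVolumes best candidates. The same selection can be written more directly; a short sketch:

def best_candidates(scores, n):
    """Return the indices of the n highest-scoring candidates, best first."""
    ranked = sorted(range(len(scores)), key=lambda k: scores[k], reverse=True)
    return ranked[:n]

# best_candidates([0.2, 0.9, 0.5], 2) -> [1, 2]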
Example #10
    def testOneRemoteHostToLocal(self):
        tempFolder = "oneRemoteHostToLocal"
        filePaths = {}
        checkPathList = []
        remoteSourceFilePathList2 = ft.getRemoteFolderFiles(
            self.remoteHostName2, self.remoteUserName2, self.remotePassword2,
            self.remoteSourceFolder2)
        for remoteSourceFilePath in remoteSourceFilePathList2:
            remoteSourceFileName = basename(remoteSourceFilePath)
            targetFilePath = join(self.localTargetFolder, tempFolder, "test2",
                                  remoteSourceFileName)
            filePaths[remoteSourceFilePath] = targetFilePath
            checkPathList.append(targetFilePath)

        self.fileTransfer.transferFilesFrom(filePaths,
                                            self.remoteHostName2,
                                            self.remoteUserName2,
                                            self.remotePassword2,
                                            gatewayHosts=self.gatewayHosts,
                                            numberTrials=self.numberTrials,
                                            forceOperation=self.forceOperation,
                                            operationId=self.operationId)
        passTest = len(
            self.fileTransfer.checkFiles(checkPathList,
                                         self.hostPasswords,
                                         gatewayHosts=self.gatewayHosts,
                                         numberTrials=self.numberTrials,
                                         forceOperation=self.forceOperation,
                                         operationId=self.operationId)) == 0
        cleanPath(join(self.localTargetFolder, tempFolder))
        self.assertTrue(passTest)
Example #11
 def testLocalToLocal(self):
     tempFolder = "localToLocal"
     filePaths = {}
     sourceFilesPathList = getFiles(self.localSourceFolder)
     for sourceFilePath in sourceFilesPathList:
         sourceFileName = basename(sourceFilePath)
         targetFilePath = join(self.localTargetFolder, tempFolder,
                               sourceFileName)
         targetFilePathList = []
         targetFilePathList.append(targetFilePath)
         filePaths[sourceFilePath] = targetFilePathList
     self.fileTransfer.transferFiles(filePaths,
                                     self.hostPasswords,
                                     gatewayHosts=self.gatewayHosts,
                                     numberTrials=self.numberTrials,
                                     forceOperation=self.forceOperation,
                                     operationId=self.operationId)
     checkPathList = ft.getFilePathList(filePaths)
     passTest = len(
         self.fileTransfer.checkFiles(checkPathList,
                                      self.hostPasswords,
                                      gatewayHosts=self.gatewayHosts,
                                      numberTrials=self.numberTrials,
                                      forceOperation=self.forceOperation,
                                      operationId=self.operationId)) == 0
     #         self.fileTransfer.deleteFiles(checkPathList, self.hostPasswords, gatewayHosts=self.gatewayHosts, numberTrials=self.numberTrials, forceOperation=self.forceOperation, operationId=self.operationId)
     cleanPath(join(self.localTargetFolder, tempFolder))
     self.assertTrue(passTest)
Example #12
    def _getIterClasses(self, it, clean=False):
        """ Return the file with the classes for this iteration.
        If the file doesn't exist, it will be created.
        """
        from convert import readSetOfClasses3D
        
        dataClasses = self.protocol._getFileName('classes_scipion', iter=it)
        
        if clean:
            cleanPath(dataClasses)
        
        if not exists(dataClasses):
            fileparList = []
            volumeList = []
            for ref in self._refsList:
                filepar = self.protocol._getFileName('output_par_class', iter=it, ref=ref)
                volFn = self.protocol._getFileName('iter_vol_class', iter=it, ref=ref)
                fileparList.append(filepar)
                volumeList.append(volFn)
            
            clsSet = em.SetOfClasses3D(filename=dataClasses)
            clsSet.setImages(self.inputParticles.get())
            readSetOfClasses3D(clsSet, fileparList, volumeList)
            clsSet.write()
            clsSet.close()

        return dataClasses
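The viewer only builds the per-iteration classes file when it is missing, and cleanPath is used to force a rebuild when clean=True. That lazy "build the derived file on first use" pattern in isolation, as a small standard-library sketch with a caller-supplied builder function:

import os

def cached_file(path, builder, clean=False):
    """Return path, (re)building it with builder(path) when missing or clean=True."""
    if clean and os.path.exists(path):
        os.remove(path)            # ~ cleanPath(dataClasses)
    if not os.path.exists(path):
        builder(path)              # e.g. write the classes/sqlite file
    return path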
Example #13
    def reformatPdbOutputStep(self, numberOfModes):
        self._enterWorkingDir()

        makePath('modes')
        Natoms = self._countAtoms("atoms.pdb")
        fhIn = open('diagrtb.eigenfacs')
        fhAni = open('vec_ani.txt', 'w')

        for n in range(numberOfModes):
            # Skip two lines
            fhIn.readline()
            fhIn.readline()
            fhOut = open('modes/vec.%d' % (n + 1), 'w')
            for i in range(Natoms):
                line = fhIn.readline()
                fhOut.write(line)
                fhAni.write(line.rstrip().lstrip() + " ")
            fhOut.close()
            if n != (numberOfModes - 1):
                fhAni.write("\n")
        fhIn.close()
        fhAni.close()
        self.runJob("nma_prepare_for_animate.py", "", env=getNMAEnviron())
        cleanPath("vec_ani.txt")
        moveFile('vec_ani.pkl', 'extra/vec_ani.pkl')

        self._leaveWorkingDir()
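reformatPdbOutputStep splits the concatenated diagrtb.eigenfacs file into one vec.N file per mode: each mode block starts with two header lines followed by one line per atom. The parsing loop on its own is sketched below; the file and directory names are the ones used above, but the standalone function itself is not part of the protocol.

import os

def split_modes(eigenfacs_path, number_of_modes, n_atoms, out_dir="modes"):
    """Write one file per mode from a concatenated eigenvector file."""
    os.makedirs(out_dir, exist_ok=True)
    with open(eigenfacs_path) as fh_in:
        for n in range(number_of_modes):
            fh_in.readline()   # skip the two header lines
            fh_in.readline()   # that precede every mode block
            out_path = os.path.join(out_dir, "vec.%d" % (n + 1))
            with open(out_path, "w") as fh_out:
                for _ in range(n_atoms):
                    fh_out.write(fh_in.readline())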
Example #14
def exportData(emxDir, inputSet, ctfSet=None, xmlFile='data.emx', binaryFile=None):
    """ Export micrographs, coordinates or particles to  EMX format. """
    cleanPath(emxDir)
    makePath(emxDir) 
    emxData = emxlib.EmxData()
    micSet=None
    
    if binaryFile is None:
        binaryFile = xmlFile.replace('.emx', '.mrc')

    if isinstance(inputSet, SetOfMicrographs):
        _micrographsToEmx(emxData, inputSet, emxDir, ctfSet)
        
    elif isinstance(inputSet, SetOfCoordinates):
        micSet = inputSet.getMicrographs()
        _micrographsToEmx(emxData, micSet, emxDir, ctfSet)
        _particlesToEmx(emxData, inputSet, None, micSet)
        
    elif isinstance(inputSet, SetOfParticles):
        print ("SetOfParticles-----------------------------------------")
        if inputSet.hasCoordinates():
            micSet = inputSet.getCoordinates().getMicrographs()
            _micrographsToEmx(emxData, micSet, emxDir, writeData=False)
        fnMrcs = join(emxDir, binaryFile)
        _particlesToEmx(emxData, inputSet, fnMrcs, micSet)
        
    fnXml = join(emxDir, xmlFile)
    emxData.write(fnXml)
Example #15
    def alignParticlesStep(self):

        fhInputTranMat = self._getExtraPath('transformation-matrix.txt')
        outParticlesFn = self._getExtraPath('outputParticles.xmd')
        transMatFromFile = np.loadtxt(fhInputTranMat)
        transformationMat = np.reshape(transMatFromFile, (4, 4))
        transform = Transform()
        transform.setMatrix(transformationMat)

        resultMat = Transform()
        outputParts = md.MetaData()
        mdToAlign = md.MetaData(self.imgsInputFn)
        for row in md.iterRows(mdToAlign):
            inMat = rowToAlignment(row, ALIGN_PROJ)
            partTransformMat = inMat.getMatrix()
            partTransformMatrix = np.matrix(partTransformMat)
            newTransformMatrix = np.matmul(transformationMat,
                                           partTransformMatrix)
            resultMat.setMatrix(newTransformMatrix)
            rowOut = md.Row()
            rowOut.copyFromRow(row)
            alignmentToRow(resultMat, rowOut, ALIGN_PROJ)
            rowOut.addToMd(outputParts)
        outputParts.write(outParticlesFn)
        cleanPath(self.imgsInputFn)
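alignParticlesStep loads a 4x4 transformation matrix from a text file and left-multiplies it into every particle's alignment matrix. The matrix algebra in isolation, as a short numpy sketch (the protocol's metadata handling is omitted, and the function name is mine):

import numpy as np

def compose_alignments(matrix_file, particle_matrices):
    """Apply a global 4x4 transform (read from matrix_file) to each particle matrix."""
    global_mat = np.loadtxt(matrix_file).reshape(4, 4)
    return [global_mat @ np.asarray(m).reshape(4, 4) for m in particle_matrices]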
Example #16
    def createOutputStep(self):

        fnImgs = self._getExtraPath('images.stk')
        if os.path.exists(fnImgs):
            cleanPath(fnImgs)

        imgSet = self.inputSet.get()
        imgFn = self._getExtraPath("anglesCont.xmd")

        self.newAssignmentPerformed = os.path.exists(
            self._getExtraPath("angles.xmd"))
        self.samplingRate = self.inputSet.get().getSamplingRate()
        outputSet = self._createSetOfParticles()
        outputSet.copyInfo(imgSet)
        if not self.newAssignmentPerformed:
            outputSet.setAlignmentProj()
        self.iterMd = md.iterRows(imgFn, md.MDL_ITEM_ID)
        self.lastRow = next(self.iterMd)
        outputSet.copyItems(imgSet, updateItemCallback=self._updateItem)
        self._defineOutputs(outputParticles=outputSet)
        self._defineSourceRelation(self.inputSet, outputSet)

        imgSet = self.inputSet.get()
        outputSet2 = self._createSetOfParticles('2')
        outputSet2.copyInfo(imgSet)
        if not self.newAssignmentPerformed:
            outputSet2.setAlignmentProj()
        self.iterMd2 = md.iterRows(imgFn, md.MDL_ITEM_ID)
        self.lastRow2 = next(self.iterMd2)
        outputSet2.copyItems(
            imgSet,
            updateItemCallback=self._updateItem2,
        )
        self._defineOutputs(outputProjections=outputSet2)
        self._defineSourceRelation(self.inputSet, outputSet2)
Example #17
    def createOutputStep(self):
        lastIter = self.getLastIteration(1)
        Ts = self.inputSet.get().getSamplingRate()

        # To recover the original size of the volume if it was changed
        fnVol = self.getIterVolume(lastIter)
        Xdim = self.inputSet.get().getDimensions()[0]
        if self.useMaxRes and self.newXdim != Xdim:
            self.runJob('xmipp_image_resize',
                        "-i %s --fourier %d" % (fnVol, Xdim),
                        numberOfMpi=1)
        fnMrc = fnVol.replace(".vol", ".mrc")
        self.runJob("xmipp_image_convert",
                    "-i %s -o %s -t vol" % (fnVol, fnMrc),
                    numberOfMpi=1)
        cleanPath(fnVol)
        self.runJob("xmipp_image_header",
                    "-i %s --sampling_rate %f" % (fnMrc, Ts),
                    numberOfMpi=1)

        vol = Volume()
        vol.setObjComment('significant volume 1')
        vol.setLocation(fnMrc)
        vol.setSamplingRate(Ts)
        self._defineOutputs(outputVolume=vol)
        self._defineSourceRelation(self.inputSet, vol)
Example #18
    def _calculateAuxiliaryFile(self):
        """create new ctf_set with ctf that satisfies the
        constraint and persist it
        """
        try:
            self.setOfCTFsConst
        except AttributeError:
            pass
        else:
            # TODO: close the mapper; otherwise the object cannot be reused (although it should be possible)
            self.setOfCTFsConst.close()

        #metadata file with protocol output
        #get temporary fileName for metadata file
        self.targetFile = self.protocol._getTmpPath(self.tmpMetadataFile)
        resolutionThreshold = self.resolutionThreshold.get()
        print "TODO: this should be closer to the mapper. Here it does not make any sense. ROB"
        #TODO check if this is necessary
        cleanPath(self.targetFile)
        
        #metadata with selected CTFs
        self.setOfCTFsConst  = data.SetOfCTF(filename=self.targetFile)
        #object read metadata file
        ctfs  = data.SetOfCTF(filename=self.pairsFile)
        # condition to be satisfied by the CTFs
        for ctf in ctfs:
            if ctf.resolution < resolutionThreshold:
                self.setOfCTFsConst.append(ctf)
        #new file with selected CTFs
        self.setOfCTFsConst.write()
        #check if empty
        if self.setOfCTFsConst.getSize() < 1:
            print "WARNING: Empty set of CTFs."
Example #19
    def init3DStep(self, partFile,SamplingRate):
        partName = os.path.basename(partFile)
        partName = os.path.splitext(partName)[0]
        tmpDir = self._getTmpPath(partName)
        makePath(tmpDir)

        params = self.getI3DParams(partFile, SamplingRate)

        self.runJob("simple_distr_exec", params, cwd=os.path.abspath(tmpDir))

        #Move output files to ExtraPath and rename them properly
        folder = self._getExtraPath(partName)
        folder = os.path.abspath(folder)
        source_dir = os.path.abspath(tmpDir)
        files1 = glob.iglob(os.path.join(source_dir, "*.txt"))
        files2 = glob.iglob(os.path.join(source_dir, "*.mrc"))
        for file1, file2 in map(None, files1, files2):
            if (file1 != None):
                if os.path.isfile(file1):
                    oldName = os.path.basename(file1)
                    shutil.move(file1, folder + '_' + oldName)
            if (file2 != None):
                if os.path.isfile(file2):
                    oldName = os.path.basename(file2)
                    shutil.move(file2, folder + '_' + oldName)
        cleanPath(tmpDir)
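The loop over files1 and files2 above relies on Python 2's map(None, a, b), which zips two iterators and pads the shorter one with None; under Python 3 the equivalent is itertools.zip_longest. A small sketch of the same "move every produced file next to a prefix" logic written that way (the function and argument names are mine, not the protocol's):

import os
import shutil
from itertools import zip_longest

def move_with_prefix(txt_files, mrc_files, prefix):
    """Move all files from both lists, renaming each to '<prefix>_<oldname>'."""
    for f1, f2 in zip_longest(txt_files, mrc_files):   # Python 3 analogue of map(None, ...)
        for f in (f1, f2):
            if f is not None and os.path.isfile(f):
                shutil.move(f, prefix + "_" + os.path.basename(f))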
Example #20
    def extractParticles(self):
        samplingRateCoord = self.inputCoordinates.get().getSamplingRate()
        samplingRateTomo = self.getInputTomograms().getFirstItem(
        ).getSamplingRate()
        for tomo in self.tomoFiles:
            args = '%s ' % os.path.abspath(tomo)
            args += "--coords %s --boxsize %i" % (pwutils.replaceBaseExt(
                tomo, 'coords'), self.boxSize.get())
            if self.doInvert:
                args += ' --invert'
            if self.doNormalize:
                args += ' --normproc %s' % self.getEnumText('normproc')
            args += ' --cshrink %d' % (samplingRateCoord / samplingRateTomo)

            program = emantomo.Plugin.getProgram('e2spt_boxer_old.py')
            self.runJob(program,
                        args,
                        cwd=self._getExtraPath(),
                        numberOfMpi=1,
                        numberOfThreads=1)
            moveFile(
                self._getExtraPath(os.path.join('sptboxer_01',
                                                'basename.hdf')),
                self._getExtraPath(pwutils.replaceBaseExt(tomo, 'hdf')))
            cleanPath(self._getExtraPath("sptboxer_01"))
Example #21
 def _createCluster(self):
     """ Create the cluster with the selected particles
     from the cluster. This method will be called when
     the button 'Create Cluster' is pressed.
     """
     # Write the particles
     prot = self.protocol
     project = prot.getProject()
     inputSet = prot.getInputParticles()
     fnSqlite = prot._getTmpPath('cluster_particles.sqlite')
     cleanPath(fnSqlite)
     partSet = SetOfParticles(filename=fnSqlite)
     partSet.copyInfo(inputSet)
     for point in self.getData():
         if point.getState() == Point.SELECTED:
             particle = inputSet[point.getId()]
             partSet.append(particle)
     partSet.write()
     partSet.close()
             
     from protocol_batch_cluster import BatchProtNMACluster
     newProt = project.newProtocol(BatchProtNMACluster)
     clusterName = self.clusterWindow.getClusterName()
     if clusterName:
         newProt.setObjLabel(clusterName)
     newProt.inputNmaDimred.set(prot)
     newProt.sqliteFile.set(fnSqlite)
     
     project.launchProtocol(newProt)
Example #22
    def kmeansClassifyStep(self, fnInputMd):
        iteration = 0
        args = "-i %s -k %d -m %d" % (fnInputMd, self.numberOfClasses.get(),
                                      self.maxObjects.get())
        self.runJob("xmipp_classify_kmeans_2d", args)
        cleanPath(self._getExtraPath("level_00"))
        blocks = md.getBlocksInMetaDataFile(self._getExtraPath("output.xmd"))
        fnDir = self._getExtraPath()
        # Gather all images in block
        for b in blocks:
            if b.startswith('class0'):
                args = "-i %s@%s --iter 5 --distance correlation " \
                       "--classicalMultiref --nref 1 --odir %s --oroot %s" % \
                       (b, self._getExtraPath("output.xmd"), fnDir, b)
                if iteration == 0:
                    args += " --nref0 1"
                else:
                    args += " --ref0 %s" % \
                            self._getExtraPath("level_00/%s_classes.stk" % b)
                self.runJob("xmipp_classify_CL2D",
                            args,
                            numberOfMpi=max(2, self.numberOfMpi.get()))
                cleanPath(self._getExtraPath("level_00/%s_classes.xmd" % b))

        streamMode = Set.STREAM_CLOSED if self.finished else Set.STREAM_OPEN
        outSet = self._loadOutputSet(SetOfClasses2D, 'classes2D.sqlite')
        self._updateOutputSet('outputParticles', outSet, streamMode)
Example #23
    def createOutputStep(self):
        fnImgs = self._getExtraPath('images.stk')
        if os.path.exists(fnImgs):
            cleanPath(fnImgs)

        outputSet = self._createSetOfParticles()
        imgSet = self.inputSet.get()
        imgFn = self._getExtraPath("anglesCont.xmd")
        self.newAssignmentPerformed = os.path.exists(self._getExtraPath("angles.xmd"))
        self.samplingRate = self.inputSet.get().getSamplingRate()
        if isinstance(imgSet, SetOfClasses2D):
            outputSet = self._createSetOfClasses2D(imgSet)
            outputSet.copyInfo(imgSet.getImages())
        elif isinstance(imgSet, SetOfAverages):
            outputSet = self._createSetOfAverages()
            outputSet.copyInfo(imgSet)
        else:
            outputSet = self._createSetOfParticles()
            outputSet.copyInfo(imgSet)
            if not self.newAssignmentPerformed:
                outputSet.setAlignmentProj()
        outputSet.copyItems(imgSet,
                            updateItemCallback=self._processRow,
                            itemDataIterator=md.iterRows(imgFn, sortByLabel=md.MDL_ITEM_ID))
        self._defineOutputs(outputParticles=outputSet)
        self._defineSourceRelation(self.inputSet, outputSet)
Example #24
    def _createCluster(self):
        """ Create the cluster with the selected particles
        from the cluster. This method will be called when
        the button 'Create Cluster' is pressed.
        """
        # Write the particles
        prot = self.protocol
        project = prot.getProject()
        inputSet = prot.getInputParticles()
        fnSqlite = prot._getTmpPath('cluster_particles.sqlite')
        cleanPath(fnSqlite)
        partSet = SetOfParticles(filename=fnSqlite)
        partSet.copyInfo(inputSet)
        for point in self.getData():
            if point.getState() == Point.SELECTED:
                particle = inputSet[point.getId()]
                partSet.append(particle)
        partSet.write()
        partSet.close()

        from protocol_batch_cluster import BatchProtNMACluster
        newProt = project.newProtocol(BatchProtNMACluster)
        clusterName = self.clusterWindow.getClusterName()
        if clusterName:
            newProt.setObjLabel(clusterName)
        newProt.inputNmaDimred.set(prot)
        newProt.sqliteFile.set(fnSqlite)

        project.launchProtocol(newProt)
Example #25
    def __createTemporaryCtfs(self, obj, setOfMics):
        """ Create a temporary .sqlite file to visualize CTF while the
             protocol has not finished yet.
            """
        cleanPath(obj._getPath("ctfs_temporary.sqlite"))
        ctfSet = self.protocol._createSetOfCTF("_temporary")

        for mic in setOfMics:
            micFn = mic.getFileName()
            micDir = obj._getExtraPath(removeBaseExt(mic.getFileName()))
            samplingRate = mic.getSamplingRate() * self.protocol.ctfDownFactor.get()
            mic.setSamplingRate(samplingRate)
            out = self.protocol._getCtfOutPath(micDir)
            psdFile = self.protocol._getPsdPath(micDir)

            if exists(out) and exists(psdFile):
                ctfModel = em.CTFModel()
                readCtfModel(ctfModel, out,
                             ctf4=self.protocol.useCtffind4.get())
                ctfModel.setPsdFile(psdFile)
                ctfModel.setMicrograph(mic)
                ctfSet.append(ctfModel)

        if not ctfSet.isEmpty():
            ctfSet.write()
            ctfSet.close()

        return ctfSet
Example #26
    def reformatPdbOutputStep(self, numberOfModes):
        self._enterWorkingDir()
        
        makePath('modes')
        Natoms = self._countAtoms("atoms.pdb")
        fhIn = open('diagrtb.eigenfacs')
        fhAni = open('vec_ani.txt','w')
        
        for n in range(numberOfModes):
            # Skip two lines
            fhIn.readline()
            fhIn.readline()
            fhOut=open('modes/vec.%d'%(n+1),'w')
            for i in range(Natoms):
                line=fhIn.readline()
                fhOut.write(line)
                fhAni.write(line.rstrip().lstrip()+" ")
            fhOut.close()
            if n!=(numberOfModes-1):
                fhAni.write("\n")
        fhIn.close()
        fhAni.close()
        self.runJob("nma_prepare_for_animate.py","",env=getNMAEnviron())
        cleanPath("vec_ani.txt")
        moveFile('vec_ani.pkl', 'extra/vec_ani.pkl')

        self._leaveWorkingDir()
Example #27
    def unblurStep(self, mvF, samplingRate):
        #movieName = self._getMovieName(movie)
        mvName = os.path.basename(mvF)
        mvName = os.path.splitext(mvName)[0]
        tmpDir = self._getTmpPath(mvName)
        makePath(tmpDir)
        mvRoot = os.path.join(tmpDir, mvName)

        fnInput = os.path.abspath(mvRoot + '.txt')
        fhInput = open(fnInput, 'w')
        fhInput.write(os.path.abspath(mvF))
        fhInput.close()

        params = self.getUnblurParams(fnInput, samplingRate, mvName)

        self.runJob(simple.Plugin.distr_exec(),
                    params,
                    cwd=os.path.abspath(tmpDir),
                    env=simple.Plugin.getEnviron())
        moveFile(mvRoot + "_intg1.mrc", self._getExtraPath(mvName + ".mrc"))
        moveFile(mvRoot + "_pspec1.mrc",
                 self._getExtraPath(mvName + "_psd.mrc"))
        moveFile(mvRoot + "_thumb1.mrc",
                 self._getExtraPath(mvName + "_thumb.mrc"))
        cleanPath(tmpDir)
Example #28
    def performNmaStep(self, atomsFn, modesFn):
        sampling = self.inputParticles.get().getSamplingRate()
        discreteAngularSampling = self.discreteAngularSampling.get()
        trustRegionScale = self.trustRegionScale.get()
        odir = self._getTmpPath()
        imgFn = self.imgsFn

        args = "-i %(imgFn)s --pdb %(atomsFn)s --modes %(modesFn)s --sampling_rate %(sampling)f "
        args += "--discrAngStep %(discreteAngularSampling)f --odir %(odir)s --centerPDB "
        args += "--trustradius_scale %(trustRegionScale)d --resume "

        if self.getInputPdb().getPseudoAtoms():
            args += "--fixed_Gaussian "

        if self.alignmentMethod == NMA_ALIGNMENT_PROJ:
            args += "--projMatch "

        self.runJob("xmipp_nma_alignment", args % locals())

        cleanPath(self._getPath('nmaTodo.xmd'))

        inputSet = self.inputParticles.get()
        mdImgs = md.MetaData(self.imgsFn)
        for objId in mdImgs:
            imgPath = mdImgs.getValue(md.MDL_IMAGE, objId)
            index, fn = xmippToLocation(imgPath)
            # Consider the index as the id in the input set
            particle = inputSet[index]
            mdImgs.setValue(md.MDL_IMAGE, getImageLocation(particle), objId)
            mdImgs.setValue(md.MDL_ITEM_ID, long(particle.getObjId()), objId)
        mdImgs.write(self.imgsFn)
Example #29
    def retrieveTrainSets(self):
        """ Retrieve, link and return a SetOfParticles
            corresponding to the NegativeTrain DeepConsensus training set
            with certain extraction conditions (phaseFlip/invContrast).
        """
        prefixYES = ''
        prefixNO = 'no'
        modelType = "negativeTrain_%sPhaseFlip_%sInvert.mrcs" % (
            prefixYES if self.doInvert.get() else prefixNO,
            prefixYES if self.ignoreCTF.get() else prefixNO)
        modelPath = xmipp3.Plugin.getModel("deepConsensus", modelType)
        print("Precompiled negative particles found at %s" % (modelPath))
        modelFn = self._getTmpPath(modelType)
        pwutils.createLink(modelPath, modelFn)

        tmpSqliteSuff = "AddTrain"
        partSet = self._createSetOfParticles(tmpSqliteSuff)
        img = SetOfParticles.ITEM_TYPE()

        imgh = ImageHandler()
        _, _, _, n = imgh.getDimensions(modelFn)
        if n > 1:
            for index in range(1, n + 1):
                img.cleanObjId()
                img.setMicId(9999)
                img.setFileName(modelFn)
                img.setIndex(index)
                partSet.append(img)
        partSet.setAlignment(ALIGN_NONE)

        cleanPath(self._getPath("particles%s.sqlite" % tmpSqliteSuff))
        return partSet
Example #30
    def visualizeClasses(self, e=None):
        prot = self.protocol
        classDir = prot.getClassDir()
        classAvg = 'classavg'
        classVar = 'classvar'
        classDoc = 'docclass'

        params = {
            '[class_dir]': classDir,
            '[desired-classes]': self.numberOfClasses.get(),
            '[particles]': prot._params['particles'] + '@******',
            '[class_doc]': join(classDir, classDoc + '***'),
            '[class_avg]': join(classDir, classAvg + '***'),
            '[class_var]': join(classDir, classVar + '***'),
        }

        prot.runTemplate('mda/classavg.msa', prot.getExt(), params)

        particles = prot.inputParticles.get()
        particles.load()
        sampling = particles.getSamplingRate()

        setFn = prot._getTmpPath('classes2D.sqlite')
        cleanPath(setFn)
        classes2D = SetOfClasses2D(filename=setFn)
        classes2D.setImages(particles)

        # We need to first create a map between the particles index and
        # the assigned class number
        classDict = {}
        for classId in range(1, self.numberOfClasses.get() + 1):
            docClass = prot._getPath(classDir, classDoc + '%03d.stk' % classId)
            doc = SpiderDocFile(docClass)
            for values in doc.iterValues():
                imgIndex = int(values[0])
                classDict[imgIndex] = classId
            doc.close()

        updateItem = lambda p, i: p.setClassId(classDict[i])

        def updateClass(cls):
            rep = cls.getRepresentative()
            rep.setSamplingRate(particles.getSamplingRate())
            avgFn = prot._getPath(classDir,
                                  classAvg + '%03d.stk' % cls.getObjId())
            rep.setLocation(1, avgFn)

        particlesRange = range(1, particles.getSize() + 1)
        classes2D.classifyItems(updateItemCallback=updateItem,
                                updateClassCallback=updateClass,
                                itemDataIterator=iter(particlesRange))

        classes2D.write()
        classes2D.close()

        return [
            ClassesView(self.getProject(), prot.strId(),
                        classes2D.getFileName(), particles.strId())
        ]
Example #31
 def cleanPrime(self):
     self._enterDir(self._getExtraPath())
     cleanPath("cmdline.txt")
     cleanPattern("*.txt")
     cleanPattern("startvol_state*.spi")
     # Get last iteration
     for i in range(1, self.getLastIteration()):
         cleanPattern("recvol_state*_iter%d.spi" % i)
     self._leaveDir()
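cleanPrime removes a fixed file, several glob patterns and the per-iteration volumes from the extra directory. For readers without pyworkflow at hand, roughly what cleanPath and cleanPattern do can be approximated with the standard library; a hedged sketch, not the library's actual implementation:

import glob
import os
import shutil

def clean_path(*paths):
    """Delete files or directories if they exist (rough analogue of cleanPath)."""
    for p in paths:
        if os.path.isdir(p):
            shutil.rmtree(p)
        elif os.path.exists(p):
            os.remove(p)

def clean_pattern(pattern):
    """Delete every file matching a glob pattern (rough analogue of cleanPattern)."""
    clean_path(*glob.glob(pattern))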
Example #32
 def cleanPrime(self):
     self._enterDir(self._getExtraPath())
     cleanPath("cmdline.txt")
     cleanPattern("*.txt")
     cleanPattern("startvol_state*.spi")
     # Get last iteration
     for i in range(1, self.getLastIteration()):
         cleanPattern("recvol_state*_iter%d.spi"%i)
     self._leaveDir()
Example #33
    def visualizeClasses(self, e=None):
        prot = self.protocol
        classDir = prot.getClassDir()
        classAvg = 'classavg'
        classVar = 'classvar'
        classDoc = 'docclass'
        
        params = {'[class_dir]': classDir,
                  '[desired-classes]': self.numberOfClasses.get(),
                  '[particles]': prot._params['particles'] + '@******',
                  '[class_doc]': join(classDir, classDoc + '***'), 
                  '[class_avg]': join(classDir, classAvg + '***'),
                  '[class_var]': join(classDir, classVar + '***'),        
                  }
        
        prot.runTemplate('mda/classavg.msa', prot.getExt(), params)

        particles = prot.inputParticles.get()
        particles.load()
        sampling = particles.getSamplingRate()
        
        setFn = prot._getTmpPath('classes2D.sqlite')
        cleanPath(setFn)
        classes2D = SetOfClasses2D(filename=setFn)
        classes2D.setImages(particles)

        # We need to first create a map between the particles index and
        # the assigned class number
        classDict = {}
        for classId in range(1, self.numberOfClasses.get()+1):
            docClass = prot._getPath(classDir, classDoc + '%03d.stk' % classId)
            doc = SpiderDocFile(docClass)
            for values in doc.iterValues():
                imgIndex = int(values[0])
                classDict[imgIndex] = classId
            doc.close()

        updateItem = lambda p, i: p.setClassId(classDict[i])

        def updateClass(cls):
            rep = cls.getRepresentative()
            rep.setSamplingRate(particles.getSamplingRate())
            avgFn = prot._getPath(classDir,
                                  classAvg + '%03d.stk' % cls.getObjId())
            rep.setLocation(1, avgFn)

        particlesRange = range(1, particles.getSize()+1)
        classes2D.classifyItems(updateItemCallback=updateItem,
                                updateClassCallback=updateClass,
                                itemDataIterator=iter(particlesRange))

        classes2D.write()
        classes2D.close()

        return [ClassesView(self.getProject(), prot.strId(),
                            classes2D.getFileName(), particles.strId())]
Example #34
    def visualizeClasses(self, e=None):
        prot = self.protocol
        classDir = prot.getClassDir()
        classAvg = 'classavg'
        classVar = 'classvar'
        classDoc = 'docclass'
        ext = prot.getExt()
        
        params = {'[class_dir]': classDir,
                  '[desired-classes]': self.numberOfClasses.get(),
                  '[particles]': prot._params['particles'] + '@******',
                  '[class_doc]': join(classDir, classDoc + '***'), 
                  '[class_avg]': join(classDir, classAvg + '***'),
                  '[class_var]': join(classDir, classVar + '***'),        
                  }
        
        prot.runTemplate('mda/classavg.msa', prot.getExt(), params)

        particles = prot.inputParticles.get()
        particles.load()
        sampling = particles.getSamplingRate()
        
        setFn = self._getPath('classes2D.sqlite')
        cleanPath(setFn)
        classes2D = SetOfClasses2D(filename=setFn)
        classes2D.setImages(particles)
            
        for classId in range(1, self.numberOfClasses.get()+1):
            class2D = Class2D()
            class2D.setObjId(classId)
            
            avgImg = Particle()
            avgImg.setSamplingRate(sampling)
            avgFn = prot._getPath(classDir, classAvg + '%03d.stk' % classId)
            avgImg.setLocation(1, avgFn)
            #avgImg.setLocation(classId, 'classavg.stk')
            
            class2D.setRepresentative(avgImg)
            classes2D.append(class2D)
            
            docClass = prot._getPath(classDir, classDoc + '%03d.stk' % classId)
            doc = SpiderDocFile(docClass)
            
            for values in doc.iterValues():
                imgId = int(values[0])
                img = particles[imgId]
                class2D.append(img)
                
            classes2D.update(class2D)            
        classes2D.write()
        classes2D.close()

        return [ClassesView(self.getProject(),
                            prot.strId(), classes2D.getFileName(), 
                            prot.inputParticles.get().strId())]
                              
Example #35
def writeSqliteIterData(imgStar, imgSqlite, **kwargs):
    """ Given a Relion images star file (from some iteration)
    create the corresponding SetOfParticles (sqlite file)
    for this iteration. This file can be visualized sorted
    by the LogLikelihood.
    """
    cleanPath(imgSqlite)
    imgSet = em.SetOfParticles(filename=imgSqlite)
    readSetOfParticles(imgStar, imgSet, **kwargs)
    imgSet.write()
Example #36
 def evaluateResiduals(self):
     # Evaluate each image
     fnAutoCorrelations = self._getExtraPath("autocorrelations.xmd")
     stkAutoCorrelations = self._getExtraPath("autocorrelations.stk")
     stkResiduals = self._getExtraPath("residuals.stk")
     anglesOutFn=self._getExtraPath("anglesCont.xmd")
     self.runJob("xmipp_image_residuals", " -i %s -o %s --save_metadata_stack %s" % (stkResiduals, stkAutoCorrelations, fnAutoCorrelations), numberOfMpi=1)
     self.runJob("xmipp_metadata_utilities", '-i %s --operate rename_column "image imageResidual"' % fnAutoCorrelations, numberOfMpi=1)
     self.runJob("xmipp_metadata_utilities", '-i %s --set join %s imageResidual' % (anglesOutFn, fnAutoCorrelations), numberOfMpi=1)
     cleanPath(fnAutoCorrelations)
Example #37
 def evaluateResiduals(self):
     # Evaluate each image
     fnAutoCorrelations = self._getExtraPath("autocorrelations.xmd")
     stkAutoCorrelations = self._getExtraPath("autocorrelations.stk")
     stkResiduals = self._getExtraPath("residuals.stk")
     anglesOutFn=self._getExtraPath("anglesCont.xmd")
     self.runJob("xmipp_image_residuals", " -i %s -o %s --save_metadata_stack %s" % (stkResiduals, stkAutoCorrelations, fnAutoCorrelations), numberOfMpi=1)
     self.runJob("xmipp_metadata_utilities", '-i %s --operate rename_column "image imageResidual"' % fnAutoCorrelations, numberOfMpi=1)
     self.runJob("xmipp_metadata_utilities", '-i %s --set join %s imageResidual' % (anglesOutFn, fnAutoCorrelations), numberOfMpi=1)
     cleanPath(fnAutoCorrelations)
Example #38
def writeSqliteIterData(imgStar, imgSqlite, **kwargs):
    """ Given a Relion images star file (from some iteration)
    create the corresponding SetOfParticles (sqlite file)
    for this iteration. This file can be visualized sorted
    by the LogLikelihood.
    """
    cleanPath(imgSqlite)
    imgSet = em.SetOfParticles(filename=imgSqlite)
    readSetOfParticles(imgStar, imgSet, **kwargs)
    imgSet.write()
Example #39
File: protocol.py  Project: liz18/scipion
    def __createSet(self, SetClass, template, suffix, **kwargs):
        """ Create a set and set the filename using the suffix. 
        If the file exists, it will be deleted. """
        setFn = self._getPath(template % suffix)
        # Close the connection to the database if
        # it is open before deleting the file
        cleanPath(setFn)

        SqliteDb.closeConnection(setFn)
        setObj = SetClass(filename=setFn, **kwargs)
        return setObj
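Here the protocol recreates a .sqlite-backed set from scratch: the stale file is removed with cleanPath, any open database connection is released via SqliteDb.closeConnection, and a fresh set object is built on the same filename. The same idea expressed with the sqlite3 module only, as a minimal sketch:

import os
import sqlite3

def recreate_set_file(set_fn, open_conn=None):
    """Delete a set's backing .sqlite file and return a fresh, empty database."""
    if open_conn is not None:
        open_conn.close()           # release the old connection before removing the file
    if os.path.exists(set_fn):
        os.remove(set_fn)           # ~ cleanPath(setFn)
    return sqlite3.connect(set_fn)  # a new, empty database file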
Example #40
    def ransacIterationStep(self, n):

        fnOutputReducedClass = self._getExtraPath("reducedClasses.xmd")
        fnBase = "ransac%05d" % n
        fnRoot = self._getTmpPath(fnBase)

        if self.dimRed:
            # Get a random sample of images
            self.runJob(
                "xmipp_transform_dimred",
                "-i %s --randomSample %s.xmd  %d -m LTSA " %
                (fnOutputReducedClass, fnRoot, self.numGrids.get()))
        else:
            self.runJob(
                "xmipp_metadata_utilities",
                "-i %s -o %s.xmd  --operate random_subset %d --mode overwrite "
                % (fnOutputReducedClass, fnRoot, self.numSamples.get()))
            self.runJob(
                "xmipp_metadata_utilities",
                "-i %s.xmd --fill angleRot rand_uniform -180 180 " % (fnRoot))
            self.runJob(
                "xmipp_metadata_utilities",
                "-i %s.xmd --fill angleTilt rand_uniform 0 180 " % (fnRoot))
            self.runJob(
                "xmipp_metadata_utilities",
                "-i %s.xmd --fill anglePsi  rand_uniform 0 360 " % (fnRoot))

        # If there is an initial volume, assign angles
        if self.initialVolume.hasValue():
            fnGallery = self._getTmpPath('gallery_InitialVolume.stk')
            self.runJob("xmipp_angular_projection_matching", "-i %s.xmd -o %s.xmd --ref %s --Ri 0 --Ro %s --max_shift %s --append"\
                   %(fnRoot,fnRoot,fnGallery,str(self.Xdim/2),str(self.Xdim/20)))

        # Reconstruct with the small sample
        self.reconstructStep(fnRoot)

        fnVol = fnRoot + '.vol'

        # Generate projections from this reconstruction
        fnGallery = self._getTmpPath('gallery_' + fnBase + '.stk')
        self.runJob("xmipp_angular_project_library", "-i %s -o %s --sampling_rate %f --sym %s --method fourier 1 0.25 bspline --compute_neighbors --angular_distance -1 --experimental_images %s --max_tilt_angle 90"\
                    %(fnVol,fnGallery,self.angularSampling.get(),self.symmetryGroup.get(),fnOutputReducedClass))

        # Assign angles to the rest of images
        fnAngles = self._getTmpPath('angles_' + fnBase + '.xmd')
        self.runJob("xmipp_angular_projection_matching", "-i %s -o %s --ref %s --Ri 0 --Ro %s --max_shift %s --append"\
                              %(fnOutputReducedClass,fnAngles,fnGallery,str(self.Xdim/2),str(self.Xdim/20)))

        # Delete intermediate files
        cleanPath(fnGallery)
        cleanPath(self._getTmpPath('gallery_' + fnBase + '_sampling.xmd'))
        cleanPath(self._getTmpPath('gallery_' + fnBase + '.doc'))
        cleanPath(fnVol)
        cleanPath(self._getTmpPath(fnBase + '.xmd'))
Example #41
    def __createSet(self, SetClass, template, suffix):
        """ Create a set and set the filename using the suffix. 
        If the file exists, it will be deleted. """
        setFn = self._getPath(template % suffix)
        # Close the connection to the database if
        # it is open before deleting the file
        cleanPath(setFn)

        SqliteDb.closeConnection(setFn)
        setObj = SetClass(filename=setFn)
        return setObj
Example #42
 def produceProjections(self, fnVol, fnAngles, Ts):
     fnVol = self._getTmpPath("volume.vol")
     anglesOutFn = self._getExtraPath("anglesCont.stk")
     projectionsOutFn = self._getExtraPath("projections.stk")
     args="-i %s -o %s --ref %s --oprojections %s --sampling %f " \
          "--max_angular_change 90"%(fnAngles,anglesOutFn,fnVol,
                               projectionsOutFn,Ts)
     self.runJob("xmipp_angular_continuous_assign2", args)
     fnNewParticles = self._getExtraPath("images.stk")
     if os.path.exists(fnNewParticles):
         cleanPath(fnNewParticles)
Example #43
 def produceResiduals(self, fnVol, fnAngles, Ts):
     if fnVol.endswith(".mrc"):
         fnVol+=":mrc"
     anglesOutFn=self._getExtraPath("anglesCont.stk")
     residualsOutFn=self._getExtraPath("residuals.stk")
     projectionsOutFn=self._getExtraPath("projections.stk")
     xdim=self.inputVolume.get().getDim()[0]
     self.runJob("xmipp_angular_continuous_assign2", "-i %s -o %s --ref %s --optimizeAngles --optimizeGray --optimizeShift --max_shift %d --oresiduals %s --oprojections %s --sampling %f" %\
                 (fnAngles,anglesOutFn,fnVol,floor(xdim*0.05),residualsOutFn,projectionsOutFn,Ts))
     fnNewParticles=self._getExtraPath("images.stk")
     if os.path.exists(fnNewParticles):
         cleanPath(fnNewParticles)
Example #44
 def evaluateStep(self, outImgsFn):
     # Evaluate each image
     fnAutoCorrelations = self._getExtraPath("autocorrelations.xmd")
     stkAutoCorrelations = self._getExtraPath("autocorrelations.stk")
     stkDiff = self._getExtraPath("diff.stk")
     args1 = " -i %s -o %s --save_metadata_stack %s"
     args2 = " -i %s --set merge %s"
     outClasses = 'classes_aligned@' + outImgsFn
     self.runJob("xmipp_image_residuals", args1 % (stkDiff, stkAutoCorrelations, fnAutoCorrelations), numberOfMpi=1)
     self.runJob("xmipp_metadata_utilities", '-i %s --operate rename_column "image image1"' % fnAutoCorrelations, numberOfMpi=1)
     self.runJob("xmipp_metadata_utilities", args2 % (outClasses, fnAutoCorrelations), numberOfMpi=1)
     cleanPath(fnAutoCorrelations)
Example #45
 def produceResiduals(self, fnVol, fnAngles, Ts):
     if fnVol.endswith(".mrc"):
         fnVol += ":mrc"
     anglesOutFn = self._getExtraPath("anglesCont.stk")
     residualsOutFn = self._getExtraPath("residuals.stk")
     projectionsOutFn = self._getExtraPath("projections.stk")
     xdim = self.inputVolume.get().getDim()[0]
     self.runJob("xmipp_angular_continuous_assign2", "-i %s -o %s --ref %s --optimizeAngles --optimizeGray --optimizeShift --max_shift %d --oresiduals %s --oprojections %s --sampling %f" %\
                 (fnAngles,anglesOutFn,fnVol,floor(xdim*0.05),residualsOutFn,projectionsOutFn,Ts))
     fnNewParticles = self._getExtraPath("images.stk")
     if os.path.exists(fnNewParticles):
         cleanPath(fnNewParticles)
Example #46
    def reconstructNewVolumes(self,iteration):
        fnDir = self._getExtraPath()
        newXdim = self.readInfoField(fnDir,"size",xmippLib.MDL_XSIZE)
        angleStep = max(math.atan2(1,newXdim/2),self.minAngle.get())
        TsOrig=self.inputParticles.get().getSamplingRate()
        TsCurrent = self.readInfoField(fnDir,"sampling",xmippLib.MDL_SAMPLINGRATE)
        fnImages = join(fnDir,"images.xmd")
        
        maxShift=round(0.1*newXdim)
        R=self.particleRadius.get()
        if R<=0:
            R=self.inputParticles.get().getDimensions()[0]/2
        R=R*TsOrig/TsCurrent

        # Global alignment
        for i in range(self.inputVolumes.get().getSize()):
            fnVol = self._getExtraPath("volume%03d.vol"%i)

            # Prepare subset of experimental images
            fnTrain =  join(fnDir,"imgs_%03d.xmd"%i)
            self.runJob("xmipp_metadata_utilities","-i %s --operate random_subset %d -o %s"%(fnImages,self.NimgTrain,fnTrain),numberOfMpi=1)
            
            # Generate projections
            fnGallery=join(fnDir,"gallery%02d.stk"%i)
            fnGalleryMd=join(fnDir,"gallery%02d.doc"%i)
#             args="-i %s -o %s --sampling_rate %f --perturb %f --sym %s"%\
#                  (fnVol,fnGallery,angleStep,math.sin(angleStep*math.pi/180.0)/4,self.symmetryGroup)
            args="-i %s -o %s --sampling_rate %f --sym %s"%\
                 (fnVol,fnGallery,angleStep,self.symmetryGroup)
            args+=" --compute_neighbors --angular_distance -1 --experimental_images %s"%fnTrain
            self.runJob("xmipp_angular_project_library",args,numberOfMpi=self.numberOfMpi.get()*self.numberOfThreads.get())
            
            # Assign angles
            args='-i %s --initgallery %s --maxShift %d --odir %s --dontReconstruct --useForValidation 1'%\
                 (fnTrain,fnGalleryMd,maxShift,fnDir)
            self.runJob('xmipp_reconstruct_significant',args,numberOfMpi=self.numberOfMpi.get()*self.numberOfThreads.get())
            fnAngles = join(fnDir,"angles_iter001_00.xmd")

            # Reconstruct
            if exists(fnAngles):
                # Significant may decide not to write it if no image is significant
                args="-i %s -o %s --sym %s --weight --thr %d"%(fnAngles,fnVol,self.symmetryGroup,self.numberOfThreads.get())
                self.runJob("xmipp_reconstruct_fourier",args,numberOfMpi=self.numberOfMpi.get())
                args="-i %s --mask circular %f"%(fnVol,-R)
                self.runJob("xmipp_transform_mask",args,numberOfMpi=1)
                args="-i %s --select below 0 --substitute value 0"%fnVol
                self.runJob("xmipp_transform_threshold",args,numberOfMpi=1)

            # Clean
            cleanPath(fnTrain)
            self.runJob("rm -f",fnDir+"/*iter00?_00.xmd",numberOfMpi=1)
            self.runJob("rm -f",fnDir+"/gallery*",numberOfMpi=1)
Example #47
 def computeModesStep(self, fnPseudoatoms, numberOfModes, cutoffStr):
     (baseDir,fnBase)=os.path.split(fnPseudoatoms)
     fnBase=fnBase.replace(".pdb","")
     fnDistanceHist=os.path.join(baseDir,'extra',fnBase+'_distance.hist')
     rc = self._getRc(fnDistanceHist)
     self._enterWorkingDir()
     self.runJob('nma_record_info.py', "%d %s.pdb %d" % (numberOfModes, fnBase, rc),env=getNMAEnviron())
     self.runJob("nma_pdbmat.pl","pdbmat.dat",env=getNMAEnviron())
     self.runJob("nma_diag_arpack","",env=getNMAEnviron())
     if not exists("fort.11"):
         self._printWarnings(redStr("Modes cannot be computed. Check the number of modes you asked to compute and/or consider increasing cut-off distance. The maximum number of modes allowed by the method for pseudoatomic normal mode analysis is 3 times the number of pseudoatoms but the protocol allows only up to 200 modes as 20-100 modes are usually enough.  If the number of modes is below the minimum between 200 and 3 times the number of pseudoatoms, consider increasing cut-off distance."))
     cleanPath("diag_arpack.in", "pdbmat.dat")
     self._leaveWorkingDir()
Example #48
 def _createVolumesMd(self, volumes):
     """ Write a metadata with all volumes selected for visualization. """
     mdPath = self.protocol._getTmpPath('viewer_volumes.xmd')
     cleanPath(mdPath)
     md = xmippLib.MetaData()
     
     for volFn in volumes:
         md.clear()
         md.setValue(xmippLib.MDL_IMAGE, volFn, md.addObject())
         blockName = volFn.split("/")[3]
         #print "Volume: ", volFn, blockName
         md.write("%s@%s"% (blockName, mdPath), xmippLib.MD_APPEND)
     return [self.createDataView(mdPath)]
Example #49
    def convertInputStep(self, particlesId):
        """ Create the input file in STAR format as expected by Relion.
        If the input particles come from Relion, just link the file.
        Params:
            particlesId: use this parameter just to force a redo of the
                conversion if the input particles are changed.
        """
        imgSet = self._getInputParticles()
        imgStar = self._getFileName('input_star')

        self.info("Converting set from '%s' into '%s'" %
                           (imgSet.getFileName(), imgStar))
        
        # Pass stack file as None to avoid write the images files
        writeSetOfParticles(imgSet, imgStar, self._getExtraPath())
        
        if self.doCtfManualGroups:
            self._splitInCTFGroups(imgStar)
        
        if not self.IS_CLASSIFY:
            if self.realignMovieFrames:
                movieParticleSet = self.inputMovieParticles.get()
                
                auxMovieParticles = self._createSetOfMovieParticles(suffix='tmp')
                auxMovieParticles.copyInfo(movieParticleSet)
                # Discard the movie particles that are not present in the refinement set
                for movieParticle in movieParticleSet:
                    particle = imgSet[movieParticle.getParticleId()]
                    if particle is not None:
                        auxMovieParticles.append(movieParticle)
                writeSetOfParticles(auxMovieParticles,
                                    self._getFileName('movie_particles'), None, originalSet=imgSet,
                                    postprocessImageRow=self._postprocessImageRow)
                mdMovies = md.MetaData(self._getFileName('movie_particles'))
                mdParts = md.MetaData(self._getFileName('input_star'))

                if getVersion() == "1.4":
                    mdParts.renameColumn(md.RLN_IMAGE_NAME, md.RLN_PARTICLE_ORI_NAME)
                else:
                    mdParts.renameColumn(md.RLN_IMAGE_NAME, md.RLN_PARTICLE_NAME)
                mdParts.removeLabel(md.RLN_MICROGRAPH_NAME)
                
                detectorPxSize = (movieParticleSet.getAcquisition().getMagnification()
                                  * movieParticleSet.getSamplingRate() / 10000)
                mdAux = md.MetaData()
                mdMovies.fillConstant(md.RLN_CTF_DETECTOR_PIXEL_SIZE, detectorPxSize)
                
                mdAux.join2(mdMovies, mdParts, md.RLN_PARTICLE_ID, md.RLN_IMAGE_ID, md.INNER_JOIN)
                
                mdAux.write(self._getFileName('movie_particles'), md.MD_OVERWRITE)
                cleanPath(auxMovieParticles.getFileName())
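The filtering loop above does one indexed lookup in the refinement set per movie particle. A hedged sketch of the same filter against a precomputed set of IDs; it assumes, as the loop above does, that a movie particle's getParticleId() matches the object ID of its particle in the refinement set:

def filterMovieParticles(movieParticleSet, imgSet, auxMovieParticles):
    # Keep only the movie particles whose parent particle is in the refinement set.
    refinedIds = {particle.getObjId() for particle in imgSet}
    for movieParticle in movieParticleSet:
        if movieParticle.getParticleId() in refinedIds:
            auxMovieParticles.append(movieParticle)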
Example #50
0
 def _getIterData(self, it, **kwargs):
     from convert import readSetOfParticles
     
     imgSqlite = self.protocol._getFileName('data_scipion', iter=it)
     
     if not exists(imgSqlite):
         imgPar = self.protocol._getFileName('output_par', iter=it)
         
         cleanPath(imgSqlite)
         imgSet = em.SetOfParticles(filename=imgSqlite)
         readSetOfParticles(self.protocol.inputParticles.get(), imgSet, imgPar)
         imgSet.write()
         
     return imgSqlite
Example #51
0
File: viewer.py  Project: coocoky/scipion
    def createVolumesSqlite(self, files, path, samplingRate):
        from em import SetOfVolumes, Volume
        cleanPath(path)
        volSet = SetOfVolumes(filename=path)
        volSet.setSamplingRate(samplingRate)

        for volFn in files:
            vol = Volume()
            vol.setFileName(volFn)
            volSet.append(vol)
        volSet.write()
        volSet.close()
        
        return volSet
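A possible call site for the helper above; the viewer instance, file names, and sampling rate are illustrative only:

# Hypothetical usage: build a temporary volumes sqlite for visualization.
volFiles = ['extra/volume_001.mrc', 'extra/volume_002.mrc']   # made-up paths
volSet = viewer.createVolumesSqlite(volFiles,
                                    path='Tmp/viewer_volumes.sqlite',
                                    samplingRate=1.34)
# The returned set has already been written and closed, so a data viewer
# can read the sqlite straight from disk.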
Example #52
0
def writeSqliteIterData(partSet, imgSqlite, itemMatrix, iterTextFile):
    """ Given a Relion images star file (from some iteration)
    create the corresponding SetOfParticles (sqlite file)
    for this iteration. This file can be visualized sorted
    by the LogLikelihood.
    """
    cleanPath(imgSqlite)
    imgSet = em.SetOfParticles(filename=imgSqlite)
    imgSet.copyInfo(partSet)
    imgSet.setAlignment(em.ALIGN_PROJ)
    imgSet.copyItems(partSet,
                     updateItemCallback=itemMatrix,
                     itemDataIterator=iterTextFile)
    imgSet.write()
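As the docstring notes, the point of the sqlite is to let the particles be browsed sorted by log-likelihood. A hedged sketch of reading it back that way; the attribute name is illustrative, and iterItems(orderBy=..., direction=...) is assumed to be available on the set as it is on other Scipion sets:

# Hypothetical follow-up: iterate the converted set sorted by log-likelihood.
imgSet = em.SetOfParticles(filename=imgSqlite)
for particle in imgSet.iterItems(orderBy='_rlnLogLikeliContribution',
                                 direction='DESC'):
    print(particle.getObjId())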
Example #53
0
    def testOneRemoteHostToLocal(self):
        tempFolder = "oneRemoteHostToLocal"
        filePaths = {}
        checkPathList = []
        remoteSourceFilePathList2 = ft.getRemoteFolderFiles(
            self.remoteHostName2, self.remoteUserName2,
            self.remotePassword2, self.remoteSourceFolder2)
        for remoteSourceFilePath in remoteSourceFilePathList2:
            remoteSourceFileName = basename(remoteSourceFilePath)
            targetFilePath = join(self.localTargetFolder, tempFolder, "test2",
                                  remoteSourceFileName)
            filePaths[remoteSourceFilePath] = targetFilePath
            checkPathList.append(targetFilePath)

        self.fileTransfer.transferFilesFrom(
            filePaths, self.remoteHostName2, self.remoteUserName2,
            self.remotePassword2, gatewayHosts=self.gatewayHosts,
            numberTrials=self.numberTrials, forceOperation=self.forceOperation,
            operationId=self.operationId)
        passTest = len(self.fileTransfer.checkFiles(
            checkPathList, self.hostPasswords, gatewayHosts=self.gatewayHosts,
            numberTrials=self.numberTrials, forceOperation=self.forceOperation,
            operationId=self.operationId)) == 0
        cleanPath(join(self.localTargetFolder, tempFolder))
        self.assertTrue(passTest)
Example #54
0
    def testLocalToLocal(self):
        tempFolder = "localToLocal"
        filePaths = {}
        sourceFilesPathList = getFiles(self.localSourceFolder)
        for sourceFilePath in sourceFilesPathList:
            sourceFileName = basename(sourceFilePath)
            targetFilePath = join(self.localTargetFolder, tempFolder, sourceFileName)
            filePaths[sourceFilePath] = [targetFilePath]
        self.fileTransfer.transferFiles(
            filePaths, self.hostPasswords, gatewayHosts=self.gatewayHosts,
            numberTrials=self.numberTrials, forceOperation=self.forceOperation,
            operationId=self.operationId)
        checkPathList = ft.getFilePathList(filePaths)
        passTest = len(self.fileTransfer.checkFiles(
            checkPathList, self.hostPasswords, gatewayHosts=self.gatewayHosts,
            numberTrials=self.numberTrials, forceOperation=self.forceOperation,
            operationId=self.operationId)) == 0
#         self.fileTransfer.deleteFiles(checkPathList, self.hostPasswords, gatewayHosts=self.gatewayHosts, numberTrials=self.numberTrials, forceOperation=self.forceOperation, operationId=self.operationId)
        cleanPath(join(self.localTargetFolder, tempFolder))
        self.assertTrue(passTest)
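Both transfer tests build the same kind of mapping from source files to target paths. A compact, standalone sketch of that mapping (standard library only; the paths in the example comment are made up):

from os.path import basename, join

def buildTransferMap(sourceFiles, targetFolder, tempFolder):
    # Map each source file to the list of target paths it should be copied to,
    # mirroring the filePaths dictionaries built in the tests above.
    return {src: [join(targetFolder, tempFolder, basename(src))]
            for src in sourceFiles}

# buildTransferMap(['/data/a.mrc', '/data/b.mrc'], '/scratch', 'localToLocal')
# -> {'/data/a.mrc': ['/scratch/localToLocal/a.mrc'],
#     '/data/b.mrc': ['/scratch/localToLocal/b.mrc']}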
Example #55
0
 def analyzeOutOfCores(self, subset):
     """ Analyze which images are out of cores """
     levelMdFiles = self._getAllLevelMdFiles(subset)

     for fn in levelMdFiles:
         mdAll = md.MetaData()
         blocks = md.getBlocksInMetaDataFile(fn)
         fnDir = dirname(fn)
         # Gather all images in block
         for block in blocks:
             if block.startswith('class0'):
                 mdClass = md.MetaData(block + "@" + fn)
                 mdAll.unionAll(mdClass)
         if mdAll.size() > 0:
             # Compute difference to images
             fnSubset = join(fnDir, "images%s.xmd" % subset)
             mdAll.write(fnSubset)
             fnOutOfSubset = join(fnDir, "imagesOut.xmd")

             inputMd = self._getFileName('input_particles')

             args = "-i %s --set subtraction %s -o %s" % (inputMd,
                                                          fnSubset,
                                                          fnOutOfSubset)

             self.runJob("xmipp_metadata_utilities", args, numberOfMpi=1,
                         numberOfThreads=1)

             # Remove disabled and intermediate files
             mdClass = md.MetaData(fnOutOfSubset)
             mdClass.removeDisabled()
             fnRejected = "images_rejected@" + fn
             mdClass.write(fnRejected, md.MD_APPEND)
             cleanPath(fnOutOfSubset)
             cleanPath(fnSubset)

             # If enough images, make a small summary
             if mdClass.size() > 100:
                 from math import ceil
                 fnRejectedDir = join(fnDir, "rejected%s" % subset)
                 makePath(fnRejectedDir)
                 Nclasses = int(ceil(mdClass.size() / 300))
                 self.runJob("xmipp_classify_CL2D",
                             "-i %s --nref0 1 --nref %d --iter 5 --distance "
                             "correlation --classicalMultiref "
                             "--classifyAllImages --odir %s"
                             % (fnRejected, Nclasses, fnRejectedDir))
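Two ideas in the step above are easy to state on their own: the set subtraction that yields the images outside the selected subset, and the rough rule of about one summary class per 300 rejected images. A minimal standalone sketch of both with plain Python sets (note that under Python 2 the ceil(size()/300) above effectively floors because of integer division):

def imagesOutOfSubset(allImages, subsetImages):
    # Pure-Python analogue of the xmipp_metadata_utilities --set subtraction call.
    subset = set(subsetImages)
    return [img for img in allImages if img not in subset]

def summaryClassCount(nRejected, imagesPerClass=300):
    # Ceiling division without floats: about one class per 300 rejected images.
    return max(1, -(-nRejected // imagesPerClass))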
Example #56
0
    def _getIterClasses(self, it, clean=False):
        """ Return the .star file with the classes for this iteration.
        If the file doesn't exists, it will be created. 
        """
        data_classes = self._getFileName('classes_scipion', iter=it)
        
        if clean:
            cleanPath(data_classes)
        
        if not exists(data_classes):
            clsSet = self.OUTPUT_TYPE(filename=data_classes)
            clsSet.setImages(self.inputParticles.get())
            self._fillClassesFromIter(clsSet, it)
            clsSet.write()
            clsSet.close()

        return data_classes
Example #57
0
 def cleanDirectory(self):
     iteration = 1
     fnDirCurrent = self._getExtraPath("Iter%03d" % iteration)
     fnGlobal = join(fnDirCurrent, "globalAssignment")
     if exists(fnGlobal):
         cleanPath(join(fnGlobal, "images.stk"))
         cleanPath(join(fnGlobal, "images.xmd"))
         cleanPath(join(fnGlobal, "significant"))
         cleanPath(join(fnGlobal, "volumeRef.vol"))
Example #58
0
    def compareClassesStep(self, i1, i2):
        set1 = self.inputClasses1.get()
        set2 = self.inputClasses2.get()
        
        # Compare each pair of classes from set1 and set2 and
        # compute the Jaccard index for each (J = len(intersection) / len(union)).
        # Create a list with all pair indexes and then sort it.
        jaccardList = []
        f = open(self._getPath('jaccard.txt'), 'w')
        f.write('; class1 class2 intersection(i) union(i) jaccard index = len(i)/len(u)\n')
        for cls1 in set1:
            ids1 = cls1.getIdSet()
            for cls2 in set2:
                ids2 = cls2.getIdSet()
                inter = len(ids1.intersection(ids2))
                union = len(ids1.union(ids2))
                jaccardIndex = float(inter) / union
                jaccardTuple = (cls1.getObjId(), cls2.getObjId(), inter, union, jaccardIndex)
                f.write('%d %d %d %d %0.3f\n' % jaccardTuple)
                jaccardList.append(jaccardTuple)
        f.close()

        jaccardList.sort(key=lambda e: e[4], reverse=True)
        visitedClasses = set()
        outputFn = self._getPath('consensus.sqlite')
        cleanPath(outputFn)
        outputSet = EMSet(filename=outputFn)
        
        for clsId1, clsId2, inter, union, jaccardIndex in jaccardList:
            if clsId1 not in visitedClasses:
                visitedClasses.add(clsId1) # mark as visited
                cls1 = set1[clsId1]
                cls2 = set2[clsId2]
                o = Object()
                o.setObjLabel('classes %d - %d' % (clsId1, clsId2))
                o.class1 = cls1.clone()
                o.class1.id = Integer(clsId1)
                o.class2 = cls2.clone()
                o.class2.id = Integer(clsId2)
                o.jaccard = Float(jaccardIndex)
                o.intersection = Integer(inter)
                o.union = Integer(union)
                outputSet.append(o)
                
        self._defineOutputs(outputConsensus=outputSet)
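The core of the comparison is the Jaccard index between two ID sets followed by a greedy, best-first pairing of classes. A minimal standalone sketch of that logic with plain sets of integer IDs (no Scipion objects involved):

def jaccard(ids1, ids2):
    # J = |intersection| / |union|; defined as 0 for two empty sets.
    union = len(ids1 | ids2)
    return len(ids1 & ids2) / float(union) if union else 0.0

def pairClasses(classes1, classes2):
    # classes1/classes2 map class id -> set of particle ids.
    scored = [(jaccard(s1, s2), id1, id2)
              for id1, s1 in classes1.items()
              for id2, s2 in classes2.items()]
    scored.sort(reverse=True)
    visited, pairs = set(), []
    for score, id1, id2 in scored:
        if id1 not in visited:   # greedy: keep the best match for each class of set1
            visited.add(id1)
            pairs.append((id1, id2, score))
    return pairs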
Example #59
0
 def ransacIterationStep(self, n):
     fnOutputReducedClass = self._getExtraPath("reducedClasses.xmd")
     fnBase = "ransac%05d" % n
     fnRoot = self._getTmpPath(fnBase)

     if self.dimRed:
         # Get a random sample of images
         self.runJob("xmipp_transform_dimred",
                     "-i %s --randomSample %s.xmd  %d -m LTSA "
                     % (fnOutputReducedClass, fnRoot, self.numGrids.get()))
     else:
         self.runJob("xmipp_metadata_utilities",
                     "-i %s -o %s.xmd  --operate random_subset %d --mode overwrite "
                     % (fnOutputReducedClass, fnRoot, self.numSamples.get()))
         self.runJob("xmipp_metadata_utilities",
                     "-i %s.xmd --fill angleRot rand_uniform -180 180 " % fnRoot)
         self.runJob("xmipp_metadata_utilities",
                     "-i %s.xmd --fill angleTilt rand_uniform 0 180 " % fnRoot)
         self.runJob("xmipp_metadata_utilities",
                     "-i %s.xmd --fill anglePsi  rand_uniform 0 360 " % fnRoot)

     # If there is an initial volume, assign angles
     if self.initialVolume.hasValue():
         fnGallery = self._getTmpPath('gallery_InitialVolume.stk')
         self.runJob("xmipp_angular_projection_matching",
                     "-i %s.xmd -o %s.xmd --ref %s --Ri 0 --Ro %s --max_shift %s --append"
                     % (fnRoot, fnRoot, fnGallery,
                        str(self.Xdim / 2), str(self.Xdim / 20)))

     # Reconstruct with the small sample
     self.reconstructStep(fnRoot)

     fnVol = fnRoot + '.vol'

     # Generate projections from this reconstruction
     fnGallery = self._getTmpPath('gallery_' + fnBase + '.stk')
     self.runJob("xmipp_angular_project_library",
                 "-i %s -o %s --sampling_rate %f --sym %s --method fourier 1 0.25 bspline "
                 "--compute_neighbors --angular_distance -1 --experimental_images %s"
                 % (fnVol, fnGallery, self.angularSampling.get(),
                    self.symmetryGroup.get(), fnOutputReducedClass))

     # Assign angles to the rest of images
     fnAngles = self._getTmpPath('angles_' + fnBase + '.xmd')
     self.runJob("xmipp_angular_projection_matching",
                 "-i %s -o %s --ref %s --Ri 0 --Ro %s --max_shift %s --append"
                 % (fnOutputReducedClass, fnAngles, fnGallery,
                    str(self.Xdim / 2), str(self.Xdim / 20)))

     # Delete intermediate files
     cleanPath(fnGallery)
     cleanPath(self._getTmpPath('gallery_' + fnBase + '_sampling.xmd'))
     cleanPath(self._getTmpPath('gallery_' + fnBase + '.doc'))
     cleanPath(fnVol)
     cleanPath(self._getTmpPath(fnBase + '.xmd'))
     if self.initialVolume.hasValue():
         cleanPattern(self._getTmpPath("gallery_InitialVolume*"))
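The first branch of the iteration seeds each RANSAC trial with a random subset of images and uniformly random Euler angles. A hedged standalone sketch of that seeding step using numpy; the angle names mirror the Xmipp labels filled in by xmipp_metadata_utilities above:

import numpy as np

def randomSeedAssignment(imageNames, sampleSize, rng=None):
    # Pick a random subset of images and give each a uniform random orientation.
    if rng is None:
        rng = np.random.default_rng()
    chosen = rng.choice(len(imageNames), size=min(sampleSize, len(imageNames)),
                        replace=False)
    return [{'image': imageNames[i],
             'angleRot': rng.uniform(-180, 180),
             'angleTilt': rng.uniform(0, 180),
             'anglePsi': rng.uniform(0, 360)}
            for i in chosen]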
Example #60
0
 def _getIterClasses(self, it, clean=False):
     """ Return a classes .sqlite file for this iteration.
     If the file doesn't exist, it will be created by
     converting this iteration's iter_images.xmd file.
     """
     dataClasses = self._getFileName('classes_scipion', iter=it)
      
     if clean:
         path.cleanPath(dataClasses)
     
     if not exists(dataClasses):
         clsSet = data.SetOfClasses2D(filename=dataClasses)
         clsSet.setImages(self.inputParticles.get())
         self._fillClassesFromIter(clsSet, it)
         clsSet.write()
         clsSet.close()
     
     return dataClasses
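Examples #56 and #60 implement the same caching pattern: drop the cached file when clean=True, then rebuild it only if it is missing. A generic, hedged sketch of that pattern, with the builder callable standing in for _fillClassesFromIter and the surrounding set creation:

import os

def getOrCreate(path, build, clean=False):
    # Return 'path', rebuilding it with build(path) when it is absent
    # (or when the caller explicitly asks for a clean rebuild).
    if clean and os.path.exists(path):
        os.remove(path)
    if not os.path.exists(path):
        build(path)
    return path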