def _createCluster(self):
    """Build a new particle set from the currently selected points and
    launch a BatchProtNMACluster protocol on it.

    Triggered when the 'Create Cluster' button is pressed in the GUI.
    """
    sourceProt = self.protocol
    proj = sourceProt.getProject()
    allParticles = sourceProt.getInputParticles()

    # Dump the selected particles into a fresh sqlite set.
    sqlitePath = sourceProt._getTmpPath('cluster_particles.sqlite')
    cleanPath(sqlitePath)
    selection = SetOfParticles(filename=sqlitePath)
    selection.copyInfo(allParticles)
    selectedPoints = (p for p in self.getData()
                      if p.getState() == Point.SELECTED)
    for point in selectedPoints:
        selection.append(allParticles[point.getId()])
    selection.write()
    selection.close()

    # Create and launch the clustering protocol on the selection.
    from protocol_batch_cluster import BatchProtNMACluster
    newProt = proj.newProtocol(BatchProtNMACluster)
    clusterName = self.clusterWindow.getClusterName()
    if clusterName:
        newProt.setObjLabel(clusterName)
    newProt.inputNmaDimred.set(sourceProt)
    newProt.sqliteFile.set(sqlitePath)
    proj.launchProtocol(newProt)
def _loadInput(self):
    """Write the pending input particles to metadata files.

    On the first call (``self.check`` is None) every input particle is
    written to ``self.fnInputMd``. On later calls only particles created
    after the ``self.check`` timestamp are written there, while the
    already-seen ones are rewritten to ``self.fnInputOldMd``.
    ``self.check`` is then advanced to the newest creation timestamp.

    Returns:
        (inputSize, streamClosed): size of the input set and whether its
        stream is already closed.
    """
    partsFile = self.inputParticles.get().getFileName()
    inPartsSet = SetOfParticles(filename=partsFile)
    inPartsSet.loadAllProperties()

    if self.check is None:  # identity test; was '== None'
        # First pass: everything is new.
        writeSetOfParticles(inPartsSet, self.fnInputMd,
                            alignType=em.ALIGN_NONE, orderBy='creation')
    else:
        # NOTE(review): items whose creation time EQUALS self.check match
        # neither query ('>' vs '<') -- confirm this boundary is intended.
        writeSetOfParticles(inPartsSet, self.fnInputMd,
                            alignType=em.ALIGN_NONE, orderBy='creation',
                            where='creation>"' + str(self.check) + '"')
        writeSetOfParticles(inPartsSet, self.fnInputOldMd,
                            alignType=em.ALIGN_NONE, orderBy='creation',
                            where='creation<"' + str(self.check) + '"')

    # Remember the creation time of the newest particle seen so far
    # (first item when iterating by creation in descending order).
    for p in inPartsSet.iterItems(orderBy='creation', direction='DESC'):
        self.check = p.getObjCreation()
        break

    streamClosed = inPartsSet.isStreamClosed()
    inputSize = inPartsSet.getSize()
    inPartsSet.close()
    return inputSize, streamClosed
def loadInputs(self):
    """Scan the input micrograph and particle streams for new items.

    A particle counts as new when its micrograph is already available in
    the micrographs set and that micrograph has not been processed
    before (not in ``self.micsDone``). ``self.micsDone`` and
    ``self.inputSize`` are updated as a side effect.

    Returns:
        (newParts, closed): ids of the newly available particles and
        whether BOTH input streams are already closed.
    """
    micsFn = self.getInputMicrographs().getFileName()
    micsSet = SetOfMicrographs(filename=micsFn)
    micsSet.loadAllProperties()
    # Use a set for O(1) membership tests below (was a list: O(n) each).
    availableMics = set(mic.getObjId() for mic in micsSet)
    micsSetClosed = micsSet.isStreamClosed()
    micsSet.close()

    partsFn = self.getInputParticles().getFileName()
    partsSet = SetOfParticles(filename=partsFn)
    partsSet.loadAllProperties()
    newParts = []
    newMics = []
    for item in partsSet:
        micKey = item.getCoordinate().getMicId()
        if micKey not in self.micsDone and micKey in availableMics:
            newParts.append(item.getObjId())
            # Guard against several particles from the same micrograph:
            # the original appended micKey once per particle, producing
            # duplicate entries in self.micsDone.
            if micKey not in newMics:
                newMics.append(micKey)
    self.micsDone += newMics
    self.inputSize = partsSet.getSize()
    partSetClosed = partsSet.isStreamClosed()
    partsSet.close()
    return newParts, micsSetClosed and partSetClosed
def getParticles(self, protImport, classid):
    """Export the particles of class ``classid`` from ``protImport`` to
    a standalone sqlite file, re-import them through a new
    ProtImportParticles protocol and return that protocol.
    """
    dbPartSet = protImport._getPath("particles_class-%d.sqlite" % classid)
    class3D = protImport.outputClasses[classid]

    # Start from a clean file.
    if os.path.exists(dbPartSet):
        os.remove(dbPartSet)

    exported = SetOfParticles(filename=dbPartSet)
    exported.copyInfo(class3D)
    for particle in class3D:
        exported.append(particle)
    exported.write()
    exported.close()

    importProt = self.newProtocol(
        ProtImportParticles,
        objLabel='particles class-%d' % classid,
        importFrom=ProtImportParticles.IMPORT_FROM_SCIPION,
        sqliteFile=dbPartSet,
        magnification=10000,
        samplingRate=7.08,
        haveDataBeenPhaseFlipped=True)
    self.launchProtocol(importProt)
    self.assertIsNotNone(importProt.outputParticles.getFileName(),
                         "There was a problem with the import")
    return importProt
def _checkNewInput(self):
    """Check the input stream for particles not yet processed and, if
    any are found, insert the processing steps for them.
    """
    # Check if there are new particles to process from the input set
    partsFile = self.inputParticles.get().getFileName()
    partsSet = SetOfParticles(filename=partsFile)
    partsSet.loadAllProperties()
    # Keep detached copies of every input particle; clones survive
    # closing the sqlite-backed set below.
    self.SetOfParticles = [m.clone() for m in partsSet]
    self.streamClosed = partsSet.isStreamClosed()
    partsSet.close()
    # Load the set of already-processed particles from allDone.xmd.
    partsSet = self._createSetOfParticles()
    readSetOfParticles(self._getExtraPath("allDone.xmd"), partsSet)
    # NOTE(review): assumes ``id in partsSet`` tests membership by
    # object id -- confirm SetOfParticles.__contains__ semantics.
    newParts = any(m.getObjId() not in partsSet
                   for m in self.SetOfParticles)
    outputStep = self._getFirstJoinStep()
    if newParts:
        fDeps = self._insertNewPartsSteps(self.insertedDict,
                                          self.SetOfParticles)
        # Make the final join step wait for the newly inserted steps.
        if outputStep is not None:
            outputStep.addPrerequisites(*fDeps)
        self.updateSteps()
def test_hdfToStk(self): """ In this case the hdf stack files should be converted to .stk spider files for Relion. """ stackFiles = [ 'BPV_1386_ptcls.hdf', 'BPV_1387_ptcls.hdf', 'BPV_1388_ptcls.hdf' ] partSet = SetOfParticles(filename=':memory:') for fn in stackFiles: particle = Particle() particle.setLocation(1, self.ds.getFile('particles/%s' % fn)) partSet.append(particle) outputDir = self.getOutputPath() filesDict = convertBinaryFiles(partSet, outputDir) partSet.close() print filesDict
def test_hdfToStk(self): """ In this case the hdf stack files should be converted to .stk spider files for Relion. """ stackFiles = ['BPV_1386_ptcls.hdf', 'BPV_1387_ptcls.hdf', 'BPV_1388_ptcls.hdf'] partSet = SetOfParticles(filename=':memory:') for fn in stackFiles: particle = Particle() particle.setLocation(1, self.ds.getFile('particles/%s' % fn)) partSet.append(particle) outputDir = self.getOutputPath() filesDict = convertBinaryFiles(partSet, outputDir) partSet.close() print filesDict
class XmippProtEliminateEmptyParticles(XmippProtEliminateEmptyBase):
    """ Takes a set of particles and using statistical methods
    (variance of variances of sub-parts of input image) eliminates those
    samples, where there is no object/particle (only noise is presented
    there). Threshold parameter can be used for fine-tuning the algorithm
    for type of data. """

    _label = 'eliminate empty particles'

    def __init__(self, **args):
        XmippProtEliminateEmptyBase.__init__(self, **args)

    # --------------------------- DEFINE param functions ----------------------
    def _defineParams(self, form):
        """Declare the protocol input form (particles + threshold)."""
        form.addSection(label=Message.LABEL_INPUT)
        # - - - F O R P A R T I C L E S - - -
        form.addParam('inputParticles', param.PointerParam,
                      important=True,
                      label="Input particles",
                      pointerClass='SetOfParticles',
                      help='Select the input particles to be classified.')
        form.addParam('threshold', param.FloatParam, default=1.1,
                      label='Threshold used in elimination:',
                      help='Higher threshold => more particles will be '
                           'eliminated. Set to -1 for no elimination, even so '
                           'the "xmipp_scoreEmptiness" value will be attached to '
                           # typo fixed: was 'paricle'
                           'every particle for a posterior inspection.')
        self.addAdvancedParams(form)

    # --------------------------- INSERT steps functions ----------------------
    def _checkNewInput(self):
        """Insert processing steps for particles that appeared in the
        input stream since the last check.
        """
        partsFile = self.inputParticles.get().getFileName()
        self.lastCheck = getattr(self, 'lastCheck', datetime.now())
        mTime = datetime.fromtimestamp(os.path.getmtime(partsFile))
        # If the input particles sqlite has not changed since our last
        # check, it does not make sense to check for new input data.
        if self.lastCheck > mTime:
            return None
        self.lastCheck = datetime.now()
        outputStep = self._getFirstJoinStep()
        fDeps = self._insertNewPartsSteps()
        if outputStep is not None:
            outputStep.addPrerequisites(*fDeps)
        self.updateSteps()

    def eliminationStep(self, fnInputMd):
        """Write the not-yet-processed particles to ``fnInputMd`` and run
        the xmipp empty-particle elimination program on them.

        Advances ``self.check`` to the newest creation timestamp seen, so
        only newer particles are processed on the next call.
        """
        self.inputImages = self.inputParticles.get()
        partsFile = self.inputImages.getFileName()
        self.partsSet = SetOfParticles(filename=partsFile)
        self.partsSet.loadAllProperties()
        self.streamClosed = self.partsSet.isStreamClosed()
        if self.check is None:  # identity test; was '== None'
            # First pass: process every particle.
            writeSetOfParticles(self.partsSet, fnInputMd,
                                alignType=em.ALIGN_NONE, orderBy='creation')
        else:
            # Only particles created after the last processed one.
            writeSetOfParticles(self.partsSet, fnInputMd,
                                alignType=em.ALIGN_NONE, orderBy='creation',
                                where='creation>"' + str(self.check) + '"')
        # Remember the newest creation time (first item in DESC order).
        for p in self.partsSet.iterItems(orderBy='creation',
                                         direction='DESC'):
            self.check = p.getObjCreation()
            break
        self.partsSet.close()
        # NOTE(review): len() is taken AFTER close(); presumably the size
        # is cached by the mapper -- confirm this ordering is intended.
        self.lenPartsSet = len(self.partsSet)
        # Debug trace (fixed doubled paren in the message string).
        print("os.path.exists(fnInputMd): %s" % os.path.exists(fnInputMd))
        args = "-i %s -o %s -e %s -t %f" % (
            fnInputMd, self.fnOutputMd, self.fnElimMd, self.threshold.get())
        if self.addFeatures:
            args += " --addFeatures"
        if self.useDenoising:
            args += " --useDenoising -d %f" % self.denoising.get()
        self.runJob("xmipp_image_eliminate_empty_particles", args)
        cleanPath(fnInputMd)

    def createOutputs(self):
        """Update the 'output' and 'eliminated' particle sets from the
        metadata files produced by ``eliminationStep``.
        """
        streamMode = Set.STREAM_CLOSED if getattr(self, 'finished', False) \
            else Set.STREAM_OPEN

        def updateOutputs(mdFn, suffix):
            # Update one output set when new data exists, or perform the
            # final stream-closing update when the protocol finished.
            newData = os.path.exists(mdFn)
            lastToClose = getattr(self, 'finished', False) and \
                hasattr(self, '%sParticles' % suffix)
            if newData or lastToClose:
                outSet = self._loadOutputSet(em.SetOfParticles,
                                             '%sParticles.sqlite' % suffix)
                if newData:
                    partsSet = self._createSetOfParticles("AUX")
                    readSetOfParticles(mdFn, partsSet)
                    outSet.copyItems(partsSet,
                                     updateItemCallback=self._updateParticle,
                                     itemDataIterator=md.iterRows(
                                         mdFn, sortByLabel=md.MDL_ITEM_ID))
                    self.outputSize += len(partsSet)
                self._updateOutputSet('%sParticles' % suffix, outSet,
                                      streamMode)
                cleanPath(mdFn)

        updateOutputs(self.fnOutMdTmp, 'output')
        updateOutputs(self.fnElimMdTmp, 'eliminated')

    def getInput(self):
        """Return the input SetOfParticles (pointer dereferenced)."""
        return self.inputParticles.get()