def _createSubSetFromParticlesTiltPair(self, particlesTiltPair):
    """ Create a subset of a Particles Tilt Pair. """
    print 'create subset from particles tilt pair'
    output = ParticlesTiltPair(filename=self._getPath('particles_pairs.sqlite'))
    inputU = particlesTiltPair.getUntilted()
    inputT = particlesTiltPair.getTilted()
    outputU = SetOfParticles(filename=self._getPath('particles_untilted.sqlite'))
    outputT = SetOfParticles(filename=self._getPath('particles_tilted.sqlite'))
    outputU.copyInfo(inputU)
    outputT.copyInfo(inputT)

    modifiedSet = ParticlesTiltPair(filename=self._dbName,
                                    prefix=self._dbPrefix)
    for pair, u, t in izip(modifiedSet, inputU, inputT):
        if pair.isEnabled():
            output.append(pair)
            outputU.append(u)
            outputT.append(t)

    # Register outputs
    output.setUntilted(outputU)
    output.setTilted(outputT)
    outputDict = {'outputParticlesTiltPair': output}
    self._defineOutputs(**outputDict)
    self._defineTransformRelation(particlesTiltPair, output)
    return output
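# --- Illustrative sketch (added for clarity, not part of the protocol above) ---
# A minimal, hedged example of the same "keep only the enabled items" pattern
# used by _createSubSetFromParticlesTiltPair, written for a plain
# SetOfParticles. The helper name and the file arguments are hypothetical;
# only calls already used in the surrounding code (SetOfParticles, copyInfo,
# isEnabled, append, write, close) appear here.
def _exampleSubsetOfEnabledParticles(modifiedFn, outputFn):
    modifiedSet = SetOfParticles(filename=modifiedFn)  # set edited in the viewer
    outputSet = SetOfParticles(filename=outputFn)      # subset to be written
    outputSet.copyInfo(modifiedSet)
    for particle in modifiedSet:
        if particle.isEnabled():
            outputSet.append(particle)
    outputSet.write()
    outputSet.close()
    return outputSet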
def importParticles(self): """ Import particles from Frealign. Params: parFile: the filename of the parameter file with the alignment in Frealign. stackFile: single stack file with the images. """ partSet = self.protocol._createSetOfParticles() partSet.setObjComment('Particles imported from Frealign parfile:\n%s' % self.parFile) # Create a local link to the input stack file localStack = self.protocol._getExtraPath( os.path.basename(self.stackFile)) pwutils.createLink(self.stackFile, localStack) # Create a temporary set only with location tmpSet = SetOfParticles(filename=':memory:') tmpSet.readStack(localStack) self._setupSet(tmpSet) # Update both samplingRate and acquisition with parameters # selected in the protocol form self._setupSet(partSet) # Now read the alignment parameters from par file readSetOfParticles(tmpSet, partSet, self.parFile) partSet.setHasCTF(True) # Register the output set of particles self.protocol._defineOutputs(outputParticles=partSet)
def _createSubSetFromMicrographsTiltPair(self, micrographsTiltPair): """ Create a subset of Micrographs Tilt Pair. """ output = MicrographsTiltPair(filename=self._getPath('micrographs_pairs.sqlite')) print "self._dbName=%s" % self._dbName modifiedSet = MicrographsTiltPair(filename=self._dbName, prefix=self._dbPrefix) inputU = micrographsTiltPair.getUntilted() inputT = micrographsTiltPair.getTilted() outputU = SetOfMicrographs(filename=self._getPath('mics_untilted.sqlite')) outputT = SetOfParticles(filename=self._getPath('mics_tilted.sqlite')) outputU.copyInfo(inputU) outputT.copyInfo(inputT) for micPairI in modifiedSet: untilted = micPairI.getUntilted() tilted = micPairI.getTilted() if micPairI.isEnabled(): micPairO = TiltPair() micPairO.setUntilted(untilted) micPairO.setTilted(tilted) output.append(micPairO) outputU.append(untilted) outputT.append(tilted) output.setUntilted(outputU) output.setTilted(outputT) # Register outputs outputDict = {'outputMicrographsTiltPair': output} self._defineOutputs(**outputDict) self._defineTransformRelation(micrographsTiltPair, output) return output
def _createCluster(self): """ Create the cluster with the selected particles from the cluster. This method will be called when the button 'Create Cluster' is pressed. """ # Write the particles prot = self.protocol project = prot.getProject() inputSet = prot.getInputParticles() fnSqlite = prot._getTmpPath('cluster_particles.sqlite') cleanPath(fnSqlite) partSet = SetOfParticles(filename=fnSqlite) partSet.copyInfo(inputSet) for point in self.getData(): if point.getState() == Point.SELECTED: particle = inputSet[point.getId()] partSet.append(particle) partSet.write() partSet.close() from protocol_batch_cluster import BatchProtNMACluster newProt = project.newProtocol(BatchProtNMACluster) clusterName = self.clusterWindow.getClusterName() if clusterName: newProt.setObjLabel(clusterName) newProt.inputNmaDimred.set(prot) newProt.sqliteFile.set(fnSqlite) project.launchProtocol(newProt)
def getParticles(self, protImport, classid): dbPartSet = protImport._getPath("particles_class-%d.sqlite" % classid) class3D = protImport.outputClasses[classid] if os.path.exists(dbPartSet): os.remove(dbPartSet) partSet = SetOfParticles(filename=dbPartSet) partSet.copyInfo(class3D) for part in class3D: partSet.append(part) partSet.write() partSet.close() protImportCls1 = self.newProtocol( ProtImportParticles, objLabel='particles class-%d' % classid, importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=dbPartSet, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True) self.launchProtocol(protImportCls1) self.assertIsNotNone(protImportCls1.outputParticles.getFileName(), "There was a problem with the import") return protImportCls1
def test_particlesToStar(self): fnSqlite = self.getFile('particles') partSet = SetOfParticles(filename=fnSqlite) for i, img in enumerate(partSet): img.printAll() if i > 10: break partSet.printAll()
def _loadInput(self): partsFile = self.inputParticles.get().getFileName() inPartsSet = SetOfParticles(filename=partsFile) inPartsSet.loadAllProperties() if self.check == None: writeSetOfParticles(inPartsSet, self.fnInputMd, alignType=em.ALIGN_NONE, orderBy='creation') else: writeSetOfParticles(inPartsSet, self.fnInputMd, alignType=em.ALIGN_NONE, orderBy='creation', where='creation>"' + str(self.check) + '"') writeSetOfParticles(inPartsSet, self.fnInputOldMd, alignType=em.ALIGN_NONE, orderBy='creation', where='creation<"' + str(self.check) + '"') for p in inPartsSet.iterItems(orderBy='creation', direction='DESC'): self.check = p.getObjCreation() break streamClosed = inPartsSet.isStreamClosed() inputSize = inPartsSet.getSize() inPartsSet.close() return inputSize, streamClosed
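# --- Illustrative sketch (added for clarity, not part of _loadInput above) ---
# Hedged illustration of the streaming bookkeeping used above: remember the
# creation time of the newest item seen, so the next call can select only
# items with creation > last check. The helper name is hypothetical; the calls
# (loadAllProperties, iterItems, getObjCreation, isStreamClosed) are the same
# ones used in the surrounding code.
def _exampleLastCreationTime(partsFn):
    partsSet = SetOfParticles(filename=partsFn)
    partsSet.loadAllProperties()
    lastCreation = None
    # iterate ordered by 'creation', newest first, and keep only the first item
    for p in partsSet.iterItems(orderBy='creation', direction='DESC'):
        lastCreation = p.getObjCreation()
        break
    streamClosed = partsSet.isStreamClosed()
    partsSet.close()
    return lastCreation, streamClosed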
def createOutputStep(self): # Create the SetOfImages object on the database #imgSet = XmippSetOfParticles(self._getPath('images.xmd')) fnImages = self._getOutputImgMd() # Create output SetOfParticles imgSet = self._createSetOfParticles() imgSet.copyInfo(self.inputMics) if self.doFlip: # Check if self.inputMics are phase flipped. if self.inputMics.isPhaseFlipped(): imgSet.setIsPhaseFlipped(False) else: imgSet.setIsPhaseFlipped(True) #imgSet.setHasCTF(self.fnCTF is not None) if self.downsampleType == OTHER: imgSet.setSamplingRate(self.inputMics.getSamplingRate() * self.downFactor.get()) imgSet.setCoordinates(self.inputCoords) # Create a temporary set to read from the metadata file # and later create the good one with the coordinates # properly set. We need this because the .update is not # working in the mapper when new attributes are added. imgSet.setHasCTF(self.ctfRelations.hasValue()) auxSet = SetOfParticles(filename=':memory:') auxSet.copyInfo(imgSet) readSetOfParticles(fnImages, auxSet) if self.downsampleType != SAME_AS_PICKING: factor = self.samplingInput / self.samplingFinal # For each particle retrieve micId from SetOFCoordinates and set it on the CTFModel for img in auxSet: #FIXME: This can be slow to make a query to grab the coord, maybe use zip(imgSet, coordSet)??? coord = self.inputCoords[img.getObjId()] if self.downsampleType != SAME_AS_PICKING: x, y = coord.getPosition() coord.setPosition(x * factor, y * factor) ctfModel = img.getCTF() if ctfModel is not None: ctfModel.setObjId(coord.getMicId()) ##img.setCTF(ctfModel)####JM img.setMicId(coord.getMicId()) img.setCoordinate(coord) imgSet.append(img) self._storeMethodsInfo(fnImages) self._defineOutputs(outputParticles=imgSet) self._defineSourceRelation(self.inputCoordinates, imgSet) if self.ctfRelations.hasValue(): self._defineSourceRelation(self.ctfRelations.get(), imgSet)
def loadInputs(self): micsFn = self.getInputMicrographs().getFileName() micsSet = SetOfMicrographs(filename=micsFn) micsSet.loadAllProperties() availableMics = [] for mic in micsSet: availableMics.append(mic.getObjId()) micsSetClosed = micsSet.isStreamClosed() micsSet.close() partsFn = self.getInputParticles().getFileName() partsSet = SetOfParticles(filename=partsFn) partsSet.loadAllProperties() newParts = [] newMics = [] for item in partsSet: micKey = item.getCoordinate().getMicId() if micKey not in self.micsDone and micKey in availableMics: newParts.append(item.getObjId()) if not micKey in self.micsDone: newMics.append(micKey) self.micsDone += newMics self.inputSize = partsSet.getSize() partSetClosed = partsSet.isStreamClosed() partsSet.close() return newParts, micsSetClosed and partSetClosed
def loadAcquisitionInfo(self): """ Return a dictionary with acquisition values and the sampling rate information. In the case of Xmipp, they are stored in files: acquisition_info.xmd and microscope.xmd """ acquisitionDict = OrderedDict() inputSet = SetOfParticles(filename=self._sqliteFile) acquisitionDict['samplingRate'] = inputSet.getProperty('_samplingRate') acquisitionDict['voltage'] = inputSet.getProperty('_acquisition._voltage') acquisitionDict['amplitudeContrast'] = inputSet.getProperty('_acquisition._amplitudeContrast') acquisitionDict['magnification'] = int(float(inputSet.getProperty('_acquisition._magnification'))) return acquisitionDict
def _checkProcessedData(self): if self.setof == SET_OF_MOVIES: objSet = SetOfMovies(filename=self._getPath('movies.sqlite')) elif self.setof == SET_OF_MICROGRAPHS: objSet = \ SetOfMicrographs(filename=self._getPath('micrographs.sqlite')) elif self.setof == SET_OF_RANDOM_MICROGRAPHS: objSet = \ SetOfMicrographs(filename=self._getPath('micrographs.sqlite')) elif self.setof == SET_OF_PARTICLES: objSet = SetOfParticles(filename=self._getPath('particles.sqlite')) else: raise Exception('Unknown data type') newObjSet, newObj = self._checkNewItems(objSet) if self.setof == SET_OF_MOVIES: newObjSet.setFramesRange(self.inputMovies.get().getFramesRange()) # check if end .... if self.setof != SET_OF_PARTICLES: self.nDims = self.nDim.get() endObjs = newObjSet.getSize() == self.nDims if newObj: if endObjs: newObjSet.setStreamState(newObjSet.STREAM_CLOSED) self._updateOutput(newObjSet) newObjSet.close()
def retrieveTrainSets(self):
    """ Retrieve, link and return a SetOfParticles corresponding to the
    negative DeepConsensus training set, matching the extraction
    conditions of the input (phase flip / contrast inversion).
    """
    prefixYES = ''
    prefixNO = 'no'
    modelType = "negativeTrain_%sPhaseFlip_%sInvert.mrcs" % (
        prefixYES if self.doInvert.get() else prefixNO,
        prefixYES if self.ignoreCTF.get() else prefixNO)
    modelPath = xmipp3.Plugin.getModel("deepConsensus", modelType)
    print("Precompiled negative particles found at %s" % modelPath)
    modelFn = self._getTmpPath(modelType)
    pwutils.createLink(modelPath, modelFn)

    tmpSqliteSuff = "AddTrain"
    partSet = self._createSetOfParticles(tmpSqliteSuff)
    img = SetOfParticles.ITEM_TYPE()

    imgh = ImageHandler()
    _, _, _, n = imgh.getDimensions(modelFn)
    if n > 1:
        for index in range(1, n + 1):
            img.cleanObjId()
            img.setMicId(9999)
            img.setFileName(modelFn)
            img.setIndex(index)
            partSet.append(img)
    partSet.setAlignment(ALIGN_NONE)

    cleanPath(self._getPath("particles%s.sqlite" % tmpSqliteSuff))
    return partSet
def test_mrcsLink(self): """ In this case just a link with .mrcs extension should be created """ stackFile = self.dsEmx.getFile('particles/particles.mrc') partSet = SetOfParticles(filename=':memory:') for i in range(1, 10): particle = Particle() particle.setLocation(i, stackFile) partSet.append(particle) outputDir = self.getOutputPath() filesDict = convertBinaryFiles(partSet, outputDir) print filesDict
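# --- Illustrative sketch (added for clarity) ---
# Hedged usage of the convertBinaryFiles result checked in the test above.
# The returned dict presumably maps each original binary file to the link or
# converted file that Relion should read; the loop below only prints it.
# filesDict = convertBinaryFiles(partSet, outputDir)
# for srcFn, newFn in filesDict.iteritems():   # Python 2, as in these tests
#     print srcFn, '->', newFn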
def importParticles(self): """ Import particles from a metadata 'images.xmd' """ inputSet = SetOfParticles(filename=self._sqliteFile) inputSet.loadProperty('_alignment', ALIGN_NONE) inputSet.loadProperty('_hasCtf', False) self._findImagesPath(inputSet) partSet = self.protocol._createSetOfParticles() partSet.copyInfo(inputSet) partSet.setObjComment('Particles imported from Scipion sqlite file:\n%s' % self._sqliteFile) partSet.copyItems(inputSet, updateItemCallback=self._updateParticle) # Update both samplingRate and acquisition with parameters # selected in the protocol form self.protocol.setSamplingRate(partSet) self.protocol.fillAcquisition(partSet.getAcquisition()) partSet.setIsPhaseFlipped(self.protocol.haveDataBeenPhaseFlipped.get()) self.protocol._defineOutputs(outputParticles=partSet)
def test_particlesWithPhaseShiftToStar(self): """ Write a SetOfParticles to Relion star input file. """ imgSet = SetOfParticles(filename=self.getOutputPath("particles_ph_sh.sqlite")) n = 10 fn = self.getFile('particles_binary') ctfs = [CTFModel(defocusU=10000, defocusV=15000, defocusAngle=15, phaseShift=90), CTFModel(defocusU=20000, defocusV=25000, defocusAngle=25, phaseShift=60) ] acquisition = Acquisition(magnification=60000, voltage=300, sphericalAberration=2., amplitudeContrast=0.07) imgSet.setAcquisition(acquisition) coord = Coordinate() coord.setMicId(1) for i in range(n): p = Particle() p.setLocation(i + 1, fn) ctf = ctfs[i % 2] p.setCTF(ctf) p.setAcquisition(acquisition) p._xmipp_zScore = Float(i) coord.setX(i * 10) coord.setY(i * 10) p.setCoordinate(coord) imgSet.append(p) fnStar = self.getOutputPath('particles_ph_sh.star') fnStk = self.getOutputPath('particles.stk') print (">>> Writing to file: %s" % fnStar) relion.writeSetOfParticles(imgSet, fnStar, fnStk) mdAll = md.MetaData(fnStar) self.assertTrue(mdAll.containsLabel(md.RLN_IMAGE_COORD_X)) self.assertTrue(mdAll.containsLabel(md.RLN_IMAGE_COORD_Y)) self.assertFalse(mdAll.containsLabel(md.RLN_SELECT_PARTICLES_ZSCORE)) self.assertTrue(mdAll.containsLabel(md.RLN_CTF_PHASESHIFT))
def test_hdfToStk(self): """ In this case the hdf stack files should be converted to .stk spider files for Relion. """ stackFiles = ['BPV_1386_ptcls.hdf', 'BPV_1387_ptcls.hdf', 'BPV_1388_ptcls.hdf'] partSet = SetOfParticles(filename=':memory:') for fn in stackFiles: particle = Particle() particle.setLocation(1, self.ds.getFile('particles/%s' % fn)) partSet.append(particle) outputDir = self.getOutputPath() filesDict = convertBinaryFiles(partSet, outputDir) partSet.close() print filesDict
def convertInputStep(self, imagesMd): # It is unusual to create the output in the convertInputStep, # but just to avoid reading twice the sqlite with particles inputSet = self.inputNmaDimred.get().getInputParticles() partSet = self._createSetOfParticles() partSet.copyInfo(inputSet) tmpSet = SetOfParticles(filename=self.sqliteFile.get()) partSet.appendFromImages(tmpSet) # Register outputs partSet.setAlignmentProj() self._defineOutputs(outputParticles=partSet) self._defineTransformRelation(inputSet, partSet) writeSetOfParticles(partSet, imagesMd)
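# --- Illustrative sketch (added for clarity, not part of convertInputStep) ---
# Hedged example of the copy used above: pull the items referenced by a
# selection .sqlite into a new set with appendFromImages, after copying the
# set-level metadata from the original input. The file name is hypothetical.
# selection = SetOfParticles(filename='selection_particles.sqlite')
# newSet = self._createSetOfParticles()
# newSet.copyInfo(inputSet)
# newSet.appendFromImages(selection)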
def loadAcquisitionInfo(self): """ Return a dictionary with acquisition values and the sampling rate information. In the case of Scipion, these values will be read from the 'Properties' table of the particles.sqlite file. """ acq = OrderedDict() inputSet = SetOfParticles(filename=self._sqliteFile) def _get(key): return inputSet.getProperty(key) acq['samplingRate'] = _get('_samplingRate') acq['voltage'] = _get('_acquisition._voltage') acq['amplitudeContrast'] = _get('_acquisition._amplitudeContrast') acq['sphericalAberration'] = float( _get('_acquisition._sphericalAberration')) return acq
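# --- Illustrative sketch (added for clarity, not part of loadAcquisitionInfo) ---
# Hedged example of reading a single value from the 'Properties' table of a
# particles .sqlite file without iterating the whole set, using the same
# getProperty call as above. The helper name and file argument are
# hypothetical; getProperty returns the stored text, so it is cast explicitly.
def _exampleReadSamplingRate(sqliteFile):
    partSet = SetOfParticles(filename=sqliteFile)
    return float(partSet.getProperty('_samplingRate'))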
def _checkNewInput(self): # Check if there are new particles to process from the input set partsFile = self.inputParticles.get().getFileName() partsSet = SetOfParticles(filename=partsFile) partsSet.loadAllProperties() self.SetOfParticles = [m.clone() for m in partsSet] self.streamClosed = partsSet.isStreamClosed() partsSet.close() partsSet = self._createSetOfParticles() readSetOfParticles(self._getExtraPath("allDone.xmd"), partsSet) newParts = any(m.getObjId() not in partsSet for m in self.SetOfParticles) outputStep = self._getFirstJoinStep() if newParts: fDeps = self._insertNewPartsSteps(self.insertedDict, self.SetOfParticles) if outputStep is not None: outputStep.addPrerequisites(*fDeps) self.updateSteps()
def _findPathAndCtf(self, warnings=True): """ Find the relative path from which the micrographs exists repect to the metadata location. Also check if it contains CTF information and their relative root. """ inputSet = SetOfParticles(filename=self._sqliteFile) inputSet.loadProperty('_alignment', ALIGN_NONE) inputSet.loadProperty('_hasCtf', False) particle = inputSet.getFirstItem() self._imgPath = findRootFrom(self._sqliteFile, particle.getFileName()) if warnings and self._imgPath is None: self.protocol.warning("Binary data was not found from metadata: %s" % self._sqliteFile) return inputSet
def _allParticles(self, iterate=False): # A handler function to iterate over the particles inputSet = self.inputSet.get() if self.isInputClasses(): iterParticles = inputSet.iterClassItems() if iterate: return iterParticles else: particles = SetOfParticles(filename=":memory:") particles.copyInfo(inputSet.getFirstItem()) particles.copyItems(iterParticles) return particles else: if iterate: return inputSet.iterItems() else: return inputSet
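# --- Illustrative sketch (added for clarity) ---
# Hedged usage of the _allParticles helper above: stream the items when only
# one pass is needed, or materialize them when random access is required.
# The per-item processing call is hypothetical.
# for particle in self._allParticles(iterate=True):
#     self._processParticle(particle)          # hypothetical per-item work
# particles = self._allParticles(iterate=False)
# firstParticle = particles.getFirstItem()     # materialized sets allow this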
def aaatest_particlesToStar(self): """ Write a SetOfParticles to Relion star input file. """ imgSet = SetOfParticles( filename=self.getOutputPath("particles.sqlite")) n = 10 fn = self.particles ctfs = [ CTFModel(defocusU=10000, defocusV=15000, defocusAngle=15), CTFModel(defocusU=20000, defocusV=25000, defocusAngle=25) ] acquisition = Acquisition(magnification=60000, voltage=300, sphericalAberration=2., amplitudeContrast=0.07) imgSet.setAcquisition(acquisition) coord = Coordinate() coord.setMicId(1) for i in range(n): p = Particle() p.setLocation(i + 1, fn) ctf = ctfs[i % 2] p.setCTF(ctf) p.setAcquisition(acquisition) p._xmipp_zScore = Float(i) coord.setX(i * 10) coord.setY(i * 10) p.setCoordinate(coord) imgSet.append(p) fnStar = self.getOutputPath('particles.star') fnStk = self.getOutputPath('particles.stk') print ">>> Writing to file: %s" % fnStar relion.writeSetOfParticles(imgSet, fnStar, fnStk) mdAll = md.MetaData(fnStar) self.assertTrue(mdAll.containsLabel(md.RLN_IMAGE_COORD_X)) self.assertTrue(mdAll.containsLabel(md.RLN_IMAGE_COORD_Y)) self.assertFalse(mdAll.containsLabel(md.RLN_SELECT_PARTICLES_ZSCORE))
def createOutputStep(self): fnTilted = self._getExtraPath("images_tilted.xmd") fnUntilted = self._getExtraPath("images_untilted.xmd") # Create outputs SetOfParticles both for tilted and untilted imgSetU = self._createSetOfParticles(suffix="Untilted") imgSetU.copyInfo(self.uMics) imgSetT = self._createSetOfParticles(suffix="Tilted") imgSetT.copyInfo(self.tMics) sampling = self.samplingMics if self._micsOther() else self.samplingInput if self._doDownsample(): sampling *= self.downFactor.get() imgSetU.setSamplingRate(sampling) imgSetT.setSamplingRate(sampling) # set coords from the input, will update later if needed imgSetU.setCoordinates(self.inputCoordinatesTiltedPairs.get().getUntilted()) imgSetT.setCoordinates(self.inputCoordinatesTiltedPairs.get().getTilted()) # Read untilted and tilted particles on a temporary object (also disabled particles) imgSetAuxU = SetOfParticles(filename=':memory:') imgSetAuxU.copyInfo(imgSetU) readSetOfParticles(fnUntilted, imgSetAuxU, removeDisabled=False) imgSetAuxT = SetOfParticles(filename=':memory:') imgSetAuxT.copyInfo(imgSetT) readSetOfParticles(fnTilted, imgSetAuxT, removeDisabled=False) # calculate factor for coords scaling factor = 1 / self.samplingFactor if self._doDownsample(): factor /= self.downFactor.get() coordsT = self.inputCoordinatesTiltedPairs.get().getTilted() # For each untilted particle retrieve micId from SetOfCoordinates untilted for imgU, coordU in izip(imgSetAuxU, self.inputCoordinatesTiltedPairs.get().getUntilted()): # FIXME: Remove this check when sure that objIds are equal id = imgU.getObjId() if id != coordU.getObjId(): raise Exception('ObjIds in untilted img and coord are not the same!!!!') imgT = imgSetAuxT[id] coordT = coordsT[id] # If both particles are enabled append them if imgU.isEnabled() and imgT.isEnabled(): if self._micsOther() or self._doDownsample(): coordU.scale(factor) coordT.scale(factor) imgU.setCoordinate(coordU) imgSetU.append(imgU) imgT.setCoordinate(coordT) imgSetT.append(imgT) imgSetU.write() imgSetT.write() # Define output ParticlesTiltPair outputset = ParticlesTiltPair(filename=self._getPath('particles_pairs.sqlite')) outputset.setTilted(imgSetT) outputset.setUntilted(imgSetU) for imgU, imgT in izip(imgSetU, imgSetT): outputset.append(TiltPair(imgU, imgT)) outputset.setCoordsPair(self.inputCoordinatesTiltedPairs.get()) self._defineOutputs(outputParticlesTiltPair=outputset) self._defineSourceRelation(self.inputCoordinatesTiltedPairs, outputset)
class TestSubProj(BaseTest): @classmethod def setUpClass(cls): setupTestProject(cls) def createSetOfParticles(self, setPartSqliteName, partFn, doCtf=False): # create a set of particles self.partSet = SetOfParticles(filename=setPartSqliteName) self.partSet.setAlignment(ALIGN_PROJ) self.partSet.setAcquisition(Acquisition(voltage=300, sphericalAberration=2, amplitudeContrast=0.1, magnification=60000)) self.partSet.setSamplingRate(samplingRate) self.partSet.setHasCTF(True) aList = [np.array(m) for m in mList] #defocus=15000 + 5000* random.random() for i, a in enumerate(aList): p = Particle() if doCtf: defocusU = defocusList[i]#+500. defocusV = defocusList[i] ctf = CTFModel(defocusU=defocusU, defocusV=defocusV, defocusAngle=defocusAngle[i]) ctf.standardize() p.setCTF(ctf) p.setLocation(i + 1, partFn) p.setTransform(Transform(a)) self.partSet.append(p) self.partSet.write() def createProjection(self, proj, num, baseName): img = xmipp.Image() img.setDataType(xmipp.DT_FLOAT) img.resize(projSize, projSize) #img.initRandom(0., 1., xmipp.XMIPP_RND_GAUSSIAN) img.initConstant(0.) for coor in proj: value = img.getPixel(coor[0], coor[1], coor[2], coor[3]) img.setPixel(coor[0], coor[1], coor[2], coor[3], coor[4]+value) # coor4 is the pixel value img.write("%d@"%num + baseName) def createVol(self, volume): vol = xmipp.Image() vol.setDataType(xmipp.DT_FLOAT) vol.resize(projSize, projSize, projSize) #vol.initRandom(0., .5, xmipp.XMIPP_RND_UNIFORM) vol.initConstant(0.) for coor in volume: vol.setPixel(coor[0], coor[1], coor[2], coor[3], coor[4]) # coor4 is the pixel value vol.write(self.volBaseFn) def createMask(self, _maskName): vol = xmipp.Image() vol.setDataType(xmipp.DT_FLOAT) vol.resize(projSize, projSize, projSize) vol.initConstant(0.0)#ROB: not sure this is needed halfDim = int(projSize/2) maskRadius2 = maskRadius * maskRadius for i in range(-halfDim,halfDim): for j in range(-halfDim,halfDim): for k in range(-halfDim,halfDim): if(i*i+j*j+k*k) < maskRadius2: vol.setPixel(0, k+halfDim, i+halfDim, j+halfDim, 1.) 
# coor4 is the pixel value vol.write(_maskName) def applyCTF(self, setPartMd): writeSetOfParticles(self.partSet,setPartMd) md1 = xmipp.MetaData() md1.setColumnFormat(False) idctf = md1.addObject() _acquisition = self.partSet.getAcquisition() for part in self.partSet: baseFnCtf = self.proj.getTmpPath("kk")#self._getTmpPath("ctf_%d.param"%mic) md1.setValue(xmipp.MDL_CTF_SAMPLING_RATE, samplingRate, idctf) md1.setValue(xmipp.MDL_CTF_VOLTAGE, 200., idctf); ctf = part.getCTF() udefocus = ctf.getDefocusU() vdefocus = ctf.getDefocusV() angle = ctf.getDefocusAngle() md1.setValue(xmipp.MDL_CTF_DEFOCUSU, udefocus, idctf); md1.setValue(xmipp.MDL_CTF_DEFOCUSV, vdefocus, idctf); md1.setValue(xmipp.MDL_CTF_DEFOCUS_ANGLE, 180.0 * random.random(), idctf); md1.setValue(xmipp.MDL_CTF_CS, 2., idctf); md1.setValue(xmipp.MDL_CTF_Q0, 0.07, idctf); md1.setValue(xmipp.MDL_CTF_K, 1., idctf); md1.write(baseFnCtf) ##writeSetOfParticles(self.partSet, setPartMd) #apply ctf args = " -i %s"%setPartMd args += " -o %s"%self.proj.getTmpPath(setPartCtfName) args += " -f ctf %s"%baseFnCtf args += " --sampling %f"%samplingRate runXmippProgram("xmipp_transform_filter", args) args = " -i %s"%setPartMd args += " -o %s"%self.proj.getTmpPath(setPartCtfPosName) args += " -f ctfpos %s"%baseFnCtf args += " --sampling %f"%samplingRate runXmippProgram("xmipp_transform_filter", args) def importData(self, baseFn, objLabel, protType, importFrom): prot = self.newProtocol(protType, objLabel=objLabel, filesPath=baseFn, maskPath=baseFn, sqliteFile=baseFn, haveDataBeenPhaseFlipped=False, magnification=10000, samplingRate=samplingRate, importFrom=importFrom ) self.launchProtocol(prot) return prot def test_pattern(self): #1) create fake protocol so I have a place to save data #prot = self.launchFakeProtocol() #output stack self.setPartName = self.proj.getTmpPath(setPartName) self.setPartSqliteName = self.proj.getTmpPath(setPartSqliteName) self.setPartSqliteCtfName = self.proj.getTmpPath(setPartSqliteCtfName) self.setPartSqliteCTfPosName = self.proj.getTmpPath(setPartSqliteCTfPosName) self.kksqlite = self.proj.getTmpPath("kk.sqlite") self.setPartMd = self.proj.getTmpPath(setPartNameMd) self.volBaseFn = self.proj.getTmpPath(volName) self.maskName = self.proj.getTmpPath(maskName) #2) create projections and sets of particles self.createProjection(proj1, 1, self.setPartName) self.createProjection(proj2, 2, self.setPartName) self.createProjection(proj3, 3, self.setPartName) self.createSetOfParticles(self.setPartSqliteCTfPosName, self.proj.getTmpPath(setPartCtfPosName), True) self.createSetOfParticles(self.setPartSqliteCtfName, self.proj.getTmpPath(setPartCtfName), True) self.createSetOfParticles(self.setPartSqliteName, self.setPartName, False) #create auxiliary setofparticles self.createSetOfParticles(self.kksqlite, self.setPartName, True) #4) apply CTF self.applyCTF(self.setPartMd) #5) create volume self.createVol(vol1) #6) create mask self.createMask(self.maskName) #import three projection datasets, volume and mask protPlainProj = self.importData(self.setPartSqliteName, "plain projection", ProtImportParticles, ProtImportParticles.IMPORT_FROM_SCIPION) protCTFProj = self.importData(self.setPartSqliteCtfName,"ctf projection", ProtImportParticles, ProtImportParticles.IMPORT_FROM_SCIPION) protCTFposProj = self.importData(self.setPartSqliteCTfPosName, "pos ctf projection", ProtImportParticles, ProtImportParticles.IMPORT_FROM_SCIPION) _protImportVol = self.importData(os.path.abspath(self.proj.getTmpPath(volName)), "3D reference", ProtImportVolumes, 
ProtImportParticles.IMPORT_FROM_FILES) _protImportMask = self.importData(self.proj.getTmpPath(maskName), "3D mask", ProtImportMask, ProtImportParticles.IMPORT_FROM_FILES) mask = VolumeMask() mask.setFileName(self.proj.getTmpPath(maskName)) mask.setSamplingRate(samplingRate) #launch substract protocol <<<<<<<<<<<<<<<<<<<<<<<<<< protSubtract = self.newProtocol(XmippProtSubtractProjection) protSubtract.inputParticles.set(protPlainProj.outputParticles) protSubtract.inputVolume.set(_protImportVol.outputVolume) protSubtract.refMask.set(_protImportMask.outputMask) protSubtract.projType.set(XmippProtSubtractProjection.CORRECT_NONE) self.launchProtocol(protSubtract) protSubtractCTF = self.newProtocol(XmippProtSubtractProjection) protSubtractCTF.inputParticles.set(protCTFProj.outputParticles) protSubtractCTF.inputVolume.set(_protImportVol.outputVolume) protSubtractCTF.refMask.set(_protImportMask.outputMask) protSubtractCTF.projType.set(XmippProtSubtractProjection.CORRECT_FULL_CTF) self.launchProtocol(protSubtractCTF) protSubtractCTFpos = self.newProtocol(XmippProtSubtractProjection) protSubtractCTFpos.inputParticles.set(protCTFposProj.outputParticles) protSubtractCTFpos.inputVolume.set(_protImportVol.outputVolume) protSubtractCTFpos.refMask.set(_protImportMask.outputMask) protSubtractCTFpos.projType.set(XmippProtSubtractProjection.CORRECT_PHASE_FLIP) self.launchProtocol(protSubtractCTFpos) protSubtractCTFRelion = self.newProtocol(ProtRelionSubtract) protSubtractCTFRelion.inputParticles.set(protCTFProj.outputParticles) protSubtractCTFRelion.inputVolume.set(_protImportVol.outputVolume) self.launchProtocol(protSubtractCTFRelion) self.assertIsNotNone(protSubtract.outputParticles, "There was a problem with subtract projection") self.assertTrue(True)
def testOrderBy(self): """ create set of particles and orderby a given attribute """ # This function was written by Roberto. It does things # differently, so let's keep it for reference. #create set of particles inFileNameMetadata = self.proj.getTmpPath('particlesOrderBy.sqlite') inFileNameData = self.proj.getTmpPath('particlesOrderBy.stk') imgSet = SetOfParticles(filename=inFileNameMetadata) imgSet.setSamplingRate(1.5) acq = Acquisition() acq.setAmplitudeContrast(0.1) acq.setMagnification(10000) acq.setVoltage(200) acq.setSphericalAberration(2.0) imgSet.setAcquisition(acq) img = Particle() for i in range(1, 10): img.setLocation(i, inFileNameData) img.setMicId(i%3) img.setClassId(i%5) imgSet.append(img) img.cleanObjId() imgSet.write() #now import the dataset prot1 = self.newProtocol(ProtImportParticles, importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata, magnification=10000, samplingRate=1.5 ) prot1.setObjLabel('from sqlite (test-sets)') self.launchProtocol(prot1) if prot1.outputParticles is None: raise Exception('Import of images: %s, failed. outputParticles is None.' % inFileNameMetadata) protSplitSet = self.newProtocol(ProtSplitSet, inputSet=prot1.outputParticles, numberOfSets=2, randomize=True) self.launchProtocol(protSplitSet) inputSets = [protSplitSet.outputParticles01,protSplitSet.outputParticles02] outputSet = SetOfParticles(filename=self.proj.getTmpPath('gold.sqlite')) for itemSet in inputSets: for obj in itemSet: outputSet.append(obj) for item1, item2 in izip(imgSet, outputSet): if not item1.equalAttributes(item2): print "Items differ:" prettyDict(item1.getObjDict()) prettyDict(item2.getObjDict()) self.assertTrue(item1.equalAttributes(item2), )
def testMergeDifferentAttrs(self): """ Test merge from subsets with different attritubes. That is, M1(a,b,c) U M2(a,b,c,d)""" #create two set of particles inFileNameMetadata1 = self.proj.getTmpPath('particles11.sqlite') inFileNameMetadata2 = self.proj.getTmpPath('particles22.sqlite') imgSet1 = SetOfParticles(filename=inFileNameMetadata1) imgSet2 = SetOfParticles(filename=inFileNameMetadata2) inFileNameData = self.proj.getTmpPath('particles.stk') img1 = Particle() img2 = Particle() attrb1 = [11, 12, 13, 14] attrb2 = [21, 22, 23, 24] attrb3 = [31, 32] counter = 0 for i in range(1, 3): img1.cleanObjId() img1.setLocation(i, inFileNameData) img1.setMicId(i % 3) img1.setClassId(i % 5) img1.setSamplingRate(1.) img1._attrb1 = Float(attrb1[counter]) img1._attrb2 = Float(attrb2[counter]) img1._attrb3 = Float(attrb3[counter]) imgSet1.append(img1) counter += 1 for i in range(1, 3): img2.cleanObjId() img2.setLocation(i, inFileNameData) img2.setClassId(i % 5) img2.setMicId(i % 3) img2.setSamplingRate(2.) img2._attrb1 = Float(attrb1[counter]) img2._attrb2 = Float(attrb2[counter]) imgSet2.append(img2) counter += 1 imgSet1.write() imgSet2.write() #import them protImport1 = self.newProtocol( ProtImportParticles, objLabel='import set1', importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata1, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True) self.launchProtocol(protImport1) protImport2 = self.newProtocol( ProtImportParticles, objLabel='import set2', importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata2, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True) self.launchProtocol(protImport2) #create merge protocol p_union = self.newProtocol(ProtUnionSet, objLabel='join diff column order', ignoreExtraAttributes=True) p_union.inputSets.append(protImport1.outputParticles) p_union.inputSets.append(protImport2.outputParticles) self.proj.launchProtocol(p_union, wait=True) #assert counter = 0 for img in p_union.outputSet: self.assertAlmostEqual(attrb1[counter], img._attrb1, 4) self.assertAlmostEqual(attrb2[counter], img._attrb2, 4) if hasattr(img, '_attrb3'): self.assertTrue(False, "join should not have attrb3") if not hasattr(img, '_attrb2'): self.assertTrue(False, "join should have attrb2") counter += 1
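# --- Illustrative note (added for clarity) ---
# The merge tests above attach extra per-item columns by assigning Scipion
# Object wrappers (e.g. Float) to underscore-prefixed attributes before
# append(); presumably a plain Python float would not be persisted by the
# mapper. The attribute name below is hypothetical.
# img = Particle()
# img._myScore = Float(0.5)
# imgSet.append(img)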
def testMergeAlternateColumn(self): """Test that the union operation works as expected. Even if the order of the columns do not match. That is, M1(a,b,c) U M2(a,c,b)""" #create two set of particles inFileNameMetadata1 = self.proj.getTmpPath('particles1.sqlite') inFileNameMetadata2 = self.proj.getTmpPath('particles2.sqlite') imgSet1 = SetOfParticles(filename=inFileNameMetadata1) imgSet2 = SetOfParticles(filename=inFileNameMetadata2) inFileNameData = self.proj.getTmpPath('particles.stk') img1 = Particle() img2 = Particle() attrb1 = [11, 12, 13, 14] attrb2 = [21, 22, 23, 24] counter = 0 for i in range(1, 3): img1.cleanObjId() img1.setLocation(i, inFileNameData) img1.setMicId(i % 3) img1.setClassId(i % 5) img1.setSamplingRate(1.) img1._attrb1 = Float(attrb1[counter]) img1._attrb2 = Float(attrb2[counter]) imgSet1.append(img1) counter +=1 for i in range(1, 3): img2.cleanObjId() img2.setLocation(i, inFileNameData) img2.setClassId(i % 5) img2.setMicId(i % 3) img2.setSamplingRate(2.) img2._attrb1= Float(attrb1[counter]) img2._attrb2= Float(attrb2[counter]) imgSet2.append(img2) counter +=1 imgSet1.write() imgSet2.write() #import them protImport1 = self.newProtocol(ProtImportParticles, objLabel='import set1', importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata1, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True ) self.launchProtocol(protImport1) protImport2 = self.newProtocol(ProtImportParticles, objLabel='import set2', importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata2, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True ) self.launchProtocol(protImport2) #create merge protocol p_union = self.newProtocol(ProtUnionSet, objLabel='join diff column order', ignoreExtraAttributes=True) p_union.inputSets.append(protImport1.outputParticles) p_union.inputSets.append(protImport2.outputParticles) self.proj.launchProtocol(p_union, wait=True) #assert counter=0 for img in p_union.outputSet: self.assertAlmostEqual(attrb1[counter],img._attrb1,4) self.assertAlmostEqual(attrb2[counter],img._attrb2,4) counter += 1
def testMergeDifferentAttrs(self): """ Test merge from subsets with different attritubes. That is, M1(a,b,c) U M2(a,b,c,d)""" #create two set of particles inFileNameMetadata1 = self.proj.getTmpPath('particles11.sqlite') inFileNameMetadata2 = self.proj.getTmpPath('particles22.sqlite') imgSet1 = SetOfParticles(filename=inFileNameMetadata1) imgSet2 = SetOfParticles(filename=inFileNameMetadata2) inFileNameData = self.proj.getTmpPath('particles.stk') img1 = Particle() img2 = Particle() attrb1 = [11, 12, 13, 14] attrb2 = [21, 22, 23, 24] attrb3 = [31, 32] counter = 0 # Test the join handles different attributes at a second level ctf1 = CTFModel(defocusU=1000, defocusV=1000, defocusAngle=0) ctf2 = CTFModel(defocusU=2000, defocusV=2000, defocusAngle=0) ctf2._myOwnQuality = Float(1.) img1.setCTF(ctf1) img2.setCTF(ctf2) for i in range(1, 3): img1.cleanObjId() img1.setLocation(i, inFileNameData) img1.setMicId(i % 3) img1.setClassId(i % 5) img1.setSamplingRate(1.) img1._attrb1 = Float(attrb1[counter]) img1._attrb2 = Float(attrb2[counter]) img1._attrb3 = Float(attrb3[counter]) imgSet1.append(img1) counter += 1 for i in range(1, 3): img2.cleanObjId() img2.setLocation(i, inFileNameData) img2.setClassId(i % 5) img2.setMicId(i % 3) img2.setSamplingRate(2.) img2._attrb1 = Float(attrb1[counter]) img2._attrb2 = Float(attrb2[counter]) imgSet2.append(img2) counter += 1 imgSet1.write() imgSet2.write() #import them protImport1 = self.newProtocol( ProtImportParticles, objLabel='import set1', importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata1, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True) self.launchProtocol(protImport1) protImport2 = self.newProtocol( ProtImportParticles, objLabel='import set2', importFrom=ProtImportParticles.IMPORT_FROM_SCIPION, sqliteFile=inFileNameMetadata2, magnification=10000, samplingRate=7.08, haveDataBeenPhaseFlipped=True) self.launchProtocol(protImport2) #create merge protocol p_union = self.newProtocol(ProtUnionSet, objLabel='join different attrs', ignoreExtraAttributes=True) p_union.inputSets.append(protImport1.outputParticles) p_union.inputSets.append(protImport2.outputParticles) self.proj.launchProtocol(p_union, wait=True) counter = 0 for img in p_union.outputSet: self.assertAlmostEqual(attrb1[counter], img._attrb1, 4) self.assertAlmostEqual(attrb2[counter], img._attrb2, 4) self.assertFalse(hasattr(img, '_attrb3'), "join should not have attrb3") self.assertTrue(hasattr(img, '_attrb2'), "join should have attrb2") ctf = img.getCTF() self.assertIsNotNone(ctf, "Image should have CTF after join") self.assertFalse(hasattr(ctf, '_myOwnQuality'), "CTF should not have non common attributes") counter += 1
def createOutputStep(self): fnTilted = self._getExtraPath("images_tilted.xmd") fnUntilted = self._getExtraPath("images_untilted.xmd") # Create outputs SetOfParticles both for tilted and untilted imgSetU = self._createSetOfParticles(suffix="Untilted") imgSetU.copyInfo(self.uMics) imgSetT = self._createSetOfParticles(suffix="Tilted") imgSetT.copyInfo(self.tMics) sampling = self.getMicSampling() if self._micsOther( ) else self.getCoordSampling() if self._doDownsample(): sampling *= self.downFactor.get() imgSetU.setSamplingRate(sampling) imgSetT.setSamplingRate(sampling) # set coords from the input, will update later if needed imgSetU.setCoordinates( self.inputCoordinatesTiltedPairs.get().getUntilted()) imgSetT.setCoordinates( self.inputCoordinatesTiltedPairs.get().getTilted()) # Read untilted and tilted particles on a temporary object (also disabled particles) imgSetAuxU = SetOfParticles(filename=':memory:') imgSetAuxU.copyInfo(imgSetU) readSetOfParticles(fnUntilted, imgSetAuxU, removeDisabled=False) imgSetAuxT = SetOfParticles(filename=':memory:') imgSetAuxT.copyInfo(imgSetT) readSetOfParticles(fnTilted, imgSetAuxT, removeDisabled=False) # calculate factor for coords scaling factor = 1 / self.samplingFactor if self._doDownsample(): factor /= self.downFactor.get() coordsT = self.getCoords().getTilted() # For each untilted particle retrieve micId from SetOfCoordinates untilted for imgU, coordU in izip(imgSetAuxU, self.getCoords().getUntilted()): # FIXME: Remove this check when sure that objIds are equal id = imgU.getObjId() if id != coordU.getObjId(): raise Exception( 'ObjIds in untilted img and coord are not the same!!!!') imgT = imgSetAuxT[id] coordT = coordsT[id] # If both particles are enabled append them if imgU.isEnabled() and imgT.isEnabled(): if self._micsOther() or self._doDownsample(): coordU.scale(factor) coordT.scale(factor) imgU.setCoordinate(coordU) imgSetU.append(imgU) imgT.setCoordinate(coordT) imgSetT.append(imgT) if self.doFlip: imgSetU.setIsPhaseFlipped(self.ctfUntilt.hasValue()) imgSetU.setHasCTF(self.ctfUntilt.hasValue()) imgSetT.setIsPhaseFlipped(self.ctfTilt.hasValue()) imgSetT.setHasCTF(self.ctfTilt.hasValue()) imgSetU.write() imgSetT.write() # Define output ParticlesTiltPair outputset = ParticlesTiltPair( filename=self._getPath('particles_pairs.sqlite')) outputset.setTilted(imgSetT) outputset.setUntilted(imgSetU) for imgU, imgT in izip(imgSetU, imgSetT): outputset.append(TiltPair(imgU, imgT)) outputset.setCoordsPair(self.inputCoordinatesTiltedPairs.get()) self._defineOutputs(outputParticlesTiltPair=outputset) self._defineSourceRelation(self.inputCoordinatesTiltedPairs, outputset)
def launchTest(self, fileKey, mList, alignType=None, **kwargs): """ Helper function to launch similar alignment tests give the EMX transformation matrix. Params: fileKey: the file where to grab the input stack images. mList: the matrix list of transformations (should be the same length of the stack of images) """ print "\n" print "*" * 80 print "* Launching test: ", fileKey print "*" * 80 is2D = alignType == ALIGN_2D stackFn = self.dataset.getFile(fileKey) partFn1 = self.getOutputPath(fileKey + "_particles1.sqlite") mdFn = self.getOutputPath(fileKey + "_particles.star") partFn2 = self.getOutputPath(fileKey + "_particles2.sqlite") if self.IS_ALIGNMENT: outputFn = self.getOutputPath(fileKey + "_output.mrcs") outputFnRelion = self.getOutputPath(fileKey + "_output") goldFn = self.dataset.getFile(fileKey + '_Gold_output_relion.mrcs') else: outputFn = self.getOutputPath(fileKey + "_output.vol") goldFn = self.dataset.getFile(fileKey + '_Gold_output.vol') if PRINT_FILES: print "BINARY DATA: ", stackFn print "SET1: ", partFn1 print " MD: ", mdFn print "SET2: ", partFn2 print "OUTPUT: ", outputFn print "GOLD: ", goldFn if alignType == ALIGN_2D or alignType == ALIGN_PROJ: partSet = SetOfParticles(filename=partFn1) else: partSet = SetOfVolumes(filename=partFn1) partSet.setAlignment(alignType) partSet.setAcquisition( Acquisition(voltage=300, sphericalAberration=2, amplitudeContrast=0.1, magnification=60000)) # Populate the SetOfParticles with images # taken from images.mrc file # and setting the previous alignment parameters aList = [numpy.array(m) for m in mList] for i, a in enumerate(aList): p = Particle() p.setLocation(i + 1, stackFn) p.setTransform(Transform(a)) partSet.append(p) # Write out the .sqlite file and check that are correctly aligned print "Parset", partFn1 partSet.printAll() partSet.write() # Convert to a Xmipp metadata and also check that the images are # aligned correctly if alignType == ALIGN_2D or alignType == ALIGN_PROJ: relion.writeSetOfParticles(partSet, mdFn, "/tmp", alignType=alignType) partSet2 = SetOfParticles(filename=partFn2) else: relion.writeSetOfVolumes(partSet, mdFn, alignType=alignType) partSet2 = SetOfVolumes(filename=partFn2) # Let's create now another SetOfImages reading back the written # Xmipp metadata and check one more time. partSet2.copyInfo(partSet) if alignType == ALIGN_2D or alignType == ALIGN_PROJ: relion.readSetOfParticles(mdFn, partSet2, alignType=alignType) else: relion.readSetOfVolumes(mdFn, partSet2, alignType=alignType) partSet2.write() if PRINT_MATRIX: for i, img in enumerate(partSet2): m1 = aList[i] m2 = img.getTransform().getMatrix() print "-" * 5 print img.getFileName(), img.getIndex() print 'm1:\n', m1, relion.geometryFromMatrix(m1, False) print 'm2:\n', m2, relion.geometryFromMatrix(m2, False) # self.assertTrue(numpy.allclose(m1, m2, rtol=1e-2)) # Launch apply transformation and check result images runRelionProgram(self.CMD % locals()) if SHOW_IMAGES: runRelionProgram('scipion show %(outputFn)s' % locals()) if os.path.exists(goldFn): self.assertTrue( ImageHandler().compareData(goldFn, outputFn, tolerance=0.001), "Different data files:\n>%s\n<%s" % (goldFn, outputFn))
class XmippProtEliminateEmptyParticles(XmippProtEliminateEmptyBase):
    """ Takes a set of particles and, using statistical methods (the variance
    of the variances of sub-parts of the input image), eliminates those samples
    where no object/particle is present (only noise). The threshold parameter
    can be used to fine-tune the algorithm for the type of data. """

    _label = 'eliminate empty particles'

    def __init__(self, **args):
        XmippProtEliminateEmptyBase.__init__(self, **args)

    # --------------------------- DEFINE param functions ----------------------
    def _defineParams(self, form):
        form.addSection(label=Message.LABEL_INPUT)
        # - - - F O R P A R T I C L E S - - -
        form.addParam('inputParticles', param.PointerParam,
                      important=True,
                      label="Input particles", pointerClass='SetOfParticles',
                      help='Select the input particles to be classified.')
        form.addParam('threshold', param.FloatParam, default=1.1,
                      label='Threshold used in elimination:',
                      help='Higher threshold => more particles will be '
                           'eliminated. Set to -1 to disable elimination; '
                           'even so, the "xmipp_scoreEmptiness" value will be '
                           'attached to every particle for later inspection.')
        self.addAdvancedParams(form)

    # --------------------------- INSERT steps functions ----------------------
    def _checkNewInput(self):
        # Check if there are new particles to process from the input set
        partsFile = self.inputParticles.get().getFileName()
        self.lastCheck = getattr(self, 'lastCheck', datetime.now())
        mTime = datetime.fromtimestamp(os.path.getmtime(partsFile))
        # If the input particles .sqlite file has not changed since our last
        # check, it does not make sense to check for new input data
        if self.lastCheck > mTime:
            return None

        self.lastCheck = datetime.now()
        outputStep = self._getFirstJoinStep()
        fDeps = self._insertNewPartsSteps()
        if outputStep is not None:
            outputStep.addPrerequisites(*fDeps)
        self.updateSteps()

    def eliminationStep(self, fnInputMd):
        self.inputImages = self.inputParticles.get()
        partsFile = self.inputImages.getFileName()
        self.partsSet = SetOfParticles(filename=partsFile)
        self.partsSet.loadAllProperties()
        self.streamClosed = self.partsSet.isStreamClosed()
        if self.check is None:
            writeSetOfParticles(self.partsSet, fnInputMd,
                                alignType=em.ALIGN_NONE, orderBy='creation')
        else:
            writeSetOfParticles(self.partsSet, fnInputMd,
                                alignType=em.ALIGN_NONE, orderBy='creation',
                                where='creation>"' + str(self.check) + '"')
        # Remember the creation time of the newest particle seen so far
        for p in self.partsSet.iterItems(orderBy='creation', direction='DESC'):
            self.check = p.getObjCreation()
            break
        self.partsSet.close()
        self.lenPartsSet = len(self.partsSet)
        print("os.path.exists(fnInputMd): %s" % os.path.exists(fnInputMd))

        args = "-i %s -o %s -e %s -t %f" % (fnInputMd, self.fnOutputMd,
                                            self.fnElimMd, self.threshold.get())
        if self.addFeatures:
            args += " --addFeatures"
        if self.useDenoising:
            args += " --useDenoising -d %f" % self.denoising.get()
        self.runJob("xmipp_image_eliminate_empty_particles", args)
        cleanPath(fnInputMd)

    def createOutputs(self):
        streamMode = Set.STREAM_CLOSED if getattr(self, 'finished', False) \
            else Set.STREAM_OPEN

        def updateOutputs(mdFn, suffix):
            newData = os.path.exists(mdFn)
            lastToClose = getattr(self, 'finished', False) and \
                hasattr(self, '%sParticles' % suffix)
            if newData or lastToClose:
                outSet = self._loadOutputSet(em.SetOfParticles,
                                             '%sParticles.sqlite' % suffix)
                if newData:
                    partsSet = self._createSetOfParticles("AUX")
                    readSetOfParticles(mdFn, partsSet)
                    outSet.copyItems(partsSet,
                                     updateItemCallback=self._updateParticle,
                                     itemDataIterator=md.iterRows(
                                         mdFn, sortByLabel=md.MDL_ITEM_ID))
                    self.outputSize = self.outputSize + len(partsSet)
                self._updateOutputSet('%sParticles' % suffix, outSet,
                                      streamMode)
                cleanPath(mdFn)

        updateOutputs(self.fnOutMdTmp, 'output')
        updateOutputs(self.fnElimMdTmp, 'eliminated')

    def getInput(self):
        return self.inputParticles.get()
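# --- Illustrative scoring sketch (not the Xmipp implementation) --------------
# The real scoring is done by the xmipp_image_eliminate_empty_particles binary
# called above. The self-contained NumPy snippet below only sketches the
# statistic named in the class docstring (variance of the variances of
# sub-parts of the image): a pure-noise image gives similar per-block variances
# and hence a low score, while an image with a localized signal scores higher.
# The 16-pixel block size and the test images are arbitrary assumptions.
import numpy as np

def variance_of_variances(img, block=16):
    """Variance of the per-block variances of a 2D image (illustrative only)."""
    h, w = img.shape
    blockVariances = []
    for y in range(0, h - block + 1, block):
        for x in range(0, w - block + 1, block):
            blockVariances.append(img[y:y + block, x:x + block].var())
    return np.var(blockVariances)

noise = np.random.normal(size=(128, 128))                      # "empty" image
particle = noise.copy()
particle[48:80, 48:80] += 3.0 * np.random.normal(size=(32, 32))  # local signal
print(variance_of_variances(noise), variance_of_variances(particle))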
def createOutputStep(self):
    # Create the SetOfImages object on the database
    # imgSet = XmippSetOfParticles(self._getPath('images.xmd'))

    # Create the images.xmd metadata
    fnImages = self._getPath('images.xmd')
    imgsXmd = xmipp.MetaData()
    posFiles = glob(self._getExtraPath('*.pos'))
    for posFn in posFiles:
        xmdFn = self._getExtraPath(replaceBaseExt(posFn, "xmd"))
        if exists(xmdFn):
            md = xmipp.MetaData(xmdFn)
            mdPos = xmipp.MetaData('particles@%s' % posFn)
            mdPos.merge(md)
            # imgSet.appendFromMd(mdPos)
            imgsXmd.unionAll(mdPos)
        else:
            self.warning("The coord file %s was not used for extraction! "
                         "Maybe you are extracting over a subset of "
                         "micrographs" % basename(posFn))
    imgsXmd.write(fnImages)

    # If selected, run xmipp_image_sort_by_statistics to add zscore info
    # to images.xmd
    if self.doSort:
        args = "-i %(fnImages)s --addToInput"
        if self.rejectionMethod == REJECT_MAXZSCORE:
            maxZscore = self.maxZscore.get()
            args += " --zcut " + str(maxZscore)
        elif self.rejectionMethod == REJECT_PERCENTAGE:
            percentage = self.percentage.get()
            args += " --percent " + str(percentage)
        self.runJob("xmipp_image_sort_by_statistics", args % locals())

    # Create the output SetOfParticles
    imgSet = self._createSetOfParticles()
    imgSet.copyInfo(self.inputMics)
    if self.doFlip:
        # Flipping during extraction inverts the phase-flipped state
        # of the input micrographs.
        imgSet.setIsPhaseFlipped(not self.inputMics.isPhaseFlipped())
    # imgSet.setHasCTF(self.fnCTF is not None)

    if self.downsampleType == OTHER:
        imgSet.setSamplingRate(self.inputMics.getSamplingRate() *
                               self.downFactor.get())
    imgSet.setCoordinates(self.inputCoords)

    # Create a temporary set to read from the metadata file and later build
    # the final one with the coordinates properly set. We need this because
    # .update is not working in the mapper when new attributes are added.
    imgSet.setHasCTF(self.ctfRelations.hasValue())
    auxSet = SetOfParticles(filename=':memory:')
    auxSet.copyInfo(imgSet)
    readSetOfParticles(fnImages, auxSet)

    # For each particle, retrieve the micId from the SetOfCoordinates
    # and set it on the CTFModel
    for img in auxSet:
        # FIXME: This can be slow, since it makes one query per particle to
        # grab the coordinate; maybe use zip(imgSet, coordSet)?
        coord = self.inputCoords[img.getObjId()]
        ctfModel = img.getCTF()
        if ctfModel is not None:
            ctfModel.setObjId(coord.getMicId())
            # img.setCTF(ctfModel)
        img.setMicId(coord.getMicId())
        img.setCoordinate(coord)
        imgSet.append(img)

    self._storeMethodsInfo(fnImages)
    self._defineOutputs(outputParticles=imgSet)
    self._defineSourceRelation(self.inputCoords, imgSet)
def _createSubSetFromParticlesTiltPair(self, particlesTiltPair):
    """ Create a subset of a ParticlesTiltPair. """
    output = ParticlesTiltPair(
        filename=self._getPath('particles_pairs.sqlite'))

    inputU = particlesTiltPair.getUntilted()
    inputT = particlesTiltPair.getTilted()
    outputU = SetOfParticles(
        filename=self._getPath('particles_untilted.sqlite'))
    outputT = SetOfParticles(
        filename=self._getPath('particles_tilted.sqlite'))
    outputU.copyInfo(inputU)
    outputT.copyInfo(inputT)

    modifiedSet = ParticlesTiltPair(filename=self._dbName,
                                    prefix=self._dbPrefix)

    # Keep only the pairs that are still enabled, together with their
    # untilted and tilted particles.
    for pair, u, t in izip(modifiedSet, inputU, inputT):
        if pair.isEnabled():
            output.append(pair)
            outputU.append(u)
            outputT.append(t)
    # Register outputs
    output.setUntilted(outputU)
    output.setTilted(outputT)

    outputDict = {'outputParticlesTiltPair': output}
    self._defineOutputs(**outputDict)
    self._defineTransformRelation(particlesTiltPair, output)
    return output
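# --- Illustrative filtering sketch (hypothetical stand-in objects) -----------
# The loop above keeps each untilted/tilted particle only when its pair is
# still enabled in the modified set. The plain-Python snippet below mimics
# that zip-and-filter pattern with dictionaries instead of Scipion objects;
# none of these names exist in the real code.
pairs = [{'enabled': True}, {'enabled': False}, {'enabled': True}]
untilted = ['u1', 'u2', 'u3']
tilted = ['t1', 't2', 't3']

keptU, keptT = [], []
for pair, u, t in zip(pairs, untilted, tilted):
    if pair['enabled']:
        keptU.append(u)
        keptT.append(t)

print(keptU, keptT)   # ['u1', 'u3'] ['t1', 't3']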