def checkSplit(self):
    """Iteratively split the most populated class until the requested
    number of classes is reached.

    Reads the current classification from 'last_classes.xmd', then, while
    there is still room for more classes (self.numClasses), picks the
    biggest class, splits it with self.splitStep, and re-reads the updated
    classification from 'split_last_classes.xmd'.

    Side effects: rewrites self.listNameImgs / self.listNumImgs /
    self.listRefImgs and overwrites 'last_classes.xmd'.
    """
    outSet = self._getExtraPath('last_classes.xmd')
    self.listNameImgs = []
    self.listNumImgs = []
    self.listRefImgs = []
    # Load per-class name, particle count and reference id
    metadataItem = md.MetaData(outSet)
    for item in metadataItem:
        nameImg = metadataItem.getValue(md.MDL_IMAGE, item)
        self.listNameImgs.append(nameImg)
        numImgs = metadataItem.getValue(md.MDL_CLASS_COUNT, item)
        self.listNumImgs.append(numImgs)
        refImg = metadataItem.getValue(md.MDL_REF, item)
        self.listRefImgs.append(refImg)
    i = 0
    # Class sizes sorted descending: we always try to split the biggest one
    auxList = sorted(self.listNumImgs, reverse=True)
    while i < len(self.listNumImgs):
        if len(self.listNumImgs) < self.numClasses.get():  # inside the while
            # just in case we lose some class we want to allow one more
            # split
            if auxList[i] < (1.75 * self.threshold.get()):
                # Class too small to be worth splitting; try the next one
                i += 1
                continue
            maxValue = auxList[i]
            maxPos = self.listNumImgs.index(maxValue)
            # Mark this class as consumed so .index() won't find it again
            self.listNumImgs[maxPos] = -1
            bestRef = self.listRefImgs[maxPos]
            outputMd = self._getExtraPath('dataClass%06d.xmd' % bestRef)
            self.splitStep(outputMd)
            i = 0
            # Re-read the classification produced by the split
            outSet = self._getExtraPath('split_last_classes.xmd')
            self.listNameImgs = []
            self.listNumImgs = []
            self.listRefImgs = []
            metadataItem = md.MetaData(outSet)
            for item in metadataItem:
                nameImg = metadataItem.getValue(md.MDL_IMAGE, item)
                self.listNameImgs.append(nameImg)
                numImgs = metadataItem.getValue(md.MDL_CLASS_COUNT, item)
                self.listNumImgs.append(numImgs)
                refImg = metadataItem.getValue(md.MDL_REF, item)
                self.listRefImgs.append(refImg)
            # The split result becomes the new reference classification
            copy(outSet, self._getExtraPath('last_classes.xmd'))
            auxList = sorted(self.listNumImgs, reverse=True)
            i = 0
        else:
            # Already reached the requested number of classes
            break
def createOutputStep(self):
    """Gather per-movie particle metadata into one set of movie particles.

    Merges every existing per-movie .xmd file into a single
    'movie_particles.xmd' and registers the resulting
    SetOfMovieParticles as the protocol output.
    """
    inputMovies = self.inputMovies.get()
    particleSet = self._createSetOfMovieParticles()
    particleSet.copyInfo(inputMovies)
    # Create a folder to store the resulting micrographs
    # particleFolder = self._getPath('particles')
    # makePath(particleFolder)
    mData = md.MetaData()
    mdAll = md.MetaData()
    # Map movie object id -> micrograph name, used later by
    # self._postprocessImageRow
    self._micNameDict = {}
    for movie in inputMovies:
        self._micNameDict[movie.getObjId()] = movie.getMicName()
        movieName = self._getMovieName(movie)
        # NOTE(review): str.replace substitutes EVERY '.mrc' occurrence in
        # the path, not only the extension — confirm movie paths never
        # contain '.mrc' in a directory name.
        movieStk = movieName.replace('.mrc', '.stk')
        movieMdFile = movieName.replace('.mrc', '.xmd')
        # Store particle stack and metadata files in final particles folder
        if os.path.exists(movieStk):
            mData.read(movieMdFile)
            mdAll.unionAll(mData)
    particleMd = self._getPath('movie_particles.xmd')
    mdAll.addItemId()
    mdAll.write(particleMd)
    readSetOfMovieParticles(particleMd, particleSet, removeDisabled=False,
                            postprocessImageRow=self._postprocessImageRow)
    self._defineOutputs(outputParticles=particleSet)
    self._defineSourceRelation(self.inputMovies, particleSet)
def testHandlingMd(self):
    """Smoke test: randomize a star file and write the first 100 rows.

    NOTE(review): uses absolute, user-specific paths
    ('/home/josuegbl/...'), so this test only runs on the original
    author's machine — consider shipping a small fixture instead.
    """
    starFn = '/home/josuegbl/raw_final_data.star'
    fn = '/home/josuegbl/random_data.star'
    # labels = np.array([0, 0, 0, 0, 0, 0, 0, 1, 0])
    mdData = md.MetaData(starFn)
    mdAux = md.MetaData()
    mdFinal = md.MetaData()
    # Shuffle rows, then keep a 100-row sample starting at object 1
    mdAux.randomize(mdData)
    mdFinal.selectPart(mdAux, 1, 100)
    # listMd = []
    #
    # for row in md.iterRows(mdData, sortByLabel=md.RLN_PARTICLE_CLASS):
    #     clsPart = row.getValue(md.RLN_PARTICLE_CLASS)
    #     newClass = labels[clsPart-1] + 1
    #     row.setValue(md.RLN_PARTICLE_CLASS, newClass)
    #
    #     listMd.append((row, newClass))
    # res = defaultdict(list)
    # for v, k in listMd: res[k].append(v)
    #
    # for key, listMd in res.iteritems():
    #     mdInput = md.MetaData()
    #     for rowMd in listMd:
    #         objId = mdInput.addObject()
    #         rowMd.writeToMd(mdInput, objId)
    mdFinal.write(fn)
def alignParticlesStep(self):
    """Apply a precomputed 4x4 transformation to every particle alignment.

    Loads the matrix from 'transformation-matrix.txt', left-multiplies
    each particle's projection-alignment matrix by it, and writes the
    updated rows to 'outputParticles.xmd'. The temporary input metadata
    (self.imgsInputFn) is removed afterwards.
    """
    fhInputTranMat = self._getExtraPath('transformation-matrix.txt')
    outParticlesFn = self._getExtraPath('outputParticles.xmd')
    transMatFromFile = np.loadtxt(fhInputTranMat)
    transformationMat = np.reshape(transMatFromFile, (4, 4))
    resultMat = Transform()
    outputParts = md.MetaData()
    mdToAlign = md.MetaData(self.imgsInputFn)
    for row in md.iterRows(mdToAlign):
        inMat = rowToAlignment(row, ALIGN_PROJ)
        # np.matrix is deprecated; operate on a plain ndarray instead
        partTransformMat = np.asarray(inMat.getMatrix())
        newTransformMatrix = np.matmul(transformationMat, partTransformMat)
        resultMat.setMatrix(newTransformMatrix)
        rowOut = md.Row()
        rowOut.copyFromRow(row)
        alignmentToRow(resultMat, rowOut, ALIGN_PROJ)
        rowOut.addToMd(outputParts)
    outputParts.write(outParticlesFn)
    cleanPath(self.imgsInputFn)
def mergeVolumesStep(self, numOfRuns):
    """Collect reference volumes from all runs, score them, and keep the
    best self.numOfVols ones in the reference star file.

    :param numOfRuns: number of classification runs to harvest.
    Side effects: sets self.std (std-dev of rotation accuracies) and
    writes the selected volume names to self._getRefStar().
    """
    mdOut = md.MetaData()
    volFnList = []
    clsDistList = []
    accList = []
    for run in range(1, numOfRuns + 1):
        it = self._lastIter(run)
        modelFile = self._getFileName('model', ruNum=run, iter=it)
        mdIn = md.MetaData('model_classes@%s' % modelFile)
        for row in md.iterRows(mdIn, md.RLN_MLMODEL_REF_IMAGE):
            volFn = row.getValue(md.RLN_MLMODEL_REF_IMAGE)
            clsDist = row.getValue('rlnClassDistribution')
            accRot = row.getValue('rlnAccuracyRotations')
            # 90 degrees means "no meaningful rotational accuracy"
            if accRot <= 90:
                volFnList.append(volFn)
                clsDistList.append(clsDist)
                accList.append(accRot)
    self.std = np.std(accList)
    # Renamed from 'score' so the per-volume score list is not shadowed
    # by the loop variable below (the original re-bound the same name)
    scores = self._estimateScore(accList, clsDistList)
    tupleList = list(zip(volFnList, scores))
    nVols = self.numOfVols.get()
    # Keep the nVols highest-scored volumes
    sortList = sorted(tupleList, reverse=True, key=lambda x: x[1])[:nVols]
    row = md.Row()
    for fn, _score in sortList:
        row.setValue(md.RLN_MLMODEL_REF_IMAGE, fn)
        row.addToMd(mdOut)
    mdOut.write(self._getRefStar())
def _fillClasses(self, outputClasses):
    """ Create the SetOfClasses2D.

    First pass: create one Class2D per row of the 'classes' block of
    final_classes.xmd, using the class average as representative.
    Second pass: for each 'class00...' block, append its particles to the
    corresponding class in outputClasses.
    """
    inputSet = self.inputClasses.get().getImages()
    myRep = md.MetaData('classes@' + self._getExtraPath(
        'final_classes.xmd'))
    for row in md.iterRows(myRep):
        fn = row.getValue(md.MDL_IMAGE)
        rep = Particle()
        rep.setLocation(xmippToLocation(fn))
        repId = row.getObjId()
        newClass = Class2D(objId=repId)
        newClass.setAlignment2D()
        newClass.copyInfo(inputSet)
        newClass.setAcquisition(inputSet.getAcquisition())
        newClass.setRepresentative(rep)
        outputClasses.append(newClass)
    # i indexes classes created above; class ids start at 1
    i = 1
    mdBlocks = md.getBlocksInMetaDataFile(self._getExtraPath(
        'final_classes.xmd'))
    for block in mdBlocks:
        if block.startswith('class00'):
            mdClass = md.MetaData(block + "@" + self._getExtraPath(
                'final_classes.xmd'))
            imgClassId = i
            newClass = outputClasses[imgClassId]
            newClass.enableAppend()
            for row in md.iterRows(mdClass):
                part = rowToParticle(row)
                newClass.append(part)
            i += 1
            # Persist the modified class back into the set
            newClass.setAlignment2D()
            outputClasses.update(newClass)
def createOutputStep(self):
    """Build the validated output volumes and per-volume particle sets.

    For each input volume: moves the alignability metadata files into the
    extra folder, creates a weighted SetOfParticles, attaches the three
    alignability weights to the volume, and plots the results. Finally
    registers the SetOfVolumes output and removes intermediate files.
    """
    outputVols = self._createSetOfVolumes()
    for i, vol in enumerate(self._iterInputVols()):
        volDir = self._getVolDir(i + 1)
        volume = vol.clone()
        volPrefix = 'vol%03d_' % (i + 1)
        m_pruned = md.MetaData()
        m_pruned.read(volDir + '/pruned_particles_alignability.xmd')
        prunedMd = self._getExtraPath(
            volPrefix + 'pruned_particles_alignability.xmd')
        moveFile(join(volDir, 'pruned_particles_alignability.xmd'),
                 prunedMd)
        m_volScore = md.MetaData()
        m_volScore.read(volDir + '/validationAlignability.xmd')
        validationMd = self._getExtraPath(
            volPrefix + 'validation_alignability.xmd')
        moveFile(join(volDir, 'validationAlignability.xmd'), validationMd)
        imgSet = self.inputParticles.get()
        outImgSet = self._createSetOfParticles(volPrefix)
        outImgSet.copyInfo(imgSet)
        # Copy particles, setting each one's weight from the pruned md
        outImgSet.copyItems(imgSet,
                            updateItemCallback=self._setWeight,
                            itemDataIterator=md.iterRows(
                                prunedMd, sortByLabel=md.MDL_ITEM_ID))
        # NOTE(review): 'mdValidatoin' is a typo for 'mdValidation'
        # (kept as-is; renaming is cosmetic)
        mdValidatoin = md.getFirstRow(validationMd)
        weight = mdValidatoin.getValue(
            md.MDL_WEIGHT_PRECISION_ALIGNABILITY)
        volume.weightAlignabilityPrecision = Float(weight)
        weight = mdValidatoin.getValue(
            md.MDL_WEIGHT_ACCURACY_ALIGNABILITY)
        volume.weightAlignabilityAccuracy = Float(weight)
        weight = mdValidatoin.getValue(md.MDL_WEIGHT_PRECISION_MIRROR)
        volume.weightMirror = Float(weight)
        volume.cleanObjId()  # clean objects id to assign new ones inside the set
        outputVols.append(volume)
        # outputParticles is redefined on every iteration; the last
        # volume's particle set wins
        self._defineOutputs(outputParticles=outImgSet)
        self.createPlot2D(volPrefix, m_pruned)
    # Sampling rate taken from the last processed volume
    outputVols.setSamplingRate(volume.getSamplingRate())
    self._defineOutputs(outputVolumes=outputVols)
    # Remove intermediate artifacts
    cleanPattern(self._getPath("reference_particles.*"))
    cleanPattern(self._getExtraPath("scaled_particles.*"))
    cleanPattern(self._getExtraPath("reference_particles.*"))
    cleanPattern(self._getExtraPath("corrected_ctf_particles.*"))
    cleanPattern(self._getFileName("volume"))
    cleanPattern(self._getExtraPath("params.txt"))
def _evalStop(self):
    """Decide, per class/map, whether its classification should stop.

    For every run of the current level, reads the model metadata and for
    each sufficiently populated class (self._getClasDistCond) evaluates
    two stop criteria: class smaller than minPartsToStop, or (3D with
    reslog enabled) the SSNR-based slope test. Results are stored in
    self.stopDict[mapId]; stopping classes also get an id in
    self._clsIdDict.
    """
    noOfLevRuns = self._getLevRuns(self._level)
    if self.IS_3D and self.useReslog:
        # Linear model (slope, intercept, error) fitted on resolution logs
        slope, y0, err = self._getReslogVars()
    for rLev in noOfLevRuns:
        iters = self._lastIter(rLev)
        modelFn = self._getFileName('model', iter=iters,
                                    lev=self._level, rLev=rLev)
        modelMd = md.MetaData('model_classes@' + modelFn)
        partMdFn = "particles@" + self._getFileName('input_star',
                                                    lev=self._level,
                                                    rLev=rLev)
        partSize = md.getSize(partMdFn)
        clsId = 1
        for row in md.iterRows(modelMd):
            fn = row.getValue(md.RLN_MLMODEL_REF_IMAGE)
            clasDist = row.getValue('rlnClassDistribution')
            # Absolute class population from its fractional distribution
            classSize = int(clasDist * partSize)
            if self._getClasDistCond(clasDist):
                mapId = self._mapsDict[fn]
                ptcStop = self.minPartsToStop.get()
                if classSize < int(0.95*partSize):
                    if self.IS_3D and self.useReslog:
                        suffixSsnr = 'model_class_%d@' % clsId
                        ssnrMd = md.MetaData(suffixSsnr + modelFn)
                        f = self._getFunc(ssnrMd)
                        # NOTE(review): np.math is deprecated in recent
                        # NumPy; math.log10 would be the drop-in value.
                        ExpcVal = slope*np.math.log10(classSize) + y0
                        val = f(1) + (2*err)
                        clsId += 1
                        print("StopValues: \n"
                              "Val SSnr=1: %0.4f, parts %d, ExpcVal "
                              "%0.4f" % (val, classSize, ExpcVal))
                        # Only trust the slope test beyond level 2
                        evalSlope = val < ExpcVal and self._level > 2
                    else:
                        evalSlope = False
                else:
                    # Class holds nearly all particles: stop it
                    evalSlope = True
                print("Values to stop the classification: ")
                print("Lev: ", self._level, "rLev: ", rLev)
                print("partSize: ", partSize)
                print("class size: ", classSize)
                print("min parts to stop: ", ptcStop)
                print("evalSlope: ", evalSlope)
                if classSize < ptcStop or evalSlope:
                    self.stopDict[mapId] = True
                    # Assign the next free class id to this map
                    if not bool(self._clsIdDict):
                        self._clsIdDict[mapId] = 1
                    else:
                        classId = sorted(self._clsIdDict.values())[-1] + 1
                        self._clsIdDict[mapId] = classId
                else:
                    self.stopDict[mapId] = False
def generateMdForClassification(self, classesOut):
    """Renumber surviving classes plus the two new split classes, and
    regenerate the per-class image metadata files.

    Classes marked with -1 in self.listNumImgs (the one that was split)
    are skipped; the remaining ones are renumbered consecutively, then
    the two classes from classesOut are appended.
    """
    listNameImgs = self.listNameImgs
    listNumImgs = self.listNumImgs
    count = 1
    # Construct the new classes with the renumerated old classes
    mdNewClasses = md.MetaData()
    for i in range(len(listNumImgs)):
        if listNumImgs[i] != -1:
            name = listNameImgs[i]
            # name looks like '<6-digit ref>@<file>'; strip extension,
            # use the .xmd companion file
            fn = name[name.find('@') + 1:-4] + '.xmd'
            numRef = int(name[0:6])
            mdClass = md.MetaData("classes@" + fn)
            for row in md.iterRows(mdClass):
                # Keep only the row belonging to this class, renumbered
                if mdClass.getValue(md.MDL_REF,
                                    row.getObjId()) == numRef:
                    row.setValue(md.MDL_REF, count)
                    row.addToMd(mdNewClasses)
            count += 1
    # Add the two new classes to the list of renumerated classes
    mdClass = md.MetaData("classes@" + classesOut)
    rows = md.iterRows(mdClass)
    for row in rows:
        row.setValue(md.MDL_REF, count)
        row.addToMd(mdNewClasses)
        count += 1
    mdNewClasses.write(
        'classes@' + self._getExtraPath('split_last_classes.xmd'),
        MD_APPEND)
    # Generate the intermediate images and the blocks of the intermediate
    # classes for the unchanged classes
    count = 1
    for i in range(len(listNumImgs)):
        if listNumImgs[i] != -1:
            # Read the list of images in this class
            mdImgsInClass = md.MetaData(
                self._getExtraPath('dataClass%06d.xmd' % (i + 1)))
            mdImgsInClass.fillConstant(md.MDL_REF, count)
            mdImgsInClass.write(
                self._getExtraPath('dataClass%06d.xmd' % count))
            count += 1
    # Add the two new classes
    for newRef in range(0, 2):
        mdImgsInClass = md.MetaData('class%06d_images@%s' %
                                    (newRef + 1, classesOut))
        mdImgsInClass.fillConstant(md.MDL_REF, count)
        mdImgsInClass.write(self._getExtraPath('dataClass%06d.xmd'
                                               % count))
        count += 1
def _getMetrics(self):
    """ Internal method to compute some metrics.

    Reads the FSC-Q and FSC-Qr means from their metadata files, then
    scans the annotated PDB counting atoms whose FSC-Q value (stored in
    columns 55-60 of each ATOM/HETATM line — presumably the B-factor
    field, confirm against the writer) is above 0.5 or below -0.5.

    Returns a dict of pyworkflow Float/Integer metric objects.
    """
    # mean values of FSC-Q
    mtd = md.MetaData()
    mtd.read(self._getFileName(MD_MEANS))
    mean = mtd.getValue(MDL_VOLUME_SCORE1, 1)
    meanA = mtd.getValue(MDL_VOLUME_SCORE2, 1)
    # means value for map divided by resolution (FSC-Qr)
    mtd2 = md.MetaData()
    mtd2.read(self._getFileName(MD2_MEANS))
    mean2 = mtd2.getValue(MDL_VOLUME_SCORE1, 1)
    meanA2 = mtd2.getValue(MDL_VOLUME_SCORE2, 1)
    # statistic from final pdb with fsc-q:
    # number of atoms greater than 0.5 or less than -0.5
    total_atom = 0
    fscq_greater = 0
    fscq_less = 0
    # Stream the file line by line instead of readlines() (the original
    # also carried an unused enumerate index)
    with open(self._getFileName(PDB_VALUE_FILE)) as f:
        for lin in f:
            if lin.startswith(('ATOM', 'HETATM')):
                total_atom += 1
                fscq_atom = float(lin[54:60])
                if fscq_atom > 0.5:
                    fscq_greater += 1
                elif fscq_atom < -0.5:
                    fscq_less += 1
    # Guard against an atom-less PDB to avoid ZeroDivisionError
    if total_atom:
        porc_greater = (fscq_greater * 100) / total_atom
        porc_less = (fscq_less * 100) / total_atom
    else:
        porc_greater = 0
        porc_less = 0
    return {
        'mean': Float(mean),
        'meanA': Float(meanA),
        'mean2': Float(mean2),
        'meanA2': Float(meanA2),
        'total_atom': Integer(total_atom),
        'fscq_greater': Integer(fscq_greater),
        'fscq_less': Integer(fscq_less),
        'porc_greater': Float(porc_greater),
        'porc_less': Float(porc_less)
    }
def createInputMd(self, vols):
    """Write the input volumes metadata and the missing-wedge description.

    Produces 'input_volumes.xmd' (zeroing angles/shifts when no alignment
    is copied) and 'wedges.xmd' with one missing-region row per volume.
    """
    fnVols = self._getExtraPath('input_volumes.xmd')
    if self.copyAlignment:
        alignType = vols.getAlignment()
    else:
        alignType = ALIGN_NONE
    writeSetOfVolumes(vols, fnVols,
                      postprocessImageRow=self._postprocessVolumeRow,
                      alignType=alignType)
    if not vols.hasAlignment() or not self.copyAlignment:
        # No alignment available/requested: reset all angles and shifts
        mdFn = md.MetaData(fnVols)
        mdFn.fillConstant(md.MDL_ANGLE_ROT, 0.)
        mdFn.fillConstant(md.MDL_ANGLE_TILT, 0.)
        mdFn.fillConstant(md.MDL_ANGLE_PSI, 0.)
        mdFn.fillConstant(md.MDL_SHIFT_X, 0.)
        mdFn.fillConstant(md.MDL_SHIFT_Y, 0.)
        mdFn.fillConstant(md.MDL_SHIFT_Z, 0.)
        mdFn.write(fnVols, md.MD_OVERWRITE)
    # set missing angles
    missType = ['wedge_y', 'wedge_x', 'pyramid', 'cone']
    missNum = self.missingDataType.get()
    missAng = self.missingAng.get()
    missDataFn = self._getExtraPath('wedges.xmd')
    # Angles come as a whitespace-separated string, e.g. '-60 60'
    missAngValues = str(missAng).strip().split()
    thetaY0, thetaYF, thetaX0, thetaXF = 0, 0, 0, 0
    mdFn = md.MetaData()
    # NOTE(review): the unpacking below assumes the user supplied exactly
    # the number of angles the chosen missing-data type needs; a wrong
    # count raises ValueError here.
    if missNum == MISSING_WEDGE_X:
        thetaX0, thetaXF = missAngValues
    elif missNum == MISSING_WEDGE_Y:
        thetaY0, thetaYF = missAngValues
    elif missNum == MISSING_PYRAMID:
        thetaY0, thetaYF, thetaX0, thetaXF = missAngValues
    else:  # MISSING_CONE
        thetaY0 = missAngValues[0]
    # Same missing-region row is written once per input volume
    for i in range(1, vols.getSize() + 1):
        row = md.Row()
        row.setValue(md.MDL_MISSINGREGION_NR, missNum + 1)
        row.setValue(md.MDL_MISSINGREGION_TYPE, missType[missNum])
        row.setValue(md.MDL_MISSINGREGION_THX0, float(thetaX0))
        row.setValue(md.MDL_MISSINGREGION_THXF, float(thetaXF))
        row.setValue(md.MDL_MISSINGREGION_THY0, float(thetaY0))
        row.setValue(md.MDL_MISSINGREGION_THYF, float(thetaYF))
        row.addToMd(mdFn)
    mdFn.write(missDataFn, md.MD_APPEND)
def checkOutput(self, level):
    """Extract the images of the most populated class at this level.

    Reads the intermediate classification, finds the class with the most
    particles, and runs xmipp_metadata_utilities to dump its images into
    'images_level%03d_major.xmd'.

    Returns (class counts with the winner replaced by -1, class names).
    """
    classNames = []
    classCounts = []
    classRefs = []
    outSet = self._getExtraPath(join('level%03d' % level,
                                     'intermediate_classes.xmd'))
    mdClasses = md.MetaData(outSet)
    for objId in mdClasses:
        classNames.append(mdClasses.getValue(md.MDL_IMAGE, objId))
        classCounts.append(mdClasses.getValue(md.MDL_CLASS_COUNT, objId))
        classRefs.append(mdClasses.getValue(md.MDL_REF, objId))
    # Locate the biggest class and mark it consumed with -1
    biggest = max(classCounts)
    winnerPos = classCounts.index(biggest)
    classCounts[winnerPos] = -1
    bestRef = classRefs[winnerPos]
    self._params = {
        'input': 'class%06d_images' % (bestRef) + '@' + outSet,
        'outputMd': self._getExtraPath(
            join('level%03d' % level,
                 'images_level%03d' % level + '_major.xmd'))
    }
    args = ('-i %(input)s -o %(outputMd)s')
    self.runJob("xmipp_metadata_utilities", args % self._params,
                numberOfMpi=1)
    return classCounts, classNames
def checkContinueClassification(self, level, iter):
    """Check whether the classification at this level has converged.

    Compares each image's class assignment against the one remembered in
    self.listContinueClass (stored as alternating name/ref entries). On
    iteration 0 the list is just populated.

    Returns True when the percentage of changed assignments is below
    self.percentStopClassify and this is not the first iteration.
    """
    diff = 0
    i = 0
    metadata = md.MetaData(self._getExtraPath(join(
        'level%03d' % level,
        'general_images_level%03d' % level + '.xmd')))
    for item in metadata:
        refImg = metadata.getValue(md.MDL_REF, item)
        nameImg = metadata.getValue(md.MDL_IMAGE, item)
        if iter == 0:
            # First iteration: just remember the assignments
            self.listContinueClass.append(nameImg)
            self.listContinueClass.append(refImg)
        else:
            if nameImg in self.listContinueClass:
                # ref is stored right after the image name
                idx = self.listContinueClass.index(nameImg) + 1
                if refImg != self.listContinueClass[idx]:
                    diff += 1
                    self.listContinueClass[idx] = refImg
            else:
                # New image counts as a change
                diff += 1
                self.listContinueClass.append(nameImg)
                self.listContinueClass.append(refImg)
        i += 1
    # Guard: an empty metadata would divide by zero below
    if i == 0:
        return False
    num = (diff * 100 / i)
    if num < self.percentStopClassify and iter > 0:
        return True
    else:
        return False
def readSetOfCoordinates(outputDir, micSet, coordSet):
    """ Read from Bsoft .star files.
    Params:
        outputDir: the directory where the .star files are.
            It is also expected a file named: config.xmd
            in this directory where the box size can be read.
        micSet: the SetOfMicrographs to associate the .star, which
            name should be the same of the micrographs.
        coordSet: the SetOfCoordinates that will be populated.
    """
    boxSize = 100  # fallback when no coordinate provides an origin
    for mic in micSet:
        starPath = join(outputDir,
                        replaceBaseExt(mic.getFileName(), 'star'))
        if not exists(starPath):
            continue
        posMd = md.MetaData(starPath)
        for objId in posMd:
            row = rowFromMd(posMd, objId)
            coord = rowToCoordinate(row)
            # Box size derived from the particle origin; the value read
            # last wins, matching the original behaviour
            boxSize = 2 * float(row.getValue("particle.x_origin", 50))
            coord.setMicrograph(mic)
            coord.setX(coord.getX())
            coord.setY(coord.getY())
            coordSet.append(coord)
    coordSet.setBoxSize(boxSize)
def _newMd(self, n=5):
    """Build a MetaData with n rows of (name, x, x*x) test values.

    :param n: number of rows to generate.
    """
    md0 = md.MetaData()
    xcoor = range(n)
    ycoor = [x*x for x in xcoor]
    for i in range(n):
        # NOTE(review): '*****@*****.**' contains no %-placeholder, so
        # applying '% i' raises TypeError at runtime. The literal looks
        # like a redacted image name (e.g. 'img%05d@stack.stk') — restore
        # the original format string.
        self._addRow(md0, '*****@*****.**' % i, xcoor[i], ycoor[i])
    return md0
def plotAngularDistributionFromMd(self, mdFile, title, **kwargs):
    """ Read the values of rot, tilt and weights from the metadata
    and plot the angular distribution. ANGLES are in DEGREES.
    In the metadata:
        rot: MDL_ANGLE_ROT
        tilt: MDL_ANGLE_TILT
        weight: MDL_WEIGHT
    When 'histogram' is passed in kwargs a histogram plot is produced
    instead of the weighted distribution.
    """
    angMd = md.MetaData(mdFile)
    rotValues = []
    tiltValues = []
    # Histogram variant: raw angles in degrees, no weights
    if 'histogram' in kwargs:
        for row in md.iterRows(angMd):
            rotValues.append(row.getValue(md.MDL_ANGLE_ROT))
            tiltValues.append(row.getValue(md.MDL_ANGLE_TILT))
        return self.plotAngularDistributionHistogram(title, rotValues,
                                                     tiltValues)
    # Weighted variant: rot converted to radians, tilt kept in degrees
    weights = []
    for row in md.iterRows(angMd):
        rotValues.append(radians(row.getValue(md.MDL_ANGLE_ROT)))
        tiltValues.append(row.getValue(md.MDL_ANGLE_TILT))
        weights.append(row.getValue(md.MDL_WEIGHT))
    return self.plotAngularDistribution(title, rotValues, tiltValues,
                                        weights, **kwargs)
def readSetOfNewParticles(self, filename, **kwargs):
    """read from WARP goodparticles star file
    filename: The goodparticles star file
    kwargs are forwarded to rowToParticle.

    Appends particles not seen before (tracked in self._imgDict) to
    self.partSet and returns the set of newly added image names.
    Returns an empty set when the star file cannot be read.
    """
    img = None
    try:
        imgMd = md.MetaData(filename)
    # Narrowed from a bare 'except:', which would also swallow
    # KeyboardInterrupt/SystemExit
    except Exception:
        print(
            "Can't read star file, maybe drive is busy. Skipping this iteration"
        )
        return set()
    newFiles = set()
    oldFiles = set(self._imgDict.keys())
    for imgRow in md.iterRows(imgMd):
        imgName = imgRow['rlnImageName']
        if imgName not in oldFiles:
            img = rowToParticle(imgRow, **kwargs)
            # Skip particles whose preprocessing failed
            if not self.preprocess_success:
                continue
            self._imgDict[imgName] = img
            newFiles.add(imgName)
            self.partSet.append(img)
    if img is not None:
        # Propagate the CTF flag from the last converted particle
        self.partSet.setHasCTF(img.hasCTF())
    return newFiles
def createAngDistributionSqlite(self, sqliteFn, numberOfParticles,
                                itemDataIterator):
    """Accumulate (rot, tilt) pairs into weighted projection bins and
    persist them to sqliteFn.

    Projections closer than 0.5 degrees on both angles share a bin whose
    weight grows by 1/numberOfParticles per particle. Nothing is done if
    sqliteFn already exists.
    """
    if os.path.exists(sqliteFn):
        return
    # Each entry: [angleRot, angleTilt, accumulated weight]
    projections = []

    def _findNearby(rot, tilt):
        """Return the bin within 0.5 deg of (rot, tilt), or None."""
        for proj in projections:
            if (abs(proj[0] - rot) <= 0.5 and
                    abs(proj[1] - tilt) <= 0.5):
                return proj
        return None

    weight = 1. / numberOfParticles
    for rot, tilt in itemDataIterator:
        match = _findNearby(rot, tilt)
        if match is None:
            projections.append([rot, tilt, weight])
        else:
            match[2] = match[2] + weight
    mdProj = md.MetaData()
    for rot, tilt, w in projections:
        row = md.Row()
        row.setValue(md.MDL_ANGLE_ROT, rot)
        row.setValue(md.MDL_ANGLE_TILT, tilt)
        row.setValue(md.MDL_WEIGHT, w)
        row.writeToMd(mdProj, mdProj.addObject())
    mdProj.write(sqliteFn)
def _summary(self):
    """Return the text lines summarizing this protocol's results."""
    summary = ["Input particles: %s"
               % self.inputParticles.get().getNameId(),
               "-----------------"]
    if self.inputVolumes.get():
        for idx, vol in enumerate(self._iterInputVols()):
            summary.append("Input volume(s)_%d: [%s]" % (idx + 1, vol))
        summary.append("-----------------")
    if not hasattr(self, 'outputVolumes'):
        summary.append("Output volumes not ready yet.")
        return summary
    # One block of alignability figures per input volume
    for idx, vol in enumerate(self._iterInputVols()):
        VolPrefix = 'vol%03d_' % (idx + 1)
        mdVal = md.MetaData(self._getExtraPath(
            VolPrefix + 'validation_alignability.xmd'))
        accuracy = mdVal.getValue(md.MDL_WEIGHT_ACCURACY_ALIGNABILITY,
                                  mdVal.firstObject())
        precision = mdVal.getValue(md.MDL_WEIGHT_PRECISION_ALIGNABILITY,
                                   mdVal.firstObject())
        alignability = mdVal.getValue(md.MDL_WEIGHT_ALIGNABILITY,
                                      mdVal.firstObject())
        summary.append("ALIGNABILITY ACCURACY parameter_%d : %f"
                       % (idx + 1, accuracy))
        summary.append("ALIGNABILITY PRECISION parameter_%d : %f"
                       % (idx + 1, precision))
        summary.append(
            "ALIGNABILITY ACCURACY & PRECISION parameter_%d : %f"
            % (idx + 1, alignability))
        summary.append("-----------------")
    return summary
def performNmaStep(self, atomsFn, modesFn):
    """Run xmipp_nma_alignment on the input particles and relabel the
    resulting metadata.

    :param atomsFn: PDB/pseudo-atoms file.
    :param modesFn: normal modes file.
    After the job, each row's image path is replaced by the original
    particle location and tagged with the particle's object id.
    """
    sampling = self.inputParticles.get().getSamplingRate()
    discreteAngularSampling = self.discreteAngularSampling.get()
    trustRegionScale = self.trustRegionScale.get()
    odir = self._getTmpPath()
    imgFn = self.imgsFn
    # Command assembled with %(name)s placeholders resolved via locals()
    args = "-i %(imgFn)s --pdb %(atomsFn)s --modes %(modesFn)s --sampling_rate %(sampling)f "
    args += "--discrAngStep %(discreteAngularSampling)f --odir %(odir)s --centerPDB "
    args += "--trustradius_scale %(trustRegionScale)d --resume "
    if self.getInputPdb().getPseudoAtoms():
        args += "--fixed_Gaussian "
    if self.alignmentMethod == NMA_ALIGNMENT_PROJ:
        args += "--projMatch "
    self.runJob("xmipp_nma_alignment", args % locals())
    cleanPath(self._getPath('nmaTodo.xmd'))
    inputSet = self.inputParticles.get()
    mdImgs = md.MetaData(self.imgsFn)
    for objId in mdImgs:
        imgPath = mdImgs.getValue(md.MDL_IMAGE, objId)
        index, fn = xmippToLocation(imgPath)
        # Consider the index is the id in the input set
        particle = inputSet[index]
        mdImgs.setValue(md.MDL_IMAGE, getImageLocation(particle), objId)
        mdImgs.setValue(md.MDL_ITEM_ID, particle.getObjId(), objId)
    mdImgs.write(self.imgsFn)
def readMetaDataOutput(self):
    """Return the percentage (rounded to int) of mask voxels removed,
    read from the first row of the mask metadata file."""
    mData = md.MetaData(self._getFileName(METADATA_MASK_FILE))
    originalVoxels = float(mData.getValue(md.MDL_COUNT,
                                          mData.firstObject()))
    outputVoxels = float(mData.getValue(md.MDL_COUNT2,
                                        mData.firstObject()))
    removedFraction = (originalVoxels - outputVoxels) / originalVoxels
    return int(round(removedFraction * 100))
def createOutputStep(self):
    """Embed the spherical-harmonics coefficients with t-SNE (1D and 2D),
    rescale them if the sampling rate changed, and register the updated
    particle set as output.
    """
    # Local import: sklearn is an optional/heavy dependency
    from sklearn.manifold import TSNE
    Xdim = self.inputParticles.get().getXDim()
    self.Ts = self.inputParticles.get().getSamplingRate()
    # Target sampling: a third of the requested resolution
    newTs = self.targetResolution.get() * 1.0 / 3.0
    self.newTs = max(self.Ts, newTs)
    self.newXdim = int(Xdim * self.Ts / newTs)
    fnOut = self._getFileName('fnOut')
    mdOut = md.MetaData(fnOut)
    # One row of coefficients per particle, stacked into a matrix
    coeffMatrix = np.vstack(
        mdOut.getColumnValues(md.MDL_SPH_COEFFICIENTS))
    X_tsne_1d = TSNE(n_components=1).fit_transform(coeffMatrix)
    X_tsne_2d = TSNE(n_components=2).fit_transform(coeffMatrix)
    newMdOut = md.MetaData()
    i = 0
    for row in md.iterRows(mdOut):
        # NOTE(review): newRow aliases row (no copy); works here because
        # the modified row is immediately added to newMdOut.
        newRow = row
        newRow.setValue(md.MDL_SPH_TSNE_COEFF1D, float(X_tsne_1d[i, 0]))
        newRow.setValue(md.MDL_SPH_TSNE_COEFF2D,
                        [float(X_tsne_2d[i, 0]),
                         float(X_tsne_2d[i, 1])])
        if self.newTs != self.Ts:
            # Rescale coefficients to the new volume dimension
            coeffs = mdOut.getValue(md.MDL_SPH_COEFFICIENTS,
                                    row.getObjId())
            correctionFactor = self.inputVolume.get().getDim(
            )[0] / self.newXdim
            coeffs = [correctionFactor * coeff for coeff in coeffs]
            newRow.setValue(md.MDL_SPH_COEFFICIENTS, coeffs)
        newRow.addToMd(newMdOut)
        i += 1
    newMdOut.write(fnOut)
    inputSet = self.inputParticles.get()
    partSet = self._createSetOfParticles()
    partSet.copyInfo(inputSet)
    partSet.setAlignmentProj()
    partSet.copyItems(inputSet,
                      updateItemCallback=self._updateParticle,
                      itemDataIterator=md.iterRows(
                          fnOut, sortByLabel=md.MDL_ITEM_ID))
    partSet.L1 = Integer(self.l1.get())
    partSet.L2 = Integer(self.l2.get())
    partSet.Rmax = Integer(self.inputVolume.get().getDim()[0] / 2)
    self._defineOutputs(outputParticles=partSet)
    self._defineTransformRelation(self.inputParticles, partSet)
def _getMovieShifts(self, movie):
    """ Returns the x and y shifts for the alignment of this movie.
    The shifts should refer to the original micrograph without any
    binning. In case of a binning greater than 1, the shifts should be
    scaled.
    """
    # Docstring moved above this import: in the original the import came
    # first, so the string below it was a no-op expression, not a
    # docstring. Import kept local (presumably to avoid a circular
    # dependency at module load — confirm).
    from ..convert import readShiftsMovieAlignment
    shiftsMd = md.MetaData("frameShifts@" + self._getShiftsFile(movie))
    return readShiftsMovieAlignment(shiftsMd)
def _loadClassesInfo(self, filename, blockId):
    """ Read some information about the produced 2D classes
    from the metadata file.

    :param filename: metadata file with one row per class.
    :param blockId: key under which the class info is stored.
    """
    self._classesInfo = {}  # store classes info, indexed by class id
    mdClasses = md.MetaData(filename)
    # NOTE(review): every iteration writes to the SAME key (blockId), so
    # after the loop only the metadata's last row survives, and
    # classNumber is never used. If one entry per class was intended the
    # key should probably be classNumber + 1 — confirm against callers.
    for classNumber, row in enumerate(md.iterRows(mdClasses)):
        index, fn = xmippToLocation(row.getValue(md.MDL_IMAGE))
        self._classesInfo[blockId] = (index, fn, row.clone())
def _fillAverages(self, avgSet):
    """ Create the SetOfAverages from a given metadata """
    classesMd = md.MetaData(
        "classes@" + self._getExtraPath('last_classes.xmd'))
    for row in md.iterRows(classesMd):
        avg = rowToParticle(row)
        # Use the class reference number as the item's object id
        avg.setObjId(row.getValue(md.MDL_REF))
        avgSet.append(avg)
def fastCheckingAtt(self, level, flag_split):
    """Return True if any class at this level is under-populated.

    A class counts as under-populated when its particle count is below
    self.p times the average class size. flag_split selects which
    classification file of the level is inspected.
    """
    if flag_split:
        fnClasses = self._getExtraPath(join(
            'level%03d' % level,
            'level%03d' % level + '_classes.xmd'))
    else:
        fnClasses = self._getExtraPath(join(
            'level%03d' % level,
            'general_level%03d' % level + '_classes.xmd'))
    mdClasses = md.MetaData(fnClasses)
    counts = [mdClasses.getValue(md.MDL_CLASS_COUNT, objId)
              for objId in mdClasses]
    # Threshold: fraction p of the mean class population
    th = self.p * sum(counts) / len(counts)
    return any(c < th for c in counts)
def _plotFSC(self, a, fscFn, label):
    """Build an FSC object from the metadata file fscFn.

    :param a: unused here; kept for signature compatibility with callers.
    :param fscFn: metadata with RESOLUTION_FREQ / RESOLUTION_FRC columns.
    :param label: label assigned to the returned FSC object.
    """
    mdStar = md.MetaData(fscFn)
    freqs = []
    frcValues = []
    # Single pass instead of two list comprehensions
    for objId in mdStar:
        freqs.append(mdStar.getValue(md.MDL_RESOLUTION_FREQ, objId))
        frcValues.append(mdStar.getValue(md.MDL_RESOLUTION_FRC, objId))
    fsc = FSC(objLabel=label)
    fsc.setData(freqs, frcValues)
    return fsc
def generateOutputClasses(self, classesOut, firstTime):
    """Merge the new classification (classesOut) into the running
    'last_classes' result.

    First call: just store the classification and the two per-class
    image metadatas. Later calls: average the class representatives,
    rebuild the classes block (skipping empty classes) and union the
    per-class image lists.
    """
    if firstTime:
        self._saveFileDataClasses(classesOut,
                                  self._getExtraPath('last_classes.xmd'))
        # Add the two new classes
        for i in range(1, 3):
            mdImgsInClass = md.MetaData('class%06d_images@%s' %
                                        (i, classesOut))
            mdImgsInClass.write(self._getExtraPath('dataClass%06d.xmd'
                                                   % i))
        return
    finalMetadata = self._getExtraPath('aux_classes.stk')
    lastMetadata = self._getExtraPath('last_classes.xmd')
    newMetadata = classesOut
    # total: per-class particle counts after averaging (0 = empty class)
    total = self.averageClasses(finalMetadata, lastMetadata, newMetadata,
                                False)
    copy(self._getExtraPath('aux_classes.stk'),
         self._getExtraPath('last_classes.stk'))
    mdAll = md.MetaData()
    newRef = 1
    # Rebuild the classes block, renumbering and dropping empty classes
    for i in total:
        if i != 0:
            row = md.Row()
            row.setValue(md.MDL_REF, newRef)
            row.setValue(md.MDL_IMAGE, '%06d@' % newRef + finalMetadata)
            row.setValue(md.MDL_CLASS_COUNT, i)
            row.addToMd(mdAll)
            newRef += 1
    mdAll.write('classes@' + finalMetadata[:-3] + 'xmd', MD_APPEND)
    copy(self._getExtraPath('aux_classes.xmd'),
         self._getExtraPath('last_classes.xmd'))
    newRef = 1
    # Union image lists of surviving classes into the renumbered files
    for i, val in enumerate(total):
        if val != 0:
            self._unionDataClass(classesOut, i + 1, newRef)
            newRef += 1
def sharpeningAndMonoResStep(self):
    """Alternate sharpening and MonoRes resolution estimation until
    convergence.

    Stops when the sharpening lambda changes by <= 0.2 between
    iterations, or when the resolution range (max - 2*sampling) drops
    below 0.75. The last sharpened map is renamed to
    'sharpenedMap_last.mrc'.
    """
    last_Niters = -1
    # Sentinel so the first iteration never triggers the lambda test
    last_lambda_sharpening = 1e38
    nextIter = True
    while nextIter is True:
        self.iteration = self.iteration + 1
        # print("iteration")
        print('\n====================\n'
              'Iteration %s' % (self.iteration))
        self.sharpenStep(self.iteration)
        mtd = md.MetaData()
        mtd.read(self._getFileName('METADATA_PARAMS_SHARPENING'))
        lambda_sharpening = mtd.getValue(MDL_COST, 1)
        Niters = mtd.getValue(MDL_ITER, 1)
        # if (Niters == last_Niters):
        #     nextIter = False
        #     break
        # Converged: lambda barely moved since the previous iteration
        if (abs(lambda_sharpening - last_lambda_sharpening) <= 0.2):
            nextIter = False
            break
        last_Niters = Niters
        last_lambda_sharpening = lambda_sharpening
        self.MonoResStep(self.iteration)
        imageFile = self._getFileName('OUTPUT_RESOLUTION_FILE')
        img = ImageHandler().read(imageFile)
        imgData = img.getData()
        max_res = np.amax(imgData)
        # Nyquist limit: twice the sampling rate
        min_res = 2 * self.inputVolume.get().getSamplingRate()
        # Resolution spread too small to keep iterating
        if (max_res - min_res < 0.75):
            nextIter = False
            break
    os.rename(
        self._getExtraPath('sharpenedMap_' + str(self.iteration)
                           + '.mrc'),
        self._getExtraPath('sharpenedMap_last.mrc'))
    resFile = self.resolutionVolume.get().getFileName()
    pathres = dirname(resFile)
    if not exists(self._getFileName('OUTPUT_RESOLUTION_FILE')):
        print(
            '\n====================\n'
            ' WARNING---This is not the ideal case because resolution map has been imported.'
            ' The ideal case is to calculate it previously'
            ' in the same project using MonoRes.'
            '\n====================\n')
def importCoordinates(self, fileName, addCoordinate):
    """Import Appion particle picks from a plain-text file.

    :param fileName: file with whitespace-separated x/y columns.
    :param addCoordinate: callback invoked with each Coordinate built.
    Silently does nothing when the file does not exist.
    """
    print("In importCoordinates Appion with filename=%s" % fileName)
    if not exists(fileName):
        return
    mdata = md.MetaData()
    mdata.readPlain(fileName, 'xcoor ycoor')
    for objId in mdata:
        coord = Coordinate()
        coord.setPosition(mdata.getValue(md.MDL_XCOOR, objId),
                          mdata.getValue(md.MDL_YCOOR, objId))
        addCoordinate(coord)