def validate(self):
    errors = []
    N1 = getMdSize(self.Operand1)
    Operation = self.Operation
    checkDimensions = False
    if Operation == 'column' or Operation == 'slice' or Operation == 'row':
        if not self.Operand2.isdigit():
            errors.append('You should give a number for the column, slice or row')
    elif Operation == 'dot product':
        if self.Operand2.isdigit():
            errors.append('Second operand cannot be a number')
        else:
            checkDimensions = True
    elif Operation == 'plus' or Operation == 'minus' or Operation == 'multiply' or Operation == 'divide' or \
         Operation == 'minimum' or Operation == 'maximum':
        if not self.Operand2.isdigit():
            checkDimensions = True
    if checkDimensions:
        # Both operands are metadatas: their image/volume dimensions and sizes must match
        md1 = MetaData(self.Operand1)
        md2 = MetaData(self.Operand2)
        x1, y1, z1, _, _ = MetaDataInfo(md1)
        x2, y2, z2, _, _ = MetaDataInfo(md2)
        if x1 != x2 or y1 != y2 or z1 != z2:
            errors.append("Image/Volume sizes in the two operands are not the same")
        if md2.size() > 1:
            if md2.size() != md1.size():
                errors.append("The number of images/volumes in the two operands is not the same")
    return errors
def summary(self):
    message = ProtInitVolumeBase.summary(self)
    message.append("RANSAC iterations: %d" % self.NRansac)
    for n in range(self.NumVolumes):
        fnBase = 'volumeProposed%05d' % n
        fnRoot = self.workingDirPath(fnBase + ".xmd")
        if os.path.isfile(fnRoot):
            md = MetaData(fnRoot)
            if md.size() < 5:
                message.append("Number of inliers for %s is too small (%d)" % (fnRoot, md.size()))
                message.append("Decrease the value of the Inlier Threshold parameter and run again")
    fnBase = "ransac00000.xmd"
    fnRoot = self.workingDirPath("tmp/" + fnBase)
    if os.path.isfile(fnRoot):
        md = MetaData(fnRoot)
        if md.size() < 5:
            message.append("Number of random samples is too small (%d)" % md.size())
            message.append("If the Dimensionality reduction option is on, increase the number of grids per dimension")
            message.append("If the Dimensionality reduction option is off, increase the number of random samples")
    if self.UseSA:
        message.append("Simulated annealing used")
    return message
def wizardBrowseCTF2(gui, var):
    error = None
    results = None
    vList = ['LowResolCutoff', 'HighResolCutoff']
    freqs = gui.getVarlistValue(vList)
    importRunName = gui.getVarValue('ImportRun')
    prot = gui.project.getProtocolFromRunName(importRunName)
    path = prot.WorkingDir
    if path and exists(path):
        mdPath = prot.getFilename('micrographs')
        if exists(mdPath):
            from xmipp import MetaData, MDL_MICROGRAPH
            md = MetaData(mdPath)
            if md.size():
                image = md.getValue(MDL_MICROGRAPH, md.firstObject())
                if image:
                    filterExt = "*" + splitext(image)[1]
                    value = gui.getVarValue('DownsampleFactor')
                    results = wizardHelperSetDownsampling(gui, var, path, filterExt, value, freqs, md)
                    if results:
                        gui.setVarlistValue(vList, results[1:])
                        #gui.setVarValue('LowResolCutoff', results[1])
                        #gui.setVarValue('HighResolCutoff', results[2])
                else:
                    error = "No micrograph found in metadata <%s>" % mdPath
            else:
                error = "Micrograph metadata <%s> is empty" % mdPath
        else:
            error = "Micrograph metadata <%s> doesn't exist" % mdPath
    else:
        error = "Import run <%s> doesn't exist" % str(path)
    if error:
        showWarning("Select Downsampling Wizard", error, gui.master)
        return None
    else:
        return results
def coocurenceMatrix(log, RemainingClasses, WorkingDirStructure, NumVolumes, nI, CorePercentile, CorrThresh):
    import numpy
    mdRemaining = MetaData(RemainingClasses)
    Nimgs = mdRemaining.size()
    allNames = mdRemaining.getColumnValues(MDL_IMAGE)
    matrixTotal = numpy.zeros([Nimgs, Nimgs])
    for n in range(NumVolumes):
        fnBase = 'proposedVolume%05d' % n
        fnRoot = os.path.join(WorkingDirStructure, fnBase)
        md = MetaData(fnRoot + ".xmd")
        size = md.size()
        num = []
        corr = []
        for objId in md:
            name = md.getValue(MDL_IMAGE, objId)
            if name in allNames:
                num.append(allNames.index(name))
                corr.append(md.getValue(MDL_MAXCC, objId))
            else:
                print "Cannot find ", name
        if size != len(num):
            raise Exception("Error when processing: " + fnRoot + ".xmd")
        # For every pair of images assigned to this volume, store the mean of their correlations
        matrix = numpy.zeros([Nimgs, Nimgs])
        for i in range(size):
            for j in range(size):
                matrix[num[i], num[j]] = (corr[i] + corr[j]) / 2
        #numpy.savetxt(os.path.join(WorkingDirStructure, 'coocurrenceMatrix_%05d.txt' % n), matrix)
        matrixTotal = matrixTotal + matrix
    matrixTotal = matrixTotal / NumVolumes
    numpy.savetxt(os.path.join(WorkingDirStructure, 'coocurrenceMatrix.txt'), matrixTotal)
    largestComponent = procCoocurenceMatrix(matrixTotal, CorePercentile, CorrThresh)
    md = MetaData()
    for idx in largestComponent:
        id = md.addObject()
        md.setValue(MDL_IMAGE, allNames[idx], id)
    md.write(os.path.join(WorkingDirStructure + "_core", "imagesCore.xmd"))
    if md.size() == 0:
        raise Exception("There are no images in the core")
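# Illustrative sketch (not part of the protocol): the accumulation above fills, for every pair of
# images assigned to the same proposed volume, the mean of their MDL_MAXCC values, and then averages
# over all volumes. A minimal standalone example with made-up indices and correlations:
#
#   import numpy
#   Nimgs = 4
#   num = [0, 2, 3]            # hypothetical image indices present in one volume
#   corr = [0.8, 0.6, 0.7]     # hypothetical MDL_MAXCC values for those images
#   matrix = numpy.zeros([Nimgs, Nimgs])
#   for i in range(len(num)):
#       for j in range(len(num)):
#           matrix[num[i], num[j]] = (corr[i] + corr[j]) / 2
#   # matrix[0, 2] == 0.7, matrix[2, 3] == 0.65; rows/columns of images absent from this volume stay 0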
def visualize(self):
    from protlib_gui_figure import XmippArrayPlotter1D, XmippArrayPlotter2D, XmippArrayPlotter3D
    components = self.DisplayRawDeformation.split()
    dim = len(components)
    if dim > 0:
        modeList = []
        modeNameList = []
        # Get modes
        MD = MetaData(self.Modesfile)
        MD.removeDisabled()
        for modeComponent in components:
            mode = int(modeComponent)
            if mode > MD.size():
                from protlib_gui_ext import showWarning
                showWarning("Warning", "You don't have that many modes", parent=self.master)
            else:
                mode -= 1
                currentMode = 0
                modeName = ""
                for id in MD:
                    modeName = MD.getValue(MDL_NMA_MODEFILE, id)
                    currentMode += 1
                    if currentMode > mode:
                        break
                modeNameList.append(modeName)
                modeList.append(mode)
        # Actually plot
        if dim == 1:
            XmippArrayPlotter1D(self.extraPath("deformations.txt"), modeList[0],
                                "Histogram for mode %s" % modeNameList[0],
                                "Deformation value", "Number of images")
        elif dim == 2:
            XmippArrayPlotter2D(self.extraPath("deformations.txt"), modeList[0], modeList[1],
                                "", modeNameList[0], modeNameList[1])
        elif dim == 3:
            XmippArrayPlotter3D(self.extraPath("deformations.txt"), modeList[0], modeList[1], modeList[2],
                                "", modeNameList[0], modeNameList[1], modeNameList[2])
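# Illustrative sketch (assumption: "deformations.txt" is a plain whitespace-separated table with one
# column of deformation amplitudes per mode). The XmippArrayPlotter1D call above histograms one such
# column; a roughly equivalent standalone plot with numpy/matplotlib would look like:
#
#   import numpy
#   import matplotlib.pyplot as plt
#   data = numpy.loadtxt("deformations.txt")   # hypothetical path, one column per mode assumed
#   plt.hist(data[:, 0], bins=50)              # histogram of the first selected mode
#   plt.xlabel("Deformation value")
#   plt.ylabel("Number of images")
#   plt.show()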
def summary(self):
    message = ProtHG3DBase.summary(self)
    message.append("RANSAC iterations: %d" % self.NRansacInitial)
    for n in range(self.NumVolumesFinal):
        fnBase = 'proposedVolume%05d' % n
        fnRoot = self.workingDirPath(fnBase + ".xmd")
        if os.path.isfile(fnRoot):
            md = MetaData(fnRoot)
            if md.size() < 5:
                message.append("Number of inliers for %s is too small (%d)" % (fnRoot, md.size()))
                message.append("Decrease the value of the Inlier Threshold parameter and run again")
    fnBase = "ransac00000.xmd"
    fnRoot = self.workingDirPath("tmp/" + fnBase)
    if os.path.isfile(fnRoot):
        md = MetaData(fnRoot)
    return message
def getMdSizeEnabled(filename):
    """ Return the number of rows in a metadata file, counting only enabled rows. """
    md = MetaData(filename)
    md.removeDisabled()
    return md.size()
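# Example usage (hypothetical file name): rows flagged as disabled in the Xmipp metadata are
# removed before counting, so the result may be smaller than MetaData(filename).size().
#   nEnabled = getMdSizeEnabled("images.xmd")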
def wizardCL2DNumberOfClasses(gui, var):
    fnSel = gui.getVarValue('InSelFile')
    if exists(fnSel):
        md = MetaData(fnSel)
        # Suggest roughly one class per 200 input images
        gui.setVarValue("NumberOfReferences", int(round(md.size() / 200.0)))
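# Toy numbers for the heuristic above (one class per ~200 images):
#   int(round(10000 / 200.0))  ->  50 classes
#   int(round(130 / 200.0))    ->  1 class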
class ProtPreprocessVolumes(XmippProtocol):
    def __init__(self, scriptname, project):
        XmippProtocol.__init__(self, protDict.preprocess_volume.name, scriptname, project)
        self.Import = 'from protocol_preprocess_volume import *'
        self.mdIn = MetaData(self.InModel)
        if self.mdIn.size() == 1:
            self.OutModel = self.workingDirPath("volume.vol")
            self.singleVolume = True
        else:
            self.OutModel = self.workingDirPath("volumes.stk")
            self.singleVolume = False

    def defineSteps(self):
        self.insertStep('createAcquisition', WorkingDir=self.WorkingDir, Ts=self.FinalTs)
        # Resample and/or window the input if the sampling rate or box size changes
        if self.InitialTs != self.FinalTs or self.FinalSize != -1:
            if self.InitialTs != self.FinalTs and self.FinalSize == -1:
                x, _, _, _, _ = MetaDataInfo(self.mdIn)
                self.FinalSize = floor(x / (self.FinalTs / self.InitialTs))
            self.insertStep("changeSamplingRateAndOrBox", InModel=self.InModel, OutModel=self.OutModel,
                            SingleVolume=self.singleVolume,
                            InitialTs=self.InitialTs, FinalTs=self.FinalTs, Size=self.FinalSize)
        else:
            if self.singleVolume:
                self.insertStep('copyFile', source=self.InModel, dest=self.OutModel)
            else:
                self.insertRunJobStep('xmipp_image_convert',
                                      "-i %s -o %s --save_metadata_stack --track_origin" % (self.InModel, self.OutModel),
                                      verifyFiles=[self.OutModel])
        if self.DoChangehand:
            self.insertStep("changeHand", OutModel=self.OutModel)
        if self.DoRandomize:
            self.insertStep("randomize", OutModel=self.OutModel, Ts=self.FinalTs, MaxResolution=self.MaxResolutionRandomize)
        if self.DoFilter:
            self.insertStep("filter", OutModel=self.OutModel, Ts=self.FinalTs, MaxResolution=self.MaxResolution)
        if self.DoSymmetrize:
            self.insertStep("symmetrize", OutModel=self.OutModel, Symmetry=self.Symmetry,
                            SymmetryAggregation=self.SymmetryAggregation)
        if self.DoMask:
            self.insertStep("mask", OutModel=self.OutModel, MaskRadius=self.MaskRadius)
        if self.DoAdjust:
            self.insertStep("adjust", OutModel=self.OutModel, SetOfImages=self.SetOfImages)
        if self.DoNormalize:
            self.insertStep("normalize", OutModel=self.OutModel, MaskRadius=self.MaskRadiusNormalize)
        if self.DoThreshold:
            self.insertStep("threshold", OutModel=self.OutModel, Threshold=self.Threshold)
        if self.DoSegment:
            self.insertStep("segment", OutModel=self.OutModel, SegmentationType=self.SegmentationType,
                            SegmentationMass=self.SegmentationMass, Ts=self.FinalTs)

    def validate(self):
        errors = []
        if self.InitialTs < 0:
            errors.append("Initial sampling rate must be provided")
        if self.FinalTs < 0:
            errors.append("Final sampling rate must be provided")
        maxFreq = 2.0 * self.FinalTs
        if self.DoRandomize and self.MaxResolutionRandomize < maxFreq:
            errors.append("Phase randomization cannot be performed beyond %f A (Nyquist)" % maxFreq)
        if self.DoFilter and self.MaxResolution < maxFreq:
            errors.append("Low pass filtering cannot be performed beyond %f A (Nyquist)" % maxFreq)
        if self.DoAdjust and not self.singleVolume:
            errors.append("Gray adjusting is meant only for single volumes")
        if self.DoSegment and not self.singleVolume:
            errors.append("Segmentation is meant only for single volumes")
        return errors

    def summary(self):
        messages = []
        messages.append("Input model: [%s]" % self.InModel)
        messages.append("Output: [%s]" % self.OutModel)
        messages.append("Operations: ")
        if self.InitialTs != self.FinalTs:
            messages.append("   Sampling rate changed from %f to %f" % (float(self.InitialTs), float(self.FinalTs)))
        if self.FinalSize > 0:
            messages.append("   Volume boxed to %dx%dx%d voxels" % (int(self.FinalSize), int(self.FinalSize), int(self.FinalSize)))
        if self.DoChangehand:
            messages.append("   Hand changed")
        if self.DoRandomize:
            messages.append("   Phases randomized beyond %f A" % float(self.MaxResolutionRandomize))
        if self.DoFilter:
            messages.append("   Filtered to %f A" % float(self.MaxResolution))
        if self.DoSymmetrize:
            messages.append("   Symmetrized %s" % self.Symmetry)
        if self.DoMask:
            if self.MaskRadius > 0:
                messages.append("   Masked within a sphere of radius %d" % self.MaskRadius)
            else:
                messages.append("   Masked within the maximal sphere fitting in its box")
        if self.DoAdjust:
            messages.append("   Gray values adjusted to fit [%s]" % self.SetOfImages)
        if self.DoNormalize:
            if self.MaskRadiusNormalize > 0:
                messages.append("   Normalized with background beyond radius %d" % self.MaskRadiusNormalize)
            else:
                messages.append("   Normalized with background beyond the maximal fitting sphere")
        if self.DoThreshold:
            messages.append("   Thresholded below %f" % float(self.Threshold))
        if self.DoSegment:
            if self.SegmentationType == "Automatic":
                messages.append("   Automatically segmented")
            else:
                m = "   Segmented to a mass of "
                if self.SegmentationType == "Voxel mass":
                    m += "%d voxels" % int(self.SegmentationMass)
                elif self.SegmentationType == "Aminoacid mass":
                    m += "%d aminoacids" % int(self.SegmentationMass)
                elif self.SegmentationType == "Dalton mass":
                    m += "%d daltons" % int(self.SegmentationMass)
                messages.append(m)
        return messages

    def papers(self):
        papers = []
        if self.DoNormalize:
            papers.append('Sorzano, Ultramic (2004) [http://www.ncbi.nlm.nih.gov/pubmed/15450658]')
        if self.InitialTs != self.FinalTs:
            papers.append('Sorzano, IEEE WISP (2009) [http://ieeexplore.ieee.org/xpl/login.jsp?arnumber=5286563]')
        if self.DoRandomize:
            papers.append('Chen, Ultramic (2013) [http://www.ncbi.nlm.nih.gov/pubmed/23872039]')
        return papers

    def visualize(self):
        from protlib_utils import runShowJ
        if os.path.exists(self.OutModel):
            runShowJ(self.OutModel)