def fromXML(self, xmlObj):
    """
    fromXML: Initialise this GrowingAverageInterimResult from an XML element.
    @param xmlObj: A xml object
    @type xmlObj: L{lxml.etree._Element}
    @raises Exception: if xmlObj is not an _Element or not a
        GrowingAverageInterimResult element
    @author: Thomas Hrabe
    """
    from lxml.etree import _Element

    if xmlObj.__class__ != _Element:
        raise Exception('Is not a lxml.etree._Element! You must provide a valid XMLobject.')

    if xmlObj.tag == 'GrowingAverageInterimResult':
        result_element = xmlObj
    else:
        # BUG FIX: the original constructed this Exception without raising it,
        # then fell through with result_element undefined (NameError later).
        raise Exception('XML object is not a GrowingAverageInterimResult! You must provide a valid GrowingAverageInterimResultXML object.')

    from pytom.basic.structures import Particle, Reference

    # absolute XPath: assumes result_element is the document root
    particleXML = result_element.xpath('/GrowingAverageInterimResult/Particle')[0]
    self.particle = Particle('')
    self.particle.fromXML(particleXML)

    referenceXML = result_element.xpath('/GrowingAverageInterimResult/Result')[0]
    self.reference = Reference('')
    self.reference.fromXML(referenceXML)
def toXML(self):
    """
    toXML : Compiles a XML from result object
    @author: Thomas Hrabe
    """
    from lxml import etree

    element = etree.Element("Result")
    element.append(self._particle.toXML())

    if not self._reference.hasGeneratedByInfo():
        element.append(self._reference.toXML())
    else:
        # strip generated-by info: serialize only the reference filename
        from pytom.basic.structures import Reference
        element.append(Reference(self._reference.getReferenceFilename()).toXML())

    # remaining children in fixed order: shift, rotation, score, angles
    for child in (self._shift, self._rotation, self._score, self._angleObject):
        element.append(child.toXML())

    return element
def fromXML(self, xmlObj):
    """
    fromXML : Assigns values to result attributes from XML object
    @param xmlObj: A xml object
    @author: Thomas Hrabe
    """
    from lxml.etree import _Element

    if xmlObj.__class__ != _Element:
        from pytom.basic.exceptions import ParameterError
        raise ParameterError(
            'Is not a lxml.etree._Element! You must provide a valid XML object.'
        )

    from pytom.score.score import fromXML as fromXMLScore
    from pytom.angles.angle import AngleObject

    if xmlObj.tag == "Result":
        result = xmlObj
    else:
        candidates = xmlObj.xpath('Result')
        if not candidates:
            raise PyTomClassError(
                "This XML is not an MaximisationResult. No Result provided."
            )
        result = candidates[0]

    from pytom.basic.structures import Particle, Reference, Rotation, Shift

    # each sub-element is parsed by the matching structure class
    self._particle = Particle('')
    self._particle.fromXML(result.xpath('Particle')[0])

    self._reference = Reference('')
    self._reference.fromXML(result.xpath('Reference')[0])

    self._score = fromXMLScore(result.xpath('Score')[0])

    self._shift = Shift()
    self._shift.fromXML(result.xpath('Shift')[0])

    self._rotation = Rotation()
    self._rotation.fromXML(result.xpath('Rotation')[0])

    # AngleObject.fromXML returns the concrete angle instance
    self._angleObject = AngleObject().fromXML(result.xpath('Angles')[0])
def fromXML(self, xmlObj):
    """
    Initialise a CTFCorrectionJob from an XML element.
    @param xmlObj: A xml object
    @type xmlObj: L{lxml.etree._Element}
    @raises Exception: if xmlObj is not an _Element or contains no
        CTFCorrectionJob element
    """
    from lxml.etree import _Element
    if xmlObj.__class__ != _Element:
        raise Exception('You must provide a valid XML object.')

    if xmlObj.tag == "CTFCorrectionJob":
        jobDescription = xmlObj
    else:
        jobDescription = xmlObj.xpath('CTFCorrectionJob')
        if len(jobDescription) == 0:
            raise Exception("This XML is not a CTFCorrectionJob.")
        jobDescription = jobDescription[0]

    from pytom.basic.structures import ParticleList, Reference, Mask, SampleInformation

    particleList_element = jobDescription.xpath('ParticleList')[0]
    pl = ParticleList('.')
    pl.fromXML(particleList_element)
    self.particleList = pl

    # multiple references are allowed
    self.reference = []
    for ref_obj in jobDescription.xpath('Reference'):
        ref = Reference('')
        ref.fromXML(ref_obj)
        self.reference.append(ref)

    m = jobDescription.xpath('Mask')[0]
    self.mask = Mask('')
    self.mask.fromXML(m)

    # Optional element: fall back to a default when absent.
    # BUG FIX: the old bare "except:" fallback assigned self._sampleInformation
    # (note the underscore) and therefore left self.sampleInformation unset
    # whenever the element was missing.
    si = jobDescription.xpath('SampleInformation')
    self.sampleInformation = SampleInformation()
    if si:
        self.sampleInformation.fromXML(si[0])

    # scalar attributes stored on the job element
    self.ctf_conv_pl = jobDescription.get('CTFConvolutedParticleList')
    self.peak_offset = int(jobDescription.get('PeakOffset'))
    # BandwidthRange is serialized as "[lo, hi]"; strip brackets and split
    self.bw_range = [int(i) for i in jobDescription.get('BandwidthRange')[1:-1].split(',')]
    self.freq = int(jobDescription.get('Frequency'))
    self.destination = jobDescription.get('Destination')
    self.max_iter = int(jobDescription.get('MaxIterations'))
    self.r_score = jobDescription.get('RScore') == 'True'
    self.weighting = jobDescription.get('WeightedAverage') == 'True'
    self.bfactor = jobDescription.get('BFactor')
    self.sum_ctf_sqr = jobDescription.get('CTFSquared')
def __init__(self, particle='', reference=-1.0, score=-1.0, shift=-1.0,
             rotation=-1.0, angleObject=-1):
    """
    Initialise a maximisation result.

    Each argument may be passed either as a ready-made object or as a
    placeholder (string / float / int); placeholders yield default instances.
    @param particle: particle filename or L{pytom.basic.structures.Particle}
    @param reference: reference filename or L{pytom.basic.structures.Reference}
    @param score: score object; a float placeholder yields an xcfScore
    @param shift: shift as list, L{pytom.basic.structures.Shift}, or float placeholder
    @param rotation: rotation as list or L{pytom.basic.structures.Rotation}
    @param angleObject: angle object; a float/int placeholder yields an AngleList
    """
    from pytom.basic.structures import Particle, Reference, Shift, Rotation

    if particle.__class__ == str:
        self._particle = Particle(particle)
    elif particle.__class__ == Particle:
        self._particle = particle
    else:
        self._particle = Particle()

    if reference.__class__ == str:
        self._reference = Reference(reference)
    elif reference.__class__ == Reference:
        self._reference = reference
    else:
        self._reference = Reference()

    if shift.__class__ == list:
        self._shift = Shift(shift)
    elif shift.__class__ == float:
        self._shift = Shift()
    else:
        # assumed to already be a Shift instance
        self._shift = shift

    if rotation.__class__ == list:
        self._rotation = Rotation(rotation)
    elif rotation.__class__ == Rotation:
        self._rotation = rotation
    else:
        self._rotation = Rotation()

    if score.__class__ == float:
        from pytom.score.score import xcfScore
        self._score = xcfScore()
    else:
        self._score = score

    # BUG FIX: the original did "from numpy import long", which fails on
    # numpy >= 1.24 (alias removed) and is pointless on Python 3 where int
    # is unbounded — a plain int check covers every integer placeholder.
    if angleObject.__class__ == float or isinstance(angleObject, int):
        from pytom.angles.angleList import AngleList
        self._angleObject = AngleList()
    else:
        self._angleObject = angleObject
def PeakJob_Test(self):
    """Round-trip a PeakJob through XML and verify every field survives."""
    from pytom.localization.peak_job import PeakJob
    from pytom.basic.structures import Mask, Reference, WedgeInfo
    from pytom.localization.structures import Volume
    from pytom.score.score import FLCFScore
    from pytom.angles.angleList import AngleList

    volume = Volume(self.testfilename)
    reference = Reference(self.testfilename)
    mask = Mask(self.testfilename)
    wedge = WedgeInfo(30)
    score = FLCFScore()
    angles = AngleList([[1, 1, 1], [2, 2, 2], [3, 3, 3]])

    original = PeakJob(volume, reference, mask, wedge, angles, score, 1)
    restored = PeakJob()
    restored.fromXML(original.toXML())

    self.assertTrue(restored.volume.getFilename() == original.volume.getFilename(), msg='')
    self.assertTrue(restored.jobID == original.jobID, msg='')
    self.assertTrue(restored.reference.getReferenceFilename() == original.reference.getReferenceFilename(), msg='')
    self.assertTrue(restored.mask.getFilename() == original.mask.getFilename(), msg='')
    self.assertTrue(restored.wedge.getWedgeAngle() == original.wedge.getWedgeAngle(), msg='')
    self.assertTrue(restored.score.getScoreFunc() == original.score.getScoreFunc(), msg='')
def _run(self):
    """
    run: Starts growing average alignment
    """
    from pytom.basic.structures import Reference
    from pytom_volume import read
    from pytom_fftplan import fftShift

    # the start particle serves as the first reference;
    # its wedge weighting is written to disk
    startParticle = self._particleList[self._startParticleNumber]
    startFile = startParticle.getFilename()
    refVolume = read(startFile)

    # basename without directory and the '.em' extension
    baseName = startFile[startFile.rfind('/') + 1:startFile.rfind('.em')]

    #self._reference = Reference(referenceFile,r + '-StartWeight.em')
    self._reference = Reference(startFile)

    wedgeInfo = startParticle.getWedgeInfo()
    wedgeVolume = wedgeInfo.returnWedgeVolume(
        refVolume.sizeX(), refVolume.sizeY(), refVolume.sizeZ(), False)
    #wedgeVolume.setAll(1);
    wedgeVolume.write(baseName + '-StartWeight.em')

    self._startGrowingAverageLoop()
def PeakJobMsg_Test(self):
    """Round-trip a PeakJobMsg through XML and verify sender/recipient survive."""
    from pytom.localization.peak_job_msg import PeakJobMsg
    from pytom.localization.peak_job import PeakJob
    from pytom.basic.structures import Mask, Reference, WedgeInfo
    from pytom.localization.structures import Volume
    from pytom.score.score import FLCFScore
    from pytom.angles.angleList import AngleList

    job = PeakJob(Volume(self.testfilename),
                  Reference(self.testfilename),
                  Mask(self.testfilename),
                  WedgeInfo(30),
                  AngleList([[1, 1, 1], [2, 2, 2], [3, 3, 3]]),
                  FLCFScore())
    original = PeakJobMsg(str(0), str(1))
    original.setJob(job)

    restored = PeakJobMsg()
    restored.fromXML(original.toXML())

    self.assertTrue(restored.getSender() == original.getSender(), msg='')
    self.assertTrue(restored.getRecipient() == original.getRecipient(), msg='')
def averageParallel(particleList, averageName, showProgressBar=False, verbose=False,
                    createInfoVolumes=False, weighting=None, norm=False,
                    setParticleNodesRatio=3, cores=6):
    """
    compute average using parallel worker processes
    @param particleList: The particles
    @param averageName: Filename of new average
    @param verbose: Prints particle information. Disabled by default.
    @param createInfoVolumes: Create info data (wedge sum, inverted density) too? False by default.
    @param weighting: weight particles by exp CC in average
    @type weighting: bool
    @param setParticleNodesRatio: minimum number of particles per node
    @type setParticleNodesRatio: L{int}
    @param cores: number of worker processes
    @type cores: L{int}
    @return: A new Reference object
    @rtype: L{pytom.basic.structures.Reference}
    @author: FF
    """
    from pytom_volume import read, complexRealMult
    from pytom.basic.fourier import fft, ifft
    from pytom.basic.filter import lowpassFilter
    from pytom.basic.structures import Reference
    from pytom.alignment.alignmentFunctions import invert_WedgeSum
    from multiprocessing import Process
    import time
    import os

    splitLists = splitParticleList(particleList,
                                   setParticleNodesRatio=setParticleNodesRatio,
                                   numberOfNodes=cores)
    splitFactor = len(splitLists)

    # per-worker output file names
    avgNameList = []
    preList = []
    wedgeList = []
    for ii in range(splitFactor):
        avgNameList.append(averageName + '_dist' + str(ii) + '.em')
        preList.append(averageName + '_dist' + str(ii) + '-PreWedge.em')
        wedgeList.append(averageName + '_dist' + str(ii) + '-WedgeSumUnscaled.em')

    # compute the partial averages in parallel processes
    procs = []
    for i in range(splitFactor):
        proc = Process(target=average,
                       args=(splitLists[i], avgNameList[i], showProgressBar,
                             verbose, createInfoVolumes, weighting, norm))
        procs.append(proc)
        proc.start()

    # busy-wait until all workers finished
    while procs:
        procs = [proc for proc in procs if proc.is_alive()]
        time.sleep(.1)

    # collect results from files and delete the temporaries.
    # IMPROVED: os.remove instead of os.system('rm ...') — portable,
    # no shell invocation, and errors are raised instead of ignored.
    unweiAv = read(preList[0])
    wedgeSum = read(wedgeList[0])
    os.remove(wedgeList[0])
    os.remove(avgNameList[0])
    os.remove(preList[0])
    for ii in range(1, splitFactor):
        unweiAv += read(preList[ii])
        os.remove(preList[ii])
        wedgeSum += read(wedgeList[ii])
        os.remove(wedgeList[ii])
        os.remove(avgNameList[ii])

    if createInfoVolumes:
        # strip the '.em' suffix from averageName for the info volumes
        unweiAv.write(averageName[:len(averageName) - 3] + '-PreWedge.em')
        wedgeSum.write(averageName[:len(averageName) - 3] + '-WedgeSumUnscaled.em')

    # convolute unweighted average with inverse of wedge sum
    invert_WedgeSum(invol=wedgeSum, r_max=unweiAv.sizeX() / 2 - 2.,
                    lowlimit=.05 * len(particleList),
                    lowval=.05 * len(particleList))
    fResult = fft(unweiAv)
    r = complexRealMult(fResult, wedgeSum)
    unweiAv = ifft(r)
    unweiAv.shiftscale(
        0.0, 1 / float(unweiAv.sizeX() * unweiAv.sizeY() * unweiAv.sizeZ()))
    # low pass filter to remove artifacts at fringes
    unweiAv = lowpassFilter(volume=unweiAv, band=unweiAv.sizeX() / 2 - 2,
                            smooth=(unweiAv.sizeX() / 2 - 1) / 10.)[0]

    unweiAv.write(averageName)

    return Reference(averageName, particleList)
def fromXML(self, xmlObj):
    """
    read from xml file
    @param xmlObj: xml object
    @type xmlObj: L{lxml.etree.Element}
    @raises Exception: if xmlObj is not an _Element or contains no FRMJob
    """
    from lxml.etree import _Element
    if xmlObj.__class__ != _Element:
        raise Exception('You must provide a valid XML object.')

    if xmlObj.tag == "FRMJob":
        jobDescription = xmlObj
    else:
        jobDescription = xmlObj.xpath('FRMJob')
        if len(jobDescription) == 0:
            raise Exception("This XML is not a FRMJob.")
        jobDescription = jobDescription[0]

    from pytom.basic.structures import ParticleList, Reference, Mask, SampleInformation, MultiSymmetries

    # particle list: either inline, or assembled from referenced files
    pl = ParticleList('.')
    particleList_element = jobDescription.xpath('ParticleList')
    if len(particleList_element) > 0:
        pl.fromXML(particleList_element[0])
    else:
        list_elements = jobDescription.xpath('ParticleListLocation')
        for e in list_elements:
            sub_pl = ParticleList()
            sub_pl.fromXMLFile(e.get('Path'))
            pl += sub_pl
    self.particleList = pl

    r = jobDescription.xpath('Reference')[0]
    self.reference = Reference('')
    self.reference.fromXML(r)

    m = jobDescription.xpath('Mask')[0]
    self.mask = Mask('')
    self.mask.fromXML(m)

    # Optional elements: default instances when absent.
    # IMPROVED: explicit presence checks instead of bare "except:", which
    # also swallowed real parsing errors (and KeyboardInterrupt).
    si = jobDescription.xpath('SampleInformation')
    self.sampleInformation = SampleInformation()
    if si:
        self.sampleInformation.fromXML(si[0])

    syms = jobDescription.xpath('MultiSymmetries')
    self.symmetries = MultiSymmetries()
    if syms:
        self.symmetries.fromXML(syms[0])

    # scalar attributes stored on the job element
    self.peak_offset = int(jobDescription.get('PeakOffset'))
    # BandwidthRange is serialized as "[lo, hi]"; strip brackets and split
    self.bw_range = [int(i) for i in jobDescription.get('BandwidthRange')[1:-1].split(',')]
    self.freq = int(jobDescription.get('Frequency'))
    self.destination = jobDescription.get('Destination')
    self.max_iter = int(jobDescription.get('MaxIterations'))
    self.r_score = jobDescription.get('RScore') == 'True'
    self.weighting = jobDescription.get('WeightedAverage') == 'True'
    self.bfactor = jobDescription.get('BFactor')
    self.binning = int(jobDescription.get('binning'))

    if jobDescription.get('AdaptiveResolution'):
        adaptive_resolution = jobDescription.get('AdaptiveResolution')
        if adaptive_resolution == '+1':
            self.adaptive_res = False  # always increase by 1
        else:
            self.adaptive_res = float(adaptive_resolution)
    else:
        self.adaptive_res = 0.0  # default, if not specified

    if jobDescription.get('FSC'):
        self.fsc_criterion = float(jobDescription.get('FSC'))
    else:
        self.fsc_criterion = 0.5  # default value

    # for the constraint: optional, requires the sh_alignment package.
    # Kept broad except (narrowed from bare "except:") because the constraint
    # is deliberately best-effort when sh_alignment is unavailable.
    try:
        from sh_alignment.constrained_frm import AngularConstraint
        con = jobDescription.xpath('AngularConstraint')
        if len(con) != 0:
            ac = AngularConstraint()
            self.constraint = ac.fromXML(con[0])
        else:
            self.constraint = None
    except Exception:
        self.constraint = None
def start(self, job, verbose=False):
    """
    start FRM job
    @param job: FRM job
    @type job: L{FRMJob}
    @param verbose: print stuff (default: False)
    @type verbose: C{bool}
    """
    # rank 0 acts as the manager: it distributes per-iteration jobs,
    # collects partial sums, builds averages and steers the frequency;
    # all other ranks drop into the worker loop (self.run) below.
    if self.mpi_id == 0:
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil
        self.destination = job.destination
        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq
        #print(f"reference = {job.reference}")
        #print(f"particlelist = {job.particleList}")
        print(f"iterations = {job.max_iter:d}")
        print(f"binning = {job.binning:d}")
        #print(f"mask = {job.mask}")
        #print(f"peak_offset= {job.peak_offset:f2.1}")
        print(f"destination= {job.destination:s}")
        print(f"freq cut = {job.freq:d}")
        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)
            # construct a new job by updating the reference and the frequency
            new_job = FRMJob(job.particleList, new_reference, job.mask,
                             job.peak_offset, job.sampleInformation,
                             job.bw_range, new_freq, job.destination,
                             job.max_iter - i, job.r_score, job.weighting,
                             constraint=job.constraint, binning=job.binning)
            # distribute it
            self.distribute_job(new_job, verbose)
            # get the result back: accumulate the even/odd partial sums
            # (pre-wedge volumes and wedge weights) from every worker
            all_even_pre = None
            all_even_wedge = None
            all_odd_pre = None
            all_odd_wedge = None
            pl = ParticleList()
            for j in range(self.num_workers):
                result = self.get_result()
                pl += result.pl
                even_pre, even_wedge, odd_pre, odd_wedge = self.retrieve_res_vols(
                    result.name)
                if all_even_pre:
                    all_even_pre += even_pre
                    all_even_wedge += even_wedge
                    all_odd_pre += odd_pre
                    all_odd_wedge += odd_wedge
                else:
                    # first worker result initialises the accumulators
                    all_even_pre = even_pre
                    all_even_wedge = even_wedge
                    all_odd_pre = odd_pre
                    all_odd_wedge = odd_wedge
            # write the new particle list to the disk
            pl.toXMLFile(
                os.path.join(job.destination,
                             'aligned_pl_iter' + str(i) + '.xml'))
            # create half sets
            even = self.create_average(all_even_pre, all_even_wedge)
            odd = self.create_average(all_odd_pre, all_odd_wedge)
            # apply symmetries before determine resolution
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)
            # write the half set to the disk
            even.write(
                os.path.join(self.destination, 'fsc_' + str(i) + '_even.em'))
            odd.write(
                os.path.join(self.destination, 'fsc_' + str(i) + '_odd.em'))
            # determine the resolution
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            current_resolution = bandToAngstrom(
                resolutionBand, job.sampleInformation.getPixelSize(),
                numberBands, 1)
            if verbose:
                print(
                    self.node_name + ': current resolution ' +
                    str(current_resolution), resNyquist)
            # create new average: merge the odd sums into the even accumulators
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            average = self.create_average(all_even_pre, all_even_wedge)
            # apply symmetries
            average = job.symmetries.applyToParticle(average)
            # filter average to resolution and update the new reference
            average_name = os.path.join(self.destination,
                                        'average_iter' + str(i) + '.em')
            # pl.average(average_name, True)
            average.write(average_name)
            new_reference = Reference(average_name)
            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand),
                                     ceil(resolutionBand) / 10)
            filtered_ref_name = os.path.join(
                self.destination, 'average_iter' + str(i) + '_res' +
                str(current_resolution) + '.em')
            filtered[0].write(filtered_ref_name)
            # if the position/orientation is not improved, break it
            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(
                        self.node_name +
                        ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!'
                        % job.adaptive_res)
                    # NOTE(review): this variant scales new_freq, while the
                    # half-set variant of start() scales old_freq here —
                    # confirm which is intended; they diverge when the
                    # resolution estimate drops.
                    new_freq = int((1 + job.adaptive_res) * new_freq)
                    old_freq = new_freq
                else:  # always increase by 1
                    print(
                        self.node_name +
                        ': Determined resolution gets worse. Increase the frequency to be aligned by 1!'
                    )
                    new_freq = old_freq + 1
                    old_freq = new_freq
            else:
                old_freq = new_freq
            if new_freq >= numberBands:
                print(self.node_name + ': New frequency too high. Terminate!')
                break
            if verbose:
                print(self.node_name + ': change the frequency to ' +
                      str(new_freq))
        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
class FRMJob(PyTomClass):  # i need to rename the class, but for now it works
    def __init__(self, pl=None, ref=None, mask=None, peak_offset=0,
                 sample_info=None, bw_range=None, freq=None, dest='.',
                 max_iter=10, r_score=False, weighting=False, bfactor=None,
                 symmetries=None, adaptive_res=0.1, fsc_criterion=0.5,
                 constraint=None, binning=1):
        """
        initiate FRM job
        @param pl: particle list
        @type pl: L{pytom.basic.structures.ParticleList}
        @param ref: reference density
        @type ref: L{pytom.basic.structures.Reference}
        @param mask: mask
        @type mask: L{pytom.basic.structures.Mask}
        @param peak_offset: peak offset in voxel
        @type peak_offset: C{int}
        @param sample_info: sample information (default: None)
        @type sample_info: L{pytom.basic.structures.SampleInformation}
        @param bw_range: bandwidth range in pixel (2-dim vector)
        @type bw_range: C{list}
        @param freq: frequency (default: None)
        @type freq: C{int}
        @param dest: destination directory (default: '.')
        @type dest: C{str}
        @param max_iter: maximum number of iterations
        @type max_iter: C{int}
        @param r_score: use r_score (default: False)
        @type r_score: C{bool}
        @param weighting: weighting (default: False)
        @type weighting: C{bool}
        @param bfactor: B-factor (default: None)
        @type bfactor: C{float}
        @param symmetries: symmetry (default: None)
        @type symmetries: L{pytom.basic.structures.Symmetries}
        @param adaptive_res: adaptive resolution - add to resolution for filtering
        @type adaptive_res: C{float}
        @param fsc_criterion: FSC criterion (default: 0.5)
        @type fsc_criterion: C{float}
        @param constraint: Constraint on orientations (default: None)
        @param binning: Perform binning (downscale) of subvolumes by factor. Default=1.
        @type binning: C{float}
        """
        self.particleList = pl
        self.reference = ref
        self.mask = mask
        self.peak_offset = peak_offset
        self.sampleInformation = sample_info
        self.bw_range = bw_range
        self.freq = freq
        self.destination = dest
        self.max_iter = max_iter
        self.r_score = r_score
        self.weighting = weighting
        self.bfactor = bfactor
        self.symmetries = symmetries
        self.adaptive_res = adaptive_res
        self.fsc_criterion = fsc_criterion
        self.constraint = constraint
        self.binning = binning

    def fromXML(self, xmlObj):
        """
        read from xml file
        @param xmlObj: xml object
        @type xmlObj: L{lxml.etree.Element}
        @raises Exception: if xmlObj is not an _Element or contains no FRMJob
        """
        from lxml.etree import _Element
        if xmlObj.__class__ != _Element:
            raise Exception('You must provide a valid XML object.')

        if xmlObj.tag == "FRMJob":
            jobDescription = xmlObj
        else:
            jobDescription = xmlObj.xpath('FRMJob')
            if len(jobDescription) == 0:
                raise Exception("This XML is not a FRMJob.")
            jobDescription = jobDescription[0]

        from pytom.basic.structures import ParticleList, Reference, Mask, SampleInformation, MultiSymmetries

        # particle list: either inline, or assembled from referenced files
        pl = ParticleList('.')
        particleList_element = jobDescription.xpath('ParticleList')
        if len(particleList_element) > 0:
            pl.fromXML(particleList_element[0])
        else:
            list_elements = jobDescription.xpath('ParticleListLocation')
            for e in list_elements:
                sub_pl = ParticleList()
                sub_pl.fromXMLFile(e.get('Path'))
                pl += sub_pl
        self.particleList = pl

        r = jobDescription.xpath('Reference')[0]
        self.reference = Reference('')
        self.reference.fromXML(r)

        m = jobDescription.xpath('Mask')[0]
        self.mask = Mask('')
        self.mask.fromXML(m)

        # Optional elements: default instances when absent.
        # IMPROVED: explicit presence checks instead of bare "except:", which
        # also swallowed real parsing errors (and KeyboardInterrupt).
        si = jobDescription.xpath('SampleInformation')
        self.sampleInformation = SampleInformation()
        if si:
            self.sampleInformation.fromXML(si[0])

        syms = jobDescription.xpath('MultiSymmetries')
        self.symmetries = MultiSymmetries()
        if syms:
            self.symmetries.fromXML(syms[0])

        # scalar attributes stored on the job element
        self.peak_offset = int(jobDescription.get('PeakOffset'))
        # BandwidthRange is serialized as "[lo, hi]"; strip brackets and split
        self.bw_range = [int(i) for i in jobDescription.get('BandwidthRange')[1:-1].split(',')]
        self.freq = int(jobDescription.get('Frequency'))
        self.destination = jobDescription.get('Destination')
        self.max_iter = int(jobDescription.get('MaxIterations'))
        self.r_score = jobDescription.get('RScore') == 'True'
        self.weighting = jobDescription.get('WeightedAverage') == 'True'
        self.bfactor = jobDescription.get('BFactor')
        self.binning = int(jobDescription.get('binning'))

        if jobDescription.get('AdaptiveResolution'):
            adaptive_resolution = jobDescription.get('AdaptiveResolution')
            if adaptive_resolution == '+1':
                self.adaptive_res = False  # always increase by 1
            else:
                self.adaptive_res = float(adaptive_resolution)
        else:
            self.adaptive_res = 0.0  # default, if not specified

        if jobDescription.get('FSC'):
            self.fsc_criterion = float(jobDescription.get('FSC'))
        else:
            self.fsc_criterion = 0.5  # default value

        # for the constraint: optional, requires the sh_alignment package.
        # Kept broad except (narrowed from bare "except:") because the
        # constraint is deliberately best-effort when sh_alignment is missing.
        try:
            from sh_alignment.constrained_frm import AngularConstraint
            con = jobDescription.xpath('AngularConstraint')
            if len(con) != 0:
                ac = AngularConstraint()
                self.constraint = ac.fromXML(con[0])
            else:
                self.constraint = None
        except Exception:
            self.constraint = None

    def toXML(self):
        """
        copy to xml structure
        @return: xml object for job
        @rtype L{lxml.etree.Element}
        """
        from lxml import etree
        jobElement = etree.Element("FRMJob")
        jobElement.append(self.particleList.toXML())
        jobElement.append(self.reference.toXML())
        jobElement.append(self.mask.toXML())
        jobElement.append(self.sampleInformation.toXML())
        if self.symmetries is not None:
            jobElement.append(self.symmetries.toXML())
        jobElement.set("PeakOffset", str(self.peak_offset))
        jobElement.set("BandwidthRange", str(self.bw_range))
        jobElement.set("Frequency", str(self.freq))
        jobElement.set("Destination", self.destination)
        jobElement.set("MaxIterations", str(self.max_iter))
        jobElement.set("RScore", str(self.r_score))
        jobElement.set("WeightedAverage", str(self.weighting))
        jobElement.set("BFactor", str(self.bfactor))
        jobElement.set("binning", str(self.binning))
        # adaptive_res == False is stored as the sentinel '+1'
        # (mirrors the parsing in fromXML)
        if self.adaptive_res is False:
            jobElement.set("AdaptiveResolution", '+1')
        else:
            jobElement.set("AdaptiveResolution", str(self.adaptive_res))
        jobElement.set("FSC", str(self.fsc_criterion))
        if self.constraint:
            jobElement.append(self.constraint.toXML())
        return jobElement

    def check(self):
        """
        Validate the job: particle list, reference, mask and destination dir.
        @raises RuntimeError: if the destination directory does not exist
        """
        from pytom.tools.files import checkDirExists
        self.particleList.check()
        self.reference.check()
        self.mask.check()
        if not checkDirExists(self.destination):
            raise RuntimeError('Destination path not found! ' +
                               self.destination)
def start(self, job, verbose=False):
    # Gold-standard variant of the FRM manager loop: the particle list is
    # split into two half sets that are aligned independently; the even
    # average is registered onto the odd average before the FSC is computed.
    # Rank 0 manages; all other ranks run the worker loop (self.run).
    if self.mpi_id == 0:
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil
        # randomly split the particle list into 2 half sets
        # (only if no 2-class split exists already)
        if len(job.particleList.splitByClass()) != 2:
            import numpy as np
            n = len(job.particleList)
            labels = np.random.randint(2, size=(n, ))
            print(self.node_name + ': Number of 1st half set:',
                  n - np.sum(labels), 'Number of 2nd half set:',
                  np.sum(labels))
            for i in range(n):
                p = job.particleList[i]
                p.setClass(labels[i])
        self.destination = job.destination
        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq
        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)
            # construct a new job by updating the reference and the frequency
            new_job = FRMJob(job.particleList, new_reference, job.mask,
                             job.peak_offset, job.sampleInformation,
                             job.bw_range, new_freq, job.destination,
                             job.max_iter - i, job.r_score, job.weighting)
            # distribute it
            self.distribute_job(new_job, verbose)
            # get the result back; workers are pre-assigned to half sets
            # via self.assignment (0 -> even set, otherwise odd set)
            all_even_pre = None  # the 1st set
            all_even_wedge = None
            all_odd_pre = None  # the 2nd set
            all_odd_wedge = None
            pl = ParticleList()
            for j in range(self.num_workers):
                result = self.get_result()
                pl += result.pl
                pre, wedge = self.retrieve_res_vols(result.name)
                if self.assignment[result.worker_id] == 0:
                    if all_even_pre:
                        all_even_pre += pre
                        all_even_wedge += wedge
                    else:
                        all_even_pre = pre
                        all_even_wedge = wedge
                else:
                    if all_odd_pre:
                        all_odd_pre += pre
                        all_odd_wedge += wedge
                    else:
                        all_odd_pre = pre
                        all_odd_wedge = wedge
            # write the new particle list to the disk
            # NOTE(review): written to the CWD, not job.destination — the
            # sibling start() variant uses os.path.join(job.destination, ...);
            # confirm whether this is intentional.
            pl.toXMLFile('aligned_pl_iter' + str(i) + '.xml')
            # create the averages separately
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            even = self.create_average(all_even_pre, all_even_wedge)
            odd = self.create_average(all_odd_pre, all_odd_wedge)
            # apply symmetries if any
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)
            # determine the transformation between even and odd
            # here we assume the wedge from both sets are fully sampled
            from sh_alignment.frm import frm_align
            pos, angle, score = frm_align(odd, None, even, None, job.bw_range,
                                          new_freq, job.peak_offset)
            print(self.node_name +
                  'Transform of even set to match the odd set - shift: ' +
                  str(pos) + ' rotation: ' + str(angle))
            # transform the odd set accordingly (volume, pre-wedge sum and
            # wedge weight are all moved into the even set's frame)
            from pytom_volume import vol, transformSpline
            from pytom.basic.fourier import ftshift
            from pytom_volume import reducedToFull
            from pytom_freqweight import weight
            transformed_odd_pre = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())
            full_all_odd_wedge = reducedToFull(all_odd_wedge)
            ftshift(full_all_odd_wedge)
            odd_weight = weight(full_all_odd_wedge)  # the funny part of pytom
            transformed_odd = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())
            # inverse transform: negated angles/shifts around the volume center
            transformSpline(all_odd_pre, transformed_odd_pre, -angle[1],
                            -angle[0], -angle[2], odd.sizeX() / 2,
                            odd.sizeY() / 2, odd.sizeZ() / 2,
                            -(pos[0] - odd.sizeX() / 2),
                            -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
            odd_weight.rotate(-angle[1], -angle[0], -angle[2])
            transformed_odd_wedge = odd_weight.getWeightVolume(True)
            transformSpline(odd, transformed_odd, -angle[1], -angle[0],
                            -angle[2], odd.sizeX() / 2, odd.sizeY() / 2,
                            odd.sizeZ() / 2, -(pos[0] - odd.sizeX() / 2),
                            -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
            all_odd_pre = transformed_odd_pre
            all_odd_wedge = transformed_odd_wedge
            odd = transformed_odd
            # determine resolution
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)
            # write the half set to the disk
            even.write(
                os.path.join(self.destination, 'fsc_' + str(i) + '_even.em'))
            odd.write(
                os.path.join(self.destination, 'fsc_' + str(i) + '_odd.em'))
            current_resolution = bandToAngstrom(
                resolutionBand, job.sampleInformation.getPixelSize(),
                numberBands, 1)
            if verbose:
                print(
                    self.node_name + ': current resolution ' +
                    str(current_resolution), resNyquist)
            # create new average: merge the odd sums into the even accumulators
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            average = self.create_average(all_even_pre, all_even_wedge)
            # apply symmetries
            average = job.symmetries.applyToParticle(average)
            # filter average to resolution
            average_name = os.path.join(self.destination,
                                        'average_iter' + str(i) + '.em')
            average.write(average_name)
            # update the references: next iteration aligns each half set
            # against its own unregistered half-set average
            new_reference = [
                Reference(
                    os.path.join(self.destination,
                                 'fsc_' + str(i) + '_even.em')),
                Reference(
                    os.path.join(self.destination,
                                 'fsc_' + str(i) + '_odd.em'))
            ]
            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand),
                                     ceil(resolutionBand) / 10)
            filtered_ref_name = os.path.join(
                self.destination, 'average_iter' + str(i) + '_res' +
                str(current_resolution) + '.em')
            filtered[0].write(filtered_ref_name)
            # if the position/orientation is not improved, break it
            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(
                        self.node_name +
                        ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!'
                        % job.adaptive_res)
                    # NOTE(review): this variant scales old_freq, while the
                    # other start() variant scales new_freq here — confirm
                    # which is intended.
                    new_freq = int((1 + job.adaptive_res) * old_freq)
                else:  # always increase by 1
                    print(
                        self.node_name +
                        ': Determined resolution gets worse. Increase the frequency to be aligned by 1!'
                    )
                    new_freq = old_freq + 1
                old_freq = new_freq
            else:
                old_freq = new_freq
            if new_freq >= numberBands:
                print(self.node_name + ': New frequency too high. Terminate!')
                break
            if verbose:
                print(self.node_name + ': change the frequency to ' +
                      str(new_freq))
        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
def fromXML(self, xmlObj):  # only rewrite this function
    """
    fromXML: Populate this job from a ``<FRMJob>`` XML element.

    Unlike the plain FRMJob variant, the particle data is read from a
    ``ParticleListSet`` child element.  Scalar settings (peak offset,
    bandwidth range, frequency, destination, ...) are read from XML
    attributes of the FRMJob element.

    @param xmlObj: parsed XML element; either the ``FRMJob`` element itself
        or an ancestor containing one
    @type xmlObj: L{lxml.etree._Element}
    @raise Exception: if xmlObj is not an lxml element or contains no FRMJob
    """
    from lxml.etree import _Element
    if xmlObj.__class__ != _Element:
        raise Exception('You must provide a valid XML object.')
    if xmlObj.tag == "FRMJob":  # the name is not changed here!
        jobDescription = xmlObj
    else:
        # caller handed us a wrapper element: search for the FRMJob child
        jobDescription = xmlObj.xpath('FRMJob')
        if len(jobDescription) == 0:
            raise Exception("This XML is not a FRMJob.")
        jobDescription = jobDescription[0]
    from pytom.basic.structures import Reference, Mask, SampleInformation, MultiSymmetries
    # particle data comes as a ParticleListSet (multi-defocus variant)
    particleList_element = jobDescription.xpath('ParticleListSet')[0]
    pl = ParticleListSet()
    pl.fromXML(particleList_element)
    self.particleList = pl  # here i still use the original name!
    # there may be several Reference elements (e.g. even/odd half-set refs)
    self.reference = []
    r = jobDescription.xpath('Reference')
    for ref_obj in r:
        ref = Reference('')
        ref.fromXML(ref_obj)
        self.reference.append(ref)
    m = jobDescription.xpath('Mask')[0]
    self.mask = Mask('')
    self.mask.fromXML(m)
    # optional elements: fall back to default-constructed objects when the
    # element is absent or malformed (bare except preserved from original)
    try:
        si = jobDescription.xpath('SampleInformation')[0]
        self.sampleInformation = SampleInformation()
        self.sampleInformation.fromXML(si)
    except:
        self.sampleInformation = SampleInformation()
    try:
        syms = jobDescription.xpath('MultiSymmetries')[0]
        self.symmetries = MultiSymmetries()
        self.symmetries.fromXML(syms)
    except:
        self.symmetries = MultiSymmetries()
    # scalar settings stored as XML attributes
    self.peak_offset = int(jobDescription.get('PeakOffset'))
    # BandwidthRange is stored as e.g. "[4, 64]" -> strip brackets, split
    self.bw_range = [
        int(i)
        for i in jobDescription.get('BandwidthRange')[1:-1].split(',')
    ]
    self.freq = int(jobDescription.get('Frequency'))
    self.destination = jobDescription.get('Destination')
    self.max_iter = int(jobDescription.get('MaxIterations'))
    self.r_score = jobDescription.get('RScore') == 'True'
    self.weighting = jobDescription.get('WeightedAverage') == 'True'
    self.bfactor = jobDescription.get('BFactor')
    if jobDescription.get('AdaptiveResolution'):
        adaptive_resolution = jobDescription.get('AdaptiveResolution')
        if adaptive_resolution == '+1':
            self.adaptive_res = False  # always increase by 1
        else:
            self.adaptive_res = float(adaptive_resolution)
    else:
        self.adaptive_res = 0.0  # default, if not specified
    if jobDescription.get('FSC'):
        self.fsc_criterion = float(jobDescription.get('FSC'))
    else:
        self.fsc_criterion = 0.5  # default value
class PeakJob(PyTomClass):
    """
    PeakJob: stores all the infos needed for calculation of the peak score
    (template matching / localization job description).
    """

    def __init__(self, volume='', reference='', mask='', wedge='', rotations='', score='', jobID=0, members=1, dstDir='./', bandpass=None):
        """
        @param volume: target volume
        @type volume: L{pytom.localization.structures.Volume}
        @param reference: reference volume
        @type reference: L{pytom.basic.structures.Reference}
        @param mask: mask volume
        @type mask: L{pytom.basic.structures.Mask}
        @param wedge: wedge information
        @type wedge: L{pytom.basic.structures.WedgeInfo}
        @param rotations: rotation list
        @type rotations: L{pytom.angles.angle}
        @param score: score function
        @type score: L{pytom.score.score}
        @param jobID: job identification
        @type jobID: integer
        @param members: how many members are there available to accomplish this job (1 means only itself)
        @type members: integer
        @param dstDir: destination directory where the result is written to
        @type dstDir: string
        @param bandpass: bandpass object that will be applied to the reference
        @type bandpass: L{pytom.basic.structure.BandPassFilter}
        """
        self.volume = volume
        self.reference = reference
        self.mask = mask
        self.wedge = wedge
        self.rotations = rotations
        self.score = score
        self.jobID = jobID
        self.members = members
        # normalize the destination path to always end with a slash
        if dstDir[-1] == '/':
            self.dstDir = dstDir
        else:
            self.dstDir = dstDir + '/'
        self.bandpass = bandpass

    def copy(self, fromJob):
        """
        copy: Shallow-copies all attributes of another PeakJob into this one.

        @param fromJob: job to copy from
        @type fromJob: L{PeakJob}
        """
        self.volume = fromJob.volume
        self.reference = fromJob.reference
        self.mask = fromJob.mask
        self.wedge = fromJob.wedge
        self.rotations = fromJob.rotations
        self.score = fromJob.score
        self.jobID = fromJob.jobID
        self.members = fromJob.members
        self.dstDir = fromJob.dstDir
        self.bandpass = fromJob.bandpass

    def fromXML(self, xmlObj):
        """
        fromXML : Assigns values to job attributes from XML object
        @param xmlObj: A xml object (the ``JobDescription`` element or an
            ancestor containing one)
        @type xmlObj: L{lxml.etree._Element}
        @raise Exception: if xmlObj is not an lxml element or holds no
            JobDescription
        @author: chen
        """
        from lxml.etree import _Element
        if xmlObj.__class__ != _Element:
            raise Exception('You must provide a valid XML object.')
        if xmlObj.tag == "JobDescription":
            jobDescription = xmlObj
        else:
            jobDescription = xmlObj.xpath('JobDescription')
            if len(jobDescription) == 0:
                raise Exception("This XML is not an JobDescription.")
            jobDescription = jobDescription[0]
        # optional attributes: only overwrite defaults when present
        id = jobDescription.get('ID')
        if id != None and id != 'None':
            self.jobID = int(id)
        members = jobDescription.get('Members')
        if members != None and members != 'None':
            self.members = int(members)
        dstDir = jobDescription.get('Destination')
        if dstDir != None:
            if dstDir[-1] == '/':
                self.dstDir = dstDir
            else:
                self.dstDir = dstDir + '/'
        from pytom.score.score import fromXML as fromXMLScore
        from pytom.basic.structures import Mask, Reference, Wedge
        # from pytom.angles.angleList import AngleList
        from pytom.localization.structures import Volume
        e = jobDescription.xpath('Volume')[0]
        v = Volume()
        v.fromXML(e)
        self.volume = v
        ref = jobDescription.xpath('Reference')[0]
        self.reference = Reference('')
        self.reference.fromXML(ref)
        # the wedge element may be serialized under several historic tag
        # names; try them in order until one matches
        wedgeXML = jobDescription.xpath('Wedge')
        if len(wedgeXML) == 0:
            wedgeXML = jobDescription.xpath('SingleTiltWedge')
        if len(wedgeXML) == 0:
            wedgeXML = jobDescription.xpath('WedgeInfo')
        if len(wedgeXML) == 0:
            wedgeXML = jobDescription.xpath('DoubleTiltWedge')
        assert len(wedgeXML) > 0
        self.wedge = Wedge()
        self.wedge.fromXML(wedgeXML[0])
        mask = jobDescription.xpath('Mask')[0]
        self.mask = Mask('')
        self.mask.fromXML(mask)
        score = jobDescription.xpath('Score')
        self.score = fromXMLScore(score[0])
        rot = jobDescription.xpath('Angles')[0]
        from pytom.angles.angle import AngleObject
        ang = AngleObject()
        self.rotations = ang.fromXML(rot)
        # self.rotations = AngleList()
        # self.rotations.fromXML(rot)
        # BandPassFilter is optional
        bp = jobDescription.xpath('BandPassFilter')
        if bp != []:
            bp = bp[0]
            from pytom.basic.structures import BandPassFilter
            self.bandpass = BandPassFilter(0, 0, 0)
            self.bandpass.fromXML(bp)
        else:
            self.bandpass = None

    def toXML(self):
        """
        toXML : Compiles a XML file from job object
        @return: the ``JobDescription`` XML element
        @rtype: L{lxml.etree._Element}
        @author: chen
        """
        from lxml import etree
        jobElement = etree.Element("JobDescription",
                                   ID=str(self.jobID),
                                   Members=str(self.members),
                                   Destination=str(self.dstDir))
        jobElement.append(self.volume.toXML())
        jobElement.append(self.reference.toXML())
        jobElement.append(self.mask.toXML())
        jobElement.append(self.wedge.toXML())
        jobElement.append(self.rotations.toXML())
        jobElement.append(self.score.toXML())
        if self.bandpass:
            jobElement.append(self.bandpass.toXML())
        return jobElement

    def check(self):
        """
        check: Performs check whether all settings are valid. Paths and Files exist
        @raise IOError: when a referenced file or the destination directory
            does not exist
        @author: chen
        """
        from pytom.tools.files import checkFileExists, checkDirExists
        returnValue = checkFileExists(self.volume.getFilename())
        if not returnValue:
            raise IOError('File: ' + str(self.volume) + ' not found!')
        returnValue = checkFileExists(self.reference.getReferenceFilename())
        if not returnValue:
            raise IOError('File: ' + str(self.reference) + ' not found!')
        returnValue = checkFileExists(self.mask.getFilename())
        if not returnValue:
            raise IOError('File: ' + str(self.mask) + ' not found!')
        # dstDir always ends with '/', strip it for the directory check
        returnValue = checkDirExists(self.dstDir[:-1])
        if not returnValue:
            raise IOError('Directory: ' + str(self.dstDir) + ' not found!')
        return returnValue

    def send(self, source, destination):
        """
        send: Send the job-relevant message from source to destination
        @param source: source machine id gained from pytom_mpi
        @type source: int
        @param destination: destination machine id
        @type destination: int
        @author: chen
        """
        from pytom.localization.peak_job_msg import PeakJobMsg
        # self.check()
        msg = PeakJobMsg(str(source), str(destination))
        msg.setJob(self)
        import pytom_mpi
        print(f'destination: {destination}\ntype: {type(destination)}')
        pytom_mpi.send(str(msg), int(destination))
class FRMJob(PyTomClass):  # i need to rename the class, but for now it works
    """
    FRMJob: description of a fast-rotational-matching (FRM) alignment job —
    particle list, reference, mask, frequency/bandwidth settings and the
    iteration/convergence parameters.
    """

    def __init__(self, pl=None, ref=None, mask=None, peak_offset=0, sample_info=None, bw_range=None, freq=None, dest='.', max_iter=10, r_score=False, weighting=False, bfactor=None, symmetries=None, adaptive_res=0.1, fsc_criterion=0.5, constraint=None):
        """
        @param pl: particle list to align
        @param ref: reference volume
        @param mask: alignment mask
        @param peak_offset: maximal allowed translation offset (voxels)
        @param sample_info: sample information (e.g. pixel size)
        @param bw_range: spherical-harmonics bandwidth range
        @param freq: starting frequency (in bands) for the alignment
        @param dest: destination directory for results
        @param max_iter: maximal number of alignment iterations
        @param r_score: use R-score instead of plain correlation
        @param weighting: apply score weighting to the average
        @param bfactor: optional B-factor (filename or value)
        @param symmetries: symmetries applied to the averages
        @param adaptive_res: adaptive resolution step; False means
            "always increase the frequency by 1"
        @param fsc_criterion: FSC threshold used for resolution determination
        @param constraint: optional angular constraint
        """
        self.particleList = pl
        self.reference = ref
        self.mask = mask
        self.peak_offset = peak_offset
        self.sampleInformation = sample_info
        self.bw_range = bw_range
        self.freq = freq
        self.destination = dest
        self.max_iter = max_iter
        self.r_score = r_score
        self.weighting = weighting
        self.bfactor = bfactor
        self.symmetries = symmetries
        self.adaptive_res = adaptive_res
        self.fsc_criterion = fsc_criterion
        self.constraint = constraint

    def fromXML(self, xmlObj):
        """
        fromXML: Populate this job from a ``<FRMJob>`` XML element.

        @param xmlObj: parsed XML element; either the FRMJob element itself
            or an ancestor containing one
        @type xmlObj: L{lxml.etree._Element}
        @raise Exception: if xmlObj is not an lxml element or holds no FRMJob
        """
        from lxml.etree import _Element
        if xmlObj.__class__ != _Element:
            raise Exception('You must provide a valid XML object.')
        if xmlObj.tag == "FRMJob":
            jobDescription = xmlObj
        else:
            jobDescription = xmlObj.xpath('FRMJob')
            if len(jobDescription) == 0:
                raise Exception("This XML is not a FRMJob.")
            jobDescription = jobDescription[0]
        from pytom.basic.structures import ParticleList, Reference, Mask, SampleInformation, MultiSymmetries
        pl = ParticleList('.')
        particleList_element = jobDescription.xpath('ParticleList')
        if len(particleList_element) > 0:
            # inline particle list
            pl.fromXML(particleList_element[0])
        else:
            # particle lists referenced by file path; concatenate them all
            list_elements = jobDescription.xpath('ParticleListLocation')
            for e in list_elements:
                sub_pl = ParticleList()
                sub_pl.fromXMLFile(e.get('Path'))
                pl += sub_pl
        self.particleList = pl
        r = jobDescription.xpath('Reference')[0]
        self.reference = Reference('')
        self.reference.fromXML(r)
        m = jobDescription.xpath('Mask')[0]
        self.mask = Mask('')
        self.mask.fromXML(m)
        # optional elements: fall back to defaults when missing/malformed
        # (bare except preserved from original)
        try:
            si = jobDescription.xpath('SampleInformation')[0]
            self.sampleInformation = SampleInformation()
            self.sampleInformation.fromXML(si)
        except:
            self.sampleInformation = SampleInformation()
        try:
            syms = jobDescription.xpath('MultiSymmetries')[0]
            self.symmetries = MultiSymmetries()
            self.symmetries.fromXML(syms)
        except:
            self.symmetries = MultiSymmetries()
        # scalar settings stored as XML attributes
        self.peak_offset = int(jobDescription.get('PeakOffset'))
        # BandwidthRange is stored as e.g. "[4, 64]" -> strip brackets, split
        self.bw_range = [
            int(i)
            for i in jobDescription.get('BandwidthRange')[1:-1].split(',')
        ]
        self.freq = int(jobDescription.get('Frequency'))
        self.destination = jobDescription.get('Destination')
        self.max_iter = int(jobDescription.get('MaxIterations'))
        self.r_score = jobDescription.get('RScore') == 'True'
        self.weighting = jobDescription.get('WeightedAverage') == 'True'
        self.bfactor = jobDescription.get('BFactor')
        if jobDescription.get('AdaptiveResolution'):
            adaptive_resolution = jobDescription.get('AdaptiveResolution')
            if adaptive_resolution == '+1':
                self.adaptive_res = False  # always increase by 1
            else:
                self.adaptive_res = float(adaptive_resolution)
        else:
            self.adaptive_res = 0.0  # default, if not specified
        if jobDescription.get('FSC'):
            self.fsc_criterion = float(jobDescription.get('FSC'))
        else:
            self.fsc_criterion = 0.5  # default value
        # for the constraint (optional, requires sh_alignment to be installed)
        try:
            from sh_alignment.constrained_frm import AngularConstraint
            con = jobDescription.xpath('AngularConstraint')
            if len(con) != 0:
                ac = AngularConstraint()
                c = ac.fromXML(con[0])
                self.constraint = c
            else:
                self.constraint = None
        except:
            self.constraint = None

    def toXML(self):
        """
        toXML: Serialize this job to a ``<FRMJob>`` XML element.

        @return: the FRMJob XML element
        @rtype: L{lxml.etree._Element}
        """
        from lxml import etree
        jobElement = etree.Element("FRMJob")
        jobElement.append(self.particleList.toXML())
        jobElement.append(self.reference.toXML())
        jobElement.append(self.mask.toXML())
        jobElement.append(self.sampleInformation.toXML())
        if self.symmetries is not None:
            jobElement.append(self.symmetries.toXML())
        jobElement.set("PeakOffset", str(self.peak_offset))
        jobElement.set("BandwidthRange", str(self.bw_range))
        jobElement.set("Frequency", str(self.freq))
        jobElement.set("Destination", self.destination)
        jobElement.set("MaxIterations", str(self.max_iter))
        jobElement.set("RScore", str(self.r_score))
        jobElement.set("WeightedAverage", str(self.weighting))
        jobElement.set("BFactor", str(self.bfactor))
        # False encodes the "+1" strategy (see fromXML)
        if self.adaptive_res is False:
            jobElement.set("AdaptiveResolution", '+1')
        else:
            jobElement.set("AdaptiveResolution", str(self.adaptive_res))
        jobElement.set("FSC", str(self.fsc_criterion))
        if self.constraint:
            jobElement.append(self.constraint.toXML())
        return jobElement

    def check(self):
        """
        check: Verify that the particle list, reference, mask and destination
        directory all exist.

        @raise RuntimeError: when the destination path does not exist
        """
        from pytom.tools.files import checkDirExists
        self.particleList.check()
        self.reference.check()
        self.mask.check()
        if not checkDirExists(self.destination):
            raise RuntimeError('Destination path not found! ' + self.destination)
def growingAverageNew(particleList=None,angleObject=None,maskFile=None,scoreObject=None,startClassNumber=0,destinationDirectory='.',preprocessing = None,binning=1,verbose=False):
    """
    growingAverageNew: Builds a "growing average" from class averages.

    The particles of the start class are averaged into an initial reference
    (GA_it0.em). Then, for each subsequent class, the class average
    (CA_it<i>.em) is aligned against the current growing reference and its
    particles - with the determined rotation/shift applied - are merged into
    the growing average (GA_it<i>.em), which becomes the next reference.

    @param particleList: all particles, with class labels assigned
    @type particleList: L{pytom.basic.structures.ParticleList}
    @param angleObject: angular sampling used for alignment (reset each round)
    @param maskFile: alignment mask file
    @param scoreObject: scoring function for the alignment
    @param startClassNumber: class used to seed the initial average
    @param destinationDirectory: where GA_it*.em / CA_it*.em are written
    @param preprocessing: optional preprocessing; defaults to a no-op
        L{pytom.alignment.preprocessing.Preprocessing}
    @param binning: binning factor passed to bestAlignment
    @param verbose: print progress information
    """
    from pytom.alignment.alignmentFunctions import bestAlignment
    from pytom.basic.structures import Reference,Particle,Rotation,ParticleList
    from pytom.alignment.preprocessing import Preprocessing
    if not preprocessing:
        preprocessing = Preprocessing()
    numberOfClasses = len(particleList.splitByClass())
    if verbose:
        print('Processing ' + str(numberOfClasses) + ' classes.')
        print('Generating start average')
    # seed the growing average with the particles of the start class
    startAverageList = particleList.particlesFromClass(float(startClassNumber))
    startAverageList.average(destinationDirectory + '/GA_it0.em',progressBar=verbose)
    currentReference = Reference(destinationDirectory + '/GA_it0.em')
    growingAverageParticleList = ParticleList(particleList.getDirectory())
    for p in startAverageList:
        # seed particles enter the growing list unrotated
        p.setRotation(Rotation(0,0,0))
        growingAverageParticleList.append(p)
    # NOTE(review): the loop starts at 2, so class 1 is never merged into the
    # growing average (and the start class is processed again if
    # startClassNumber >= 2) -- looks like an off-by-one; confirm intent.
    for i in range(2,numberOfClasses):
        currentParticleList = particleList.particlesFromClass(float(i))
        if verbose:
            print('Generating ' + str(i) + '. class average')
        # average the current class and align it against the growing reference
        currentParticleList.average(destinationDirectory + '/CA_it'+str(i)+'.em',progressBar=verbose)
        currentParticle = Particle(destinationDirectory + '/CA_it'+str(i)+'.em',wedgeInfo=currentParticleList[0].getWedgeInfo())
        if verbose:
            print('Running alignment iteration ' + str(i))
            print(currentParticle)
            print(currentReference)
        currentPeak = bestAlignment(currentParticle.getVolume(),currentReference.getVolume(),currentReference.getWeighting(),currentParticle.getWedgeInfo(),angleObject,scoreObject,maskFile,preprocessing=preprocessing,binning=binning)
        if verbose:
            print('Parameters determined:')
            print(currentPeak)
        # propagate the class-average alignment to every member particle
        for p in currentParticleList:
            p.setRotation(currentPeak.getRotation())
            p.setShift(currentPeak.getShift())
            growingAverageParticleList.append(p)
        if verbose:
            print('Generating growing average ' + str(i))
        growingAverageParticleList.average(destinationDirectory + '/GA_it'+ str(i) +'.em',progressBar=verbose)
        # the new growing average becomes the reference for the next class
        currentReference = Reference(destinationDirectory + '/GA_it'+ str(i) +'.em')
        # restart the angular search from scratch for the next class
        angleObject.reset()
if not checkFileExists(mask): raise RuntimeError('Mask file ' + mask + ' does not exist!') if not checkDirExists(destination): raise RuntimeError('Destination directory ' + destination + ' does not exist!') from pytom.basic.structures import Mask, Reference, Wedge, BandPassFilter from pytom.localization.structures import Volume from pytom.angles.globalSampling import GlobalSampling from pytom.score.score import FLCFScore from pytom.localization.peak_job import PeakJob from pytom.frontend.serverpages.createLocalizationJob import createRunscripts v = Volume(volume) r = Reference(reference) m = Mask(mask) w = Wedge([float(wedge1), float(wedge2)]) a = GlobalSampling(angles) job = PeakJob(volume=v, reference=r, mask=m, wedge=w, rotations=a, score=FLCFScore(), jobID=0, members=1, dstDir=destination, bandpass=BandPassFilter(0, float(band), 0))
def start(self, job, verbose=False):
    """
    start: Entry point of the multi-defocus gold-standard alignment.

    The MPI rank-0 node acts as the master: it randomly splits each particle
    pair list into two half sets, then iteratively distributes alignment
    jobs, accumulates the per-worker sums, Wiener-filters and averages the
    two half sets, aligns the odd average onto the even one, determines the
    FSC resolution and adapts the alignment frequency until convergence or
    max_iter. All other ranks enter the worker loop (self.run).

    @param job: the job description (particleList is a ParticleListSet)
    @param verbose: print progress information
    """
    if self.mpi_id == 0:
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil
        from pytom.basic.fourier import convolute
        from pytom_volume import vol, power, read
        # randomly split the particle list into 2 half sets
        import numpy as np
        num_pairs = len(job.particleList.pairs)
        for i in range(num_pairs):
            # randomize the class labels to indicate the two half sets
            pl = job.particleList.pairs[i].get_phase_flip_pl()
            n = len(pl)
            labels = np.random.randint(2, size=(n, ))
            print(self.node_name + ': Number of 1st half set:',
                  n - np.sum(labels), 'Number of 2nd half set:',
                  np.sum(labels))
            for j in range(n):
                p = pl[j]
                p.setClass(labels[j])
        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq
        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)
            # construct a new job by updating the reference and the frequency
            # here the job.particleList is actually ParticleListSet
            new_job = MultiDefocusJob(job.particleList, new_reference,
                                      job.mask, job.peak_offset,
                                      job.sampleInformation, job.bw_range,
                                      new_freq, job.destination,
                                      job.max_iter - i, job.r_score,
                                      job.weighting, job.bfactor)
            # distribute it
            num_all_particles = self.distribute_job(new_job, verbose)
            # calculate the denominator (SNR-weighted CTF^2 sum for the
            # Wiener filter)
            sum_ctf_squared = None
            for pair in job.particleList.pairs:
                if sum_ctf_squared is None:
                    sum_ctf_squared = pair.get_ctf_sqr_vol() * pair.snr
                else:
                    sum_ctf_squared += pair.get_ctf_sqr_vol() * pair.snr
            # get the result back
            all_even_pre = None
            all_even_wedge = None
            all_odd_pre = None
            all_odd_wedge = None
            pls = []
            for j in range(len(job.particleList.pairs)):
                pls.append(ParticleList())
            for j in range(self.num_workers):
                result = self.get_result()
                pair_id = self.assignment[result.worker_id]
                pair = job.particleList.pairs[pair_id]
                pl = pls[pair_id]
                pl += result.pl
                even_pre, even_wedge, odd_pre, odd_wedge = self.retrieve_res_vols(
                    result.name)
                # accumulate the SNR-weighted partial sums of both half sets
                if all_even_pre:
                    all_even_pre += even_pre * pair.snr
                    all_even_wedge += even_wedge
                    all_odd_pre += odd_pre * pair.snr
                    all_odd_wedge += odd_wedge
                else:
                    all_even_pre = even_pre * pair.snr
                    all_even_wedge = even_wedge
                    all_odd_pre = odd_pre * pair.snr
                    all_odd_wedge = odd_wedge
            # write the new particle list to the disk
            for j in range(len(job.particleList.pairs)):
                pls[j].toXMLFile('aligned_pl' + str(j) + '_iter' + str(i) +
                                 '.xml')
            # correct for the number of particles in wiener filter
            sum_ctf_squared = sum_ctf_squared / num_all_particles
            # all_even_pre = all_even_pre/(num_all_particles/2)
            # all_odd_pre = all_odd_pre/(num_all_particles/2)
            # bfactor
            if job.bfactor and job.bfactor != 'None':
                # bfactor_kernel = create_bfactor_vol(sum_ctf_squared.sizeX(), job.sampleInformation.getPixelSize(), job.bfactor)
                bfactor_kernel = read(job.bfactor)
                bfactor_kernel_sqr = vol(bfactor_kernel)
                power(bfactor_kernel_sqr, 2)
                all_even_pre = convolute(all_even_pre, bfactor_kernel, True)
                all_odd_pre = convolute(all_odd_pre, bfactor_kernel, True)
                sum_ctf_squared = sum_ctf_squared * bfactor_kernel_sqr
            # create averages of two sets
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            even = self.create_average(
                all_even_pre, sum_ctf_squared, all_even_wedge
            )  # assume that the CTF sum is the same for the even and odd
            odd = self.create_average(all_odd_pre, sum_ctf_squared,
                                      all_odd_wedge)
            # determine the transformation between even and odd
            # here we assume the wedge from both sets are fully sampled
            from sh_alignment.frm import frm_align
            pos, angle, score = frm_align(odd, None, even, None, job.bw_range,
                                          new_freq, job.peak_offset)
            print(
                self.node_name +
                ': transform of even set to match the odd set - shift: ' +
                str(pos) + ' rotation: ' + str(angle))
            # transform the odd set accordingly
            from pytom_volume import vol, transformSpline
            from pytom.basic.fourier import ftshift
            from pytom_volume import reducedToFull
            from pytom_freqweight import weight
            transformed_odd_pre = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())
            full_all_odd_wedge = reducedToFull(all_odd_wedge)
            ftshift(full_all_odd_wedge)
            odd_weight = weight(
                full_all_odd_wedge)  # the funny part of pytom
            transformed_odd = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())
            # apply the inverse of the found transform to the odd sums
            transformSpline(all_odd_pre, transformed_odd_pre, -angle[1],
                            -angle[0], -angle[2], int(odd.sizeX() / 2),
                            int(odd.sizeY() / 2), int(odd.sizeZ() / 2),
                            -(pos[0] - odd.sizeX() / 2),
                            -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
            odd_weight.rotate(-angle[1], -angle[0], -angle[2])
            transformed_odd_wedge = odd_weight.getWeightVolume(True)
            transformSpline(odd, transformed_odd, -angle[1], -angle[0],
                            -angle[2], int(odd.sizeX() / 2),
                            int(odd.sizeY() / 2), int(odd.sizeZ() / 2),
                            -(pos[0] - odd.sizeX() / 2),
                            -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
            all_odd_pre = transformed_odd_pre
            all_odd_wedge = transformed_odd_wedge
            odd = transformed_odd
            # apply symmetries before determine resolution
            # with gold standard you should be careful about applying the symmetry!
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)
            # write the half set to the disk
            even.write('fsc_' + str(i) + '_even.em')
            odd.write('fsc_' + str(i) + '_odd.em')
            current_resolution = bandToAngstrom(
                resolutionBand, job.sampleInformation.getPixelSize(),
                numberBands, 1)
            if verbose:
                print(
                    self.node_name + ': current resolution ' +
                    str(current_resolution), resNyquist)
            # create new average
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            # all_even_pre = all_even_pre/2
            # correct for the number of particles in wiener filter
            average = self.create_average(all_even_pre, sum_ctf_squared,
                                          all_even_wedge)
            # apply symmetries
            average = job.symmetries.applyToParticle(average)
            # filter average to resolution and update the new reference
            average_name = 'average_iter' + str(i) + '.em'
            average.write(average_name)
            # update the references
            new_reference = [
                Reference('fsc_' + str(i) + '_even.em'),
                Reference('fsc_' + str(i) + '_odd.em')
            ]
            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand),
                                     ceil(resolutionBand) / 10)
            filtered_ref_name = 'average_iter' + str(i) + '_res' + str(
                current_resolution) + '.em'
            filtered[0].write(filtered_ref_name)
            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(
                        self.node_name +
                        ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!'
                        % job.adaptive_res)
                    new_freq = int((1 + job.adaptive_res) * old_freq)
                else:  # always increase by 1
                    print(
                        self.node_name +
                        ': Determined resolution gets worse. Increase the frequency to be aligned by 1!'
                    )
                    new_freq = old_freq + 1
                old_freq = new_freq
            else:
                old_freq = new_freq
            if new_freq >= numberBands:
                print(self.node_name +
                      ': Determined frequency too high. Terminate!')
                break
            if verbose:
                print(self.node_name + ': change the frequency to ' +
                      str(new_freq))
        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
class MaximisationResult(PyTomClass):
    """
    MaximisationResult : Stores results of one maximisation process —
    the aligned particle, the reference it was aligned against, and the
    resulting score, shift, rotation and angle object.
    """

    def __init__(self, particle='', reference=-1.0, score=-1.0, shift=-1.0, rotation=-1.0, angleObject=-1):
        """
        @param particle: the aligned particle (Particle object or filename)
        @param reference: the reference (Reference object or filename)
        @param score: score object; a plain float selects the default xcfScore
        @param shift: Shift object or list; a plain float selects a zero Shift
        @param rotation: Rotation object or list; anything else gives a zero
            Rotation
        @param angleObject: angle object; a plain number selects an empty
            AngleList
        """
        from pytom.basic.structures import Particle, Reference, Shift, Rotation

        # each argument may arrive as a ready-made object, a convenience
        # value (filename / list), or a sentinel default -> normalize
        if particle.__class__ == str:
            self._particle = Particle(particle)
        elif particle.__class__ == Particle:
            self._particle = particle
        else:
            self._particle = Particle()

        if reference.__class__ == str:
            self._reference = Reference(reference)
        elif reference.__class__ == Reference:
            self._reference = reference
        else:
            self._reference = Reference()

        if shift.__class__ == list:
            self._shift = Shift(shift)
        elif shift.__class__ == float:
            self._shift = Shift()
        else:
            self._shift = shift

        if rotation.__class__ == list:
            self._rotation = Rotation(rotation)
        elif rotation.__class__ == Rotation:
            self._rotation = rotation
        else:
            self._rotation = Rotation()

        if score.__class__ == float:
            from pytom.score.score import xcfScore
            self._score = xcfScore()
        else:
            self._score = score

        # FIX: the original did `from numpy import long`, which raises
        # ImportError on NumPy >= 1.24 (the alias was removed). On Python 3
        # `int` is unbounded, so checking int alone is equivalent.
        if angleObject.__class__ == float or isinstance(angleObject, int):
            from pytom.angles.angleList import AngleList
            self._angleObject = AngleList()
        else:
            self._angleObject = angleObject

    def toParticle(self):
        """
        toParticle: Converts this object to a Particle object.
        @return: the stored particle, updated with this result's rotation,
            shift and score
        @rtype: L{pytom.basic.structures.Particle}
        """
        from pytom.basic.structures import Particle
        particle = self._particle
        particle.setRotation(self._rotation)
        particle.setShift(self._shift)
        particle.setScore(self._score)
        return particle

    def getParticle(self):
        return self._particle

    def getShift(self):
        from pytom.basic.structures import Shift
        if self._shift.__class__ == list:
            return Shift(self._shift)
        else:
            return self._shift

    def setShift(self, shift):
        """
        setShift:
        @param shift: new shift
        @type shift: L{pytom.basic.structures.Shift}
        """
        from pytom.basic.structures import Shift
        assert shift.__class__ == Shift
        self._shift = shift

    def getAngleObject(self):
        return self._angleObject

    def getRotation(self):
        from pytom.basic.structures import Rotation
        if self._rotation.__class__ == list:
            return Rotation(self._rotation[0], self._rotation[1],
                            self._rotation[2])
        else:
            # return a copy so callers cannot mutate the stored rotation
            return self._rotation.copy()

    def getScore(self):
        """
        getScore: Returns score object
        """
        return self._score

    def setRotation(self, rotation):
        """
        setRotation:
        @param rotation: new rotation (Rotation object or [z1, z2, x] list)
        """
        from pytom.basic.structures import Rotation
        if rotation.__class__ == list:
            rotation = Rotation(rotation)
        self._rotation = rotation

    def fromXML(self, xmlObj):
        """
        fromXML : Assigns values to result attributes from XML object
        @param xmlObj: A xml object (the ``Result`` element or an ancestor
            containing one)
        @author: Thomas Hrabe
        """
        from lxml.etree import _Element
        if xmlObj.__class__ != _Element:
            from pytom.basic.exceptions import ParameterError
            raise ParameterError(
                'Is not a lxml.etree._Element! You must provide a valid XML object.'
            )
        from pytom.score.score import fromXML as fromXMLScore
        from pytom.angles.angle import AngleObject
        if xmlObj.tag == "Result":
            result = xmlObj
        else:
            result = xmlObj.xpath('Result')
            if len(result) == 0:
                # NOTE(review): PyTomClassError is not imported in this scope;
                # confirm it is provided by the enclosing module.
                raise PyTomClassError(
                    "This XML is not an MaximisationResult. No Result provided."
                )
            result = result[0]
        from pytom.basic.structures import Particle, Reference, Rotation, Shift
        particle_element = result.xpath('Particle')[0]
        p = Particle('')
        p.fromXML(particle_element)
        self._particle = p
        r = result.xpath('Reference')
        ref = Reference('')
        ref.fromXML(r[0])
        self._reference = ref
        scoreXML = result.xpath('Score')[0]
        self._score = fromXMLScore(scoreXML)
        shiftXML = result.xpath('Shift')[0]
        self._shift = Shift()
        self._shift.fromXML(shiftXML)
        rotationXML = result.xpath('Rotation')[0]
        self._rotation = Rotation()
        self._rotation.fromXML(rotationXML)
        angleElement = result.xpath('Angles')
        ang = AngleObject()
        self._angleObject = ang.fromXML(angleElement[0])

    def toXML(self):
        """
        toXML : Compiles a XML from result object
        @return: the ``Result`` XML element
        @author: Thomas Hrabe
        """
        from lxml import etree
        resultElement = etree.Element("Result")
        resultElement.append(self._particle.toXML())
        if self._reference.hasGeneratedByInfo():
            # strip the (potentially huge) generation info from the reference
            from pytom.basic.structures import Reference
            newRef = Reference(self._reference.getReferenceFilename())
            resultElement.append(newRef.toXML())
        else:
            resultElement.append(self._reference.toXML())
        resultElement.append(self._shift.toXML())
        resultElement.append(self._rotation.toXML())
        resultElement.append(self._score.toXML())
        resultElement.append(self._angleObject.toXML())
        return resultElement

    def copy(self):
        return MaximisationResult(self._particle, self._reference,
                                  self._score, self._shift, self._rotation,
                                  self._angleObject)
def _disrtibuteAverageMPI(particleList,averageName,showProgressBar = False,verbose=False, createInfoVolumes = False,setParticleNodesRatio = 3,sendEndMessage = False):
    """
    _distributeAverageMPI : Distributes averaging to multiple MPI nodes.

    The particle list is split into sublists, one ExpectationJob per
    sublist is sent to the workers, and the per-node PreWedge/WedgeSum
    volumes are summed, wedge-corrected, low-pass filtered and written to
    ``averageName``. Intermediate per-node files are removed afterwards.
    (The function name keeps its historic typo so existing callers work.)

    @param particleList: The particles
    @param averageName: Filename of new average
    @param verbose: Prints particle information. Disabled by default.
    @param createInfoVolumes: Create info data (wedge sum, inverted density) too? False by default.
    @param setParticleNodesRatio: minimum number of particles per node
    @param sendEndMessage: forward end message to workers after completion
    @return: A new Reference object
    @rtype: L{pytom.basic.structures.Reference}
    @author: Thomas Hrabe
    """
    import pytom_mpi
    from pytom.alignment.structures import ExpectationJob
    from pytom.parallel.parallelWorker import ParallelWorker
    from pytom.parallel.alignmentMessages import ExpectationJobMsg
    from pytom_volume import read, complexRealMult
    from pytom.basic.fourier import fft, ifft
    from pytom.basic.filter import lowpassFilter
    from pytom.basic.structures import Reference
    import os

    numberOfNodes = pytom_mpi.size()
    particleNodesRatio = float(len(particleList)) / float(numberOfNodes)
    splitFactor = numberOfNodes
    if particleNodesRatio < setParticleNodesRatio:
        # make sure each node gets at least setParticleNodesRatio particles.
        # FIX: use integer (floor) division -- in Python 3 '/' yields a
        # float, which is not a valid sublist count; guard against 0.
        splitFactor = max(1, len(particleList) // setParticleNodesRatio)
    splitLists = particleList.splitNSublists(splitFactor)

    msgList = []
    avgNameList = []
    preList = []
    wedgeList = []
    for i in range(len(splitLists)):
        plist = splitLists[i]
        avgName = averageName + '_dist' + str(i) + '.em'
        avgNameList.append(avgName)
        preList.append(averageName + '_dist' + str(i) + '-PreWedge.em')
        wedgeList.append(averageName + '_dist' + str(i) + '-WedgeSumUnscaled.em')
        job = ExpectationJob(plist, avgName)
        message = ExpectationJobMsg(0, i)
        message.setJob(job)
        msgList.append(message)

    # distribute averaging
    worker = ParallelWorker()
    worker.fillJobList(msgList)
    worker.parallelWork(True, sendEndMessage)

    # collect results: sum the per-node PreWedge and WedgeSum volumes
    result = read(preList[0])
    wedgeSum = read(wedgeList[0])
    for i in range(1, len(preList)):
        result += read(preList[i])
        wedgeSum += read(wedgeList[i])

    # averageName always ends in '.em' -> strip the extension for info files
    result.write(averageName[:len(averageName) - 3] + '-PreWedge.em')
    wedgeSum.write(averageName[:len(averageName) - 3] + '-WedgeSumUnscaled.em')

    # invert wedge sum with a lower limit to avoid division by tiny numbers
    invert_WedgeSum(invol=wedgeSum,
                    r_max=result.sizeX() / 2 - 2.,
                    lowlimit=.05 * len(particleList),
                    lowval=.05 * len(particleList))

    # apply the inverted wedge sum in Fourier space and normalize
    fResult = fft(result)
    r = complexRealMult(fResult, wedgeSum)
    result = ifft(r)
    result.shiftscale(0.0, 1 / float(result.sizeX() * result.sizeY() * result.sizeZ()))

    # do a low pass filter
    result = lowpassFilter(result, result.sizeX() / 2 - 2,
                           (result.sizeX() / 2 - 1) / 10.)[0]
    result.write(averageName)

    # clean up intermediate per-node files.
    # FIX: use os.remove instead of shelling out to 'rm' (portable, no shell);
    # cleanup stays best-effort like the original.
    for fname in avgNameList + preList + wedgeList:
        try:
            os.remove(fname)
        except OSError:
            pass

    return Reference(averageName, particleList)
def fromXML(self, xmlObj):
    """
    fromXML : Assigns values to job attributes from XML object
    @param xmlObj: A xml object (the ``JobDescription`` element or an
        ancestor containing one)
    @type xmlObj: L{lxml.etree._Element}
    @raise Exception: if xmlObj is not an lxml element or holds no
        JobDescription
    @author: chen
    """
    from lxml.etree import _Element
    if xmlObj.__class__ != _Element:
        raise Exception('You must provide a valid XML object.')
    if xmlObj.tag == "JobDescription":
        jobDescription = xmlObj
    else:
        jobDescription = xmlObj.xpath('JobDescription')
        if len(jobDescription) == 0:
            raise Exception("This XML is not an JobDescription.")
        jobDescription = jobDescription[0]
    # optional attributes: only overwrite defaults when present
    id = jobDescription.get('ID')
    if id != None and id != 'None':
        self.jobID = int(id)
    members = jobDescription.get('Members')
    if members != None and members != 'None':
        self.members = int(members)
    dstDir = jobDescription.get('Destination')
    if dstDir != None:
        # normalize to a trailing slash
        if dstDir[-1] == '/':
            self.dstDir = dstDir
        else:
            self.dstDir = dstDir + '/'
    from pytom.score.score import fromXML as fromXMLScore
    from pytom.basic.structures import Mask, Reference, Wedge
    # from pytom.angles.angleList import AngleList
    from pytom.localization.structures import Volume
    e = jobDescription.xpath('Volume')[0]
    v = Volume()
    v.fromXML(e)
    self.volume = v
    ref = jobDescription.xpath('Reference')[0]
    self.reference = Reference('')
    self.reference.fromXML(ref)
    # the wedge element may be serialized under several historic tag names;
    # try them in order until one matches
    wedgeXML = jobDescription.xpath('Wedge')
    if len(wedgeXML) == 0:
        wedgeXML = jobDescription.xpath('SingleTiltWedge')
    if len(wedgeXML) == 0:
        wedgeXML = jobDescription.xpath('WedgeInfo')
    if len(wedgeXML) == 0:
        wedgeXML = jobDescription.xpath('DoubleTiltWedge')
    assert len(wedgeXML) > 0
    self.wedge = Wedge()
    self.wedge.fromXML(wedgeXML[0])
    mask = jobDescription.xpath('Mask')[0]
    self.mask = Mask('')
    self.mask.fromXML(mask)
    score = jobDescription.xpath('Score')
    self.score = fromXMLScore(score[0])
    rot = jobDescription.xpath('Angles')[0]
    from pytom.angles.angle import AngleObject
    ang = AngleObject()
    self.rotations = ang.fromXML(rot)
    # self.rotations = AngleList()
    # self.rotations.fromXML(rot)
    # BandPassFilter is optional
    bp = jobDescription.xpath('BandPassFilter')
    if bp != []:
        bp = bp[0]
        from pytom.basic.structures import BandPassFilter
        self.bandpass = BandPassFilter(0, 0, 0)
        self.bandpass.fromXML(bp)
    else:
        self.bandpass = None
def average( particleList, averageName, showProgressBar=False, verbose=False,
        createInfoVolumes=False, weighting=False, norm=False):
    """
    average : Creates new average from a particleList
    @param particleList: The particles
    @param averageName: Filename of new average
    @param showProgressBar: show progress on stdout? False by default.
    @param verbose: Prints particle information. Disabled by default.
    @param createInfoVolumes: Create info data (wedge sum, inverted density) too? False by default.
    @param weighting: apply weighting to each average according to its correlation score
    @param norm: apply normalization for each particle
    @return: A new Reference object
    @rtype: L{pytom.basic.structures.Reference}
    @author: Thomas Hrabe
    @change: limit for wedgeSum set to 1% or particles to avoid division by small numbers - FF
    """
    from pytom_volume import read,vol,reducedToFull,limit, complexRealMult
    from pytom.basic.filter import lowpassFilter, rotateWeighting
    from pytom_volume import transformSpline as transform
    from pytom.basic.fourier import convolute
    from pytom.basic.structures import Reference
    from pytom.basic.normalise import mean0std1
    from pytom.tools.ProgressBar import FixedProgBar
    from math import exp
    import os

    if len(particleList) == 0:
        raise RuntimeError('The particle list is empty. Aborting!')

    if showProgressBar:
        progressBar = FixedProgBar(0,len(particleList),'Particles averaged ')
        progressBar.update(0)
        numberAlignedParticles = 0

    result = []    # becomes a vol after the first particle fixes the dimensions
    wedgeSum = []  # becomes the accumulated (reduced-complex) wedge volume
    newParticle = None
    # pre-check that scores != 0
    if weighting:
        wsum = 0.
        for particleObject in particleList:
            wsum += particleObject.getScore().getValue()
        if wsum < 0.00001:
            weighting = False
            print("Warning: all scores have been zero - weighting not applied")

    for particleObject in particleList:
        if verbose:
            print(particleObject)

        # best-effort: particles whose file vanished are silently skipped
        if not os.path.exists(particleObject.getFilename()):
            continue
        particle = read(particleObject.getFilename())
        if norm: # normalize the particle
            mean0std1(particle) # happen inplace
        wedgeInfo = particleObject.getWedge()
        # apply its wedge to itself
        particle = wedgeInfo.apply(particle)

        if result == []:
            # first valid particle determines the dimensions of all buffers
            sizeX = particle.sizeX()
            sizeY = particle.sizeY()
            sizeZ = particle.sizeZ()

            newParticle = vol(sizeX,sizeY,sizeZ)

            centerX = sizeX/2
            centerY = sizeY/2
            centerZ = sizeZ/2

            result = vol(sizeX,sizeY,sizeZ)
            result.setAll(0.0)
            # analytWedge is a module-level flag (defined outside this chunk):
            # True selects the analytical wedge path marked "buggy" below.
            if analytWedge:
                wedgeSum = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ)
            else:
                # > FF bugfix
                wedgeSum = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ)
                # < FF
                # > TH bugfix
                #wedgeSum = vol(sizeX,sizeY,sizeZ)
                # < TH
                #wedgeSum.setAll(0)
            # wedge volumes are reduced-complex: their z extent is sizeZ/2+1
            assert wedgeSum.sizeX() == sizeX and wedgeSum.sizeY() == sizeY and wedgeSum.sizeZ() == sizeZ/2+1, \
                "wedge initialization result in wrong dims :("
            wedgeSum.setAll(0)

        ### create spectral wedge weighting
        rotation = particleObject.getRotation()
        rotinvert = rotation.invert()
        if analytWedge:
            # > analytical buggy version
            wedge = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ,False, rotinvert)
        else:
            # > FF: interpol bugfix
            wedge = rotateWeighting( weighting=wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ,False),
                                     z1=rotinvert[0], z2=rotinvert[1], x=rotinvert[2], mask=None,
                                     isReducedComplex=True, returnReducedComplex=True)
            # < FF
            # > TH bugfix
            #wedgeVolume = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ,
            #                                      humanUnderstandable=True, rotation=rotinvert)
            #wedge = rotate(volume=wedgeVolume, rotation=rotinvert, imethod='linear')
            # < TH

        ### shift and rotate particle
        shiftV = particleObject.getShift()
        newParticle.setAll(0)
        # inverse transform: undo the particle's stored rotation and shift
        transform(particle,newParticle,-rotation[1],-rotation[0],-rotation[2],
                  centerX,centerY,centerZ,-shiftV[0],-shiftV[1],-shiftV[2],0,0,0)

        if weighting:
            # score-derived weight; exp(-(1-score)) favors high-scoring particles
            weight = 1.-particleObject.getScore().getValue()
            #weight = weight**2
            weight = exp(-1.*weight)
            result = result + newParticle * weight
            wedgeSum = wedgeSum + wedge * weight
        else:
            result = result + newParticle
            wedgeSum = wedgeSum + wedge

        if showProgressBar:
            numberAlignedParticles = numberAlignedParticles + 1
            progressBar.update(numberAlignedParticles)

    ###apply spectral weighting to sum
    result = lowpassFilter(result, sizeX/2-1, 0.)[0]
    #if createInfoVolumes:
    # averageName is assumed to end in '.em' (3 chars stripped for suffixes)
    result.write(averageName[:len(averageName)-3]+'-PreWedge.em')
    wedgeSum.write(averageName[:len(averageName)-3] + '-WedgeSumUnscaled.em')
    # invert_WedgeSum is defined elsewhere in this module; inverts in place
    # with a lower limit of 5% of the particle count (see @change above)
    invert_WedgeSum( invol=wedgeSum, r_max=sizeX/2-2., lowlimit=.05*len(particleList),
                     lowval=.05*len(particleList))

    if createInfoVolumes:
        wedgeSum.write(averageName[:len(averageName)-3] + '-WedgeSumInverted.em')

    result = convolute(v=result, k=wedgeSum, kernel_in_fourier=True)

    # do a low pass filter
    #result = lowpassFilter(result, sizeX/2-2, (sizeX/2-1)/10.)[0]
    result.write(averageName)

    if createInfoVolumes:
        resultINV = result * -1
        #write sign inverted result to disk (good for chimera viewing ... )
        resultINV.write(averageName[:len(averageName)-3]+'-INV.em')

    newReference = Reference(averageName,particleList)

    return newReference
from pytom.alignment.GLocalSampling import GLocalSamplingJob, mainAlignmentLoop from pytom.basic.structures import ParticleList, Reference, Mask, SampleInformation, PointSymmetry from pytom.score.score import FLCFScore, nxcfScore from pytom.angles.localSampling import LocalSampling from pytom.alignment.preprocessing import Preprocessing #particleList if not checkFileExists(particleList): raise RuntimeError('ParticleList file ' + particleList + ' does not exist!') pl = ParticleList() pl.fromXMLFile(particleList) if reference: if not checkFileExists(reference): raise RuntimeError('Reference file ' + reference + ' does not exist!') ref = Reference(referenceFile=reference) else: ref = Reference() if not checkFileExists(mask): raise RuntimeError('Mask file ' + mask + ' does not exist!') if isSphere: isSphere = True else: isSphere = False m = Mask(filename=mask, isSphere=isSphere) if not checkDirExists(destination): raise RuntimeError('Destination directory ' + destination + ' does not exist!') if not angShells:
def interpretRequestParameters(parameters):
    """
    interpretRequestParameters: Builds a localization (template matching)
    PeakJob from the HTTP-request parameter dict, writes its XML file and run
    script, and reports the created job file.

    @param parameters: dict-like mapping of request parameter names to strings
    @return: L{pytom.frontend.serverpages.serverMessages.FileMessage} for the job XML
    @raise RuntimeError: if any mandatory parameter is absent
    """
    from pytom.localization.peak_job import PeakJob
    from pytom.localization.structures import Volume
    from pytom.basic.structures import Mask, Reference, WedgeInfo
    from pytom.angles.globalSampling import GlobalSampling
    from pytom.basic.structures import BandPassFilter
    from pytom.frontend.serverpages.serverMessages import FileMessage

    def required(key):
        # Fetch a mandatory request parameter. The original repeated this
        # if/else block verbatim for every key; message kept identical.
        if key not in parameters:
            raise RuntimeError('Parameter missing in request!')
        return parameters[key]

    vol = Volume(required('tomo'))
    ref = Reference(required('ref'))
    mask = Mask(required('mask'))
    ang = GlobalSampling(required('angle'))

    # destination is optional, defaults to the current directory
    if 'dest' in parameters:
        dest = parameters['dest']
    else:
        dest = './'

    # bandpass frequencies and wedge angles are mandatory floats
    low = float(required('low'))
    high = float(required('high'))
    smooth = float(required('smooth'))
    w1 = float(required('w1'))
    w2 = float(required('w2'))

    # volume offsets are optional and default to 0
    x = float(parameters['x']) if 'x' in parameters else 0
    y = float(parameters['y']) if 'y' in parameters else 0
    z = float(parameters['z']) if 'z' in parameters else 0

    bp = BandPassFilter(low, high, smooth)
    wedge = WedgeInfo([w1, w2])

    from pytom.score.score import FLCFScore
    sc = FLCFScore()
    job = PeakJob(vol, ref, mask, wedge, ang, dstDir=dest, score=sc, bandpass=bp)

    jobXMLFile = ''
    if 'jobFile' in parameters:
        jobXMLFile = parameters['jobFile']
    job.toXMLFile(jobXMLFile)

    # derive the runscript base name by stripping a '.xml' suffix if present
    if jobXMLFile[-4:] == '.xml':
        jobRunFile = jobXMLFile[0:-4]
    else:
        jobRunFile = jobXMLFile
    createRunscripts(jobRunFile + '.sh', jobXMLFile, x, y, z)

    return FileMessage('LocalizationJob', jobXMLFile, 'created')
def fromXML(self, xmlObj):
    """
    fromXML : Assigns values to job attributes from XML object
    @param xmlObj: A xml object
    @type xmlObj: L{lxml.etree._Element}
    @author: Thomas Hrabe
    """
    from lxml.etree import _Element

    if xmlObj.__class__ != _Element:
        from pytom.basic.exceptions import ParameterError
        raise ParameterError(
            'You must provide a valid XML-MaximisationJob object.')

    # accept either the JobDescription element itself or a wrapping element
    if xmlObj.tag == "JobDescription":
        jobDescription = xmlObj
    else:
        jobDescription = xmlObj.xpath('JobDescription')
        if len(jobDescription) == 0:
            from pytom.basic.structures import PyTomClassError
            raise PyTomClassError("This XML is not an JobDescription.")
        jobDescription = jobDescription[0]

    from pytom.angles.angle import AngleObject
    from pytom.score.score import fromXML as fromXMLScore
    from pytom.alignment.preprocessing import Preprocessing
    from pytom.basic.structures import Mask, Particle, Reference, ReferenceList

    self.binning = int(jobDescription.get('Binning'))

    particle_element = jobDescription.xpath('Particle')[0]
    p = Particle('')
    p.fromXML(particle_element)
    self.particle = p

    # a job carries either a single Reference or a ReferenceList
    r = jobDescription.xpath('Reference')
    if len(r) > 0:
        ref = Reference('')
        ref.fromXML(r[0])
        self.reference = ref
    else:
        r = jobDescription.xpath('ReferenceList')
        ref = ReferenceList()
        ref.fromXML(r[0])
        self.reference = ref

    mask = jobDescription.xpath('Mask')[0]
    self.mask = Mask('')
    self.mask.fromXML(mask)

    self.numberRefinementRounds = jobDescription.get(
        'NumberRefinementRounds')
    self.numberRefinementRounds = int(self.numberRefinementRounds)

    score = jobDescription.xpath('Score')
    self.score = fromXMLScore(score[0])

    angles = jobDescription.xpath('Angles')
    ang = AngleObject()
    self.rotations = ang.fromXML(angles[0])

    # NOTE(review): this searches xmlObj rather than jobDescription, unlike
    # every other lookup above - confirm where Preprocessing sits in the schema.
    preObj = xmlObj.xpath('Preprocessing')
    if len(preObj) == 0:
        # Preprocessing element is optional; default-construct when absent
        self.preprocessing = Preprocessing()
    else:
        p = Preprocessing()
        p.fromXML(preObj[0])
        self.preprocessing = p
class GrowingAverageInterimResult(PyTomClass):
    """
    GrowingAverageInterimResult: one intermediate alignment result
    (particle, reference, rotation, shift, score) of the growing-average
    procedure.
    """

    def __init__(self, particle, reference, rotation, shift, score):
        self.particle = particle
        self.reference = reference
        self.rotation = rotation
        self.shift = shift
        self.score = score

    def getFilename(self):
        """Return the filename of the wrapped particle."""
        return self.particle.getFilename()

    def getWedgeInfo(self):
        """Return the wedge info of the wrapped particle."""
        return self.particle.getWedgeInfo()

    def getRotation(self):
        return self.rotation

    def getShift(self):
        return self.shift

    def toXML(self):
        """
        toXML : Compiles a XML file from result object
        rtype : L{lxml.etree._Element}
        @author: Thomas Hrabe
        """
        from lxml import etree

        result_element = etree.Element('GrowingAverageInterimResult')
        result_element.append(self.particle.toXML())
        result_element.append(self.reference.toXML())
        result_element.append(self.rotation.toXML())
        result_element.append(self.shift.toXML())
        result_element.append(self.score.toXML())

        return result_element

    def fromXML(self, xmlObj):
        """
        fromXML: Restores particle and reference from a
        GrowingAverageInterimResult XML element.
        @param xmlObj: A xml object
        @type xmlObj: L{lxml.etree._Element}
        @author: Thomas Hrabe
        """
        from lxml.etree import _Element

        if xmlObj.__class__ != _Element:
            raise Exception('Is not a lxml.etree._Element! You must provide a valid XMLobject.')

        if xmlObj.tag == 'GrowingAverageInterimResult':
            result_element = xmlObj
        else:
            # BUGFIX: the original constructed this Exception without raising
            # it, then fell through with result_element unbound (NameError).
            raise Exception('XML object is not a GrowingAverageInterimResult! You must provide a valid GrowingAverageInterimResultXML object.')

        from pytom.basic.structures import Particle, Reference

        particleXML = result_element.xpath('/GrowingAverageInterimResult/Particle')[0]
        self.particle = Particle('')
        self.particle.fromXML(particleXML)

        # NOTE(review): toXML() serializes the reference under its own tag, yet
        # this looks up 'Result'; the absolute '/...' xpath also only resolves
        # when the element is the document root - confirm against callers.
        # rotation, shift and score are not restored here.
        referenceXML = result_element.xpath('/GrowingAverageInterimResult/Result')[0]
        self.reference = Reference('')
        self.reference.fromXML(referenceXML)
def averageGPU(particleList, averageName, showProgressBar=False, verbose=False,
               createInfoVolumes=False, weighting=False, norm=False,
               gpuId=None, profile=True):
    """
    average : Creates new average from a particleList
    @param particleList: The particles
    @param averageName: Filename of new average
    @param verbose: Prints particle information. Disabled by default.
    @param createInfoVolumes: Create info data (wedge sum, inverted density) too? False by default.
    @param weighting: apply weighting to each average according to its correlation score
    @param norm: apply normalization for each particle
    @param gpuId: id of the CUDA device to run on; mandatory (raises otherwise)
    @param profile: print per-stage GPU timings
    @return: A new Reference object
    @rtype: L{pytom.basic.structures.Reference}
    @author: Thomas Hrabe
    @change: limit for wedgeSum set to 1% or particles to avoid division by small numbers - FF
    """
    import time
    from pytom.tompy.io import read, write, read_size
    from pytom.tompy.filter import bandpass as lowpassFilter, rotateWeighting, applyFourierFilter, applyFourierFilterFull, create_wedge
    from pytom.voltools import transform, StaticVolume
    from pytom.basic.structures import Reference
    from pytom.tompy.normalise import mean0std1
    from pytom.tompy.tools import volumesSameSize, invert_WedgeSum, create_sphere
    from pytom.tompy.transform import fourier_full2reduced, fourier_reduced2full
    from cupyx.scipy.fftpack.fft import fftn as fftnP
    from cupyx.scipy.fftpack.fft import ifftn as ifftnP
    from cupyx.scipy.fftpack.fft import get_fft_plan
    from pytom.tools.ProgressBar import FixedProgBar
    from multiprocessing import RawArray
    import numpy as np
    import cupy as xp

    # a GPU id is mandatory for this code path
    if not gpuId is None:
        device = f'gpu:{gpuId}'
        xp.cuda.Device(gpuId).use()
    else:
        print(gpuId)
        raise Exception('Running gpu code on non-gpu device')
    print(device)
    cstream = xp.cuda.Stream()
    if profile:
        stream = xp.cuda.Stream.null
        t_start = stream.record()

    # from pytom.tools.ProgressBar import FixedProgBar
    from math import exp
    import os

    if len(particleList) == 0:
        raise RuntimeError('The particle list is empty. Aborting!')

    if showProgressBar:
        progressBar = FixedProgBar(0, len(particleList), 'Particles averaged ')
        progressBar.update(0)
        numberAlignedParticles = 0

    # pre-check that scores != 0
    if weighting:
        wsum = 0.
        for particleObject in particleList:
            wsum += particleObject.getScore().getValue()
        if wsum < 0.00001:
            weighting = False
            print("Warning: all scores have been zero - weighting not applied")
    import time

    # dimensions and wedge are taken from the first particle; all particles
    # are assumed to share them
    sx, sy, sz = read_size(particleList[0].getFilename())
    wedgeInfo = particleList[0].getWedge().convert2numpy()
    print('angle: ', wedgeInfo.getWedgeAngle())
    wedgeZero = xp.fft.fftshift( xp.array(wedgeInfo.returnWedgeVolume(sx, sy, sz, True).get(), dtype=xp.float32))
    # wedgeZeroReduced = fourier_full2reduced(wedgeZero)
    wedge = xp.zeros_like(wedgeZero, dtype=xp.float32)
    wedgeSum = xp.zeros_like(wedge, dtype=xp.float32)
    print('init texture')
    # texture-backed wedge volume for fast rotation on the GPU
    wedgeText = StaticVolume(xp.fft.fftshift(wedgeZero), device=device, interpolation='filt_bspline')

    newParticle = xp.zeros((sx, sy, sz), dtype=xp.float32)

    centerX = sx // 2
    centerY = sy // 2
    centerZ = sz // 2

    result = xp.zeros((sx, sy, sz), dtype=xp.float32)

    # pre-planned FFT reused for every particle
    fftplan = get_fft_plan(wedge.astype(xp.complex64))

    n = 0
    total = len(particleList)
    # (disabled experiment: shared-memory staging of particle batches)
    # total = int(np.floor((11*1024**3 - mempool.total_bytes())/(sx*sy*sz*4)))
    # total = 128
    #
    #
    # particlesNP = np.zeros((total, sx, sy, sz),dtype=np.float32)
    # particles = []
    # mask = create_sphere([sx,sy,sz], sx//2-6, 2)
    # raw = RawArray('f', int(particlesNP.size))
    # shared_array = np.ctypeslib.as_array(raw)
    # shared_array[:] = particlesNP.flatten()
    # procs = allocateProcess(particleList, shared_array, n, total, wedgeZero.size)
    # del particlesNP

    if profile:
        t_end = stream.record()
        t_end.synchronize()
        time_took = xp.cuda.get_elapsed_time(t_start, t_end)
        print(f'startup time {n:5d}: \t{time_took:.3f}ms')
        t_start = stream.record()

    for particleObject in particleList:
        rotation = particleObject.getRotation()
        rotinvert = rotation.invert()
        shiftV = particleObject.getShift()

        # if n % total == 0:
        #     while len(procs):
        #         procs =[proc for proc in procs if proc.is_alive()]
        #         time.sleep(0.1)
        #         print(0.1)
        #     # del particles
        #     # xp._default_memory_pool.free_all_blocks()
        #     # pinned_mempool.free_all_blocks()
        #     particles = xp.array(shared_array.reshape(total, sx, sy, sz), dtype=xp.float32)
        #     procs = allocateProcess(particleList, shared_array, n, total, size=wedgeZero.size)
        #     #pinned_mempool.free_all_blocks()
        #     #print(mempool.total_bytes()/1024**3)

        particle = read(particleObject.getFilename(), deviceID=device)
        #particle = particles[n%total]
        if norm:  # normalize the particle
            mean0std1(particle)  # happen inplace

        # apply its wedge to
        #particle = applyFourierFilter(particle, wedgeZeroReduced)
        #particle = (xp.fft.ifftn( xp.fft.fftn(particle) * wedgeZero)).real
        particle = (ifftnP(fftnP(particle, plan=fftplan) * wedgeZero, plan=fftplan)).real

        ### create spectral wedge weighting
        wedge *= 0
        # rotate the (fftshifted) zero wedge by the inverse particle rotation
        wedgeText.transform( rotation=[rotinvert[0], rotinvert[2], rotinvert[1]], rotation_order='rzxz', output=wedge)
        #wedge = xp.fft.fftshift(fourier_reduced2full(create_wedge(30, 30, 21, 42, 42, 42, rotation=[rotinvert[0],rotinvert[2], rotinvert[1]])))

        # if analytWedge:
        #     # > analytical buggy version
        #     wedge = wedgeInfo.returnWedgeVolume(sx, sy, sz, True, rotinvert)
        # else:
        #     # > FF: interpol bugfix
        #     wedge = rotateWeighting(weighting=wedgeInfo.returnWedgeVolume(sx, sy, sz, True), rotation=[rotinvert[0], rotinvert[2], rotinvert[1]])
        #     # < FF
        #     # > TH bugfix
        #     # wedgeVolume = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ,
        #     #                                      humanUnderstandable=True, rotation=rotinvert)
        #     # wedge = rotate(volume=wedgeVolume, rotation=rotinvert, imethod='linear')
        #     # < TH

        ### shift and rotate particle
        newParticle *= 0
        # inverse transform: undo the particle's stored rotation and shift
        transform(particle, output=newParticle, rotation=[-rotation[1], -rotation[2], -rotation[0]],
                  center=[centerX, centerY, centerZ], translation=[-shiftV[0], -shiftV[1], -shiftV[2]],
                  device=device, interpolation='filt_bspline', rotation_order='rzxz')

        #write(f'trash/GPU_{n}.em', newParticle)
        # print(rotation.toVector())
        # break
        result += newParticle
        wedgeSum += xp.fft.fftshift(wedge)
        # if showProgressBar:
        #     numberAlignedParticles = numberAlignedParticles + 1
        #     progressBar.update(numberAlignedParticles)

        if n % total == 0:
            if profile:
                t_end = stream.record()
                t_end.synchronize()
                time_took = xp.cuda.get_elapsed_time(t_start, t_end)
                print(f'total time {n:5d}: \t{time_took:.3f}ms')
                t_start = stream.record()
        cstream.synchronize()
        n += 1

    print('averaged particles')
    ###apply spectral weighting to sum

    # NOTE(review): this lowpassFilter result is not indexed with [0] while the
    # later call below is - verify bandpass' return type for both usages.
    result = lowpassFilter(result, high=sx / 2 - 1, sigma=0)
    # if createInfoVolumes:
    # averageName is assumed to end in '.em' (3 chars stripped for suffixes)
    write(averageName[:len(averageName) - 3] + '-PreWedge.em', result)
    write(averageName[:len(averageName) - 3] + '-WedgeSumUnscaled.em', fourier_full2reduced(wedgeSum))

    wedgeSumINV = invert_WedgeSum(wedgeSum, r_max=sx // 2 - 2., lowlimit=.05 * len(particleList),
                                  lowval=.05 * len(particleList))
    wedgeSumINV = wedgeSumINV  # no-op self-assignment kept from the original
    #print(wedgeSum.mean(), wedgeSum.std())
    if createInfoVolumes:
        write(averageName[:len(averageName) - 3] + '-WedgeSumInverted.em', xp.fft.fftshift(wedgeSumINV))

    result = applyFourierFilterFull(result, xp.fft.fftshift(wedgeSumINV))

    # do a low pass filter
    result = lowpassFilter(result, sx / 2 - 2, (sx / 2 - 1) / 10.)[0]
    write(averageName, result)

    if createInfoVolumes:
        resultINV = result * -1
        # write sign inverted result to disk (good for chimera viewing ... )
        write(averageName[:len(averageName) - 3] + '-INV.em', resultINV)

    newReference = Reference(averageName, particleList)
    return newReference
def distributeExpectation(particleLists, iterationDirectory, averagePrefix, verbose=False, symmetry=None):
    """
    distributeExpectation: Distributes particle expectation (averaging) to multiple workers.
    Required by many algorithms such as MCOEXMX

    @param particleLists: list of particleLists, one per class
    @param iterationDirectory: directory of the current iteration; per-class
        subdirectories ('class<i>/') are created inside it
    @param averagePrefix: file-name prefix for the per-class average volumes
    @param verbose: print job messages while distributing
    @param symmetry: optional symmetry object; when given and not one-fold it is
        applied to each particle list before averaging
    @return: ReferenceList of the newly created class averages
    @raise RuntimeError: when not run as MPI rank 0 or on a single CPU
    """
    import pytom_mpi
    from pytom.tools.files import checkDirExists
    from pytom.parallel.alignmentMessages import ExpectationJobMsg, ExpectationResultMsg
    from pytom.alignment.structures import ExpectationJob
    from pytom.basic.structures import Reference, ReferenceList
    from os import mkdir

    if not pytom_mpi.isInitialised():
        pytom_mpi.init()

    mpi_myid = pytom_mpi.rank()
    if not mpi_myid == 0:
        raise RuntimeError(
            'This function (distributeExpectation) can only be processed by mpi_id = 0! ID == '
            + str(mpi_myid) + ' Aborting!')

    if not checkDirExists(iterationDirectory):
        raise IOError('The iteration directory does not exist. ' + iterationDirectory)

    mpi_numberNodes = pytom_mpi.size()
    if mpi_numberNodes <= 1:
        raise RuntimeError('You must run clustering with openMPI on multiple CPUs')

    listIterator = 0
    referenceList = ReferenceList()

    # first wave: hand one job to every worker node
    for i in range(1, mpi_numberNodes):
        if verbose:
            print('Starting first job distribute step')
        if listIterator < len(particleLists):
            if not checkDirExists(iterationDirectory + 'class' + str(listIterator) + '/'):
                mkdir(iterationDirectory + 'class' + str(listIterator) + '/')
            averageName = iterationDirectory + 'class' + str(listIterator) + '/' + \
                averagePrefix + '-' + str(listIterator) + '.em'

            # BUGFIX: symmetry defaults to None; the original called
            # symmetry.isOneFold() unconditionally and crashed for the default.
            if symmetry is not None and not symmetry.isOneFold():
                newPl = symmetry.apply(particleLists[listIterator])
                job = ExpectationJob(newPl, averageName)
            else:
                job = ExpectationJob(particleLists[listIterator], averageName)

            newReference = Reference(averageName, particleLists[listIterator])
            referenceList.append(newReference)

            jobMsg = ExpectationJobMsg(0, str(i))
            jobMsg.setJob(job)
            pytom_mpi.send(str(jobMsg), i)
            if verbose:
                print(jobMsg)
            listIterator = listIterator + 1

    finished = False
    # more jobs than nodes: keep dispatching as workers report back
    receivedMsgCounter = 0
    while not finished:
        # listen and collect
        mpi_msgString = pytom_mpi.receive()
        if verbose:
            print(mpi_msgString)

        jobResultMsg = ExpectationResultMsg('', '')
        jobResultMsg.fromStr(mpi_msgString)
        receivedMsgCounter = receivedMsgCounter + 1

        # send new job to the node that just became free
        if listIterator < len(particleLists):
            if not checkDirExists(iterationDirectory + 'class' + str(listIterator) + '/'):
                mkdir(iterationDirectory + 'class' + str(listIterator) + '/')
            averageName = iterationDirectory + 'class' + str(listIterator) + '/' + \
                averagePrefix + '-' + str(listIterator) + '.em'

            # NOTE(review): unlike the first loop, symmetry is not applied in
            # this path - confirm whether that difference is intended.
            job = ExpectationJob(particleLists[listIterator], averageName)
            newReference = Reference(averageName, particleLists[listIterator])
            referenceList.append(newReference)

            jobMsg = ExpectationJobMsg(0, str(jobResultMsg.getSender()))
            jobMsg.setJob(job)
            # BUGFIX: the original sent to 'i', the stale index left over from
            # the first loop, so follow-up jobs all went to the last node.
            # The message itself is addressed to the reporting sender.
            pytom_mpi.send(str(jobMsg), int(jobResultMsg.getSender()))
            if verbose:
                print(jobMsg)
            listIterator = listIterator + 1

        finished = listIterator >= len(
            particleLists) and receivedMsgCounter == len(particleLists)

    return referenceList
@param splitType: split type of the job @type splitType: "Ang" or "Vol" """ def __init__(self, jobID=-1, originalJobID=-1, splitType=None): self.jobID = jobID self.originalJobID = originalJobID self.splitType = splitType if __name__ == '__main__': from pytom.basic.structures import Mask, Reference, WedgeInfo from pytom.localization.structures import Volume from pytom.score.score import FLCFScore, xcfScore from pytom.localization.peak_job import PeakJob v = Volume('/fs/pool/pool-foerster/apps/src/molmatch/test/testvol.em') ref = Reference('/fs/pool/pool-foerster/apps/src/molmatch/test/templ.em') m = Mask('/fs/pool/pool-foerster/apps/src/molmatch/test/mask_15.em', True) w = WedgeInfo(0) s = FLCFScore() from pytom.angles.globalSampling import GlobalSampling r = GlobalSampling( '/fs/home/ychen/develop/pytom/trunk/pytom/pytomc/libs/libtomc/data/common_data/angles_90_26.em' ) job = PeakJob(v, ref, m, w, r, s) job.toXMLFile('JobInfo.xml') job.toHTMLFile('JobInfo.html')