def start(self, job, verbose=False):
    if self.mpi_id == 0:
        import os
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil

        # randomly split the particle list into 2 half sets
        if len(job.particleList.splitByClass()) != 2:
            import numpy as np
            n = len(job.particleList)
            labels = np.random.randint(2, size=(n,))
            print(self.node_name + ': Number of 1st half set:', n - np.sum(labels),
                  'Number of 2nd half set:', np.sum(labels))
            for i in range(n):
                p = job.particleList[i]
                p.setClass(labels[i])

        self.destination = job.destination
        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq

        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)

            # construct a new job by updating the reference and the frequency
            new_job = FRMJob(job.particleList, new_reference, job.mask, job.peak_offset,
                             job.sampleInformation, job.bw_range, new_freq, job.destination,
                             job.max_iter - i, job.r_score, job.weighting)

            # distribute it
            self.distribute_job(new_job, verbose)

            # get the result back
            all_even_pre = None  # the 1st set
            all_even_wedge = None
            all_odd_pre = None  # the 2nd set
            all_odd_wedge = None
            pl = ParticleList()
            for j in range(self.num_workers):
                result = self.get_result()
                pl += result.pl
                pre, wedge = self.retrieve_res_vols(result.name)

                if self.assignment[result.worker_id] == 0:
                    if all_even_pre:
                        all_even_pre += pre
                        all_even_wedge += wedge
                    else:
                        all_even_pre = pre
                        all_even_wedge = wedge
                else:
                    if all_odd_pre:
                        all_odd_pre += pre
                        all_odd_wedge += wedge
                    else:
                        all_odd_pre = pre
                        all_odd_wedge = wedge

            # write the new particle list to the disk
            pl.toXMLFile('aligned_pl_iter' + str(i) + '.xml')

            # create the averages separately
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            even = self.create_average(all_even_pre, all_even_wedge)
            odd = self.create_average(all_odd_pre, all_odd_wedge)

            # apply symmetries if any
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)

            # determine the transformation between even and odd
            # here we assume the wedge from both sets are fully sampled
            from sh_alignment.frm import frm_align
            pos, angle, score = frm_align(odd, None, even, None, job.bw_range, new_freq, job.peak_offset)
            print(self.node_name + ': Transform of even set to match the odd set - shift: ' + str(pos) + ' rotation: ' + str(angle))

            # transform the odd set accordingly
            from pytom_volume import vol, transformSpline
            from pytom.basic.fourier import ftshift
            from pytom_volume import reducedToFull
            from pytom_freqweight import weight
            transformed_odd_pre = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())
            full_all_odd_wedge = reducedToFull(all_odd_wedge)
            ftshift(full_all_odd_wedge)
            odd_weight = weight(full_all_odd_wedge)  # the funny part of pytom
            transformed_odd = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())

            transformSpline(all_odd_pre, transformed_odd_pre, -angle[1], -angle[0], -angle[2],
                            odd.sizeX() / 2, odd.sizeY() / 2, odd.sizeZ() / 2,
                            -(pos[0] - odd.sizeX() / 2), -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
            odd_weight.rotate(-angle[1], -angle[0], -angle[2])
            transformed_odd_wedge = odd_weight.getWeightVolume(True)
            transformSpline(odd, transformed_odd, -angle[1], -angle[0], -angle[2],
                            odd.sizeX() / 2, odd.sizeY() / 2, odd.sizeZ() / 2,
                            -(pos[0] - odd.sizeX() / 2), -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)

            all_odd_pre = transformed_odd_pre
            all_odd_wedge = transformed_odd_wedge
            odd = transformed_odd

            # determine resolution
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)

            # write the half set to the disk
            even.write(os.path.join(self.destination, 'fsc_' + str(i) + '_even.em'))
            odd.write(os.path.join(self.destination, 'fsc_' + str(i) + '_odd.em'))

            current_resolution = bandToAngstrom(resolutionBand, job.sampleInformation.getPixelSize(), numberBands, 1)
            if verbose:
                print(self.node_name + ': current resolution ' + str(current_resolution), resNyquist)

            # create new average
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            average = self.create_average(all_even_pre, all_even_wedge)

            # apply symmetries
            average = job.symmetries.applyToParticle(average)

            # filter average to resolution
            average_name = os.path.join(self.destination, 'average_iter' + str(i) + '.em')
            average.write(average_name)

            # update the references
            new_reference = [Reference(os.path.join(self.destination, 'fsc_' + str(i) + '_even.em')),
                             Reference(os.path.join(self.destination, 'fsc_' + str(i) + '_odd.em'))]

            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand), ceil(resolutionBand) / 10)
            filtered_ref_name = os.path.join(self.destination,
                                             'average_iter' + str(i) + '_res' + str(current_resolution) + '.em')
            filtered[0].write(filtered_ref_name)

            # if the position/orientation is not improved, break it

            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(self.node_name + ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!' % job.adaptive_res)
                    new_freq = int((1 + job.adaptive_res) * old_freq)
                else:  # always increase by 1
                    print(self.node_name + ': Determined resolution gets worse. Increase the frequency to be aligned by 1!')
                    new_freq = old_freq + 1
                old_freq = new_freq
            else:
                old_freq = new_freq

            if new_freq >= numberBands:
                print(self.node_name + ': New frequency too high. Terminate!')
                break

            if verbose:
                print(self.node_name + ': change the frequency to ' + str(new_freq))

        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
def start(self, job, verbose=False):
    """
    start FRM job
    @param job: FRM job
    @type job: L{FRMJob}
    @param verbose: print stuff (default: False)
    @type verbose: C{bool}
    """
    if self.mpi_id == 0:
        import os
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil

        self.destination = job.destination
        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq

        #print(f"reference = {job.reference}")
        #print(f"particlelist = {job.particleList}")
        print(f"iterations = {job.max_iter:d}")
        print(f"binning = {job.binning:d}")
        #print(f"mask = {job.mask}")
        #print(f"peak_offset= {job.peak_offset:f2.1}")
        print(f"destination= {job.destination:s}")
        print(f"freq cut = {job.freq:d}")

        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)

            # construct a new job by updating the reference and the frequency
            new_job = FRMJob(job.particleList, new_reference, job.mask, job.peak_offset,
                             job.sampleInformation, job.bw_range, new_freq, job.destination,
                             job.max_iter - i, job.r_score, job.weighting,
                             constraint=job.constraint, binning=job.binning)

            # distribute it
            self.distribute_job(new_job, verbose)

            # get the result back
            all_even_pre = None
            all_even_wedge = None
            all_odd_pre = None
            all_odd_wedge = None
            pl = ParticleList()
            for j in range(self.num_workers):
                result = self.get_result()
                pl += result.pl
                even_pre, even_wedge, odd_pre, odd_wedge = self.retrieve_res_vols(result.name)

                if all_even_pre:
                    all_even_pre += even_pre
                    all_even_wedge += even_wedge
                    all_odd_pre += odd_pre
                    all_odd_wedge += odd_wedge
                else:
                    all_even_pre = even_pre
                    all_even_wedge = even_wedge
                    all_odd_pre = odd_pre
                    all_odd_wedge = odd_wedge

            # write the new particle list to the disk
            pl.toXMLFile(os.path.join(job.destination, 'aligned_pl_iter' + str(i) + '.xml'))

            # create half sets
            even = self.create_average(all_even_pre, all_even_wedge)
            odd = self.create_average(all_odd_pre, all_odd_wedge)

            # apply symmetries before determining the resolution
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)

            # write the half sets to the disk
            even.write(os.path.join(self.destination, 'fsc_' + str(i) + '_even.em'))
            odd.write(os.path.join(self.destination, 'fsc_' + str(i) + '_odd.em'))

            # determine the resolution
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            current_resolution = bandToAngstrom(resolutionBand, job.sampleInformation.getPixelSize(), numberBands, 1)
            if verbose:
                print(self.node_name + ': current resolution ' + str(current_resolution), resNyquist)

            # create new average
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            average = self.create_average(all_even_pre, all_even_wedge)

            # apply symmetries
            average = job.symmetries.applyToParticle(average)

            # filter average to resolution and update the new reference
            average_name = os.path.join(self.destination, 'average_iter' + str(i) + '.em')
            # pl.average(average_name, True)
            average.write(average_name)
            new_reference = Reference(average_name)

            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand), ceil(resolutionBand) / 10)
            filtered_ref_name = os.path.join(self.destination,
                                             'average_iter' + str(i) + '_res' + str(current_resolution) + '.em')
            filtered[0].write(filtered_ref_name)

            # if the position/orientation is not improved, break it

            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(self.node_name + ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!' % job.adaptive_res)
                    new_freq = int((1 + job.adaptive_res) * new_freq)
                    old_freq = new_freq
                else:  # always increase by 1
                    print(self.node_name + ': Determined resolution gets worse. Increase the frequency to be aligned by 1!')
                    new_freq = old_freq + 1
                    old_freq = new_freq
            else:
                old_freq = new_freq

            if new_freq >= numberBands:
                print(self.node_name + ': New frequency too high. Terminate!')
                break

            if verbose:
                print(self.node_name + ': change the frequency to ' + str(new_freq))

        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
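
# The loop above adapts the alignment frequency from the measured FSC band: normally the
# frequency follows the determined resolution plus one band, and when the resolution does
# not improve it is pushed higher anyway. A minimal, self-contained sketch of that update
# rule (hypothetical helper, plain Python, no PyTom dependencies; `adaptive_res` mirrors
# job.adaptive_res and may be False):
def update_frequency(resolution_band, old_freq, adaptive_res=0.1):
    """Return the frequency (in bands) to align against in the next iteration."""
    from math import ceil
    new_freq = int(ceil(resolution_band)) + 1
    if new_freq <= old_freq:
        # resolution did not improve: enforce a higher alignment frequency
        if adaptive_res is not False:
            # include an additional fraction of the current frequency range
            new_freq = int((1 + adaptive_res) * new_freq)
        else:
            # otherwise simply increase by one band
            new_freq = old_freq + 1
    return new_freq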
if v1Filename and v2Filename:
    print('Resolution determined for ', v1Filename, ' and ', v2Filename)
elif particleList:
    print('Resolution determined for ', particleList, ' ')

print('')
print('FSC Criterion: ', fscCriterion)
print('Number of Bands: ', numberBands)
print('')
print('Nyquist: ', r[0])
print('Band: ', r[1])

if pixelSize:
    from pytom.basic.resolution import bandToAngstrom
    resolution = bandToAngstrom(r[1], pixelSize, numberBands)
    print('Resolution determined for pixelsize : ', pixelSize, ' at ', resolution, ' Angstrom')
else:
    print('XML')

if plot and v1Filename and v2Filename:
    import matplotlib
    import numpy
    matplotlib.use('Qt5Agg')
    from pylab import subplots, savefig, show

    fig, ax = subplots(1, 1, figsize=(10, 5))
    ax.plot(f, label='FSC orig')
def multiRef_EXMXAlign(multiRefJob, doFinalize=True, verbose=False):
    """
    multiRef_EXMXAlign: Performs multi reference alignment on a particle list
    @param multiRefJob: The multi reference alignment job
    @param doFinalize: Send finalize msgs to workers or not. Default is true
    @param verbose: Default is false
    """
    import pytom_mpi

    if doFinalize:
        pytom_mpi.init()

    if pytom_mpi.rank() == 0:
        from pytom.alignment.ExMaxAlignment import ExMaxManager
        from pytom.tools.files import checkDirExists
        from os import mkdir
        from pytom.basic.resolution import bandToAngstrom, angstromToBand, angleFromResolution

        particleList = multiRefJob.getParticleList()
        initialParticleList = particleList
        previousParticleList = initialParticleList

        destinationDirectory = multiRefJob.getDestinationDirectory()
        numberIterations = multiRefJob.getNumberIterations()
        numberClasses = multiRefJob.getNumberClasses()

        exMaxJob = multiRefJob.getExMaxJob()
        p = particleList[0]
        pVol = p.getVolume()
        cubeSize = pVol.sizeX()
        preprocessing = exMaxJob.getPreprocessing()
        sampleInfo = exMaxJob.getSampleInformation()

        if verbose:
            print(multiRefJob)

        if not checkDirExists(destinationDirectory):
            raise IOError('Destination directory ' + destinationDirectory + ' not found!')

        try:
            particleLists = particleList.splitByClass()
            if len(particleLists) <= 1:
                raise Exception()
        except Exception:
            from pytom.cluster.clusterFunctions import randomiseParticleListClasses

            if numberClasses:
                if verbose:
                    print('Randomizing particle list')
                particleList = randomiseParticleListClasses(particleList, numberClasses)
                particleList.toXMLFile(destinationDirectory + '/RandomisedParticleList.xml')
                particleLists = particleList.splitByClass()
            else:
                raise RuntimeError('The particle list provided is not pre-classified and you did not set numberClasses for a random seed!')

        iteration = 0
        converged = False

        while iteration < numberIterations and (not converged):

            if verbose:
                print('Running iteration ' + str(iteration) + ' of ' + str(numberIterations))

            iterationDirectory = destinationDirectory + '/' + str(iteration) + '/'

            if not checkDirExists(iterationDirectory):
                mkdir(iterationDirectory)

            # determine resolution of all classes
            maxRes = 0
            minRes = 1000000

            if not checkDirExists(iterationDirectory + 'resolution/'):
                mkdir(iterationDirectory + 'resolution/')

            for classIterator in range(len(particleLists)):
                currentParticleList = particleLists[classIterator]
                if len(currentParticleList) > 1:
                    [resNyquist, resolutionBand, numberBands] = currentParticleList.determineResolution(
                        criterion=exMaxJob.getFSCCriterion(), numberBands=cubeSize / 2,
                        mask=exMaxJob.getMask(), keepHalfsetAverages=False,
                        halfsetPrefix=iterationDirectory + 'resolution/' + 'class' + str(classIterator) + '_fsc-',
                        verbose=verbose)
                else:
                    continue

                resolutionAngstrom = bandToAngstrom(resolutionBand, sampleInfo.getPixelSize(), numberBands, 1)
                #resolutionAngstrom = bandToAngstrom(resolutionBand, sampleInfo.getPixelSize(), numberBands, exMaxJob.getBinning())

                if resolutionBand > maxRes:
                    maxRes = resolutionBand
                if resolutionBand < minRes:
                    minRes = resolutionBand

                if verbose:
                    print('Class ', classIterator, ' - current resolution :' + str(resolutionAngstrom) + ' Angstrom')

            # set highest frequency according to user specification
            band = maxRes
            if not multiRefJob.getUseMaxResolution():
                band = minRes

            if band == numberBands:
                # determineResolution returns numberBands for filter if fsc result is invalid. in that case, use nyquist / 2 as filter setting
                print('Warning MultiRefAlignment.py: LL 114')
                print('Warning: Resolution determined for all classes was invalid. Will use Nyquist/2 for current iteration')
                band = numberBands / 2

            preprocessing.setHighestFrequency(band)
            exMaxJob.setPreprocessing(preprocessing)

            alignmentLists = [None] * len(particleLists)

            # generate cluster centers
            referenceList = distributeExpectation(particleLists, iterationDirectory,
                                                  'clusterCenter' + str(iteration), verbose,
                                                  exMaxJob.getSymmetry())

            for classIterator in range(len(particleLists)):
                classDirectory = iterationDirectory + 'class' + str(classIterator) + '/'

                # determine distance for all particles
                refinementDirectory = classDirectory + 'refinement/'
                if verbose:
                    print(refinementDirectory)

                if not checkDirExists(refinementDirectory):
                    mkdir(refinementDirectory)

                exMaxJob.setParticleList(particleList)
                exMaxJob.setReference(referenceList[classIterator])
                exMaxJob.setDestination(refinementDirectory)

                # run refinement
                manager = ExMaxManager(exMaxJob)
                manager.distributeAlignment(verbose)

                alignmentLists[classIterator] = manager.getAlignmentList()
                alignmentLists[classIterator].toXMLFile(iterationDirectory + 'AlignmentList' + str(classIterator) + '.xml')

            # perform classification here
            if verbose:
                print('Classifying after iteration ' + str(iteration))
            particleList = classifyParticleList(initialParticleList, alignmentLists, verbose)
            particleList.toXMLFile(iterationDirectory + 'classifiedParticles.xml')
            particleLists = particleList.splitByClass()

            difference = previousParticleList.classDifference(particleList)
            converged = multiRefJob.getEndThreshold() >= difference[3]

            # set up for next round!
            previousParticleList = particleList
            iteration = iteration + 1

        if doFinalize:
            manager.parallelEnd()
            pytom_mpi.finalise()

        return [particleList, alignmentLists]

    else:
        from pytom.alignment.ExMaxAlignment import ExMaxWorker
        worker = ExMaxWorker()
        worker.parallelRun()
        pytom_mpi.finalise()
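
# multiRef_EXMXAlign stops iterating once the change in class assignments between two
# consecutive iterations drops to or below multiRefJob.getEndThreshold(). A minimal numpy
# sketch of such a check (hypothetical helper; the real classDifference() returns a tuple
# whose exact contents are not shown here, so this only illustrates the idea):
import numpy as np

def class_change_fraction(previous_labels, current_labels):
    """Fraction of particles whose class label changed between two iterations."""
    previous_labels = np.asarray(previous_labels)
    current_labels = np.asarray(current_labels)
    return float(np.mean(previous_labels != current_labels))

# usage sketch: converged = class_change_fraction(prev_labels, curr_labels) <= end_threshold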
def start(self, job, verbose=False):
    if self.mpi_id == 0:
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil
        from pytom.basic.fourier import convolute
        from pytom_volume import vol, power, read

        # randomly split the particle list into 2 half sets
        import numpy as np
        num_pairs = len(job.particleList.pairs)
        for i in range(num_pairs):
            # randomize the class labels to indicate the two half sets
            pl = job.particleList.pairs[i].get_phase_flip_pl()
            n = len(pl)
            labels = np.random.randint(2, size=(n,))
            print(self.node_name + ': Number of 1st half set:', n - np.sum(labels),
                  'Number of 2nd half set:', np.sum(labels))
            for j in range(n):
                p = pl[j]
                p.setClass(labels[j])

        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq

        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)

            # construct a new job by updating the reference and the frequency
            # here the job.particleList is actually ParticleListSet
            new_job = MultiDefocusJob(job.particleList, new_reference, job.mask, job.peak_offset,
                                      job.sampleInformation, job.bw_range, new_freq, job.destination,
                                      job.max_iter - i, job.r_score, job.weighting, job.bfactor)

            # distribute it
            num_all_particles = self.distribute_job(new_job, verbose)

            # calculate the denominator
            sum_ctf_squared = None
            for pair in job.particleList.pairs:
                if sum_ctf_squared is None:
                    sum_ctf_squared = pair.get_ctf_sqr_vol() * pair.snr
                else:
                    sum_ctf_squared += pair.get_ctf_sqr_vol() * pair.snr

            # get the result back
            all_even_pre = None
            all_even_wedge = None
            all_odd_pre = None
            all_odd_wedge = None
            pls = []
            for j in range(len(job.particleList.pairs)):
                pls.append(ParticleList())

            for j in range(self.num_workers):
                result = self.get_result()

                pair_id = self.assignment[result.worker_id]
                pair = job.particleList.pairs[pair_id]

                pl = pls[pair_id]
                pl += result.pl
                even_pre, even_wedge, odd_pre, odd_wedge = self.retrieve_res_vols(result.name)

                if all_even_pre:
                    all_even_pre += even_pre * pair.snr
                    all_even_wedge += even_wedge
                    all_odd_pre += odd_pre * pair.snr
                    all_odd_wedge += odd_wedge
                else:
                    all_even_pre = even_pre * pair.snr
                    all_even_wedge = even_wedge
                    all_odd_pre = odd_pre * pair.snr
                    all_odd_wedge = odd_wedge

            # write the new particle list to the disk
            for j in range(len(job.particleList.pairs)):
                pls[j].toXMLFile('aligned_pl' + str(j) + '_iter' + str(i) + '.xml')

            # correct for the number of particles in wiener filter
            sum_ctf_squared = sum_ctf_squared / num_all_particles
            # all_even_pre = all_even_pre/(num_all_particles/2)
            # all_odd_pre = all_odd_pre/(num_all_particles/2)

            # bfactor
            if job.bfactor and job.bfactor != 'None':
                # bfactor_kernel = create_bfactor_vol(sum_ctf_squared.sizeX(), job.sampleInformation.getPixelSize(), job.bfactor)
                bfactor_kernel = read(job.bfactor)
                bfactor_kernel_sqr = vol(bfactor_kernel)
                power(bfactor_kernel_sqr, 2)
                all_even_pre = convolute(all_even_pre, bfactor_kernel, True)
                all_odd_pre = convolute(all_odd_pre, bfactor_kernel, True)
                sum_ctf_squared = sum_ctf_squared * bfactor_kernel_sqr

            # create averages of two sets
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            even = self.create_average(all_even_pre, sum_ctf_squared, all_even_wedge)  # assume that the CTF sum is the same for the even and odd
            odd = self.create_average(all_odd_pre, sum_ctf_squared, all_odd_wedge)

            # determine the transformation between even and odd
            # here we assume the wedge from both sets are fully sampled
            from sh_alignment.frm import frm_align
            pos, angle, score = frm_align(odd, None, even, None, job.bw_range, new_freq, job.peak_offset)
            print(self.node_name + ': transform of even set to match the odd set - shift: ' + str(pos) + ' rotation: ' + str(angle))

            # transform the odd set accordingly
            from pytom_volume import vol, transformSpline
            from pytom.basic.fourier import ftshift
            from pytom_volume import reducedToFull
            from pytom_freqweight import weight
            transformed_odd_pre = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())
            full_all_odd_wedge = reducedToFull(all_odd_wedge)
            ftshift(full_all_odd_wedge)
            odd_weight = weight(full_all_odd_wedge)  # the funny part of pytom
            transformed_odd = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())

            transformSpline(all_odd_pre, transformed_odd_pre, -angle[1], -angle[0], -angle[2],
                            int(odd.sizeX() / 2), int(odd.sizeY() / 2), int(odd.sizeZ() / 2),
                            -(pos[0] - odd.sizeX() / 2), -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
            odd_weight.rotate(-angle[1], -angle[0], -angle[2])
            transformed_odd_wedge = odd_weight.getWeightVolume(True)
            transformSpline(odd, transformed_odd, -angle[1], -angle[0], -angle[2],
                            int(odd.sizeX() / 2), int(odd.sizeY() / 2), int(odd.sizeZ() / 2),
                            -(pos[0] - odd.sizeX() / 2), -(pos[1] - odd.sizeY() / 2),
                            -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)

            all_odd_pre = transformed_odd_pre
            all_odd_wedge = transformed_odd_wedge
            odd = transformed_odd

            # apply symmetries before determining the resolution
            # with gold standard you should be careful about applying the symmetry!
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)

            # write the half set to the disk
            even.write('fsc_' + str(i) + '_even.em')
            odd.write('fsc_' + str(i) + '_odd.em')

            current_resolution = bandToAngstrom(resolutionBand, job.sampleInformation.getPixelSize(), numberBands, 1)
            if verbose:
                print(self.node_name + ': current resolution ' + str(current_resolution), resNyquist)

            # create new average
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            # all_even_pre = all_even_pre/2  # correct for the number of particles in wiener filter
            average = self.create_average(all_even_pre, sum_ctf_squared, all_even_wedge)

            # apply symmetries
            average = job.symmetries.applyToParticle(average)

            # filter average to resolution and update the new reference
            average_name = 'average_iter' + str(i) + '.em'
            average.write(average_name)

            # update the references
            new_reference = [Reference('fsc_' + str(i) + '_even.em'),
                             Reference('fsc_' + str(i) + '_odd.em')]

            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand), ceil(resolutionBand) / 10)
            filtered_ref_name = 'average_iter' + str(i) + '_res' + str(current_resolution) + '.em'
            filtered[0].write(filtered_ref_name)

            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(self.node_name + ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!' % job.adaptive_res)
                    new_freq = int((1 + job.adaptive_res) * old_freq)
                else:  # always increase by 1
                    print(self.node_name + ': Determined resolution gets worse. Increase the frequency to be aligned by 1!')
                    new_freq = old_freq + 1
                old_freq = new_freq
            else:
                old_freq = new_freq

            if new_freq >= numberBands:
                print(self.node_name + ': Determined frequency too high. Terminate!')
                break

            if verbose:
                print(self.node_name + ': change the frequency to ' + str(new_freq))

        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
def start(self, job, verbose=False):
    if self.mpi_id == 0:
        from pytom.basic.structures import ParticleList, Reference
        from pytom.basic.resolution import bandToAngstrom
        from pytom.basic.filter import lowpassFilter
        from math import ceil
        from pytom.basic.fourier import convolute
        from pytom_volume import vol, power, read

        new_reference = job.reference
        old_freq = job.freq
        new_freq = job.freq

        # main node
        for i in range(job.max_iter):
            if verbose:
                print(self.node_name + ': starting iteration %d ...' % i)

            # construct a new job by updating the reference and the frequency
            # here the job.particleList is actually ParticleListSet
            new_job = MultiDefocusJob(job.particleList, new_reference, job.mask, job.peak_offset,
                                      job.sampleInformation, job.bw_range, new_freq, job.destination,
                                      job.max_iter - i, job.r_score, job.weighting, job.bfactor)

            # distribute it
            num_all_particles = self.distribute_job(new_job, verbose)

            # calculate the denominator
            sum_ctf_squared = None
            for pair in job.particleList.pairs:
                if sum_ctf_squared is None:
                    sum_ctf_squared = pair.get_ctf_sqr_vol() * pair.snr
                else:
                    sum_ctf_squared += pair.get_ctf_sqr_vol() * pair.snr

            # get the result back
            all_even_pre = None
            all_even_wedge = None
            all_odd_pre = None
            all_odd_wedge = None
            pls = []
            for j in range(len(job.particleList.pairs)):
                pls.append(ParticleList())

            for j in range(self.num_workers):
                result = self.get_result()

                pair_id = self.assignment[result.worker_id]
                pair = job.particleList.pairs[pair_id]

                pl = pls[pair_id]
                pl += result.pl
                even_pre, even_wedge, odd_pre, odd_wedge = self.retrieve_res_vols(result.name)

                if all_even_pre:
                    all_even_pre += even_pre * pair.snr
                    all_even_wedge += even_wedge
                    all_odd_pre += odd_pre * pair.snr
                    all_odd_wedge += odd_wedge
                else:
                    all_even_pre = even_pre * pair.snr
                    all_even_wedge = even_wedge
                    all_odd_pre = odd_pre * pair.snr
                    all_odd_wedge = odd_wedge

            # write the new particle list to the disk
            for j in range(len(job.particleList.pairs)):
                pls[j].toXMLFile('aligned_pl' + str(j) + '_iter' + str(i) + '.xml')

            # correct for the number of particles in wiener filter
            sum_ctf_squared = sum_ctf_squared / num_all_particles
            # all_even_pre = all_even_pre/(num_all_particles/2)
            # all_odd_pre = all_odd_pre/(num_all_particles/2)

            # bfactor
            if job.bfactor and job.bfactor != 'None':
                # bfactor_kernel = create_bfactor_vol(sum_ctf_squared.sizeX(), job.sampleInformation.getPixelSize(), job.bfactor)
                bfactor_kernel = read(job.bfactor)
                bfactor_kernel_sqr = vol(bfactor_kernel)
                power(bfactor_kernel_sqr, 2)
                all_even_pre = convolute(all_even_pre, bfactor_kernel, True)
                all_odd_pre = convolute(all_odd_pre, bfactor_kernel, True)
                sum_ctf_squared = sum_ctf_squared * bfactor_kernel_sqr

            # determine the resolution
            if verbose:
                print(self.node_name + ': determining the resolution ...')
            even = self.create_average(all_even_pre, sum_ctf_squared, all_even_wedge)  # assume that the CTF sum is the same for the even and odd
            odd = self.create_average(all_odd_pre, sum_ctf_squared, all_odd_wedge)

            # apply symmetries before determining the resolution
            even = job.symmetries.applyToParticle(even)
            odd = job.symmetries.applyToParticle(odd)
            resNyquist, resolutionBand, numberBands = self.determine_resolution(
                even, odd, job.fsc_criterion, None, job.mask, verbose)

            # write the half set to the disk
            even.write('fsc_' + str(i) + '_even.em')
            odd.write('fsc_' + str(i) + '_odd.em')

            current_resolution = bandToAngstrom(resolutionBand, job.sampleInformation.getPixelSize(), numberBands, 1)
            if verbose:
                print(self.node_name + ': current resolution ' + str(current_resolution), resNyquist)

            # create new average
            all_even_pre += all_odd_pre
            all_even_wedge += all_odd_wedge
            # all_even_pre = all_even_pre/2  # correct for the number of particles in wiener filter
            average = self.create_average(all_even_pre, sum_ctf_squared, all_even_wedge)

            # apply symmetries
            average = job.symmetries.applyToParticle(average)

            # filter average to resolution and update the new reference
            average_name = 'average_iter' + str(i) + '.em'
            average.write(average_name)
            new_reference = Reference(average_name)

            # low pass filter the reference and write it to the disk
            filtered = lowpassFilter(average, ceil(resolutionBand), ceil(resolutionBand) / 10)
            filtered_ref_name = 'average_iter' + str(i) + '_res' + str(current_resolution) + '.em'
            filtered[0].write(filtered_ref_name)

            # change the frequency to a higher value
            new_freq = int(ceil(resolutionBand)) + 1
            if new_freq <= old_freq:
                if job.adaptive_res is not False:  # two different strategies
                    print(self.node_name + ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!' % job.adaptive_res)
                    new_freq = int((1 + job.adaptive_res) * old_freq)
                else:  # always increase by 1
                    print(self.node_name + ': Determined resolution gets worse. Increase the frequency to be aligned by 1!')
                    new_freq = old_freq + 1
                old_freq = new_freq
            else:
                old_freq = new_freq

            if new_freq >= numberBands:
                print(self.node_name + ': Determined frequency too high. Terminate!')
                break

            if verbose:
                print(self.node_name + ': change the frequency to ' + str(new_freq))

        # send end signal to other nodes and terminate itself
        self.end(verbose)
    else:
        # other nodes
        self.run(verbose)
if v1Filename and v2Filename:
    print('Resolution determined for ', v1Filename, ' and ', v2Filename)
elif particleList:
    print('Resolution determined for ', particleList, ' ')

print('')
print('FSC Criterion: ', fscCriterion)
print('Number of Bands: ', numberBands)
print('')
print('Nyquist: ', r[0])
print('Band: ', r[1])

if pixelSize:
    from pytom.basic.resolution import bandToAngstrom
    print('Resolution determined for pixelsize : ', pixelSize, ' at ',
          bandToAngstrom(r[1], pixelSize, numberBands), ' Angstrom')
else:
    print('XML')

if plot and v1Filename and v2Filename:
    import matplotlib
    import numpy
    matplotlib.use('Qt5Agg')
    from pylab import subplots, savefig, show

    fig, ax = subplots(1, 1, figsize=(10, 5))
    ax.plot(f, label='FSC orig')
    if randomize:
        ax.plot(fsc_rand, label='FSC rand')
        ax.plot(fsc_corr, label='FSC corrected')
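
# The curve `f` plotted above is a Fourier Shell Correlation between the two half maps.
# A minimal numpy sketch of how such a curve can be computed (illustrative only; PyTom has
# its own FSC routine, and this does not reproduce its exact binning, masking or phase
# randomization used for the corrected curve):
import numpy as np

def fsc_curve(v1, v2):
    """FSC per integer frequency shell for two equally sized 3D numpy arrays."""
    f1 = np.fft.fftshift(np.fft.fftn(v1))
    f2 = np.fft.fftshift(np.fft.fftn(v2))
    # radial frequency index of every voxel, measured from the volume centre
    grids = np.indices(v1.shape) - (np.array(v1.shape) // 2)[:, None, None, None]
    radius = np.round(np.sqrt((grids ** 2).sum(axis=0))).astype(int)
    n_shells = min(v1.shape) // 2
    fsc = np.zeros(n_shells)
    for r in range(n_shells):
        shell = radius == r
        num = np.real(np.sum(f1[shell] * np.conj(f2[shell])))
        den = np.sqrt(np.sum(np.abs(f1[shell]) ** 2) * np.sum(np.abs(f2[shell]) ** 2))
        fsc[r] = num / den if den != 0 else 0.0
    return fsc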