Esempio n. 1
0
    def cc(self, rot, trans):
        """
        Compute the plain cross-correlation for a given rotation and
        translation of vol2 - unlike evalScore, the value is NOT negated.

        @param rot: rotation of vol2
        @type rot: L{pytom.basic.Rotation}
        @param trans: translation of vol2
        @type trans: L{pytom.basic.Shift}
        @return: cc
        @rtype: L{float}
        @author: FF
        """
        from pytom_volume import transformSpline

        # rotate vol2 around (centX, centY, centZ) and shift it;
        # translations are given in unbinned pixels, hence the division
        scale = self.binning
        transformSpline(self.vol2, self.rotvol2,
                        rot[0], rot[1], rot[2],
                        self.centX, self.centY, self.centZ,
                        0, 0, 0,
                        trans[0] / scale, trans[1] / scale, trans[2] / scale)
        # score the transformed volume against the reference
        return self.score(volume=self.vol1, template=self.rotvol2,
                          mask=self.mask, volumeIsNormalized=True)
Esempio n. 2
0
    def evalScore(self, rot_trans):
        """
        Evaluate score for given rotation and translation - NEGATIVE cc
        because the optimizer MINIMIZES.

        @param rot_trans: rotation and translation vector (6-dim: z1,z2,x \
            angles, x,y,z, translations)
        @type rot_trans: L{list}
        @author: FF
        """
        from pytom_volume import transformSpline, transform

        # select interpolation kernel once, then apply it with identical args
        if self.interpolation.lower() == 'spline':
            apply_transform = transformSpline
        else:
            apply_transform = transform
        apply_transform(self.vol2, self.rotvol2,
                        rot_trans[0], rot_trans[1], rot_trans[2],
                        self.centX, self.centY, self.centZ,
                        0, 0, 0,
                        rot_trans[3] / self.binning,
                        rot_trans[4] / self.binning,
                        rot_trans[5] / self.binning)
        # negate the CC so that minimizing self.val maximizes correlation
        self.val = -1. * (self.score(volume=self.vol1,
                                     template=self.rotvol2,
                                     mask=self.mask,
                                     volumeIsNormalized=True))
        return self.val
Esempio n. 3
0
def calculate_difference_map(v1,
                             band1,
                             v2,
                             band2,
                             mask=None,
                             focus_mask=None,
                             align=True,
                             sigma=None,
                             threshold=0.4):
    """Compute a thresholded STD (difference) map between two volumes.

    mask is for alignment, while focus_mask is for the difference map.

    @param v1: first volume, used as the alignment reference
    @param band1: lowpass band applied to v1
    @param v2: second volume
    @param band2: lowpass band applied to v2
    @param mask: mask used during FRM alignment of v2 onto v1
    @param focus_mask: mask restricting the difference map to a region
    @param align: if True, align v2 to v1 via frm_align before comparison
    @param sigma: density cutoff; None/0 keeps everything, negative keeps
        only densities below sigma, positive keeps only densities above it
    @param threshold: relative threshold (0..1) applied between the mean and
        the maximum of the STD map
    @return: (std_map1, std_map2) - binarized difference map in v1's frame
        and the same map transformed back into v2's original frame
    """
    from pytom_volume import vol, power, abs, limit, transformSpline, variance, mean, max, min
    from pytom.basic.normalise import mean0std1
    from pytom.basic.filter import lowpassFilter

    # do lowpass filtering first
    lv1 = lowpassFilter(v1, band1, band1 / 10.)[0]
    lv2 = lowpassFilter(v2, band2, band2 / 10.)[0]

    # do alignment of two volumes, if required. v1 is used as reference.
    if align:
        from sh_alignment.frm import frm_align
        # align in the common resolution range of both volumes
        band = int(band1 if band1 < band2 else band2)
        pos, angle, score = frm_align(lv2, None, lv1, None, [4, 64], band,
                                      lv1.sizeX() // 4, mask)
        # convert the peak position into a shift relative to the volume center
        shift = [
            pos[0] - v1.sizeX() // 2, pos[1] - v1.sizeY() // 2,
            pos[2] - v1.sizeZ() // 2
        ]

        # transform v2 (inverse rotation/shift brings it into v1's frame)
        lvv2 = vol(lv2)
        transformSpline(lv2, lvv2, -angle[1], -angle[0], -angle[2],
                        lv2.sizeX() // 2,
                        lv2.sizeY() // 2,
                        lv2.sizeZ() // 2, -shift[0], -shift[1], -shift[2], 0,
                        0, 0)
    else:
        lvv2 = lv2

    # do normalization (zero mean, unit std; in-place)
    mean0std1(lv1)
    mean0std1(lvv2)

    # only consider the density beyond certain sigma
    # NOTE(review): the positional flags of pytom's limit() are opaque here -
    # presumably (lowval, lowreplace, highval, highreplace, dolow, dohigh);
    # confirm against pytom_volume documentation
    if sigma is None or sigma == 0:
        pass
    elif sigma < 0:  # negative density counts
        assert min(lv1) < sigma
        assert min(lvv2) < sigma
        limit(lv1, 0, 0, sigma, 0, False, True)
        limit(lvv2, 0, 0, sigma, 0, False, True)
    else:  # positive density counts
        assert max(lv1) > sigma
        assert max(lvv2) > sigma
        limit(lv1, sigma, 0, 0, 0, True, False)
        limit(lvv2, sigma, 0, 0, 0, True, False)

    # if we want to focus on specific area only
    if focus_mask:
        lv1 *= focus_mask
        lvv2 *= focus_mask

    # calculate the STD map: per-voxel deviation of each volume from the mean
    avg = (lv1 + lvv2) / 2
    var1 = avg - lv1
    power(var1, 2)
    var2 = avg - lvv2
    power(var2, 2)

    std_map = var1 + var2
    power(std_map, 0.5)

    # calculate the coefficient of variance map
    # std_map = std_map/abs(avg)

    if focus_mask:
        std_map *= focus_mask

    # threshold the STD map (relative threshold between mean and max)
    mv = mean(std_map)
    threshold = mv + (max(std_map) - mv) * threshold
    limit(std_map, threshold, 0, threshold, 1, True, True)

    # do a lowpass filtering to smooth the binarized map
    std_map1 = lowpassFilter(std_map, v1.sizeX() // 4, v1.sizeX() / 40.)[0]

    if align:
        # transform the map back into v2's original frame (forward rotation/shift)
        std_map2 = vol(std_map)
        transformSpline(std_map1, std_map2, angle[0], angle[1], angle[2],
                        v1.sizeX() // 2,
                        v1.sizeY() // 2,
                        v1.sizeZ() // 2, 0, 0, 0, shift[0], shift[1], shift[2])
    else:
        std_map2 = std_map1

    # re-binarize after the smoothing/interpolation
    limit(std_map1, 0.5, 0, 1, 1, True, True)
    limit(std_map2, 0.5, 0, 1, 1, True, True)

    # return the respective difference maps
    return (std_map1, std_map2)
Esempio n. 4
0
def compareTwoVolumes(particle,reference,referenceWeighting,wedgeInfo,rotations,shift,
         scoreObject=0,mask=None,preprocessing=None,binning=1,verbose=False):
    """
    compare: Compares two volumes
    @param particle: A particle
    @param reference: A reference
    @param referenceWeighting: Fourier weighting of the reference (sum of wedges for instance)
    @param wedgeInfo: What does the wedge look alike?
    @type wedgeInfo: L{pytom.basic.structures.WedgeInfo}
    @param rotations: All rotations to be scanned
    @type rotations: Must be a L{pytom.angles.angleList.OneAngleList}
    @param shift: shift of the reference (divided by binning before use)
    @param scoreObject: score object; defaults to nxcfScore when left at 0
    @type scoreObject: L{pytom.score.score.Score}
    @param mask: mask object applied to the simulated reference (required in practice)
    @param preprocessing: Class storing preprocessing of particle and reference such as bandpass.
        Defaults to a fresh no-op Preprocessing when None.
    @type preprocessing: L{pytom.alignment.preprocessing.Preprocessing}
    @param binning: Is binning applied (Disabled when == 1)
    @param verbose: print info about the weighting volumes
    @return: Returns the correlation coefficient of two volumes.
    @author: Thomas Hrabe
    """

    from pytom_volume import vol,transformSpline
    from pytom.basic.filter import filter,rotateWeighting
    from pytom.angles.angleList import OneAngleList

    assert particle.__class__ == vol, "particle not of type vol"
    assert reference.__class__ == vol, "reference not of type vol"
    assert referenceWeighting.__class__ == vol or referenceWeighting.__class__ == str
    assert rotations.__class__ == OneAngleList or len(rotations) == 1

    if scoreObject == 0:
        from pytom.score.score import nxcfScore
        scoreObject = nxcfScore()

    # bugfix: the old default preprocessing=[] was a mutable default argument,
    # and a plain list has no apply() - use a no-op Preprocessing instead
    if preprocessing is None:
        from pytom.alignment.preprocessing import Preprocessing
        preprocessing = Preprocessing()

    particleCopy = vol(particle.sizeX(),particle.sizeY(),particle.sizeZ())

    # process particle: apply missing wedge and preprocessing (e.g. bandpass)
    particle = wedgeInfo.apply(particle)
    particle = preprocessing.apply(particle,True)
    particleCopy.copyVolume(particle)

    # manipulate reference: take the single rotation to evaluate
    currentRotation = rotations.nextRotation()

    sizeX = particle.sizeX()
    sizeY = particle.sizeY()
    sizeZ = particle.sizeZ()

    centerX = sizeX/2.0
    centerY = sizeY/2.0
    centerZ = sizeZ/2.0

    simulatedVol = vol(sizeX,sizeY,sizeZ)

    # rotate and shift the reference (shift is given in unbinned pixels)
    transformSpline(reference,simulatedVol,currentRotation[0],currentRotation[1],currentRotation[2],
                    centerX,centerY,centerZ,0,0,0,shift[0]/binning,shift[1]/binning,shift[2]/binning)

    # give the simulated reference the same wedge/mask/preprocessing as the particle
    simulatedVol = wedgeInfo.apply(simulatedVol)
    m = mask.getVolume(currentRotation)
    simulatedVol = simulatedVol * m

    simulatedVol = preprocessing.apply(simulatedVol,True)

    if not referenceWeighting.__class__ == str:
        from pytom_freqweight import weight

        # rotate the Fourier weighting consistently with the reference
        weightingRotated = rotateWeighting(weighting=referenceWeighting, z1=currentRotation[0], z2=currentRotation[1],
                                           x=currentRotation[2], isReducedComplex=None, returnReducedComplex=True,
                                           binarize=False)
        if verbose:
            particleCopy.info('pc')
            weightingRotated.info('wr')

        # NOTE: filter here is pytom.basic.filter.filter, not the builtin
        r = list(filter(particleCopy,weight(weightingRotated)))
        particleCopy = r[0]

    scoringResult = scoreObject.scoringCoefficient(particleCopy,simulatedVol,m)

    if scoringResult.__class__ == list:
        scoringResult = scoringResult[0]

    # NaN is the only value not equal to itself - cheap NaN guard
    assert scoringResult == scoringResult, "scoringResult possibly NaN"

    return scoringResult
Esempio n. 5
0
    def start(self, job, verbose=False):
        """
        Master/worker entry point for iterative gold-standard FRM alignment.

        Rank 0 splits the particle list into two half sets, then per
        iteration: distributes an alignment job, accumulates the per-worker
        sums/wedges of each half set, builds even/odd averages, aligns the
        odd set to the even one, determines the FSC resolution, writes half
        maps and a filtered average, and adapts the alignment frequency.
        All other ranks run the worker loop.

        @param job: FRM alignment job (particle list, reference, mask,
            frequency, destination, iteration count, ...)
        @param verbose: print progress information
        """
        if self.mpi_id == 0:
            from pytom.basic.structures import ParticleList, Reference
            from pytom.basic.resolution import bandToAngstrom
            from pytom.basic.filter import lowpassFilter
            from math import ceil

            # randomly split the particle list into 2 half sets
            # (only if the list does not already carry exactly 2 class labels)
            if len(job.particleList.splitByClass()) != 2:
                import numpy as np
                n = len(job.particleList)
                labels = np.random.randint(2, size=(n, ))
                print(self.node_name + ': Number of 1st half set:',
                      n - np.sum(labels), 'Number of 2nd half set:',
                      np.sum(labels))
                for i in range(n):
                    p = job.particleList[i]
                    p.setClass(labels[i])

            self.destination = job.destination
            new_reference = job.reference
            old_freq = job.freq
            new_freq = job.freq
            # main node
            for i in range(job.max_iter):
                if verbose:
                    print(self.node_name + ': starting iteration %d ...' % i)

                # construct a new job by updating the reference and the frequency
                new_job = FRMJob(job.particleList, new_reference, job.mask,
                                 job.peak_offset, job.sampleInformation,
                                 job.bw_range, new_freq, job.destination,
                                 job.max_iter - i, job.r_score, job.weighting)

                # distribute it
                self.distribute_job(new_job, verbose)

                # get the result back
                all_even_pre = None  # the 1st set
                all_even_wedge = None
                all_odd_pre = None  # the 2nd set
                all_odd_wedge = None
                pl = ParticleList()
                for j in range(self.num_workers):
                    result = self.get_result()
                    pl += result.pl
                    pre, wedge = self.retrieve_res_vols(result.name)

                    # accumulate into the half set this worker was assigned to
                    if self.assignment[result.worker_id] == 0:
                        if all_even_pre:
                            all_even_pre += pre
                            all_even_wedge += wedge
                        else:
                            all_even_pre = pre
                            all_even_wedge = wedge
                    else:
                        if all_odd_pre:
                            all_odd_pre += pre
                            all_odd_wedge += wedge
                        else:
                            all_odd_pre = pre
                            all_odd_wedge = wedge

                # write the new particle list to the disk
                pl.toXMLFile('aligned_pl_iter' + str(i) + '.xml')

                # create the averages separately
                if verbose:
                    print(self.node_name + ': determining the resolution ...')
                even = self.create_average(all_even_pre, all_even_wedge)
                odd = self.create_average(all_odd_pre, all_odd_wedge)

                # apply symmetries if any
                even = job.symmetries.applyToParticle(even)
                odd = job.symmetries.applyToParticle(odd)

                # determine the transformation between even and odd
                # here we assume the wedge from both sets are fully sampled
                from sh_alignment.frm import frm_align
                pos, angle, score = frm_align(odd, None, even, None,
                                              job.bw_range, new_freq,
                                              job.peak_offset)
                print(self.node_name +
                      'Transform of even set to match the odd set - shift: ' +
                      str(pos) + ' rotation: ' + str(angle))

                # transform the odd set accordingly
                from pytom_volume import vol, transformSpline
                from pytom.basic.fourier import ftshift
                from pytom_volume import reducedToFull
                from pytom_freqweight import weight
                transformed_odd_pre = vol(odd.sizeX(), odd.sizeY(),
                                          odd.sizeZ())
                full_all_odd_wedge = reducedToFull(all_odd_wedge)
                ftshift(full_all_odd_wedge)
                odd_weight = weight(
                    full_all_odd_wedge)  # the funny part of pytom
                transformed_odd = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())

                # NOTE(review): centers/shifts here use true division (floats),
                # unlike the int(...) variant in the multi-defocus start() -
                # confirm transformSpline accepts float centers
                transformSpline(all_odd_pre, transformed_odd_pre, -angle[1],
                                -angle[0], -angle[2],
                                odd.sizeX() / 2,
                                odd.sizeY() / 2,
                                odd.sizeZ() / 2, -(pos[0] - odd.sizeX() / 2),
                                -(pos[1] - odd.sizeY() / 2),
                                -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
                odd_weight.rotate(-angle[1], -angle[0], -angle[2])
                transformed_odd_wedge = odd_weight.getWeightVolume(True)
                transformSpline(odd, transformed_odd, -angle[1], -angle[0],
                                -angle[2],
                                odd.sizeX() / 2,
                                odd.sizeY() / 2,
                                odd.sizeZ() / 2, -(pos[0] - odd.sizeX() / 2),
                                -(pos[1] - odd.sizeY() / 2),
                                -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)

                all_odd_pre = transformed_odd_pre
                all_odd_wedge = transformed_odd_wedge
                odd = transformed_odd

                # determine resolution
                resNyquist, resolutionBand, numberBands = self.determine_resolution(
                    even, odd, job.fsc_criterion, None, job.mask, verbose)

                # write the half set to the disk
                # (os is expected to be imported at module level)
                even.write(
                    os.path.join(self.destination,
                                 'fsc_' + str(i) + '_even.em'))
                odd.write(
                    os.path.join(self.destination,
                                 'fsc_' + str(i) + '_odd.em'))

                current_resolution = bandToAngstrom(
                    resolutionBand, job.sampleInformation.getPixelSize(),
                    numberBands, 1)
                if verbose:
                    print(
                        self.node_name + ': current resolution ' +
                        str(current_resolution), resNyquist)

                # create new average by merging both half sets
                all_even_pre += all_odd_pre
                all_even_wedge += all_odd_wedge
                average = self.create_average(all_even_pre, all_even_wedge)

                # apply symmetries
                average = job.symmetries.applyToParticle(average)

                # filter average to resolution
                average_name = os.path.join(self.destination,
                                            'average_iter' + str(i) + '.em')
                average.write(average_name)

                # update the references (one per half set for gold standard)
                new_reference = [
                    Reference(
                        os.path.join(self.destination,
                                     'fsc_' + str(i) + '_even.em')),
                    Reference(
                        os.path.join(self.destination,
                                     'fsc_' + str(i) + '_odd.em'))
                ]

                # low pass filter the reference and write it to the disk
                filtered = lowpassFilter(average, ceil(resolutionBand),
                                         ceil(resolutionBand) / 10)
                filtered_ref_name = os.path.join(
                    self.destination, 'average_iter' + str(i) + '_res' +
                    str(current_resolution) + '.em')
                filtered[0].write(filtered_ref_name)

                # if the position/orientation is not improved, break it

                # change the frequency to a higher value
                new_freq = int(ceil(resolutionBand)) + 1
                if new_freq <= old_freq:
                    if job.adaptive_res is not False:  # two different strategies
                        print(
                            self.node_name +
                            ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!'
                            % job.adaptive_res)
                        new_freq = int((1 + job.adaptive_res) * old_freq)
                    else:  # always increase by 1
                        print(
                            self.node_name +
                            ': Determined resolution gets worse. Increase the frequency to be aligned by 1!'
                        )
                        new_freq = old_freq + 1
                        old_freq = new_freq
                else:
                    old_freq = new_freq
                if new_freq >= numberBands:
                    print(self.node_name +
                          ': New frequency too high. Terminate!')
                    break

                if verbose:
                    print(self.node_name + ': change the frequency to ' +
                          str(new_freq))

            # send end signal to other nodes and terminate itself
            self.end(verbose)
        else:
            # other nodes
            self.run(verbose)
Esempio n. 6
0
def alignVolumesAndFilterByFSC(vol1,
                               vol2,
                               mask=None,
                               nband=None,
                               iniRot=None,
                               iniTrans=None,
                               interpolation='linear',
                               fsc_criterion=0.143,
                               verbose=0):
    """
    align two volumes, compute their FSC, and filter by FSC
    @param vol1: volume 1
    @param vol2: volume 2
    @mask: mask volume
    @type mask: L{pytom_volume.vol}
    @param nband: Number of bands
    @type nband: L{int}
    @param iniRot: initial guess for rotation
    @param iniTrans: initial guess for translation
    @param interpolation: interpolation type - 'linear' (default) or 'spline'
    @param fsc_criterion: filter -> 0 according to resolution criterion
    @type fsc_criterion: float
    @param verbose: verbose level (0=mute, 1 some output, 2=talkative)
    @type verbose: int
    @type interpolation: str
    @return: (filvol1, filvol2, fsc, fsc_fil, optiRot, optiTrans) i.e., filtered volumes, their FSC, the corresponding\
        filter that was applied to the volumes, and the optimal rotation and translation of vol2 with respect to vol1\
        note: filvol2 is NOT rotated and translated!
    @author: FF
    """
    from pytom_volume import transformSpline, vol
    from pytom.basic.correlation import FSC
    from pytom.basic.filter import filter_volume_by_profile
    from pytom.alignment.localOptimization import Alignment
    from pytom.basic.correlation import nxcc

    # bugfix: isinstance() takes no keyword arguments - the original
    # isinstance(object=..., class_or_type_or_tuple=...) raised TypeError
    assert isinstance(vol1, vol), \
        "alignVolumesAndFilterByFSC: vol1 must be of type vol"
    assert isinstance(vol2, vol), \
        "alignVolumesAndFilterByFSC: vol2 must be of type vol"
    # filter volumes prior to alignment according to SNR
    fsc = FSC(volume1=vol1, volume2=vol2, numberBands=nband)
    fil = design_fsc_filter(fsc=fsc, fildim=int(vol2.sizeX() / 2))
    #filter only one volume so that resulting CCC is weighted by SNR only once
    filvol2 = filter_volume_by_profile(volume=vol2, profile=fil)
    # align vol2 to vol1; propagate verbosity to the optimizer only at level 2
    alignment = Alignment(vol1=vol1,
                          vol2=filvol2,
                          score=nxcc,
                          mask=mask,
                          iniRot=iniRot,
                          iniTrans=iniTrans,
                          opti='fmin_powell',
                          interpolation=interpolation,
                          verbose=verbose if verbose == 2 else False)
    optiScore, optiRot, optiTrans = alignment.localOpti(iniRot=iniRot,
                                                        iniTrans=iniTrans)
    if verbose:
        from pytom.angles.angleFnc import differenceAngleOfTwoRotations
        from pytom.basic.structures import Rotation
        diffAng = differenceAngleOfTwoRotations(rotation1=Rotation(0, 0, 0),
                                                rotation2=optiRot)
        print(
            "Alignment densities: Rotations: %2.3f, %2.3f, %2.3f; Translations: %2.3f, %2.3f, %2.3f "
            % (optiRot[0], optiRot[1], optiRot[2], optiTrans[0], optiTrans[1],
               optiTrans[2]))
        print("Orientation difference: %2.3f deg" % diffAng)
    vol2_alig = vol(vol2.sizeX(), vol2.sizeY(), vol2.sizeZ())
    # bugfix: z-center was int(vol2.sizeY()/2) (copy/paste) - must be sizeZ
    transformSpline(vol2, vol2_alig, optiRot[0], optiRot[1], optiRot[2],
                    int(vol2.sizeX() / 2), int(vol2.sizeY() / 2),
                    int(vol2.sizeZ() / 2), 0, 0, 0, optiTrans[0],
                    optiTrans[1], optiTrans[2])
    # finally compute FSC and filter of both volumes
    if not nband:
        nband = int(vol2.sizeX() / 2)
    fsc = FSC(volume1=vol1, volume2=vol2_alig, numberBands=nband)
    fil = design_fsc_filter(fsc=fsc,
                            fildim=int(vol2.sizeX() / 2),
                            fsc_criterion=fsc_criterion)
    filvol1 = filter_volume_by_profile(volume=vol1, profile=fil)
    #filvol2 = filter_volume_by_profile( volume=vol2_alig, profile=fil)
    filvol2 = filter_volume_by_profile(volume=vol2, profile=fil)

    return (filvol1, filvol2, fsc, fil, optiRot, optiTrans)
Esempio n. 7
0
    def start(self, job, verbose=False):
        """
        Master/worker entry point for iterative gold-standard FRM alignment
        with per-defocus-pair CTF (Wiener-filter) correction.

        Rank 0 randomly splits every defocus pair's phase-flipped particle
        list into two half sets, then per iteration: distributes the job,
        accumulates SNR-weighted sums and wedges per half set, applies an
        optional B-factor kernel, builds even/odd averages with the summed
        CTF^2 denominator, aligns the odd set to the even one, determines
        the FSC resolution, writes half maps and a filtered average, and
        adapts the alignment frequency. All other ranks run the worker loop.

        @param job: multi-defocus FRM job (ParticleListSet of defocus pairs,
            reference, mask, frequency, destination, bfactor, ...)
        @param verbose: print progress information
        """
        if self.mpi_id == 0:
            from pytom.basic.structures import ParticleList, Reference
            from pytom.basic.resolution import bandToAngstrom
            from pytom.basic.filter import lowpassFilter
            from math import ceil
            from pytom.basic.fourier import convolute
            from pytom_volume import vol, power, read

            # randomly split the particle list into 2 half sets
            import numpy as np
            num_pairs = len(job.particleList.pairs)
            for i in range(num_pairs):
                # randomize the class labels to indicate the two half sets
                pl = job.particleList.pairs[i].get_phase_flip_pl()
                n = len(pl)
                labels = np.random.randint(2, size=(n, ))
                print(self.node_name + ': Number of 1st half set:',
                      n - np.sum(labels), 'Number of 2nd half set:',
                      np.sum(labels))
                for j in range(n):
                    p = pl[j]
                    p.setClass(labels[j])

            new_reference = job.reference
            old_freq = job.freq
            new_freq = job.freq
            # main node
            for i in range(job.max_iter):
                if verbose:
                    print(self.node_name + ': starting iteration %d ...' % i)

                # construct a new job by updating the reference and the frequency
                # here the job.particleList is actually ParticleListSet
                new_job = MultiDefocusJob(job.particleList, new_reference,
                                          job.mask, job.peak_offset,
                                          job.sampleInformation, job.bw_range,
                                          new_freq, job.destination,
                                          job.max_iter - i, job.r_score,
                                          job.weighting, job.bfactor)

                # distribute it
                num_all_particles = self.distribute_job(new_job, verbose)

                # calculate the denominator: SNR-weighted sum of CTF^2 volumes
                sum_ctf_squared = None
                for pair in job.particleList.pairs:
                    if sum_ctf_squared is None:
                        sum_ctf_squared = pair.get_ctf_sqr_vol() * pair.snr
                    else:
                        sum_ctf_squared += pair.get_ctf_sqr_vol() * pair.snr

                # get the result back
                all_even_pre = None
                all_even_wedge = None
                all_odd_pre = None
                all_odd_wedge = None
                pls = []
                for j in range(len(job.particleList.pairs)):
                    pls.append(ParticleList())

                for j in range(self.num_workers):
                    result = self.get_result()

                    # map the worker back to its defocus pair
                    pair_id = self.assignment[result.worker_id]
                    pair = job.particleList.pairs[pair_id]

                    pl = pls[pair_id]
                    pl += result.pl
                    even_pre, even_wedge, odd_pre, odd_wedge = self.retrieve_res_vols(
                        result.name)

                    # accumulate SNR-weighted sums across all pairs
                    if all_even_pre:
                        all_even_pre += even_pre * pair.snr
                        all_even_wedge += even_wedge
                        all_odd_pre += odd_pre * pair.snr
                        all_odd_wedge += odd_wedge
                    else:
                        all_even_pre = even_pre * pair.snr
                        all_even_wedge = even_wedge
                        all_odd_pre = odd_pre * pair.snr
                        all_odd_wedge = odd_wedge

                # write the new particle list to the disk
                for j in range(len(job.particleList.pairs)):
                    pls[j].toXMLFile('aligned_pl' + str(j) + '_iter' + str(i) +
                                     '.xml')

                # correct for the number of particles in wiener filter
                sum_ctf_squared = sum_ctf_squared / num_all_particles
                #                all_even_pre = all_even_pre/(num_all_particles/2)
                #                all_odd_pre = all_odd_pre/(num_all_particles/2)

                # bfactor: convolute both half sets with the provided kernel
                if job.bfactor and job.bfactor != 'None':
                    #                    bfactor_kernel = create_bfactor_vol(sum_ctf_squared.sizeX(), job.sampleInformation.getPixelSize(), job.bfactor)
                    bfactor_kernel = read(job.bfactor)
                    bfactor_kernel_sqr = vol(bfactor_kernel)
                    power(bfactor_kernel_sqr, 2)
                    all_even_pre = convolute(all_even_pre, bfactor_kernel,
                                             True)
                    all_odd_pre = convolute(all_odd_pre, bfactor_kernel, True)
                    sum_ctf_squared = sum_ctf_squared * bfactor_kernel_sqr

                # create averages of two sets
                if verbose:
                    print(self.node_name + ': determining the resolution ...')
                even = self.create_average(
                    all_even_pre, sum_ctf_squared, all_even_wedge
                )  # assume that the CTF sum is the same for the even and odd
                odd = self.create_average(all_odd_pre, sum_ctf_squared,
                                          all_odd_wedge)

                # determine the transformation between even and odd
                # here we assume the wedge from both sets are fully sampled
                from sh_alignment.frm import frm_align
                pos, angle, score = frm_align(odd, None, even, None,
                                              job.bw_range, new_freq,
                                              job.peak_offset)
                print(
                    self.node_name +
                    ': transform of even set to match the odd set - shift: ' +
                    str(pos) + ' rotation: ' + str(angle))

                # transform the odd set accordingly
                from pytom_volume import vol, transformSpline
                from pytom.basic.fourier import ftshift
                from pytom_volume import reducedToFull
                from pytom_freqweight import weight
                transformed_odd_pre = vol(odd.sizeX(), odd.sizeY(),
                                          odd.sizeZ())
                full_all_odd_wedge = reducedToFull(all_odd_wedge)
                ftshift(full_all_odd_wedge)
                odd_weight = weight(
                    full_all_odd_wedge)  # the funny part of pytom
                transformed_odd = vol(odd.sizeX(), odd.sizeY(), odd.sizeZ())

                # inverse rotation/shift brings the odd set into the even frame
                transformSpline(all_odd_pre, transformed_odd_pre, -angle[1],
                                -angle[0], -angle[2], int(odd.sizeX() / 2),
                                int(odd.sizeY() / 2), int(odd.sizeZ() / 2),
                                -(pos[0] - odd.sizeX() / 2),
                                -(pos[1] - odd.sizeY() / 2),
                                -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)
                odd_weight.rotate(-angle[1], -angle[0], -angle[2])
                transformed_odd_wedge = odd_weight.getWeightVolume(True)
                transformSpline(odd, transformed_odd, -angle[1], -angle[0],
                                -angle[2], int(odd.sizeX() / 2),
                                int(odd.sizeY() / 2), int(odd.sizeZ() / 2),
                                -(pos[0] - odd.sizeX() / 2),
                                -(pos[1] - odd.sizeY() / 2),
                                -(pos[2] - odd.sizeZ() / 2), 0, 0, 0)

                all_odd_pre = transformed_odd_pre
                all_odd_wedge = transformed_odd_wedge
                odd = transformed_odd

                # apply symmetries before determine resolution
                # with gold standard you should be careful about applying the symmetry!
                even = job.symmetries.applyToParticle(even)
                odd = job.symmetries.applyToParticle(odd)
                resNyquist, resolutionBand, numberBands = self.determine_resolution(
                    even, odd, job.fsc_criterion, None, job.mask, verbose)

                # write the half set to the disk
                even.write('fsc_' + str(i) + '_even.em')
                odd.write('fsc_' + str(i) + '_odd.em')

                current_resolution = bandToAngstrom(
                    resolutionBand, job.sampleInformation.getPixelSize(),
                    numberBands, 1)
                if verbose:
                    print(
                        self.node_name + ': current resolution ' +
                        str(current_resolution), resNyquist)

                # create new average by merging both half sets
                all_even_pre += all_odd_pre
                all_even_wedge += all_odd_wedge
                #                all_even_pre = all_even_pre/2 # correct for the number of particles in wiener filter
                average = self.create_average(all_even_pre, sum_ctf_squared,
                                              all_even_wedge)

                # apply symmetries
                average = job.symmetries.applyToParticle(average)

                # filter average to resolution and update the new reference
                average_name = 'average_iter' + str(i) + '.em'
                average.write(average_name)

                # update the references (one per half set for gold standard)
                new_reference = [
                    Reference('fsc_' + str(i) + '_even.em'),
                    Reference('fsc_' + str(i) + '_odd.em')
                ]

                # low pass filter the reference and write it to the disk
                filtered = lowpassFilter(average, ceil(resolutionBand),
                                         ceil(resolutionBand) / 10)
                filtered_ref_name = 'average_iter' + str(i) + '_res' + str(
                    current_resolution) + '.em'
                filtered[0].write(filtered_ref_name)

                # change the frequency to a higher value
                new_freq = int(ceil(resolutionBand)) + 1
                if new_freq <= old_freq:
                    if job.adaptive_res is not False:  # two different strategies
                        print(
                            self.node_name +
                            ': Determined resolution gets worse. Include additional %f percent frequency to be aligned!'
                            % job.adaptive_res)
                        new_freq = int((1 + job.adaptive_res) * old_freq)
                    else:  # always increase by 1
                        print(
                            self.node_name +
                            ': Determined resolution gets worse. Increase the frequency to be aligned by 1!'
                        )
                        new_freq = old_freq + 1
                        old_freq = new_freq
                else:
                    old_freq = new_freq
                if new_freq >= numberBands:
                    print(self.node_name +
                          ': Determined frequency too high. Terminate!')
                    break

                if verbose:
                    print(self.node_name + ': change the frequency to ' +
                          str(new_freq))

            # send end signal to other nodes and terminate itself
            self.end(verbose)
        else:
            # other nodes
            self.run(verbose)