Example #1
    def run(self, verbose=False):
        from sh_alignment.frm import frm_align
        from pytom.basic.structures import Shift, Rotation
        from pytom.tools.ProgressBar import FixedProgBar

        while True:
            # get the job
            try:
                job = self.get_job()
            except:
                if verbose:
                    print(self.node_name + ': end')
                break  # get some non-job message, break it

            if verbose:
                prog = FixedProgBar(0,
                                    len(job.particleList) - 1,
                                    self.node_name + ':')
                i = 0

            ref = job.reference[0].getVolume()
            # run the job
            for p in job.particleList:
                if verbose:
                    prog.update(i)
                    i += 1
                v = p.getVolume()

                pos, angle, score = frm_align(v, p.getWedge(), ref, None,
                                              job.bw_range, job.freq,
                                              job.peak_offset,
                                              job.mask.getVolume())

                p.setShift(
                    Shift([
                        pos[0] - v.sizeX() / 2, pos[1] - v.sizeY() / 2,
                        pos[2] - v.sizeZ() / 2
                    ]))
                p.setRotation(Rotation(angle))
                p.setScore(FRMScore(score))

            # average the particle list
            name_prefix = os.path.join(
                self.destination, self.node_name + '_' + str(job.max_iter))
            self.average_sub_pl(job.particleList, name_prefix, job.weighting)

            # send back the result
            self.send_result(
                FRMResult(name_prefix, job.particleList, self.mpi_id))

        pytom_mpi.finalise()
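
Every worker on this page drives the same progress-bar idiom. The minimal sketch below isolates it; only the FixedProgBar(lower, upper, label) constructor and update(index) call seen in the examples are assumed, and the item list is a stand-in for job.particleList.

from pytom.tools.ProgressBar import FixedProgBar

items = list(range(100))                        # stand-in for job.particleList
prog = FixedProgBar(0, len(items) - 1, 'node_0:')
for i, item in enumerate(items):
    prog.update(i)                              # redraw the fixed-width bar at position i
    # ... process the item here ...
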
Example #2
    def sequentialGA(self,verbose=False):
        """
        sequentialGA: Sequential growing average procedure
        @todo: not tested yet
        """
        import os
        from pytom.tools.ProgressBar import FixedProgBar
        
        numberClasses = len(self.particleClassLists)

        progressBar = FixedProgBar(0, numberClasses, 'Jobs finished ')
        progressBar.update(0)
        numberJobsDone = 1

        for classIterator in range(numberClasses):
            # distribute jobs, make sure all available nodes are working
            print('Sending class ' + str(classIterator))

            os.system('mkdir ' + self.destinationDirectory + '/class' + str(classIterator))

            job = GrowingAverageJob(self.particleClassLists[classIterator], self.angleObject,
                                    self.mask, self.score, 0,
                                    self.destinationDirectory + '/class' + str(classIterator),
                                    self.preprocessing)

            if verbose:
                print(job)

            worker = GAWorker(-1)
            worker.fromJob(job)
            worker._run()

            # update progress bar
            progressBar.update(numberJobsDone)
            numberJobsDone = numberJobsDone +1
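
If this untested routine is revisited, the os.system('mkdir ...') call can be replaced with the standard library. A minimal sketch under placeholder names (destination_directory and number_classes stand in for the attributes used above):

import os

destination_directory = './results'             # stand-in for self.destinationDirectory
number_classes = 3                               # stand-in for len(self.particleClassLists)
for class_index in range(number_classes):
    class_dir = os.path.join(destination_directory, 'class' + str(class_index))
    os.makedirs(class_dir, exist_ok=True)        # no shell involved, tolerates existing directories
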
Example #3
def paverage(particleList, norm, binning, verbose, outdir='./'):
    import os
    from pytom_volume import read, vol
    from pytom_volume import transformSpline as transform
    from pytom.basic.structures import Particle
    from pytom.basic.normalise import mean0std1
    from pytom.tools.ProgressBar import FixedProgBar
    from pytom.basic.transformations import resize

    if len(particleList) == 0:
        raise RuntimeError('The particlelist provided is empty. Aborting!')

    if verbose:
        progressBar = FixedProgBar(0, len(particleList), 'Particles averaged ')
        progressBar.update(0)
        numberAlignedParticles = 0

    result = None
    wedgeSum = None
    newParticle = None

    for particleObject in particleList:
        particle = read(particleObject.getFilename(), 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 1, 1, 1)
        if binning != 1:
            particle, particlef = resize(volume=particle,
                                         factor=1. / binning,
                                         interpolation='Fourier')

        if norm:
            mean0std1(particle)

        wedgeInfo = particleObject.getWedge()
        if result is None:  # initialization
            sizeX = particle.sizeX()
            sizeY = particle.sizeY()
            sizeZ = particle.sizeZ()

            newParticle = vol(sizeX, sizeY, sizeZ)

            centerX = sizeX // 2
            centerY = sizeY // 2
            centerZ = sizeZ // 2

            result = vol(sizeX, sizeY, sizeZ)
            result.setAll(0.0)
            wedgeSum = wedgeInfo.returnWedgeVolume(sizeX, sizeY, sizeZ)
            wedgeSum.setAll(0)

        # create spectral wedge weighting
        rotation = particleObject.getRotation()
        wedge = wedgeInfo.returnWedgeVolume(sizeX, sizeY, sizeZ, False,
                                            rotation.invert())

        wedgeSum += wedge

        # shift and rotate particle
        shiftV = particleObject.getShift()
        newParticle.setAll(0)
        transform(particle, newParticle, -rotation[1], -rotation[0],
                  -rotation[2], centerX, centerY, centerZ,
                  -shiftV[0] // binning, -shiftV[1] // binning,
                  -shiftV[2] // binning, 0, 0, 0)

        result += newParticle

        if verbose:
            numberAlignedParticles = numberAlignedParticles + 1
            progressBar.update(numberAlignedParticles)

    # write to the disk

    fname_result = os.path.join(outdir, 'avg_{}.em'.format(mpi.rank))
    fname_wedge = os.path.join(outdir, 'wedge_{}.em'.format(mpi.rank))

    result.write(fname_result)
    result = Particle(fname_result)
    wedgeSum.write(fname_wedge)
    wedgeSum = Particle(fname_wedge)

    return (result, wedgeSum)
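
The binning branch above delegates to pytom's resize(..., interpolation='Fourier'). As an illustration only, and not pytom's implementation, Fourier-space binning amounts to cropping the centred spectrum of the volume; a NumPy sketch for a cubic box:

import numpy as np

def fourier_downscale(volume, factor):
    """Downscale a cubic volume by cropping its centred Fourier spectrum (illustrative only)."""
    n = volume.shape[0]
    m = int(round(n * factor))                              # e.g. factor = 1. / binning
    spectrum = np.fft.fftshift(np.fft.fftn(volume))
    lo = (n - m) // 2
    cropped = spectrum[lo:lo + m, lo:lo + m, lo:lo + m]     # keep only the low frequencies
    small = np.fft.ifftn(np.fft.ifftshift(cropped)).real
    return small * (m ** 3) / (n ** 3)                      # keep the mean grey value comparable

binned = fourier_downscale(np.random.rand(64, 64, 64), 0.5)
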
Example #4
def copyProjectionsForXMIPP(particleList,
                            projectionLists,
                            xmippProjectionDirectory,
                            target,
                            showProgressBar=False,
                            verbose=False):
    """
    
    """
    from pytom.tools.ProgressBar import FixedProgBar
    from pytom.tools.files import readSpider, writeSpider
    import os

    projectionPropertiesList = []
    lineOffset = 1

    for i in range(len(particleList)):

        particle = particleList[i]

        projectionList = projectionLists[i]

        particleTupleList = getParticleTransformLines(particle, projectionList,
                                                      lineOffset)

        lineOffset = lineOffset + len(particleTupleList)

        projectionPropertiesList.extend(particleTupleList)

    if showProgressBar:
        progressBar = FixedProgBar(0, len(projectionPropertiesList),
                                   'Projections modified ')
        progressBar.update(0)

    for i in range(len(projectionPropertiesList)):

        projectionName = projectionPropertiesList[i][0]

        filename = projectionName.rsplit('/')
        filename = filename[len(filename) - 1]

        newFilename = xmippProjectionDirectory + os.sep + filename[
            0:len(filename) - 4] + '_P' + str(i) + '.spi'

        projection = readSpider(projectionName)

        #print projectionPropertiesList[i][1][2], projectionPropertiesList[i][1][3], projectionPropertiesList[i][1][4], projectionPropertiesList[i][1][5], projectionPropertiesList[i][1][6], 0
        writeSpider(projection, newFilename, projectionPropertiesList[i][1][2],
                    projectionPropertiesList[i][1][3],
                    projectionPropertiesList[i][1][4],
                    projectionPropertiesList[i][1][5],
                    projectionPropertiesList[i][1][6], 0)

        if showProgressBar:
            progressBar.update(i)

    cmd = 'ls ' + xmippProjectionDirectory + '/*.spi | awk \'{print $1 \" 1\"}\' > ' + xmippProjectionDirectory + '/reconstruction.sel'
    os.system(cmd)

    cmd = 'xmipp_reconstruct_fourier -i ' + xmippProjectionDirectory + '/reconstruction.sel -o ' + xmippProjectionDirectory + '/result.out -sym C1 -thr 1  -pad_vol 3'
    os.system(cmd)

    v = readSpider(xmippProjectionDirectory + '/result.out')
    v.write(target)
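
The ls | awk pipeline above only produces a two-column selection file with one '<path> 1' line per projection. The same file can be written from Python; a sketch with a placeholder directory name:

import glob
import os

xmipp_projection_directory = './xmipp_projections'          # stand-in for xmippProjectionDirectory
sel_path = os.path.join(xmipp_projection_directory, 'reconstruction.sel')
with open(sel_path, 'w') as sel:
    for spi in sorted(glob.glob(os.path.join(xmipp_projection_directory, '*.spi'))):
        sel.write(spi + ' 1\n')                              # same "<path> 1" lines the awk call emits
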
Example #5
def average2(particleList, weighting=False, norm=False, determine_resolution=False,
             mask=None, binning=1, verbose=False):
    """
    2nd version of average function. Will not write the averages to the disk. Also support internal \
    resolution determination.
    """
    from pytom_volume import read, vol, complexDiv, complexRealMult
    from pytom_volume import transformSpline as transform
    from pytom.basic.fourier import fft, ifft, convolute
    from pytom.basic.normalise import mean0std1
    from pytom.tools.ProgressBar import FixedProgBar
    from pytom.basic.filter import lowpassFilter, rotateWeighting
    from math import exp
    
    if len(particleList) == 0:
        raise RuntimeError('The particlelist provided is empty. Aborting!')
    
    if verbose:
        progressBar = FixedProgBar(0,len(particleList),'Particles averaged ')
        progressBar.update(0)
        numberAlignedParticles = 0
    
    even = None
    odd = None
    wedgeSum_even = None
    wedgeSum_odd = None
    newParticle = None
    
    is_odd = True
    for particleObject in particleList:
        particle = read(particleObject.getFilename(), 0,0,0,0,0,0,0,0,0, binning,binning,binning)
        if norm:
            mean0std1(particle)
        wedgeInfo = particleObject.getWedge()
        
        # apply its wedge to itself
        particle = wedgeInfo.apply(particle)
        
        if odd is None: # initialization
            sizeX = particle.sizeX() 
            sizeY = particle.sizeY()
            sizeZ = particle.sizeZ()
            
            newParticle = vol(sizeX,sizeY,sizeZ)
            
            centerX = sizeX // 2
            centerY = sizeY // 2
            centerZ = sizeZ // 2
            
            odd = vol(sizeX,sizeY,sizeZ)
            odd.setAll(0.0)
            even = vol(sizeX,sizeY,sizeZ)
            even.setAll(0.0)
            
            wedgeSum_odd = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ)
            wedgeSum_odd.setAll(0)
            wedgeSum_even = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ)
            wedgeSum_even.setAll(0)
        

        # create spectral wedge weighting
        rotation = particleObject.getRotation()
        rotinvert =  rotation.invert()
        if analytWedge:
            # > original buggy version
            wedge = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ,False, rotinvert)
            # < original buggy version
        else:
            # > FF: interpol bugfix
            wedge = rotateWeighting( weighting=wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ,False),
                                     z1=rotinvert[0], z2=rotinvert[1], x=rotinvert[2], mask=None,
                                     isReducedComplex=True, returnReducedComplex=True)
            # < FF
            # > TH bugfix
            #wedgeVolume = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ,
            #                                          humanUnderstandable=True, rotation=rotinvert)
            #wedge = rotate(volume=wedgeVolume, rotation=rotinvert, imethod='linear')
            # < TH
        if is_odd:
            wedgeSum_odd = wedgeSum_odd + wedge
        else:
            wedgeSum_even = wedgeSum_even + wedge
        
        # shift and rotate particle
        shiftV = particleObject.getShift()
        newParticle.setAll(0)
        transform(particle,newParticle,-rotation[1],-rotation[0],-rotation[2],
                  centerX,centerY,centerZ,-shiftV[0]/binning,
                  -shiftV[1]/binning,-shiftV[2]/binning,0,0,0)

        if is_odd:
            if weighting:
                weight = 1. - particleObject.getScore().getValue()
                #weight = weight**2
                weight = exp(-1.*weight)
                odd = odd + newParticle * weight
            else:
                odd = odd + newParticle
        else:
            if weighting:
                weight = 1. - particleObject.getScore().getValue()
                #weight = weight**2
                weight = exp(-1.*weight)
                even = even + newParticle * weight
            else:
                even = even + newParticle
        
        is_odd = not is_odd
        
        if verbose:
            numberAlignedParticles = numberAlignedParticles + 1
            progressBar.update(numberAlignedParticles)

    # determine resolution if needed
    fsc = None
    if determine_resolution:
        # apply spectral weighting to sum
        f_even = fft(even)
        w_even = complexDiv(f_even, wedgeSum_even)
        w_even = ifft(w_even)        
        w_even.shiftscale(0.0,1/float(sizeX*sizeY*sizeZ))
        
        f_odd = fft(odd)
        w_odd = complexDiv(f_odd, wedgeSum_odd)
        w_odd = ifft(w_odd)        
        w_odd.shiftscale(0.0,1/float(sizeX*sizeY*sizeZ))
        
        from pytom.basic.correlation import FSC
        fsc = FSC(w_even, w_odd, sizeX // 2, mask, verbose=False)
    
    # add together
    result = even+odd
    wedgeSum = wedgeSum_even+wedgeSum_odd

    invert_WedgeSum( invol=wedgeSum, r_max=sizeX/2-2., lowlimit=.05*len(particleList), lowval=.05*len(particleList))
    #wedgeSum.write(averageName[:len(averageName)-3] + '-WedgeSumInverted.em')
    result = convolute(v=result, k=wedgeSum, kernel_in_fourier=True)
    # do a low pass filter
    #result = lowpassFilter(result, sizeX/2-2, (sizeX/2-1)/10.)[0]
    
    return (result, fsc)
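
The even/odd halves above are compared with pytom's FSC routine. To make the resolution step concrete, here is a NumPy-only sketch of a Fourier shell correlation between two half-set averages; it illustrates the formula and is not pytom's implementation.

import numpy as np

def fsc_curve(vol_a, vol_b, number_bands):
    """Fourier shell correlation of two equally sized cubic volumes (illustrative only)."""
    fa = np.fft.fftshift(np.fft.fftn(vol_a))
    fb = np.fft.fftshift(np.fft.fftn(vol_b))
    centre = np.array(vol_a.shape) // 2
    grid = np.indices(vol_a.shape) - centre[:, None, None, None]
    radius = np.sqrt((grid ** 2).sum(axis=0))
    curve = []
    for band in range(number_bands):
        shell = (radius >= band) & (radius < band + 1)
        numerator = np.sum(fa[shell] * np.conj(fb[shell]))
        denominator = np.sqrt(np.sum(np.abs(fa[shell]) ** 2) * np.sum(np.abs(fb[shell]) ** 2))
        curve.append(float(np.real(numerator) / denominator) if denominator > 0 else 0.0)
    return curve

# mirroring the call above, the number of bands is typically the box size over two:
# fsc = fsc_curve(w_even_numpy, w_odd_numpy, box_size // 2)
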
Example #6
def average( particleList, averageName, showProgressBar=False, verbose=False,
        createInfoVolumes=False, weighting=False, norm=False):
    """
    average : Creates new average from a particleList
    @param particleList: The particles
    @param averageName: Filename of new average 
    @param verbose: Prints particle information. Disabled by default. 
    @param createInfoVolumes: Create info data (wedge sum, inverted density) too? False by default.
    @param weighting: apply weighting to each average according to its correlation score
    @param norm: apply normalization for each particle
    @return: A new Reference object
    @rtype: L{pytom.basic.structures.Reference}
    @author: Thomas Hrabe
    @change: limit for wedgeSum set to 1% or particles to avoid division by small numbers - FF
    """
    from pytom_volume import read,vol,reducedToFull,limit, complexRealMult
    from pytom.basic.filter import lowpassFilter, rotateWeighting
    from pytom_volume import transformSpline as transform
    from pytom.basic.fourier import convolute
    from pytom.basic.structures import Reference
    from pytom.basic.normalise import mean0std1
    from pytom.tools.ProgressBar import FixedProgBar
    from math import exp
    import os

    if len(particleList) == 0:
        raise RuntimeError('The particle list is empty. Aborting!')
    
    if showProgressBar:
        progressBar = FixedProgBar(0,len(particleList),'Particles averaged ')
        progressBar.update(0)
        numberAlignedParticles = 0
    
    result = []
    wedgeSum = []
    
    newParticle = None
    # pre-check that scores != 0
    if weighting:
        wsum = 0.
        for particleObject in particleList:
            wsum += particleObject.getScore().getValue()
        if wsum < 0.00001:
            weighting = False
            print("Warning: all scores have been zero - weighting not applied")

    
    for particleObject in particleList:
        
        if verbose:
            print(particleObject)

    
        if not os.path.exists(particleObject.getFilename()): continue
        particle = read(particleObject.getFilename())
        if norm: # normalize the particle
            mean0std1(particle) # happen inplace
        
        wedgeInfo = particleObject.getWedge()
        # apply its wedge to itself
        particle = wedgeInfo.apply(particle)
        
        if result == []:
            sizeX = particle.sizeX() 
            sizeY = particle.sizeY()
            sizeZ = particle.sizeZ()
            
            newParticle = vol(sizeX,sizeY,sizeZ)
            
            centerX = sizeX // 2
            centerY = sizeY // 2
            centerZ = sizeZ // 2
            
            result = vol(sizeX,sizeY,sizeZ)
            result.setAll(0.0)
            if analytWedge:
                wedgeSum = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ)
            else:
                # > FF bugfix
                wedgeSum = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ)
                # < FF
                # > TH bugfix
                #wedgeSum = vol(sizeX,sizeY,sizeZ)
                # < TH
                #wedgeSum.setAll(0)
            assert wedgeSum.sizeX() == sizeX and wedgeSum.sizeY() == sizeY and wedgeSum.sizeZ() == sizeZ/2+1, \
                    "wedge initialization result in wrong dims :("
            wedgeSum.setAll(0)

        ### create spectral wedge weighting
        rotation = particleObject.getRotation()
        rotinvert = rotation.invert()
        if analytWedge:
            # > analytical buggy version
            wedge = wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ,False, rotinvert)
        else:
            # > FF: interpol bugfix
            wedge = rotateWeighting( weighting=wedgeInfo.returnWedgeVolume(sizeX,sizeY,sizeZ,False),
                                     z1=rotinvert[0], z2=rotinvert[1], x=rotinvert[2], mask=None,
                                     isReducedComplex=True, returnReducedComplex=True)
            # < FF
            # > TH bugfix
            #wedgeVolume = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ,
            #                                    humanUnderstandable=True, rotation=rotinvert)
            #wedge = rotate(volume=wedgeVolume, rotation=rotinvert, imethod='linear')
            # < TH

        ### shift and rotate particle
        shiftV = particleObject.getShift()
        newParticle.setAll(0)
            
        transform(particle,newParticle,-rotation[1],-rotation[0],-rotation[2],
                  centerX,centerY,centerZ,-shiftV[0],-shiftV[1],-shiftV[2],0,0,0)
        
        if weighting:
            weight = 1.-particleObject.getScore().getValue()
            #weight = weight**2
            weight = exp(-1.*weight)
            result = result + newParticle * weight
            wedgeSum = wedgeSum + wedge * weight
        else:
            result = result + newParticle
            wedgeSum = wedgeSum + wedge
        
        if showProgressBar:
            numberAlignedParticles = numberAlignedParticles + 1
            progressBar.update(numberAlignedParticles)

    ###apply spectral weighting to sum
    result = lowpassFilter(result, sizeX/2-1, 0.)[0]
    #if createInfoVolumes:
    result.write(averageName[:len(averageName)-3]+'-PreWedge.em')
    wedgeSum.write(averageName[:len(averageName)-3] + '-WedgeSumUnscaled.em')
        
    invert_WedgeSum( invol=wedgeSum, r_max=sizeX/2-2., lowlimit=.05*len(particleList), lowval=.05*len(particleList))
    
    if createInfoVolumes:
        wedgeSum.write(averageName[:len(averageName)-3] + '-WedgeSumInverted.em')
        
    result = convolute(v=result, k=wedgeSum, kernel_in_fourier=True)

    # do a low pass filter
    #result = lowpassFilter(result, sizeX/2-2, (sizeX/2-1)/10.)[0]
    result.write(averageName)
    
    if createInfoVolumes:
        resultINV = result * -1
        #write sign inverted result to disk (good for chimera viewing ... )
        resultINV.write(averageName[:len(averageName)-3]+'-INV.em')
    newReference = Reference(averageName,particleList)
    
    return newReference
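
invert_WedgeSum followed by convolute(..., kernel_in_fourier=True) effectively divides the summed density by the summed wedge while clamping near-empty Fourier regions (the 5%-of-particles limit mentioned in the docstring). A NumPy sketch of that idea on full complex volumes; pytom's routines work on reduced-complex data and differ in detail:

import numpy as np

def wedge_normalise(summed_volume, wedge_sum, lowlimit, lowval):
    """Divide a summed average by its summed wedge, clamping poorly covered coefficients (illustrative)."""
    weights = wedge_sum.astype(np.float64).copy()
    weights[weights < lowlimit] = lowval                   # avoid dividing by tiny wedge coverage
    spectrum = np.fft.fftn(summed_volume) / weights        # per-coefficient compensation
    return np.fft.ifftn(spectrum).real

# with n particles, the limits used above would be 0.05 * n for both lowlimit and lowval
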
Example #7
    def run(self, verbose=False):
        from pytom_volume import read, sum
        from pytom.basic.filter import lowpassFilter
        from pytom.basic.correlation import nxcc
        from pytom.basic.structures import Rotation
        from pytom.tools.ProgressBar import FixedProgBar
        
        while True:
            # get the job
            job = self.get_job()
            
            try:
                pairs = job["Pairs"]
                pl_filename = job["ParticleList"]
            except:
                if verbose:
                    print(self.node_name + ': end')
                break # get some non-job message, break it

            from pytom.basic.structures import ParticleList
            pl = ParticleList('.')
            pl.fromXMLFile(pl_filename)

            if verbose:
                prog = FixedProgBar(0, len(pairs)-1, self.node_name+':')
                i = 0

            # run the job
            result = {}
            last_filename = None
            binning = int(job["Binning"])

            mask = read(job["Mask"], 0, 0, 0, 0, 0, 0, 0, 0, 0, binning, binning, binning)
            
            
            for pair in pairs:
                if verbose:
                    prog.update(i)
                    i += 1
                g = pl[pair[0]]
                f = pl[pair[1]]
                vf = f.getTransformedVolume(binning)
                wf = f.getWedge().getWedgeObject()
                wf_rotation = f.getRotation().invert()
                # wf.setRotation(Rotation(-rotation[1],-rotation[0],-rotation[2]))
                # wf_vol = wf.returnWedgeVolume(vf.sizeX(), vf.sizeY(), vf.sizeZ(), True, -rotation[1],-rotation[0],-rotation[2])
                vf = lowpassFilter(vf, job["Frequency"], 0)[0]

                if g.getFilename() != last_filename:
                    vg = g.getTransformedVolume(binning)
                    wg = g.getWedge().getWedgeObject()
                    wg_rotation = g.getRotation().invert()
                    # wg.setRotation(Rotation(-rotation[1],-rotation[0],-rotation[2]))
                    # wg_vol = wg.returnWedgeVolume(vg.sizeX(), vg.sizeY(), vg.sizeZ(), True, -rotation[1],-rotation[0],-rotation[2])
                    vg = lowpassFilter(vg, job["Frequency"], 0)[0]

                    last_filename = g.getFilename()

                score = nxcc( wg.apply(vf, wg_rotation), wf.apply(vg, wf_rotation), mask)
                # overlapped_wedge_vol = wf_vol * wg_vol
                # scaling = float(overlapped_wedge_vol.numelem())/sum(overlapped_wedge_vol)
                # score *= scaling

                result[pair] = score
            
            # send back the result
            self.send_result(result)
        
        pytom_mpi.finalise()
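
The pair score here is nxcc, a normalised cross-correlation evaluated under a mask after each volume has been weighted by the other particle's missing wedge. A NumPy sketch of the masked correlation coefficient alone (the wedge weighting is omitted), for illustration rather than as pytom's implementation:

import numpy as np

def masked_ncc(vol_a, vol_b, mask):
    """Normalised cross-correlation of two volumes inside a binary mask (illustrative only)."""
    inside = mask > 0.5
    a = vol_a[inside]
    b = vol_b[inside]
    a = (a - a.mean()) / a.std()
    b = (b - b.mean()) / b.std()
    return float(np.mean(a * b))                # 1.0 for identical volumes, near 0 for unrelated ones
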
Example #8
    def run(self, verbose=False):
        from sh_alignment.frm import frm_align
        from sh_alignment.constrained_frm import frm_constrained_align, AngularConstraint
        from pytom.basic.structures import Shift, Rotation
        from pytom.tools.ProgressBar import FixedProgBar
        from pytom.basic.transformations import resize, resizeFourier
        binningType = 'Fourier'

        while True:
            # get the job
            try:
                job = self.get_job()
            except:
                if verbose:
                    print(self.node_name + ': end')
                break  # get some non-job message, break it

            if verbose:
                prog = FixedProgBar(0,
                                    len(job.particleList) - 1,
                                    self.node_name + ':')
                i = 0
            ref = job.reference.getVolume()
            if job.binning > 1:
                ref = resize(volume=ref,
                             factor=1. / job.binning,
                             interpolation=binningType)
                if type(ref) == tuple:
                    ref = ref[0]
            # re-set max frequency in case it exceeds Nyquist - a bit brute force
            job.freq = min(job.freq, ref.sizeX() // 2 - 1)
            # run the job
            for p in job.particleList:
                if verbose:
                    prog.update(i)
                    i += 1
                v = p.getVolume()
                if job.binning > 1:
                    v = resize(volume=v,
                               factor=1. / job.binning,
                               interpolation=binningType)
                    if type(v) == tuple:
                        v = v[0]
                mask = job.mask.getVolume()
                if job.binning > 1:
                    mask = resize(volume=mask,
                                  factor=1. / job.binning,
                                  interpolation='Spline')
                    if type(mask) == tuple:
                        mask = mask[0]
                if job.constraint:
                    constraint = job.constraint
                    if job.constraint.type == AngularConstraint.ADP_ANGLE:  # set the constraint around certain angle
                        rot = p.getRotation()
                        constraint.setAngle(rot.getPhi(), rot.getPsi(),
                                            rot.getTheta())
                    #pos, angle, score = frm_constrained_align(v, p.getWedge(), ref, None, job.bw_range, job.freq, job.peak_offset, job.mask.getVolume(), constraint)
                    if job.binning > 1:
                        pos, angle, score = frm_constrained_align(
                            v, p.getWedge(), ref, None, job.bw_range, job.freq,
                            job.peak_offset / job.binning, mask, constraint)
                    else:
                        pos, angle, score = frm_constrained_align(
                            v, p.getWedge(), ref, None, job.bw_range, job.freq,
                            job.peak_offset, mask, constraint)
                else:
                    #pos, angle, score = frm_align(v, p.getWedge(), ref, None, job.bw_range, job.freq, job.peak_offset, job.mask.getVolume())
                    #if job.binning >1:
                    #    print(job.peak_offset)
                    #    print(type(job.peak_offset))
                    #    print(job.peak_offset/job.binning)
                    #    print(type(job.binning))
                    #    pos, angle, score = frm_align(v, p.getWedge(), ref, None, job.bw_range, job.freq,
                    #                            job.peak_offset/job.binning, mask)
                    #else:
                    pos, angle, score = frm_align(v, p.getWedge(), ref, None,
                                                  job.bw_range, job.freq,
                                                  job.peak_offset, mask)

                if job.binning > 1:
                    pos[0] = job.binning * (pos[0] - v.sizeX() / 2)
                    pos[1] = job.binning * (pos[1] - v.sizeY() / 2)
                    pos[2] = job.binning * (pos[2] - v.sizeZ() / 2)
                    p.setShift(Shift([pos[0], pos[1], pos[2]]))
                else:
                    p.setShift(
                        Shift([
                            pos[0] - v.sizeX() / 2, pos[1] - v.sizeY() / 2,
                            pos[2] - v.sizeZ() / 2
                        ]))
                p.setRotation(Rotation(angle))
                p.setScore(FRMScore(score))

            # average the particle list
            name_prefix = os.path.join(
                job.destination, self.node_name + '_' + str(job.max_iter))
            self.average_sub_pl(job.particleList, name_prefix, job.weighting)

            # send back the result
            self.send_result(
                FRMResult(name_prefix, job.particleList, self.mpi_id))

        pytom_mpi.finalise()
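
When the alignment runs on binned data, the peak returned by frm_align is in binned voxels relative to the volume origin, and the branch above scales it back to an unbinned shift about the box centre. The arithmetic in isolation, as a plain-Python sketch:

def shift_from_peak(pos, box_size, binning):
    """Convert a peak position in a (possibly binned) box into an unbinned shift about the centre."""
    return [binning * (pos[i] - box_size[i] / 2) for i in range(3)]

# with binning == 1 this reduces to pos minus the box centre, matching the else branch above
print(shift_from_peak([34, 30, 33], [64, 64, 64], 2))      # -> [4.0, -4.0, 2.0]
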
Example #9
def extractPeaks(volume,
                 reference,
                 rotations,
                 scoreFnc=None,
                 mask=None,
                 maskIsSphere=False,
                 wedgeInfo=None,
                 **kwargs):
    '''
    Created on May 17, 2010
    @param volume: target volume
    @type volume: L{pytom_volume.vol}
    @param reference: reference
    @type reference: L{pytom_volume.vol}
    @param rotations: rotation angle list
    @type rotations: L{pytom.angles.globalSampling.GlobalSampling}
    @param scoreFnc: score function that is used
    @type scoreFnc: L{pytom.basic.correlation}
    @param mask: mask volume
    @type mask: L{pytom_volume.vol}
    @param maskIsSphere: flag to indicate whether the mask is sphere or not
    @type maskIsSphere: boolean
    @param wedgeInfo: wedge information
    @type wedgeInfo: L{pytom.basic.structures.WedgeInfo}
    @return: both the score volume and the corresponding rotation index volume
    @rtype: L{pytom_volume.vol}
    @author: chen
    '''
    #    from pytom.tools.timing import timing
    #    t = timing(); t.start()

    # parse the parameters
    nodeName = kwargs.get('nodeName', '')
    verbose = kwargs.get('verboseMode', True)
    if verbose not in [True, False]:
        verbose = True
    moreInfo = kwargs.get('moreInfo', False)
    if moreInfo not in [True, False]:
        moreInfo = False

    from pytom.basic.correlation import FLCF
    from pytom.basic.structures import WedgeInfo, Wedge
    from pytom_volume import vol, pasteCenter
    from pytom_volume import rotateSpline as rotate  # for more accuracy
    from pytom_volume import updateResFromIdx
    from pytom.basic.files import write_em

    if scoreFnc is None:
        scoreFnc = FLCF

    # only FLCF needs mask
    if scoreFnc == FLCF:
        if mask.__class__ != vol:  # construct a sphere mask by default
            from pytom_volume import initSphere
            mask = vol(reference.sizeX(), reference.sizeY(), reference.sizeZ())
            mask.setAll(0)
            initSphere(mask,
                       reference.sizeX() / 2, 0, 0,
                       reference.sizeX() / 2,
                       reference.sizeX() / 2,
                       reference.sizeX() / 2)
            maskIsSphere = True

    # result volume which stores the score
    result = vol(volume.sizeX(), volume.sizeY(), volume.sizeZ())
    result.setAll(-1)

    # result orientation of the peak value (index)
    orientation = vol(volume.sizeX(), volume.sizeY(), volume.sizeZ())
    orientation.setAll(0)

    currentRotation = rotations.nextRotation()
    index = 0

    if verbose == True:
        from pytom.tools.ProgressBar import FixedProgBar
        maxRotationIndex = rotations.numberRotations() - 1
        prog = FixedProgBar(0, maxRotationIndex, nodeName)
    if moreInfo:
        sumV = vol(volume.sizeX(), volume.sizeY(), volume.sizeZ())
        sumV.setAll(0)
        sqrV = vol(volume.sizeX(), volume.sizeY(), volume.sizeZ())
        sqrV.setAll(0)
    else:
        sumV = None
        sqrV = None

    if wedgeInfo.__class__ == WedgeInfo or wedgeInfo.__class__ == Wedge:
        print('Applied wedge to volume')
        volume = wedgeInfo.apply(volume)

    while currentRotation != [None, None, None]:
        if verbose == True:
            prog.update(index)

        # rotate the reference
        ref = vol(reference.sizeX(), reference.sizeY(), reference.sizeZ())
        rotate(reference, ref, currentRotation[0], currentRotation[1],
               currentRotation[2])

        # apply wedge
        if wedgeInfo.__class__ == WedgeInfo or wedgeInfo.__class__ == Wedge:
            ref = wedgeInfo.apply(ref)

        # rotate the mask if it is asymmetric
        if scoreFnc == FLCF:
            if maskIsSphere == False:  # if mask is not a sphere, then rotate it
                m = vol(mask.sizeX(), mask.sizeY(), mask.sizeZ())
                rotate(mask, m, currentRotation[0], currentRotation[1],
                       currentRotation[2])
            else:
                m = mask

        # compute the score
        # if mask is sphere and it is the first run, compute the standard deviation of the volume under mask for late use
        if scoreFnc == FLCF and index == 0 and maskIsSphere == True:
            # compute standard deviation of the volume under mask
            maskV = m
            if (volume.sizeX() != m.sizeX() or volume.sizeY() != m.sizeY()
                    or volume.sizeZ() != m.sizeZ()):
                maskV = vol(volume.sizeX(), volume.sizeY(), volume.sizeZ())
                maskV.setAll(0)
                pasteCenter(m, maskV)
            from pytom_volume import sum
            p = sum(m)
            from pytom.basic.correlation import meanUnderMask, stdUnderMask
            meanV = meanUnderMask(volume, maskV, p)
            stdV = stdUnderMask(volume, maskV, p, meanV)

        # ref.write('template_cpu.em')

        if scoreFnc == FLCF:
            if maskIsSphere == True:
                score = scoreFnc(volume, ref, m, stdV, wedge=1)
            else:
                score = scoreFnc(volume, ref, m)
        else:  # not FLCF, so doesn't need mask as parameter and perhaps the reference should have the same size
            _ref = vol(volume.sizeX(), volume.sizeY(), volume.sizeZ())
            _ref.setAll(0)
            pasteCenter(ref, _ref)

            score = scoreFnc(volume, _ref)

        # update the result volume and the orientation volume
        updateResFromIdx(result, score, orientation, index)

        if moreInfo:
            sumV = sumV + score
            sqrV = sqrV + score * score

        currentRotation = rotations.nextRotation()
        index = index + 1


#    if moreInfo:
#        sumV = sumV/rotations.numberRotations()
#        sqrV = sqrV/rotations.numberRotations()

#    time = t.end(); print 'The overall execution time: %f' % time

    return [result, orientation, sumV, sqrV]
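
extractPeaks keeps, per voxel, the best score over all sampled rotations; the core step for one rotation is a correlation of the search volume with the rotated reference. A NumPy sketch of a single, unnormalised FFT correlation and its peak, for illustration only (FLCF additionally normalises by the local mean and standard deviation under the mask):

import numpy as np

def correlation_peak(volume, template):
    """Cross-correlate two equally sized volumes via FFT and locate the peak (illustrative only)."""
    cc = np.fft.ifftn(np.fft.fftn(volume) * np.conj(np.fft.fftn(template))).real
    cc = np.fft.fftshift(cc)                           # put zero displacement at the box centre
    peak = np.unravel_index(np.argmax(cc), cc.shape)
    return cc, peak
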
Example #10
    def distributeCalculation(self, mpi_myid, verbose=False):
        """
        distributeCalculation: Distribute calculation of matrix to multiple nodes.
        """
        import pytom_mpi
        from pytom.cluster.correlationMatrixStructures import CorrelationVectorJob
        from pytom.parallel.clusterMessages import CorrelationVectorJobMessage, CorrelationVectorMessage
        from pytom.tools.ProgressBar import FixedProgBar

        if not mpi_myid == 0:
            raise Exception(
                'This function (distributeCalculation) can only be processed by mpi_id = 0! ID == '
                + mpi_myid.__str__() + ' Aborting!')

        mpi_myname = 'node_' + mpi_myid.__str__()
        mpi_numberNodes = pytom_mpi.size()

        particleIndex = 0

        progressBar = FixedProgBar(0, len(self._particleList),
                                   'Particles correlated ')
        progressBar.update(0)

        #distribute on all nodes
        for nodeIndex in range(1, mpi_numberNodes):

            if particleIndex < len(self._particleList):

                particle = self._particleList[particleIndex]

                reducedParticleList = self._particleList[particleIndex + 1:]

                job = CorrelationVectorJob(particle, reducedParticleList,
                                           self._mask, particleIndex,
                                           self._applyWedge,
                                           self._binningFactor,
                                           self._lowestFrequency,
                                           self._highestFrequency)

                jobMsg = CorrelationVectorJobMessage(str(mpi_myid),
                                                     str(nodeIndex))
                jobMsg.setJob(job)

                if verbose:
                    print(jobMsg)

                pytom_mpi.send(str(jobMsg), nodeIndex)

                particleIndex = particleIndex + 1

        numberVectorsReceived = 0

        finished = numberVectorsReceived >= len(self._particleList)

        while not finished:

            #listen until numberVectorsReceived > len(self._particleList) and continue distributing
            mpi_msgString = pytom_mpi.receive()

            if verbose:
                print(mpi_msgString)

            correlationVectorMsg = CorrelationVectorMessage()
            correlationVectorMsg.fromStr(mpi_msgString)

            assert correlationVectorMsg.__str__() == mpi_msgString

            vector = correlationVectorMsg.getVector()
            self._setMatrixValuesFromVector(vector.getParticleIndex(), vector)

            self._savePreliminaryResult()

            #print 'Result received from ' + correlationVectorMsg.getSender().__str__() + ' and matrix saved to disk.'

            numberVectorsReceived = numberVectorsReceived + 1

            if particleIndex < len(self._particleList):
                #print 'Send particle number :' , particleIndex
                particle = self._particleList[particleIndex]

                reducedParticleList = self._particleList[particleIndex + 1:]

                job = CorrelationVectorJob(particle, reducedParticleList,
                                           self._mask, particleIndex,
                                           self._applyWedge,
                                           self._binningFactor,
                                           self._lowestFrequency,
                                           self._highestFrequency)

                jobMsg = CorrelationVectorJobMessage(
                    mpi_myid.__str__(),
                    correlationVectorMsg.getSender().__str__())
                jobMsg.setJob(job)

                pytom_mpi.send(jobMsg.__str__(),
                               int(correlationVectorMsg.getSender()))

                particleIndex = particleIndex + 1

            #update progress bar
            progressBar.update(numberVectorsReceived)

            finished = numberVectorsReceived >= len(self._particleList)
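
The master above hands every node one row of the symmetric correlation matrix: particle i against particles i+1 .. N-1, i.e. the upper triangle. A serial NumPy sketch of that decomposition with a placeholder similarity function standing in for the wedge-aware correlation computed on the workers:

import numpy as np

def correlation_matrix(particles, similarity):
    """Fill a symmetric similarity matrix row by row, mirroring the job decomposition (illustrative)."""
    n = len(particles)
    matrix = np.eye(n)
    for i in range(n):                                 # one 'CorrelationVectorJob' per row
        for j in range(i + 1, n):                      # the reduced list particleList[i + 1:]
            value = similarity(particles[i], particles[j])
            matrix[i, j] = matrix[j, i] = value
    return matrix

# example with random vectors and a plain Pearson correlation as the placeholder similarity
vectors = [np.random.rand(100) for _ in range(5)]
print(correlation_matrix(vectors, lambda a, b: float(np.corrcoef(a, b)[0, 1])))
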
Example #11
        particleList.fromXMLFile(plFilename)
    except:
        from pytom.localization.structures import readParticleFile
        particles = readParticleFile(plFilename)

        particleList = ParticleList()
        for particle in particles:
            particleList.append(particle.toParticle())

    particlePath = particleList[0].getFilename()
    particleFolder = particlePath[0:particlePath.rfind('/')]

    if not checkDirExists(particleFolder):
        os.makedirs(particleFolder)

    prog = FixedProgBar(0, len(particleList) - 1, '')

    newParticleList = ParticleList()

    vol = read(volFilename)
    volX = vol.sizeX()
    volY = vol.sizeY()
    volZ = vol.sizeZ()

    i = 0
    for particle in particleList:
        i = i + 1  # even if some particles are skipped, the progress shall be updated
        prog.update(i)

        pi = particle.getPickPosition()
        try:
Example #12
def averageGPU(particleList,
               averageName,
               showProgressBar=False,
               verbose=False,
               createInfoVolumes=False,
               weighting=False,
               norm=False,
               gpuId=None,
               profile=True):
    """
    average : Creates new average from a particleList
    @param particleList: The particles
    @param averageName: Filename of new average
    @param verbose: Prints particle information. Disabled by default.
    @param createInfoVolumes: Create info data (wedge sum, inverted density) too? False by default.
    @param weighting: apply weighting to each average according to its correlation score
    @param norm: apply normalization for each particle
    @return: A new Reference object
    @rtype: L{pytom.basic.structures.Reference}
    @author: Thomas Hrabe
    @change: limit for wedgeSum set to 1% or particles to avoid division by small numbers - FF
    """
    import time
    from pytom.tompy.io import read, write, read_size
    from pytom.tompy.filter import bandpass as lowpassFilter, rotateWeighting, applyFourierFilter, applyFourierFilterFull, create_wedge
    from pytom.voltools import transform, StaticVolume
    from pytom.basic.structures import Reference
    from pytom.tompy.normalise import mean0std1
    from pytom.tompy.tools import volumesSameSize, invert_WedgeSum, create_sphere
    from pytom.tompy.transform import fourier_full2reduced, fourier_reduced2full
    from cupyx.scipy.fftpack.fft import fftn as fftnP
    from cupyx.scipy.fftpack.fft import ifftn as ifftnP
    from cupyx.scipy.fftpack.fft import get_fft_plan
    from pytom.tools.ProgressBar import FixedProgBar
    from multiprocessing import RawArray
    import numpy as np
    import cupy as xp

    if gpuId is not None:
        device = f'gpu:{gpuId}'
        xp.cuda.Device(gpuId).use()
    else:
        print(gpuId)
        raise Exception('Running gpu code on non-gpu device')
    print(device)
    cstream = xp.cuda.Stream()
    if profile:
        stream = xp.cuda.Stream.null
        t_start = stream.record()

    # from pytom.tools.ProgressBar import FixedProgBar
    from math import exp
    import os

    if len(particleList) == 0:
        raise RuntimeError('The particle list is empty. Aborting!')

    if showProgressBar:
        progressBar = FixedProgBar(0, len(particleList), 'Particles averaged ')
        progressBar.update(0)
        numberAlignedParticles = 0

    # pre-check that scores != 0
    if weighting:
        wsum = 0.
        for particleObject in particleList:
            wsum += particleObject.getScore().getValue()
        if wsum < 0.00001:
            weighting = False
            print("Warning: all scores have been zero - weighting not applied")
    sx, sy, sz = read_size(particleList[0].getFilename())
    wedgeInfo = particleList[0].getWedge().convert2numpy()
    print('angle: ', wedgeInfo.getWedgeAngle())
    wedgeZero = xp.fft.fftshift(
        xp.array(wedgeInfo.returnWedgeVolume(sx, sy, sz, True).get(),
                 dtype=xp.float32))
    # wedgeZeroReduced = fourier_full2reduced(wedgeZero)
    wedge = xp.zeros_like(wedgeZero, dtype=xp.float32)
    wedgeSum = xp.zeros_like(wedge, dtype=xp.float32)
    print('init texture')
    wedgeText = StaticVolume(xp.fft.fftshift(wedgeZero),
                             device=device,
                             interpolation='filt_bspline')

    newParticle = xp.zeros((sx, sy, sz), dtype=xp.float32)

    centerX = sx // 2
    centerY = sy // 2
    centerZ = sz // 2

    result = xp.zeros((sx, sy, sz), dtype=xp.float32)

    fftplan = get_fft_plan(wedge.astype(xp.complex64))

    n = 0

    total = len(particleList)
    # total = int(np.floor((11*1024**3 - mempool.total_bytes())/(sx*sy*sz*4)))
    # total = 128
    #
    #
    # particlesNP = np.zeros((total, sx, sy, sz),dtype=np.float32)
    # particles = []
    # mask = create_sphere([sx,sy,sz], sx//2-6, 2)
    # raw = RawArray('f', int(particlesNP.size))
    # shared_array = np.ctypeslib.as_array(raw)
    # shared_array[:] = particlesNP.flatten()
    # procs = allocateProcess(particleList, shared_array, n, total, wedgeZero.size)
    # del particlesNP

    if profile:
        t_end = stream.record()
        t_end.synchronize()

        time_took = xp.cuda.get_elapsed_time(t_start, t_end)
        print(f'startup time {n:5d}: \t{time_took:.3f}ms')
        t_start = stream.record()

    for particleObject in particleList:

        rotation = particleObject.getRotation()
        rotinvert = rotation.invert()
        shiftV = particleObject.getShift()

        # if n % total == 0:
        #     while len(procs):
        #         procs =[proc for proc in procs if proc.is_alive()]
        #         time.sleep(0.1)
        #         print(0.1)
        #     # del particles
        #     # xp._default_memory_pool.free_all_blocks()
        #     # pinned_mempool.free_all_blocks()
        #     particles = xp.array(shared_array.reshape(total, sx, sy, sz), dtype=xp.float32)
        #     procs = allocateProcess(particleList, shared_array, n, total, size=wedgeZero.size)
        #     #pinned_mempool.free_all_blocks()
        #     #print(mempool.total_bytes()/1024**3)

        particle = read(particleObject.getFilename(), deviceID=device)

        #particle = particles[n%total]

        if norm:  # normalize the particle
            mean0std1(particle)  # happen inplace

        # apply its wedge to
        #particle = applyFourierFilter(particle, wedgeZeroReduced)
        #particle = (xp.fft.ifftn( xp.fft.fftn(particle) * wedgeZero)).real
        particle = (ifftnP(fftnP(particle, plan=fftplan) * wedgeZero,
                           plan=fftplan)).real

        ### create spectral wedge weighting

        wedge *= 0

        wedgeText.transform(
            rotation=[rotinvert[0], rotinvert[2], rotinvert[1]],
            rotation_order='rzxz',
            output=wedge)
        #wedge = xp.fft.fftshift(fourier_reduced2full(create_wedge(30, 30, 21, 42, 42, 42, rotation=[rotinvert[0],rotinvert[2], rotinvert[1]])))
        # if analytWedge:
        #     # > analytical buggy version
        # wedge = wedgeInfo.returnWedgeVolume(sx, sy, sz, True, rotinvert)
        # else:
        #     # > FF: interpol bugfix

        # wedge = rotateWeighting(weighting=wedgeInfo.returnWedgeVolume(sx, sy, sz, True), rotation=[rotinvert[0], rotinvert[2], rotinvert[1]])
        #     # < FF
        #     # > TH bugfix
        #     # wedgeVolume = wedgeInfo.returnWedgeVolume(wedgeSizeX=sizeX, wedgeSizeY=sizeY, wedgeSizeZ=sizeZ,
        #     #                                    humanUnderstandable=True, rotation=rotinvert)
        #     # wedge = rotate(volume=wedgeVolume, rotation=rotinvert, imethod='linear')
        #     # < TH

        ### shift and rotate particle

        newParticle *= 0
        transform(particle,
                  output=newParticle,
                  rotation=[-rotation[1], -rotation[2], -rotation[0]],
                  center=[centerX, centerY, centerZ],
                  translation=[-shiftV[0], -shiftV[1], -shiftV[2]],
                  device=device,
                  interpolation='filt_bspline',
                  rotation_order='rzxz')

        #write(f'trash/GPU_{n}.em', newParticle)
        # print(rotation.toVector())
        # break
        result += newParticle
        wedgeSum += xp.fft.fftshift(wedge)
        # if showProgressBar:
        #     numberAlignedParticles = numberAlignedParticles + 1
        #     progressBar.update(numberAlignedParticles)

        if n % total == 0:
            if profile:
                t_end = stream.record()
                t_end.synchronize()

                time_took = xp.cuda.get_elapsed_time(t_start, t_end)
                print(f'total time {n:5d}: \t{time_took:.3f}ms')
                t_start = stream.record()
        cstream.synchronize()
        n += 1

    print('averaged particles')
    ###apply spectral weighting to sum

    result = lowpassFilter(result, high=sx / 2 - 1, sigma=0)
    # if createInfoVolumes:
    write(averageName[:len(averageName) - 3] + '-PreWedge.em', result)
    write(averageName[:len(averageName) - 3] + '-WedgeSumUnscaled.em',
          fourier_full2reduced(wedgeSum))

    wedgeSumINV = invert_WedgeSum(wedgeSum,
                                  r_max=sx // 2 - 2.,
                                  lowlimit=.05 * len(particleList),
                                  lowval=.05 * len(particleList))

    #print(wedgeSum.mean(), wedgeSum.std())
    if createInfoVolumes:
        write(averageName[:len(averageName) - 3] + '-WedgeSumInverted.em',
              xp.fft.fftshift(wedgeSumINV))

    result = applyFourierFilterFull(result, xp.fft.fftshift(wedgeSumINV))

    # do a low pass filter
    result = lowpassFilter(result, high=sx / 2 - 2, sigma=(sx / 2 - 1) / 10.)
    write(averageName, result)

    if createInfoVolumes:
        resultINV = result * -1
        # write sign inverted result to disk (good for chimera viewing ... )
        write(averageName[:len(averageName) - 3] + '-INV.em', resultINV)

    newReference = Reference(averageName, particleList)

    return newReference
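
The GPU path applies the missing-wedge filter by multiplying in Fourier space (the commented xp.fft line it replaces with a planned FFT does the same thing). The operation in plain NumPy, as an illustration of the filtering step only:

import numpy as np

def apply_fourier_filter(volume, filter_full):
    """Multiply the full, unshifted spectrum of a volume by a Fourier-space filter (illustrative only)."""
    return np.fft.ifftn(np.fft.fftn(volume) * filter_full).real

# sanity check: an all-ones filter leaves the volume unchanged up to numerical noise
vol = np.random.rand(32, 32, 32).astype(np.float32)
assert np.allclose(apply_fourier_filter(vol, np.ones_like(vol)), vol, atol=1e-5)
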
Example #13
    def run(self, verbose=False):
        from pytom.gui.additional.frm import frm_align
        from sh_alignment.constrained_frm import frm_constrained_align, AngularConstraint
        from pytom.basic.structures import Shift, Rotation
        from pytom.tools.ProgressBar import FixedProgBar

        while True:
            # get the job
            try:
                job = self.get_job()
            except:
                if verbose:
                    print(self.node_name + ': end')
                break  # get some non-job message, break it

            if verbose:
                prog = FixedProgBar(0,
                                    len(job.particleList) - 1,
                                    self.node_name + ':')
                i = 0

            ref = job.reference.getVolume()
            # run the job
            for p in job.particleList:
                if verbose:
                    prog.update(i)
                    i += 1
                v = p.getVolume()

                if job.constraint:
                    constraint = job.constraint
                    if job.constraint.type == AngularConstraint.ADP_ANGLE:  # set the constraint around certain angle
                        rot = p.getRotation()
                        constraint.setAngle(rot.getPhi(), rot.getPsi(),
                                            rot.getTheta())
                    pos, angle, score = frm_constrained_align(
                        v, p.getWedge(), ref, None, job.bw_range, job.freq,
                        job.peak_offset, job.mask.getVolume(), constraint)
                else:
                    pos, angle, score = frm_align(v, p.getWedge(), ref, None,
                                                  job.bw_range, job.freq,
                                                  job.peak_offset,
                                                  job.mask.getVolume())

                p.setShift(
                    Shift([
                        pos[0] - v.sizeX() / 2, pos[1] - v.sizeY() / 2,
                        pos[2] - v.sizeZ() / 2
                    ]))
                p.setRotation(Rotation(angle))
                p.setScore(FRMScore(score))

            # average the particle list
            name_prefix = self.node_name + '_' + str(job.max_iter)
            self.average_sub_pl(job.particleList, name_prefix, job.weighting)

            # send back the result
            self.send_result(
                FRMResult(name_prefix, job.particleList, self.mpi_id))

        pytom_mpi.finalise()
Example #14
    def findParticles(self,
                      sizeParticle,
                      maxNumParticle=0,
                      minScore=-1,
                      write2disk=0,
                      margin=None,
                      offset=[0, 0, 0]):
        """
        findParticles: Find particles in target volume according to the result volume.
        @param sizeParticle: size or radius of searched particle
        @type sizeParticle: [x,y,z] or integer
        @param maxNumParticle: maximal number of particles you want to pick
        @type maxNumParticle: integer
        @param minScore: minimal score as threshold
        @type minScore: float 
        @param write2disk: write the found particles to the disk or not (0: do not write, otherwise the length of each dimension)
        @type write2disk: integer
        @param margin: set the margin of the score volume
        @param margin: [x,y,z] or integer
        
        @return: list of found particles
        @rtype: L{pytom.localization.structures.FoundParticle}
        """
        from pytom_volume import vol, peak, putSubVolume, read
        from pytom.localization.structures import FoundParticle

        # prepare the mask
        x = self.result.sizeX()
        y = self.result.sizeY()
        z = self.result.sizeZ()

        if sizeParticle.__class__ == list:
            xP = sizeParticle[0]
            yP = sizeParticle[1]
            zP = sizeParticle[2]
        elif sizeParticle.__class__ == vol:
            xP = sizeParticle.sizeX()
            yP = sizeParticle.sizeY()
            zP = sizeParticle.sizeZ()
        else:
            radius = sizeParticle
            xP = 2 * sizeParticle
            yP = 2 * sizeParticle
            zP = 2 * sizeParticle

        if margin:
            if margin.__class__ == list:
                marginX, marginY, marginZ = margin
            else:
                marginX = marginY = marginZ = margin
        else:  # no margin given, set automatically
            marginX = int(xP / 2)
            marginY = int(yP / 2)
            marginZ = int(zP / 2)

        mask = vol(x, y, z)
        mask.setAll(0)

        maskIn = vol(x - 2 * marginX, y - 2 * marginY, z - 2 * marginZ)
        maskIn.setAll(1)
        putSubVolume(maskIn, mask, marginX, marginY, marginZ)

        # progress bar
        from pytom.tools.ProgressBar import FixedProgBar
        prog = FixedProgBar(0, maxNumParticle - 1, '')

        # find the particles
        resList = []
        for i in range(maxNumParticle):
            prog.update(i)

            try:
                posV = peak(self.result, mask)
            except:
                break  # the mask is all zero

            [scoreV, orientV] = self.get(posV[0], posV[1], posV[2])
            # test if the peak score is bigger than minimal threshold
            if scoreV > minScore:
                particleFilename = 'particle_' + str(i) + '.em'
                if write2disk:
                    # write the found particle to the disk
                    l = write2disk
                    v = read(self.volFilename, posV[0] - l // 2,
                             posV[1] - l // 2, posV[2] - l // 2, l, l, l, 0, 0,
                             0, 0, 0, 0)
                    v.write(particleFilename)

                score = self.score()
                score.setValue(scoreV)
                from pytom.basic.structures import PickPosition, Rotation
                pos = PickPosition(posV, originFilename=self.volFilename)
                pos + offset
                orientation = Rotation(orientV)
                p = FoundParticle(pos, orientation, score, particleFilename)

                resList.append(p)

                if sizeParticle.__class__ == list:
                    self.maskOut(mask, posV, [xP, yP, zP])
                elif sizeParticle.__class__ == vol:
                    self.maskOut(mask, posV, sizeParticle)
                else:
                    self.maskOut(mask, posV, radius)
            else:
                break

        return resList
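
findParticles repeatedly takes the highest remaining peak and then calls maskOut to blank a particle-sized region around it, so the same particle is not picked twice. A NumPy sketch of that pick-and-suppress loop with a spherical exclusion zone; the threshold and radius are placeholders:

import numpy as np

def pick_peaks(score_volume, max_particles, min_score, radius):
    """Greedy peak picking with spherical suppression around each pick (illustrative only)."""
    scores = score_volume.astype(np.float64).copy()
    grid = np.indices(scores.shape)
    picks = []
    for _ in range(max_particles):
        pos = np.unravel_index(np.argmax(scores), scores.shape)
        if scores[pos] <= min_score:
            break                                           # remaining peaks fall below the threshold
        picks.append((pos, float(scores[pos])))
        dist2 = sum((grid[d] - pos[d]) ** 2 for d in range(3))
        scores[dist2 <= radius ** 2] = -np.inf              # mask out the picked neighbourhood
    return picks

print(pick_peaks(np.random.rand(32, 32, 32), max_particles=5, min_score=0.5, radius=4))
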
Example #15
    def run(self, verbose=False):
        from sh_alignment.frm import frm_align
        from pytom.basic.structures import Shift, Rotation
        from pytom.tools.ProgressBar import FixedProgBar
        from pytom.basic.fourier import convolute
        from pytom_volume import read, power

        while True:
            # get the job
            try:
                job = self.get_job()
            except:
                if verbose:
                    print(self.node_name + ': end')
                break  # get some non-job message, break it

            if verbose:
                prog = FixedProgBar(0,
                                    len(job.particleList) - 1,
                                    self.node_name + ':')
                i = 0

            ref = []
            ref.append(job.reference[0].getVolume())
            ref.append(job.reference[1].getVolume())

            # convolute with the approximation of the CTF
            if job.sum_ctf_sqr:
                ctf = read(job.sum_ctf_sqr)
                power(ctf,
                      0.5)  # the number of CTFs should not matter, should it?
                ref0 = ref[0]
                ref1 = ref[1]
                ref0 = convolute(ref0, ctf, True)
                ref1 = convolute(ref1, ctf, True)
                ref = [ref0, ref1]

            if job.bfactor and job.bfactor != 'None':
                #                restore_kernel = create_bfactor_restore_vol(ref.sizeX(), job.sampleInformation.getPixelSize(), job.bfactor)
                from pytom_volume import vol, read
                bfactor_kernel = read(job.bfactor)
                unit = vol(bfactor_kernel)
                unit.setAll(1)
                restore_kernel = unit / bfactor_kernel

            # run the job
            for p in job.particleList:
                if verbose:
                    prog.update(i)
                    i += 1
                v = p.getVolume()

                #                if weights is None: # create the weights according to the bfactor
                #                    if job.bfactor == 0:
                #                        weights = [1 for k in xrange(job.freq)]
                #                    else:
                #                        restore_fnc = create_bfactor_restore_fnc(ref.sizeX(), job.sampleInformation.getPixelSize(), job.bfactor)
                #                        # cut out the corresponding part and square it to get the weights!
                #                        weights = restore_fnc[1:job.freq+1]**2

                if job.bfactor and job.bfactor != 'None':
                    v = convolute(v, restore_kernel,
                                  True)  # if bfactor is set, restore it

                pos, angle, score = frm_align(v, p.getWedge(),
                                              ref[int(p.getClass())], None,
                                              job.bw_range, job.freq,
                                              job.peak_offset,
                                              job.mask.getVolume())

                p.setShift(
                    Shift([
                        pos[0] - v.sizeX() / 2, pos[1] - v.sizeY() / 2,
                        pos[2] - v.sizeZ() / 2
                    ]))
                p.setRotation(Rotation(angle))
                p.setScore(FRMScore(score))

            # average the particle list
            name_prefix = self.node_name + '_' + str(job.max_iter)
            pair = ParticleListPair('', job.ctf_conv_pl, None, None)
            pair.set_phase_flip_pl(job.particleList)
            self.average_sub_pl(
                pair.get_ctf_conv_pl(),
                name_prefix)  # operate on the CTF convoluted projection!

            # send back the result
            self.send_result(
                FRMResult(name_prefix, job.particleList, self.mpi_id))

        pytom_mpi.finalise()
Example #16
    # read all the projections in tompy
    import numpy as np
    from pytom.tompy.io import read, write
    proj = []
    tilt_angles = []
    for p in projections:
        proj.append(read(p.getFilename()))
        tilt_angles.append(p.getTiltAngle())

    # reconstruct each particles
    from math import cos, sin, pi, ceil
    from pytom.tompy.transform import cut_from_projection
    from nufft.reconstruction import fourier_2d1d_iter_reconstruct, fourier_2d1d_gridding_reconstruct
    from pytom.tools.ProgressBar import FixedProgBar
    prog = FixedProgBar(0, len(pl) - 1, '')

    i = 0
    for p in pl:
        prog.update(i)
        i += 1

        # transfer the coordinate system
        x, y, z = p.getPickPosition().toVector()
        x = (x + offset[0]) * binning
        y = (y + offset[1]) * binning
        z = (z + offset[2]) * binning

        # cut out corresponding parts from projections
        subregions = []
        l = (vol_size / 2) * 2**0.5