Example #1
# imports needed by this snippet (omitted in the original excerpt)
import numpy as np
import cupy as cp
import pyphret.functions as pf

def autocorrelateStack(origStack, gpu=True):
    # announce the start of the computation
    print('Calculating the stack auto-correlation...')

    # allocate the output stack (same shape and dtype as the input)
    spimAcorr = np.zeros_like(origStack)

    if gpu:
        print('Running on the GPU')
        # calculate the autocorrelation of each SPIM detection view on the GPU
        for i in range(origStack.shape[0]):
            print('Auto-correlating view ' + str(i))
            tempStack = cp.asarray(origStack[i,:,:,:])  # move the current view to the GPU
            spimAcorr[i,:,:,:] = cp.abs(pf.my_autocorrelation(tempStack)).get()  # copy back to host
    else:
        print('Running on the CPU')
        # calculate the autocorrelation of each SPIM detection view on the CPU
        for i in range(origStack.shape[0]):
            print('Auto-correlating view ' + str(i))
            spimAcorr[i,:,:,:] = np.abs(pf.my_autocorrelation(origStack[i,:,:,:]))

    return spimAcorr
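
# Usage sketch (illustrative, not part of the original code): it assumes the stack is
# shaped (views, z, y, x), as implied by the indexing origStack[i,:,:,:] above, and the
# synthetic data below is only meant to show how the function is called.
demoStack = np.random.rand(4, 32, 64, 64).astype(np.float32)
demoAcorr = autocorrelateStack(demoStack, gpu=False)   # CPU path, no GPU required
print(demoAcorr.shape)                                 # same shape as the input stack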

Example #2
import numpy as np
import cupy as cp
import matplotlib.pyplot as plt
import pyphret.functions as pf
import pyphret.retrievals as pr

# read in the source test image, zero-pad it and normalize it to [0, 1]
test_image = plt.imread("test_images//einstein.bmp")
test_image = np.pad(test_image, (0,300))
test_image = test_image/np.max(test_image)
# test_image = pf.my_gaussblur(test_image,4)

# calculate the autocorrelation of the test image
test_xcorr = pf.my_autocorrelation(test_image)
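
# Quick visual check (illustrative sketch, not part of the original script): display the
# padded test image next to the magnitude of its autocorrelation with matplotlib.
fig, (ax1, ax2) = plt.subplots(1, 2)
ax1.imshow(test_image, cmap='gray')
ax1.set_title('test image')
ax2.imshow(np.abs(test_xcorr), cmap='gray')
ax2.set_title('autocorrelation')
plt.show()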


# calculate the Fourier magnitude (modulus) of the test image
fftmagnitude = np.abs(np.fft.rfft2(test_image))
fftmagnitude = cp.asarray(fftmagnitude)   # move it to the GPU for the retrieval

# run the phase retrieval: HIO first, then refine the estimate with error reduction (ER)
(retrieved, mask, _) = pr.phaseRet(fftmagnitude, rec_prior=None, phase_prior=None, masked='half',
                                    method='HIO', mode='classical',
                                    beta=0.9, steps=2000)
(retrieved, mask, _) = pr.phaseRet(fftmagnitude, rec_prior=retrieved, phase_prior=None, masked='half',
                                    method='ER', mode='normal',
                                    beta=0.9, steps=2000)
retrieved = retrieved.get()   # copy the results back from GPU to host memory
mask = mask.get()
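
# Compare the retrieval with the original (illustrative sketch; it assumes 'retrieved'
# holds the real-space estimate returned by phaseRet, which is only defined up to the
# usual phase-retrieval ambiguities such as a global shift and a flip).
fig, (ax1, ax2) = plt.subplots(1, 2)
ax1.imshow(test_image, cmap='gray')
ax1.set_title('original')
ax2.imshow(np.abs(retrieved), cmap='gray')
ax2.set_title('retrieved')
plt.show()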

Example #3
# imports reconstructed for this snippet (the original excerpt omits them); the module
# providing anchorUpdateX is assumed to be pyphret.deconvolutions.
import numpy as np
import cupy as cp
import tifffile as tiff
import pyphret.functions as pf
import pyphret.deconvolutions as pd

psf_long = tiff.imread('..//test_images//psf_long.tiff')
psf_round = tiff.imread('..//test_images//psf_round.tiff')

# test object: the original excerpt loads a satellite image before this point;
# the file name below is a placeholder assumption.
satellite = tiff.imread('..//test_images//satellite.tiff')
satellite = satellite / satellite.mean()

# psf normalization
psf_long /= psf_long.sum()
psf_round /= psf_round.sum()

# noise parameters and number of iterations
lambd = 2**4
iterations = 10000

# %% creating the measurement described in experiment A - if the results do not converge, re-run several times until the SNR grows
noise = (np.random.poisson(lam=lambd, size=satellite.shape))
measureA = pf.my_autocorrelation(satellite)
measureA = (2**16) * measureA / measureA.max()
measureA_blur = pf.my_convolution(measureA, psf_long)
measureA_blur_noise = np.abs(measureA_blur + noise - lambd)   # add the noise and subtract its mean (lambd)
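
# Rough noise check for the current random draw (illustrative only; 'SNR' here is loosely
# taken as peak signal over the Poisson noise standard deviation, which equals sqrt(lambd)).
print('signal peak: %.1f, noise std: %.1f (expected ~%.1f)'
      % (measureA_blur.max(), noise.std(), np.sqrt(lambd)))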

# running the algorithm
deconvolved_A, error_A = pd.anchorUpdateX(cp.asarray(measureA_blur_noise),
                                          cp.asarray(psf_long),
                                          cp.asarray(0),
                                          kerneltype='A',
                                          iterations=iterations)

deconvolved_A, error_A = deconvolved_A.get(), error_A.get()   # copy results back to host memory
deconvolved_A = pf.my_alignND(satellite, deconvolved_A)       # align the estimate to the ground truth
deconvolved_A = deconvolved_A / deconvolved_A.mean()          # match the normalization of 'satellite'
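
# Inspect the run (illustrative sketch; it assumes 'error_A' is a per-iteration error trace
# returned by anchorUpdateX and that the test data are 2-D images).
import matplotlib.pyplot as plt

plt.figure()
plt.semilogy(error_A)
plt.xlabel('iteration')
plt.ylabel('error')
plt.title('anchor update convergence')
plt.show()

fig, (ax1, ax2) = plt.subplots(1, 2)
ax1.imshow(satellite, cmap='gray')
ax1.set_title('ground truth')
ax2.imshow(deconvolved_A, cmap='gray')
ax2.set_title('deconvolved (experiment A)')
plt.show()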