Example #1
def g1(x, kimax=None, kjmax=None, cross=True):
    i, j = np.meshgrid(ifreq, jfreq, indexing="ij")

    x = np.asarray(x)

    #add dimensions for broadcasting
    x = x[..., None, None]

    #expected signal
    a = NUM_PARTICLES * formf**2 * AREA_RATIO
    #expected variance (signal + noise)
    if APPLY_DUST:
        if cross:
            v = a + noise_level(
                (window * dust1 + window * dust2) / 2, BACKGROUND
            )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5
        else:
            v = a + noise_level(
                window * dust1, BACKGROUND
            )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5

    else:
        v = a + noise_level(
            window, BACKGROUND
        )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5

    #expected scaling factor
    a = a / v

    a = rfft2_crop(a, kimax, kjmax)
    i = rfft2_crop(i, kimax, kjmax)
    j = rfft2_crop(j, kimax, kjmax)

    return a * np.exp(-D * (i**2 + j**2) * x)
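
For reference, the function above evaluates the expected relative signal amplitude a/v and multiplies it by the diffusive decay exp(-D*(i**2 + j**2)*x). Below is a minimal, self-contained sketch of the same calculation; the values standing in for the module-level globals (NUM_PARTICLES, AREA_RATIO, D, BACKGROUND, formf, ifreq, jfreq) are made up for illustration, and the noise_level()/rfft2_crop() helpers are replaced by a flat noise floor and no cropping.

import numpy as np

# made-up stand-ins for the module-level globals used by g1 above
NUM_PARTICLES = 100
AREA_RATIO = 0.5
D = 1e-4
BACKGROUND = 200
SHAPE = (32, 32)

ifreq = np.fft.fftfreq(SHAPE[0], d=1 / SHAPE[0])   # k indices along axis 0
jfreq = np.fft.rfftfreq(SHAPE[1], d=1 / SHAPE[1])  # k indices along axis 1 (rfft half-plane)
formf = np.ones((len(ifreq), len(jfreq)))          # flat form factor for this sketch

i, j = np.meshgrid(ifreq, jfreq, indexing="ij")
x = np.arange(16)[..., None, None]                 # delay times, broadcast over (i, j)

a = NUM_PARTICLES * formf**2 * AREA_RATIO          # expected signal
v = a + BACKGROUND                                 # crude stand-in for noise_level(...)
a = a / v                                          # expected scaling factor

g1_theory = a * np.exp(-D * (i**2 + j**2) * x)     # shape (16, 32, 17)
print(g1_theory.shape)
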
Example #2
def fvar(kimax=None, kjmax=None, cross=True):
    #expected signal
    a = NUM_PARTICLES * formf**2 * AREA_RATIO
    if APPLY_DUST:
        if cross:
            v = a + noise_level(
                (window * dust1 + window * dust2) / 2, BACKGROUND
            )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5
        else:
            v = a + noise_level(
                window * dust1, BACKGROUND
            )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5
    else:
        v = a + noise_level(
            window, BACKGROUND
        )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5

    v = v / (ADC_SCALE_FACTOR**2)
    return rfft2_crop(v, kimax, kjmax)
Example #3
                     "auto_standard_error_corr.npy"))[...,
                                                      0:NFRAMES // PERIOD * 2]
    #data_regular = data
else:
    data = np.load(os.path.join(DATA_PATH, "auto_random_error_corr.npy"))
    bgs = np.load(os.path.join(DATA_PATH, "auto_random_error_bg.npy"))
    vars = np.load(os.path.join(DATA_PATH, "auto_random_error_var.npy"))

    data_regular = np.load(os.path.join(DATA_PATH,
                                        "auto_fast_error_corr.npy"))[...,
                                                                     0:NFRAMES]

#form factor, for relative signal intensity calculation
formf = rfft2_crop(
    form_factor(window,
                sigma=SIGMA,
                intensity=INTENSITY,
                dtype="uint16",
                navg=30), 51, 0)
#formf2 = rfft2_crop(form_factor(window*video_simulator.dust2, sigma = SIGMA, intensity = INTENSITY, dtype = "uint16", navg = 200), 51, 0)


def g1(x, i, j):
    #expected signal
    a = NUM_PARTICLES * formf[i, j]**2 * AREA_RATIO
    #expected variance (signal + noise)
    if CROSS:
        v = a + noise_level(
            (window * video_simulator.dust1 + window * video_simulator.dust2) /
            2, BACKGROUND
        )  #expected abs FFT squared of gaussian noise with std of BACKGROUND**0.5
    else:
Example #4

#: create window for multiplication...
window = blackman(SHAPE)

if not APPLY_WINDOW:
    #we still need window for form_factor calculation
    window[...] = 1.

#: we must create a video of windows for multiplication
window_video = ((window, window),) * NFRAMES

#: how many runs to perform
NRUN = 16 * 2

bg1 = rfft2_crop(bg1(), KIMAX, 0)
bg2 = rfft2_crop(bg2(), KIMAX, 0)
delta = 0.

g1 = g1(np.arange(NFRAMES), KIMAX, 0)

w = weight_from_g(g1, delta)
wp = weight_prime_from_g(g1, delta, bg1, bg2)

def calculate():
    out = None
    bgs = []
    vars = []
    
    for i in range(NRUN):
        
Example #5
from examples.paper.form_factor import g1

import os.path as path

SAVEFIG = True

colors = ["C{}".format(i) for i in range(10)]

LABELS = {0: "B'", 1: "C'", 2: "B", 3: "C", 4: "W'", 6: "W"}
MARKERS = {0: "1", 1: "2", 2: "3", 3: "4", 4: "+", 6: "x"}

#which method... either fast, standard, full, random or dual
METHOD = "dual"

#theoretical amplitude of the signal.
amp = rfft2_crop(g1(0), KIMAX, KJMAX)


def _g1(x, f, a, b):
    """g1: exponential decay"""
    return a * np.exp(-f * x) + b


def _fit_k(x, ys, p0):
    for y in ys:
        try:
            popt, pcov = curve_fit(_g1, x, y, p0=p0)
            yield popt, np.diag(pcov)
        except Exception:
            yield (np.nan, ) * 3, (np.nan, ) * 3
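
As a usage note, _fit_k is a generator that fits the three-parameter model _g1 to one correlation curve per wavevector and yields the fitted parameters together with their variances. A self-contained sketch with synthetic decays (the decay rates, noise level and initial guess p0 below are made up):

import numpy as np
from scipy.optimize import curve_fit

def _g1(x, f, a, b):
    """g1: exponential decay"""
    return a * np.exp(-f * x) + b

x = np.arange(256)
rng = np.random.default_rng(0)
# three synthetic curves, one per k value, with a small additive noise
ys = [0.8 * np.exp(-f * x) + 0.05 + rng.normal(0., 0.01, x.shape)
      for f in (0.01, 0.02, 0.05)]

p0 = (0.01, 1., 0.)  # initial guess for (f, a, b)

for y in ys:
    try:
        popt, pcov = curve_fit(_g1, x, y, p0=p0)
        print(popt, np.diag(pcov))
    except RuntimeError:
        print("fit did not converge for this k")
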
Example #6
def bg2(kimax=None, kjmax=None):
    if APPLY_DUST:
        a = (np.fft.fft2(dust2 * window) * VMEAN) / (fvar())**0.5
    else:
        a = (np.fft.fft2(window) * VMEAN) / (fvar())**0.5
    return rfft2_crop(a, kimax, kjmax)
Example #7
                out[0,norm] = y
            else:
                out[i,norm] = y 
        
    return x, out

try:
    out
except NameError:
    x,data_0 = calculate(0)
    #x,data_1 = calculate(1)
    out = [data_0, data_0]


#compute form factors, for relative signal amplitudes
formf1 = rfft2_crop(form_factor(SHAPE, sigma=SIGMA1, intensity=INTENSITY1, dtype="uint16"), 51, 0)
formf2 = rfft2_crop(form_factor(SHAPE, sigma=SIGMA2, intensity=INTENSITY2, dtype="uint16"), 51, 0)

def g1(x, i, j):
    a = formf1[i, j]**2
    b = formf2[i, j]**2
    return (a / (a + b) * np.exp(-D1 * (i**2 + j**2) * x)
            + b / (a + b) * np.exp(-D2 * (i**2 + j**2) * x))
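
The two-component g1 above mixes the diffusive decays of the two particle species, each weighted by its relative scattering power a/(a+b), so the model is normalized to 1 at x = 0. A quick numeric sketch with made-up values in place of formf1[i, j]**2, formf2[i, j]**2, D1 and D2:

import numpy as np

a, b = 9., 1.                 # made-up |form factor|**2 weights of the two species
D1, D2 = 2e-4, 5e-5           # made-up diffusion constants
i, j = 10, 0                  # wavevector indices
x = np.arange(64)             # delay times

g1_two = (a / (a + b) * np.exp(-D1 * (i**2 + j**2) * x)
          + b / (a + b) * np.exp(-D2 * (i**2 + j**2) * x))

print(g1_two[0])              # -> 1.0 at zero delay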


for binning in (0,):
    plt.figure()

    clin, cmulti = ccorr_multi_count(NFRAMES, period=PERIOD, level_size=16, binning=binning)

    #get effective count in averaging...
    x, n = log_merge_count(clin, cmulti, binning=binning)