Example #1
def test_equivalence_diff_3(self):
    # norm = 3 with the "diff" method should give the same result as
    # norm = 1 with the "corr" method after normalization
    norm = 3
    bg, var = stats(self.test_data1)
    # reference result: standard ("corr") auto-correlation with norm = 1
    data = multitau.acorr_multi(self.test_data1,
                                level_size=16,
                                norm=1,
                                method="corr",
                                binning=0)
    data = multitau.normalize_multi(data, bg, var, norm=1)
    x_, out0 = multitau.log_merge(*data)
    # cross-correlation of the data with itself, using the "diff" method
    data = multitau.ccorr_multi(self.test_data1,
                                self.test_data1,
                                level_size=16,
                                norm=norm,
                                method="diff",
                                binning=0)
    data = multitau.normalize_multi(data, bg, var, norm=norm)
    x_, out = multitau.log_merge(*data)
    self.assertTrue(np.allclose(out0, out))
    # the iterative "diff" auto-correlation must also agree
    data, bg, var = multitau.iacorr_multi(fromarrays((self.test_data1,)),
                                          count=64,
                                          level_size=16,
                                          norm=1,
                                          method="diff",
                                          binning=0)
    data = multitau.normalize_multi(data, bg, var, norm=1)
    x_, out = multitau.log_merge(*data)
    self.assertTrue(np.allclose(out0, out))
Example #2
def test_equivalence_norm_2(self):
    # auto-correlation and cross-correlation of the data with itself
    # must give identical results for norm = 2
    norm = 2
    bg, var = stats(self.test_data1)
    data = multitau.acorr_multi(self.test_data1, level_size=16, norm=norm)
    data = multitau.normalize_multi(data, bg, var, norm=norm)
    x_, out0 = multitau.log_merge(*data)
    data = multitau.ccorr_multi(self.test_data1, self.test_data1, level_size=16, norm=norm)
    data = multitau.normalize_multi(data, bg, var, norm=norm)
    x_, out = multitau.log_merge(*data)
    self.assertTrue(np.allclose(out0, out))

    # the iterative algorithm must agree as well
    data, bg, var = multitau.iacorr_multi(fromarrays((self.test_data1,)), count=64, level_size=16, norm=norm)
    data = multitau.normalize_multi(data, bg, var, norm=norm)
    x_, out = multitau.log_merge(*data)
    self.assertTrue(np.allclose(out0, out))
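
Both tests above exercise the same compute, normalize and merge pipeline. Below is a minimal self-contained sketch of that pipeline on synthetic data; the array shape, the complex dtype and the import paths (cddm.core.stats, cddm.multitau) are assumptions, not taken from the test fixtures:

import numpy as np
from cddm.core import stats
from cddm import multitau

# synthetic complex data standing in for the FFTs of 64 video frames
test_data = np.random.randn(64, 8, 16) + 1j * np.random.randn(64, 8, 16)

bg, var = stats(test_data)
data = multitau.acorr_multi(test_data, level_size=16, norm=2)
data = multitau.normalize_multi(data, bg, var, norm=2)
t, corr = multitau.log_merge(*data)  # quasi-logarithmically spaced delay times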
Example #3
def _get_avg_data(self):
    # normalize the raw multitau data with the stored background and variance
    data = normalize_multi(self.data,
                           self.background,
                           self.variance,
                           norm=self.norm,
                           scale=self.scale,
                           mask=self.mask)
    # merge the fast (linear) and slow (multilevel) parts onto a common,
    # quasi-logarithmic time axis
    t, data = log_merge(*data)
    # average over the wavevector axis (-2); masked entries are NaN and ignored
    avg_data = np.nanmean(data, axis=-2)
    return t, avg_data
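
The nanmean call above collapses the wavevector axis while ignoring masked-out entries. A small illustration on dummy data (the shapes are assumptions chosen for the example):

import numpy as np

# dummy merged data: 10 wavevectors (axis -2), 32 delay times (axis -1)
merged = np.random.rand(10, 32)
merged[3] = np.nan                 # masked wavevectors show up as NaN rows
avg = np.nanmean(merged, axis=-2)  # shape (32,); the NaN row is ignored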
Example #4
# imports assumed for this example (the cddm paths are assumptions);
# dual_video_simulator, window_video, NRUN and PERIOD come from the
# example's setup and are not shown here
import importlib
import numpy as np
from numpy.random import seed  # assumed; the original may use a library-specific seed helper
from cddm.video import multiply
from cddm.fft import rfft2
from cddm.multitau import iccorr_multi, normalize_multi, log_merge


def calculate(binning=1):
    out = None

    for i in range(NRUN):

        print("Run {}/{}".format(i + 1, NRUN))

        importlib.reload(dual_video_simulator)  # recreates the video iterator

        # reset the seed, because dual_video_simulator calls seed(0)
        seed(i)

        t1, t2 = dual_video_simulator.t1, dual_video_simulator.t2

        video = multiply(dual_video_simulator.video, window_video)

        #: if the intensity of the light source flickers, each frame can be
        #: normalized to its mean intensity
        #video = normalize_video(video)

        #: perform rfft2 and crop the result, keeping only the first kimax and kjmax wavenumbers
        fft = rfft2(video, kimax=51, kjmax=0)

        #: you can also normalize each frame with respect to the [0,0] component of the fft
        #: this is therefore equivalent to normalize_video
        #fft = normalize_fft(fft)

        #: now perform the cross-correlation calculation with default parameters
        data, bg, var = iccorr_multi(fft,
                                     t1,
                                     t2,
                                     level_size=16,
                                     binning=binning,
                                     period=PERIOD,
                                     auto_background=True)
        # perform normalization and merge the data

        # norms 5 and 7 are redundant, but we calculate them for easier indexing
        for norm in (1, 2, 3, 5, 6, 7, 9, 10, 11, 13, 14, 15):

            fast, slow = normalize_multi(data, bg, var, norm=norm, scale=True)

            # we merge with binning (averaging) of the linear part enabled/disabled
            x, y = log_merge(fast, slow, binning=binning)

            if out is None:
                # allocate output on the first pass; norm values go up to 15,
                # hence size 16 along the second axis
                out = np.empty(shape=(NRUN, 16) + y.shape, dtype=y.dtype)
            out[i, norm] = y

    return x, out
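
A hedged usage sketch for calculate: averaging the per-run curves for a single normalization mode; the norm index 6 and the standard-error estimate are illustrative choices, not part of the original script:

# hypothetical driver for calculate(); assumes numpy is imported as np
x, out = calculate(binning=1)

mean = np.nanmean(out[:, 6], axis=0)  # average over the NRUN runs
err = np.nanstd(out[:, 6], axis=0) / np.sqrt(out.shape[0])  # run-to-run error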
Example #5
#: you can also normalize each frame with respect to the [0,0] component of the fft
#: this is therefore equivalent to normalize_video
#fft = normalize_fft(fft)

# assumed imports for this example (the cddm paths are assumptions);
# fft, NFRAMES and DATA_PATH are defined earlier in the original script
import numpy as np
from cddm.multitau import iacorr_multi, normalize_multi, log_merge
from cddm.viewer import MultitauViewer

if __name__ == "__main__":
    import os.path as p

    #: now perform the auto-correlation calculation with default parameters, using the iterative algorithm
    data, bg, var = iacorr_multi(fft, count=NFRAMES)

    #: inspect the data
    viewer = MultitauViewer(scale=True)
    viewer.set_data(data, bg, var)
    viewer.set_mask(k=25, angle=0, sector=30)
    viewer.plot()
    viewer.show()

    # perform normalization and merge the data
    fast, slow = normalize_multi(data, bg, var, scale=True)

    #: save the normalized raw data to numpy files
    np.save(p.join(DATA_PATH, "auto_correlate_multi_raw_fast.npy"), fast)
    np.save(p.join(DATA_PATH, "auto_correlate_multi_raw_slow.npy"), slow)

    x, y = log_merge(fast, slow)

    #: save the normalized merged data to numpy files
    np.save(p.join(DATA_PATH, "auto_correlate_multi_t.npy"), x)
    np.save(p.join(DATA_PATH, "auto_correlate_multi_data.npy"), y)
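
The saved files can be loaded back for inspection or fitting. A minimal sketch, assuming the same DATA_PATH; the matplotlib usage and the wavevector index are illustrative assumptions:

import os.path as p
import numpy as np
import matplotlib.pyplot as plt

x = np.load(p.join(DATA_PATH, "auto_correlate_multi_t.npy"))
y = np.load(p.join(DATA_PATH, "auto_correlate_multi_data.npy"))

# plot one wavevector's correlation function (index choice is arbitrary)
plt.semilogx(x, y[4, 12])
plt.xlabel("time (frames)")
plt.ylabel("g")
plt.show()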