def test_equivalence_diff_3(self):
    """Check that the 'diff' method with norm = 3 reproduces the reference
    'corr' result with norm = 1, for both ccorr_multi and the iterative
    iacorr_multi implementations."""
    norm = 3
    bg, var = stats(self.test_data1)

    # Reference curve: plain 'corr' method with norm = 1.
    reference = multitau.acorr_multi(
        self.test_data1, level_size=16, norm=1, method="corr", binning=0)
    reference = multitau.normalize_multi(reference, bg, var, norm=1)
    tau, expected = multitau.log_merge(*reference)

    # Cross-correlation of the data with itself using the 'diff' method.
    cross = multitau.ccorr_multi(
        self.test_data1, self.test_data1,
        level_size=16, norm=norm, method="diff", binning=0)
    cross = multitau.normalize_multi(cross, bg, var, norm=norm)
    tau, actual = multitau.log_merge(*cross)
    self.assertTrue(np.allclose(expected, actual))

    # Iterative auto-correlation over the same data.
    # NOTE(review): this part uses norm=1 with method="diff" (not norm=3);
    # presumably intentional for the iterative variant — confirm.
    iterative, bg, var = multitau.iacorr_multi(
        fromarrays((self.test_data1, )),
        count=64, level_size=16, norm=1, method="diff", binning=0)
    iterative = multitau.normalize_multi(iterative, bg, var, norm=1)
    tau, actual = multitau.log_merge(*iterative)
    self.assertTrue(np.allclose(expected, actual))
def test_equivalence_norm_2(self):
    """With norm = 2, acorr_multi, ccorr_multi (data against itself) and the
    iterative iacorr_multi must all yield the same merged correlation curve."""
    norm = 2
    bg, var = stats(self.test_data1)

    # Baseline: direct auto-correlation.
    baseline = multitau.acorr_multi(self.test_data1, level_size=16, norm=norm)
    baseline = multitau.normalize_multi(baseline, bg, var, norm=norm)
    tau, expected = multitau.log_merge(*baseline)

    # Cross-correlation of the data with itself must match the baseline.
    cross = multitau.ccorr_multi(
        self.test_data1, self.test_data1, level_size=16, norm=norm)
    cross = multitau.normalize_multi(cross, bg, var, norm=norm)
    tau, actual = multitau.log_merge(*cross)
    self.assertTrue(np.allclose(expected, actual))

    # Iterative implementation must also match.
    iterative, bg, var = multitau.iacorr_multi(
        fromarrays((self.test_data1,)), count=64, level_size=16, norm=norm)
    iterative = multitau.normalize_multi(iterative, bg, var, norm=norm)
    tau, actual = multitau.log_merge(*iterative)
    self.assertTrue(np.allclose(expected, actual))
#: if the intesity of light source flickers you can normalize each frame to the intensity of the frame #video = normalize_video(video) #: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers. fft = rfft2(video, kimax=KIMAX, kjmax=KJMAX) #: you can also normalize each frame with respect to the [0,0] component of the fft #: this it therefore equivalent to normalize_video #fft = normalize_fft(fft) if __name__ == "__main__": import os.path as p #: now perform auto correlation calculation with default parameters using iterative algorithm data, bg, var = iacorr_multi(fft, count=NFRAMES) #: inspect the data viewer = MultitauViewer(scale=True) viewer.set_data(data, bg, var) viewer.set_mask(k=25, angle=0, sector=30) viewer.plot() viewer.show() #perform normalization and merge data fast, slow = normalize_multi(data, bg, var, scale=True) #: save the normalized raw data to numpy files np.save(p.join(DATA_PATH, "auto_correlate_multi_raw_fast.npy"), fast) np.save(p.join(DATA_PATH, "auto_correlate_multi_raw_slow.npy"), slow)