def test_acorr_regular_1(self):
    """Check that the 'fft' and 'corr' acorr methods give matching results
    after normalization, over all scale/mode/axis combinations."""
    for use_scale in (True, False):
        for norm_mode in ("corr", "diff"):
            for ax in (0, 1, 2):
                bg, var = core.stats(test_data1, axis=ax)
                fft_data = core.acorr(test_data1, norm=1, method="fft", axis=ax)
                self.out = core.normalize(fft_data, bg, var, norm=1,
                                          mode=norm_mode, scale=use_scale)
                ref_data = core.acorr(test_data1, norm=1, method="corr", axis=ax)
                reference = core.normalize(ref_data, bg, var, norm=1,
                                           mode=norm_mode, scale=use_scale)
                self.assertTrue(allclose(self.out, reference))
def acorr_save(fft_array, path_out, method='diff', mode='diff'):
    """Compute, normalize and save the auto-correlation of *fft_array*.

    Adapted from the examples in the cddm package. The linear-time
    normalized data is written below *path_out*; a summary dict and the
    data itself are returned.
    """
    #: auto correlation calculation with default parameters
    data = acorr(fft_array, method=method)
    bg, var = stats(fft_array)
    #: normalization and merging step
    data_lin = normalize(data, bg, var, scale=True, mode=mode)
    np.save(path_out / f'auto_correlate_data_lin_{method}_{mode}.npy', data_lin)
    # #: change size, to define time resolution in log space
    # x, y = log_average(data_lin, size=16)
    # #: save the normalized data to numpy files
    # np.save(path_out / 'auto_correlate_t.npy', x)
    # np.save(path_out / 'auto_correlate_data.npy', y)
    summary = {
        'bg': bg,
        'var': var,
        'data_lin_shape': data_lin.shape,
        # 't_shape': x.shape,
        # 'data_shape': y.shape,
    }
    return (summary, data_lin)  # , x, y)
def calculate():
    """Run NRUN simulations and collect normalized auto-correlation data.

    Returns:
        out: array of shape ``(NRUN, 12) + y.shape`` with normalized data
            indexed by run and norm flag (slots for unused norm values are
            left uninitialized).
        bgs: list of per-run background estimates from ``stats``.
        variances: list of per-run variance estimates from ``stats``.
    """
    out = None
    bgs = []
    variances = []  # renamed from `vars` to avoid shadowing the builtin
    for i in range(NRUN):
        print("Run {}/{}".format(i + 1, NRUN))

        seed(i)
        importlib.reload(video_simulator)  # recreates iterator with new seed

        video = multiply(video_simulator.video, window_video)
        fft = rfft2(video, kimax=51, kjmax=0)
        fft_array, = asarrays(fft, NFRAMES_RANDOM)

        data = acorr(fft_array)
        bg, var = stats(fft_array)

        bgs.append(bg)
        variances.append(var)
        for norm in (1, 2, 3, 5, 6, 7, 9, 10, 11):
            y = normalize(data, bg, var, norm=norm, scale=True)
            if out is None:
                # allocate on first use, once the output shape is known
                out = np.empty(shape=(NRUN, 12) + y.shape, dtype=y.dtype)
            out[i, norm] = y

    return out, bgs, variances
def test_auto_equivalence_1(self):
    """Verify that acorr + stats normalization matches iacorr (which
    computes background/variance internally), for every method."""
    for method in ("corr", "fft", "diff"):
        bg, var = core.stats(test_data1, axis=0)
        direct = core.acorr(test_data1, n=8, norm=1, method=method)
        expected = core.normalize(direct, bg, var, norm=1)
        iterative, bg, var = core.iacorr(test_data1, n=8, norm=1, method=method)
        actual = core.normalize(iterative, bg, var, norm=1)
        self.assertTrue(np.allclose(expected, actual))
def calculate():
    """Run NRUN randomized-time simulations and collect normalized data.

    Norms 7 and 11 use the subtracted weight ``w``; norm 3 uses the
    baseline weight ``wp`` (both are module-level arrays).

    Returns:
        out: array of shape ``(NRUN, 12) + y.shape`` with normalized data
            indexed by run and norm flag (unused norm slots stay
            uninitialized).
        bgs: list of per-run background estimates from ``stats``.
        variances: list of per-run variance estimates from ``stats``.
    """
    out = None
    bgs = []
    variances = []  # renamed from `vars` to avoid shadowing the builtin
    for i in range(NRUN):
        print("Run {}/{}".format(i + 1, NRUN))

        seed(i)
        importlib.reload(video_simulator)  # recreates iterator with new seed

        t = video_simulator.t
        video = multiply(video_simulator.video, window_video)

        #: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers.
        fft = rfft2(video, kimax=KIMAX, kjmax=0)
        fft_array, = asarrays(fft, NFRAMES_RANDOM)

        data = acorr(fft_array, t=t, n=int(NFRAMES / DT_RANDOM))
        bg, var = stats(fft_array)

        bgs.append(bg)
        variances.append(var)
        for norm in (1, 2, 3, 5, 6, 7, 9, 10, 11):
            # weighted (subtracted)
            if norm in (7, 11):
                y = normalize(data, bg, var, norm=norm, scale=True,
                              weight=np.moveaxis(w, 0, -1))
            # weighted prime (baseline)
            elif norm in (3,):
                y = normalize(data, bg, var, norm=norm, scale=True,
                              weight=np.moveaxis(wp, 0, -1))
            else:
                y = normalize(data, bg, var, norm=norm, scale=True)
            if out is None:
                # allocate on first use, once the output shape is known
                out = np.empty(shape=(NRUN, 12) + y.shape, dtype=y.dtype)
            out[i, norm] = y

    return out, bgs, variances
def calculate():
    """Run NRUN simulations over a random-time video and collect normalized
    auto-correlation data for a range of norm flags.

    Returns:
        out: array of shape ``(NRUN, 12) + y.shape`` with normalized data
            indexed by run and norm flag (unused norm slots stay
            uninitialized; norms 5 and 7 are redundant but computed for
            easier indexing).
        bgs: list of per-run background estimates from ``stats``.
        variances: list of per-run variance estimates from ``stats``.
    """
    out = None
    bgs = []
    variances = []  # renamed from `vars` to avoid shadowing the builtin
    for i in range(NRUN):
        print("Run {}/{}".format(i + 1, NRUN))

        seed(i)
        importlib.reload(video_simulator)  # recreates iterator with new seed

        t = video_simulator.t
        video = multiply(video_simulator.video, window_video)

        #: if the intensity of the light source flickers you can normalize
        #: each frame to the intensity of the frame
        # video = normalize_video(video)

        #: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers.
        fft = rfft2(video, kimax=51, kjmax=0)

        #: you can also normalize each frame with respect to the [0,0] component of the fft
        #: this is therefore equivalent to normalize_video
        # fft = normalize_fft(fft)

        fft_array, = asarrays(fft, NFRAMES_RANDOM)

        data = acorr(fft_array, t=t, n=int(NFRAMES / DT_RANDOM))
        bg, var = stats(fft_array)

        #: alternatively, perform the calculation with live display:
        # data, bg, var = iacorr(fft, t, auto_background=True, n=NFRAMES)

        bgs.append(bg)
        variances.append(var)
        for norm in (1, 2, 3, 5, 6, 7, 9, 10, 11):
            y = normalize(data, bg, var, norm=norm, scale=True)
            if out is None:
                # allocate on first use, once the output shape is known
                out = np.empty(shape=(NRUN, 12) + y.shape, dtype=y.dtype)
            out[i, norm] = y

    return out, bgs, variances
#: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers.
fft = rfft2(video, kimax=KIMAX, kjmax=KJMAX)

#: you can also normalize each frame with respect to the [0,0] component of the fft
#: this is therefore equivalent to normalize_video
# fft = normalize_fft(fft)

# load into a numpy array
fft_array, = asarrays(fft, NFRAMES_RANDOM)

if __name__ == "__main__":
    import os.path as p

    #: auto correlation calculation with default parameters
    data = acorr(fft_array, t=video_simulator.t, n=int(NFRAMES / DT_RANDOM))
    bg, var = stats(fft_array)

    for norm_flag in (1, 2, 3, 5, 6, 7, 9, 10, 11):
        #: perform normalization and merge data
        data_lin = normalize(data, bg, var, scale=True, norm=norm_flag)
        #: change size, to define time resolution in log space
        x, y = log_average(data_lin, size=16)
        #: save the normalized data to numpy files
        np.save(p.join(DATA_PATH, "corr_random_t.npy"), x * DT_RANDOM)
        np.save(p.join(DATA_PATH, "corr_random_data_norm{}.npy".format(norm_flag)), y)
#: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers.
fft = rfft2(video, kimax=KIMAX, kjmax=KJMAX)

#: you can also normalize each frame with respect to the [0,0] component of the fft
#: this is therefore equivalent to normalize_video
# fft = normalize_fft(fft)

# load into a numpy array
fft_array, = asarrays(fft, NFRAMES_FAST)

if __name__ == "__main__":
    import os.path as p

    #: auto correlation calculation using the fft method
    data = acorr(fft_array, n=int(NFRAMES / DT_FAST), method="fft")
    bg, var = stats(fft_array)

    for norm_flag in range(8):
        #: perform normalization and merge data
        data_lin = normalize(data, bg, var, scale=True, norm=norm_flag)
        if norm_flag == 6:
            np.save(p.join(DATA_PATH, "corr_fast_linear.npy"), data_lin)
        #: perform log averaging
        x, y = log_average(data_lin, size=16)
        #: save the normalized data to numpy files
        np.save(p.join(DATA_PATH, "corr_fast_t.npy"), x * DT_FAST)
"""Demonstrates the use and equivalence of method and mode options."""
from examples.auto_correlate import fft_array
from cddm.core import acorr, normalize, stats
import matplotlib.pyplot as plt

bg, var = stats(fft_array)

for method in ("corr", "diff"):
    if method == "corr":
        # use the fft implementation, so that it is faster
        data = acorr(fft_array, method="fft")
    else:
        data = acorr(fft_array, method="diff", n=256)
    for mode in ("diff", "corr"):
        data_lin = normalize(data, bg, var, mode=mode, norm=2, scale=True)
        plt.semilogx(data_lin[4, 12],
                     label="mode = {}; method = {}".format(mode, method))

plt.legend()
plt.show()
#: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers.
fft = rfft2(video, kimax=KIMAX, kjmax=KJMAX)

#: you can also normalize each frame with respect to the [0,0] component of the fft
#: this is therefore equivalent to normalize_video
# fft = normalize_fft(fft)

# load into a numpy array
fft_array, = asarrays(fft, NFRAMES)

if __name__ == "__main__":
    import os.path as p

    #: auto correlation calculation with default parameters
    data = acorr(fft_array)
    bg, var = stats(fft_array)

    #: perform normalization and merge data
    data_lin = normalize(data, bg, var, scale=True)

    #: inspect the data interactively
    viewer = DataViewer()
    viewer.set_data(data_lin)
    viewer.set_mask(k=25, angle=0, sector=30)
    viewer.plot()
    viewer.show()

    #: change size, to define time resolution in log space
    x, y = log_average(data_lin, size=16)
#: perform rfft2 and crop results, to take only first kimax and first kjmax wavenumbers.
fft = rfft2(video, kimax=KIMAX, kjmax=KJMAX)

#: you can also normalize each frame with respect to the [0,0] component of the fft
#: this is therefore equivalent to normalize_video
# fft = normalize_fft(fft)

# load into a numpy array
fft_array, = asarrays(fft, NFRAMES_STANDARD)

if __name__ == "__main__":
    import os.path as p

    #: auto correlation calculation using the fft method
    data = acorr(fft_array, n=int(NFRAMES / DT_STANDARD), method="fft")
    bg, var = stats(fft_array)

    for norm_flag in range(8):
        #: perform normalization and merge data
        data_lin = normalize(data, bg, var, scale=True, norm=norm_flag)
        if norm_flag == 6:
            np.save(p.join(DATA_PATH, "corr_standard_linear.npy"), data_lin)
        #: perform log averaging
        x, y = log_average(data_lin, size=16)
        #: save the normalized data to numpy files
        np.save(p.join(DATA_PATH, "corr_standard_t.npy"), x * DT_STANDARD)
        np.save(
            p.join(DATA_PATH, "corr_standard_data_norm{}.npy".format(norm_flag)), y)