def read_piv_stack(folder, cutoff=None):
    """Read every PIV frame in *folder* and stack the velocity components.

    Args:
    folder -- PIV data folder containing *.csv files
    cutoff -- (optional) frame index; reading stops once it is exceeded

    Returns:
    ustack, vstack -- 3D arrays indexed as (t, x, y)
    """
    file_table = readdata(folder, "csv")
    u_frames = []
    v_frames = []
    for frame_idx, entry in file_table.iterrows():
        _, _, u, v = read_piv(entry.Dir)
        u_frames.append(u)
        v_frames.append(v)
        # stop reading once the frame index has passed the cutoff
        if cutoff is not None and frame_idx > cutoff:
            break
    return np.stack(u_frames, axis=0), np.stack(v_frames, axis=0)
def average_data(directory, columns=None):
    """Take the element-wise average of all *.csv data in a given directory.

    Args:
    directory -- folder which contains *.csv data files
    columns -- (optional) list of column labels of columns to be averaged;
               defaults to ['CA', 'CV']

    Returns:
    averaged -- DataFrame with averaged data; columns not in *columns* are
                copied from the last file read (assumed identical across
                files, e.g. coordinates — TODO confirm)

    Raises:
    IndexError -- if a requested column is missing from any data file
    """
    # Fix: the original default was a mutable list literal, which is shared
    # across calls; use a None sentinel instead (same effective default).
    if columns is None:
        columns = ['CA', 'CV']
    k = 0
    l = corrLib.readdata(directory)
    for num, i in l.iterrows():
        data = pd.read_csv(i.Dir)
        # check if given label exists in data
        for label in columns:
            if label not in data:
                raise IndexError(
                    'Column \'{0}\' does not exist in given data'.format(
                        label))
        if k == 0:
            temp = data[columns]
        else:
            temp += data[columns]
        k += 1
    # finally, append all other columns (in data but not columns) to averaged
    other_cols = [label for label in data.columns if label not in columns]
    averaged = pd.concat([temp / k, data[other_cols]], axis=1)
    return averaged
# --- Script fragment: advection / concentration-flux computation ---
# NOTE(review): this chunk starts and ends mid-script; `folder_out`,
# `interval`, `folder_img` and `folder_piv` are defined before this
# fragment, and the for-loop body continues after it.
fps = int(sys.argv[5])    # frames per second of the source video
step = int(sys.argv[6])   # PIV step size passed on the command line
if os.path.exists(folder_out) == False:
    os.makedirs(folder_out)
# record the formulas and parameters used in this run
with open(os.path.join(folder_out, 'log.txt'), 'w') as f:
    f.write('adv = dc/interval + ux/fps*dcx/step + uy/fps*dcy/step\n')
    f.write('vdc = ux/fps*dcx/step + uy/fps*dcy/step\n')
    f.write('interval = {:d} frames\n'.format(interval))
    f.write('fps = {:d}\n'.format(fps))
    f.write('step = {:d}\n'.format(step))
limg = cl.readseq(folder_img)
# load piv and corresponding images
l = cl.readdata(folder_piv)
for num, i in l.iterrows():
    # only process the last third of the PIV sequence
    if num >= int(len(l)/3*2):
        name = i.Name
        n0 = int(name.split('-')[0])  # first frame number of the PIV pair
        n1 = n0 + interval            # frame `interval` frames later
        if n1 <= len(limg) - 1:
            I0 = io.imread(os.path.join(folder_img, '{:04d}.tif'.format(n0)))
            I1 = io.imread(os.path.join(folder_img, '{:04d}.tif'.format(n1)))
            # coarse-grain both images on the same grid as the PIV data
            X, Y, I0s = cl.divide_windows(I0, windowsize=[50, 50], step=25)
            X, Y, I1s = cl.divide_windows(I1, windowsize=[50, 50], step=25)
            pivData = pd.read_csv(i.Dir)
            ux = np.array(pivData.u).reshape(I0s.shape)
            uy = np.array(pivData.v).reshape(I0s.shape)
            # finite differences of coarse-grained intensity along x and y
            dcx = I0s - np.roll(I0s, 1, axis=1)
            dcy = I0s - np.roll(I0s, 1, axis=0)
# --- Script fragment: image x divergence correlation over time lags ---
# NOTE(review): fragment starts inside an if/else (the matching `if` that
# sets `options` is above this chunk) and ends inside the inner loop.
else:
    options = 'default'
tauL = range(-200, 200, 10)  # time lags (frames) to scan
# folder_den = r'E:\Google Drive\data_share\Dynamics_raw\processed_image\60_bp'
# folder_div = r'E:\Google Drive\data_share\Dynamics_raw\concentration_velocity_field\div_result_50\60'
# folder_ixdiv = r'E:\Github\Python\Correlation\test_images\div\ixdiv_test\60'
# tauL = range(-90, 90, 3)
if os.path.exists(folder_ixdiv) == False:
    os.makedirs(folder_ixdiv)
# create an empty log file for this run
with open(os.path.join(folder_ixdiv, 'log.txt'), 'w') as f:
    pass
lden = cl.readseq(folder_den)   # density (image) sequence
ldiv = cl.readdata(folder_div)  # divergence data files
CLL = []                        # one correlation list per tau
for tau in tauL:
    CL = []
    tL = []
    for num, i in ldiv.iterrows():
        div = pd.read_csv(i.Dir)
        name = i.Name.split('-')[0]
        # img_name = str(int(name) - tau)
        # image shifted by tau frames, zero-padded to 4 digits
        img_name = str('{:04d}'.format(int(name) - tau))
        if os.path.exists(os.path.join(folder_den, img_name + '.tif')) == False:
            print('no match image')
            continue
        img = io.imread(os.path.join(folder_den, img_name + '.tif'))
        if options == 'raw':
# --- Script: correlate concentration gradient with PIV velocity ---
# NOTE(review): assumes stack.npy holds one image per PIV frame, indexed by
# the leading number of the PIV file name — confirm against the producer.
small_img_folder = sys.argv[1]
piv_folder = sys.argv[2]
out_folder = sys.argv[3]
if os.path.exists(out_folder) == False:
    os.makedirs(out_folder)
# start a fresh log recording the input/output folders
with open(os.path.join(out_folder, 'log.txt'), 'w') as f:
    f.write('small_img_folder ' + small_img_folder + '\n')
    f.write('piv_folder: ' + piv_folder + '\n')
    f.write('out_folder: ' + out_folder + '\n')
    f.write(time.asctime() + ' // Computation starts!\n')
stack = np.load(os.path.join(small_img_folder, 'stack.npy'))
l = corrLib.readdata(piv_folder, 'csv')
corr_list = []     # whole field
corr_sn_list = []  # single number
for num, i in l.iterrows():
    pivData = pd.read_csv(i.Dir)
    pivData = pd.read_csv(i.Dir) if False else pivData  # (no-op guard removed)
    rearranged_pivData = rearrange_pivdata(pivData)
    n = int(i.Name.split('-')[0])  # frame number from the file name
    I = stack[n]
    grad = conc_grad(I)
    # element-wise product summed over the leading axis -> correlation field
    corr = np.sum(grad * rearranged_pivData, axis=0)
    corr_list.append(corr)
    # single-number summary: mean normalized by standard deviation
    corr_sn_list.append(corr.mean() / corr.std())
    with open(os.path.join(out_folder, 'log.txt'), 'a') as f:
        f.write(time.asctime() + ' // computing frame {:04d}\n'.format(n))
corr_stack = np.stack(corr_list, axis=0)
np.save(os.path.join(out_folder, 'corr_whole.npy'), corr_stack)
# NOTE(review): this fragment begins mid-call — the keyword arguments below
# belong to a call inside a smoothing function whose definition starts above
# this chunk. Columns 0-9 of coefL appear to be the fit coefficients for u
# and columns 10-19 those for v (names suggest polynomial terms up to 3rd
# order — TODO confirm against the fitting code).
u_1=coefL[:, 0], u_x=coefL[:, 1], u_y=coefL[:, 2], u_x2=coefL[:, 3],
u_y2=coefL[:, 4], u_xy=coefL[:, 5], u_x3=coefL[:, 6], u_y3=coefL[:, 7],
u_x2y=coefL[:, 8], u_y2x=coefL[:, 9], v_1=coefL[:, 10], v_x=coefL[:, 11],
v_y=coefL[:, 12], v_x2=coefL[:, 13], v_y2=coefL[:, 14], v_xy=coefL[:, 15],
v_x3=coefL[:, 16], v_y3=coefL[:, 17], v_x2y=coefL[:, 18], v_y2x=coefL[:, 19])
return smoothed_piv, coef


# multi-frame process using MLS
# Script entry: smooth the first PIV frame of a hard-coded folder, then drop
# into the debugger to inspect the result interactively.
folder = r'E:\moreData\02042020\piv_result_50\80-1'
l = cl.readdata(folder)
pivData = pd.read_csv(l.Dir[0])  # first PIV file in the folder
dm = 100                         # MLS parameter (presumably px — TODO confirm)
smoothed_piv, coef = mls_smoothing_1(pivData, dm)
pdb.set_trace()
# winsize = int(sys.argv[4]) # step = int(sys.argv[5]) piv_folder = r'E:\moreData\02042020\piv_result_50\80-1' img_folder = r'E:\moreData\02042020\80-1\bp' output_folder = r'E:\moreData\02042020\fields\80-1' winsize = 50 step = 25 if os.path.exists(output_folder) == 0: os.makedirs(output_folder) with open(os.path.join(output_folder, 'log.txt'), 'w') as f: pass count = 0 ld = corrLib.readdata(piv_folder) for num, i in ld.iterrows(): pivData = pd.read_csv(os.path.join(piv_folder, i.Dir)) folder, file = os.path.split(i.Dir) name_ind = file.find('-') name = file[0:name_ind] imgDir = os.path.join(img_folder, name + '.tif') img = io.imread(imgDir) c, v, divcn, divcv, divv = corrLib.div_field(img, pivData, winsize, step) # Plot fig, ax = plt.subplots(nrows=2, ncols=2, figsize=(10, 10), dpi=100) ax[0, 0].imshow(c, cmap='seismic') ax[0, 0].set_title('$c$ field') ax[0, 1].imshow(divv, cmap='seismic') ax[0, 1].set_title('$\\nabla \cdot v$ field') ax[1, 0].imshow(divcn, cmap='seismic')
# --- Script: compute energy spectra for the tail of a PIV sequence ---
# Usage: <script> piv_folder out_folder [percentile] [sample_spacing]
piv_folder = sys.argv[1]
out_folder = sys.argv[2]
percentile = 0.8     # keep only frames with index > max * percentile
# Fix: sample_spacing was previously undefined unless argv[4] was given,
# raising NameError at energy_spectrum(...). Default to 1.0 (unit grid
# spacing) — TODO confirm this matches energy_spectrum's expectation.
sample_spacing = 1.0
if len(sys.argv) > 3:
    percentile = float(sys.argv[3])
if len(sys.argv) > 4:
    sample_spacing = float(sys.argv[4])
if not os.path.exists(out_folder):
    os.makedirs(out_folder)
# start a fresh log with the run parameters
with open(os.path.join(out_folder, 'log.txt'), 'w') as f:
    f.write('piv_folder: ' + piv_folder + '\n')
    f.write('out_folder: ' + out_folder + '\n')
    f.write('percentile: ' + str(percentile) + '\n')
    f.write(time.asctime() + ' // Computation starts!\n')
l = readdata(piv_folder, 'csv')
# keep only the last (1 - percentile) fraction of the frames
l_crop = l[l.index > l.index.max()*percentile]
for num, i in l_crop.iterrows():
    pivData = pd.read_csv(i.Dir)
    es = energy_spectrum(pivData, sample_spacing)
    es.to_csv(os.path.join(out_folder, i.Name+'.csv'), index=False)
    with open(os.path.join(out_folder, 'log.txt'), 'a') as f:
        f.write(time.asctime() + ' // {} finished!\n'.format(i.Name))
with open(os.path.join(out_folder, 'log.txt'), 'a') as f:
    f.write(time.asctime() + ' // Computation ends!\n')
"""
EDIT
10022020 -- First edit
10192020 -- Add sample_spacing argument
"""
# NOTE(review): fragment begins inside a method of the `piv_data` class
# (it references `self.dt`); `mode`, `plot`, `vm_list` and the enclosing
# loop over the PIV files are defined above this chunk.
x, y, u, v = read_piv(i.Dir)
if mode == "abs":
    vm = np.nanmean((u**2 + v**2)**0.5)
elif mode == "square":
    # NOTE(review): identical to the "abs" branch — for a true mean-square
    # velocity this would presumably be np.nanmean(u**2 + v**2); confirm
    # the intended semantics before changing.
    vm = np.nanmean((u**2 + v**2)**0.5)
vm_list.append(vm)
if plot == True:
    # optional diagnostic plot of mean velocity vs. time
    fig, ax = plt.subplots(figsize=(3.5, 3), dpi=100)
    ax.plot(np.arange(len(vm_list)) * self.dt, vm_list)
    ax.set_xlabel("time (s)")
    ax.set_ylabel("mean velocity (px/s)")
return pd.DataFrame({
    "t": np.arange(len(vm_list)) * self.dt,
    "v_mean": vm_list
})
# %% codecell
# Script entry: smoke-test the piv_data class on a sample PIV result folder.
if __name__ == '__main__':
    # %% codecell
    folder = r"test_images\moving_mask_piv\piv_result"
    l = readdata(folder, "csv")
    piv = piv_data(l, fps=50)
    vacf = piv.vacf(smooth_window=2, xlim=[0, 0.1])
    # autocorr1d(np.array([1,1,1]))
    # %% codecell
    corr1d = piv.corrS1d(n=600, xlim=[0, 170], plot=True)
    # %% codecell
    piv.mean_velocity(plot=True)
    # %% codecell