def local_df(img_folder, seg_length=50, winsize=50, step=25):
    """
    Compute local density fluctuations of the image sequence in img_folder.

    Args:
    img_folder -- folder containing .tif image sequence
    seg_length -- number of frames of each segment of video, over which the
                  standard deviation is evaluated
    winsize -- window size (pixel) passed to corrLib.divide_windows()
    step -- step (pixel) passed to corrLib.divide_windows()

    Returns:
    dict with keys 't' and 'std': 't' is a sequence of segment start frames,
    'std' is a list of 2d arrays of local standard deviations, one per segment.
    """
    l = corrLib.readseq(img_folder)
    num_frames = len(l)
    assert num_frames > seg_length
    stdL = []
    tL = range(0, num_frames, seg_length)
    for n in tL:
        # frame indices belonging to this segment (last one may be shorter)
        img_nums = range(n, min(n + seg_length, num_frames))
        l_sub = l.loc[img_nums]
        img_seq = []
        for num, i in l_sub.iterrows():
            img = io.imread(i.Dir)
            # Originally hard-coded windowsize=[50, 50], step=25, ignoring the
            # winsize/step parameters; defaults preserve the old behavior.
            X, Y, I = corrLib.divide_windows(img, windowsize=[winsize, winsize],
                                             step=step)
            img_seq.append(I)
        # Stack directly into (k, m, n). The original np.stack([img_seq]) +
        # np.squeeze was redundant and, for a single-frame segment, squeezed
        # away the frame axis so the std was taken over image rows instead.
        img_stack = np.stack(img_seq, axis=0)
        std = np.std(img_stack, axis=0)
        stdL.append(std)
    return {'t': tL, 'std': stdL}
def dt_track(folder, target_number, min_dist=20, feature_size=7000, feature_number=1):
    """
    Track features in every image of a sequence with dt_track_1 and collect
    the coordinates into a single trajectory table.

    Args:
    folder -- folder containing the image sequence
    target_number -- intended number of features
                     (NOTE(review): not forwarded to dt_track_1, which is
                     called with a hard-coded 15 — confirm intent)
    min_dist -- minimum distance between features
                (NOTE(review): also not forwarded; dt_track_1 is called with
                min_dist=15 regardless — confirm intent)
    feature_size, feature_number -- currently unused

    Returns:
    traj -- DataFrame of detected coordinates with a 'Name' column holding the
            source frame name; frames whose tracking fails are skipped.
    """
    l = readseq(folder)
    coords_list = []
    for num, i in l.iterrows():
        print('Processing frame ' + i.Name + ' ...')
        img = io.imread(i.Dir)
        try:
            coords = dt_track_1(img, 15, min_dist=15)
        # Narrowed from a bare except: so KeyboardInterrupt/SystemExit still
        # propagate; any tracking failure is reported and the frame skipped.
        except Exception:
            print(
                'Frame {:s} tracking failed, use dt_track_1(img) to find out the cause'
                .format(i.Name))
            continue
        coords_list.append(coords.assign(Name=i.Name))
    # Concatenate once at the end: DataFrame.append was deprecated (removed in
    # pandas 2.0) and quadratic when used inside a loop.
    if not coords_list:
        return pd.DataFrame()
    return pd.concat(coords_list)
def down_size_imseq(folder, windowsize=None, step=25):
    """
    Downsize an image sequence of k images in the given folder and return them
    as a numpy array of size k*m*n, where m and n are the downsized dimensions
    of each image.

    Args:
    folder -- folder of image sequence
    windowsize -- parameter of corrLib.divide_windows(), pixel
                  (default [50, 50])
    step -- parameter of corrLib.divide_windows(), pixel

    Returns:
    stack -- numpy array of size k*m*n
    """
    # Avoid the original mutable default argument ([50, 50]), which is shared
    # across calls; None sentinel keeps the default behavior identical.
    if windowsize is None:
        windowsize = [50, 50]
    l = corrLib.readseq(folder)
    I_list = []
    for num, i in l.iterrows():
        img = io.imread(i.Dir)
        X, Y, I = corrLib.divide_windows(img, windowsize=windowsize, step=step)
        I_list.append(I)
    stack = np.stack(I_list, axis=0)
    return stack
# Script chunk: pair images separated by `interval` frames with their PIV
# results and downsample both onto the PIV grid.
# NOTE(review): `folder_img` is referenced below but not defined in the
# visible region (presumably sys.argv[1], above this chunk) — confirm.
folder_piv = sys.argv[2]      # folder of PIV result csv files
folder_out = sys.argv[3]      # output folder
interval = int(sys.argv[4])   # frame interval between the image pair
fps = int(sys.argv[5])        # frames per second of the video
step = int(sys.argv[6])       # grid step (pixel)
if os.path.exists(folder_out) == False:
    os.makedirs(folder_out)
# Record the formulas and parameters used for this run.
with open(os.path.join(folder_out, 'log.txt'), 'w') as f:
    f.write('adv = dc/interval + ux/fps*dcx/step + uy/fps*dcy/step\n')
    f.write('vdc = ux/fps*dcx/step + uy/fps*dcy/step\n')
    f.write('interval = {:d} frames\n'.format(interval))
    f.write('fps = {:d}\n'.format(fps))
    f.write('step = {:d}\n'.format(step))
# load piv and corresponding images
limg = cl.readseq(folder_img)
l = cl.readdata(folder_piv)
for num, i in l.iterrows():
    # Only process the last third of the PIV files.
    if num >= int(len(l)/3*2):
        name = i.Name
        n0 = int(name.split('-')[0])  # first frame number from the 'n0-n1' name
        n1 = n0 + interval
        # Skip pairs whose second frame is past the end of the sequence.
        if n1 <= len(limg) - 1:
            I0 = io.imread(os.path.join(folder_img, '{:04d}.tif'.format(n0)))
            I1 = io.imread(os.path.join(folder_img, '{:04d}.tif'.format(n1)))
            # Downsample both images onto the PIV grid.
            X, Y, I0s = cl.divide_windows(I0, windowsize=[50, 50], step=25)
            X, Y, I1s = cl.divide_windows(I1, windowsize=[50, 50], step=25)
            pivData = pd.read_csv(i.Dir)
            # Reshape the flat u column onto the image-grid shape.
            ux = np.array(pivData.u).reshape(I0s.shape)
            # NOTE(review): chunk appears truncated here — the loop body
            # likely continues past the visible region.
# Script chunk: split an image sequence into segments of seg_length frames
# and stack each segment's images into a (frames, h, w) array.
folder = sys.argv[1]           # input image sequence folder
folder_out = sys.argv[2]       # output folder
seg_length = int(sys.argv[3])  # frames per segment
# optional
normalize = 0  # default to no normalization
if len(sys.argv) > 4:
    normalize = int(sys.argv[4])
if os.path.exists(folder_out) == False:
    os.makedirs(folder_out)
# Record run parameters.
with open(os.path.join(folder_out, 'log.txt'), 'w') as f:
    f.write('img folder: {}\n'.format(folder))
    f.write('out folder: {}\n'.format(folder_out))
    # Fixed typo in log text: 'sengment' -> 'segment'.
    f.write('segment length: {:d} frames\n'.format(seg_length))
l = cl.readseq(folder)
length = len(l)
# Segment boundary frames; a trailing partial segment (after the last
# boundary) is dropped by the range(1, len(seg)) loop below.
seg = range(0, length, seg_length)
# img = io.imread(l.Dir.loc[0])
# size_min = 5
# L = min(img.shape)
# boxsize = np.unique(np.floor(np.logspace(np.log10(size_min), np.log10((L-size_min)/2),50)))
data = pd.DataFrame()
for idx in range(1, len(seg)):
    # Frames belonging to segment [seg[idx-1], seg[idx]).
    l_crop = l.loc[(l.index>=seg[idx-1])&(l.index<seg[idx])]
    img_list = []
    for num, i in l_crop.iterrows():
        img_list.append(io.imread(i.Dir))
    img_stack = np.stack(img_list, axis=0)
    # NOTE(review): chunk appears truncated — processing of img_stack (and use
    # of `normalize` / `data`) likely continues past the visible region.
from scipy.interpolate import griddata
from matplotlib_scalebar.scalebar import ScaleBar
from matplotlib_scalebar.scalebar import SI_LENGTH
import matplotlib as mpl
import sys
import time
# Script chunk: for each image, band-pass filter it and scan density over
# logarithmically spaced box sizes, logging progress per frame.
folder = sys.argv[1]         # input image sequence folder
output_folder = sys.argv[2]  # output folder
if os.path.exists(output_folder) == False:
    os.makedirs(output_folder)
# Truncate/create the log file.
with open(os.path.join(output_folder, 'log.txt'), 'w') as f:
    pass
l = readseq(folder)
# First image is read only to determine the available box-size range.
img = io.imread(l.Dir.loc[0])
size_min = 20  # smallest box size (pixel)
L = min(img.shape)
# Log-spaced box sizes from size_min to (L - size_min) / 2, duplicates removed
# after flooring to integers.
boxsize = np.unique(np.floor(np.logspace(np.log10(size_min), np.log10((L-size_min)/2),100)))
df = pd.DataFrame()
for num, i in l.iterrows():
    # Append a progress entry for this frame.
    with open(os.path.join(output_folder, 'log.txt'), 'a') as f:
        f.write(time.asctime() + ' // ' + i.Name + ' calculated\n')
    img = io.imread(i.Dir)
    # bpass/match_hist are project helpers defined elsewhere; presumably a
    # band-pass filter followed by histogram matching back to the original
    # image — confirm against their definitions.
    bp = bpass(img, 3, 100)
    bp_mh = match_hist(bp, img)
    framedf = pd.DataFrame()
    for bs in boxsize:
        # NOTE(review): step=50*size_min (= 1000 px) looks unusually large
        # for a window step — confirm this is intended.
        X, Y, I = divide_windows(bp_mh, windowsize=[bs, bs], step=50*size_min)
        # NOTE(review): chunk appears truncated — the per-box computation
        # likely continues past the visible region.
# Script chunk: run PIV on consecutive image pairs (frames 0-1, 2-3, ...) and
# save one csv per pair.
# Optional CLI overrides. Changed `== 7` / `== 8` to `>= 7` / `>= 8`: with the
# original equality tests, a call supplying BOTH optional arguments (8 argv
# elements) set `header` but silently skipped `smooth`.
# NOTE(review): defaults for smooth/header (and winsize, overlap, fps,
# input_folder, output_folder) are assigned above the visible region.
if len(sys.argv) >= 7:
    smooth = bool(int(sys.argv[6]))
if len(sys.argv) >= 8:
    header = bool(int(sys.argv[7]))
if not os.path.exists(output_folder):
    os.makedirs(output_folder)
# Record PIV parameters for this run.
with open(os.path.join(output_folder, 'log.txt'), 'w') as f:
    f.write('Params\n')
    f.write('winsize: ' + str(winsize) + '\n')
    f.write('overlap: ' + str(overlap) + '\n')
    f.write('fps: ' + str(fps) + '\n')
dt = 1 / fps  # seconds per frame
l = readseq(input_folder)
k = 0  # serve as a flag for I0 and I1: even count -> I0, odd count -> I1
for num, i in l.iterrows():
    if k % 2 == 0:
        I0 = io.imread(i.Dir)
        n0 = i.Name
        k += 1
    else:
        I1 = io.imread(i.Dir)
        k += 1
        # Time difference uses the actual frame numbers, so gaps in the
        # sequence are accounted for.
        frame_data = PIV1(I0, I1, winsize, overlap, (int(i.Name)-int(n0))*dt,
                          smooth=smooth)
        frame_data.to_csv(os.path.join(output_folder, n0 + '-' + i.Name+'.csv'),
                          index=False, header=header)
        with open(os.path.join(output_folder, 'log.txt'), 'a') as f:
            f.write(time.asctime() + ' // ' + n0 + '-' + i.Name + ' calculated\n')
# Script chunk: threshold each image and label connected regions for particle
# measurement.
# NOTE(review): chunk starts mid-script — img_folder and output_folder are
# referenced below but defined above the visible region (likely sys.argv[1-2]).
area_min = int(sys.argv[3])      # minimum particle area (pixel)
area_max = int(sys.argv[4])      # maximum particle area (pixel)
header = bool(int(sys.argv[5]))  # whether to write csv headers
# Thresholding method. NOTE(review): name shadows the builtin `filter`.
filter = 'yen'
if len(sys.argv) > 6:
    filter = sys.argv[6]
if os.path.exists(output_folder) == False:
    os.makedirs(output_folder)
# Record run parameters (area_max is not logged — possibly an oversight).
with open(os.path.join(output_folder, 'log.txt'), 'w') as f:
    f.write('img_folder: ' + img_folder + '\n')
    f.write('output_folder: ' + output_folder + '\n')
    f.write('area_min: ' + str(area_min) + '\n')
    f.write('filter: ' + str(filter) + '\n')
l = readseq(img_folder)
for num, i in l.iterrows():
    img = io.imread(i.Dir)
    if filter == 'yen':
        filt = filters.threshold_yen(img)
    # NOTE(review): if filter != 'yen', `filt` is never assigned and the next
    # line raises NameError — confirm whether other filters were intended.
    filtered_img = img < filt  # mask of pixels below the threshold
    label_image = measure.label(filtered_img)
    # temp data in this frame, save in DataFrame
    Area_temp = []
    X_temp = []
    Y_temp = []
    Major_temp = []
    Minor_temp = []
    Angle_temp = []
    Slice_temp = []
    # NOTE(review): chunk appears truncated — the region-measurement loop that
    # fills these lists likely follows past the visible region.
# Script chunk: correlate divergence fields with density images over a range
# of time lags tau.
# NOTE(review): this chunk begins inside an if/else whose `if` condition is
# above the visible region; the first assignment is the tail of that branch.
    options = sys.argv[4]
else:
    options = 'default'
tauL = range(-200, 200, 10)  # time lags (frames) to scan
# folder_den = r'E:\Google Drive\data_share\Dynamics_raw\processed_image\60_bp'
# folder_div = r'E:\Google Drive\data_share\Dynamics_raw\concentration_velocity_field\div_result_50\60'
# folder_ixdiv = r'E:\Github\Python\Correlation\test_images\div\ixdiv_test\60'
# tauL = range(-90, 90, 3)
if os.path.exists(folder_ixdiv) == False:
    os.makedirs(folder_ixdiv)
# Truncate/create the log file.
with open(os.path.join(folder_ixdiv, 'log.txt'), 'w') as f:
    pass
lden = cl.readseq(folder_den)   # density (image) sequence
ldiv = cl.readdata(folder_div)  # divergence csv files
CLL = []
for tau in tauL:
    CL = []
    tL = []
    for num, i in ldiv.iterrows():
        div = pd.read_csv(i.Dir)
        name = i.Name.split('-')[0]  # first frame number of the 'n0-n1' pair
        # img_name = str(int(name) - tau)
        # Zero-padded image name shifted by the lag tau.
        img_name = str('{:04d}'.format(int(name) - tau))
        # Skip lags for which no matching density image exists.
        if os.path.exists(os.path.join(folder_den, img_name + '.tif')) == False:
            print('no match image')
            continue
        img = io.imread(os.path.join(folder_den, img_name + '.tif'))
        # NOTE(review): chunk appears truncated — the correlation computation
        # that fills CL/tL/CLL likely follows past the visible region.